code
stringlengths
4
991k
repo_name
stringlengths
6
116
path
stringlengths
4
249
language
stringclasses
30 values
license
stringclasses
15 values
size
int64
4
991k
input_ids
listlengths
502
502
token_type_ids
listlengths
502
502
attention_mask
listlengths
502
502
labels
listlengths
502
502
<?php /* * This file is part of the Claroline Connect package. * * (c) Claroline Consortium <consortium@claroline.net> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace Claroline\CoreBundle\Command\Dev; use Symfony\Bundle\FrameworkBundle\Command\ContainerAwareCommand; use Symfony\Component\Console\Input\InputInterface; use Symfony\Component\Console\Output\OutputInterface; use Symfony\Component\Console\Input\InputOption; use Symfony\Component\Yaml\Yaml; class TranslationCheckerCommand extends ContainerAwareCommand { protected function configure() { $this->setName('claroline:translation:checker') ->setDescription('Search the translations and order them in their different config.yml files'); $this->addOption( 'file', null, InputOption::VALUE_OPTIONAL, 'Wich translation file do you want to be parsed ?' ); } protected function execute(InputInterface $input, OutputInterface $output) { $ds = DIRECTORY_SEPARATOR; $projectDir = $this->getContainer()->getParameter('kernel.root_dir')."{$ds}..{$ds}vendor{$ds}claroline{$ds}core-bundle"; $routingFolder = "{$projectDir}{$ds}Claroline/CoreBundle/Resources/translations"; $fileName = $input->getOption('file'); if ($fileName == null) { foreach (new \DirectoryIterator($routingFolder) as $fileInfo) { $this->parseTranslationFile($fileInfo); } } else { $this->parseTranslationFile(new \SplFileInfo("{$projectDir}{$ds}..{$ds}$fileName")); } } private function parseTranslationFile(\SplFileInfo $fileInfo) { if ($fileInfo->isFile()) { try { $this->order($fileInfo); } catch (ParseException $e) { printf("Unable to parse the YAML string: %s", $e->getMessage()); } } } private function order($fileInfo) { $value = Yaml::parse($fileInfo->getRealPath()); ksort($value); $yaml = Yaml::dump($value); file_put_contents($fileInfo->getRealPath(), $yaml); } }
arnaudbey/CoreBundle
Command/Dev/TranslationCheckerCommand.php
PHP
gpl-3.0
2,257
[ 30522, 1026, 1029, 25718, 1013, 1008, 1008, 2023, 5371, 2003, 2112, 1997, 1996, 18856, 10464, 4179, 7532, 7427, 1012, 1008, 1008, 1006, 1039, 1007, 18856, 10464, 4179, 12360, 1026, 12360, 1030, 18856, 10464, 4179, 1012, 5658, 1028, 1008, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
using System; using System.Windows; namespace XamlCSS.WPF.TestApp { /// <summary> /// Interaktionslogik für "App.xaml" /// </summary> public partial class App : Application { public string cssStyle1 = @" @import ""Resources/baseStyle.scss""; Button { Foreground: Red; } .container { Background: Yellow; Button { Foreground: Brown; } } .jumbo { FontSize: 50; } Grid Grid { TextBlock:nth-of-type(1) { Grid.Row: 0; Grid.Column: 1; Text: #Binding Message; } TextBlock:nth-of-type(2) { Grid.Row: 1; Grid.Column: 0; Text: #StaticResource testString; } } "; public string cssStyle2 = @" Window { Background: #333333; Foreground: #ffffff; } Button { Foreground: Red; Height: 40; Width: 100; } .container { Background: #aaaaaa; } .container Button { Foreground: Brown; } #thebutton { FontSize: 30; } .jumbo { FontSize: 50; FontStyle: Italic; HorizontalAlignment: Center; } Grid Grid { TextBlock:nth-of-type(1) { Grid.Row: 1; Grid.Column: 0; } TextBlock:nth-of-type(2) { Grid.Row: 0; Grid.Column: 0; } } "; public string currentStyle; public App() { //dynamic t = this; //var u = t.Resources; Css.Initialize(); InitializeComponent(); } } }
warappa/XamlCSS
XamlCSS.WPF.TestApp/App.xaml.cs
C#
mit
1,515
[ 30522, 2478, 2291, 1025, 2478, 2291, 1012, 3645, 1025, 3415, 15327, 1060, 3286, 15472, 4757, 1012, 1059, 14376, 1012, 3231, 29098, 1063, 1013, 1013, 1013, 1026, 12654, 1028, 1013, 1013, 1013, 6970, 4817, 9285, 21197, 5480, 6519, 1000, 10439...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.engine.impl.persistence.cache; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.flowable.engine.common.impl.persistence.entity.Entity; /** * @author Joram Barrez */ public class EntityCacheImpl implements EntityCache { protected Map<Class<?>, Map<String, CachedEntity>> cachedObjects = new HashMap<Class<?>, Map<String,CachedEntity>>(); @Override public CachedEntity put(Entity entity, boolean storeState) { Map<String, CachedEntity> classCache = cachedObjects.get(entity.getClass()); if (classCache == null) { classCache = new HashMap<String, CachedEntity>(); cachedObjects.put(entity.getClass(), classCache); } CachedEntity cachedObject = new CachedEntity(entity, storeState); classCache.put(entity.getId(), cachedObject); return cachedObject; } @Override @SuppressWarnings("unchecked") public <T> T findInCache(Class<T> entityClass, String id) { CachedEntity cachedObject = null; Map<String, CachedEntity> classCache = cachedObjects.get(entityClass); if (classCache == null) { classCache = findClassCacheByCheckingSubclasses(entityClass); } if (classCache != null) { cachedObject = classCache.get(id); } if (cachedObject != null) { return (T) cachedObject.getEntity(); } return null; } protected Map<String, CachedEntity> findClassCacheByCheckingSubclasses(Class<?> entityClass) { for (Class<?> clazz : 
cachedObjects.keySet()) { if (entityClass.isAssignableFrom(clazz)) { return cachedObjects.get(clazz); } } return null; } @Override public void cacheRemove(Class<?> entityClass, String entityId) { Map<String, CachedEntity> classCache = cachedObjects.get(entityClass); if (classCache == null) { return; } classCache.remove(entityId); } @Override public <T> Collection<CachedEntity> findInCacheAsCachedObjects(Class<T> entityClass) { Map<String, CachedEntity> classCache = cachedObjects.get(entityClass); if (classCache != null) { return classCache.values(); } return null; } @Override @SuppressWarnings("unchecked") public <T> List<T> findInCache(Class<T> entityClass) { Map<String, CachedEntity> classCache = cachedObjects.get(entityClass); if (classCache == null) { classCache = findClassCacheByCheckingSubclasses(entityClass); } if (classCache != null) { List<T> entities = new ArrayList<T>(classCache.size()); for (CachedEntity cachedObject : classCache.values()) { entities.add((T) cachedObject.getEntity()); } return entities; } return Collections.emptyList(); } public Map<Class<?>, Map<String, CachedEntity>> getAllCachedEntities() { return cachedObjects; } @Override public void close() { } @Override public void flush() { } }
motorina0/flowable-engine
modules/flowable-engine/src/main/java/org/flowable/engine/impl/persistence/cache/EntityCacheImpl.java
Java
apache-2.0
3,610
[ 30522, 1013, 1008, 7000, 2104, 1996, 15895, 6105, 1010, 2544, 1016, 1012, 1014, 1006, 1996, 1000, 6105, 1000, 1007, 1025, 1008, 2017, 2089, 2025, 2224, 2023, 5371, 3272, 1999, 12646, 2007, 1996, 6105, 1012, 1008, 2017, 2089, 6855, 1037, 6...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/* +------------------------------------------------------------------------+ | Phalcon Framework | +------------------------------------------------------------------------+ | Copyright (c) 2011-2014 Phalcon Team (http://www.phalconphp.com) | +------------------------------------------------------------------------+ | This source file is subject to the New BSD License that is bundled | | with this package in the file docs/LICENSE.txt. | | | | If you did not receive a copy of the license and are unable to | | obtain it through the world-wide-web, please send an email | | to license@phalconphp.com so we can send you a copy immediately. | +------------------------------------------------------------------------+ | Authors: Andres Gutierrez <andres@phalconphp.com> | | Eduar Carvajal <eduar@phalconphp.com> | +------------------------------------------------------------------------+ */ #include "mvc/view/engine.h" #include "mvc/view/engineinterface.h" #include "di/injectable.h" #include "kernel/main.h" #include "kernel/memory.h" #include "kernel/object.h" #include "kernel/fcall.h" /** * Phalcon\Mvc\View\Engine * * All the template engine adapters must inherit this class. This provides * basic interfacing between the engine and the Phalcon\Mvc\View component. 
*/ zend_class_entry *phalcon_mvc_view_engine_ce; PHP_METHOD(Phalcon_Mvc_View_Engine, __construct); PHP_METHOD(Phalcon_Mvc_View_Engine, getContent); PHP_METHOD(Phalcon_Mvc_View_Engine, partial); PHP_METHOD(Phalcon_Mvc_View_Engine, getView); ZEND_BEGIN_ARG_INFO_EX(arginfo_phalcon_mvc_view_engine___construct, 0, 0, 1) ZEND_ARG_INFO(0, view) ZEND_ARG_INFO(0, dependencyInjector) ZEND_END_ARG_INFO() static const zend_function_entry phalcon_mvc_view_engine_method_entry[] = { PHP_ME(Phalcon_Mvc_View_Engine, __construct, arginfo_phalcon_mvc_view_engine___construct, ZEND_ACC_PUBLIC|ZEND_ACC_CTOR) PHP_ME(Phalcon_Mvc_View_Engine, getContent, NULL, ZEND_ACC_PUBLIC) PHP_ME(Phalcon_Mvc_View_Engine, partial, arginfo_phalcon_mvc_view_engineinterface_partial, ZEND_ACC_PUBLIC) PHP_ME(Phalcon_Mvc_View_Engine, getView, NULL, ZEND_ACC_PUBLIC) PHP_FE_END }; /** * Phalcon\Mvc\View\Engine initializer */ PHALCON_INIT_CLASS(Phalcon_Mvc_View_Engine){ PHALCON_REGISTER_CLASS_EX(Phalcon\\Mvc\\View, Engine, mvc_view_engine, phalcon_di_injectable_ce, phalcon_mvc_view_engine_method_entry, ZEND_ACC_EXPLICIT_ABSTRACT_CLASS); zend_declare_property_null(phalcon_mvc_view_engine_ce, SL("_view"), ZEND_ACC_PROTECTED TSRMLS_CC); zend_class_implements(phalcon_mvc_view_engine_ce TSRMLS_CC, 1, phalcon_mvc_view_engineinterface_ce); return SUCCESS; } /** * Phalcon\Mvc\View\Engine constructor * * @param Phalcon\Mvc\ViewInterface $view * @param Phalcon\DiInterface $dependencyInjector */ PHP_METHOD(Phalcon_Mvc_View_Engine, __construct){ zval *view, *dependency_injector = NULL; phalcon_fetch_params(0, 1, 1, &view, &dependency_injector); if (!dependency_injector) { dependency_injector = PHALCON_GLOBAL(z_null); } phalcon_update_property_this(this_ptr, SL("_view"), view TSRMLS_CC); phalcon_update_property_this(this_ptr, SL("_dependencyInjector"), dependency_injector TSRMLS_CC); } /** * Returns cached ouput on another view stage * * @return array */ PHP_METHOD(Phalcon_Mvc_View_Engine, getContent) { zval *view = 
phalcon_fetch_nproperty_this(this_ptr, SL("_view"), PH_NOISY TSRMLS_CC); PHALCON_RETURN_CALL_METHODW(view, "getcontent"); } /** * Renders a partial inside another view * * @param string $partialPath * @param array $params * @return string */ PHP_METHOD(Phalcon_Mvc_View_Engine, partial){ zval *partial_path, *params = NULL, *view; phalcon_fetch_params(0, 1, 1, &partial_path, &params); if (!params) { params = PHALCON_GLOBAL(z_null); } view = phalcon_fetch_nproperty_this(this_ptr, SL("_view"), PH_NOISY TSRMLS_CC); PHALCON_RETURN_CALL_METHODW(view, "partial", partial_path, params); } /** * Returns the view component related to the adapter * * @return Phalcon\Mvc\ViewInterface */ PHP_METHOD(Phalcon_Mvc_View_Engine, getView){ RETURN_MEMBER(this_ptr, "_view"); }
unisys12/phalcon-hhvm
ext/mvc/view/engine.cpp
C++
bsd-2-clause
4,384
[ 30522, 1013, 1008, 1009, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 101...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
var global = require('../../global'); module.exports = function (packing, offset) { var items = [].concat.apply([], packing.items); var iso = "FM.FP-GJ-15-003"; // var number = packing.code; // var colorName = packing.colorName; var orderType = (packing.orderType || "").toString().toLowerCase() === "printing" ? "Printing" : "Finishing"; var locale = global.config.locale; var buyerName = packing.buyerName ? packing.buyerName : ""; var colorType = packing.colorType ? packing.colorType : ""; var construction = packing.construction ? packing.construction : ""; var buyerAddress = packing.buyerAddress ? packing.buyerAddress : ""; var moment = require('moment'); moment.locale(locale.name); var footerStack = []; var footerStackValue = []; var footerStackDivide = []; if ((packing.orderType || "").toString().toLowerCase() === "solid") { footerStack = ['Buyer', "Jenis Order", "Jenis Warna", 'Konstruksi', 'Tujuan']; footerStackValue = [buyerName, orderType, colorType, construction, buyerAddress]; footerStackDivide = [':', ":", ":", ':', ':']; } else if ((packing.orderType || "").toString().toLowerCase() === "printing") { footerStack = ['Buyer', "Jenis Order", 'Konstruksi', 'Design/Motif', 'Tujuan']; footerStackValue = [buyerName, orderType, construction, packing.designNumber && packing.designCode ? 
`${packing.designNumber} - ${packing.designCode}` : "", buyerAddress]; footerStackDivide = [':', ":", ":", ':', ':']; } else { footerStack = ['Buyer', "Jenis Order", 'Konstruksi', 'Tujuan']; footerStackValue = [buyerName, orderType, construction, buyerAddress]; footerStackDivide = [':', ":", ":", ':']; } var header = [{ columns: [{ columns: [{ width: '*', stack: [{ text: 'BON PENYERAHAN PRODUKSI', style: ['size15'], alignment: "center" }] }] }] }]; var line = [{ canvas: [{ type: 'line', x1: 0, y1: 5, x2: 555, y2: 5, lineWidth: 0.5 } ] }]; var subheader = [{ columns: [{ columns: [{ width: '*', stack: [{ text: iso, style: ['size09', 'bold'], alignment: "right" } ] }] }] }]; var subheader2 = [{ columns: [{ width: '60%', columns: [{ width: '*', stack: ['Kepada Yth. Bagian Penjualan ', `Bersama ini kami kirimkan hasil produksi: Inspeksi ${orderType}`], }], style: ['size08'] } , { width: '5%', text: '' }, { width: '40%', columns: [{ width: '40%', stack: ['No', 'Sesuai No Order'], }, { width: '5%', stack: [':', ':'], }, { width: '*', stack: [packing.code, packing.productionOrderNo], }], style: ['size08'] } ] }]; var thead = [{ text: 'NO', style: 'tableHeader' }, { text: 'BARANG', style: 'tableHeader' }, { text: `Jumlah (${packing.packingUom})`, style: 'tableHeader' }, { text: 'Panjang (Meter)', style: 'tableHeader' }, { text: 'Panjang Total (Meter)', style: 'tableHeader' }, { text: 'Berat Total (Kg)', style: 'tableHeader' }, { text: 'Keterangan', style: 'tableHeader' } ]; var gradeItem = ""; var totalJumlah = 0; var totalBerat = 0; var totalPanjang = 0; var totalPanjangTotal = 0; var totalBeratTotal = 0; var tbody = items.map(function (item, index) { // if (item.grade.toLowerCase() == "a" || item.grade.toLowerCase() == "b" || item.grade.toLowerCase() == "c") { if (item.grade.toLowerCase() == "a") { gradeItem = "BQ"; } else { gradeItem = "BS"; } totalJumlah += item.quantity; totalBerat += item.weight; totalPanjang += item.length; totalPanjangTotal += item.length * 
item.quantity; totalBeratTotal += item.weight * item.quantity; return [{ text: (index + 1).toString() || '', style: ['size08', 'center'] }, { text: packing.colorName + ' ' + item.lot + ' ' + item.grade + ' ' + gradeItem, style: ['size08', 'center'] }, { text: item.quantity, style: ['size08', 'center'] }, { text: item.length, style: ['size08', 'center'] }, { text: (item.length * item.quantity).toFixed(2), style: ['size08', 'center'] }, { text: (item.weight * item.quantity).toFixed(2), style: ['size08', 'center'] }, { text: item.remark, style: ['size08', 'center'] } ]; }); var tfoot = [[{ text: " ", style: ['size08', 'center'] }, { text: "Total", style: ['size08', 'center'] }, { text: totalJumlah.toFixed(2), style: ['size08', 'center'] }, { text: totalPanjang.toFixed(2), style: ['size08', 'center'] }, { text: totalPanjangTotal.toFixed(2), style: ['size08', 'center'] }, { text: totalBeratTotal.toFixed(2), style: ['size08', 'center'] }, "",]]; tbody = tbody.length > 0 ? tbody : [ [{ text: "tidak ada barang", style: ['size08', 'center'], colSpan: 6 }, "", "", "", "", "", ""] ]; var table = [{ table: { widths: ['5%', '35%', '10%', '10%', '10%', '10%', '20%'], headerRows: 1, body: [].concat([thead], tbody, tfoot), } }]; var footer = [{ stack: [{ columns: [{ columns: [{ width: '15%', stack: footerStack }, { width: '2%', stack: footerStackDivide }, { width: '*', stack: footerStackValue }] }] } ], style: ['size08'] }, ]; var footer2 = ['\n', { columns: [{ width: '25%', stack: ['\n', 'Diterima oleh:', '\n\n\n\n', '( )'], style: ['center'] }, { width: '25%', stack: [], }, { width: '25%', stack: [], }, { width: '25%', stack: [`Sukoharjo, ${moment(packing.date).add(offset, 'h').format(locale.date.format)} `, 'Diserahkan oleh :', '\n\n\n\n', `( ${packing._createdBy} )`], style: ['center'] }], style: ['size08'] }]; var packingPDF = { pageSize: 'A5', pageOrientation: 'landscape', pageMargins: 20, // content: [].concat(header, line, subheader, subheader2, table, footer), content: 
[].concat(header, line, subheader, subheader2, table, footer, footer2), styles: { size06: { fontSize: 8 }, size07: { fontSize: 9 }, size08: { fontSize: 10 }, size09: { fontSize: 11 }, size10: { fontSize: 12 }, size15: { fontSize: 17 }, size30: { fontSize: 32 }, bold: { bold: true }, center: { alignment: 'center' }, left: { alignment: 'left' }, right: { alignment: 'right' }, justify: { alignment: 'justify' }, tableHeader: { bold: true, fontSize: 10, color: 'black', alignment: 'center' } } }; return packingPDF; }
indriHutabalian/dl-module
src/pdf/definitions/packing.js
JavaScript
mit
8,749
[ 30522, 13075, 3795, 1027, 5478, 1006, 1005, 1012, 1012, 1013, 1012, 1012, 1013, 3795, 1005, 1007, 1025, 11336, 1012, 14338, 1027, 3853, 1006, 14743, 1010, 16396, 1007, 1063, 13075, 5167, 1027, 1031, 1033, 1012, 9530, 11266, 1012, 6611, 1006...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<html> <head> <link rel="stylesheet" type="text/css" href="style.css"> </head> <body> <span class='rank0 0.0'>?6</span> <span class='rank0 0.0'>7</span> <span class='rank0 0.0'>8</span> <span class='rank0 0.0'>9</span> <span class='rank6 5.886281793427337'>10</span> <span class='rank5 5.380675333968457'>The</span> <span class='rank6 5.910214491268059'>New</span> <span class='rank5 5.027215308896533'>York</span> <span class='rank4 4.050174224831018'>copyright</span> <span class='rank2 1.7822710117187128'>reserved</span> <span class='rank3 2.5421264334187086'>Botanical</span> <span class='rank3 3.451197890939337'>Garden</span> </br> <span class='rank3 2.7567911231003848'>Form</span> <span class='rank4 3.9491477547735414'>48-R.</span> <span class='rank0 0.0'>8</span> <span class='rank3 3.4890632800057304'>TROPICAL</span> <span class='rank-2 -1.5188893385771962'>FOREST</span> <span class='rank1 1.2090757065657982'>HERBARIUM</span> <span class='rank22 21.865907816600597'>14785</span> <span class='rank0 0.0'>È</span> <span class='rank0 0.0'>;</span> </br> <span class='rank3 2.5421264334187086'>Botanical</span> <span class='rank20 19.515655058990756'>rfatae:</span> <span class='rank16 15.85013835474318'>Micropholia</span> <span class='rank0 0.2269638666185685'>Árare</span> <span class='rank8 8.391887420859'>iniaef</span> <span class='rank2 2.170560892596953'>alia</span> <span class='rank-9 -8.855910460777096'>(Accession</span> <span class='rank-5 -4.700653135656559'>camber</span> <span class='rank8 8.316700257931268'>ftÈr</span> </br> <span class='rank-1 -0.5100193904984991'>Pierre</span> <span class='rank-11 -11.025917259158149'>Washington</span> <span class='rank-5 -5.460941945403178'>office)</span> </br> <span class='rank15 15.09538534898661'>___;___________________-</span> <span class='rank0 0.0'>.</span> <span class='rank-5 -4.919912495177911'>Determine</span> <span class='rank0 0.0'>i</span> <span class='rank12 
12.10153415317285'>by____________________________</span> </br> <span class='rank-8 -8.490883123409645'>Country:</span> <span class='rank1 0.547932610981583'>Puerto</span> <span class='rank12 11.973292338273875'>Sico</span> <span class='rank-7 -6.949734589090987'>Political</span> <span class='rank-11 -11.008870241850495'>division:</span> <span class='rank2 1.6367727765069588'>CaribÈ</span> <span class='rank13 12.964171927139102'>Natl*</span> <span class='rank17 17.195084556862017'>OIT</span> <span class='rank9 8.635153989049803'>Si</span> </br> <span class='rank-3 -3.1578321192597727'>Locality:</span> <span class='rank7 7.280608326244492'>La</span> <span class='rank0 -0.48860384864321915'>Mina,</span> <span class='rank18 18.241212460765077'>Luquillo</span> <span class='rank-3 -2.7295458878849317'>lita.</span> <span class='rank0 0.0'>WUd</span> <span class='rank0 0.0'>g</span> <span class='rank-1 -1.026133135639121'>Planted</span> <span class='rank0 0.0'>?</span> <span class='rank7 6.953395415036075'>........m.</span> </br> <span class='rank4 4.10049846212253'>Tree</span> <span class='rank0 0.0'>E</span> <span class='rank6 5.774193201635061'>Shrub</span> <span class='rank0 0.0'>G</span> <span class='rank2 2.3626620705738084'>Vine</span> <span class='rank9 8.932105957873064'>D___D</span> <span class='rank19 19.48397808696363'>Heigit.15.-ftm.</span> <span class='rank7 7.347299700743164'>D.</span> <span class='rank7 7.159247469240224'>B.</span> <span class='rank17 17.4953827660339'>H...i.i??cms.</span> </br> <span class='rank1 1.2514065862491037'>Flower</span> <span class='rank-5 -5.175210644927034'>color,</span> <span class='rank0 0.3162456894951404'>odor;</span> <span class='rank-7 -7.145662834933276'>greenish</span> <span class='rank3 2.8156836385716737'>fruit</span> <span class='rank-5 -5.175210644927034'>color,</span> <span class='rank-8 -8.197456267000966'>texture:</span> </br> <span class='rank-10 -10.213852110217502'>Abundance,</span> <span class='rank2 
1.905123581406503'>site,</span> <span class='rank-4 -3.9373741790981924'>slope,</span> <span class='rank-10 -10.002131045514822'>vegetation</span> <span class='rank-3 -2.591882244416368'>type,</span> <span class='rank0 0.3761001161847233'>etc.:</span> <span class='rank-9 -8.764937937112862'>mountain</span> <span class='rank0 0.08761298916925853'>forest</span> </br> <span class='rank0 0.42756050891003383'>Uses,</span> <span class='rank4 4.275482490296059'>and</span> <span class='rank2 2.0540213974414776'>part</span> <span class='rank0 -0.16898566941258508'>used;</span> <span class='rank-4 -3.998174999181586'>locally</span> <span class='rank5 4.994283754122226'>or</span> <span class='rank-21 -21.139910876038243'>commercially:</span> </br> <span class='rank1 0.5246593024462953'>Other</span> <span class='rank-6 -5.923160636691719'>notes:</span> </br> <span class='rank0 0.0'>†</span> </br> <span class='rank7 7.248859627929912'>Á†Sa</span> </br> <span class='rank2 2.4566110390389504'>Local</span> <span class='rank1 0.8622148962401397'>name:</span> <span class='rank-2 -1.7127079282230326'>Collector:</span> <span class='rank1 0.6479994193832965'>Elbert</span> <span class='rank0 0.0'>L*</span> <span class='rank26 25.785450618891325'>ijittl,</span> <span class='rank7 7.298951518821651'>Jr.</span> </br> <span class='rank4 4.071225049211737'>Date</span> <span class='rank6 6.129628052059067'>of</span> <span class='rank26 25.615161226582266'>eotlertfon:</span> <span class='rank17 16.641699318677233'>11.</span> <span class='rank-6 -5.727691403076932'>CcHeitorÕs</span> <span class='rank43 42.80407169961167'>No,ãZ.-5.ã</span> </br> <span class='rank9 8.60037400668735'>Spec;Jtseaa</span> <span class='rank-2 -2.245741785904748'>Collected</span> <span class='rank-2 -2.285164926922988'>Under</span> <span class='rank0 0.0'>.</span> <span class='rank7 6.930405896811377'>be</span> <span class='rank-9 -8.779774109125974'>Auspices</span> <span class='rank0 0.0'>o*</span> <span class='rank5 
4.551609905707915'>the</span> </br> <span class='rank10 10.021448350169694'>V.</span> <span class='rank7 7.347299700743164'>S.</span> <span class='rank-12 -11.744243055692252'>Department</span> <span class='rank6 6.129628052059067'>of</span> <span class='rank-16 -16.021998188372265'>Agriculture.</span> <span class='rank0 0.31872471013264203'>Forest</span> <span class='rank-7 -6.917864310961015'>Service;</span> </br> <span class='rank-5 -5.088209686205261'>Tropical</span> <span class='rank0 0.31872471013264203'>Forest</span> <span class='rank-3 -3.4111693503747773'>iuxperinser.t</span> <span class='rank3 2.9768066677116494'>Stalina.</span> <span class='rank6 6.466100288680279'>Uio</span> <span class='rank0 0.0'>l-</span> <span class='rank0 0.0'>:</span> <span class='rank1 1.4802174172381868'>*dras*</span> <span class='rank1 0.547932610981583'>Puerto</span> <span class='rank5 5.499659773120652'>Svco</span> </br> <span class='rank0 0.0'><3></span> </br> <span class='rank0 0.0'><Y>•f</span> <span class='rank6 6.4730807232262'>iAGED</span> </br> <span class='rank43 42.670025258321594'>01029066</span> </br> </br></br> <strong>Legend - </strong> Level of confidence that token is an accurately-transcribed word</br> <span class='rank-13'>&nbsp;&nbsp;&nbsp;</span> extremely low <span class='rank-7'>&nbsp;&nbsp;&nbsp;</span> very low <span class='rank-1'>&nbsp;&nbsp;&nbsp;</span> low <span class='rank0'>&nbsp;&nbsp;&nbsp;</span> undetermined <span class='rank1'>&nbsp;&nbsp;&nbsp;</span> medium <span class='rank6'>&nbsp;&nbsp;&nbsp;</span> high <span class='rank16'>&nbsp;&nbsp;&nbsp;</span> very high</br> </body> </html>
idigbio-citsci-hackathon/carrotFacetNgram
carrot2-webapp-3.8.1/herballsilvertrigram/01029066.txt.html
HTML
apache-2.0
7,574
[ 30522, 1026, 16129, 1028, 1026, 2132, 1028, 1026, 4957, 2128, 2140, 1027, 1000, 6782, 21030, 2102, 1000, 2828, 1027, 1000, 3793, 1013, 20116, 2015, 1000, 17850, 12879, 1027, 1000, 2806, 1012, 20116, 2015, 1000, 1028, 1026, 1013, 2132, 1028,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<?php namespace HealthCareAbroad\LogBundle\Repository; use Doctrine\ORM\EntityRepository; /** * ErrorLogRepository * * This class was generated by the Doctrine ORM. Add your own custom * repository methods below. */ class ErrorLogRepository extends EntityRepository { }
richtermarkbaay/MEDTrip
src/HealthCareAbroad/LogBundle/Repository/ErrorLogRepository.php
PHP
mit
277
[ 30522, 1026, 1029, 25718, 3415, 15327, 9871, 7875, 3217, 4215, 1032, 8833, 27265, 2571, 1032, 22409, 1025, 2224, 8998, 1032, 2030, 2213, 1032, 9178, 2890, 6873, 28307, 2100, 1025, 1013, 1008, 1008, 1008, 7561, 21197, 2890, 6873, 28307, 2100...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
.width-1{width: 1%;}.width-2{width: 2%;}.width-3{width: 3%;}.width-4{width: 4%;}.width-5{width: 5%;}.width-6{width: 6%;} .width-7{width: 7%;}.width-8{width: 8%;}.width-9{width: 9%;}.width-10{width: 10%;}.width-11{width: 11%;}.width-12{width: 12%;} .width-13{width: 13%;}.width-14{width: 14%;}.width-15{width: 15%;}.width-16{width: 16%;}.width-17{width: 17%;}.width-18{width: 18%;} .width-19{width: 19%;}.width-20{width: 20%;}.width-21{width: 21%;}.width-22{width: 22%;}.width-23{width: 23%;}.width-24{width: 24%;} .width-25{width: 25%;}.width-26{width: 26%;}.width-27{width: 27%;}.width-28{width: 28%;}.width-29{width: 29%;}.width-30{width: 30%;} .width-31{width: 31%;}.width-32{width: 32%;}.width-33{width: 33%;}.width-34{width: 34%;}.width-35{width: 35%;}.width-36{width: 36%;} .width-37{width: 37%;}.width-38{width: 38%;}.width-39{width: 39%;}.width-40{width: 40%;}.width-41{width: 41%;}.width-42{width: 42%;} .width-43{width: 43%;}.width-44{width: 44%;}.width-45{width: 45%;}.width-46{width: 46%;}.width-47{width: 47%;}.width-48{width: 48%;} .width-49{width: 49%;}.width-50{width: 50%;}.width-51{width: 51%;}.width-52{width: 52%;}.width-53{width: 53%;}.width-54{width: 54%;} .width-55{width: 55%;}.width-56{width: 56%;}.width-57{width: 57%;}.width-58{width: 58%;}.width-59{width: 59%;}.width-60{width: 60%;} .width-61{width: 61%;}.width-62{width: 62%;}.width-63{width: 63%;}.width-64{width: 64%;}.width-65{width: 65%;}.width-66{width: 66%;} .width-67{width: 67%;}.width-68{width: 68%;}.width-69{width: 69%;}.width-70{width: 70%;}.width-71{width: 71%;}.width-72{width: 72%;} .width-73{width: 73%;}.width-74{width: 74%;}.width-75{width: 75%;}.width-76{width: 76%;}.width-77{width: 77%;}.width-78{width: 78%;} .width-79{width: 79%;}.width-80{width: 80%;}.width-81{width: 81%;}.width-82{width: 82%;}.width-83{width: 83%;}.width-84{width: 84%;} .width-85{width: 85%;}.width-86{width: 86%;}.width-87{width: 87%;}.width-88{width: 88%;}.width-89{width: 89%;}.width-90{width: 90%;} .width-91{width: 
91%;}.width-92{width: 92%;}.width-93{width: 93%;}.width-94{width: 94%;}.width-95{width: 95%;}.width-96{width: 96%;} .width-97{width: 97%;}.width-98{width: 98%;}.width-99{width: 99%;}.width-100{width: 100%;}
ramkumarizaap/handzforhire
assets/css/width.css
CSS
mit
2,200
[ 30522, 1012, 9381, 1011, 1015, 1063, 9381, 1024, 1015, 1003, 1025, 1065, 1012, 9381, 1011, 1016, 1063, 9381, 1024, 1016, 1003, 1025, 1065, 1012, 9381, 1011, 1017, 1063, 9381, 1024, 1017, 1003, 1025, 1065, 1012, 9381, 1011, 1018, 1063, 938...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
package amidst.gui; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import amidst.logging.Log; import amidst.resources.ResourceLoader; public class License { private InputStream fileStream; private String name; private String contents; private boolean loaded = false; public License(String name, String path) { this.name = name; try { fileStream = ResourceLoader.getResourceStream(path); } catch (NullPointerException e) { Log.w("Error finding license for: " + name + " at path: " + path); e.printStackTrace(); } } public String getName() { return name; } public void load() { if (loaded) { return; } BufferedReader fileReader = new BufferedReader(new InputStreamReader(fileStream)); BufferedReader bufferedReader = new BufferedReader(fileReader); try { StringBuilder stringBuilder = new StringBuilder(); String line = bufferedReader.readLine(); while (line != null) { stringBuilder.append(line); stringBuilder.append('\n'); line = bufferedReader.readLine(); } contents = stringBuilder.toString(); loaded = true; } catch (IOException e) { Log.w("Unable to read file: " + name + "."); e.printStackTrace(); } finally { try { bufferedReader.close(); } catch (IOException e) { Log.w("Unable to close BufferedReader for: " + name + "."); e.printStackTrace(); } } } public String getContents() { return contents; } public boolean isLoaded() { return loaded; } @Override public String toString() { return name; } }
Sedridor/AMIDST-CC
src/amidst/gui/License.java
Java
gpl-3.0
2,107
[ 30522, 7427, 17171, 1012, 26458, 1025, 12324, 9262, 1012, 22834, 1012, 17698, 2098, 16416, 4063, 1025, 12324, 9262, 1012, 22834, 1012, 22834, 10288, 24422, 1025, 12324, 9262, 1012, 22834, 1012, 20407, 25379, 1025, 12324, 9262, 1012, 22834, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<?php namespace Amber\Event\Parse; /** * Regex Parser * * Parses Regular Expressions * * @author Benjamin J. Anderson <andeb2804@gmail.com> * @package Amber\Event\Parse * @since Nov 4th, 2015 * @version v0.1 */ class Regex { protected $route; protected $regex; protected $matches; public function match( $route, $path ) { $this->route = $route; $this->regex = $this->route->path; $this->setRegexOptionalParams(); $this->setRegexParams(); $this->setRegexWildcard(); $this->regex = '#^' . $this->regex . '$#'; return preg_match($this->regex, $path, $this->matches); } public function getMatches() { return $this->matches; } protected function setRegexOptionalParams() { preg_match('#{/([a-z][a-zA-Z0-9_,]*)}#', $this->regex, $matches); if ($matches) { $repl = $this->getRegexOptionalParamsReplacement($matches[1]); $this->regex = str_replace($matches[0], $repl, $this->regex); } } protected function getRegexOptionalParamsReplacement( $list ) { $list = explode(',', $list); $head = $this->getRegexOptionalParamsReplacementHead($list); $tail = ''; foreach ($list as $name) { $head .= "(/{{$name}}"; $tail .= ')?'; } return $head . $tail; } protected function getRegexOptionalParamsReplacementHead( &$list ) { $head = ''; if (substr($this->regex, 0, 2) == '{/') { $name = array_shift($list); $head = "/({{$name}})?"; } return $head; } protected function setRegexParams() { $find = '#{([a-z][a-zA-Z0-9_]*)}#'; preg_match_all($find, $this->regex, $matches, PREG_SET_ORDER); foreach ($matches as $match) { $name = $match[1]; $subpattern = $this->getSubpattern($name); $this->regex = str_replace("{{$name}}", $subpattern, $this->regex); if (! isset($this->route->values[$name])) { $this->route->addValues(array($name => null)); } } } protected function getSubpattern( $name ) { if (isset($this->route->tokens[$name])) { return "(?P<{$name}>{$this->route->tokens[$name]})"; } return "(?P<{$name}>[^/]+)"; } protected function setRegexWildcard() { if (! 
$this->route->wildcard) { return; } $this->regex = rtrim($this->regex, '/') . "(/(?P<{$this->route->wildcard}>.*))?"; } }
Vandise/Amber
amber/event/src/parse/regex.php
PHP
mit
2,433
[ 30522, 1026, 1029, 25718, 3415, 15327, 8994, 1032, 2724, 1032, 11968, 3366, 1025, 1013, 1008, 1008, 1008, 19723, 10288, 11968, 8043, 1008, 1008, 11968, 8583, 3180, 11423, 1008, 1008, 1030, 3166, 6425, 1046, 1012, 5143, 1026, 1998, 15878, 22...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
package com.austinv11.collectiveframework.utils; /** * Simple class for figuring out how long things take to be done */ public class TimeProfiler { private long startTime; /** * Starts a profiler at the current instant */ public TimeProfiler() { startTime = System.currentTimeMillis(); } /** * Gets the amount of time lapsed from instantiation to the method call * @return The time (in ms) */ public long getTime() { return System.currentTimeMillis()-startTime; } /** * Gets the time this object was instantiated at * @return The time (in ms) */ public long getStartTime() { return startTime; } }
austinv11/CollectiveFramework
src/main/java/com/austinv11/collectiveframework/utils/TimeProfiler.java
Java
gpl-2.0
640
[ 30522, 7427, 4012, 1012, 5899, 2615, 14526, 1012, 7268, 15643, 6198, 1012, 21183, 12146, 1025, 1013, 1008, 1008, 1008, 3722, 2465, 2005, 23218, 2041, 2129, 2146, 2477, 2202, 2000, 2022, 2589, 1008, 1013, 2270, 2465, 2051, 21572, 8873, 3917,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<?php /** * * @package Google_Api_Ads_AdWords_v201605 * @subpackage v201605 */ class BatchJobServiceMutateResponse { const WSDL_NAMESPACE = "https://adwords.google.com/api/adwords/cm/v201605"; const XSI_TYPE = ""; /** * @access public * @var BatchJobReturnValue */ public $rval; /** * Gets the namesapce of this class * @return string the namespace of this class */ public function getNamespace() { return self::WSDL_NAMESPACE; } /** * Gets the xsi:type name of this class * @return string the xsi:type name of this class */ public function getXsiTypeName() { return self::XSI_TYPE; } public function __construct($rval = null) { $this->rval = $rval; } }
SonicGD/google-adwords-api-light
Google/Api/Ads/AdWords/v201605/classes/BatchJobServiceMutateResponse.php
PHP
apache-2.0
797
[ 30522, 1026, 1029, 25718, 1013, 1008, 1008, 1008, 1008, 1030, 7427, 8224, 1035, 17928, 1035, 14997, 1035, 4748, 22104, 1035, 1058, 11387, 16048, 2692, 2629, 1008, 1030, 4942, 23947, 4270, 1058, 11387, 16048, 2692, 2629, 1008, 1013, 2465, 14...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/*+@@file@@----------------------------------------------------------------*//*! \file listeneradapter.h \par Description Extension and update of headers for PellesC compiler suite. \par Project: PellesC Headers extension \date Created on Sun Jul 17 22:03:03 2016 \date Modified on Sun Jul 17 22:03:03 2016 \author frankie \*//*-@@file@@----------------------------------------------------------------*/ #ifndef _LISTENER_ADAPTER_H_ #define _LISTENER_ADAPTER_H_ #if __POCC__ >= 500 #pragma once #endif typedef VOID(*PFN_WEBHOST_LISTENER_CONFIG_MANAGER_CONNECTED) (VOID *pContext); typedef VOID(*PFN_WEBHOST_LISTENER_CONFIG_MANAGER_DISCONNECT) (VOID *pContext, HRESULT hr); typedef VOID(*PFN_WEBHOST_LISTENER_CONFIG_MANAGER_INITIALIZATION_COMPLETED) (VOID *pContext); typedef VOID(*PFN_WEBHOST_LISTENER_APPLICATION_POOL_CREATED) (VOID *pContext, LPCWSTR AppPoolId, PSID PSID); typedef VOID(*PFN_WEBHOST_LISTENER_APPLICATION_POOL_DELETED) (VOID *pContext, LPCWSTR AppPoolId); typedef VOID(*PFN_WEBHOST_LISTENER_APPLICATION_POOL_IDENTITY_CHANGED) (VOID *pContext, LPCWSTR AppPoolId, PSID PSID); typedef VOID(*PFN_WEBHOST_LISTENER_APPLICATION_POOL_STATE_CHANGED) (VOID *pContext, LPCWSTR AppPoolId, BOOL fIsEnabled); typedef VOID(*PFN_WEBHOST_LISTENER_APPLICATION_POOL_CAN_OPEN_NEW_LISTENER_CHANNEL_INSTANCE) (VOID *pContext, LPCWSTR AppPoolId, DWORD ListenerChannelId); typedef VOID(*PFN_WEBHOST_LISTENER_APPLICATION_POOL_ALL_LISTENER_CHANNEL_INSTANCES_STOPPED) (VOID *pContext, LPCWSTR AppPoolId, DWORD ListenerChannelId); typedef VOID(*PFN_WEBHOST_LISTENER_APPLICATION_CREATED) (VOID *pContext, LPCWSTR AppKey, LPCWSTR Path, DWORD SiteId, LPCWSTR AppPoolId, PBYTE Bindings, DWORD NumberOfBindings, BOOL RequestsBlocked); typedef VOID(*PFN_WEBHOST_LISTENER_APPLICATION_DELETED) (VOID *pContext, LPCWSTR AppKey); typedef VOID(*PFN_WEBHOST_LISTENER_APPLICATION_BINDINGS_CHANGED) (VOID *pContext, LPCWSTR AppKey, PBYTE Bindings, DWORD NumberOfBindings); typedef 
VOID(*PFN_WEBHOST_LISTENER_APPLICATION_APP_POOL_CHANGED) (VOID *pContext, LPCWSTR AppKey, LPCWSTR AppPoolId); typedef VOID(*PFN_WEBHOST_LISTENER_APPLICATION_REQUESTS_BLOCKED_CHANGED) (VOID *pContext, LPCWSTR AppKey, BOOL RequestsBlocked); struct WEBHOST_LISTENER_CALLBACKS { DWORD dwBytesInCallbackStructure; PFN_WEBHOST_LISTENER_CONFIG_MANAGER_CONNECTED pfnWebhostListenerConfigManagerConnected; PFN_WEBHOST_LISTENER_CONFIG_MANAGER_DISCONNECT pfnWebhostListenerConfigManagerDisconnected; PFN_WEBHOST_LISTENER_CONFIG_MANAGER_INITIALIZATION_COMPLETED pfnWebhostListenerConfigManagerInitializationCompleted; PFN_WEBHOST_LISTENER_APPLICATION_POOL_CREATED pfnWebhostListenerApplicationPoolCreated; PFN_WEBHOST_LISTENER_APPLICATION_POOL_DELETED pfnWebhostListenerApplicationPoolDeleted; PFN_WEBHOST_LISTENER_APPLICATION_POOL_IDENTITY_CHANGED pfnWebhostListenerApplicationPoolIdentityChanged; PFN_WEBHOST_LISTENER_APPLICATION_POOL_STATE_CHANGED pfnWebhostListenerApplicationPoolStateChanged; PFN_WEBHOST_LISTENER_APPLICATION_POOL_CAN_OPEN_NEW_LISTENER_CHANNEL_INSTANCE pfnWebhostListenerApplicationPoolCanOpenNewListenerChannelInstance; PFN_WEBHOST_LISTENER_APPLICATION_POOL_ALL_LISTENER_CHANNEL_INSTANCES_STOPPED pfnWebhostListenerApplicationPoolAllListenerChannelInstancesStopped; PFN_WEBHOST_LISTENER_APPLICATION_CREATED pfnWebhostListenerApplicationCreated; PFN_WEBHOST_LISTENER_APPLICATION_DELETED pfnWebhostListenerApplicationDeleted; PFN_WEBHOST_LISTENER_APPLICATION_BINDINGS_CHANGED pfnWebhostListenerApplicationBindingsChanged; PFN_WEBHOST_LISTENER_APPLICATION_APP_POOL_CHANGED pfnWebhostListenerApplicationAppPoolChanged; PFN_WEBHOST_LISTENER_APPLICATION_REQUESTS_BLOCKED_CHANGED pfnWebhostListenerApplicationRequestsBlockedChanged; }; HRESULT WebhostGetVersion(DWORD *pMajorVersion, DWORD *pMinorVersion); HRESULT WebhostRegisterProtocol(LPCWSTR ProtocolId, VOID *pListenerCallbacks, VOID *pContext, DWORD *pProtocolHandle); HRESULT WebhostUnregisterProtocol(DWORD ProtocolHandle); HRESULT 
WebhostOpenListenerChannelInstance(DWORD ProtocolHandle, LPCWSTR AppPoolId, DWORD ListenerChannelId, PBYTE ListenerChannelBlob, DWORD ListenerChannelBlobByteCount); HRESULT WebhostCloseAllListenerChannelInstances(DWORD ProtocolHandle, LPCWSTR AppPoolId, DWORD ListenerChannelId); #endif
Frankie-PellesC/fSDK
Include/listeneradapter.h
C
lgpl-3.0
4,291
[ 30522, 1013, 1008, 1009, 1030, 1030, 5371, 1030, 1030, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 101...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer.text.tx3g; import com.google.android.exoplayer.text.Cue; import com.google.android.exoplayer.text.Subtitle; import com.google.android.exoplayer.text.SubtitleParser; import com.google.android.exoplayer.util.MimeTypes; import com.google.android.exoplayer.util.ParsableByteArray; /** * A {@link SubtitleParser} for tx3g. * <p> * Currently only supports parsing of a single text track. */ public final class Tx3gParser implements SubtitleParser { private final ParsableByteArray parsableByteArray; public Tx3gParser() { parsableByteArray = new ParsableByteArray(); } @Override public boolean canParse(String mimeType) { return MimeTypes.APPLICATION_TX3G.equals(mimeType); } @Override public Subtitle parse(byte[] bytes, int offset, int length) { parsableByteArray.reset(bytes, length); int textLength = parsableByteArray.readUnsignedShort(); if (textLength == 0) { return Tx3gSubtitle.EMPTY; } String cueText = parsableByteArray.readString(textLength); return new Tx3gSubtitle(new Cue(cueText)); } }
Lee-Wills/-tv
mmd/library/src/main/java/com/google/android/exoplayer/text/tx3g/Tx3gParser.java
Java
gpl-3.0
1,720
[ 30522, 1013, 1008, 1008, 9385, 1006, 1039, 1007, 2297, 1996, 11924, 2330, 3120, 2622, 1008, 1008, 7000, 2104, 1996, 15895, 6105, 1010, 2544, 1016, 1012, 1014, 1006, 1996, 1000, 6105, 1000, 1007, 1025, 1008, 2017, 2089, 2025, 2224, 2023, 5...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.plugins.index.lucene; import org.junit.Test; import static org.junit.Assert.assertNotNull; /** * Tests for {@link OakCodec} */ public class OakCodecTest { @Test public void tesFormats() { OakCodec oakCodec = new OakCodec(); assertNotNull(oakCodec.docValuesFormat()); assertNotNull(oakCodec.fieldInfosFormat()); assertNotNull(oakCodec.liveDocsFormat()); assertNotNull(oakCodec.normsFormat()); assertNotNull(oakCodec.postingsFormat()); assertNotNull(oakCodec.segmentInfoFormat()); assertNotNull(oakCodec.storedFieldsFormat()); assertNotNull(oakCodec.termVectorsFormat()); } }
trekawek/jackrabbit-oak
oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakCodecTest.java
Java
apache-2.0
1,500
[ 30522, 1013, 1008, 1008, 7000, 2000, 1996, 15895, 4007, 3192, 1006, 2004, 2546, 1007, 2104, 2028, 2030, 2062, 1008, 12130, 6105, 10540, 1012, 2156, 1996, 5060, 5371, 5500, 2007, 1008, 2023, 2147, 2005, 3176, 2592, 4953, 9385, 6095, 1012, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
# CMAKE generated file: DO NOT EDIT! # Generated by "Unix Makefiles" Generator, CMake Version 2.8 # The generator used is: SET(CMAKE_DEPENDS_GENERATOR "Unix Makefiles") # The top level Makefile was generated from the following files: SET(CMAKE_MAKEFILE_DEPENDS "CMakeCache.txt" "/home/guillaume/tf-cadence/gnuradio/zedboard/arm_cortex_a8_native.cmake" "../CMakeLists.txt" "../apps/CMakeLists.txt" "CMakeFiles/CMakeCCompiler.cmake" "CMakeFiles/CMakeCXXCompiler.cmake" "CMakeFiles/CMakeSystem.cmake" "../cmake/Modules/CMakeParseArgumentsCopy.cmake" "../cmake/Modules/FindCppUnit.cmake" "../cmake/Modules/FindGnuradioRuntime.cmake" "../cmake/Modules/GrMiscUtils.cmake" "../cmake/Modules/GrPlatform.cmake" "../cmake/Modules/GrPython.cmake" "../cmake/Modules/GrSwig.cmake" "../cmake/Modules/GrTest.cmake" "../cmake/cmake_uninstall.cmake.in" "../docs/CMakeLists.txt" "../grc/CMakeLists.txt" "../include/test1/CMakeLists.txt" "../lib/CMakeLists.txt" "../python/CMakeLists.txt" "../swig/CMakeLists.txt" "/usr/share/cmake-2.8/Modules/CMakeCCompiler.cmake.in" "/usr/share/cmake-2.8/Modules/CMakeCInformation.cmake" "/usr/share/cmake-2.8/Modules/CMakeCXXCompiler.cmake.in" "/usr/share/cmake-2.8/Modules/CMakeCXXInformation.cmake" "/usr/share/cmake-2.8/Modules/CMakeClDeps.cmake" "/usr/share/cmake-2.8/Modules/CMakeCommonLanguageInclude.cmake" "/usr/share/cmake-2.8/Modules/CMakeDetermineCCompiler.cmake" "/usr/share/cmake-2.8/Modules/CMakeDetermineCXXCompiler.cmake" "/usr/share/cmake-2.8/Modules/CMakeDetermineCompilerId.cmake" "/usr/share/cmake-2.8/Modules/CMakeDetermineSystem.cmake" "/usr/share/cmake-2.8/Modules/CMakeFindBinUtils.cmake" "/usr/share/cmake-2.8/Modules/CMakeFindFrameworks.cmake" "/usr/share/cmake-2.8/Modules/CMakeGenericSystem.cmake" "/usr/share/cmake-2.8/Modules/CMakeParseArguments.cmake" "/usr/share/cmake-2.8/Modules/CMakeSystem.cmake.in" "/usr/share/cmake-2.8/Modules/CMakeSystemSpecificInformation.cmake" "/usr/share/cmake-2.8/Modules/CMakeTestCCompiler.cmake" 
"/usr/share/cmake-2.8/Modules/CMakeTestCXXCompiler.cmake" "/usr/share/cmake-2.8/Modules/CMakeTestCompilerCommon.cmake" "/usr/share/cmake-2.8/Modules/CMakeUnixFindMake.cmake" "/usr/share/cmake-2.8/Modules/Compiler/GNU-C.cmake" "/usr/share/cmake-2.8/Modules/Compiler/GNU-CXX.cmake" "/usr/share/cmake-2.8/Modules/Compiler/GNU.cmake" "/usr/share/cmake-2.8/Modules/FindBoost.cmake" "/usr/share/cmake-2.8/Modules/FindDoxygen.cmake" "/usr/share/cmake-2.8/Modules/FindPackageHandleStandardArgs.cmake" "/usr/share/cmake-2.8/Modules/FindPackageMessage.cmake" "/usr/share/cmake-2.8/Modules/FindPkgConfig.cmake" "/usr/share/cmake-2.8/Modules/FindPythonInterp.cmake" "/usr/share/cmake-2.8/Modules/FindPythonLibs.cmake" "/usr/share/cmake-2.8/Modules/FindSWIG.cmake" "/usr/share/cmake-2.8/Modules/Platform/Linux-GNU-C.cmake" "/usr/share/cmake-2.8/Modules/Platform/Linux-GNU-CXX.cmake" "/usr/share/cmake-2.8/Modules/Platform/Linux-GNU.cmake" "/usr/share/cmake-2.8/Modules/Platform/Linux.cmake" "/usr/share/cmake-2.8/Modules/Platform/UnixPaths.cmake" "/usr/share/cmake-2.8/Modules/SelectLibraryConfigurations.cmake" "/usr/share/cmake-2.8/Modules/UseSWIG.cmake" ) # The corresponding makefile is: SET(CMAKE_MAKEFILE_OUTPUTS "Makefile" "CMakeFiles/cmake.check_cache" ) # Byproducts of CMake generate step: SET(CMAKE_MAKEFILE_PRODUCTS "CMakeFiles/CMakeDirectoryInformation.cmake" "include/test1/CMakeFiles/CMakeDirectoryInformation.cmake" "lib/CMakeFiles/CMakeDirectoryInformation.cmake" "swig/CMakeFiles/CMakeDirectoryInformation.cmake" "python/CMakeFiles/CMakeDirectoryInformation.cmake" "grc/CMakeFiles/CMakeDirectoryInformation.cmake" "apps/CMakeFiles/CMakeDirectoryInformation.cmake" "docs/CMakeFiles/CMakeDirectoryInformation.cmake" ) # Dependency information for all targets: SET(CMAKE_DEPEND_INFO_FILES "CMakeFiles/uninstall.dir/DependInfo.cmake" "lib/CMakeFiles/gnuradio-test1.dir/DependInfo.cmake" "lib/CMakeFiles/test-test1.dir/DependInfo.cmake" "swig/CMakeFiles/_test1_swig.dir/DependInfo.cmake" 
"swig/CMakeFiles/_test1_swig_swig_tag.dir/DependInfo.cmake" "swig/CMakeFiles/pygen_swig_104a7.dir/DependInfo.cmake" "python/CMakeFiles/pygen_python_30562.dir/DependInfo.cmake" "apps/CMakeFiles/pygen_apps_9a6dd.dir/DependInfo.cmake" )
guillaumeWBres/zynq7-sdr
gnuradio/fpga-src/build_cross/CMakeFiles/Makefile.cmake
CMake
gpl-2.0
4,368
[ 30522, 1001, 4642, 13808, 7013, 5371, 1024, 2079, 2025, 10086, 999, 1001, 7013, 2011, 1000, 19998, 2191, 8873, 4244, 1000, 13103, 1010, 4642, 13808, 2544, 1016, 1012, 1022, 1001, 1996, 13103, 2109, 2003, 1024, 2275, 1006, 4642, 13808, 1035,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
# -*- coding: utf-8 -*- from odoo.tests.common import HttpCase from odoo.exceptions import ValidationError class AccountingTestCase(HttpCase): """ This class extends the base TransactionCase, in order to test the accounting with localization setups. It is configured to run the tests after the installation of all modules, and will SKIP TESTS ifit cannot find an already configured accounting (which means no localization module has been installed). """ post_install = True at_install = False def setUp(self): super(AccountingTestCase, self).setUp() domain = [('company_id', '=', self.env.ref('base.main_company').id)] if not self.env['account.account'].search_count(domain): self.skipTest("No Chart of account found") def check_complete_move(self, move, theorical_lines): for aml in move.line_ids: line = (aml.name, round(aml.debit, 2), round(aml.credit, 2)) if line in theorical_lines: theorical_lines.remove(line) else: raise ValidationError('Unexpected journal item. (label: %s, debit: %s, credit: %s)' % (aml.name, round(aml.debit, 2), round(aml.credit, 2))) if theorical_lines: raise ValidationError('Remaining theorical line (not found). %s)' % ([(aml[0], aml[1], aml[2]) for aml in theorical_lines])) return True def ensure_account_property(self, property_name): '''Ensure the ir.property targetting an account.account passed as parameter exists. In case it's not: create it with a random account. This is useful when testing with partially defined localization (missing stock properties for example) :param property_name: The name of the property. 
''' company_id = self.env.user.company_id field_id = self.env['ir.model.fields'].search( [('model', '=', 'product.template'), ('name', '=', property_name)], limit=1) property_id = self.env['ir.property'].search([ ('company_id', '=', company_id.id), ('name', '=', property_name), ('res_id', '=', None), ('fields_id', '=', field_id.id)], limit=1) account_id = self.env['account.account'].search([('company_id', '=', company_id.id)], limit=1) value_reference = 'account.account,%d' % account_id.id if property_id and not property_id.value_reference: property_id.value_reference = value_reference else: self.env['ir.property'].create({ 'name': property_name, 'company_id': company_id.id, 'fields_id': field_id.id, 'value_reference': value_reference, })
Aravinthu/odoo
addons/account/tests/account_test_classes.py
Python
agpl-3.0
2,749
[ 30522, 1001, 1011, 1008, 1011, 16861, 1024, 21183, 2546, 1011, 1022, 1011, 1008, 1011, 2013, 1051, 3527, 2080, 1012, 5852, 1012, 2691, 12324, 8299, 18382, 2013, 1051, 3527, 2080, 1012, 11790, 12324, 27354, 2121, 29165, 2465, 9529, 22199, 18...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
module.exports = { // Load Mock Product Data Into localStorage init: function() { // localStorage.clear(); localStorage.setItem('thing', JSON.stringify([{ _id:'cbus-254-56-61', parent:null, label:'much test' }, { _id:'mesh-099', parent:'voltage', label:'wow' }])); localStorage.setItem('items',JSON.stringify([ { _id:'cbus-254-56-61.level', thing:'cbus-254-56-61', item:'level', label:'much test', value:1, type:'number', icon: 'scotch-beer.png', widget:'Slider' }, { _id:'mesh-099.voltage', thing:'mesh-099', item:'voltage', label:'wow', value:2, type:'number', icon: 'scotch-beer.png', widget:'Slider' } ])); } };
the1laz/quicksilver-ui
js/ThingData.js
JavaScript
mit
845
[ 30522, 11336, 1012, 14338, 1027, 1063, 1013, 1013, 7170, 12934, 4031, 2951, 2046, 10575, 4263, 4270, 1999, 4183, 1024, 3853, 1006, 1007, 1063, 1013, 1013, 10575, 4263, 4270, 1012, 3154, 1006, 1007, 1025, 10575, 4263, 4270, 1012, 2275, 4221,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
using System; using System.Collections.Generic; using System.Linq; using System.Net.Http; using System.Threading; using System.Threading.Tasks; using Platibus.Diagnostics; using Xunit; namespace Platibus.UnitTests.Diagnostics { [Trait("Category", "UnitTests")] [Trait("Dependency", "InfluxDB")] public class InfluxDBSinkTests { protected TimeSpan SampleRate = TimeSpan.FromSeconds(5); // docker run -it --rm --name influxdb -e INFLUXDB_ADMIN_ENABLED=true -p 8096:8086 -p 8093:8083 influxdb protected InfluxDBOptions Options = new InfluxDBOptions(new Uri("http://localhost:8096"), "platibus"); protected List<DiagnosticEvent> DiagnosticEvents = new List<DiagnosticEvent>(); public InfluxDBSinkTests() { CreateDatabase(); WaitForInfluxDB(Options.Uri); } private static void WaitForInfluxDB(Uri uri) { var pingUri = new UriBuilder(uri) { Path = "ping" }.Uri; using (var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30))) using (var client = new HttpClient()) { while (!cts.IsCancellationRequested) { try { var response = client.GetAsync(pingUri).Result; if (response.IsSuccessStatusCode) return; Task.Delay(TimeSpan.FromSeconds(1)).Wait(cts.Token); } catch (Exception) { } } } throw new TimeoutException("InfluxDB not available"); } private void CreateDatabase() { var uri = new UriBuilder(Options.Uri) { Path = "query", Query = "q=CREATE DATABASE " + Options.Database }.Uri; using (var client = new HttpClient()) { var response = client.PostAsync(uri, new StringContent("")).Result; Assert.True(response.IsSuccessStatusCode, $"Error creating InfluxDB database '{Options.Database}': {response}"); } } [Fact] public async Task AcknowledgementFailuresAreRecorded() { GivenQueuedMessageFlowWithAcknowledgementFailure(); await WhenConsumingEvents(); } protected void GivenQueuedMessageFlowWithAcknowledgementFailure() { var fakes = new DiagnosticFakes(this); DiagnosticEvents.AddRange(fakes.QueuedMessageFlowWithAcknowledgementFailure()); } protected async Task WhenConsumingEvents() { var sink = 
new InfluxDBSink(Options, SampleRate); var consumeTasks = DiagnosticEvents.Select(async e => await sink.ConsumeAsync(e)).ToList(); await Task.WhenAll(consumeTasks); sink.RecordMeasurements(); } } }
sweetlandj/Platibus
Source/Platibus.UnitTests/Diagnostics/InfluxDBSinkTests.cs
C#
mit
3,062
[ 30522, 2478, 2291, 1025, 2478, 2291, 1012, 6407, 1012, 12391, 1025, 2478, 2291, 1012, 11409, 4160, 1025, 2478, 2291, 1012, 5658, 1012, 8299, 1025, 2478, 2291, 1012, 11689, 2075, 1025, 2478, 2291, 1012, 11689, 2075, 1012, 8518, 1025, 2478, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
{% extends widget.get_base_template %} {% load i18n %} {% load leonardo_tags %} {% block content %} <form action="{% url 'haystack_search' 'leonardo.module.search.apps.search' %}" method="get" class="navbar-collapse collapse navbar-form navbar-left" role="search"> <div class="input-group"> <input value="{{ query }}" type="text" class="form-control" name="q" id="search_keywords" /> <span class="input-group-btn"> <button id='search-button' class="btn btn-default" type="submit">{% trans "Search" %}&nbsp;<i class=" glyphicon glyphicon-search" aria-hidden="true"></i></button> </span> </div> </form> {% endblock %}
amboycharlie/Child-Friendly-LCMS
leonardo/module/nav/templates/widget/sitesearch/default.html
HTML
apache-2.0
635
[ 30522, 1063, 1003, 8908, 15536, 24291, 1012, 2131, 1035, 2918, 1035, 23561, 1003, 1065, 1063, 1003, 7170, 1045, 15136, 2078, 1003, 1065, 1063, 1003, 7170, 14720, 1035, 22073, 1003, 1065, 1063, 1003, 3796, 4180, 1003, 1065, 1026, 2433, 2895,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/** AnalyzeController.js */ application.controller('AnalyzeController', ['$scope', '$stateParams', 'PathService', function ($scope, $stateParams, PathService) { $scope.path = {}; PathService.path({ Type: "SUBMISSION", EntityId: $stateParams.submissionId }) .success(function (data, status, headers, config) { $scope.path = data; }); }]);
bond95/IBP2013
js/controllers/AnalyzeController.js
JavaScript
mit
387
[ 30522, 1013, 1008, 1008, 17908, 8663, 13181, 10820, 1012, 1046, 2015, 1008, 1013, 4646, 1012, 11486, 1006, 1005, 17908, 8663, 13181, 10820, 1005, 1010, 1031, 1005, 1002, 9531, 1005, 1010, 1005, 1002, 2110, 28689, 5244, 1005, 1010, 1005, 104...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
\section{Interpolators} \label{sec:interpolate} VIPS has a general system for representing pixel interpolators. You can select an interpolator to pass to other VIPS operations, such as \verb+im_affinei()+, you can add new interpolators, and you can write operations which take a general interpolator as a parameter. An interpolator is a function of the form: \begin{verbatim} typedef void (*VipsInterpolateMethod)( VipsInterpolate *, PEL *out, REGION *in, double x, double y ); \end{verbatim} \noindent given the set of input pixels \verb+in+, it has to calculate a value for the fractional position $(x, y)$ and write this value to the memory pointed to by \verb+out+. VIPS uses corner convention, so the value of pixel $(0, 0)$ is the value of the surface the interpolator fits at the fractional position $(0.0, 0.0)$. \subsection{How an interpolator is represented} See the man page for \verb+VipsInterpolate+ for full details, but briefly, an interpolator is a subclass of \verb+VipsInterpolate+ implementing the following items: \begin{itemize} \item An interpolation method, with the type signature above. \item A function \verb+get_window_size()+ which returns the size of the area of pixels that the interpolator needs in order to calculate a value. For example, a bilinear interpolator needs the four pixels surrounding the point to be calculated, or a 2 by 2 window, so window size should be 2. \item Or if the window size is constant, you can leave \verb+get_window_size()+ NULL and just set the int value \verb+window_size+. \end{itemize} \subsection{A sample interpolator} As an example, \fref{fg:newinterpolator} shows how to register a new interpolator in a plugin. \begin{fig2} \begin{verbatim} // This interpolator adds no new members. 
typedef VipsInterpolate Myinterpolator; typedef VipsInterpolateClass MyinterpolatorClass; G_DEFINE_TYPE( Myinterpolator, myinterpolator, VIPS_TYPE_INTERPOLATE ); static void myinterpolator_interpolate( VipsInterpolate *interpolate, PEL *out, REGION *in, double x, double y ) { MyinterpolatorClass *class = MYINTERPOLATOR_GET_CLASS( interpolate ); /* Nearest-neighbor. */ memcpy( out, IM_REGION_ADDR( in, floor( x ), floor( y ) ), IM_IMAGE_SIZEOF_PEL( in->im ) ); } static void myinterpolator_class_init( MyinterpolatorClass *class ) { VipsObjectClass *object_class = (VipsObjectClass *) class; VipsInterpolateClass *interpolate_class = (VipsInterpolateClass *) class; object_class->nickname = "myinterpolator"; object_class->description = _( "My interpolator" ); interpolate_class->interpolate = myinterpolator_interpolate; } static void myinterpolate_init( Myinterpolate *object ) { } char * g_module_check_init( GModule *self ) { // register the type myinterpolator_get_type(); } \end{verbatim} \caption{Registering an interpolator in a plugin} \label{fg:newinterpolator} \end{fig2} \subsection{Writing a VIPS operation that takes an interpolator as an argument} Operations just take a \verb+VipsInterpolate+ as an argument, for example: \begin{verbatim} int im_affinei_all( IMAGE *in, IMAGE *out, VipsInterpolate *interpolate, double a, double b, double c, double d, double dx, double dy ); \end{verbatim} To use the interpolator, use \verb+vips_interpolate()+: \begin{verbatim} void vips_interpolate( VipsInterpolate *interpolate, PEL *out, REGION *in, double x, double y ); \end{verbatim} \noindent This looks up the interpolate method for the object and calls it for you. 
You can save the cost of the lookup in an inner loop with \verb+vips_interpolate_get_method()+: \begin{verbatim} VipsInterpolateMethod vips_interpolate_get_method( VipsInterpolate *interpolate ); \end{verbatim} \subsection{Passing an interpolator to a VIPS operation} You can build an instance of a \verb+VipsInterpolator+ with the \verb+vips_object_*()+ family of functions, see \pref{sec:object}. Convenience functions return a static instance of one of the standard interpolators: \begin{verbatim} VipsInterpolate *vips_interpolate_nearest_static( void ); VipsInterpolate *vips_interpolate_bilinear_static( void ); VipsInterpolate *vips_interpolate_bicubic_static( void ); \end{verbatim} \noindent Don't free the result. Finally, \verb+vips_interpolate_new()+ makes a \verb+VipsInterpolate+ from a nickname: \begin{verbatim} VipsInterpolate *vips_interpolate_new( const char *nickname ); \end{verbatim} For example: \begin{verbatim} VipsInterpolate *interpolate = vips_interpolate_new( "nohalo" ); \end{verbatim} \noindent You must drop your ref after you're done with the object with \verb+g_object_unref()+.
bamos/parsec-benchmark
pkgs/apps/vips/src/doc/src/interpolate.tex
TeX
bsd-3-clause
4,646
[ 30522, 1032, 2930, 1063, 6970, 18155, 18926, 1065, 1032, 3830, 1063, 10819, 1024, 6970, 18155, 3686, 1065, 21722, 2015, 2038, 1037, 2236, 2291, 2005, 5052, 22138, 6970, 18155, 18926, 1012, 2017, 2064, 7276, 2019, 6970, 18155, 8844, 2000, 34...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/* Copyright 2016 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ // Tagger transition system. // // This transition system has one type of actions: // - The SHIFT action pushes the next input token to the stack and // advances to the next input token, assigning a part-of-speech tag to the // token that was shifted. // // The transition system operates with parser actions encoded as integers: // - A SHIFT action is encoded as number starting from 0. #include <string> #include "parser_features.h" #include "parser_state.h" #include "parser_transitions.h" #include "sentence_features.h" #include "shared_store.h" #include "task_context.h" #include "term_frequency_map.h" #include "utils.h" #include "tensorflow/core/lib/strings/strcat.h" namespace syntaxnet { class TaggerTransitionState : public ParserTransitionState { public: explicit TaggerTransitionState(const TermFrequencyMap *tag_map, const TagToCategoryMap *tag_to_category) : tag_map_(tag_map), tag_to_category_(tag_to_category) {} explicit TaggerTransitionState(const TaggerTransitionState *state) : TaggerTransitionState(state->tag_map_, state->tag_to_category_) { tag_ = state->tag_; gold_tag_ = state->gold_tag_; } // Clones the transition state by returning a new object. ParserTransitionState *Clone() const override { return new TaggerTransitionState(this); } // Reads gold tags for each token. 
void Init(ParserState *state) override { tag_.resize(state->sentence().token_size(), -1); gold_tag_.resize(state->sentence().token_size(), -1); for (int pos = 0; pos < state->sentence().token_size(); ++pos) { int tag = tag_map_->LookupIndex(state->GetToken(pos).tag(), -1); gold_tag_[pos] = tag; } } // Returns the tag assigned to a given token. int Tag(int index) const { DCHECK_GE(index, 0); DCHECK_LT(index, tag_.size()); return index == -1 ? -1 : tag_[index]; } // Sets this tag on the token at index. void SetTag(int index, int tag) { DCHECK_GE(index, 0); DCHECK_LT(index, tag_.size()); tag_[index] = tag; } // Returns the gold tag for a given token. int GoldTag(int index) const { DCHECK_GE(index, -1); DCHECK_LT(index, gold_tag_.size()); return index == -1 ? -1 : gold_tag_[index]; } // Returns the string representation of a POS tag, or an empty string // if the tag is invalid. string TagAsString(int tag) const { if (tag >= 0 && tag < tag_map_->Size()) { return tag_map_->GetTerm(tag); } return ""; } // Adds transition state specific annotations to the document. void AddParseToDocument(const ParserState &state, bool rewrite_root_labels, Sentence *sentence) const override { for (size_t i = 0; i < tag_.size(); ++i) { Token *token = sentence->mutable_token(i); token->set_tag(TagAsString(Tag(i))); if (tag_to_category_) { token->set_category(tag_to_category_->GetCategory(token->tag())); } } } // Whether a parsed token should be considered correct for evaluation. bool IsTokenCorrect(const ParserState &state, int index) const override { return GoldTag(index) == Tag(index); } // Returns a human readable string representation of this state. 
string ToString(const ParserState &state) const override { string str; for (int i = state.StackSize(); i > 0; --i) { const string &word = state.GetToken(state.Stack(i - 1)).word(); if (i != state.StackSize() - 1) str.append(" "); tensorflow::strings::StrAppend( &str, word, "[", TagAsString(Tag(state.StackSize() - i)), "]"); } for (int i = state.Next(); i < state.NumTokens(); ++i) { tensorflow::strings::StrAppend(&str, " ", state.GetToken(i).word()); } return str; } private: // Currently assigned POS tags for each token in this sentence. std::vector<int> tag_; // Gold POS tags from the input document. std::vector<int> gold_tag_; // Tag map used for conversions between integer and string representations // part of speech tags. Not owned. const TermFrequencyMap *tag_map_ = nullptr; // Tag to category map. Not owned. const TagToCategoryMap *tag_to_category_ = nullptr; TF_DISALLOW_COPY_AND_ASSIGN(TaggerTransitionState); }; class TaggerTransitionSystem : public ParserTransitionSystem { public: ~TaggerTransitionSystem() override { SharedStore::Release(tag_map_); } // Determines tag map location. void Setup(TaskContext *context) override { input_tag_map_ = context->GetInput("tag-map", "text", ""); join_category_to_pos_ = context->GetBoolParameter("join_category_to_pos"); if (!join_category_to_pos_) { input_tag_to_category_ = context->GetInput("tag-to-category", "text", ""); } } // Reads tag map and tag to category map. void Init(TaskContext *context) override { const string tag_map_path = TaskContext::InputFile(*input_tag_map_); tag_map_ = SharedStoreUtils::GetWithDefaultName<TermFrequencyMap>( tag_map_path, 0, 0); if (!join_category_to_pos_) { const string tag_to_category_path = TaskContext::InputFile(*input_tag_to_category_); tag_to_category_ = SharedStoreUtils::GetWithDefaultName<TagToCategoryMap>( tag_to_category_path); } } // The SHIFT action uses the same value as the corresponding action type. 
static ParserAction ShiftAction(int tag) { return tag; } // The tagger transition system doesn't look at the dependency tree, so it // allows non-projective trees. bool AllowsNonProjective() const override { return true; } // Returns the number of action types. int NumActionTypes() const override { return 1; } // Returns the number of possible actions. int NumActions(int num_labels) const override { return tag_map_->Size(); } // The default action for a given state is assigning the most frequent tag. ParserAction GetDefaultAction(const ParserState &state) const override { return ShiftAction(0); } // Returns the next gold action for a given state according to the // underlying annotated sentence. ParserAction GetNextGoldAction(const ParserState &state) const override { if (!state.EndOfInput()) { return ShiftAction(TransitionState(state).GoldTag(state.Next())); } return ShiftAction(0); } // Checks if the action is allowed in a given parser state. bool IsAllowedAction(ParserAction action, const ParserState &state) const override { return !state.EndOfInput(); } // Makes a shift by pushing the next input token on the stack and moving to // the next position. void PerformActionWithoutHistory(ParserAction action, ParserState *state) const override { DCHECK(!state->EndOfInput()); if (!state->EndOfInput()) { MutableTransitionState(state)->SetTag(state->Next(), action); state->Push(state->Next()); state->Advance(); } } // We are in a final state when we reached the end of the input and the stack // is empty. bool IsFinalState(const ParserState &state) const override { return state.EndOfInput(); } // Returns a string representation of a parser action. string ActionAsString(ParserAction action, const ParserState &state) const override { return tensorflow::strings::StrCat("SHIFT(", tag_map_->GetTerm(action), ")"); } // No state is deterministic in this transition system. 
bool IsDeterministicState(const ParserState &state) const override { return false; } // Returns a new transition state to be used to enhance the parser state. ParserTransitionState *NewTransitionState(bool training_mode) const override { return new TaggerTransitionState(tag_map_, tag_to_category_); } // Downcasts the const ParserTransitionState in ParserState to a const // TaggerTransitionState. static const TaggerTransitionState &TransitionState( const ParserState &state) { return *static_cast<const TaggerTransitionState *>( state.transition_state()); } // Downcasts the ParserTransitionState in ParserState to an // TaggerTransitionState. static TaggerTransitionState *MutableTransitionState(ParserState *state) { return static_cast<TaggerTransitionState *>( state->mutable_transition_state()); } // Input for the tag map. Not owned. TaskInput *input_tag_map_ = nullptr; // Tag map used for conversions between integer and string representations // part of speech tags. Owned through SharedStore. const TermFrequencyMap *tag_map_ = nullptr; // Input for the tag to category map. Not owned. TaskInput *input_tag_to_category_ = nullptr; // Tag to category map. Owned through SharedStore. const TagToCategoryMap *tag_to_category_ = nullptr; bool join_category_to_pos_ = false; }; REGISTER_TRANSITION_SYSTEM("tagger", TaggerTransitionSystem); // Feature function for retrieving the tag assigned to a token by the tagger // transition system. class PredictedTagFeatureFunction : public BasicParserSentenceFeatureFunction<Tag> { public: PredictedTagFeatureFunction() {} // Gets the TaggerTransitionState from the parser state and reads the assigned // tag at the focus index. Returns -1 if the focus is not within the sentence. 
FeatureValue Compute(const WorkspaceSet &workspaces, const ParserState &state, int focus, const FeatureVector *result) const override { if (focus < 0 || focus >= state.sentence().token_size()) return -1; return static_cast<const TaggerTransitionState *>(state.transition_state()) ->Tag(focus); } private: TF_DISALLOW_COPY_AND_ASSIGN(PredictedTagFeatureFunction); }; REGISTER_PARSER_IDX_FEATURE_FUNCTION("pred-tag", PredictedTagFeatureFunction); } // namespace syntaxnet
unsiloai/syntaxnet-ops-hack
tensorflow/core/syntaxnet/tagger_transitions.cc
C++
apache-2.0
10,523
[ 30522, 1013, 1008, 9385, 2355, 8224, 4297, 1012, 2035, 2916, 9235, 1012, 7000, 2104, 1996, 15895, 6105, 1010, 2544, 1016, 1012, 1014, 1006, 1996, 1000, 6105, 1000, 1007, 1025, 2017, 2089, 2025, 2224, 2023, 5371, 3272, 1999, 12646, 2007, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<?php # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/ads/googleads/v10/services/ad_group_ad_label_service.proto namespace Google\Ads\GoogleAds\V10\Services; use Google\Protobuf\Internal\GPBType; use Google\Protobuf\Internal\RepeatedField; use Google\Protobuf\Internal\GPBUtil; /** * Request message for [AdGroupAdLabelService.MutateAdGroupAdLabels][google.ads.googleads.v10.services.AdGroupAdLabelService.MutateAdGroupAdLabels]. * * Generated from protobuf message <code>google.ads.googleads.v10.services.MutateAdGroupAdLabelsRequest</code> */ class MutateAdGroupAdLabelsRequest extends \Google\Protobuf\Internal\Message { /** * Required. ID of the customer whose ad group ad labels are being modified. * * Generated from protobuf field <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> */ protected $customer_id = ''; /** * Required. The list of operations to perform on ad group ad labels. * * Generated from protobuf field <code>repeated .google.ads.googleads.v10.services.AdGroupAdLabelOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ private $operations; /** * If true, successful operations will be carried out and invalid * operations will return errors. If false, all operations will be carried * out in one transaction if and only if they are all valid. * Default is false. * * Generated from protobuf field <code>bool partial_failure = 3;</code> */ protected $partial_failure = false; /** * If true, the request is validated but not executed. Only errors are * returned, not results. * * Generated from protobuf field <code>bool validate_only = 4;</code> */ protected $validate_only = false; /** * Constructor. * * @param array $data { * Optional. Data for populating the Message object. * * @type string $customer_id * Required. ID of the customer whose ad group ad labels are being modified. 
* @type \Google\Ads\GoogleAds\V10\Services\AdGroupAdLabelOperation[]|\Google\Protobuf\Internal\RepeatedField $operations * Required. The list of operations to perform on ad group ad labels. * @type bool $partial_failure * If true, successful operations will be carried out and invalid * operations will return errors. If false, all operations will be carried * out in one transaction if and only if they are all valid. * Default is false. * @type bool $validate_only * If true, the request is validated but not executed. Only errors are * returned, not results. * } */ public function __construct($data = NULL) { \GPBMetadata\Google\Ads\GoogleAds\V10\Services\AdGroupAdLabelService::initOnce(); parent::__construct($data); } /** * Required. ID of the customer whose ad group ad labels are being modified. * * Generated from protobuf field <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return string */ public function getCustomerId() { return $this->customer_id; } /** * Required. ID of the customer whose ad group ad labels are being modified. * * Generated from protobuf field <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @param string $var * @return $this */ public function setCustomerId($var) { GPBUtil::checkString($var, True); $this->customer_id = $var; return $this; } /** * Required. The list of operations to perform on ad group ad labels. * * Generated from protobuf field <code>repeated .google.ads.googleads.v10.services.AdGroupAdLabelOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code> * @return \Google\Protobuf\Internal\RepeatedField */ public function getOperations() { return $this->operations; } /** * Required. The list of operations to perform on ad group ad labels. 
* * Generated from protobuf field <code>repeated .google.ads.googleads.v10.services.AdGroupAdLabelOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code> * @param \Google\Ads\GoogleAds\V10\Services\AdGroupAdLabelOperation[]|\Google\Protobuf\Internal\RepeatedField $var * @return $this */ public function setOperations($var) { $arr = GPBUtil::checkRepeatedField($var, \Google\Protobuf\Internal\GPBType::MESSAGE, \Google\Ads\GoogleAds\V10\Services\AdGroupAdLabelOperation::class); $this->operations = $arr; return $this; } /** * If true, successful operations will be carried out and invalid * operations will return errors. If false, all operations will be carried * out in one transaction if and only if they are all valid. * Default is false. * * Generated from protobuf field <code>bool partial_failure = 3;</code> * @return bool */ public function getPartialFailure() { return $this->partial_failure; } /** * If true, successful operations will be carried out and invalid * operations will return errors. If false, all operations will be carried * out in one transaction if and only if they are all valid. * Default is false. * * Generated from protobuf field <code>bool partial_failure = 3;</code> * @param bool $var * @return $this */ public function setPartialFailure($var) { GPBUtil::checkBool($var); $this->partial_failure = $var; return $this; } /** * If true, the request is validated but not executed. Only errors are * returned, not results. * * Generated from protobuf field <code>bool validate_only = 4;</code> * @return bool */ public function getValidateOnly() { return $this->validate_only; } /** * If true, the request is validated but not executed. Only errors are * returned, not results. * * Generated from protobuf field <code>bool validate_only = 4;</code> * @param bool $var * @return $this */ public function setValidateOnly($var) { GPBUtil::checkBool($var); $this->validate_only = $var; return $this; } }
googleads/google-ads-php
src/Google/Ads/GoogleAds/V10/Services/MutateAdGroupAdLabelsRequest.php
PHP
apache-2.0
6,517
[ 30522, 1026, 30524, 1024, 8224, 1013, 14997, 1013, 8224, 19303, 1013, 1058, 10790, 1013, 2578, 1013, 4748, 1035, 2177, 1035, 4748, 1035, 3830, 1035, 2326, 1012, 15053, 3415, 15327, 8224, 1032, 14997, 1032, 8224, 19303, 1032, 1058, 10790, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<?php namespace Models; class Address { public $streetName; public $city; public $country; }
fendy3002/QzPhp
models/Address.php
PHP
mit
106
[ 30522, 1026, 1029, 25718, 3415, 15327, 4275, 1025, 2465, 4769, 1063, 2270, 1002, 2395, 18442, 1025, 2270, 1002, 2103, 1025, 2270, 1002, 2406, 1025, 1065, 102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
package org.intermine.web.tags.disclosure; /* * Copyright (C) 2002-2016 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.io.IOException; import javax.servlet.jsp.JspException; import javax.servlet.jsp.JspWriter; import javax.servlet.jsp.PageContext; import org.intermine.web.logic.Constants; import org.intermine.web.logic.results.WebState; /** * This class renders disclosure tag. See tag library descriptor for tag description. * @author Jakub Kulaviak */ public class DisclosureTag extends BaseDisclosureTag { private static final String DEFAULT_STYLE_CLASS = "disclosure"; private static final String CONSISTENT = "consistent"; private String id; private boolean opened = true; private String onClick; private String type = "simple"; /** * Returns type of tag. At this moment is relevant only 'consistent' type. * @return type of tag */ public String getType() { return type.toLowerCase(); } /** * Sets type of tag. * @param type type */ public void setType(String type) { this.type = type; } /** * {@inheritDoc} */ @Override protected String getDefaultStyleClass() { return DEFAULT_STYLE_CLASS; } /** * @return additional javascript code, that should be executed on element change. */ public String getOnClick() { return onClick; } /** * @param onChange additional javascript code, that should be executed on element change. */ public void setOnClick(String onChange) { this.onClick = onChange; } /** * Sets element id. Disclosure tag is implemented with div, it sets div id. * @param id element id */ public void setId(String id) { this.id = id; } /** * Gets element id. * @return element id */ public String getId() { return id; } /** * Returns true if disclosure is opened else false. 
* @return disclosure state */ public boolean getOpened() { if (isConsistentType()) { // Alas, we do not have access to the session, so we can't use SessionMethods WebState webState = (WebState) getJspContext().getAttribute(Constants.WEB_STATE, PageContext.SESSION_SCOPE); if (webState != null) { Boolean ret = webState.getToggledElements().get(getId()); if (ret != null) { return ret; } } } return opened; } /** * @return true if disclosure is consistent type, i.e. saves its state (opened or closed) * during user session */ public boolean isConsistentType() { return getType().equals(CONSISTENT); } /** * Sets new state of disclosure. * @param opened true if should be opened */ public void setOpened(boolean opened) { this.opened = opened; } /** * Renders tag. * @throws IOException if error occurs during writing to stream output * @throws JspException if JspException error occurs during rendering nested tags */ @Override public void doTag() throws JspException, IOException { JspWriter out = getJspContext().getOut(); out.write("<div"); printStyleAndClass(out); out.write(">"); getJspBody().invoke(null); // It is displayed opened and hidden (if specified) with javascript -> // Client browser without javascript doesn't hide the content and user can see it // Else he wouldn't have possibility to see the content if (!getOpened()) { printJavascriptHides(out); } out.write("</div>"); } private void printJavascriptHides(JspWriter out) throws IOException { out.write("<script type=\"text/javascript\">toggleHidden(\'"); out.write(getId()); out.write("\')</script>"); } /** * Returns link switching between displayed and hidden state. * @return link */ public String getLink() { StringBuilder sb = new StringBuilder(); sb.append("javascript:toggleHidden(\'"); sb.append(getId()); sb.append("\');"); if (isConsistentType()) { sb.append("saveToggleState(\'"); sb.append(getId()); sb.append("\');"); } if (getOnClick() != null) { sb.append(getOnClick()); sb.append(";"); } return sb.toString(); } }
JoeCarlson/intermine
intermine/web/main/src/org/intermine/web/tags/disclosure/DisclosureTag.java
Java
lgpl-2.1
4,788
[ 30522, 7427, 8917, 1012, 6970, 11233, 1012, 4773, 1012, 22073, 1012, 19380, 1025, 1013, 1008, 1008, 9385, 1006, 1039, 1007, 2526, 1011, 2355, 4875, 11233, 1008, 1008, 2023, 3642, 2089, 2022, 10350, 5500, 1998, 6310, 2104, 1996, 1008, 3408, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
package jetbrains.mps.samples.VoiceMenu.editor; /*Generated by MPS */ import jetbrains.mps.nodeEditor.DefaultNodeEditor; import jetbrains.mps.openapi.editor.cells.EditorCell; import jetbrains.mps.openapi.editor.EditorContext; import org.jetbrains.mps.openapi.model.SNode; public class HangUp_Editor extends DefaultNodeEditor { public EditorCell createEditorCell(EditorContext editorContext, SNode node) { return new HangUp_EditorBuilder_a(editorContext, node).createCell(); } }
vaclav/voicemenu
languages/jetbrains.mps.samples.VoiceMenu/source_gen/jetbrains/mps/samples/VoiceMenu/editor/HangUp_Editor.java
Java
apache-2.0
489
[ 30522, 7427, 6892, 10024, 7076, 1012, 12616, 1012, 8168, 1012, 2376, 3549, 2226, 1012, 3559, 1025, 1013, 1008, 7013, 2011, 12616, 1008, 1013, 12324, 6892, 10024, 7076, 1012, 12616, 1012, 13045, 2098, 15660, 1012, 12398, 3630, 26095, 23194, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<div> <h1>{ title }</h1> <ul> <li rv-each-item="data"> <span class="label">{ item.label }</span> <span class="value">{ item.value }</span> </li> </ul> <p class="more-info" rv-show="moreInfo">{ moreInfo }</p> <p class="updated-at" rv-show="updatedAt">{ updatedAt }</p> </div>
MuckRock/muckrock
muckrock/assets/dashing/widgets/requestlist/requestlist.html
HTML
agpl-3.0
323
[ 30522, 1026, 4487, 2615, 1028, 1026, 1044, 2487, 1028, 1063, 2516, 1065, 1026, 1013, 1044, 2487, 1028, 1026, 17359, 1028, 1026, 5622, 27634, 1011, 2169, 1011, 8875, 1027, 1000, 2951, 1000, 1028, 1026, 8487, 2465, 1027, 1000, 3830, 1000, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
package com.baeldung.beanfactory; import org.junit.Test; import org.springframework.beans.factory.BeanFactory; import org.springframework.beans.factory.xml.XmlBeanFactory; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.Resource; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; public class BeanFactoryWithClassPathResourceIntegrationTest { @Test public void createBeanFactoryAndCheckEmployeeBean() { Resource res = new ClassPathResource("beanfactory-example.xml"); BeanFactory factory = new XmlBeanFactory(res); Employee emp = (Employee) factory.getBean("employee"); assertTrue(factory.isSingleton("employee")); assertTrue(factory.getBean("employee") instanceof Employee); assertTrue(factory.isTypeMatch("employee", Employee.class)); assertTrue(factory.getAliases("employee").length > 0); } }
Niky4000/UsefulUtils
projects/tutorials-master/tutorials-master/spring-core/src/test/java/com/baeldung/beanfactory/BeanFactoryWithClassPathResourceIntegrationTest.java
Java
gpl-3.0
947
[ 30522, 7427, 4012, 1012, 25818, 6392, 5575, 1012, 14068, 21450, 1025, 12324, 8917, 1012, 12022, 4183, 1012, 3231, 1025, 12324, 8917, 1012, 3500, 15643, 6198, 1012, 13435, 1012, 4713, 1012, 14068, 21450, 1025, 12324, 8917, 1012, 3500, 15643, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace DMT.Common.Rest { class UrlSanitizer { public string Sanitize(string url) { int index = url.IndexOf('?'); if (index >-1) { url = url.Substring(0, index); } return url.TrimEnd('/'); } } }
tmichel/thesis
Common/DMT.Common/Rest/UrlSanitizer.cs
C#
mit
430
[ 30522, 2478, 2291, 1025, 2478, 2291, 1012, 6407, 1012, 12391, 1025, 2478, 2291, 1012, 11409, 4160, 1025, 2478, 2291, 1012, 3793, 1025, 2478, 2291, 1012, 11689, 2075, 1012, 8518, 1025, 3415, 15327, 1040, 20492, 1012, 2691, 1012, 2717, 1063, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.network.v2019_08_01; import com.microsoft.azure.arm.model.HasInner; import com.microsoft.azure.management.network.v2019_08_01.implementation.ProbeInner; import com.microsoft.azure.arm.model.Indexable; import com.microsoft.azure.arm.model.Refreshable; import com.microsoft.azure.arm.resources.models.HasManager; import com.microsoft.azure.management.network.v2019_08_01.implementation.NetworkManager; import java.util.List; import com.microsoft.azure.SubResource; /** * Type representing Probe. */ public interface Probe extends HasInner<ProbeInner>, Indexable, Refreshable<Probe>, HasManager<NetworkManager> { /** * @return the etag value. */ String etag(); /** * @return the id value. */ String id(); /** * @return the intervalInSeconds value. */ Integer intervalInSeconds(); /** * @return the loadBalancingRules value. */ List<SubResource> loadBalancingRules(); /** * @return the name value. */ String name(); /** * @return the numberOfProbes value. */ Integer numberOfProbes(); /** * @return the port value. */ int port(); /** * @return the protocol value. */ ProbeProtocol protocol(); /** * @return the provisioningState value. */ ProvisioningState provisioningState(); /** * @return the requestPath value. */ String requestPath(); /** * @return the type value. */ String type(); }
navalev/azure-sdk-for-java
sdk/network/mgmt-v2019_08_01/src/main/java/com/microsoft/azure/management/network/v2019_08_01/Probe.java
Java
mit
1,770
[ 30522, 1013, 1008, 1008, 1008, 9385, 1006, 1039, 1007, 7513, 3840, 1012, 2035, 2916, 9235, 1012, 1008, 7000, 2104, 1996, 10210, 6105, 1012, 2156, 6105, 1012, 19067, 2102, 1999, 1996, 2622, 7117, 2005, 1008, 6105, 2592, 1012, 1008, 1008, 3...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/** * @file * * @brief IMFS Device Node Handlers * @ingroup IMFS */ /* * COPYRIGHT (c) 1989-2012. * On-Line Applications Research Corporation (OAR). * * The license and distribution terms for this file may be * found in the file LICENSE in this distribution or at * http://www.rtems.org/license/LICENSE. */ #if HAVE_CONFIG_H #include "config.h" #endif #include "imfs.h" #include <rtems/deviceio.h> int device_open( rtems_libio_t *iop, const char *pathname, int oflag, mode_t mode ) { const IMFS_device_t *device = IMFS_iop_to_device( iop ); return rtems_deviceio_open( iop, pathname, oflag, mode, device->major, device->minor ); } int device_close( rtems_libio_t *iop ) { const IMFS_device_t *device = IMFS_iop_to_device( iop ); return rtems_deviceio_close( iop, device->major, device->minor ); } ssize_t device_read( rtems_libio_t *iop, void *buffer, size_t count ) { const IMFS_device_t *device = IMFS_iop_to_device( iop ); return rtems_deviceio_read( iop, buffer, count, device->major, device->minor ); } ssize_t device_write( rtems_libio_t *iop, const void *buffer, size_t count ) { const IMFS_device_t *device = IMFS_iop_to_device( iop ); return rtems_deviceio_write( iop, buffer, count, device->major, device->minor ); } int device_ioctl( rtems_libio_t *iop, ioctl_command_t command, void *buffer ) { const IMFS_device_t *device = IMFS_iop_to_device( iop ); return rtems_deviceio_control( iop, command, buffer, device->major, device->minor ); } int device_ftruncate( rtems_libio_t *iop, off_t length ) { return 0; }
heshamelmatary/rtems-rumpkernel
cpukit/libfs/src/imfs/deviceio.c
C
gpl-2.0
1,789
[ 30522, 1013, 1008, 1008, 1008, 1030, 5371, 1008, 1008, 1030, 4766, 10047, 10343, 5080, 13045, 28213, 2015, 1008, 1030, 13749, 22107, 10047, 10343, 1008, 1013, 1013, 1008, 1008, 9385, 1006, 1039, 1007, 2960, 1011, 2262, 1012, 1008, 2006, 101...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<?php namespace LeftRight\Center\Libraries; class Dates { //take a mysql date string and return a relative date from it public static function relative($string) { if (empty($string)) return ''; return \Carbon\Carbon::createFromFormat('Y-m-d H:i:s', $string)->diffForHumans(); } public static function absolute($string) { if (empty($string)) return ''; return date('M d, Y', strtotime($string)); } public static function time($string) { if (empty($string)) return ''; return date('g:i A', strtotime($string)); } }
left-right/center
src/libraries/Dates.php
PHP
lgpl-3.0
536
[ 30522, 1026, 1029, 25718, 3415, 15327, 2187, 15950, 1032, 2415, 1032, 8860, 1025, 2465, 5246, 1063, 1013, 1013, 2202, 1037, 2026, 2015, 4160, 2140, 3058, 5164, 1998, 2709, 1037, 5816, 3058, 2013, 2009, 2270, 10763, 3853, 5816, 1006, 1002, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
using NUnit.Framework; using RefactoringEssentials.CSharp.CodeRefactorings; namespace RefactoringEssentials.Tests.CSharp.CodeRefactorings { [TestFixture] public class InvertConditionalOperatorTests : CSharpCodeRefactoringTestBase { [Test] public void TestCase1() { Test<InvertConditionalOperatorCodeRefactoringProvider>(@" class Foo { void Bar (int i, int j) { Console.WriteLine (i > j $? i : j); } } ", @" class Foo { void Bar (int i, int j) { Console.WriteLine (i <= j ? j : i); } } "); } [Test] public void TestConditionStart() { Test<InvertConditionalOperatorCodeRefactoringProvider>(@" class Foo { void Bar (int i, int j) { Console.WriteLine ($i > j ? i : j); } } ", @" class Foo { void Bar (int i, int j) { Console.WriteLine (i <= j ? j : i); } } "); } [Test] public void TestTrueStart() { Test<InvertConditionalOperatorCodeRefactoringProvider>(@" class Foo { void Bar (int i, int j) { Console.WriteLine (i > j ? $i : j); } } ", @" class Foo { void Bar (int i, int j) { Console.WriteLine (i <= j ? j : i); } } "); } [Test] public void TestFalseStart() { Test<InvertConditionalOperatorCodeRefactoringProvider>(@" class Foo { void Bar (int i, int j) { Console.WriteLine (i > j ? i : $j); } } ", @" class Foo { void Bar (int i, int j) { Console.WriteLine (i <= j ? j : i); } } "); } [Test] public void TestPopupLocations() { TestWrongContext<InvertConditionalOperatorCodeRefactoringProvider>(@" class Foo { void Bar (int i, int j) { Console.WriteLine (i > $j ? i : j); } } "); TestWrongContext<InvertConditionalOperatorCodeRefactoringProvider>(@" class Foo { void Bar (int i, int j) { Console.WriteLine (i > j ? i$i : j); } } "); TestWrongContext<InvertConditionalOperatorCodeRefactoringProvider>(@" class Foo { void Bar (int i, int j) { Console.WriteLine (i > j ? ii : j$j); } } "); } } }
mrward/RefactoringEssentials
Tests/CSharp/CodeRefactorings/InvertConditionalOperatorTests.cs
C#
mit
2,115
[ 30522, 2478, 16634, 4183, 1012, 7705, 1025, 2478, 25416, 18908, 28741, 8449, 5054, 20925, 2015, 1012, 20116, 8167, 2361, 1012, 3642, 2890, 7011, 16761, 8613, 1025, 3415, 15327, 25416, 18908, 28741, 8449, 5054, 20925, 2015, 1012, 5852, 1012, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
// ==++== // // Copyright (c) Microsoft Corporation. All rights reserved. // // ==--== // // <OWNER>LadiPro</OWNER> // <OWNER>RByers</OWNER> // <OWNER>ShawnFa</OWNER> using System; using System.Diagnostics.Contracts; namespace System.Runtime.InteropServices.WindowsRuntime { [ComImport] [Guid("4bd682dd-7554-40e9-9a9b-82654ede7e62")] [WindowsRuntimeImport] internal interface IPropertyValue { PropertyType Type { [Pure] get; } bool IsNumericScalar { [Pure] get; } [Pure] Byte GetUInt8(); [Pure] Int16 GetInt16(); [Pure] UInt16 GetUInt16(); [Pure] Int32 GetInt32(); [Pure] UInt32 GetUInt32(); [Pure] Int64 GetInt64(); [Pure] UInt64 GetUInt64(); [Pure] Single GetSingle(); [Pure] Double GetDouble(); [Pure] char GetChar16(); [Pure] Boolean GetBoolean(); [Pure] String GetString(); [Pure] Guid GetGuid(); [Pure] DateTimeOffset GetDateTime(); [Pure] TimeSpan GetTimeSpan(); [Pure] Point GetPoint(); [Pure] Size GetSize(); [Pure] Rect GetRect(); [Pure] Byte[] GetUInt8Array(); [Pure] Int16[] GetInt16Array(); [Pure] UInt16[] GetUInt16Array(); [Pure] Int32[] GetInt32Array(); [Pure] UInt32[] GetUInt32Array(); [Pure] Int64[] GetInt64Array(); [Pure] UInt64[] GetUInt64Array(); [Pure] Single[] GetSingleArray(); [Pure] Double[] GetDoubleArray(); [Pure] char[] GetChar16Array(); [Pure] Boolean[] GetBooleanArray(); [Pure] String[] GetStringArray(); [Pure] object[] GetInspectableArray(); [Pure] Guid[] GetGuidArray(); [Pure] DateTimeOffset[] GetDateTimeArray(); [Pure] TimeSpan[] GetTimeSpanArray(); [Pure] Point[] GetPointArray(); [Pure] Size[] GetSizeArray(); [Pure] Rect[] GetRectArray(); } // Specify size directly instead of fields to avoid warnings [StructLayoutAttribute(LayoutKind.Sequential, Size=8)] [WindowsRuntimeImport] internal struct Point { // float X; // float Y; } // Specify size directly instead of fields to avoid warnings [StructLayoutAttribute(LayoutKind.Sequential, Size=8)] [WindowsRuntimeImport] internal struct Size { // float Width; // float Height; } // Specify size directly 
instead of fields to avoid warnings [StructLayoutAttribute(LayoutKind.Sequential, Size=16)] [WindowsRuntimeImport] internal struct Rect { // float X; // float Y; // float Width; // float Height; } }
esdrubal/referencesource
mscorlib/system/runtime/interopservices/windowsruntime/ipropertyvalue.cs
C#
mit
3,119
[ 30522, 1013, 1013, 1027, 1027, 1009, 1009, 1027, 1027, 1013, 1013, 1013, 1013, 9385, 1006, 1039, 1007, 7513, 3840, 1012, 2035, 2916, 9235, 1012, 1013, 1013, 1013, 1013, 1027, 1027, 1011, 1011, 1027, 1027, 1013, 1013, 1013, 1013, 1026, 395...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/* * Copyright (c) 2012 Mellanox Technologies. All rights reserved * * This software is available to you under a choice of one of two * licenses. You may choose to be licensed under the terms of the GNU * General Public License (GPL) Version 2, available from the file * COPYING in the main directory of this source tree, or the * openfabric.org BSD license below: * * Redistribution and use in source and binary forms, with or * without modification, are permitted provided that the following * conditions are met: * * - Redistributions of source code must retain the above * copyright notice, this list of conditions and the following * disclaimer. * * - Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials * provided with the distribution. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ #ifndef _LINUX_ETH_IB_IPOIB_H #define _LINUX_ETH_IB_IPOIB_H #include <net/sch_generic.h> struct eipoib_cb_data { /* * extra care taken not to collide with the usage done * by the qdisc layer in struct skb cb data. */ struct qdisc_skb_cb qdisc_cb; struct { /* must be <= 20 bytes */ u32 sqpn; struct napi_struct *napi; u16 slid; u8 data[6]; } __packed rx; }; #define IPOIB_HANDLER_CB(skb) ((struct eipoib_cb_data *)(skb)->cb) #endif /* _LINUX_ETH_IB_IPOIB_H */
u9621071/kernel-uek-UEK3
include/rdma/e_ipoib.h
C
gpl-2.0
1,919
[ 30522, 1013, 1008, 1008, 9385, 1006, 1039, 1007, 2262, 11463, 5802, 11636, 6786, 1012, 2035, 2916, 9235, 1008, 1008, 2023, 4007, 2003, 2800, 2000, 2017, 2104, 1037, 3601, 1997, 2028, 1997, 2048, 1008, 15943, 1012, 2017, 2089, 5454, 2000, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
# # Cookbook Name:: cron # Recipe:: default # # Copyright 2010-2013, Opscode, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # package 'cron' do package_name case node['platform_family'] when 'rhel', 'fedora' node['platform_version'].to_f >= 6.0 ? 'cronie' : 'vixie-cron' when 'solaris2' 'core-os' when 'gentoo' 'vixie-cron' end end service 'cron' do service_name 'crond' if platform_family?('rhel', 'fedora') service_name 'vixie-cron' if platform_family?('gentoo') action [:enable, :start] end
3ofcoins/idk-repo
vendor/cookbooks/cron/recipes/default.rb
Ruby
apache-2.0
1,121
[ 30522, 1001, 1001, 5660, 8654, 2171, 1024, 1024, 13675, 2239, 1001, 17974, 1024, 1024, 12398, 1001, 1001, 9385, 2230, 1011, 2286, 1010, 23092, 16044, 1010, 4297, 1012, 1001, 1001, 7000, 2104, 1996, 15895, 6105, 1010, 2544, 1016, 1012, 1014,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/*- * Copyright (c) 2008 Ed Schouten <ed@FreeBSD.org> * All rights reserved. * * Portions of this software were developed under sponsorship from Snow * B.V., the Netherlands. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. */ #include <sys/cdefs.h> __FBSDID("$FreeBSD$"); #include <sys/param.h> #include <sys/kernel.h> #include <sys/lock.h> #include <sys/queue.h> #include <sys/systm.h> #include <sys/tty.h> #include <sys/uio.h> #include <vm/uma.h> /* * TTY output queue buffering. * * The previous design of the TTY layer offered the so-called clists. * These clists were used for both the input queues and the output * queue. We don't use certain features on the output side, like quoting * bits for parity marking and such. 
This mechanism is similar to the * old clists, but only contains the features we need to buffer the * output. */ struct ttyoutq_block { struct ttyoutq_block *tob_next; char tob_data[TTYOUTQ_DATASIZE]; }; static uma_zone_t ttyoutq_zone; #define TTYOUTQ_INSERT_TAIL(to, tob) do { \ if (to->to_end == 0) { \ tob->tob_next = to->to_firstblock; \ to->to_firstblock = tob; \ } else { \ tob->tob_next = to->to_lastblock->tob_next; \ to->to_lastblock->tob_next = tob; \ } \ to->to_nblocks++; \ } while (0) #define TTYOUTQ_REMOVE_HEAD(to) do { \ to->to_firstblock = to->to_firstblock->tob_next; \ to->to_nblocks--; \ } while (0) #define TTYOUTQ_RECYCLE(to, tob) do { \ if (to->to_quota <= to->to_nblocks) \ uma_zfree(ttyoutq_zone, tob); \ else \ TTYOUTQ_INSERT_TAIL(to, tob); \ } while(0) void ttyoutq_flush(struct ttyoutq *to) { to->to_begin = 0; to->to_end = 0; } void ttyoutq_setsize(struct ttyoutq *to, struct tty *tp, size_t size) { struct ttyoutq_block *tob; to->to_quota = howmany(size, TTYOUTQ_DATASIZE); while (to->to_quota > to->to_nblocks) { /* * List is getting bigger. * Add new blocks to the tail of the list. * * We must unlock the TTY temporarily, because we need * to allocate memory. This won't be a problem, because * in the worst case, another thread ends up here, which * may cause us to allocate too many blocks, but this * will be caught by the loop below. */ tty_unlock(tp); tob = uma_zalloc(ttyoutq_zone, M_WAITOK); tty_lock(tp); TTYOUTQ_INSERT_TAIL(to, tob); } } void ttyoutq_free(struct ttyoutq *to) { struct ttyoutq_block *tob; ttyoutq_flush(to); to->to_quota = 0; while ((tob = to->to_firstblock) != NULL) { TTYOUTQ_REMOVE_HEAD(to); uma_zfree(ttyoutq_zone, tob); } MPASS(to->to_nblocks == 0); } size_t ttyoutq_read(struct ttyoutq *to, void *buf, size_t len) { char *cbuf = buf; while (len > 0) { struct ttyoutq_block *tob; size_t cbegin, cend, clen; /* See if there still is data. 
*/ if (to->to_begin == to->to_end) break; tob = to->to_firstblock; if (tob == NULL) break; /* * The end address should be the lowest of these three: * - The write pointer * - The blocksize - we can't read beyond the block * - The end address if we could perform the full read */ cbegin = to->to_begin; cend = MIN(MIN(to->to_end, to->to_begin + len), TTYOUTQ_DATASIZE); clen = cend - cbegin; /* Copy the data out of the buffers. */ memcpy(cbuf, tob->tob_data + cbegin, clen); cbuf += clen; len -= clen; if (cend == to->to_end) { /* Read the complete queue. */ to->to_begin = 0; to->to_end = 0; } else if (cend == TTYOUTQ_DATASIZE) { /* Read the block until the end. */ TTYOUTQ_REMOVE_HEAD(to); to->to_begin = 0; to->to_end -= TTYOUTQ_DATASIZE; TTYOUTQ_RECYCLE(to, tob); } else { /* Read the block partially. */ to->to_begin += clen; } } return (cbuf - (char *)buf); } /* * An optimized version of ttyoutq_read() which can be used in pseudo * TTY drivers to directly copy data from the outq to userspace, instead * of buffering it. * * We can only copy data directly if we need to read the entire block * back to the user, because we temporarily remove the block from the * queue. Otherwise we need to copy it to a temporary buffer first, to * make sure data remains in the correct order. */ int ttyoutq_read_uio(struct ttyoutq *to, struct tty *tp, struct uio *uio) { while (uio->uio_resid > 0) { int error; struct ttyoutq_block *tob; size_t cbegin, cend, clen; /* See if there still is data. */ if (to->to_begin == to->to_end) return (0); tob = to->to_firstblock; if (tob == NULL) return (0); /* * The end address should be the lowest of these three: * - The write pointer * - The blocksize - we can't read beyond the block * - The end address if we could perform the full read */ cbegin = to->to_begin; cend = MIN(MIN(to->to_end, to->to_begin + uio->uio_resid), TTYOUTQ_DATASIZE); clen = cend - cbegin; /* * We can prevent buffering in some cases: * - We need to read the block until the end. 
* - We don't need to read the block until the end, but * there is no data beyond it, which allows us to move * the write pointer to a new block. */ if (cend == TTYOUTQ_DATASIZE || cend == to->to_end) { /* * Fast path: zero copy. Remove the first block, * so we can unlock the TTY temporarily. */ TTYOUTQ_REMOVE_HEAD(to); to->to_begin = 0; if (to->to_end <= TTYOUTQ_DATASIZE) to->to_end = 0; else to->to_end -= TTYOUTQ_DATASIZE; /* Temporary unlock and copy the data to userspace. */ tty_unlock(tp); error = uiomove(tob->tob_data + cbegin, clen, uio); tty_lock(tp); /* Block can now be readded to the list. */ TTYOUTQ_RECYCLE(to, tob); } else { char ob[TTYOUTQ_DATASIZE - 1]; /* * Slow path: store data in a temporary buffer. */ memcpy(ob, tob->tob_data + cbegin, clen); to->to_begin += clen; MPASS(to->to_begin < TTYOUTQ_DATASIZE); /* Temporary unlock and copy the data to userspace. */ tty_unlock(tp); error = uiomove(ob, clen, uio); tty_lock(tp); } if (error != 0) return (error); } return (0); } size_t ttyoutq_write(struct ttyoutq *to, const void *buf, size_t nbytes) { const char *cbuf = buf; struct ttyoutq_block *tob; unsigned int boff; size_t l; while (nbytes > 0) { boff = to->to_end % TTYOUTQ_DATASIZE; if (to->to_end == 0) { /* First time we're being used or drained. */ MPASS(to->to_begin == 0); tob = to->to_firstblock; if (tob == NULL) { /* Queue has no blocks. */ break; } to->to_lastblock = tob; } else if (boff == 0) { /* We reached the end of this block on last write. */ tob = to->to_lastblock->tob_next; if (tob == NULL) { /* We've reached the watermark. */ break; } to->to_lastblock = tob; } else { tob = to->to_lastblock; } /* Don't copy more than was requested. 
*/ l = MIN(nbytes, TTYOUTQ_DATASIZE - boff); MPASS(l > 0); memcpy(tob->tob_data + boff, cbuf, l); cbuf += l; nbytes -= l; to->to_end += l; } return (cbuf - (const char *)buf); } int ttyoutq_write_nofrag(struct ttyoutq *to, const void *buf, size_t nbytes) { size_t ret; if (ttyoutq_bytesleft(to) < nbytes) return (-1); /* We should always be able to write it back. */ ret = ttyoutq_write(to, buf, nbytes); MPASS(ret == nbytes); return (0); } static void ttyoutq_startup(void *dummy) { ttyoutq_zone = uma_zcreate("ttyoutq", sizeof(struct ttyoutq_block), NULL, NULL, NULL, NULL, UMA_ALIGN_PTR, 0); } SYSINIT(ttyoutq, SI_SUB_DRIVERS, SI_ORDER_FIRST, ttyoutq_startup, NULL);
jhbsz/OSI-OS
sys/kern/tty_outq.c
C
bsd-3-clause
8,799
[ 30522, 1013, 1008, 1011, 1008, 9385, 1006, 1039, 1007, 2263, 3968, 8040, 6806, 10421, 2078, 1026, 3968, 1030, 2489, 5910, 2094, 1012, 8917, 1028, 1008, 2035, 2916, 9235, 1012, 1008, 1008, 8810, 1997, 2023, 4007, 2020, 2764, 2104, 12026, 2...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
.keypoint { background-color: #f5f5f5; background-repeat: no-repeat; padding-top: 1%; padding-right: 5%; padding-bottom: 2%; padding-left: 5%; background-size: auto; font-family: [[setting:fontname]], sans-serif !important; font-weight: [[setting:fontweight]] !important; }
faizkautsarr/edukatif
theme/adaptable/style/editor.css
CSS
gpl-3.0
310
[ 30522, 1012, 3145, 8400, 1063, 4281, 1011, 3609, 1024, 1001, 1042, 2629, 2546, 2629, 2546, 2629, 1025, 4281, 1011, 9377, 1024, 2053, 1011, 9377, 1025, 11687, 4667, 1011, 2327, 1024, 1015, 1003, 1025, 11687, 4667, 1011, 2157, 1024, 1019, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<!DOCTYPE html> <html xml:lang="en-US" lang="en-US" xmlns="http://www.w3.org/1999/xhtml"> <head lang="en-US"> <title>My Family Tree - Douglas, Frederick</title> <meta charset="UTF-8" /> <meta name ="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=1" /> <meta name ="apple-mobile-web-app-capable" content="yes" /> <meta name="generator" content="Gramps 4.2.2 http://gramps-project.org/" /> <meta name="author" content="" /> <link href="../../../images/favicon2.ico" rel="shortcut icon" type="image/x-icon" /> <link href="../../../css/narrative-screen.css" media="screen" rel="stylesheet" type="text/css" /> <link href="../../../css/narrative-print.css" media="print" rel="stylesheet" type="text/css" /> <link href="../../../css/ancestortree.css" media="screen" rel="stylesheet" type="text/css" /> </head> <body> <div id="header"> <h1 id="SiteTitle">My Family Tree</h1> </div> <div class="wrapper" id="nav" role="navigation"> <div class="container"> <ul class="menu" id="dropmenu"> <li class = "CurrentSection"><a href="../../../individuals.html" title="Individuals">Individuals</a></li> <li><a href="../../../index.html" title="Surnames">Surnames</a></li> <li><a href="../../../places.html" title="Places">Places</a></li> <li><a href="../../../sources.html" title="Sources">Sources</a></li> <li><a href="../../../media.html" title="Media">Media</a></li> <li><a href="../../../thumbnails.html" title="Thumbnails">Thumbnails</a></li> </ul> </div> </div> <div class="content" id="IndividualDetail"> <h3>Douglas, Frederick<sup><small> <a href="#sref1">1</a></small></sup></h3> <div id="summaryarea"> <table class="infolist"> <tr> <td class="ColumnAttribute">Birth Name</td> <td class="ColumnValue"> Douglas, Frederick </td> </tr> <tr> <td class="ColumnAttribute">Gramps&nbsp;ID</td> <td class="ColumnValue">I0996</td> </tr> <tr> <td class="ColumnAttribute">Gender</td> <td class="ColumnValue">male</td> </tr> </table> </div> <div class="subsection" id="events"> 
<h4>Events</h4> <table class="infolist eventlist"> <thead> <tr> <th class="ColumnEvent">Event</th> <th class="ColumnDate">Date</th> <th class="ColumnPlace">Place</th> <th class="ColumnDescription">Description</th> <th class="ColumnNotes">Notes</th> <th class="ColumnSources">Sources</th> </tr> </thead> <tbody> <tr> <td class="ColumnEvent"> Death </td> <td class="ColumnDate">&nbsp;</td> <td class="ColumnPlace"> <a href="../../../plc/5/c/9PDKQCPTOZZWMPSPC5.html" title="Dover, Kent, DE, USA"> Dover, Kent, DE, USA </a> </td> <td class="ColumnDescription"> Death of Douglas, Frederick </td> <td class="ColumnNotes"> <div> </div> </td> <td class="ColumnSources"> &nbsp; </td> </tr> </tbody> </table> </div> <div class="subsection" id="families"> <h4>Families</h4> <table class="infolist"> <tr class="BeginFamily"> <td class="ColumnType">&nbsp</td> <td class="ColumnAttribute">&nbsp</td> <td class="ColumnValue"><a href="" title="Family of Douglas, Frederick and Stanley, Barbara">Family of Douglas, Frederick and Stanley, Barbara<span class="grampsid"> [F0319]</span></a></td> </tr> <tr class="BeginFamily"> <td class="ColumnType">Married</td> <td class="ColumnAttribute">Wife</td> <td class="ColumnValue"> <a href="../../../ppl/i/3/OPDKQC8T84H79IVZ3I.html">Stanley, Barbara<span class="grampsid"> [I0997]</span></a> </td> </tr> <tr> <td class="ColumnType">&nbsp;</td> <td class="ColumnAttribute">&nbsp;</td> <td class="ColumnValue"> <table class="infolist eventlist"> <thead> <tr> <th class="ColumnEvent">Event</th> <th class="ColumnDate">Date</th> <th class="ColumnPlace">Place</th> <th class="ColumnDescription">Description</th> <th class="ColumnNotes">Notes</th> <th class="ColumnSources">Sources</th> </tr> </thead> <tbody> <tr> <td class="ColumnEvent"> Marriage </td> <td class="ColumnDate">&nbsp;</td> <td class="ColumnPlace">&nbsp;</td> <td class="ColumnDescription"> Marriage of Douglas, Frederick and Stanley, Barbara </td> <td class="ColumnNotes"> <div> </div> </td> <td 
class="ColumnSources"> &nbsp; </td> </tr> </tbody> </table> </td> <tr> <td class="ColumnType">&nbsp;</td> <td class="ColumnAttribute">Children</td> <td class="ColumnValue"> <ol> <li> <a href="../../../ppl/n/z/WVBKQC4M0WSS7YOMZN.html">Douglas, Mary&#8220;Polly&#8221;<span class="grampsid"> [I0921]</span></a> </li> </ol> </td> </tr> </tr> </table> </div> <div class="subsection" id="pedigree"> <h4>Pedigree</h4> <ol class="pedigreegen"> <li> <ol> <li class="thisperson"> Douglas, Frederick <ol class="spouselist"> <li class="spouse"> <a href="../../../ppl/i/3/OPDKQC8T84H79IVZ3I.html">Stanley, Barbara<span class="grampsid"> [I0997]</span></a> <ol> <li> <a href="../../../ppl/n/z/WVBKQC4M0WSS7YOMZN.html">Douglas, Mary&#8220;Polly&#8221;<span class="grampsid"> [I0921]</span></a> </li> </ol> </li> </ol> </li> </ol> </li> </ol> </div> <div class="subsection" id="sourcerefs"> <h4>Source References</h4> <ol> <li> <a href="../../../src/x/a/X5TJQC9JXU4RKT6VAX.html" title="Import from test2.ged" name ="sref1"> Import from test2.ged <span class="grampsid"> [S0003]</span> </a> </li> </ol> </div> </div> <div class="fullclear"></div> <div id="footer"> <p id="createdate"> Generated by <a href="http://gramps-project.org/">Gramps</a> 4.2.2 on 2015-12-25<br />Last change was the 2007-07-26 08:34:25 </p> <p id="copyright"> </p> </div> </body> </html>
belissent/testing-example-reports
gramps42/gramps/example_NAVWEB0/ppl/8/4/XODKQCEZISUYEE5J48.html
HTML
gpl-2.0
6,289
[ 30522, 1026, 999, 9986, 13874, 16129, 1028, 1026, 16129, 20950, 1024, 11374, 1027, 1000, 4372, 1011, 2149, 1000, 11374, 1027, 1000, 4372, 1011, 2149, 1000, 20950, 3619, 1027, 1000, 8299, 1024, 1013, 1013, 7479, 1012, 1059, 2509, 1012, 8917,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
({"text":"รายละเอียด","insertImageTitle":"คุณสมบัติอิมเมจ","set":"ตั้งค่า","newWindow":"หน้าต่างใหม่","topWindow":"หน้าต่างบนสุด","target":"เป้าหมาย:","createLinkTitle":"คุณสมบัติลิงก์","parentWindow":"หน้าต่างหลัก","currentWindow":"หน้าต่างปัจจุบัน","url":"URL:"})
ozoneplatform/owf-framework
web-app/js-lib/dojo-release-1.5.0/dijit/_editor/nls/th/LinkDialog.js
JavaScript
apache-2.0
479
[ 30522, 1006, 1063, 1000, 3793, 1000, 1024, 1000, 100, 1000, 1010, 1000, 19274, 9581, 18150, 4183, 2571, 1000, 1024, 1000, 100, 1000, 1010, 1000, 2275, 1000, 1024, 1000, 100, 1000, 1010, 1000, 30524, 1010, 1000, 2327, 11101, 5004, 1000, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
define(function () { return { customerAccount: ['CustomerAccountService', function (CustomerAccountService) { return CustomerAccountService.getCurrent(); }] }; });
lordmarkm/tyrael-laundry
tyrael-laundry-dist/src/main/webapp/common/resolve/JobOrderListResolve.js
JavaScript
gpl-2.0
182
[ 30522, 9375, 1006, 3853, 1006, 1007, 1063, 2709, 1063, 8013, 6305, 3597, 16671, 1024, 1031, 1005, 8013, 6305, 3597, 16671, 8043, 7903, 2063, 1005, 1010, 3853, 1006, 8013, 6305, 3597, 16671, 8043, 7903, 2063, 1007, 1063, 2709, 8013, 6305, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
use itertools::Itertools; use malachite_base::bools::exhaustive::exhaustive_bools; #[test] fn test_exhaustive_bools() { assert_eq!(exhaustive_bools().collect_vec(), &[false, true]); }
mhogrefe/malachite
malachite-base/tests/bools/exhaustive.rs
Rust
lgpl-3.0
189
[ 30522, 2224, 2009, 8743, 13669, 2015, 1024, 1024, 2009, 8743, 13669, 2015, 1025, 2224, 28935, 5428, 2618, 1035, 2918, 1024, 1024, 22017, 4877, 1024, 1024, 15095, 3512, 1024, 1024, 15095, 3512, 1035, 22017, 4877, 1025, 1001, 1031, 3231, 1033...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/* * Copyright (C) 2008 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.uzaygezen.core; import java.math.BigInteger; import java.util.BitSet; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import com.google.common.base.Preconditions; /** * BitVector implementation for vectors of length 64 or less. * * @author Mehmet Akin * @author Daniel Aioanei */ public final class LongBitVector implements BitVector, Cloneable { private static final long WORD_MASK = -1L; private static final int BITS_PER_WORD = 64; private final int size; // used to clear excess bits after operations. // Equal to WORD_MASK >>> BITS_PER_WORD - size private final long mask; private long data; public LongBitVector(int size) { this(size, 0); Preconditions.checkArgument(size >= 0 && size <= BITS_PER_WORD, "Size must be positive and <= {1} size: {2}", BITS_PER_WORD, size); } /** * Unsafe constructor. Keep it private. */ private LongBitVector(int size, long data) { assert 64 - Long.numberOfLeadingZeros(data) <= size; this.size = size; this.data = data; mask = size == 0 ? 0 : WORD_MASK >>> BITS_PER_WORD - size; } private void checkSize(BitVector other) { if (size != other.size()) { throw new IllegalArgumentException( "Sizes must be equal. 
" + this.size + " : " + other.size()); } } private void checkIndex(int bitIndex) { if (bitIndex < 0 | bitIndex >= size) { throw new IndexOutOfBoundsException("Bit index out of range: " + bitIndex); } } private void checkBounds(int fromIndex, int toIndex) { if (fromIndex < 0 | toIndex > size | fromIndex > toIndex) { throw new IndexOutOfBoundsException( "Range [" + fromIndex + ", " + toIndex + ") is invalid for this bit vector"); } } @Override public void and(BitVector o) { checkSize(o); data &= o.toExactLong(); } @Override public void andNot(BitVector o) { checkSize(o); data &= ~o.toExactLong(); } @Override public int cardinality() { return Long.bitCount(data); } @Override public void clear() { data = 0; } @Override public void clear(int bitIndex) { checkIndex(bitIndex); data &= ~(1L << bitIndex); } @Override public void clear(int fromIndex, int toIndex) { checkBounds(fromIndex, toIndex); if (fromIndex != toIndex) { unsafeClearNonEmptySection(fromIndex, toIndex); } } private void unsafeClearNonEmptySection(int fromIndex, int toIndex) { data &= ~((WORD_MASK << fromIndex) & (WORD_MASK >>> -toIndex)); } @Override public void copyFrom(BitVector from) { checkSize(from); data = from.toExactLong(); } @Override public void flip(int bitIndex) { checkIndex(bitIndex); data ^= (1L << bitIndex); } @Override public void flip(int fromIndex, int toIndex) { checkBounds(fromIndex, toIndex); if (fromIndex != toIndex) { data ^= ((WORD_MASK << fromIndex) & (WORD_MASK >>> -toIndex)); } } @Override public boolean get(int bitIndex) { checkIndex(bitIndex); return unsafeGet(bitIndex); } private boolean unsafeGet(int bitIndex) { return (data & (1L << bitIndex)) != 0; } @Override public void grayCode() { data ^= (data >>> 1); } @Override public void grayCodeInverse() { long localData = data; localData ^= localData >>> 1; localData ^= localData >>> 2; localData ^= localData >>> 4; localData ^= localData >>> 8; localData ^= localData >>> 16; localData ^= localData >>> 32; data = localData; } 
@Override public boolean increment() { // Check for overflow if (data == mask) { return false; } data++; return true; } @Override public boolean intersects(BitVector o) { checkSize(o); return (data & o.toExactLong()) != 0; } @Override public int length() { return BITS_PER_WORD - Long.numberOfLeadingZeros(data); } @Override public int size() { return size; } @Override public int nextClearBit(int fromIndex) { Preconditions.checkArgument(fromIndex >= 0); if (fromIndex >= size) { return -1; } long w = ~data & (WORD_MASK << fromIndex); int tcb = Long.numberOfTrailingZeros(w); return tcb == size ? -1 : tcb; } @Override public int nextSetBit(int fromIndex) { Preconditions.checkArgument(fromIndex >= 0); if (fromIndex >= size) { return -1; } long w = data & (WORD_MASK << fromIndex); int tcb = Long.numberOfTrailingZeros(w); return tcb == 64 ? -1 : tcb; } @Override public void or(BitVector o) { checkSize(o); this.data |= o.toExactLong(); } @Override public void rotate(int count) { final int localSize = size; count %= localSize; final long localData = data; if (count > 0) { data = ((localData >>> count) | (localData << localSize - count)) & mask; } else { data = ((localData >>> localSize + count) | (localData << -count)) & mask; } } @Override public void set(int bitIndex) { checkIndex(bitIndex); data |= (1L << bitIndex); } public void set(int bitIndex, boolean value) { if (value) { set(bitIndex); } else { clear(bitIndex); } } @Override public void set(int fromIndex, int toIndex) { checkBounds(fromIndex, toIndex); if (fromIndex != toIndex) { data |= ((WORD_MASK << fromIndex) & (WORD_MASK >>> -toIndex)); } } @Override public void set(int fromIndex, int toIndex, boolean value) { if (value) { set(fromIndex, toIndex); } else { clear(fromIndex, toIndex); } } @Override public void xor(BitVector o) { checkSize(o); this.data ^= o.toExactLong(); } @Override public boolean isEmpty() { return data == 0; } @Override public LongBitVector clone() { try { return (LongBitVector) super.clone(); 
} catch (CloneNotSupportedException e) { throw new InternalError("Cloning error. "); } } @Override public boolean equals(Object obj) { if (obj instanceof BitVector) { BitVector other = (BitVector) obj; // optimisation if (size <= 64) { return size == other.size() && data == other.toExactLong(); } else { return size == other.size() && toBitSet().equals(other.toBitSet()); } } else { return false; } } @Override public int hashCode() { // We imitate BitSet's hashcode implementation long h = 1234 ^ data; int bitSetHashCode = (int) ((h >> 32) ^ h); return size + 31 * bitSetHashCode; } @Override public String toString() { return StringUtils.leftPad(Long.toBinaryString(data), size, '0'); } @Override public BitSet toBitSet() { BitSet b = new BitSet(size); for (int i = 0; i < size; i++) { if (unsafeGet(i)) { b.set(i); } } return b; } @Override public long toLong() { return data; } @Override public BigInteger toBigInteger() { final BigInteger result; if (data >= 0) { result = BigInteger.valueOf(data); } else { BigInteger missingLowestBit = BigInteger.valueOf(data >>> 1).shiftLeft(1); if ((data & 1) == 1) { result = missingLowestBit.setBit(0); } else { result = missingLowestBit; } } return result; } public void copyFrom(long value) { Preconditions.checkArgument(64 - Long.numberOfLeadingZeros(value) <= size, "value doesn't fit"); data = value; } @Override public int compareTo(BitVector o) { checkSize(o); final int cmp; // optimisation if (o.size() <= 64) { // 0, positives, Long.MAX_VALUE, Long.MIN_VALUE, negatives, -1 long x = data + Long.MIN_VALUE; long y = o.toExactLong() + Long.MIN_VALUE; cmp = Long.compare(x, y); assert Integer.signum(cmp) == Integer.signum( BitSetComparator.INSTANCE.compare(toBitSet(), o.toBitSet())); } else { cmp = BitSetComparator.INSTANCE.compare(toBitSet(), o.toBitSet()); } return cmp; } @Override public void copyFrom(BitSet from) { int localSize = size; long value = 0; for (int i = from.nextSetBit(0); i != -1; i = from.nextSetBit(i + 1)) { 
Preconditions.checkArgument(i < localSize, "bit set too large"); value |= 1L << i; } data = value; } @Override public void copyFromSection(BitVector src, int fromIndex) { Preconditions.checkArgument(fromIndex >= 0, "fromIndex must be non-negative"); int srcSize = src.size(); int toIndex = fromIndex + size; Preconditions.checkArgument(toIndex <= srcSize, "not enough bits in src"); long value; if (toIndex <= 64) { long srcData = src.toLong(); value = (srcData >>> fromIndex) & mask; } else { value = 0; for (int i = src.nextSetBit(fromIndex); i < toIndex && i != -1; i = src.nextSetBit(i + 1)) { value |= 1L << (i - fromIndex); } } data = value; } @Override public long toExactLong() { return data; } @Override public void smallerEvenAndGrayCode() { long localData = data; if ((localData & 0x1) == 1) { assert size > 0; data = localData ^ (localData >>> 1) ^ 0x1; } else { if (localData != 0) { long dataMinusTwo = localData - 2; data = dataMinusTwo ^ (dataMinusTwo >>> 1); } } } @Override public void grayCodeRank(BitVector mu, BitVector w) { grayCodeRank(mu, w, true); } /** * Visible for testing. */ void grayCodeRank(BitVector mu, BitVector w, boolean optimiseIfPossible) { int theirSize = mu.size(); Preconditions.checkArgument(theirSize == w.size(), "mu/w size mismatch"); int muLen = mu.length(); long pow2pos = 1L; long value = 0; if (optimiseIfPossible & muLen <= 64) { // mu doesn't have any set bits over index 63 long muLong = mu.toExactLong(); // w might have some set bits over index 63, but they don't matter anyway long wLong = w.toLong(); long pow2i = 1L; for (int i = 0; i < muLen; ++i) { if ((muLong & pow2i) != 0) { if ((wLong & pow2i) != 0) { value |= pow2pos; } pow2pos <<= 1; } pow2i <<= 1; } } else { for (int j = theirSize == 0 ? -1 : mu.nextSetBit(0); j != -1; j = j == theirSize - 1 ? 
-1 : mu.nextSetBit(j + 1)) { if (w.get(j)) { value |= pow2pos; } pow2pos <<= 1; } } assert pow2pos == 1L << mu.cardinality(); Preconditions.checkArgument(1L << size == pow2pos, "wrong size"); data = value; } @Override public int lowestDifferentBit() { long localData = data; final int value; if ((localData & 0x1L) == 0) { if (localData == 0) { value = 0; } else { value = Long.numberOfTrailingZeros(localData); } } else { if (localData == mask) { value = 0; } else { value = Long.numberOfTrailingZeros(~localData); } } assert value == 0 || (0 < value & value < size); return value; } @Override public void grayCodeRankInverse(BitVector mu, BitVector known, BitVector r) { int muSize = mu.size(); Preconditions.checkArgument(size == muSize, "i/mu size mismatch"); // Will fail if the sizes are different. Preconditions.checkArgument(!known.intersects(mu), "known and mu must not intersect"); long muLong = mu.toExactLong(); long knownLong = known.toExactLong(); // Will check r.size() against mu.cardinality later. 
int rSize = r.size(); Preconditions.checkArgument(rSize <= muSize, "r is too large"); long rLong = r.toExactLong(); long value = 0; int pos = 0; int muLength = mu.length(); long pow2k = 1L; for (int k = 0; k < muLength; ++k) { if ((muLong & pow2k) != 0) { if ((rLong >> pos & 1L) != 0) { value |= pow2k; } ++pos; } pow2k <<= 1; } assert pos == mu.cardinality(); Preconditions.checkArgument(pos == rSize, "r.size()/mu.cardinality() mismatch"); int knownLength = known.length(); for (int k = Math.max(muLength - 1, knownLength); --k >= 0; ) { pow2k = 1L << k; if ((muLong & pow2k) == 0) { assert (value & pow2k) == 0; if (((knownLong & pow2k) ^ (value >> 1 & pow2k)) != 0) { value |= pow2k; } } } data = value; } @Override public void copySectionFrom(int offset, BitVector src) { int srcSize = src.size(); int toIndex = offset + srcSize; if (offset < 0 | toIndex > size) { throw new IndexOutOfBoundsException( "invalid range: offset=" + offset + " src.size()=" + src.size()); } if (offset != toIndex) { unsafeClearNonEmptySection(offset, toIndex); long srcData = src.toExactLong(); data |= srcData << offset; } } @Override public long[] toLongArray() { return size == 0 ? 
ArrayUtils.EMPTY_LONG_ARRAY : new long[] {data}; } @Override public byte[] toBigEndianByteArray() { int n = MathUtils.bitCountToByteCount(size); byte[] a = new byte[n]; long x = data; for (int i = 0; i < n; ) { a[n - ++i] = (byte) (x & 0xFF); x >>>= 8; } assert x == 0; return a; } @Override public void copyFrom(long[] array) { if (size == 0) { Preconditions.checkArgument(array.length == 0, "Array must be empty."); } else { Preconditions.checkArgument(array.length == 1, "Array length must be 1."); copyFrom(array[0]); } } @Override public void copyFromBigEndian(byte[] array) { int n = MathUtils.bitCountToByteCount(size); Preconditions.checkArgument(array.length == n, "Array length must be %s.", n); long x = 0; for (int i = 0; i < n - 1; ) { x |= (array[i++] & 0xFF); x <<= 8; } if (n != 0) { x |= (array[n - 1] & 0xFF); } copyFrom(x); } @Override public boolean areAllLowestBitsClear(int bitCount) { Preconditions.checkArgument(0 <= bitCount & bitCount <= size, "bitCount is out of range"); // Only bitCount == 64 is affected by xoring with (bitCount >> 6). return (data & (((1L << bitCount) ^ (bitCount >> 6)) - 1)) == 0; } @Override public void copyFrom(BigInteger s) { Preconditions.checkArgument(s.signum() >= 0, s); Preconditions.checkArgument(s.bitLength() <= size()); // Note that the long value will be negative iff bitLength == 644 and bit 63 // is set. copyFrom(s.longValue()); } }
GrammarViz2/Uzaygezen
uzaygezen-core/src/main/java/com/google/uzaygezen/core/LongBitVector.java
Java
apache-2.0
15,293
[ 30522, 1013, 1008, 1008, 9385, 1006, 1039, 1007, 2263, 8224, 4297, 1012, 1008, 1008, 7000, 2104, 1996, 15895, 6105, 1010, 2544, 1016, 1012, 1014, 1006, 1996, 1000, 6105, 1000, 1007, 1025, 1008, 2017, 2089, 2025, 2224, 2023, 5371, 3272, 19...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
jsonp({"cep":"75133834","logradouro":"Rua Carna\u00faba","bairro":"Residencial Cidade Industrial","cidade":"An\u00e1polis","uf":"GO","estado":"Goi\u00e1s"});
lfreneda/cepdb
api/v1/75133834.jsonp.js
JavaScript
cc0-1.0
158
[ 30522, 1046, 3385, 2361, 1006, 1063, 1000, 8292, 2361, 1000, 1024, 1000, 4293, 17134, 22025, 22022, 1000, 1010, 1000, 8833, 12173, 8162, 2080, 1000, 1024, 1000, 21766, 2050, 2482, 2532, 30524, 1024, 1000, 2019, 1032, 1057, 8889, 2063, 2487,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/** Problem 9. Extract e-mails Write a function for extracting all email addresses from given text. All sub-strings that match the format @… should be recognized as emails. Return the emails as array of strings. */ console.log('Problem 9. Extract e-mails'); var text='gosho@gmail.com bla bla bla pesho_peshev@yahoo.com bla bla gosho_geshev@outlook.com' function extractEmails(text) { var result=text.match(/[A-Z0-9._-]+@[A-Z0-9.-]+\.[A-Z]{2,4}/gi); return result; } console.log('Text: '+text); console.log('E-mail: '+extractEmails(text)); console.log('#########################################');
ztodorova/Telerik-Academy
JavaScript Fundamentals/Strings/scripts/9. Extract e-mails.js
JavaScript
mit
605
[ 30522, 1013, 1008, 1008, 3291, 1023, 1012, 14817, 1041, 1011, 5653, 2015, 4339, 1037, 3853, 2005, 14817, 2075, 2035, 10373, 11596, 2013, 2445, 3793, 1012, 2035, 4942, 1011, 7817, 2008, 2674, 1996, 4289, 1030, 2323, 2022, 3858, 2004, 22028, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/* * Copyright (c) 2009 The Regents of The University of Michigan * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer; * redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution; * neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* * Authors: Gabe Black */ #include "arch/mips/isa.hh" #include "arch/mips/mt.hh" #include "arch/mips/mt_constants.hh" #include "arch/mips/pra_constants.hh" #include "base/bitfield.hh" #include "cpu/base.hh" #include "cpu/thread_context.hh" #include "debug/MipsPRA.hh" #include "params/MipsISA.hh" namespace MipsISA { std::string ISA::miscRegNames[NumMiscRegs] = { "Index", "MVPControl", "MVPConf0", "MVPConf1", "", "", "", "", "Random", "VPEControl", "VPEConf0", "VPEConf1", "YQMask", "VPESchedule", "VPEScheFBack", "VPEOpt", "EntryLo0", "TCStatus", "TCBind", "TCRestart", "TCHalt", "TCContext", "TCSchedule", "TCScheFBack", "EntryLo1", "", "", "", "", "", "", "", "Context", "ContextConfig", "", "", "", "", "", "", "PageMask", "PageGrain", "", "", "", "", "", "", "Wired", "SRSConf0", "SRCConf1", "SRSConf2", "SRSConf3", "SRSConf4", "", "", "HWREna", "", "", "", "", "", "", "", "BadVAddr", "", "", "", "", "", "", "", "Count", "", "", "", "", "", "", "", "EntryHi", "", "", "", "", "", "", "", "Compare", "", "", "", "", "", "", "", "Status", "IntCtl", "SRSCtl", "SRSMap", "", "", "", "", "Cause", "", "", "", "", "", "", "", "EPC", "", "", "", "", "", "", "", "PRId", "EBase", "", "", "", "", "", "", "Config", "Config1", "Config2", "Config3", "", "", "", "", "LLAddr", "", "", "", "", "", "", "", "WatchLo0", "WatchLo1", "WatchLo2", "WatchLo3", "WatchLo4", "WatchLo5", "WatchLo6", "WatchLo7", "WatchHi0", "WatchHi1", "WatchHi2", "WatchHi3", "WatchHi4", "WatchHi5", "WatchHi6", "WatchHi7", "XCContext64", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "Debug", "TraceControl1", "TraceControl2", "UserTraceData", "TraceBPC", "", "", "", "DEPC", "", "", "", "", "", "", "", "PerfCnt0", "PerfCnt1", "PerfCnt2", "PerfCnt3", "PerfCnt4", "PerfCnt5", "PerfCnt6", "PerfCnt7", "ErrCtl", "", "", "", "", "", "", "", "CacheErr0", "CacheErr1", "CacheErr2", "CacheErr3", "", "", "", "", "TagLo0", "DataLo1", "TagLo2", "DataLo3", "TagLo4", "DataLo5", "TagLo6", 
"DataLo7", "TagHi0", "DataHi1", "TagHi2", "DataHi3", "TagHi4", "DataHi5", "TagHi6", "DataHi7", "ErrorEPC", "", "", "", "", "", "", "", "DESAVE", "", "", "", "", "", "", "", "LLFlag" }; ISA::ISA(Params *p) : SimObject(p), numThreads(p->num_threads), numVpes(p->num_vpes) { miscRegFile.resize(NumMiscRegs); bankType.resize(NumMiscRegs); for (int i=0; i < NumMiscRegs; i++) { miscRegFile[i].resize(1); bankType[i] = perProcessor; } miscRegFile_WriteMask.resize(NumMiscRegs); for (int i = 0; i < NumMiscRegs; i++) { miscRegFile_WriteMask[i].push_back(0); } // Initialize all Per-VPE regs uint32_t per_vpe_regs[] = { MISCREG_VPE_CONTROL, MISCREG_VPE_CONF0, MISCREG_VPE_CONF1, MISCREG_YQMASK, MISCREG_VPE_SCHEDULE, MISCREG_VPE_SCHEFBACK, MISCREG_VPE_OPT, MISCREG_SRS_CONF0, MISCREG_SRS_CONF1, MISCREG_SRS_CONF2, MISCREG_SRS_CONF3, MISCREG_SRS_CONF4, MISCREG_EBASE }; uint32_t num_vpe_regs = sizeof(per_vpe_regs) / 4; for (int i = 0; i < num_vpe_regs; i++) { if (numVpes > 1) { miscRegFile[per_vpe_regs[i]].resize(numVpes); } bankType[per_vpe_regs[i]] = perVirtProcessor; } // Initialize all Per-TC regs uint32_t per_tc_regs[] = { MISCREG_STATUS, MISCREG_TC_STATUS, MISCREG_TC_BIND, MISCREG_TC_RESTART, MISCREG_TC_HALT, MISCREG_TC_CONTEXT, MISCREG_TC_SCHEDULE, MISCREG_TC_SCHEFBACK, MISCREG_DEBUG, MISCREG_LLADDR }; uint32_t num_tc_regs = sizeof(per_tc_regs) / 4; for (int i = 0; i < num_tc_regs; i++) { miscRegFile[per_tc_regs[i]].resize(numThreads); bankType[per_tc_regs[i]] = perThreadContext; } clear(); } const MipsISAParams * ISA::params() const { return dynamic_cast<const Params *>(_params); } void ISA::clear() { for (int i = 0; i < NumMiscRegs; i++) { for (int j = 0; j < miscRegFile[i].size(); j++) miscRegFile[i][j] = 0; for (int k = 0; k < miscRegFile_WriteMask[i].size(); k++) miscRegFile_WriteMask[i][k] = (long unsigned int)(-1); } } void ISA::configCP() { DPRINTF(MipsPRA, "Resetting CP0 State with %i TCs and %i VPEs\n", numThreads, numVpes); CoreSpecific cp; panic("CP state must be set 
before the following code is used"); // Do Default CP0 initialization HERE // Do Initialization for MT cores here (eventually use // core_name parameter to toggle this initialization) // =================================================== DPRINTF(MipsPRA, "Initializing CP0 State.... "); PRIdReg procId = readMiscRegNoEffect(MISCREG_PRID); procId.coOp = cp.CP0_PRId_CompanyOptions; procId.coId = cp.CP0_PRId_CompanyID; procId.procId = cp.CP0_PRId_ProcessorID; procId.rev = cp.CP0_PRId_Revision; setMiscRegNoEffect(MISCREG_PRID, procId); // Now, create Write Mask for ProcID register MiscReg procIDMask = 0; // Read-Only register replaceBits(procIDMask, 0, 32, 0); setRegMask(MISCREG_PRID, procIDMask); // Config ConfigReg cfg = readMiscRegNoEffect(MISCREG_CONFIG); cfg.be = cp.CP0_Config_BE; cfg.at = cp.CP0_Config_AT; cfg.ar = cp.CP0_Config_AR; cfg.mt = cp.CP0_Config_MT; cfg.vi = cp.CP0_Config_VI; cfg.m = 1; setMiscRegNoEffect(MISCREG_CONFIG, cfg); // Now, create Write Mask for Config register MiscReg cfg_Mask = 0x7FFF0007; replaceBits(cfg_Mask, 0, 32, 0); setRegMask(MISCREG_CONFIG, cfg_Mask); // Config1 Config1Reg cfg1 = readMiscRegNoEffect(MISCREG_CONFIG1); cfg1.mmuSize = cp.CP0_Config1_MMU; cfg1.is = cp.CP0_Config1_IS; cfg1.il = cp.CP0_Config1_IL; cfg1.ia = cp.CP0_Config1_IA; cfg1.ds = cp.CP0_Config1_DS; cfg1.dl = cp.CP0_Config1_DL; cfg1.da = cp.CP0_Config1_DA; cfg1.fp = cp.CP0_Config1_FP; cfg1.ep = cp.CP0_Config1_EP; cfg1.wr = cp.CP0_Config1_WR; cfg1.md = cp.CP0_Config1_MD; cfg1.c2 = cp.CP0_Config1_C2; cfg1.pc = cp.CP0_Config1_PC; cfg1.m = cp.CP0_Config1_M; setMiscRegNoEffect(MISCREG_CONFIG1, cfg1); // Now, create Write Mask for Config register MiscReg cfg1_Mask = 0; // Read Only Register replaceBits(cfg1_Mask, 0, 32, 0); setRegMask(MISCREG_CONFIG1, cfg1_Mask); // Config2 Config2Reg cfg2 = readMiscRegNoEffect(MISCREG_CONFIG2); cfg2.tu = cp.CP0_Config2_TU; cfg2.ts = cp.CP0_Config2_TS; cfg2.tl = cp.CP0_Config2_TL; cfg2.ta = cp.CP0_Config2_TA; cfg2.su = cp.CP0_Config2_SU; 
cfg2.ss = cp.CP0_Config2_SS; cfg2.sl = cp.CP0_Config2_SL; cfg2.sa = cp.CP0_Config2_SA; cfg2.m = cp.CP0_Config2_M; setMiscRegNoEffect(MISCREG_CONFIG2, cfg2); // Now, create Write Mask for Config register MiscReg cfg2_Mask = 0x7000F000; // Read Only Register replaceBits(cfg2_Mask, 0, 32, 0); setRegMask(MISCREG_CONFIG2, cfg2_Mask); // Config3 Config3Reg cfg3 = readMiscRegNoEffect(MISCREG_CONFIG3); cfg3.dspp = cp.CP0_Config3_DSPP; cfg3.lpa = cp.CP0_Config3_LPA; cfg3.veic = cp.CP0_Config3_VEIC; cfg3.vint = cp.CP0_Config3_VInt; cfg3.sp = cp.CP0_Config3_SP; cfg3.mt = cp.CP0_Config3_MT; cfg3.sm = cp.CP0_Config3_SM; cfg3.tl = cp.CP0_Config3_TL; setMiscRegNoEffect(MISCREG_CONFIG3, cfg3); // Now, create Write Mask for Config register MiscReg cfg3_Mask = 0; // Read Only Register replaceBits(cfg3_Mask, 0, 32, 0); setRegMask(MISCREG_CONFIG3, cfg3_Mask); // EBase - CPUNum EBaseReg eBase = readMiscRegNoEffect(MISCREG_EBASE); eBase.cpuNum = cp.CP0_EBase_CPUNum; replaceBits(eBase, 31, 31, 1); setMiscRegNoEffect(MISCREG_EBASE, eBase); // Now, create Write Mask for Config register MiscReg EB_Mask = 0x3FFFF000;// Except Exception Base, the // entire register is read only replaceBits(EB_Mask, 0, 32, 0); setRegMask(MISCREG_EBASE, EB_Mask); // SRS Control - HSS (Highest Shadow Set) SRSCtlReg scsCtl = readMiscRegNoEffect(MISCREG_SRSCTL); scsCtl.hss = cp.CP0_SrsCtl_HSS; setMiscRegNoEffect(MISCREG_SRSCTL, scsCtl); // Now, create Write Mask for the SRS Ctl register MiscReg SC_Mask = 0x0000F3C0; replaceBits(SC_Mask, 0, 32, 0); setRegMask(MISCREG_SRSCTL, SC_Mask); // IntCtl - IPTI, IPPCI IntCtlReg intCtl = readMiscRegNoEffect(MISCREG_INTCTL); intCtl.ipti = cp.CP0_IntCtl_IPTI; intCtl.ippci = cp.CP0_IntCtl_IPPCI; setMiscRegNoEffect(MISCREG_INTCTL, intCtl); // Now, create Write Mask for the IntCtl register MiscReg IC_Mask = 0x000003E0; replaceBits(IC_Mask, 0, 32, 0); setRegMask(MISCREG_INTCTL, IC_Mask); // Watch Hi - M - FIXME (More than 1 Watch register) WatchHiReg watchHi = 
readMiscRegNoEffect(MISCREG_WATCHHI0); watchHi.m = cp.CP0_WatchHi_M; setMiscRegNoEffect(MISCREG_WATCHHI0, watchHi); // Now, create Write Mask for the IntCtl register MiscReg wh_Mask = 0x7FFF0FFF; replaceBits(wh_Mask, 0, 32, 0); setRegMask(MISCREG_WATCHHI0, wh_Mask); // Perf Ctr - M - FIXME (More than 1 PerfCnt Pair) PerfCntCtlReg perfCntCtl = readMiscRegNoEffect(MISCREG_PERFCNT0); perfCntCtl.m = cp.CP0_PerfCtr_M; perfCntCtl.w = cp.CP0_PerfCtr_W; setMiscRegNoEffect(MISCREG_PERFCNT0, perfCntCtl); // Now, create Write Mask for the IntCtl register MiscReg pc_Mask = 0x00007FF; replaceBits(pc_Mask, 0, 32, 0); setRegMask(MISCREG_PERFCNT0, pc_Mask); // Random setMiscRegNoEffect(MISCREG_CP0_RANDOM, 63); // Now, create Write Mask for the IntCtl register MiscReg random_Mask = 0; replaceBits(random_Mask, 0, 32, 0); setRegMask(MISCREG_CP0_RANDOM, random_Mask); // PageGrain PageGrainReg pageGrain = readMiscRegNoEffect(MISCREG_PAGEGRAIN); pageGrain.esp = cp.CP0_Config3_SP; setMiscRegNoEffect(MISCREG_PAGEGRAIN, pageGrain); // Now, create Write Mask for the IntCtl register MiscReg pg_Mask = 0x10000000; replaceBits(pg_Mask, 0, 32, 0); setRegMask(MISCREG_PAGEGRAIN, pg_Mask); // Status StatusReg status = readMiscRegNoEffect(MISCREG_STATUS); // Only CU0 and IE are modified on a reset - everything else needs // to be controlled on a per CPU model basis // Enable CP0 on reset // status.cu0 = 1; // Enable ERL bit on a reset status.erl = 1; // Enable BEV bit on a reset status.bev = 1; setMiscRegNoEffect(MISCREG_STATUS, status); // Now, create Write Mask for the Status register MiscReg stat_Mask = 0xFF78FF17; replaceBits(stat_Mask, 0, 32, 0); setRegMask(MISCREG_STATUS, stat_Mask); // MVPConf0 MVPConf0Reg mvpConf0 = readMiscRegNoEffect(MISCREG_MVP_CONF0); mvpConf0.tca = 1; mvpConf0.pvpe = numVpes - 1; mvpConf0.ptc = numThreads - 1; setMiscRegNoEffect(MISCREG_MVP_CONF0, mvpConf0); // VPEConf0 VPEConf0Reg vpeConf0 = readMiscRegNoEffect(MISCREG_VPE_CONF0); vpeConf0.mvp = 1; 
setMiscRegNoEffect(MISCREG_VPE_CONF0, vpeConf0); // TCBind for (ThreadID tid = 0; tid < numThreads; tid++) { TCBindReg tcBind = readMiscRegNoEffect(MISCREG_TC_BIND, tid); tcBind.curTC = tid; setMiscRegNoEffect(MISCREG_TC_BIND, tcBind, tid); } // TCHalt TCHaltReg tcHalt = readMiscRegNoEffect(MISCREG_TC_HALT); tcHalt.h = 0; setMiscRegNoEffect(MISCREG_TC_HALT, tcHalt); // TCStatus // Set TCStatus Activated to 1 for the initial thread that is running TCStatusReg tcStatus = readMiscRegNoEffect(MISCREG_TC_STATUS); tcStatus.a = 1; setMiscRegNoEffect(MISCREG_TC_STATUS, tcStatus); // Set Dynamically Allocatable bit to 1 for all other threads for (ThreadID tid = 1; tid < numThreads; tid++) { tcStatus = readMiscRegNoEffect(MISCREG_TC_STATUS, tid); tcStatus.da = 1; setMiscRegNoEffect(MISCREG_TC_STATUS, tcStatus, tid); } MiscReg mask = 0x7FFFFFFF; // Now, create Write Mask for the Index register replaceBits(mask, 0, 32, 0); setRegMask(MISCREG_INDEX, mask); mask = 0x3FFFFFFF; replaceBits(mask, 0, 32, 0); setRegMask(MISCREG_ENTRYLO0, mask); setRegMask(MISCREG_ENTRYLO1, mask); mask = 0xFF800000; replaceBits(mask, 0, 32, 0); setRegMask(MISCREG_CONTEXT, mask); mask = 0x1FFFF800; replaceBits(mask, 0, 32, 0); setRegMask(MISCREG_PAGEMASK, mask); mask = 0x0; replaceBits(mask, 0, 32, 0); setRegMask(MISCREG_BADVADDR, mask); setRegMask(MISCREG_LLADDR, mask); mask = 0x08C00300; replaceBits(mask, 0, 32, 0); setRegMask(MISCREG_CAUSE, mask); } inline unsigned ISA::getVPENum(ThreadID tid) const { TCBindReg tcBind = miscRegFile[MISCREG_TC_BIND][tid]; return tcBind.curVPE; } MiscReg ISA::readMiscRegNoEffect(int misc_reg, ThreadID tid) const { unsigned reg_sel = (bankType[misc_reg] == perThreadContext) ? 
tid : getVPENum(tid); DPRINTF(MipsPRA, "Reading CP0 Register:%u Select:%u (%s) (%lx).\n", misc_reg / 8, misc_reg % 8, miscRegNames[misc_reg], miscRegFile[misc_reg][reg_sel]); return miscRegFile[misc_reg][reg_sel]; } //@TODO: MIPS MT's register view automatically connects // Status to TCStatus depending on current thread //template <class TC> MiscReg ISA::readMiscReg(int misc_reg, ThreadContext *tc, ThreadID tid) { unsigned reg_sel = (bankType[misc_reg] == perThreadContext) ? tid : getVPENum(tid); DPRINTF(MipsPRA, "Reading CP0 Register:%u Select:%u (%s) with effect (%lx).\n", misc_reg / 8, misc_reg % 8, miscRegNames[misc_reg], miscRegFile[misc_reg][reg_sel]); return miscRegFile[misc_reg][reg_sel]; } void ISA::setMiscRegNoEffect(int misc_reg, const MiscReg &val, ThreadID tid) { unsigned reg_sel = (bankType[misc_reg] == perThreadContext) ? tid : getVPENum(tid); DPRINTF(MipsPRA, "[tid:%i]: Setting (direct set) CP0 Register:%u " "Select:%u (%s) to %#x.\n", tid, misc_reg / 8, misc_reg % 8, miscRegNames[misc_reg], val); miscRegFile[misc_reg][reg_sel] = val; } void ISA::setRegMask(int misc_reg, const MiscReg &val, ThreadID tid) { unsigned reg_sel = (bankType[misc_reg] == perThreadContext) ? tid : getVPENum(tid); DPRINTF(MipsPRA, "[tid:%i]: Setting CP0 Register: %u Select: %u (%s) to %#x\n", tid, misc_reg / 8, misc_reg % 8, miscRegNames[misc_reg], val); miscRegFile_WriteMask[misc_reg][reg_sel] = val; } // PROGRAMMER'S NOTES: // (1) Some CP0 Registers have fields that cannot // be overwritten. Make sure to handle those particular registers // with care! void ISA::setMiscReg(int misc_reg, const MiscReg &val, ThreadContext *tc, ThreadID tid) { int reg_sel = (bankType[misc_reg] == perThreadContext) ? 
tid : getVPENum(tid); DPRINTF(MipsPRA, "[tid:%i]: Setting CP0 Register:%u " "Select:%u (%s) to %#x, with effect.\n", tid, misc_reg / 8, misc_reg % 8, miscRegNames[misc_reg], val); MiscReg cp0_val = filterCP0Write(misc_reg, reg_sel, val); miscRegFile[misc_reg][reg_sel] = cp0_val; scheduleCP0Update(tc->getCpuPtr(), Cycles(1)); } /** * This method doesn't need to adjust the Control Register Offset * since it has already been done in the calling method * (setRegWithEffect) */ MiscReg ISA::filterCP0Write(int misc_reg, int reg_sel, const MiscReg &val) { MiscReg retVal = val; // Mask off read-only regions retVal &= miscRegFile_WriteMask[misc_reg][reg_sel]; MiscReg curVal = miscRegFile[misc_reg][reg_sel]; // Mask off current alue with inverse mask (clear writeable bits) curVal &= (~miscRegFile_WriteMask[misc_reg][reg_sel]); retVal |= curVal; // Combine the two DPRINTF(MipsPRA, "filterCP0Write: Mask: %lx, Inverse Mask: %lx, write Val: %x, " "current val: %lx, written val: %x\n", miscRegFile_WriteMask[misc_reg][reg_sel], ~miscRegFile_WriteMask[misc_reg][reg_sel], val, miscRegFile[misc_reg][reg_sel], retVal); return retVal; } void ISA::scheduleCP0Update(BaseCPU *cpu, Cycles delay) { if (!cp0Updated) { cp0Updated = true; //schedule UPDATE CP0Event *cp0_event = new CP0Event(this, cpu, UpdateCP0); cpu->schedule(cp0_event, cpu->clockEdge(delay)); } } void ISA::updateCPU(BaseCPU *cpu) { /////////////////////////////////////////////////////////////////// // // EVALUATE CP0 STATE FOR MIPS MT // /////////////////////////////////////////////////////////////////// MVPConf0Reg mvpConf0 = readMiscRegNoEffect(MISCREG_MVP_CONF0); ThreadID num_threads = mvpConf0.ptc + 1; for (ThreadID tid = 0; tid < num_threads; tid++) { TCStatusReg tcStatus = readMiscRegNoEffect(MISCREG_TC_STATUS, tid); TCHaltReg tcHalt = readMiscRegNoEffect(MISCREG_TC_HALT, tid); //@todo: add vpe/mt check here thru mvpcontrol & vpecontrol regs if (tcHalt.h == 1 || tcStatus.a == 0) { haltThread(cpu->getContext(tid)); } 
else if (tcHalt.h == 0 && tcStatus.a == 1) { restoreThread(cpu->getContext(tid)); } } num_threads = mvpConf0.ptc + 1; // Toggle update flag after we finished updating cp0Updated = false; } ISA::CP0Event::CP0Event(CP0 *_cp0, BaseCPU *_cpu, CP0EventType e_type) : Event(CPU_Tick_Pri), cp0(_cp0), cpu(_cpu), cp0EventType(e_type) { } void ISA::CP0Event::process() { switch (cp0EventType) { case UpdateCP0: cp0->updateCPU(cpu); break; } } const char * ISA::CP0Event::description() const { return "Coprocessor-0 event"; } void ISA::CP0Event::scheduleEvent(Cycles delay) { cpu->reschedule(this, cpu->clockEdge(delay), true); } void ISA::CP0Event::unscheduleEvent() { if (scheduled()) squash(); } } MipsISA::ISA * MipsISAParams::create() { return new MipsISA::ISA(this); }
yohanko88/gem5-DC
src/arch/mips/isa.cc
C++
bsd-3-clause
20,152
[ 30522, 1013, 1008, 1008, 9385, 1006, 1039, 1007, 2268, 1996, 22832, 1997, 1996, 2118, 1997, 4174, 1008, 2035, 2916, 9235, 1012, 1008, 1008, 25707, 1998, 2224, 1999, 3120, 1998, 12441, 3596, 1010, 2007, 2030, 2302, 1008, 14080, 1010, 2024, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
! function(factory) { if (typeof require === 'function' && typeof exports === 'object' && typeof module === 'object') { var target = module['exports'] || exports; factory(target); } else if (typeof define === 'function' && define['amd']) { //define(['exports'],function(exports){ // exports.abc = function(){} //}); define(['exports'], factory); } else { factory(window['NC'] = {}); } }(function(exports) { function reMarker(templ, data, type) { var _type = type || 'JavaScript'; if (arguments.length === 1 ||(!data && type)) { var _templ = reMarker[_type].parse(templ); return _templ; /* return function(data) { return reMarker[_type].proc(_templ, data); }*/ } data = data || {}; return reMarker[_type].proc(reMarker[_type].parse(templ), data); } /** * 工具方法 * @type {Object} */ var _utils = { trim: function(str) { return str.replace(/(^\s*)|(\s*$)/g, ""); }, lTrim: function(str) { return str.replace(/(^\s*)/g, ""); }, rTrim: function(str) { return str.replace(/(\s*$)/g, ""); }, removeEmpty: function(arr) { var splitStr = _separator(arr); var REMOVE_REGEX = new RegExp(splitStr + splitStr); var REMOVE_HEAD_REGEX = new RegExp('^' + splitStr); return arr.join(splitStr).replace(REMOVE_REGEX, splitStr).replace(REMOVE_HEAD_REGEX, '').split(splitStr); }, filter: function(str) { return str.replace('&lt;', '<').replace('&gt;', '>'); } }; /** * 设定分隔符 * @param {String} str 字符串源 * @return {String} 分隔符 */ function _separator(str) { var separator = ''; do { separator = String.fromCharCode(Math.random(0, 1) * 100 + 255); } while (str.indexOf(separator) >= 0); return separator; }; /** * 移除不安全代码 * @param html * @returns {*|void} */ function removeUnsafe(html) { var _templ = html.replace(/[\r|\n|\t]/ig, '').replace(/\s{2,}/ig, ' ').replace(/\'/ig, "\\\'"); return _templ; } /** * 找出匹配的键值对 * @param {Array} value 数组 * @returns {Array} */ function findPairs(value) { var cache = []; if (Object.prototype.toString.call(value) === '[object Array]') { var KEY_REGEX = /\b(\w+)\s*?=/g; var commandStr = 
value.join(' '); var _sp = _separator(commandStr); commandStr = commandStr.replace(KEY_REGEX, _sp + "$1" + _sp); value = _utils.removeEmpty(commandStr.split(_sp)); if (value.length % 2 == 0) { for (var i = 0; i < value.length; i = i + 2) { var _pair = [value[i], value[i + 1]]; cache = cache.concat(_pair); } } } return cache; } var VAR_REGEX=/^[a-zA-Z_][a-zA-Z0-9_]*$/im; function _setVarToken(arr){ return arr.map(function(value){ if(VAR_REGEX.test(value)===true){ value='$'+value; } return value; }); } reMarker.PHP = (function() { var Ruler = { guid: 0 }; /** * 匹配语法规则处理 * @type {{ruler: Function, rulerAssign: Function, rulerEndSwitch: Function, rulerCase: Function, rulerDefault: Function, rulerSwitch: Function, rulerElseIf: Function, rulerBreak: Function, rulerElse: Function, rulerEndIf: Function, rulerIf: Function, rulerEndList: Function, rulerList: Function}} */ Ruler.regRuler = { ruler: function(str) { var listArr = Ruler.util.removeEmpty(str.split(' ')); //import,include var ruler = { "list": this.rulerList, "if": this.rulerIf, "break": this.rulerBreak, '/#list': this.rulerEndList, 'else': this.rulerElse, "/#if": this.rulerEndIf, 'elseif': this.rulerElseIf, 'switch': this.rulerSwitch, 'case': this.rulerCase, 'default': this.rulerDefault, '/#switch': this.rulerEndSwitch, 'assign': this.rulerAssign, 'return': this.rulerReturn }; return (ruler[listArr[0]]).call(this, listArr); }, rulerReturn: function() { return 'return;'; }, /** * 定义变量 * @param arr * @returns {string} */ rulerAssign: function(arr) { var result = [], count; var rt = findPairs(arr.slice(1)); count = rt.length; for (j = 0; j < count; j += 2) { var name = rt[j]; result.push('$' + name + '=' + rt[j + 1] + ';'); } return result.join(''); }, rulerEndSwitch: function(arr) { return '}'; }, rulerCase: function(arr) { return ('case ' + arr[1] + ':'); }, rulerDefault: function() { return 'default:'; }, rulerSwitch: function(arr) { arr= _setVarToken(arr); return 'switch(' + arr.join('').replace('switch', '') + 
'){'; }, rulerElseIf: function(arr) { if (arr.length < 2) { return false; } arr=_setVarToken(arr.slice(1)); return '}else if(' + Ruler.util.filter(arr.join('')) + '){'; }, rulerBreak: function() { return 'break;'; }, rulerElse: function(arr) { return '}else{'; }, rulerEndIf: function(arr) { return '}'; }, rulerIf: function(arr) { if (arr.length < 2) { return false; } arr=_setVarToken(arr.slice(1)); return 'if(' + Ruler.util.filter(arr.join('')) + '){'; }, rulerEndList: function(arr) { return '}'; }, /** * 循环列表方法 * @param arr * @returns {string} */ rulerList: function(arr) { var listName, loopName, loopIndexName, loopHasNextName, result = []; if (arr.length != 4) { return; } var _guid = Ruler.guid++; loopName = arr[3]; listName = arr[1]; loopIndexName = loopName + '_index'; loopHasNextName = loopName + '_has_next'; //如果变量名不是传统的字母或数字 if (!/^\w+$/.test(listName)) { if (listName.indexOf('$') !== 0) { listName = '$' + listName; } var _listName = '$_list' + _guid; result.push(_listName + '=' + listName + ';'); listName = _listName; } else { listName = '$' + listName; } loopName = '$' + loopName; loopIndexName = '$' + loopIndexName; loopHasNextName = '$' + loopHasNextName; result.push([ '$_i{guid}=0', '$count{guid}=count(' + listName + ')', loopName, loopIndexName, loopHasNextName + ';' ].join(';')); result.push('for(;$_i{guid}<$count{guid};$_i{guid}++){'); result.push(loopName + '=' + listName + '[$_i{guid}];'); result.push(loopIndexName + '=$_i{guid};'); result.push(loopHasNextName + '=$_i{guid}!==$count{guid}-1;'); return result.join('').replace(/\{guid\}/ig, _guid); } }; /** * 内嵌函数,待扩展 * @type {{trim: Function, lTrim: Function, rTrim: Function, removeEmpty: Function, filter: Function}} */ Ruler.util = { trim: function(str) { return str.replace(/(^\s*)|(\s*$)/g, ""); }, lTrim: function(str) { return str.replace(/(^\s*)/g, ""); }, rTrim: function(str) { return str.replace(/(\s*$)/g, ""); }, removeEmpty: function(arr) { var splitStr = _separator(arr); var REMOVE_REGEX = 
new RegExp(splitStr + splitStr); var REMOVE_HEAD_REGEX = new RegExp('^' + splitStr); return arr.join(splitStr).replace(REMOVE_REGEX, splitStr).replace(REMOVE_HEAD_REGEX, '').split(splitStr); }, filter: function(str) { return str.replace('&lt;', '<').replace('&gt;', '>'); } }; /** * 将模板语法解释为JS语法 * @param _templ 模板字符串 * @returns {String} 语法解析后的 * @private */ function _parse(_templ) { var chunks = [], replaced = [], compiled; var printPrefix = "$__buf__.="; var lastIndex = 0; var ss = /<#.+?>|\${.+?}|<\/#.+?>|<@.+?>/ig; /** * 将模块中的匹配替换为相应语言的语法 * @param {String} str 输入 * @param {Number} type 0普通字符 1变量 2表达式 * @return {Null} */ function _pushStr(str, type) { if (str !== '') { if (type == 2) { replaced.push(str) } else { if (type == 1) { replaced.push(printPrefix + str + ';') } else { str = str.replace(/"/ig, "\\\""); replaced.push(printPrefix + '"' + str + '";') } } } } //移除不安全代码 _templ = removeUnsafe(_templ); _templ.replace(ss, function repalceHandler(match, index) { if (lastIndex != index) { var _temp_ = _templ.substring(lastIndex, index); if (Ruler.util.trim(_temp_) != '') _pushStr(_templ.substring(lastIndex, index)); chunks.push(_temp_); } if (match[0] == '$') { _pushStr('$' + match.substring(2, match.length - 1), 1); } else { //是注释,暂时不处理 if (match[0] == '<' && match[1] == '#' && match[2] == '-') { } else { if (match[0] == '<' && match[1] == '#') { _pushStr(Ruler.regRuler.ruler(match.substring(2, match.length - 1)), 2); } else if (match[1] == '/' && match[2] == '#') { _pushStr(Ruler.regRuler.ruler(match.substring(1, match.length - 1)), 2); } chunks.push(match); } } //set the last match index as current match index plus matched value length lastIndex = index + match.length; }); //add the end string for replaced string if (lastIndex < _templ.length) { _pushStr(_templ.substring(lastIndex)); } //if no matched replace if (!replaced.length) { _pushStr(_templ); } replaced = ["$__buf__='';", replaced.join(''), ";echo($__buf__);"].join(''); return replaced; } function 
_proc(html, data) { return html; } return { parse: _parse, proc: _proc } })(); reMarker.JavaScript = (function() { var Ruler = {}; /** * 匹配语法规则处理 * @type {{ruler: Function, rulerAssign: Function, rulerEndSwitch: Function, rulerCase: Function, rulerDefault: Function, rulerSwitch: Function, rulerElseIf: Function, rulerBreak: Function, rulerElse: Function, rulerEndIf: Function, rulerIf: Function, rulerEndList: Function, rulerList: Function}} */ Ruler.regRuler = { ruler: function(str) { var listArr = Ruler.util.removeEmpty(str.split(' ')); //import,include var ruler = { "list": this.rulerList, "if": this.rulerIf, "break": this.rulerBreak, '/#list': this.rulerEndList, 'else': this.rulerElse, "/#if": this.rulerEndIf, 'elseif': this.rulerElseIf, 'switch': this.rulerSwitch, 'case': this.rulerCase, 'default': this.rulerDefault, '/#switch': this.rulerEndSwitch, 'assign': this.rulerAssign, 'return': this.rulerReturn }; return (ruler[listArr[0]]).call(this, listArr); }, rulerReturn: function() { return 'return;'; }, /** * 定义变量 * @param arr * @returns {string} */ rulerAssign: function(arr) { var result = [], count; var rt = findPairs(arr.slice(1)); count = rt.length; for (j = 0; j < count; j += 2) { var name = rt[j]; result.push('var '); result.push(name + '=' + rt[j + 1] + ';'); } return result.join(''); }, rulerEndSwitch: function(arr) { return '}'; }, rulerCase: function(arr) { return ('case ' + arr[1] + ':'); }, rulerDefault: function() { return 'default:'; }, rulerSwitch: function(arr) { return 'switch(' + arr.join('').replace('switch', '') + '){'; }, rulerElseIf: function(arr) { if (arr.length < 2) { return false; } return '}else if(' + Ruler.util.filter(arr.slice(1).join('')) + '){'; }, rulerBreak: function() { return 'break;'; }, rulerElse: function(arr) { return '}else{'; }, rulerEndIf: function(arr) { return '}'; }, rulerIf: function(arr) { if (arr.length < 2) { return false; } return 'if(' + Ruler.util.filter(arr.slice(1).join('')) + '){'; }, rulerEndList: 
function(arr) { return '}})();'; }, /** * 循环列表方法 * @param arr * @returns {string} */ rulerList: function(arr) { var listName, loopName, loopIndexName, loopHasNextName, result = []; if (arr.length != 4) { return; } loopName = arr[3]; listName = arr[1]; loopIndexName = loopName + '_index'; loopHasNextName = loopName + '_has_next'; result.push('(function(){'); if (!/^\w+$/.test(listName)) { result.push('var _list=' + listName + ';'); listName = '_list'; } result.push([ 'var _i=0', '_count=' + listName + '.length', loopName, loopIndexName, loopHasNextName + ';' ].join(',')); result.push('for(;_i<_count;_i++){'); result.push(loopName + '=' + listName + '[_i];'); result.push(loopIndexName + '=_i;'); result.push(loopHasNextName + '=_i!==_count-1;'); return result.join(''); } }; /** * 内嵌函数,待扩展 * @type {{trim: Function, lTrim: Function, rTrim: Function, removeEmpty: Function, filter: Function}} */ Ruler.util = { trim: function(str) { return str.replace(/(^\s*)|(\s*$)/g, ""); }, lTrim: function(str) { return str.replace(/(^\s*)/g, ""); }, rTrim: function(str) { return str.replace(/(\s*$)/g, ""); }, removeEmpty: function(arr) { var splitStr = _separator(arr); var REMOVE_REGEX = new RegExp(splitStr + splitStr); var REMOVE_HEAD_REGEX = new RegExp('^' + splitStr); return arr.join(splitStr).replace(REMOVE_REGEX, splitStr).replace(REMOVE_HEAD_REGEX, '').split(splitStr); }, filter: function(str) { return str.replace('&lt;', '<').replace('&gt;', '>'); } }; /** * 将模板语法解释为JS语法 * @param _templ 模板字符串 * @returns {String} 语法解析后的 * @private */ function _parse(_templ) { var chunks = [], replaced = [], compiled; var printPrefix = "__buf__.push("; var lastIndex = 0; var ss = /<#.+?>|\${.+?}|<\/#.+?>|<@.+?>/ig; /** * 将模块中的匹配替换为相应语言的语法 * @param {String} str 输入 * @param {Number} type 0普通字符 1变量 2表达式 * @return {Null} */ function _pushStr(str, type) { str = str.replace(/'/g, "\\'"); if (str !== '') { if (type == 1) { replaced.push(printPrefix + str + ');') } else if (type == 2) { replaced.push(str) 
} else { replaced.push(printPrefix + '\'' + str + '\');') } } } //移除不安全代码 _templ = removeUnsafe(_templ); _templ.replace(ss, function(match, index) { //the last match index of all template //上次匹配结束位置与当前匹配的位置之间可能会有一些字符,也要加进来 if (lastIndex != index) { var _temp_ = _templ.substring(lastIndex, index); if (Ruler.util.trim(_temp_) != '') _pushStr(_templ.substring(lastIndex, index)); chunks.push(_temp_); } if (match[0] == '$') { _pushStr(match.substring(2, match.length - 1), 1); //replaced.push(printPrefix + match.substring(2, match.length - 1) + ');'); } else { //是注释,暂时不处理 if (match[0] == '<' && match[1] == '#' && match[2] == '-') { } else { if (match[0] == '<' && match[1] == '#') { _pushStr(Ruler.regRuler.ruler(match.substring(2, match.length - 1)), 2); } else if (match[1] == '/' && match[2] == '#') { _pushStr(Ruler.regRuler.ruler(match.substring(1, match.length - 1)), 2); } else {} chunks.push(match); } } //set the last match index as current match index plus matched value length lastIndex = index + match.length; }); //add the end string for replaced string if (lastIndex < _templ.length) { _pushStr(_templ.substring(lastIndex)); } //if no matched replace if (!replaced.length) { _pushStr(_templ); } replaced = ["var __buf__=[],$index=null;with($data){", replaced.join(''), "} return __buf__.join('');"].join(''); return replaced; } function _proc(html, data) { var util = {}; if (Ruler.util) { var _util = Ruler.util; for (var key in _util) { util[key] = _util[key]; } } if (Object.prototype.toString.call(data) !== '[object Object]') { data = {}; } var replaced = html; try { compiled = new Function("$data", "$util", replaced); } catch (e) { throw "template code error"; } return compiled.call(window, data, util) } return { parse: _parse, proc: _proc } })(); /* 模板引擎,使用freemark语法,目前已知最快的 作者:陈鑫 */ var nc = typeof exports !== 'undefined' ? 
exports : {}; nc.reMarker = { /** * 柯里化模板语法,二次传入 * @param templ * @returns {Function} */ proc: reMarker, parse:reMarker }; }); //如果内嵌入web页面,则自动将模板导出为JS变量 ! function(factory) { if (typeof require === 'function' && typeof exports === 'object' && typeof module === 'object') { var target = module['exports'] || exports; factory(target); } else if (typeof define === 'function' && define['amd']) { define(['exports'], factory); } else { var scriptTags = document.getElementsByTagName('script'), templates = []; for (var i = 0; i < scriptTags.length; i++) { if (scriptTags[i].getAttribute('type') == 'remark-template') { templates.push(scriptTags[i]); } } for (var t = 0; t < templates.length; t++) { var _id = '__' + templates[t].id + '__'; window[_id] = window.NC.reMarker.proc(templates[t].innerHTML); } } }(function(exports) {});
simon4545/cff
Build/template-build.js
JavaScript
gpl-2.0
23,369
[ 30522, 999, 3853, 1006, 4713, 1007, 1063, 2065, 1006, 2828, 11253, 5478, 1027, 1027, 1027, 1005, 3853, 1005, 1004, 1004, 2828, 11253, 14338, 1027, 1027, 1027, 1005, 4874, 1005, 1004, 1004, 2828, 11253, 11336, 1027, 1027, 1027, 1005, 4874, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<!DOCTYPE html> <html dir="ltr" lang="pt-br"> <head> <title>Apêndice B - Lista de Leitura - Rubinius</title> <meta charset="UTF-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"> <meta content='pt-br' http-equiv='content-language'> <meta content='Rubinius is an implementation of the Ruby programming language. The Rubinius bytecode virtual machine is written in C++. The bytecode compiler is written in pure Ruby. The vast majority of the core library is also written in Ruby, with some supporting primitives that interact with the VM directly.' name='description'> <link href='/' rel='home'> <link href='/' rel='start'> <link href='/doc/pt-br/appendix-a-glossary' rel='prev' title='Apêndice A - Glossário'> <link href='/doc/pt-br/terms-index' rel='next' title='Termos Principais'> <!--[if IE]><script src="http://html5shiv.googlecode.com/svn/trunk/html5.js" type="text/javascript"></script><![endif]--> <script src="/javascripts/jquery-1.3.2.js"></script> <script src="/javascripts/paging_keys.js"></script> <script src="/javascripts/application.js"></script> <style>article, aside, dialog, figure, footer, header, hgroup, menu, nav, section { display: block; }</style> <link href="/stylesheets/blueprint/screen.css" media="screen" rel="stylesheet" /> <link href="/stylesheets/application.css" media="screen" rel="stylesheet" /> <link href="/stylesheets/blueprint/print.css" media="print" rel="stylesheet" /> <!--[if IE]><link href="/stylesheets/blueprint/ie.css" media="screen" rel="stylesheet" type="text/css" /><![endif]--> <!--[if IE]><link href="/stylesheets/ie.css" media="screen" rel="stylesheet" type="text/css" /><![endif]--> <link href="/stylesheets/pygments.css" media="screen" rel="stylesheet" /> <link href="/favicon.ico" rel="shortcut icon" type="image/vnd.microsoft.icon" /> <link href="/images/apple-touch-icon.png" rel="apple-touch-icon" type="image/png" /> <link href="/images/apple-touch-icon.png" rel="apple-touch-icon" type="image/png" sizes="72x72" /> <link 
href="/images/apple-touch-icon.png" rel="apple-touch-icon" type="image/png" sizes="114x114" /> </head> <body> <div class='container'> <div class='span-21 doc_menu'> <header> <nav> <ul> <li><a href="/">Home</a></li> <li><a id="blog" href="/blog">Blog</a></li> <li><a id="documentation" href="/doc/en">Documentation</a></li> <li><a href="/projects">Projects</a></li> <li><a href="/roadmap">Roadmap</a></li> <li><a href="/releases">Releases</a></li> </ul> </nav> </header> </div> <div class='span-3 last'> <div id='version'> <a href="/releases/1.2.4">1.2.4</a> </div> </div> </div> <div class="container languages"> <nav> <span class="label">Languages:</span> <ul> <li><a href="/doc/de/appendix-b-reading-list/" >de</a></li> <li><a href="/doc/en/appendix-b-reading-list/" >en</a></li> <li><a href="/doc/es/appendix-b-reading-list/" >es</a></li> <li><a href="/doc/fr/appendix-b-reading-list/" >fr</a></li> <li><a href="/doc/ja/appendix-b-reading-list/" >ja</a></li> <li><a href="/doc/pl/appendix-b-reading-list/" >pl</a></li> <li><a href="/doc/pt-br/appendix-b-reading-list/" class="current" >pt-br</a></li> <li><a href="/doc/ru/appendix-b-reading-list/" >ru</a></li> </ul> </nav> </div> <div class="container doc_page_nav"> <span class="label">Previous:</span> <a href="/doc/pt-br/appendix-a-glossary">Apêndice A - Glossário</a> <span class="label">Up:</span> <a href="/doc/pt-br/">Tabela de Conteúdos</a> <span class="label">Next:</span> <a href="/doc/pt-br/terms-index">Termos Principais</a> </div> <div class="container documentation"> <h2>Apêndice B - Lista de Leitura</h2> <p>A Construção de máquinas virtuais em geral e implementações de linguagens de programação em particular, requerem algum conhecimento. 
O objetivo do Rubinius é reduzir as barreiras mantendo, tanto quanto possível em Ruby, mas para modificar garbage collector você precisa<br /> entender o que está acontecendo por trás das cortinas.</p> <p>Essa página contém refer6encias para livros, leituras online, posts em blogs e outras publicações que você achará util trabalhando com o Rubinius.</p> <p>Repare que algumas dessas referências possam ter informações desatualizadas sobre o Rubinius.</p> <h2 id="mquina-virtual">Máquina Virtual</h2> <ul> <li><a href="http://tinyurl.com/3a2pdq">Smalltalk-80: Linguagem e Implementação</a> by Goldberg, Robson, Harrison (conhecido como &ldquo;O livro Azul&rdquo;), Os capítulos de Implementação são o da parte IV <a href="http://tinyurl.com/6zlsd">available online</a></li> <li><a href="http://tinyurl.com/3ydkqg">Máquinas Virtuais</a> por Iain D. Craig</li> <li>Grandes posts do Adam Gardiner: <a href="http://tinyurl.com/35y2jh">introdução</a>, <a href="http://tinyurl.com/34c6e8">Como enviar trabalhos</a></li> </ul> <h2 id="coletor-de-lixo-garbage-collector">Coletor de Lixo (Garbage Collector)</h2> <ul> <li><a href="http://tinyurl.com/3dygmo">Coletor de Lixo: Algoritmos para Gerenciamento Dinâmico Automatizado de Memória</a> por Richard Jones</li> <li><a href="http://tinyurl.com/2mhek4">Garbage collection lectures</a></li> </ul> <h2 id="mtodos-primitivos">Métodos Primitivos</h2> <ul> <li><a href="http://talklikeaduck.denhaven2.com/articles/2007/06/04/ruby-extensions-vs-smalltalk-primitives">Extensões Primitivas do Ruby e Smalltalk</a></li> <li><a href="http://www.fit.vutbr.cz/study/courses/OMP/public/software/sqcdrom2/Tutorials/SqOnlineBook_(SOB)/englisch/sqk/sqk00083.htm">Guia Falando sobre Primitivas</a></li> </ul> <h2 id="ffi">FFI</h2> <ul> <li><a href="http://redartisan.com/2007/10/11/rubinius-coding">Implementando Arquivo#Usando Link FFI</a></li> <li><a href="http://blog.segment7.net/articles/2008/01/15/rubinius-foreign-function-interface">Rubinius&rsquo; Função de 
Interface Estrangeira</a></li> </ul> </div> <div class="container doc_page_nav"> <span class="label">Previous:</span> <a href="/doc/pt-br/appendix-a-glossary">Apêndice A - Glossário</a> <span class="label">Up:</span> <a href="/doc/pt-br/">Tabela de Conteúdos</a> <span class="label">Next:</span> <a href="/doc/pt-br/terms-index">Termos Principais</a> </div> <div class="container"> <div id="disqus_thread"></div> <script type="text/javascript"> var disqus_shortname = 'rubinius'; var disqus_identifier = '/doc/pt-br/appendix-b-reading-list/'; var disqus_url = 'http://rubini.us/doc/pt-br/appendix-b-reading-list/'; (function() { var dsq = document.createElement('script'); dsq.type = 'text/javascript'; dsq.async = true; dsq.src = 'http://' + disqus_shortname + '.disqus.com/embed.js'; (document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(dsq); })(); </script> <noscript>Please enable JavaScript to view the <a href="http://disqus.com/?ref_noscript">comments powered by Disqus.</a></noscript> </div> <footer> <div class='container'> <nav> <ul> <li><a rel="external" href="http://twitter.com/rubinius">Follow Rubinius on Twitter</a></li> <li><a rel="external" href="http://github.com/rubinius/rubinius">Fork Rubinius on github</a></li> <li><a rel="external" href="http://engineyard.com">An Engine Yard project</a></li> </ul> </nav> </div> </footer> <script> var _gaq=[['_setAccount','UA-12328521-1'],['_trackPageview']]; (function(d,t){var g=d.createElement(t),s=d.getElementsByTagName(t)[0];g.async=1; g.src=('https:'==location.protocol?'//ssl':'//www')+'.google-analytics.com/ga.js'; s.parentNode.insertBefore(g,s)}(document,'script')); </script> </body> </html>
takano32/rubinius
web/_site/doc/pt-br/appendix-b-reading-list/index.html
HTML
bsd-3-clause
8,215
[ 30522, 1026, 999, 9986, 13874, 16129, 1028, 1026, 16129, 16101, 1027, 1000, 8318, 2099, 1000, 11374, 1027, 1000, 13866, 1011, 7987, 1000, 1028, 1026, 2132, 1028, 1026, 2516, 1028, 23957, 16089, 3401, 1038, 1011, 2862, 2050, 2139, 26947, 274...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
import nextConnect from 'next-connect' import auth from '../../middleware/auth' import { deleteUser, updateUserByUsername } from '../../lib/db' const handler = nextConnect() handler .use(auth) .get((req, res) => { // You do not generally want to return the whole user object // because it may contain sensitive field such as !!password!! Only return what needed // const { name, username, favoriteColor } = req.user // res.json({ user: { name, username, favoriteColor } }) res.json({ user: req.user }) }) .use((req, res, next) => { // handlers after this (PUT, DELETE) all require an authenticated user // This middleware to check if user is authenticated before continuing if (!req.user) { res.status(401).send('unauthenticated') } else { next() } }) .put((req, res) => { const { name } = req.body const user = updateUserByUsername(req, req.user.username, { name }) res.json({ user }) }) .delete((req, res) => { deleteUser(req) req.logOut() res.status(204).end() }) export default handler
flybayer/next.js
examples/with-passport-and-next-connect/pages/api/user.js
JavaScript
mit
1,087
[ 30522, 12324, 2279, 8663, 2638, 6593, 2013, 1005, 2279, 1011, 7532, 1005, 12324, 8740, 2705, 2013, 1005, 1012, 1012, 1013, 1012, 1012, 1013, 2690, 8059, 1013, 8740, 2705, 1005, 12324, 1063, 3972, 12870, 20330, 1010, 10651, 20330, 3762, 2033...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/* ** File: evisimagedisplaywidget.h ** Author: Peter J. Ersts ( ersts at amnh.org ) ** Creation Date: 2007-03-13 ** ** Copyright ( c ) 2007, American Museum of Natural History. All rights reserved. ** ** This library/program is free software; you can redistribute it ** and/or modify it under the terms of the GNU Library General Public ** License as published by the Free Software Foundation; either ** version 2 of the License, or ( at your option ) any later version. ** ** This library/program is distributed in the hope that it will be useful, ** but WITHOUT ANY WARRANTY; without even the implied warranty of ** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ** Library General Public License for more details. ** ** This work was made possible through a grant by the the John D. and ** Catherine T. MacArthur Foundation. Additionally, this program was prepared by ** the American Museum of Natural History under award No. NA05SEC46391002 ** from the National Oceanic and Atmospheric Administration, U.S. Department ** of Commerce. The statements, findings, conclusions, and recommendations ** are those of the author( s ) and do not necessarily reflect the views of the ** National Oceanic and Atmospheric Administration or the Department of Commerce. ** **/ #ifndef EVISIMAGEDISPLAYWIDGET_H #define EVISIMAGEDISPLAYWIDGET_H #include <QLabel> #include <QWidget> #include <QScrollArea> #include <QPushButton> #include <QBuffer> #include <QHttp> #include <QResizeEvent> /** * \class eVisGenericEventBrowserGui * \brief Generic viewer for browsing event * The eVisImageDisplayWidget is a component of the eVisGenericEventBrowser. This widget provides * the ability to display an image on the widget and basic zoom capabilities. This class was created * so the same display features could be easily added to other widgets as needed. 
*/ class eVisImageDisplayWidget : public QWidget { Q_OBJECT public: /** \brief Constructor */ eVisImageDisplayWidget( QWidget* parent = 0, Qt::WFlags fl = 0 ); /** \brief Destructor */ ~eVisImageDisplayWidget(); /** \brief Load an image from disk and display */ void displayImage( QString ); /** \brief Load an image from a remote location using http and display */ void displayUrlImage( QString ); /* * There needs to be more logic around setting the zoom steps as you could change it mid display * and end up getting not being able to zoom in or out */ /** \brief Accessor for ZOOM_STEPS */ int getZoomSteps( ) { return ZOOM_STEPS; } /** \brief Mutator for ZOON_STEPS */ void setZoomSteps( int steps ) { ZOOM_STEPS = steps; } protected: void resizeEvent( QResizeEvent *event ); private: /** \brief Used to hold the http request to match the correct emits with the correct result */ int mCurrentHttpImageRequestId; /** \brief CUrrent Zoom level */ int mCurrentZoomStep; /** \brief widget to display the image in */ QScrollArea* mDisplayArea; /** \brief Method that acually display the image in the widget */ void displayImage( ); /** \brief Pointer to the http buffer */ QBuffer* mHttpBuffer; /** \brief Pointer to the http connection if needed */ QHttp* mHttpConnection; /** \brief This is a point to the actual image being displayed */ QPixmap* mImage; /** \brief Label to hold the image */ QLabel* mImageLabel; /** \brief Flag to indicate the success of the last load request */ bool mImageLoaded; /** \brief Ratio if height to width or width to height for the original image, which ever is smaller */ double mImageSizeRatio; /** \brief Boolean to indicate which feature the mImageSizeRation corresponds to */ bool mScaleByHeight; /** \brief Boolean to indicate which feature the mImageSizeRation corresponds to */ bool mScaleByWidth; /** \brief The increment by which the image is scaled during each scaling event */ double mScaleFactor; /** \brief The single factor by which the original image 
needs to be scaled to fit into current display area */ double mScaleToFit; /** \brief Zoom in button */ QPushButton* pbtnZoomIn; /** \brief Zoom out button */ QPushButton* pbtnZoomOut; /** \brief Zoom to full extent button */ QPushButton* pbtnZoomFull; /** \brief Method called to compute the various scaling parameters */ void setScalers( ); /** \brief The number of steps between the scale to fit image and full resolution */ int ZOOM_STEPS; private slots: void on_pbtnZoomIn_clicked( ); void on_pbtnZoomOut_clicked( ); void on_pbtnZoomFull_clicked( ); /** \brief Slot called when the http request is completed */ void displayUrlImage( int, bool ); }; #endif
polymeris/qgis
src/plugins/evis/eventbrowser/evisimagedisplaywidget.h
C
gpl-2.0
4,807
[ 30522, 1013, 1008, 1008, 1008, 5371, 1024, 23408, 17417, 26860, 10521, 13068, 9148, 24291, 1012, 1044, 1008, 1008, 3166, 1024, 2848, 1046, 1012, 9413, 12837, 1006, 9413, 12837, 2012, 2572, 25311, 1012, 8917, 1007, 1008, 1008, 4325, 3058, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using Xamarin.Forms; namespace N64Track { public static class CustomFontEffect { public static readonly BindableProperty FontFileNameProperty = BindableProperty.CreateAttached("FontFileName", typeof(string), typeof(CustomFontEffect), "", propertyChanged: OnFileNameChanged); public static string GetFontFileName(BindableObject view) { return (string)view.GetValue(FontFileNameProperty); } /// <summary> /// Set Font binding based on File Name provided /// </summary> /// <param name="view"></param> /// <param name="value"></param> public static void SetFontFileName(BindableObject view, string value) { view.SetValue(FontFileNameProperty, value); } static void OnFileNameChanged(BindableObject bindable, object oldValue, object newValue) { var view = bindable as View; if (view == null) { return; } view.Effects.Add(new FontEffect()); } class FontEffect : RoutingEffect { public FontEffect() : base("Xamarin.FontEffect") { } } } }
hackmods/N64Track
N64Track/N64Track/CustomFontEffect.cs
C#
mit
1,348
[ 30522, 2478, 2291, 1025, 2478, 2291, 1012, 6407, 1012, 12391, 1025, 2478, 2291, 1012, 11409, 4160, 1025, 2478, 2291, 1012, 3793, 1025, 2478, 2291, 1012, 11689, 2075, 1012, 8518, 1025, 2478, 1060, 8067, 6657, 1012, 3596, 1025, 3415, 15327, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<html> <head> <title>User agent detail - Mozilla/5.0 (Linux; Android 4.2.2; de-de; SAMSUNG GT-I9</title> <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.97.3/css/materialize.min.css"> <link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet"> </head> <body> <div class="container"> <div class="section"> <h1 class="header center orange-text">User agent detail</h1> <div class="row center"> <h5 class="header light"> Mozilla/5.0 (Linux; Android 4.2.2; de-de; SAMSUNG GT-I9 </h5> </div> </div> <div class="section"> <table class="striped"><tr><th></th><th colspan="3">General</th><th colspan="5">Device</th><th colspan="3">Bot</th><th colspan="2"></th></tr><tr><th>Provider</th><th>Browser</th><th>Engine</th><th>OS</th><th>Brand</th><th>Model</th><th>Type</th><th>Is mobile</th><th>Is touch</th><th>Is bot</th><th>Name</th><th>Type</th><th>Parse time</th><th>Actions</th></tr><tr><th colspan="14" class="green lighten-3">Source result (test suite)</th></tr><tr><td>ua-parser/uap-core<br /><small>vendor/thadafinser/uap-core/tests/test_device.yaml</small></td><td> </td><td> </td><td> </td><td style="border-left: 1px solid #555">Samsung</td><td>GT-I9</td><td></td><td></td><td></td><td style="border-left: 1px solid #555"></td><td></td><td></td><td></td><td> <!-- Modal Trigger --> <a class="modal-trigger btn waves-effect waves-light" href="#modal-test">Detail</a> <!-- Modal Structure --> <div id="modal-test" class="modal modal-fixed-footer"> <div class="modal-content"> <h4>Testsuite result detail</h4> <p><pre><code class="php">Array ( [user_agent_string] => Mozilla/5.0 (Linux; Android 4.2.2; de-de; SAMSUNG GT-I9 [family] => Samsung GT-I9 [brand] => Samsung [model] => GT-I9 ) </code></pre></p> </div> <div class="modal-footer"> <a href="#!" 
class="modal-action modal-close waves-effect waves-green btn-flat ">close</a> </div> </div> </td></tr><tr><th colspan="14" class="green lighten-3">Providers</th></tr><tr><td>BrowscapPhp<br /><small>6012</small></td> <td colspan="12" class="center-align red lighten-1"> <strong>No result found</strong> </td> </tr><tr><td>DonatjUAParser<br /><small>v0.5.0</small></td> <td colspan="12" class="center-align red lighten-1"> <strong>No result found</strong> </td> </tr><tr><td>NeutrinoApiCom<br /><small></small></td><td>Android Browser </td><td><i class="material-icons">close</i></td><td>Android 4.2.2</td><td style="border-left: 1px solid #555">Samsung</td><td>GT-I9</td><td>mobile-browser</td><td>yes</td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"></td><td></td><td><i class="material-icons">close</i></td><td>0.28597</td><td> <!-- Modal Trigger --> <a class="modal-trigger btn waves-effect waves-light" href="#modal-9b0fa449-ec1b-40c8-8b1c-9486eb3b9cbc">Detail</a> <!-- Modal Structure --> <div id="modal-9b0fa449-ec1b-40c8-8b1c-9486eb3b9cbc" class="modal modal-fixed-footer"> <div class="modal-content"> <h4>NeutrinoApiCom result detail</h4> <p><pre><code class="php">stdClass Object ( [mobile_screen_height] => 0 [is_mobile] => 1 [type] => mobile-browser [mobile_brand] => Samsung [mobile_model] => GT-I9 [version] => [is_android] => 1 [browser_name] => Android Browser [operating_system_family] => Android [operating_system_version] => 4.2.2 [is_ios] => [producer] => Samsung [operating_system] => Android 4.2.2 [mobile_screen_width] => 0 [mobile_browser] => ) </code></pre></p> </div> <div class="modal-footer"> <a href="#!" 
class="modal-action modal-close waves-effect waves-green btn-flat ">close</a> </div> </div> </td></tr><tr><td>PiwikDeviceDetector<br /><small>3.5.2</small></td><td>Android Browser </td><td>WebKit </td><td>Android 4.2</td><td style="border-left: 1px solid #555">Samsung</td><td>GT-I9</td><td>smartphone</td><td>yes</td><td></td><td style="border-left: 1px solid #555"></td><td></td><td></td><td>0.005</td><td> <!-- Modal Trigger --> <a class="modal-trigger btn waves-effect waves-light" href="#modal-21638055-738d-46ba-a1b1-f5114bc26475">Detail</a> <!-- Modal Structure --> <div id="modal-21638055-738d-46ba-a1b1-f5114bc26475" class="modal modal-fixed-footer"> <div class="modal-content"> <h4>PiwikDeviceDetector result detail</h4> <p><pre><code class="php">Array ( [client] => Array ( [type] => browser [name] => Android Browser [short_name] => AN [version] => [engine] => WebKit ) [operatingSystem] => Array ( [name] => Android [short_name] => AND [version] => 4.2 [platform] => ) [device] => Array ( [brand] => SA [brandName] => Samsung [model] => GT-I9 [device] => 1 [deviceName] => smartphone ) [bot] => [extra] => Array ( [isBot] => [isBrowser] => 1 [isFeedReader] => [isMobileApp] => [isPIM] => [isLibrary] => [isMediaPlayer] => [isCamera] => [isCarBrowser] => [isConsole] => [isFeaturePhone] => [isPhablet] => [isPortableMediaPlayer] => [isSmartDisplay] => [isSmartphone] => 1 [isTablet] => [isTV] => [isDesktop] => [isMobile] => 1 [isTouchEnabled] => ) ) </code></pre></p> </div> <div class="modal-footer"> <a href="#!" 
class="modal-action modal-close waves-effect waves-green btn-flat ">close</a> </div> </div> </td></tr><tr><td>SinergiBrowserDetector<br /><small>6.0.0</small></td><td>Navigator </td><td><i class="material-icons">close</i></td><td>Android 4.2.2</td><td style="border-left: 1px solid #555"><i class="material-icons">close</i></td><td></td><td><i class="material-icons">close</i></td><td>yes</td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td>0</td><td> <!-- Modal Trigger --> <a class="modal-trigger btn waves-effect waves-light" href="#modal-5415e7f2-ef7b-434c-abe0-b71ba9f6707c">Detail</a> <!-- Modal Structure --> <div id="modal-5415e7f2-ef7b-434c-abe0-b71ba9f6707c" class="modal modal-fixed-footer"> <div class="modal-content"> <h4>SinergiBrowserDetector result detail</h4> <p><pre><code class="php">Array ( [browser] => Sinergi\BrowserDetector\Browser Object ( [userAgent:Sinergi\BrowserDetector\Browser:private] => Sinergi\BrowserDetector\UserAgent Object ( [userAgentString:Sinergi\BrowserDetector\UserAgent:private] => Mozilla/5.0 (Linux; Android 4.2.2; de-de; SAMSUNG GT-I9 ) [name:Sinergi\BrowserDetector\Browser:private] => Navigator [version:Sinergi\BrowserDetector\Browser:private] => unknown [isRobot:Sinergi\BrowserDetector\Browser:private] => [isChromeFrame:Sinergi\BrowserDetector\Browser:private] => ) [operatingSystem] => Sinergi\BrowserDetector\Os Object ( [name:Sinergi\BrowserDetector\Os:private] => Android [version:Sinergi\BrowserDetector\Os:private] => 4.2.2 [isMobile:Sinergi\BrowserDetector\Os:private] => 1 [userAgent:Sinergi\BrowserDetector\Os:private] => Sinergi\BrowserDetector\UserAgent Object ( [userAgentString:Sinergi\BrowserDetector\UserAgent:private] => Mozilla/5.0 (Linux; Android 4.2.2; de-de; SAMSUNG GT-I9 ) ) [device] => Sinergi\BrowserDetector\Device Object ( [name:Sinergi\BrowserDetector\Device:private] => unknown 
[userAgent:Sinergi\BrowserDetector\Device:private] => Sinergi\BrowserDetector\UserAgent Object ( [userAgentString:Sinergi\BrowserDetector\UserAgent:private] => Mozilla/5.0 (Linux; Android 4.2.2; de-de; SAMSUNG GT-I9 ) ) ) </code></pre></p> </div> <div class="modal-footer"> <a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a> </div> </div> </td></tr><tr><td>UAParser<br /><small>v3.4.5</small></td><td>Android 4.2.2</td><td><i class="material-icons">close</i></td><td>Android 4.2.2</td><td style="border-left: 1px solid #555">Samsung</td><td>GT-I9</td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"></td><td></td><td><i class="material-icons">close</i></td><td>0.006</td><td> <!-- Modal Trigger --> <a class="modal-trigger btn waves-effect waves-light" href="#modal-346c1a98-5fd3-454f-b6c8-350f2f505d8b">Detail</a> <!-- Modal Structure --> <div id="modal-346c1a98-5fd3-454f-b6c8-350f2f505d8b" class="modal modal-fixed-footer"> <div class="modal-content"> <h4>UAParser result detail</h4> <p><pre><code class="php">UAParser\Result\Client Object ( [ua] => UAParser\Result\UserAgent Object ( [major] => 4 [minor] => 2 [patch] => 2 [family] => Android ) [os] => UAParser\Result\OperatingSystem Object ( [major] => 4 [minor] => 2 [patch] => 2 [patchMinor] => [family] => Android ) [device] => UAParser\Result\Device Object ( [brand] => Samsung [model] => GT-I9 [family] => Samsung GT-I9 ) [originalUserAgent] => Mozilla/5.0 (Linux; Android 4.2.2; de-de; SAMSUNG GT-I9 ) </code></pre></p> </div> <div class="modal-footer"> <a href="#!" 
class="modal-action modal-close waves-effect waves-green btn-flat ">close</a> </div> </div> </td></tr><tr><td>UserAgentStringCom<br /><small></small></td> <td colspan="12" class="center-align red lighten-1"> <strong>No result found</strong> </td> </tr><tr><td>WhatIsMyBrowserCom<br /><small></small></td><td>Stock Android Browser </td><td> </td><td>Android 4.2.2</td><td style="border-left: 1px solid #555">Samsung</td><td></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td>0.41396</td><td> <!-- Modal Trigger --> <a class="modal-trigger btn waves-effect waves-light" href="#modal-9795f66f-7271-430e-973a-a5c0e14dc35a">Detail</a> <!-- Modal Structure --> <div id="modal-9795f66f-7271-430e-973a-a5c0e14dc35a" class="modal modal-fixed-footer"> <div class="modal-content"> <h4>WhatIsMyBrowserCom result detail</h4> <p><pre><code class="php">stdClass Object ( [operating_system_name] => Android [simple_sub_description_string] => [simple_browser_string] => Stock Android Browser on Android (Jelly Bean) [browser_version] => [extra_info] => Array ( ) [operating_platform] => [extra_info_table] => Array ( ) [layout_engine_name] => [detected_addons] => Array ( ) [operating_system_flavour_code] => [hardware_architecture] => [operating_system_flavour] => [operating_system_frameworks] => Array ( ) [browser_name_code] => stock-android-browser [operating_system_version] => Jelly Bean [simple_operating_platform_string] => Samsung GT-I9 [is_abusive] => [layout_engine_version] => [browser_capabilities] => Array ( ) [operating_platform_vendor_name] => Samsung [operating_system] => Android (Jelly Bean) [operating_system_version_full] => 4.2.2 [operating_platform_code] => GT-I9 [browser_name] => Stock Android Browser [operating_system_name_code] 
=> android [user_agent] => Mozilla/5.0 (Linux; Android 4.2.2; de-de; SAMSUNG GT-I9 [browser_version_full] => [browser] => Stock Android Browser ) </code></pre></p> </div> <div class="modal-footer"> <a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a> </div> </div> </td></tr><tr><td>WhichBrowser<br /><small>2.0.10</small></td><td>Android Browser </td><td> </td><td>Android 4.2.2</td><td style="border-left: 1px solid #555">Samsung</td><td>GT-I9</td><td>mobile:smart</td><td>yes</td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"></td><td></td><td><i class="material-icons">close</i></td><td>0.09101</td><td> <!-- Modal Trigger --> <a class="modal-trigger btn waves-effect waves-light" href="#modal-342c8d32-4765-40a8-8a5c-af3a38d19ae4">Detail</a> <!-- Modal Structure --> <div id="modal-342c8d32-4765-40a8-8a5c-af3a38d19ae4" class="modal modal-fixed-footer"> <div class="modal-content"> <h4>WhichBrowser result detail</h4> <p><pre><code class="php">Array ( [browser] => Array ( [name] => Android Browser ) [os] => Array ( [name] => Android [version] => 4.2.2 ) [device] => Array ( [type] => mobile [subtype] => smart [manufacturer] => Samsung [model] => GT-I9 ) ) </code></pre></p> </div> <div class="modal-footer"> <a href="#!" 
class="modal-action modal-close waves-effect waves-green btn-flat ">close</a> </div> </div> </td></tr><tr><td>Woothee<br /><small>v1.2.0</small></td><td> </td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td>smartphone</td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td style="border-left: 1px solid #555"></td><td></td><td><i class="material-icons">close</i></td><td>0</td><td> <!-- Modal Trigger --> <a class="modal-trigger btn waves-effect waves-light" href="#modal-3f285ff5-314b-4db4-9948-54572e92e7b6">Detail</a> <!-- Modal Structure --> <div id="modal-3f285ff5-314b-4db4-9948-54572e92e7b6" class="modal modal-fixed-footer"> <div class="modal-content"> <h4>Woothee result detail</h4> <p><pre><code class="php">Array ( [category] => smartphone [os] => Android [os_version] => 4.2.2 [name] => UNKNOWN [version] => UNKNOWN [vendor] => UNKNOWN ) </code></pre></p> </div> <div class="modal-footer"> <a href="#!" 
class="modal-action modal-close waves-effect waves-green btn-flat ">close</a> </div> </div> </td></tr><tr><td>Wurfl<br /><small>1.6.4</small></td><td>Android Webkit 4.2</td><td><i class="material-icons">close</i></td><td>Android 4.2</td><td style="border-left: 1px solid #555"></td><td></td><td>Smartphone</td><td>yes</td><td>yes</td><td style="border-left: 1px solid #555"></td><td><i class="material-icons">close</i></td><td><i class="material-icons">close</i></td><td>0.047</td><td> <!-- Modal Trigger --> <a class="modal-trigger btn waves-effect waves-light" href="#modal-1a1aee36-7ce7-4111-a391-8e2c501f1532">Detail</a> <!-- Modal Structure --> <div id="modal-1a1aee36-7ce7-4111-a391-8e2c501f1532" class="modal modal-fixed-footer"> <div class="modal-content"> <h4>Wurfl result detail</h4> <p><pre><code class="php">Array ( [virtual] => Array ( [is_android] => true [is_ios] => false [is_windows_phone] => false [is_app] => false [is_full_desktop] => false [is_largescreen] => false [is_mobile] => true [is_robot] => false [is_smartphone] => true [is_touchscreen] => true [is_wml_preferred] => false [is_xhtmlmp_preferred] => false [is_html_preferred] => true [advertised_device_os] => Android [advertised_device_os_version] => 4.2 [advertised_browser] => Android Webkit [advertised_browser_version] => 4.2 [complete_device_name] => Generic Android 4.2 [form_factor] => Smartphone [is_phone] => true [is_app_webview] => false ) [all] => Array ( [brand_name] => Generic [model_name] => Android 4.2 [unique] => true [ununiqueness_handler] => [is_wireless_device] => true [device_claims_web_support] => true [has_qwerty_keyboard] => true [can_skip_aligned_link_row] => true [uaprof] => [uaprof2] => [uaprof3] => [nokia_series] => 0 [nokia_edition] => 0 [device_os] => Android [mobile_browser] => Android Webkit [mobile_browser_version] => [device_os_version] => 4.2 [pointing_method] => touchscreen [release_date] => 2012_october [marketing_name] => [model_extra_info] => [nokia_feature_pack] => 0 
[can_assign_phone_number] => true [is_tablet] => false [manufacturer_name] => [is_bot] => false [is_google_glass] => false [proportional_font] => false [built_in_back_button_support] => false [card_title_support] => true [softkey_support] => false [table_support] => true [numbered_menus] => false [menu_with_select_element_recommended] => false [menu_with_list_of_links_recommended] => true [icons_on_menu_items_support] => false [break_list_of_links_with_br_element_recommended] => true [access_key_support] => false [wrap_mode_support] => false [times_square_mode_support] => false [deck_prefetch_support] => false [elective_forms_recommended] => true [wizards_recommended] => false [image_as_link_support] => false [insert_br_element_after_widget_recommended] => false [wml_can_display_images_and_text_on_same_line] => false [wml_displays_image_in_center] => false [opwv_wml_extensions_support] => false [wml_make_phone_call_string] => wtai://wp/mc; [chtml_display_accesskey] => false [emoji] => false [chtml_can_display_images_and_text_on_same_line] => false [chtml_displays_image_in_center] => false [imode_region] => none [chtml_make_phone_call_string] => tel: [chtml_table_support] => false [xhtml_honors_bgcolor] => true [xhtml_supports_forms_in_table] => true [xhtml_support_wml2_namespace] => false [xhtml_autoexpand_select] => false [xhtml_select_as_dropdown] => false [xhtml_select_as_radiobutton] => false [xhtml_select_as_popup] => false [xhtml_display_accesskey] => false [xhtml_supports_invisible_text] => false [xhtml_supports_inline_input] => false [xhtml_supports_monospace_font] => false [xhtml_supports_table_for_layout] => true [xhtml_supports_css_cell_table_coloring] => true [xhtml_format_as_css_property] => false [xhtml_format_as_attribute] => false [xhtml_nowrap_mode] => false [xhtml_marquee_as_css_property] => false [xhtml_readable_background_color1] => #FFFFFF [xhtml_readable_background_color2] => #FFFFFF [xhtml_allows_disabled_form_elements] => true 
[xhtml_document_title_support] => true [xhtml_preferred_charset] => iso-8859-1 [opwv_xhtml_extensions_support] => false [xhtml_make_phone_call_string] => tel: [xhtmlmp_preferred_mime_type] => text/html [xhtml_table_support] => true [xhtml_send_sms_string] => sms: [xhtml_send_mms_string] => mms: [xhtml_file_upload] => supported [cookie_support] => true [accept_third_party_cookie] => true [xhtml_supports_iframe] => full [xhtml_avoid_accesskeys] => true [xhtml_can_embed_video] => none [ajax_support_javascript] => true [ajax_manipulate_css] => true [ajax_support_getelementbyid] => true [ajax_support_inner_html] => true [ajax_xhr_type] => standard [ajax_manipulate_dom] => true [ajax_support_events] => true [ajax_support_event_listener] => true [ajax_preferred_geoloc_api] => w3c_api [xhtml_support_level] => 4 [preferred_markup] => html_web_4_0 [wml_1_1] => false [wml_1_2] => false [wml_1_3] => false [html_wi_w3_xhtmlbasic] => true [html_wi_oma_xhtmlmp_1_0] => true [html_wi_imode_html_1] => false [html_wi_imode_html_2] => false [html_wi_imode_html_3] => false [html_wi_imode_html_4] => false [html_wi_imode_html_5] => false [html_wi_imode_htmlx_1] => false [html_wi_imode_htmlx_1_1] => false [html_wi_imode_compact_generic] => false [html_web_3_2] => true [html_web_4_0] => true [voicexml] => false [multipart_support] => false [total_cache_disable_support] => false [time_to_live_support] => false [resolution_width] => 320 [resolution_height] => 480 [columns] => 60 [max_image_width] => 320 [max_image_height] => 480 [rows] => 40 [physical_screen_width] => 34 [physical_screen_height] => 50 [dual_orientation] => true [density_class] => 1.0 [wbmp] => true [bmp] => false [epoc_bmp] => false [gif_animated] => false [jpg] => true [png] => true [tiff] => false [transparent_png_alpha] => true [transparent_png_index] => true [svgt_1_1] => true [svgt_1_1_plus] => false [greyscale] => false [gif] => true [colors] => 65536 [webp_lossy_support] => true [webp_lossless_support] => true 
[post_method_support] => true [basic_authentication_support] => true [empty_option_value_support] => true [emptyok] => false [nokia_voice_call] => false [wta_voice_call] => false [wta_phonebook] => false [wta_misc] => false [wta_pdc] => false [https_support] => true [phone_id_provided] => false [max_data_rate] => 3600 [wifi] => true [sdio] => false [vpn] => false [has_cellular_radio] => true [max_deck_size] => 2000000 [max_url_length_in_requests] => 256 [max_url_length_homepage] => 0 [max_url_length_bookmark] => 0 [max_url_length_cached_page] => 0 [max_no_of_connection_settings] => 0 [max_no_of_bookmarks] => 0 [max_length_of_username] => 0 [max_length_of_password] => 0 [max_object_size] => 0 [downloadfun_support] => false [directdownload_support] => true [inline_support] => false [oma_support] => true [ringtone] => false [ringtone_3gpp] => false [ringtone_midi_monophonic] => false [ringtone_midi_polyphonic] => false [ringtone_imelody] => false [ringtone_digiplug] => false [ringtone_compactmidi] => false [ringtone_mmf] => false [ringtone_rmf] => false [ringtone_xmf] => false [ringtone_amr] => false [ringtone_awb] => false [ringtone_aac] => false [ringtone_wav] => false [ringtone_mp3] => false [ringtone_spmidi] => false [ringtone_qcelp] => false [ringtone_voices] => 1 [ringtone_df_size_limit] => 0 [ringtone_directdownload_size_limit] => 0 [ringtone_inline_size_limit] => 0 [ringtone_oma_size_limit] => 0 [wallpaper] => false [wallpaper_max_width] => 0 [wallpaper_max_height] => 0 [wallpaper_preferred_width] => 0 [wallpaper_preferred_height] => 0 [wallpaper_resize] => none [wallpaper_wbmp] => false [wallpaper_bmp] => false [wallpaper_gif] => false [wallpaper_jpg] => false [wallpaper_png] => false [wallpaper_tiff] => false [wallpaper_greyscale] => false [wallpaper_colors] => 2 [wallpaper_df_size_limit] => 0 [wallpaper_directdownload_size_limit] => 0 [wallpaper_inline_size_limit] => 0 [wallpaper_oma_size_limit] => 0 [screensaver] => false [screensaver_max_width] => 0 
[screensaver_max_height] => 0 [screensaver_preferred_width] => 0 [screensaver_preferred_height] => 0 [screensaver_resize] => none [screensaver_wbmp] => false [screensaver_bmp] => false [screensaver_gif] => false [screensaver_jpg] => false [screensaver_png] => false [screensaver_greyscale] => false [screensaver_colors] => 2 [screensaver_df_size_limit] => 0 [screensaver_directdownload_size_limit] => 0 [screensaver_inline_size_limit] => 0 [screensaver_oma_size_limit] => 0 [picture] => false [picture_max_width] => 0 [picture_max_height] => 0 [picture_preferred_width] => 0 [picture_preferred_height] => 0 [picture_resize] => none [picture_wbmp] => false [picture_bmp] => false [picture_gif] => false [picture_jpg] => false [picture_png] => false [picture_greyscale] => false [picture_colors] => 2 [picture_df_size_limit] => 0 [picture_directdownload_size_limit] => 0 [picture_inline_size_limit] => 0 [picture_oma_size_limit] => 0 [video] => false [oma_v_1_0_forwardlock] => false [oma_v_1_0_combined_delivery] => false [oma_v_1_0_separate_delivery] => false [streaming_video] => true [streaming_3gpp] => true [streaming_mp4] => true [streaming_mov] => false [streaming_video_size_limit] => 0 [streaming_real_media] => none [streaming_flv] => false [streaming_3g2] => false [streaming_vcodec_h263_0] => 10 [streaming_vcodec_h263_3] => -1 [streaming_vcodec_mpeg4_sp] => 2 [streaming_vcodec_mpeg4_asp] => -1 [streaming_vcodec_h264_bp] => 3.0 [streaming_acodec_amr] => nb [streaming_acodec_aac] => lc [streaming_wmv] => none [streaming_preferred_protocol] => rtsp [streaming_preferred_http_protocol] => apple_live_streaming [wap_push_support] => false [connectionless_service_indication] => false [connectionless_service_load] => false [connectionless_cache_operation] => false [connectionoriented_unconfirmed_service_indication] => false [connectionoriented_unconfirmed_service_load] => false [connectionoriented_unconfirmed_cache_operation] => false [connectionoriented_confirmed_service_indication] 
=> false [connectionoriented_confirmed_service_load] => false [connectionoriented_confirmed_cache_operation] => false [utf8_support] => true [ascii_support] => false [iso8859_support] => false [expiration_date] => false [j2me_cldc_1_0] => false [j2me_cldc_1_1] => false [j2me_midp_1_0] => false [j2me_midp_2_0] => false [doja_1_0] => false [doja_1_5] => false [doja_2_0] => false [doja_2_1] => false [doja_2_2] => false [doja_3_0] => false [doja_3_5] => false [doja_4_0] => false [j2me_jtwi] => false [j2me_mmapi_1_0] => false [j2me_mmapi_1_1] => false [j2me_wmapi_1_0] => false [j2me_wmapi_1_1] => false [j2me_wmapi_2_0] => false [j2me_btapi] => false [j2me_3dapi] => false [j2me_locapi] => false [j2me_nokia_ui] => false [j2me_motorola_lwt] => false [j2me_siemens_color_game] => false [j2me_siemens_extension] => false [j2me_heap_size] => 0 [j2me_max_jar_size] => 0 [j2me_storage_size] => 0 [j2me_max_record_store_size] => 0 [j2me_screen_width] => 0 [j2me_screen_height] => 0 [j2me_canvas_width] => 0 [j2me_canvas_height] => 0 [j2me_bits_per_pixel] => 0 [j2me_audio_capture_enabled] => false [j2me_video_capture_enabled] => false [j2me_photo_capture_enabled] => false [j2me_capture_image_formats] => none [j2me_http] => false [j2me_https] => false [j2me_socket] => false [j2me_udp] => false [j2me_serial] => false [j2me_gif] => false [j2me_gif89a] => false [j2me_jpg] => false [j2me_png] => false [j2me_bmp] => false [j2me_bmp3] => false [j2me_wbmp] => false [j2me_midi] => false [j2me_wav] => false [j2me_amr] => false [j2me_mp3] => false [j2me_mp4] => false [j2me_imelody] => false [j2me_rmf] => false [j2me_au] => false [j2me_aac] => false [j2me_realaudio] => false [j2me_xmf] => false [j2me_wma] => false [j2me_3gpp] => false [j2me_h263] => false [j2me_svgt] => false [j2me_mpeg4] => false [j2me_realvideo] => false [j2me_real8] => false [j2me_realmedia] => false [j2me_left_softkey_code] => 0 [j2me_right_softkey_code] => 0 [j2me_middle_softkey_code] => 0 [j2me_select_key_code] => 0 
[j2me_return_key_code] => 0 [j2me_clear_key_code] => 0 [j2me_datefield_no_accepts_null_date] => false [j2me_datefield_broken] => false [receiver] => false [sender] => false [mms_max_size] => 0 [mms_max_height] => 0 [mms_max_width] => 0 [built_in_recorder] => false [built_in_camera] => true [mms_jpeg_baseline] => false [mms_jpeg_progressive] => false [mms_gif_static] => false [mms_gif_animated] => false [mms_png] => false [mms_bmp] => false [mms_wbmp] => false [mms_amr] => false [mms_wav] => false [mms_midi_monophonic] => false [mms_midi_polyphonic] => false [mms_midi_polyphonic_voices] => 0 [mms_spmidi] => false [mms_mmf] => false [mms_mp3] => false [mms_evrc] => false [mms_qcelp] => false [mms_ota_bitmap] => false [mms_nokia_wallpaper] => false [mms_nokia_operatorlogo] => false [mms_nokia_3dscreensaver] => false [mms_nokia_ringingtone] => false [mms_rmf] => false [mms_xmf] => false [mms_symbian_install] => false [mms_jar] => false [mms_jad] => false [mms_vcard] => false [mms_vcalendar] => false [mms_wml] => false [mms_wbxml] => false [mms_wmlc] => false [mms_video] => false [mms_mp4] => false [mms_3gpp] => false [mms_3gpp2] => false [mms_max_frame_rate] => 0 [nokiaring] => false [picturemessage] => false [operatorlogo] => false [largeoperatorlogo] => false [callericon] => false [nokiavcard] => false [nokiavcal] => false [sckl_ringtone] => false [sckl_operatorlogo] => false [sckl_groupgraphic] => false [sckl_vcard] => false [sckl_vcalendar] => false [text_imelody] => false [ems] => false [ems_variablesizedpictures] => false [ems_imelody] => false [ems_odi] => false [ems_upi] => false [ems_version] => 0 [siemens_ota] => false [siemens_logo_width] => 101 [siemens_logo_height] => 29 [siemens_screensaver_width] => 101 [siemens_screensaver_height] => 50 [gprtf] => false [sagem_v1] => false [sagem_v2] => false [panasonic] => false [sms_enabled] => true [wav] => false [mmf] => false [smf] => false [mld] => false [midi_monophonic] => false [midi_polyphonic] => false 
[sp_midi] => false [rmf] => false [xmf] => false [compactmidi] => false [digiplug] => false [nokia_ringtone] => false [imelody] => false [au] => false [amr] => false [awb] => false [aac] => true [mp3] => true [voices] => 1 [qcelp] => false [evrc] => false [flash_lite_version] => [fl_wallpaper] => false [fl_screensaver] => false [fl_standalone] => false [fl_browser] => false [fl_sub_lcd] => false [full_flash_support] => false [css_supports_width_as_percentage] => true [css_border_image] => webkit [css_rounded_corners] => webkit [css_gradient] => none [css_spriting] => true [css_gradient_linear] => none [is_transcoder] => false [transcoder_ua_header] => user-agent [rss_support] => false [pdf_support] => true [progressive_download] => true [playback_vcodec_h263_0] => 10 [playback_vcodec_h263_3] => -1 [playback_vcodec_mpeg4_sp] => 0 [playback_vcodec_mpeg4_asp] => -1 [playback_vcodec_h264_bp] => 3.0 [playback_real_media] => none [playback_3gpp] => true [playback_3g2] => false [playback_mp4] => true [playback_mov] => false [playback_acodec_amr] => nb [playback_acodec_aac] => none [playback_df_size_limit] => 0 [playback_directdownload_size_limit] => 0 [playback_inline_size_limit] => 0 [playback_oma_size_limit] => 0 [playback_acodec_qcelp] => false [playback_wmv] => none [hinted_progressive_download] => true [html_preferred_dtd] => html4 [viewport_supported] => true [viewport_width] => device_width_token [viewport_userscalable] => no [viewport_initial_scale] => [viewport_maximum_scale] => [viewport_minimum_scale] => [mobileoptimized] => false [handheldfriendly] => false [canvas_support] => full [image_inlining] => true [is_smarttv] => false [is_console] => false [nfc_support] => false [ux_full_desktop] => false [jqm_grade] => A [is_sencha_touch_ok] => false [controlcap_is_smartphone] => default [controlcap_is_ios] => default [controlcap_is_android] => default [controlcap_is_robot] => default [controlcap_is_app] => default [controlcap_advertised_device_os] => default 
[controlcap_advertised_device_os_version] => default [controlcap_advertised_browser] => default [controlcap_advertised_browser_version] => default [controlcap_is_windows_phone] => default [controlcap_is_full_desktop] => default [controlcap_is_largescreen] => default [controlcap_is_mobile] => default [controlcap_is_touchscreen] => default [controlcap_is_wml_preferred] => default [controlcap_is_xhtmlmp_preferred] => default [controlcap_is_html_preferred] => default [controlcap_form_factor] => default [controlcap_complete_device_name] => default ) ) </code></pre></p> </div> <div class="modal-footer"> <a href="#!" class="modal-action modal-close waves-effect waves-green btn-flat ">close</a> </div> </div> </td></tr></table> </div> <div class="section"> <h1 class="header center orange-text">About this comparison</h1> <div class="row center"> <h5 class="header light"> The primary goal of this project is simple<br /> I wanted to know which user agent parser is the most accurate in each part - device detection, bot detection and so on...<br /> <br /> The secondary goal is to provide a source for all user agent parsers to improve their detection based on this results.<br /> <br /> You can also improve this further, by suggesting ideas at <a href="https://github.com/ThaDafinser/UserAgentParserComparison">ThaDafinser/UserAgentParserComparison</a><br /> <br /> The comparison is based on the abstraction by <a href="https://github.com/ThaDafinser/UserAgentParser">ThaDafinser/UserAgentParser</a> </h5> </div> </div> <div class="card"> <div class="card-content"> Comparison created <i>2016-02-13 13:34:45</i> | by <a href="https://github.com/ThaDafinser">ThaDafinser</a> </div> </div> </div> <script src="https://code.jquery.com/jquery-2.1.4.min.js"></script> <script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.97.3/js/materialize.min.js"></script> <script src="http://cdnjs.cloudflare.com/ajax/libs/list.js/1.1.1/list.min.js"></script> <script> $(document).ready(function(){ 
// the "href" attribute of .modal-trigger must specify the modal ID that wants to be triggered $('.modal-trigger').leanModal(); }); </script> </body> </html>
ThaDafinser/UserAgentParserComparison
v4/user-agent-detail/87/61/8761da88-f077-4dee-b78e-5dc491e35119.html
HTML
mit
40,898
[ 30522, 1026, 16129, 1028, 1026, 2132, 1028, 1026, 2516, 1028, 5310, 4005, 6987, 1011, 9587, 5831, 4571, 1013, 1019, 1012, 1014, 1006, 11603, 1025, 11924, 1018, 1012, 1016, 1012, 1016, 1025, 2139, 1011, 2139, 1025, 19102, 14181, 1011, 1045, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/* Copyright 2010, 2011 Michael Steinert * This file is part of Log4g. * * Log4g is free software: you can redistribute it and/or modify it under the * terms of the GNU Lesser General Public License as published by the Free * Software Foundation, either version 2.1 of the License, or (at your option) * any later version. * * Log4g is distributed in the hope that it will be useful, but WITHOUT ANY * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for * more details. * * You should have received a copy of the GNU Lesser General Public License * along with Log4g. If not, see <http://www.gnu.org/licenses/>. */ /** * SECTION: pattern-parser * @short_description: Parse pattern layout conversion patterns * @see_also: #Log4gPatternLayoutClass, #Log4gPatternConverterClass * * This class performs most of the work done by the pattern layout class. * The conversion pattern is parsed and a chained list of pattern converters * is created. 
*/ #ifdef HAVE_CONFIG_H #include "config.h" #endif #include <errno.h> #include "helpers/pattern-parser.h" #include "log4g/layout.h" G_DEFINE_DYNAMIC_TYPE(Log4gPatternParser, log4g_pattern_parser, G_TYPE_OBJECT) #define ASSIGN_PRIVATE(instance) \ (G_TYPE_INSTANCE_GET_PRIVATE(instance, LOG4G_TYPE_PATTERN_PARSER, \ struct Private)) #define GET_PRIVATE(instance) \ ((struct Private *)((Log4gPatternParser *)instance)->priv) #define ESCAPE_CHAR '%' struct Private { GString *buffer; struct Log4gFormattingInfo formatting; gchar *pattern; gint length; gint i; Log4gPatternConverter *head; Log4gPatternConverter *tail; gint state; }; enum PatternParserState { LITERAL_STATE = 0, CONVERTER_STATE, DOT_STATE, MIN_STATE, MAX_STATE }; static void formatting_info_reset_(struct Log4gFormattingInfo *info) { info->min = -1; info->max = 0x7fffffff; info->align = FALSE; } static void log4g_pattern_parser_init(Log4gPatternParser *self) { self->priv = ASSIGN_PRIVATE(self); struct Private *priv = GET_PRIVATE(self); priv->buffer = g_string_sized_new(32); formatting_info_reset_(&priv->formatting); priv->length = 0; priv->i = 0; priv->head = priv->tail = NULL; priv->state = LITERAL_STATE; } static void dispose(GObject *base) { struct Private *priv = GET_PRIVATE(base); if (priv->head) { g_object_unref(priv->head); priv->head = priv->tail = NULL; } G_OBJECT_CLASS(log4g_pattern_parser_parent_class)->dispose(base); } static void finalize(GObject *base) { struct Private *priv = GET_PRIVATE(base); if (priv->buffer) { g_string_free(priv->buffer, TRUE); } g_free(priv->pattern); G_OBJECT_CLASS(log4g_pattern_parser_parent_class)->finalize(base); } static void log4g_pattern_parser_class_init(Log4gPatternParserClass *klass) { GObjectClass *object_class = G_OBJECT_CLASS(klass); object_class->dispose = dispose; object_class->finalize = finalize; g_type_class_add_private(klass, sizeof(struct Private)); } static void log4g_pattern_parser_class_finalize( G_GNUC_UNUSED Log4gPatternParserClass *klass) { /* do 
nothing */ } void log4g_pattern_parser_register(GTypeModule *module) { log4g_pattern_parser_register_type(module); } /** * log4g_pattern_parser_new: * @pattern: The conversion pattern to parse. * * Create a new pattern parser object. * * Returns: A new pattern parser object. * Since: 0.1 */ Log4gPatternParser * log4g_pattern_parser_new(const gchar *pattern) { Log4gPatternParser *self = g_object_new(LOG4G_TYPE_PATTERN_PARSER, NULL); if (!self) { return NULL; } struct Private *priv = GET_PRIVATE(self); priv->pattern = g_strdup(pattern); if (!priv->pattern) { g_object_unref(self); return NULL; } priv->length = strlen(pattern); return self; } /** * log4g_pattern_parser_add_to_list: * @self: A pattern converter object. * @pc: The pattern converter to add to @self. * * Add a pattern converter to the current list. * * Since: 0.1 */ void log4g_pattern_parser_add_to_list(Log4gPatternParser *self, Log4gPatternConverter *pc) { struct Private *priv = GET_PRIVATE(self); if (!priv->head) { priv->head = priv->tail = g_object_ref(pc); } else { log4g_pattern_converter_set_next(priv->tail, pc); priv->tail = pc; } } /** * log4g_pattern_parser_extract_option: * @self: A pattern converter object. * * Extract a braced conversion pattern option. * * Returns: A conversion pattern option or %NULL if none was found. The * caller must free the returned string with g_free(). * Since: 0.1 */ gchar * log4g_pattern_parser_extract_option(Log4gPatternParser *self) { gchar *option = NULL; struct Private *priv = GET_PRIVATE(self); if ((priv->i < priv->length) && ('{' == priv->pattern[priv->i])) { gint end = 0; while (priv->pattern[++end + priv->i]) { if ('}' == priv->pattern[end + priv->i]) { break; } } if (end) { option = g_strndup(&priv->pattern[priv->i + 1], end - 1); } priv->i += (end + 1); } return option; } /** * log4g_pattern_parser_extract_precision_option: * @self: A pattern converter object. * * Extract a precision option from a conversion pattern. 
* * Returns: The precision value or zero if none was found. * Since: 0.1 */ gint log4g_pattern_parser_extract_precision_option(Log4gPatternParser *self) { glong r = 0; gchar *option = log4g_pattern_parser_extract_option(self); if (option) { errno = 0; r = g_ascii_strtoll(option, NULL, 10); if (errno) { log4g_log_error(Q_("category option \"%s\" is not " "a decimal number: %s"), option, g_strerror(errno)); } if (r <= 0) { log4g_log_error(Q_("precision option (%s) is not " "a positive integer"), option); r = 0; } g_free(option); } return (gint)r; } /** * log4g_pattern_parser_parse: * @self: A pattern converter object. * * Parse the conversion pattern. * * Returns: A chained list of pattern converters or %NULL if none were created. * Since: 0.1 */ Log4gPatternConverter * log4g_pattern_parser_parse(Log4gPatternParser *self) { struct Private *priv = GET_PRIVATE(self); Log4gPatternConverter *pc; priv->i = 0; if (priv->head) { g_object_unref(priv->head); priv->head = NULL; } while (priv->i < priv->length) { gchar c = priv->pattern[priv->i++]; switch (priv->state) { case LITERAL_STATE: if (priv->i == priv->length) { g_string_append_c(priv->buffer, c); continue; } if (ESCAPE_CHAR == c) { switch (priv->pattern[priv->i]) { case ESCAPE_CHAR: g_string_append_c(priv->buffer, c); ++priv->i; break; case 'n': g_string_append(priv->buffer, LOG4G_LAYOUT_LINE_SEP); ++priv->i; break; default: if (priv->buffer->len != 0) { pc = log4g_literal_pattern_converter_new( priv->buffer->str); if (pc) { log4g_pattern_parser_add_to_list(self, pc); g_object_unref(pc); pc = NULL; } } g_string_set_size(priv->buffer, 0); g_string_append_c(priv->buffer, c); priv->state = CONVERTER_STATE; formatting_info_reset_(&priv->formatting); break; } } else { g_string_append_c(priv->buffer, c); } break; case CONVERTER_STATE: g_string_append_c(priv->buffer, c); switch (c) { case '-': priv->formatting.align = TRUE; break; case '.': priv->state = DOT_STATE; break; default: if (c >= '0' && c <= '9') { 
priv->formatting.min = c - '0'; priv->state = MIN_STATE; } else { log4g_pattern_parser_finalize_converter( self, c); } break; } break; case MIN_STATE: g_string_append_c(priv->buffer, c); if (c >= '0' && c <= '9') { priv->formatting.min = priv->formatting.min * 10 + (c - '0'); } else if ('.' == c) { priv->state = DOT_STATE; } else { log4g_pattern_parser_finalize_converter(self, c); } break; case DOT_STATE: g_string_append_c(priv->buffer, c); if (c >= '0' && c <= '9') { priv->formatting.max = c - '0'; priv->state = MAX_STATE; } else { log4g_log_error(Q_("error occurred in " "position %d\nwas expecting " "digit, instead got char %c"), priv->i, c); priv->state = LITERAL_STATE; } break; case MAX_STATE: g_string_append_c(priv->buffer, c); if (c >= '0' && c <= '9') { priv->formatting.max = priv->formatting.max * 10 + (c - '0'); } else { log4g_pattern_parser_finalize_converter(self, c); priv->state = LITERAL_STATE; } break; } /* switch */ } /* while */ if (priv->buffer->len != 0) { pc = log4g_literal_pattern_converter_new(priv->buffer->str); if (pc) { log4g_pattern_parser_add_to_list(self, pc); g_object_unref(pc); pc = NULL; } } pc = priv->head; priv->head = NULL; return pc; /* caller owns return value */ } /** * log4g_pattern_parser_finalize_converter: * @self: A pattern converter object. * @c: The current conversion character. * * Finalize the conversion pattern being parsed. 
* * Since: 0.1 */ void log4g_pattern_parser_finalize_converter(Log4gPatternParser *self, gchar c) { struct Private *priv = GET_PRIVATE(self); Log4gPatternConverter *pc = NULL; switch (c) { case 'c': pc = log4g_category_pattern_converter_new(&priv->formatting, log4g_pattern_parser_extract_precision_option( self)); break; case 'd': { char *format = log4g_pattern_parser_extract_option(self); if (!format) { format = g_strdup("%c"); if (!format) { break; } } pc = log4g_date_pattern_converter_new(&priv->formatting, format); if (!pc) { g_free(format); } break; } case 'F': pc = log4g_location_pattern_converter_new(&priv->formatting, FILE_LOCATION_CONVERTER); break; case 'l': pc = log4g_location_pattern_converter_new(&priv->formatting, FULL_LOCATION_CONVERTER); break; case 'L': pc = log4g_location_pattern_converter_new(&priv->formatting, LINE_LOCATION_CONVERTER); break; case 'm': pc = log4g_basic_pattern_converter_new(&priv->formatting, MESSAGE_CONVERTER); break; case 'M': pc = log4g_location_pattern_converter_new(&priv->formatting, METHOD_LOCATION_CONVERTER); break; case 'p': pc = log4g_basic_pattern_converter_new(&priv->formatting, LEVEL_CONVERTER); break; case 'r': pc = log4g_basic_pattern_converter_new(&priv->formatting, RELATIVE_TIME_CONVERTER); break; case 't': pc = log4g_basic_pattern_converter_new(&priv->formatting, THREAD_CONVERTER); break; case 'x': pc = log4g_basic_pattern_converter_new(&priv->formatting, NDC_CONVERTER); break; case 'X': { gchar *opt = log4g_pattern_parser_extract_option(self); if (opt) { pc = log4g_mdc_pattern_converter_new(&priv->formatting, opt); if (!pc) { g_free(opt); } } break; } default: log4g_log_error(Q_("unexpected char [%c] at position %d in " "conversion pattern"), c, priv->i); pc = log4g_literal_pattern_converter_new(priv->buffer->str); break; } if (pc) { log4g_pattern_parser_add_converter(self, pc); g_object_unref(pc); pc = NULL; } } /** * log4g_pattern_parser_add_converter: * @self: A pattern converter object. 
* @pc: The pattern converter to add to @self. * * Add a pattern converter to a pattern parser object. * * Since: 0.1 */ void log4g_pattern_parser_add_converter(Log4gPatternParser *self, Log4gPatternConverter *pc) { struct Private *priv = GET_PRIVATE(self); g_string_set_size(priv->buffer, 0); log4g_pattern_parser_add_to_list(self, pc); priv->state = LITERAL_STATE; formatting_info_reset_(&priv->formatting); }
msteinert/log4g
modules/layouts/pattern-parser.c
C
lgpl-2.1
11,630
[ 30522, 1013, 1008, 9385, 2230, 1010, 2249, 2745, 21264, 2102, 1008, 2023, 5371, 2003, 2112, 1997, 8833, 2549, 2290, 1012, 1008, 1008, 8833, 2549, 2290, 2003, 2489, 4007, 30524, 2923, 3089, 8569, 2618, 2009, 1998, 1013, 2030, 19933, 2009, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
package jp.co.rakuten.checkout.lite.model; import static org.junit.Assert.assertEquals; import org.junit.Before; import org.junit.Test; import jp.co.rakuten.checkout.lite.RpayLite; import jp.co.rakuten.checkout.lite.RpayLiteTest; import jp.co.rakuten.checkout.lite.exception.UnexpectedValueException; import jp.co.rakuten.checkout.lite.net.Webhook; import jp.co.rakuten.checkout.lite.net.Webhook.SignatureVerificationException; public class EventTest extends RpayLiteTest { String payload = ""; @Before public void setUpAll() { payload += "{\"object\": \"event\",\"id\": \"evt_ace3a9e65ad548a8b5c8de7965efa160\",\"livemode\": false,\"type\": \"charge.check\",\"synchronous\": true,\"data\": {\"object\": {\"object\": \"charge\","; payload += "\"open_id\": \"https://myid.rakuten.co.jp/openid/user/h65MxxxxxxxQxn0wJENoHHsalseDD==\", \"id\": null,\"cipher\": null,\"livemode\": false,\"currency\": \"jpy\",\"amount\": 5000,\"point\": 1000,"; payload += "\"cart_id\": \"cart_id1\",\"paid\": false,\"captured\": false,\"status\": null,\"refunded\": false,"; payload += "\"items\": [{\"id\": \"item_id1\",\"name\": \"item1\",\"quantity\": 10,\"unit_price\": 1000},{\"id\": \"item_id2\",\"name\": \"item2\",\"quantity\": 20,\"unit_price\": 2000}],"; payload += "\"address\": null,\"created\": null,\"updated\": null}},\"pending_webhooks\":0,\"created\":1433862000}"; } @Test public void testConstruct() throws UnexpectedValueException, SignatureVerificationException { RpayLite.setWebhookSignature("123"); Event ev = Webhook.constructEvent(payload, "123", "123"); assertEquals(ev.getId(), "evt_ace3a9e65ad548a8b5c8de7965efa160"); assertEquals(ev.getData().getObject().getPoint(), 1000); } @Test(expected = SignatureVerificationException.class) public void testNullSigHeader() throws SignatureVerificationException, UnexpectedValueException { Webhook.constructEvent(payload, null, "123"); } @Test(expected = SignatureVerificationException.class) public void testSignatureNotEqual() throws 
SignatureVerificationException, UnexpectedValueException { RpayLite.setWebhookSignature("123"); Webhook.constructEvent(payload, "188", "123"); } @Test(expected = UnexpectedValueException.class) public void testInvalidJson() throws SignatureVerificationException, UnexpectedValueException { String payloadError = "{\"object\" \"event\",\"id\": \"evt_0a28558a912043d7bb82ba0702afda7f\",\"livemode\": false,\"type\": \"ping\",\"synchronous\": true,\"data\": null,\"pending_webhooks\": 0,\"created\": 1499068723}"; Webhook.constructEvent(payloadError, "188", "123"); } @Test(expected = SignatureVerificationException.class) public void testNullSignature() throws SignatureVerificationException, UnexpectedValueException { Webhook.constructEvent(payload, "188", null); } @Test public void testExceptionSigHeader() throws UnexpectedValueException { try { Webhook.constructEvent(payload, "188", "123"); } catch (SignatureVerificationException e) { assertEquals("188", e.getSigHeader()); } } }
rpayonline/rpayonline-lite-java
src/test/java/jp/co/rakuten/checkout/lite/model/EventTest.java
Java
mit
3,237
[ 30522, 7427, 16545, 1012, 2522, 1012, 10958, 5283, 6528, 1012, 4638, 5833, 1012, 5507, 2063, 1012, 2944, 1025, 12324, 10763, 8917, 1012, 12022, 4183, 1012, 20865, 1012, 20865, 2063, 26426, 2015, 1025, 12324, 8917, 1012, 12022, 4183, 1012, 2...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/** * FreeRDP: A Remote Desktop Protocol Implementation * FreeRDP Proxy Server * * Copyright 2019 Kobi Mizrachi <kmizrachi18@gmail.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef FREERDP_SERVER_PROXY_RDPSND_H #define FREERDP_SERVER_PROXY_RDPSND_H #include <freerdp/client/rdpsnd.h> #include <freerdp/server/rdpsnd.h> #include <freerdp/server/proxy/proxy_context.h> BOOL pf_server_rdpsnd_init(pServerContext* ps); void pf_server_rdpsnd_free(pServerContext* ps); #endif /* FREERDP_SERVER_PROXY_RDPSND_H */
DavBfr/FreeRDP
server/proxy/pf_rdpsnd.h
C
apache-2.0
1,040
[ 30522, 1013, 1008, 1008, 1008, 2489, 4103, 2361, 1024, 1037, 6556, 15363, 8778, 7375, 1008, 2489, 4103, 2361, 24540, 8241, 1008, 1008, 9385, 10476, 12849, 5638, 2771, 2480, 22648, 4048, 1026, 2463, 10993, 22648, 4048, 15136, 1030, 20917, 40...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
# <img src="http://i.imgur.com/h51t4bA.png" alt="AWESOME - Aberystwyth Web Evaluation Surveys Of Module Experiences" height="88px" /> [![Build Status](https://magnum.travis-ci.com/bbrks/AWESOME.svg?token=xqotcpsHvJmZdKpQBoyp)](https://magnum.travis-ci.com/bbrks/AWESOME) --- AWESOME is a web-based module evaluation questionnaire generator for the monitoring and evaluation of teaching. ## Installation Clone the repo, copy config.sample.php to config.php and edit ### Apache Make sure .htaccess rules are allowed and mod_rewrite is enabled. ### Lighttpd Use the following rewrite rule in place of the .htaccess rules ``` url.rewrite-if-not-file = ( "^/admin/?$" => "$0", "^/admin/([^?]*)(\?.*)?$" => "/admin/$1.php$2", "^/([^?]*)(\?(.*))?$" => "/index.php?url=$1&$3" ) ``` ### Database Run the SQL dump in `src/db` to populate table structure. ## Devblog http://diss.bbrks.me [<img src="http://dev.bbrks.me/feedimg/image.php?url=diss.bbrks.me/feed&scale=2" height="65px" />](http://diss.bbrks.me) ## Authors - Hannah Dee - Keiron O'Shea - Ben Brooks - Joseph Carter ## Copyright and License Code and documentation copyright 2014-2015. Code released under [the MIT license](https://github.com/bbrks/AWESOME/blob/master/LICENSE).
bbrks/AWESOME
README.md
Markdown
mit
1,254
[ 30522, 1001, 1026, 10047, 2290, 5034, 2278, 1027, 1000, 8299, 1024, 1013, 1013, 1045, 1012, 10047, 27390, 1012, 4012, 1013, 1044, 22203, 2102, 2549, 3676, 1012, 1052, 3070, 1000, 12456, 1027, 1000, 12476, 1011, 14863, 24769, 2102, 18418, 27...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using Microsoft.TeamFoundation.Client; using Microsoft.TeamFoundation.WorkItemTracking.Client; using Microsoft.TeamFoundation.VersionControl.Client; using Microsoft.TeamFoundation.Server; namespace tfinfo { public class TfsInfo { public IList<WorkItemInfo> WorkItems { get; set; } public IList<ChangesetInfo> Changes { get; set; } public IList<Iteration> Iterations { get; set; } internal static TfsInfo Collect(Options options) { Uri collectionUri = new Uri(options.Collection); TfsTeamProjectCollection tpc = new TfsTeamProjectCollection(collectionUri); var changes = GetChangesets(tpc, options); var workItems = GetWorkitems(tpc, options, changes); var iterations = GetIterations(tpc, options, workItems); return new TfsInfo() { WorkItems = workItems, Changes = changes, Iterations = iterations }; } private static List<Iteration> GetIterations(TfsTeamProjectCollection tpc, Options options, IList<WorkItemInfo> workitems) { var iterations = new List<Iteration>(); WorkItemStore workItemStore = tpc.GetService<WorkItemStore>(); Project teamProject = workItemStore.Projects[options.Project]; var css = tpc.GetService<ICommonStructureService>(); foreach (Node iter in teamProject.IterationRootNodes) { var i = new Iteration() { Id = iter.Id, Uri = iter.Uri.ToString(), Name = iter.Name }; var info = css.GetNode(i.Uri); i.StartDate = info.StartDate; i.FinishDate = info.FinishDate; i.WorkItems.AddRange(workitems.Where(wi => wi.IterationId == i.Id)); iterations.Add(i); } return iterations.OrderByDescending(i => i.StartDate).ToList(); } private static IList<ChangesetInfo> GetChangesets(TfsTeamProjectCollection tpc, Options options) { var changes = new List<ChangesetInfo>(); if (options.NoChangesets) return changes; VersionControlServer versionControl = tpc.GetService<VersionControlServer>(); var css = versionControl.QueryHistory(options.Branch, RecursionType.Full); foreach 
(var cs in css) { ChangesetInfo change = new ChangesetInfo() { Id = cs.ChangesetId.ToString(), Uri = cs.ArtifactUri.ToString(), FullUri = cs.ArtifactUri.ToString() + "?url=" + tpc.Uri.AbsoluteUri, Comment = cs.Comment, Author = cs.CommitterDisplayName, CreatedAt = cs.CreationDate }; changes.Add(change); foreach (var note in cs.CheckinNote.Values) { change.Notes.Add(new Property() { Name = note.Name, Value = note.Value }); } foreach (var wi in cs.AssociatedWorkItems) { change.Related.Add(new WorkItemInfo() { Id = wi.Id.ToString(), Type = wi.WorkItemType, State = wi.State, Title = wi.Title, }); } foreach (var file in versionControl.ArtifactProvider.GetChangeset(cs.ArtifactUri).Changes) { change.Changes.Add(new Change() { Type = file.ChangeType.ToString(), FullPath = file.Item.ServerItem, Path = file.Item.ServerItem.Substring(options.Branch.Length) }); } } return changes; } private static IList<WorkItemInfo> GetWorkitems(TfsTeamProjectCollection tpc, Options options, IList<ChangesetInfo> changesets) { var workItems = new Dictionary<string, WorkItemInfo>(); if (options.NoWorkItems) return workItems.Values.ToList(); WorkItemStore workItemStore = tpc.GetService<WorkItemStore>(); Project teamProject = workItemStore.Projects[options.Project]; string wiql = string.Format("SELECT * FROM WorkItems WHERE [System.TeamProject] = '{0}' ORDER BY [System.Id] ", options.Project); WorkItemCollection wic = workItemStore.Query(wiql); foreach (WorkItem wi in wic) { var info = new WorkItemInfo() { Type = wi.Type.Name, Id = wi.Id.ToString(), Uri = wi.Uri.ToString(), FullUri = wi.Uri + "?url=" + tpc.Uri.AbsoluteUri, Title = wi.Title, State = wi.State, Tags = wi.Tags, Author = wi.CreatedBy, CreatedAt = wi.CreatedDate, Description = wi.Description, IterationId = wi.IterationId, Iteration = wi.IterationPath }; workItems[info.Id] = info; foreach (Link li in wi.Links) { var ex = li as ExternalLink; if (ex != null) { info.Related.Add(new WorkItemInfo() { Type = li.ArtifactLinkType.Name, Uri = 
ex.LinkedArtifactUri, FullUri = ex.LinkedArtifactUri + "?url=" + tpc.Uri.AbsoluteUri, Id = ex.LinkedArtifactUri.Split('/').Last(), Title = ex.Comment }); } } } foreach (var cs in changesets) { foreach (var wi in cs.Related) { WorkItemInfo rel; if(workItems.TryGetValue(wi.Id, out rel)) rel.Changesets.Add(cs); } } return workItems.Values.ToList(); } } }
zdeslav/tfinfo
tfinfo/TfsInfo.cs
C#
mit
6,508
[ 30522, 2478, 2291, 1025, 2478, 2291, 1012, 30524, 14876, 18426, 3508, 1012, 7396, 1025, 2478, 7513, 1012, 2136, 14876, 18426, 3508, 1012, 2147, 4221, 20492, 22648, 6834, 1012, 7396, 1025, 2478, 7513, 1012, 2136, 14876, 18426, 3508, 1012, 25...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<!DOCTYPE HTML> <html> <head> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>Jasmine Spec Runner v2.0.0</title> <link rel="shortcut icon" type="image/png" href="lib/jasmine-2.0.0/jasmine_favicon.png"> <link rel="stylesheet" type="text/css" href="lib/jasmine-2.0.0/jasmine.css"> <script type="text/javascript" src="lib/jasmine-2.0.0/jasmine.js"></script> <script type="text/javascript" src="lib/jasmine-2.0.0/jasmine-html.js"></script> <script type="text/javascript" src="lib/jasmine-2.0.0/boot.js"></script> <!-- include source files here... --> <script type="text/javascript" src="../seecret-1.0.min.js"></script> <!-- include spec files here... --> <script type="text/javascript" src="spec/SeecretSpec.js"></script> </head> <body> </body> </html>
simpledynamics/seecret
javascript/tests/SpecRunner.html
HTML
mit
800
[ 30522, 1026, 999, 9986, 13874, 16129, 1028, 1026, 16129, 1028, 1026, 2132, 1028, 1026, 18804, 8299, 1011, 1041, 15549, 2615, 1027, 1000, 4180, 1011, 2828, 1000, 4180, 1027, 1000, 3793, 1013, 16129, 1025, 25869, 13462, 1027, 21183, 2546, 101...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/*! Pushy - v0.9.2 - 2014-9-13 * Pushy is a responsive off-canvas navigation menu using CSS transforms & transitions. * https://github.com/christophery/pushy/ * by Christopher Yee */ /* Menu Appearance */ .pushy{ position: fixed; width: 200px; height: 100%; top: 0; z-index: 9999; background: #333332; font-size: 0.9em; font-weight: bold; -webkit-box-shadow: inset -10px 0 6px -9px rgba(0, 0, 0, .7); -moz-box-shadow: inset -10px 0 6px -9px rgba(0, 0, 0, .7); box-shadow: inset -10px 0 6px -9px rgba(0, 0, 0, .7); overflow: auto; -webkit-overflow-scrolling: touch; /* enables momentum scrolling in iOS overflow elements */ } .pushy a{ display: block; color: #b3b3b1; padding: 15px 30px; border-bottom: 1px solid rgba(0, 0, 0, .1); border-top: 1px solid rgba(255, 255, 255, .1); text-decoration: none; } .pushy a:hover{ background: #00b4ff; color: #FFF; } /* Menu Movement */ .pushy-left{ -webkit-transform: translate3d(-200px,0,0); -moz-transform: translate3d(-200px,0,0); -ms-transform: translate3d(-200px,0,0); -o-transform: translate3d(-200px,0,0); transform: translate3d(-200px,0,0); } .pushy-open{ -webkit-transform: translate3d(0,0,0); -moz-transform: translate3d(0,0,0); -ms-transform: translate3d(0,0,0); -o-transform: translate3d(0,0,0); transform: translate3d(0,0,0); } .container-push, .push-push{ -webkit-transform: translate3d(200px,0,0); -moz-transform: translate3d(200px,0,0); -ms-transform: translate3d(200px,0,0); -o-transform: translate3d(200px,0,0); transform: translate3d(200px,0,0); } /* Menu Transitions */ .pushy, #container, .push{ -webkit-transition: -webkit-transform .2s cubic-bezier(.16, .68, .43, .99); -moz-transition: -moz-transform .2s cubic-bezier(.16, .68, .43, .99); -o-transition: -o-transform .2s cubic-bezier(.16, .68, .43, .99); transition: transform .2s cubic-bezier(.16, .68, .43, .99); } /* Site Overlay */ .site-overlay{ display: none; } .pushy-active .site-overlay{ display: block; position: fixed; top: 0; right: 0; bottom: 0; left: 0; z-index: 9998; 
background-color: rgba(0,0,0,0.5); -webkit-animation: fade 500ms; -moz-animation: fade 500ms; -ms-animation: fade 500ms; -o-animation: fade 500ms; animation: fade 500ms; } @keyframes fade{ 0% { opacity: 0; } 100% { opacity: 1; } } @-moz-keyframes fade{ 0% { opacity: 0; } 100% { opacity: 1; } } @-webkit-keyframes fade{ 0% { opacity: 0; } 100% { opacity: 1; } } @-ms-keyframes fade{ 0% { opacity: 0; } 100% { opacity: 1; } }​ @-o-keyframes fade{ 0% { opacity: 0; } 100% { opacity: 1; } } /* Example Media Query */ @media screen and (max-width: 768px){ .pushy{ font-size: 1.0em; } }
esadhar/drupal-website
sites/all/themes/gui/esadhar/bower_components/pushy/css/pushy.css
CSS
gpl-2.0
2,883
[ 30522, 1013, 1008, 999, 5245, 2100, 1011, 1058, 2692, 1012, 1023, 1012, 1016, 1011, 2297, 1011, 1023, 1011, 2410, 1008, 5245, 2100, 2003, 1037, 26651, 2125, 1011, 10683, 9163, 12183, 2478, 20116, 2015, 21743, 1004, 22166, 1012, 1008, 16770,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/* $Id$ */ /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.manifoldcf.crawler.tests; import org.apache.manifoldcf.core.interfaces.*; import org.apache.manifoldcf.agents.interfaces.*; import org.apache.manifoldcf.crawler.interfaces.*; import org.apache.manifoldcf.crawler.system.ManifoldCF; import java.io.*; import java.util.*; /** This is a test whether we deal with changes to configuration properly */ public class ConnectionChangeTester { protected final ManifoldCFInstance instance; public ConnectionChangeTester(ManifoldCFInstance instance) { this.instance = instance; } public void executeTest() throws Exception { instance.start(); // Hey, we were able to install the file system connector etc. // Now, create a local test job and run it. IThreadContext tc = ThreadContextFactory.make(); // Create a basic file system connection, and save it. 
IRepositoryConnectionManager mgr = RepositoryConnectionManagerFactory.make(tc); IRepositoryConnection conn = mgr.create(); conn.setName("ConnectionChangeTest Connection"); conn.setDescription("ConnectionChangeTest Connection"); conn.setClassName("org.apache.manifoldcf.crawler.tests.ConnectionChangeRepositoryConnector"); conn.setMaxConnections(100); // Now, save mgr.save(conn); // Create a basic null output connection, and save it. IOutputConnectionManager outputMgr = OutputConnectionManagerFactory.make(tc); IOutputConnection outputConn = outputMgr.create(); outputConn.setName("Null Connection"); outputConn.setDescription("Null Connection"); outputConn.setClassName("org.apache.manifoldcf.agents.tests.TestingOutputConnector"); outputConn.setMaxConnections(100); // Now, save outputMgr.save(outputConn); // Create a job. IJobManager jobManager = JobManagerFactory.make(tc); IJobDescription job = jobManager.createJob(); job.setDescription("Test Job"); job.setConnectionName("ConnectionChangeTest Connection"); job.addPipelineStage(-1,true,"Null Connection",""); //job.setOutputConnectionName("Null Connection"); job.setType(job.TYPE_SPECIFIED); job.setStartMethod(job.START_DISABLE); job.setHopcountMode(job.HOPCOUNT_ACCURATE); // Save the job. jobManager.save(job); // Now, start the job, and wait until it is running. jobManager.manualStart(job.getID()); instance.waitJobRunningNative(jobManager,job.getID(),30000L); // Now, update the connection to allow the job to finish. conn = mgr.load("ConnectionChangeTest Connection"); ConfigParams cp = conn.getConfigParams(); cp.setParameter("proceed","true"); mgr.save(conn); // Wait for the job to become inactive. The time should not exceed 10 seconds for the actual crawl. instance.waitJobInactiveNative(jobManager,job.getID(),30000L); // The document will be skipped in the end. 
if (jobManager.getStatus(job.getID()).getDocumentsProcessed() != 10) throw new Exception("Expected 10 documents, saw "+jobManager.getStatus(job.getID()).getDocumentsProcessed()); // Now, delete the job. jobManager.deleteJob(job.getID()); instance.waitJobDeletedNative(jobManager,job.getID(),30000L); // Shut down instance2 instance.stop(); } }
cogfor/mcf-cogfor
framework/pull-agent/src/test/java/org/apache/manifoldcf/crawler/tests/ConnectionChangeTester.java
Java
apache-2.0
4,071
[ 30522, 1013, 1008, 1002, 8909, 1002, 1008, 1013, 1013, 1008, 1008, 1008, 7000, 2000, 1996, 15895, 4007, 3192, 1006, 2004, 2546, 1007, 2104, 2028, 2030, 2062, 1008, 12130, 6105, 10540, 1012, 2156, 1996, 5060, 5371, 5500, 2007, 1008, 2023, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/** * This package contains the services of the server and the locator. * * @author Nicolas SYMPHORIEN (nicolas.symphorien@gmail.com) * */ package com.seikomi.janus.services;
Seikomi/Janus-Server
src/main/java/com/seikomi/janus/services/package-info.java
Java
gpl-3.0
187
[ 30522, 1013, 1008, 1008, 1008, 2023, 7427, 3397, 1996, 2578, 1997, 1996, 30524, 4012, 1007, 1008, 1008, 1013, 7427, 4012, 1012, 7367, 12676, 4328, 1012, 5553, 2271, 1012, 2578, 1025, 102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN"> <html> <head> <!-- File : $Source: /cvsroot/jchart2d/jchart2d/src/info/monitorenter/gui/chart/controls/errorbarwizard/package.html,v $ Date : $Date: 2010/06/01 21:34:28 $ Version: $Revision: 1.2 $ This library is part of jchart2d - the Open Source real time charting library. Copyright (c) 2007 - 2010 Achim Westermann This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA --> </head> <body bgcolor="white"> GUI of the error bar wizard of jchart2d.<p> This package contains the error bar wizard of jjchart2d. <p> Don't look here for reusable classes or advice how to use jchart2d. This UI accesses the error bar API in package info.monitorenter.gui.chart.errorbars. <p> <!-- Put @see and @since tags down here. --> @version $Revision: 1.2 $ @since 2.1.0 </body> </html>
cheshirekow/codebase
third_party/lcm/lcm-java/jchart2d-code/src/info/monitorenter/gui/chart/controls/errorbarwizard/package.html
HTML
gpl-3.0
1,505
[ 30522, 1026, 999, 9986, 13874, 16129, 2270, 1000, 1011, 1013, 1013, 1059, 2509, 2278, 1013, 1013, 26718, 2094, 16129, 1017, 1012, 1016, 2345, 1013, 1013, 4372, 30524, 1011, 1011, 5371, 1024, 1002, 3120, 1024, 1013, 26226, 21338, 17206, 1013...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
import React from 'react' export default class NoteItem extends React.Component { render () { return ( <div> <p>Category = {this.props.note.category}</p> <p>The Note = {this.props.note.noteText}</p> <hr /> </div> ) } }
josedigital/koala-app
src/components/Note/NoteItem.js
JavaScript
mit
276
[ 30522, 12324, 10509, 2013, 1005, 10509, 1005, 9167, 12398, 2465, 3602, 4221, 2213, 8908, 10509, 1012, 6922, 1063, 17552, 1006, 1007, 1063, 2709, 1006, 1026, 4487, 2615, 1028, 1026, 1052, 1028, 4696, 1027, 1063, 2023, 1012, 24387, 1012, 3602...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
import datetime import typing from . import helpers from .tl import types, custom Phone = str Username = str PeerID = int Entity = typing.Union[types.User, types.Chat, types.Channel] FullEntity = typing.Union[types.UserFull, types.messages.ChatFull, types.ChatFull, types.ChannelFull] EntityLike = typing.Union[ Phone, Username, PeerID, types.TypePeer, types.TypeInputPeer, Entity, FullEntity ] EntitiesLike = typing.Union[EntityLike, typing.Sequence[EntityLike]] ButtonLike = typing.Union[types.TypeKeyboardButton, custom.Button] MarkupLike = typing.Union[ types.TypeReplyMarkup, ButtonLike, typing.Sequence[ButtonLike], typing.Sequence[typing.Sequence[ButtonLike]] ] TotalList = helpers.TotalList DateLike = typing.Optional[typing.Union[float, datetime.datetime, datetime.date, datetime.timedelta]] LocalPath = str ExternalUrl = str BotFileID = str FileLike = typing.Union[ LocalPath, ExternalUrl, BotFileID, bytes, typing.BinaryIO, types.TypeMessageMedia, types.TypeInputFile, types.TypeInputFileLocation ] # Can't use `typing.Type` in Python 3.5.2 # See https://github.com/python/typing/issues/266 try: OutFileLike = typing.Union[ str, typing.Type[bytes], typing.BinaryIO ] except TypeError: OutFileLike = typing.Union[ str, typing.BinaryIO ] MessageLike = typing.Union[str, types.Message] MessageIDLike = typing.Union[int, types.Message, types.TypeInputMessage] ProgressCallback = typing.Callable[[int, int], None]
expectocode/Telethon
telethon/hints.py
Python
mit
1,562
[ 30522, 12324, 3058, 7292, 12324, 22868, 2013, 1012, 12324, 2393, 2545, 2013, 1012, 1056, 2140, 12324, 4127, 1010, 7661, 3042, 1027, 2358, 2099, 5310, 18442, 1027, 2358, 2099, 8152, 3593, 1027, 20014, 9178, 1027, 22868, 1012, 2586, 1031, 412...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/// <reference path="Transform3D.ts" /> namespace zen { export class GameObject extends Transform3D { public name:string = "GameObject"; public tag:string = ""; public layer:string = ""; private _guid:string = zen.guid.create(); public get guid() { return this._guid; } private _app:Application; constructor(app:Application) { super(); this._app = app; } private _components:{[key:string]:Component} = {}; public addComponent(component:Component):void { let system:System = this._app.systemManager.getSystem(component.type); if(system) { if(!this._components[component.type]) { component.gameObject = this; system.addComponent(this, component); this._components[component.type] = component; } else { console.error("Game Object already has " + component.type + " Component"); } } else { console.error("System: " + component.type + " doesn't exist"); } } public getComponent<T extends Component>(type:ComponentType | number):T { return <T>this._components[type]; } public removeComponent(component:Component):void { let system:System = this._app.systemManager.getSystem(component.type); if(system) { if(this._components[component.type]) { component.gameObject = null; system.removeComponent(this); delete this._components[component.type]; } else { console.error("Game Object doesn't have " + component.type + " Component"); } } else { console.error("System: " + component.type + " doesn't exist"); } } } }
shawn0326/zen
src/GameObject.ts
TypeScript
mit
1,986
[ 30522, 1013, 1013, 1013, 1026, 4431, 4130, 1027, 1000, 10938, 29097, 1012, 24529, 1000, 1013, 1028, 3415, 15327, 16729, 1063, 9167, 2465, 2208, 16429, 20614, 8908, 10938, 29097, 1063, 2270, 2171, 1024, 5164, 1027, 1000, 2208, 16429, 20614, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Verifies that the types and values of const and static items // are safe. The rules enforced by this module are: // // - For each *mutable* static item, it checks that its **type**: // - doesn't have a destructor // - doesn't own a box // // - For each *immutable* static item, it checks that its **value**: // - doesn't own a box // - doesn't contain a struct literal or a call to an enum variant / struct constructor where // - the type of the struct/enum has a dtor // // Rules Enforced Elsewhere: // - It's not possible to take the address of a static item with unsafe interior. This is enforced // by borrowck::gather_loans use middle::cast::{CastKind}; use middle::const_eval; use middle::const_eval::EvalHint::ExprTypeChecked; use middle::def; use middle::expr_use_visitor as euv; use middle::infer; use middle::mem_categorization as mc; use middle::traits; use middle::ty::{self, Ty}; use util::nodemap::NodeMap; use syntax::ast; use syntax::codemap::Span; use syntax::visit::{self, Visitor}; use std::collections::hash_map::Entry; use std::cmp::Ordering; // Const qualification, from partial to completely promotable. bitflags! { #[derive(RustcEncodable, RustcDecodable)] flags ConstQualif: u8 { // Inner mutability (can not be placed behind a reference) or behind // &mut in a non-global expression. Can be copied from static memory. const MUTABLE_MEM = 1 << 0, // Constant value with a type that implements Drop. Can be copied // from static memory, similar to MUTABLE_MEM. 
const NEEDS_DROP = 1 << 1, // Even if the value can be placed in static memory, copying it from // there is more expensive than in-place instantiation, and/or it may // be too large. This applies to [T; N] and everything containing it. // N.B.: references need to clear this flag to not end up on the stack. const PREFER_IN_PLACE = 1 << 2, // May use more than 0 bytes of memory, doesn't impact the constness // directly, but is not allowed to be borrowed mutably in a constant. const NON_ZERO_SIZED = 1 << 3, // Actually borrowed, has to always be in static memory. Does not // propagate, and requires the expression to behave like a 'static // lvalue. The set of expressions with this flag is the minimum // that have to be promoted. const HAS_STATIC_BORROWS = 1 << 4, // Invalid const for miscellaneous reasons (e.g. not implemented). const NOT_CONST = 1 << 5, // Borrowing the expression won't produce &'static T if any of these // bits are set, though the value could be copied from static memory // if `NOT_CONST` isn't set. const NON_STATIC_BORROWS = ConstQualif::MUTABLE_MEM.bits | ConstQualif::NEEDS_DROP.bits | ConstQualif::NOT_CONST.bits } } #[derive(Copy, Clone, Eq, PartialEq)] enum Mode { Const, ConstFn, Static, StaticMut, // An expression that occurs outside of any constant context // (i.e. `const`, `static`, array lengths, etc.). The value // can be variable at runtime, but will be promotable to // static memory if we can prove it is actually constant. 
Var, } struct CheckCrateVisitor<'a, 'tcx: 'a> { tcx: &'a ty::ctxt<'tcx>, mode: Mode, qualif: ConstQualif, rvalue_borrows: NodeMap<ast::Mutability> } impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> { fn with_mode<F, R>(&mut self, mode: Mode, f: F) -> R where F: FnOnce(&mut CheckCrateVisitor<'a, 'tcx>) -> R, { let (old_mode, old_qualif) = (self.mode, self.qualif); self.mode = mode; self.qualif = ConstQualif::empty(); let r = f(self); self.mode = old_mode; self.qualif = old_qualif; r } fn with_euv<'b, F, R>(&'b mut self, item_id: Option<ast::NodeId>, f: F) -> R where F: for<'t> FnOnce(&mut euv::ExprUseVisitor<'b, 't, 'b, 'tcx>) -> R, { let param_env = match item_id { Some(item_id) => ty::ParameterEnvironment::for_item(self.tcx, item_id), None => self.tcx.empty_parameter_environment() }; let infcx = infer::new_infer_ctxt(self.tcx, &self.tcx.tables, Some(param_env), false); f(&mut euv::ExprUseVisitor::new(self, &infcx)) } fn global_expr(&mut self, mode: Mode, expr: &ast::Expr) -> ConstQualif { assert!(mode != Mode::Var); match self.tcx.const_qualif_map.borrow_mut().entry(expr.id) { Entry::Occupied(entry) => return *entry.get(), Entry::Vacant(entry) => { // Prevent infinite recursion on re-entry. entry.insert(ConstQualif::empty()); } } self.with_mode(mode, |this| { this.with_euv(None, |euv| euv.consume_expr(expr)); this.visit_expr(expr); this.qualif }) } fn fn_like(&mut self, fk: visit::FnKind, fd: &ast::FnDecl, b: &ast::Block, s: Span, fn_id: ast::NodeId) -> ConstQualif { match self.tcx.const_qualif_map.borrow_mut().entry(fn_id) { Entry::Occupied(entry) => return *entry.get(), Entry::Vacant(entry) => { // Prevent infinite recursion on re-entry. entry.insert(ConstQualif::empty()); } } let mode = match fk { visit::FkItemFn(_, _, _, ast::Constness::Const, _, _) => { Mode::ConstFn } visit::FkMethod(_, m, _) => { if m.constness == ast::Constness::Const { Mode::ConstFn } else { Mode::Var } } _ => Mode::Var }; // Ensure the arguments are simple, not mutable/by-ref or patterns. 
if mode == Mode::ConstFn { for arg in &fd.inputs { match arg.pat.node { ast::PatIdent(ast::BindByValue(ast::MutImmutable), _, None) => {} _ => { span_err!(self.tcx.sess, arg.pat.span, E0022, "arguments of constant functions can only \ be immutable by-value bindings"); } } } } let qualif = self.with_mode(mode, |this| { this.with_euv(Some(fn_id), |euv| euv.walk_fn(fd, b)); visit::walk_fn(this, fk, fd, b, s); this.qualif }); // Keep only bits that aren't affected by function body (NON_ZERO_SIZED), // and bits that don't change semantics, just optimizations (PREFER_IN_PLACE). let qualif = qualif & (ConstQualif::NON_ZERO_SIZED | ConstQualif::PREFER_IN_PLACE); self.tcx.const_qualif_map.borrow_mut().insert(fn_id, qualif); qualif } fn add_qualif(&mut self, qualif: ConstQualif) { self.qualif = self.qualif | qualif; } /// Returns true if the call is to a const fn or method. fn handle_const_fn_call(&mut self, expr: &ast::Expr, def_id: ast::DefId, ret_ty: Ty<'tcx>) -> bool { if let Some(fn_like) = const_eval::lookup_const_fn_by_id(self.tcx, def_id) { if // we are in a static/const initializer self.mode != Mode::Var && // feature-gate is not enabled !self.tcx.sess.features.borrow().const_fn && // this doesn't come from a macro that has #[allow_internal_unstable] !self.tcx.sess.codemap().span_allows_unstable(expr.span) { self.tcx.sess.span_err( expr.span, &format!("const fns are an unstable feature")); fileline_help!( self.tcx.sess, expr.span, "in Nightly builds, add `#![feature(const_fn)]` to the crate \ attributes to enable"); } let qualif = self.fn_like(fn_like.kind(), fn_like.decl(), fn_like.body(), fn_like.span(), fn_like.id()); self.add_qualif(qualif); if ret_ty.type_contents(self.tcx).interior_unsafe() { self.add_qualif(ConstQualif::MUTABLE_MEM); } true } else { false } } fn record_borrow(&mut self, id: ast::NodeId, mutbl: ast::Mutability) { match self.rvalue_borrows.entry(id) { Entry::Occupied(mut entry) => { // Merge the two borrows, taking the most demanding // one, 
mutability-wise. if mutbl == ast::MutMutable { entry.insert(mutbl); } } Entry::Vacant(entry) => { entry.insert(mutbl); } } } fn msg(&self) -> &'static str { match self.mode { Mode::Const => "constant", Mode::ConstFn => "constant function", Mode::StaticMut | Mode::Static => "static", Mode::Var => unreachable!(), } } fn check_static_mut_type(&self, e: &ast::Expr) { let node_ty = self.tcx.node_id_to_type(e.id); let tcontents = node_ty.type_contents(self.tcx); let suffix = if tcontents.has_dtor() { "destructors" } else if tcontents.owns_owned() { "boxes" } else { return }; span_err!(self.tcx.sess, e.span, E0397, "mutable statics are not allowed to have {}", suffix); } fn check_static_type(&self, e: &ast::Expr) { let ty = self.tcx.node_id_to_type(e.id); let infcx = infer::new_infer_ctxt(self.tcx, &self.tcx.tables, None, false); let cause = traits::ObligationCause::new(e.span, e.id, traits::SharedStatic); let mut fulfill_cx = infcx.fulfillment_cx.borrow_mut(); fulfill_cx.register_builtin_bound(&infcx, ty, ty::BoundSync, cause); match fulfill_cx.select_all_or_error(&infcx) { Ok(()) => { }, Err(ref errors) => { traits::report_fulfillment_errors(&infcx, errors); } } } } impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> { fn visit_item(&mut self, i: &ast::Item) { debug!("visit_item(item={})", self.tcx.map.node_to_string(i.id)); match i.node { ast::ItemStatic(_, ast::MutImmutable, ref expr) => { self.check_static_type(&**expr); self.global_expr(Mode::Static, &**expr); } ast::ItemStatic(_, ast::MutMutable, ref expr) => { self.check_static_mut_type(&**expr); self.global_expr(Mode::StaticMut, &**expr); } ast::ItemConst(_, ref expr) => { self.global_expr(Mode::Const, &**expr); } ast::ItemEnum(ref enum_definition, _) => { for var in &enum_definition.variants { if let Some(ref ex) = var.node.disr_expr { self.global_expr(Mode::Const, &**ex); } } } _ => { self.with_mode(Mode::Var, |v| visit::walk_item(v, i)); } } } fn visit_trait_item(&mut self, t: &'v ast::TraitItem) { 
match t.node { ast::ConstTraitItem(_, ref default) => { if let Some(ref expr) = *default { self.global_expr(Mode::Const, &*expr); } else { visit::walk_trait_item(self, t); } } _ => self.with_mode(Mode::Var, |v| visit::walk_trait_item(v, t)), } } fn visit_impl_item(&mut self, i: &'v ast::ImplItem) { match i.node { ast::ConstImplItem(_, ref expr) => { self.global_expr(Mode::Const, &*expr); } _ => self.with_mode(Mode::Var, |v| visit::walk_impl_item(v, i)), } } fn visit_fn(&mut self, fk: visit::FnKind<'v>, fd: &'v ast::FnDecl, b: &'v ast::Block, s: Span, fn_id: ast::NodeId) { self.fn_like(fk, fd, b, s, fn_id); } fn visit_pat(&mut self, p: &ast::Pat) { match p.node { ast::PatLit(ref lit) => { self.global_expr(Mode::Const, &**lit); } ast::PatRange(ref start, ref end) => { self.global_expr(Mode::Const, &**start); self.global_expr(Mode::Const, &**end); match const_eval::compare_lit_exprs(self.tcx, start, end) { Some(Ordering::Less) | Some(Ordering::Equal) => {} Some(Ordering::Greater) => { span_err!(self.tcx.sess, start.span, E0030, "lower range bound must be less than or equal to upper"); } None => { self.tcx.sess.span_bug( start.span, "literals of different types in range pat"); } } } _ => visit::walk_pat(self, p) } } fn visit_block(&mut self, block: &ast::Block) { // Check all statements in the block for stmt in &block.stmts { let span = match stmt.node { ast::StmtDecl(ref decl, _) => { match decl.node { ast::DeclLocal(_) => decl.span, // Item statements are allowed ast::DeclItem(_) => continue } } ast::StmtExpr(ref expr, _) => expr.span, ast::StmtSemi(ref semi, _) => semi.span, ast::StmtMac(..) 
=> { self.tcx.sess.span_bug(stmt.span, "unexpanded statement \ macro in const?!") } }; self.add_qualif(ConstQualif::NOT_CONST); if self.mode != Mode::Var { span_err!(self.tcx.sess, span, E0016, "blocks in {}s are limited to items and \ tail expressions", self.msg()); } } visit::walk_block(self, block); } fn visit_expr(&mut self, ex: &ast::Expr) { let mut outer = self.qualif; self.qualif = ConstQualif::empty(); let node_ty = self.tcx.node_id_to_type(ex.id); check_expr(self, ex, node_ty); check_adjustments(self, ex); // Special-case some expressions to avoid certain flags bubbling up. match ex.node { ast::ExprCall(ref callee, ref args) => { for arg in args { self.visit_expr(&**arg) } let inner = self.qualif; self.visit_expr(&**callee); // The callee's size doesn't count in the call. let added = self.qualif - inner; self.qualif = inner | (added - ConstQualif::NON_ZERO_SIZED); } ast::ExprRepeat(ref element, _) => { self.visit_expr(&**element); // The count is checked elsewhere (typeck). let count = match node_ty.sty { ty::TyArray(_, n) => n, _ => unreachable!() }; // [element; 0] is always zero-sized. if count == 0 { self.qualif.remove(ConstQualif::NON_ZERO_SIZED | ConstQualif::PREFER_IN_PLACE); } } ast::ExprMatch(ref discr, ref arms, _) => { // Compute the most demanding borrow from all the arms' // patterns and set that on the discriminator. let mut borrow = None; for pat in arms.iter().flat_map(|arm| &arm.pats) { let pat_borrow = self.rvalue_borrows.remove(&pat.id); match (borrow, pat_borrow) { (None, _) | (_, Some(ast::MutMutable)) => { borrow = pat_borrow; } _ => {} } } if let Some(mutbl) = borrow { self.record_borrow(discr.id, mutbl); } visit::walk_expr(self, ex); } // Division by zero and overflow checking. 
ast::ExprBinary(op, _, _) => { visit::walk_expr(self, ex); let div_or_rem = op.node == ast::BiDiv || op.node == ast::BiRem; match node_ty.sty { ty::TyUint(_) | ty::TyInt(_) if div_or_rem => { if !self.qualif.intersects(ConstQualif::NOT_CONST) { match const_eval::eval_const_expr_partial( self.tcx, ex, ExprTypeChecked) { Ok(_) => {} Err(msg) => { span_err!(self.tcx.sess, msg.span, E0020, "{} in a constant expression", msg.description()) } } } } _ => {} } } _ => visit::walk_expr(self, ex) } // Handle borrows on (or inside the autorefs of) this expression. match self.rvalue_borrows.remove(&ex.id) { Some(ast::MutImmutable) => { // Constants cannot be borrowed if they contain interior mutability as // it means that our "silent insertion of statics" could change // initializer values (very bad). // If the type doesn't have interior mutability, then `ConstQualif::MUTABLE_MEM` has // propagated from another error, so erroring again would be just noise. let tc = node_ty.type_contents(self.tcx); if self.qualif.intersects(ConstQualif::MUTABLE_MEM) && tc.interior_unsafe() { outer = outer | ConstQualif::NOT_CONST; if self.mode != Mode::Var { self.tcx.sess.span_err(ex.span, "cannot borrow a constant which contains \ interior mutability, create a static instead"); } } // If the reference has to be 'static, avoid in-place initialization // as that will end up pointing to the stack instead. if !self.qualif.intersects(ConstQualif::NON_STATIC_BORROWS) { self.qualif = self.qualif - ConstQualif::PREFER_IN_PLACE; self.add_qualif(ConstQualif::HAS_STATIC_BORROWS); } } Some(ast::MutMutable) => { // `&mut expr` means expr could be mutated, unless it's zero-sized. 
if self.qualif.intersects(ConstQualif::NON_ZERO_SIZED) { if self.mode == Mode::Var { outer = outer | ConstQualif::NOT_CONST; self.add_qualif(ConstQualif::MUTABLE_MEM); } else { span_err!(self.tcx.sess, ex.span, E0017, "references in {}s may only refer \ to immutable values", self.msg()) } } if !self.qualif.intersects(ConstQualif::NON_STATIC_BORROWS) { self.add_qualif(ConstQualif::HAS_STATIC_BORROWS); } } None => {} } self.tcx.const_qualif_map.borrow_mut().insert(ex.id, self.qualif); // Don't propagate certain flags. self.qualif = outer | (self.qualif - ConstQualif::HAS_STATIC_BORROWS); } } /// This function is used to enforce the constraints on /// const/static items. It walks through the *value* /// of the item walking down the expression and evaluating /// every nested expression. If the expression is not part /// of a const/static item, it is qualified for promotion /// instead of producing errors. fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &ast::Expr, node_ty: Ty<'tcx>) { match node_ty.sty { ty::TyStruct(did, _) | ty::TyEnum(did, _) if v.tcx.has_dtor(did) => { v.add_qualif(ConstQualif::NEEDS_DROP); if v.mode != Mode::Var { v.tcx.sess.span_err(e.span, &format!("{}s are not allowed to have destructors", v.msg())); } } _ => {} } let method_call = ty::MethodCall::expr(e.id); match e.node { ast::ExprUnary(..) | ast::ExprBinary(..) | ast::ExprIndex(..) if v.tcx.tables.borrow().method_map.contains_key(&method_call) => { v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { span_err!(v.tcx.sess, e.span, E0011, "user-defined operators are not allowed in {}s", v.msg()); } } ast::ExprBox(..) 
| ast::ExprUnary(ast::UnUniq, _) => { v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { span_err!(v.tcx.sess, e.span, E0010, "allocations are not allowed in {}s", v.msg()); } } ast::ExprUnary(op, ref inner) => { match v.tcx.node_id_to_type(inner.id).sty { ty::TyRawPtr(_) => { assert!(op == ast::UnDeref); v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { span_err!(v.tcx.sess, e.span, E0396, "raw pointers cannot be dereferenced in {}s", v.msg()); } } _ => {} } } ast::ExprBinary(op, ref lhs, _) => { match v.tcx.node_id_to_type(lhs.id).sty { ty::TyRawPtr(_) => { assert!(op.node == ast::BiEq || op.node == ast::BiNe || op.node == ast::BiLe || op.node == ast::BiLt || op.node == ast::BiGe || op.node == ast::BiGt); v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { span_err!(v.tcx.sess, e.span, E0395, "raw pointers cannot be compared in {}s", v.msg()); } } _ => {} } } ast::ExprCast(ref from, _) => { debug!("Checking const cast(id={})", from.id); match v.tcx.cast_kinds.borrow().get(&from.id) { None => v.tcx.sess.span_bug(e.span, "no kind for cast"), Some(&CastKind::PtrAddrCast) | Some(&CastKind::FnPtrAddrCast) => { v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { span_err!(v.tcx.sess, e.span, E0018, "raw pointers cannot be cast to integers in {}s", v.msg()); } } _ => {} } } ast::ExprPath(..) => { let def = v.tcx.def_map.borrow().get(&e.id).map(|d| d.full_def()); match def { Some(def::DefVariant(_, _, _)) => { // Count the discriminator or function pointer. v.add_qualif(ConstQualif::NON_ZERO_SIZED); } Some(def::DefStruct(_)) => { if let ty::TyBareFn(..) = node_ty.sty { // Count the function pointer. v.add_qualif(ConstQualif::NON_ZERO_SIZED); } } Some(def::DefFn(..)) | Some(def::DefMethod(..)) => { // Count the function pointer. 
v.add_qualif(ConstQualif::NON_ZERO_SIZED); } Some(def::DefStatic(..)) => { match v.mode { Mode::Static | Mode::StaticMut => {} Mode::Const | Mode::ConstFn => { span_err!(v.tcx.sess, e.span, E0013, "{}s cannot refer to other statics, insert \ an intermediate constant instead", v.msg()); } Mode::Var => v.add_qualif(ConstQualif::NOT_CONST) } } Some(def::DefConst(did)) | Some(def::DefAssociatedConst(did, _)) => { if let Some(expr) = const_eval::lookup_const_by_id(v.tcx, did, Some(e.id)) { let inner = v.global_expr(Mode::Const, expr); v.add_qualif(inner); } else { v.tcx.sess.span_bug(e.span, "DefConst or DefAssociatedConst \ doesn't point to a constant"); } } Some(def::DefLocal(_)) if v.mode == Mode::ConstFn => { // Sadly, we can't determine whether the types are zero-sized. v.add_qualif(ConstQualif::NOT_CONST | ConstQualif::NON_ZERO_SIZED); } def => { v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { debug!("(checking const) found bad def: {:?}", def); span_err!(v.tcx.sess, e.span, E0014, "paths in {}s may only refer to constants \ or functions", v.msg()); } } } } ast::ExprCall(ref callee, _) => { let mut callee = &**callee; loop { callee = match callee.node { ast::ExprParen(ref inner) => &**inner, ast::ExprBlock(ref block) => match block.expr { Some(ref tail) => &**tail, None => break }, _ => break }; } let def = v.tcx.def_map.borrow().get(&callee.id).map(|d| d.full_def()); let is_const = match def { Some(def::DefStruct(..)) => true, Some(def::DefVariant(..)) => { // Count the discriminator. v.add_qualif(ConstQualif::NON_ZERO_SIZED); true } Some(def::DefMethod(did, def::FromImpl(_))) | Some(def::DefFn(did, _)) => { v.handle_const_fn_call(e, did, node_ty) } _ => false }; if !is_const { v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { span_err!(v.tcx.sess, e.span, E0015, "function calls in {}s are limited to \ constant functions, \ struct and enum constructors", v.msg()); } } } ast::ExprMethodCall(..) 
=> { let method = v.tcx.tables.borrow().method_map[&method_call]; let is_const = match v.tcx.impl_or_trait_item(method.def_id).container() { ty::ImplContainer(_) => v.handle_const_fn_call(e, method.def_id, node_ty), ty::TraitContainer(_) => false }; if !is_const { v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { span_err!(v.tcx.sess, e.span, E0378, "method calls in {}s are limited to \ constant inherent methods", v.msg()); } } } ast::ExprStruct(..) => { let did = v.tcx.def_map.borrow().get(&e.id).map(|def| def.def_id()); if did == v.tcx.lang_items.unsafe_cell_type() { v.add_qualif(ConstQualif::MUTABLE_MEM); } } ast::ExprLit(_) | ast::ExprAddrOf(..) => { v.add_qualif(ConstQualif::NON_ZERO_SIZED); } ast::ExprRepeat(..) => { v.add_qualif(ConstQualif::PREFER_IN_PLACE); } ast::ExprClosure(..) => { // Paths in constant contexts cannot refer to local variables, // as there are none, and thus closures can't have upvars there. if v.tcx.with_freevars(e.id, |fv| !fv.is_empty()) { assert!(v.mode == Mode::Var, "global closures can't capture anything"); v.add_qualif(ConstQualif::NOT_CONST); } } ast::ExprBlock(_) | ast::ExprIndex(..) | ast::ExprField(..) | ast::ExprTupField(..) | ast::ExprVec(_) | ast::ExprParen(..) | ast::ExprTup(..) => {} // Conditional control flow (possible to implement). ast::ExprMatch(..) | ast::ExprIf(..) | ast::ExprIfLet(..) | // Loops (not very meaningful in constants). ast::ExprWhile(..) | ast::ExprWhileLet(..) | ast::ExprForLoop(..) | ast::ExprLoop(..) | // More control flow (also not very meaningful). ast::ExprBreak(_) | ast::ExprAgain(_) | ast::ExprRet(_) | // Miscellaneous expressions that could be implemented. ast::ExprRange(..) | // Expressions with side-effects. ast::ExprAssign(..) | ast::ExprAssignOp(..) 
| ast::ExprInlineAsm(_) | ast::ExprMac(_) => { v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { span_err!(v.tcx.sess, e.span, E0019, "{} contains unimplemented expression type", v.msg()); } } } } /// Check the adjustments of an expression fn check_adjustments<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &ast::Expr) { match v.tcx.tables.borrow().adjustments.get(&e.id) { None | Some(&ty::AdjustReifyFnPointer) | Some(&ty::AdjustUnsafeFnPointer) => {} Some(&ty::AdjustDerefRef(ty::AutoDerefRef { autoderefs, .. })) => { if (0..autoderefs as u32).any(|autoderef| { v.tcx.is_overloaded_autoderef(e.id, autoderef) }) { v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { span_err!(v.tcx.sess, e.span, E0400, "user-defined dereference operators are not allowed in {}s", v.msg()); } } } } } pub fn check_crate(tcx: &ty::ctxt) { visit::walk_crate(&mut CheckCrateVisitor { tcx: tcx, mode: Mode::Var, qualif: ConstQualif::NOT_CONST, rvalue_borrows: NodeMap() }, tcx.map.krate()); tcx.sess.abort_if_errors(); } impl<'a, 'tcx> euv::Delegate<'tcx> for CheckCrateVisitor<'a, 'tcx> { fn consume(&mut self, _consume_id: ast::NodeId, consume_span: Span, cmt: mc::cmt, _mode: euv::ConsumeMode) { let mut cur = &cmt; loop { match cur.cat { mc::cat_static_item => { if self.mode != Mode::Var { // statics cannot be consumed by value at any time, that would imply // that they're an initializer (what a const is for) or kept in sync // over time (not feasible), so deny it outright. span_err!(self.tcx.sess, consume_span, E0394, "cannot refer to other statics by value, use the \ address-of operator or a constant instead"); } break; } mc::cat_deref(ref cmt, _, _) | mc::cat_downcast(ref cmt, _) | mc::cat_interior(ref cmt, _) => cur = cmt, mc::cat_rvalue(..) | mc::cat_upvar(..) | mc::cat_local(..) 
=> break } } } fn borrow(&mut self, borrow_id: ast::NodeId, borrow_span: Span, cmt: mc::cmt<'tcx>, _loan_region: ty::Region, bk: ty::BorrowKind, loan_cause: euv::LoanCause) { // Kind of hacky, but we allow Unsafe coercions in constants. // These occur when we convert a &T or *T to a *U, as well as // when making a thin pointer (e.g., `*T`) into a fat pointer // (e.g., `*Trait`). match loan_cause { euv::LoanCause::AutoUnsafe => { return; } _ => { } } let mut cur = &cmt; let mut is_interior = false; loop { match cur.cat { mc::cat_rvalue(..) => { if loan_cause == euv::MatchDiscriminant { // Ignore the dummy immutable borrow created by EUV. break; } let mutbl = bk.to_mutbl_lossy(); if mutbl == ast::MutMutable && self.mode == Mode::StaticMut { // Mutable slices are the only `&mut` allowed in // globals, but only in `static mut`, nowhere else. // FIXME: This exception is really weird... there isn't // any fundamental reason to restrict this based on // type of the expression. `&mut [1]` has exactly the // same representation as &mut 1. match cmt.ty.sty { ty::TyArray(_, _) | ty::TySlice(_) => break, _ => {} } } self.record_borrow(borrow_id, mutbl); break; } mc::cat_static_item => { if is_interior && self.mode != Mode::Var { // Borrowed statics can specifically *only* have their address taken, // not any number of other borrows such as borrowing fields, reading // elements of an array, etc. self.tcx.sess.span_err(borrow_span, "cannot refer to the interior of another \ static, use a constant instead"); } break; } mc::cat_deref(ref cmt, _, _) | mc::cat_downcast(ref cmt, _) | mc::cat_interior(ref cmt, _) => { is_interior = true; cur = cmt; } mc::cat_upvar(..) | mc::cat_local(..) 
=> break } } } fn decl_without_init(&mut self, _id: ast::NodeId, _span: Span) {} fn mutate(&mut self, _assignment_id: ast::NodeId, _assignment_span: Span, _assignee_cmt: mc::cmt, _mode: euv::MutateMode) {} fn matched_pat(&mut self, _: &ast::Pat, _: mc::cmt, _: euv::MatchMode) {} fn consume_pat(&mut self, _consume_pat: &ast::Pat, _cmt: mc::cmt, _mode: euv::ConsumeMode) {} }
jroesch/rust
src/librustc/middle/check_const.rs
Rust
apache-2.0
37,529
[ 30522, 1013, 1013, 9385, 2262, 1011, 2297, 1996, 18399, 2622, 9797, 1012, 2156, 1996, 9385, 1013, 1013, 5371, 2012, 1996, 2327, 1011, 2504, 14176, 1997, 2023, 4353, 1998, 2012, 1013, 1013, 8299, 1024, 1013, 1013, 18399, 1011, 11374, 1012, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<?php class Kirki_Customize_Textarea_Control extends WP_Customize_Control { public $type = 'textarea'; public $description = ''; public $subtitle = ''; public $separator = false; public $required; public function render_content() { ?> <label class="customizer-textarea"> <span class="customize-control-title"> <?php echo esc_html( $this->label ); ?> <?php if ( isset( $this->description ) && '' != $this->description ) { ?> <a href="#" class="button tooltip" title="<?php echo strip_tags( esc_html( $this->description ) ); ?>">?</a> <?php } ?> </span> <?php if ( '' != $this->subtitle ) : ?> <div class="customizer-subtitle"><?php echo $this->subtitle; ?></div> <?php endif; ?> <textarea class="of-input" rows="5" style="width:100%;" <?php $this->link(); ?>><?php echo esc_textarea( $this->value() ); ?></textarea> </label> <?php if ( $this->separator ) echo '<hr class="customizer-separator">'; ?> <?php foreach ( $this->required as $id => $value ) : if ( isset($id) && isset($value) && get_theme_mod($id,0)==$value ) { ?> <script> jQuery(document).ready(function($) { $( "#customize-control-<?php echo $this->id; ?>" ).show(); $( "#<?php echo $id . get_theme_mod($id,0); ?>" ).click(function(){ $( "#customize-control-<?php echo $this->id; ?>" ).fadeOut(300); }); $( "#<?php echo $id . $value; ?>" ).click(function(){ $( "#customize-control-<?php echo $this->id; ?>" ).fadeIn(300); }); }); </script> <?php } if ( isset($id) && isset($value) && get_theme_mod($id,0)!=$value ) { ?> <script> jQuery(document).ready(function($) { $( "#customize-control-<?php echo $this->id; ?>" ).hide(); $( "#<?php echo $id . get_theme_mod($id,0); ?>" ).click(function(){ $( "#customize-control-<?php echo $this->id; ?>" ).fadeOut(300); }); $( "#<?php echo $id . $value; ?>" ).click(function(){ $( "#customize-control-<?php echo $this->id; ?>" ).fadeIn(300); }); }); </script> <?php } endforeach; } }
telemahos/shoestrap
lib/kirki/includes/controls/textarea.php
PHP
gpl-2.0
2,067
[ 30522, 1026, 1029, 25718, 2465, 11332, 2072, 1035, 7661, 4697, 1035, 3793, 12069, 2050, 1035, 2491, 8908, 1059, 2361, 1035, 7661, 4697, 1035, 2491, 1063, 2270, 1002, 2828, 1027, 1005, 3793, 12069, 2050, 1005, 1025, 2270, 1002, 6412, 1027, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
#include <iostream> #include "eastwood/StdDef.h" #include "eastwood/Exception.h" #include "eastwood/FntFile.h" #include "eastwood/IStream.h" #include "eastwood/Log.h" namespace eastwood { struct FNTHeader { uint16_t fsize; /* Size of the file */ uint16_t unknown1; /* Unknown entry (always 0x0500) */ uint16_t unknown2; /* Unknown entry (always 0x000e) */ uint16_t unknown3; /* Unknown entry (always 0x0014) */ uint16_t wpos; /* Offset of char. widths array (abs. from beg. of file) */ uint16_t cdata; /* Offset of char. graphics data (abs. from beg. of file) */ uint16_t hpos; /* Offset of char. heights array (abs. from beg. of file) */ uint16_t unknown4; /* Unknown entry (always 0x1012) */ uint8_t nchars; /* Number of characters in font minus 1*/ // dunk- the doc says uint16_t uint8_t height; /* Font height */ uint8_t maxw; /* Max. character width */ }; FntFile::FntFile(std::istream &stream) : _characters(), _height(0) { IStream &_stream = reinterpret_cast<IStream&>(stream); FNTHeader header; _stream.readU16LE(reinterpret_cast<uint16_t*>(&header), offsetof(FNTHeader, nchars)/sizeof(uint16_t)); if (header.unknown1 != 0x0500 || header.unknown2 != 0x000e || header.unknown3 != 0x0014) throw(Exception(LOG_ERROR, "FntFile", "Invalid header")); // alignment padding _stream.ignore(1); header.nchars = _stream.get() + 1; header.height = _stream.get(); header.maxw = _stream.get(); _height = header.height; std::vector<uint16_t> dchar(header.nchars); _stream.readU16LE(&dchar.front(), header.nchars); std::vector<uint8_t> wchar(header.nchars); _stream.seekg(header.wpos, std::ios::beg); _stream.read(reinterpret_cast<char*>(&wchar.front()), header.nchars); //if (wchar[0] != 8) LOG(LV_WARNING, "Font", "%d: bad!!", wchar[0]); std::vector<uint16_t> hchar(dchar.size()); _stream.seekg(header.hpos, std::ios::beg); _stream.readU16LE(&hchar.front(), header.nchars); _stream.seekg(header.cdata, std::ios::beg); _characters.resize(header.nchars); for (uint8_t i=0; i!=_characters.size(); i++) { 
uint8_t offset = hchar[i] & 0xFF; uint8_t height = hchar[i] >> 8; uint8_t width = (wchar[i] + 1)/ 2; _characters[i].width = width; _characters[i].height = height; _characters[i].y_offset = offset; _characters[i].bitmap.resize(static_cast<uint8_t>(width*height)); _stream.seekg(dchar[i], std::ios::beg); _stream.read(reinterpret_cast<char*>(&_characters[i].bitmap.front()), static_cast<uint8_t>(_characters[i].bitmap.size())); }; } FntFile::~FntFile() { } void FntFile::extents(std::string text, uint16_t& w, uint16_t& h) const { w = 0; h = _height; for (size_t c=0; c!=text.length(); c++) { const FNTCharacter &ch = _characters[static_cast<uint8_t>(text[c])]; w += (2 * ch.width) + 1; }; } void FntFile::render(std::string text, Surface &surface, uint16_t offx, uint16_t offy, uint8_t paloff) const { uint8_t* pixels = surface; for (size_t c=0; c!=text.length(); c++) { const FNTCharacter &ch = _characters[static_cast<uint8_t>(text[c])]; const std::vector<uint8_t> &bitmap = ch.bitmap; for (uint8_t y=0; y!=ch.height; y++) { for (uint8_t x=0; x!=ch.width*2; x+=2) { uint8_t byte = bitmap[static_cast<uint8_t>((x/2) + (y*ch.width))]; uint8_t lobyte = byte >> 4, hibyte = byte & 0x0F; if (hibyte!=0) pixels[(offx + x) + ((ch.y_offset + y + offy) * surface.width())] = paloff + hibyte; if (lobyte!=0) //(2 < ch.width) lobyte!=0) pixels[(offx + x + 1) + ((ch.y_offset + y + offy) * surface.width())] = paloff + lobyte; }; }; offx += (2*ch.width) + 1; }; } }
OmniBlade/libeastwood
src/FntFile.cpp
C++
gpl-3.0
3,761
[ 30522, 1001, 2421, 1026, 16380, 25379, 1028, 1001, 2421, 1000, 24201, 1013, 2358, 14141, 12879, 1012, 1044, 1000, 1001, 2421, 1000, 24201, 1013, 6453, 1012, 1044, 1000, 1001, 2421, 1000, 24201, 1013, 1042, 3372, 8873, 2571, 1012, 1044, 1000...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
// Copyright (c) 1999-2004 Brian Wellington (bwelling@xbill.org) package org.xbill.DNS; /** * The Name Server Identifier Option, define in RFC 5001. * * @see OPTRecord * * @author Brian Wellington */ public class NSIDOption extends GenericEDNSOption { private static final long serialVersionUID = 74739759292589056L; NSIDOption() { super(Code.NSID); } /** * Construct an NSID option. * @param data The contents of the option. */ public NSIDOption(byte [] data) { super(Code.NSID, data); } }
x-falcon/Virtual-Hosts
app/src/main/java/org/xbill/DNS/NSIDOption.java
Java
gpl-3.0
510
[ 30522, 1013, 1013, 9385, 1006, 1039, 1007, 2639, 1011, 2432, 4422, 8409, 1006, 1038, 4381, 2075, 1030, 1060, 24457, 1012, 8917, 1007, 7427, 8917, 1012, 1060, 24457, 1012, 1040, 3619, 1025, 1013, 1008, 1008, 1008, 1996, 2171, 8241, 8909, 4...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
#!/usr/bin/env python # # Copyright 2013 Free Software Foundation, Inc. # # This file is part of GNU Radio # # GNU Radio is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. # # GNU Radio is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Radio; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # import numpy import scipy import random from gnuradio import gr, gr_unittest import blocks_swig as blocks import digital_swig as digital import channels_swig as channels from ofdm_txrx import ofdm_tx, ofdm_rx from utils import tagged_streams # Set this to true if you need to write out data LOG_DEBUG_INFO=False class ofdm_tx_fg (gr.top_block): def __init__(self, data, len_tag_key): gr.top_block.__init__(self, "ofdm_tx") tx_data, tags = tagged_streams.packets_to_vectors((data,), len_tag_key) src = blocks.vector_source_b(data, False, 1, tags) self.tx = ofdm_tx(packet_length_tag_key=len_tag_key, debug_log=LOG_DEBUG_INFO) self.sink = blocks.vector_sink_c() self.connect(src, self.tx, self.sink) def get_tx_samples(self): return self.sink.data() class ofdm_rx_fg (gr.top_block): def __init__(self, samples, len_tag_key, channel=None, prepend_zeros=100): gr.top_block.__init__(self, "ofdm_rx") if prepend_zeros: samples = (0,) * prepend_zeros + tuple(samples) src = blocks.vector_source_c(tuple(samples) + (0,) * 1000) self.rx = ofdm_rx(frame_length_tag_key=len_tag_key, debug_log=LOG_DEBUG_INFO) if channel is not None: self.connect(src, channel, self.rx) else: self.connect(src, self.rx) 
self.sink = blocks.vector_sink_b() self.connect(self.rx, self.sink) def get_rx_bytes(self): return self.sink.data() class test_ofdm_txrx (gr_unittest.TestCase): def setUp (self): self.tb = gr.top_block () def tearDown (self): self.tb = None def test_001_tx (self): """ Just make sure the Tx works in general """ len_tag_key = 'frame_len' n_bytes = 52 n_samples_expected = (numpy.ceil(1.0 * (n_bytes + 4) / 6) + 3) * 80 test_data = [random.randint(0, 255) for x in range(n_bytes)] tx_data, tags = tagged_streams.packets_to_vectors((test_data,), len_tag_key) src = blocks.vector_source_b(test_data, False, 1, tags) tx = ofdm_tx(packet_length_tag_key=len_tag_key) tx_fg = ofdm_tx_fg(test_data, len_tag_key) tx_fg.run() self.assertEqual(len(tx_fg.get_tx_samples()), n_samples_expected) def test_002_rx_only_noise(self): """ Run the RX with only noise, check it doesn't crash or return a burst. """ len_tag_key = 'frame_len' samples = (0,) * 1000 channel = channels.channel_model(0.1) rx_fg = ofdm_rx_fg(samples, len_tag_key, channel) rx_fg.run() self.assertEqual(len(rx_fg.get_rx_bytes()), 0) def test_003_tx1packet(self): """ Transmit one packet, with slight AWGN and slight frequency + timing offset. Check packet is received and no bit errors have occurred. 
""" len_tag_key = 'frame_len' n_bytes = 21 fft_len = 64 test_data = tuple([random.randint(0, 255) for x in range(n_bytes)]) # 1.0/fft_len is one sub-carrier, a fine freq offset stays below that freq_offset = 1.0 / fft_len * 0.7 #channel = channels.channel_model(0.01, freq_offset) channel = None # Tx tx_fg = ofdm_tx_fg(test_data, len_tag_key) tx_fg.run() tx_samples = tx_fg.get_tx_samples() # Rx rx_fg = ofdm_rx_fg(tx_samples, len_tag_key, channel, prepend_zeros=100) rx_fg.run() rx_data = rx_fg.get_rx_bytes() self.assertEqual(tuple(tx_fg.tx.sync_word1), tuple(rx_fg.rx.sync_word1)) self.assertEqual(tuple(tx_fg.tx.sync_word2), tuple(rx_fg.rx.sync_word2)) self.assertEqual(test_data, rx_data) def test_004_tx1packet_large_fO(self): """ Transmit one packet, with slight AWGN and large frequency offset. Check packet is received and no bit errors have occurred. """ fft_len = 64 len_tag_key = 'frame_len' n_bytes = 21 test_data = tuple([random.randint(0, 255) for x in range(n_bytes)]) #test_data = tuple([255 for x in range(n_bytes)]) # 1.0/fft_len is one sub-carrier frequency_offset = 1.0 / fft_len * 2.5 channel = channels.channel_model(0.00001, frequency_offset) # Tx tx_fg = ofdm_tx_fg(test_data, len_tag_key) tx_fg.run() tx_samples = tx_fg.get_tx_samples() # Rx rx_fg = ofdm_rx_fg(tx_samples, len_tag_key, channel, prepend_zeros=100) rx_fg.run() rx_data = rx_fg.get_rx_bytes() self.assertEqual(test_data, rx_data) if __name__ == '__main__': gr_unittest.run(test_ofdm_txrx, "test_ofdm_txrx.xml")
Gabotero/GNURadioNext
gr-digital/python/qa_ofdm_txrx.py
Python
gpl-3.0
5,471
[ 30522, 1001, 999, 1013, 2149, 2099, 1013, 8026, 1013, 4372, 2615, 18750, 1001, 1001, 9385, 2286, 2489, 4007, 3192, 1010, 4297, 1012, 1001, 1001, 2023, 5371, 2003, 2112, 1997, 27004, 2557, 1001, 1001, 27004, 2557, 2003, 2489, 4007, 1025, 2...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
package ChanteySongs.Data; import java.io.*; import java.util.*; import java.text.*; import com.thoughtworks.xstream.*; public class DataUtil { public static final DateFormat formatter = new SimpleDateFormat("MM/dd/yyyy"); public static String getData(Scanner in, PrintWriter out, String name) { String tmp; out.print(name + ": "); out.flush(); tmp = in.nextLine(); return (tmp.length() == 0) ? null : tmp; } public static String getDataMultiLine(Scanner in, PrintWriter out, String name) { String tmp = ""; String ret = ""; out.print(name + ": "); out.flush(); while(!tmp.equalsIgnoreCase("end")) { ret += tmp + "\n"; tmp = in.nextLine(); } return (ret.replaceAll("\\s","").length() == 0) ? null : ret; } public static int getDataInt(Scanner in, PrintWriter out, String name) { String tmp; out.print(name + ": "); out.flush(); tmp = in.nextLine(); try { return (tmp.length() == 0) ? -1 : Integer.parseInt(tmp); }catch(NumberFormatException nfe) { System.err.println("Not an integer value for " + name + ": " + tmp); return -1; } } public static Date getDataDate(Scanner in, PrintWriter out, String name) { String tmp; out.print(name + ": "); out.flush(); tmp = in.nextLine(); try { return (tmp.length() == 0) ? null : formatter.parse(tmp); }catch(ParseException pe) { System.err.println("Could not parse date for " + name + ": " + tmp); return null; } } public static Set<String> getDataSet(Scanner in, PrintWriter out, String name) { Set<String> ret = new HashSet<String>(); String tmp; do { tmp = getData(in, out, name); if(tmp != null) { ret.add(tmp); } }while(tmp != null); return (ret.size() == 0)? null : ret; } public static void prepare(XStream xstream) { xstream.alias("Person", Person.class); xstream.alias("Index", Index.class); xstream.alias("Collection", SongCollection.class); xstream.alias("Song", Song.class); } }
wingerjc/ChanteySongs
source/java/ChanteySongs/Data/DataUtil.java
Java
bsd-2-clause
2,472
[ 30522, 7427, 16883, 3240, 3385, 5620, 1012, 2951, 1025, 12324, 9262, 1012, 22834, 1012, 1008, 1025, 12324, 9262, 1012, 21183, 4014, 1012, 1008, 1025, 12324, 9262, 1012, 3793, 1012, 1008, 1025, 12324, 4012, 1012, 2245, 9316, 1012, 1060, 2142...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
package org.altbeacon.bluetooth; import android.annotation.TargetApi; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.os.AsyncTask; import android.util.Log; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.util.Date; import java.util.HashSet; import java.util.Set; /** * * This class provides relief for Android Bug 67272. This bug in the Bluedroid stack causes crashes * in Android's BluetoothService when scanning for BLE devices encounters a large number of unique * devices. It is rare for most users but can be problematic for those with apps scanning for * Bluetooth LE devices in the background (e.g. beacon-enabled apps), especially when these users * are around Bluetooth LE devices that randomize their mac address like Gimbal beacons. * * This class can both recover from crashes and prevent crashes from happening in the first place * * More details on the bug can be found at the following URLs: * * https://code.google.com/p/android/issues/detail?id=67272 * https://github.com/RadiusNetworks/android-ibeacon-service/issues/16 * * Version 1.0 * * Created by dyoung on 3/24/14. */ @TargetApi(5) public class BluetoothCrashResolver { private static final String TAG = "BluetoothCrashResolver"; private static final boolean PREEMPTIVE_ACTION_ENABLED = true; private boolean debugEnabled = false; /** * This is not the same file that bluedroid uses. 
This is just to maintain state of this module */ private static final String DISTINCT_BLUETOOTH_ADDRESSES_FILE = "BluetoothCrashResolverState.txt"; private boolean recoveryInProgress = false; private boolean discoveryStartConfirmed = false; private long lastBluetoothOffTime = 0l; private long lastBluetoothTurningOnTime = 0l; private long lastBluetoothCrashDetectionTime = 0l; private int detectedCrashCount = 0; private int recoveryAttemptCount = 0; private boolean lastRecoverySucceeded = false; private long lastStateSaveTime = 0l; private static final long MIN_TIME_BETWEEN_STATE_SAVES_MILLIS = 60000l; private Context context = null; private UpdateNotifier updateNotifier; private Set<String> distinctBluetoothAddresses = new HashSet<String>(); private DiscoveryCanceller discoveryCanceller = new DiscoveryCanceller(); /** // It is very likely a crash if Bluetooth turns off and comes // back on in an extremely short interval. Testing on a Nexus 4 shows // that when the BluetoothService crashes, the time between the STATE_OFF // and the STATE_TURNING_ON ranges from 0ms-684ms // Out of 3614 samples: // 99.4% (3593) < 600 ms // 84.7% (3060) < 500 ms // So we will assume any power off sequence of < 600ms to be a crash // // While it is possible to manually turn bluetooth off then back on in // about 600ms, but it is pretty hard to do. // */ private static final long SUSPICIOUSLY_SHORT_BLUETOOTH_OFF_INTERVAL_MILLIS = 600l; /** * The Bluedroid stack can only track only 1990 unique Bluetooth mac addresses without crashing */ private static final int BLUEDROID_MAX_BLUETOOTH_MAC_COUNT = 1990; /** * The discovery process will pare back the mac address list to 256, but more may * be found in the time we let the discovery process run, depending hon how many BLE * devices are around. 
*/ private static final int BLUEDROID_POST_DISCOVERY_ESTIMATED_BLUETOOTH_MAC_COUNT = 400; /** * It takes a little over 2 seconds after discovery is started before the pared-down mac file * is written to persistent storage. We let discovery run for a few more seconds just to be * sure. */ private static final int TIME_TO_LET_DISCOVERY_RUN_MILLIS = 5000; /* if 0, it means forever */ /** * Constructor should be called only once per long-running process that does Bluetooth LE * scanning. Must call start() to make it do anything. * * @param context the Activity or Service that is doing the Bluetooth scanning */ public BluetoothCrashResolver(Context context) { this.context = context.getApplicationContext(); if (isDebugEnabled()) Log.d(TAG, "constructed"); loadState(); } /** * Starts looking for crashes of the Bluetooth LE system and taking proactive steps to stop * crashes from happening. Proactive steps require calls to notifyScannedDevice(Device device) * so that crashes can be predicted ahead of time. */ public void start() { IntentFilter filter = new IntentFilter(); filter.addAction(BluetoothAdapter.ACTION_STATE_CHANGED); filter.addAction(BluetoothAdapter.ACTION_DISCOVERY_STARTED); filter.addAction(BluetoothAdapter.ACTION_DISCOVERY_FINISHED); context.registerReceiver(receiver, filter); if (isDebugEnabled()) Log.d(TAG, "started listening for BluetoothAdapter events"); } /** * Stops looking for crashes. Does not need to be called in normal operations, but may be * useful for testing. */ public void stop() { context.unregisterReceiver(receiver); if (isDebugEnabled()) Log.d(TAG, "stopped listening for BluetoothAdapter events"); saveState(); } /** * Enable debug logging. By default no debug lines are logged. */ public void enableDebug() { debugEnabled = true; } /** * Disable debug logging */ public void disableDebug() { debugEnabled = false; } /** * Call this method from your BluetoothAdapter.LeScanCallback method. 
* Doing so is optional, but if you do, this class will be able to count the number of * disctinct bluetooth devices scanned, and prevent crashes before they happen. * * This works very well if the app containing this class is the only one running bluetooth * LE scans on the device, or it is constantly doing scans (e.g. is in the foreground for * extended periods of time.) * * This will not work well if the application using this class is only scanning periodically * (e.g. when in the background to save battery) and another application is also scanning on * the same device, because this class will only get the counts from this application. * * Future augmentation of this class may improve this by somehow centralizing the list of * unique scanned devices. * * @param device */ @TargetApi(18) public void notifyScannedDevice(BluetoothDevice device, BluetoothAdapter.LeScanCallback scanner) { int oldSize = 0, newSize = 0; if (isDebugEnabled()) oldSize = distinctBluetoothAddresses.size(); synchronized(distinctBluetoothAddresses) { distinctBluetoothAddresses.add(device.getAddress()); } if (isDebugEnabled()) { newSize = distinctBluetoothAddresses.size(); if (oldSize != newSize && newSize % 100 == 0) { if (isDebugEnabled()) Log.d(TAG, "Distinct bluetooth devices seen: "+distinctBluetoothAddresses.size()); } } if (distinctBluetoothAddresses.size() > getCrashRiskDeviceCount()) { if (PREEMPTIVE_ACTION_ENABLED && !recoveryInProgress) { Log.w(TAG, "Large number of bluetooth devices detected: "+distinctBluetoothAddresses.size()+" Proactively attempting to clear out address list to prevent a crash"); Log.w(TAG, "Stopping LE Scan"); BluetoothAdapter.getDefaultAdapter().stopLeScan(scanner); startRecovery(); processStateChange(); } } } public void crashDetected() { if (android.os.Build.VERSION.SDK_INT < 18) { if (isDebugEnabled()) Log.d(TAG, "Ignoring crashes before SDK 18, because BLE is unsupported."); return; } Log.w(TAG, "BluetoothService crash detected"); if 
(distinctBluetoothAddresses.size() > 0) { if (isDebugEnabled()) Log.d(TAG, "Distinct bluetooth devices seen at crash: "+distinctBluetoothAddresses.size()); } long nowTimestamp = new Date().getTime(); lastBluetoothCrashDetectionTime = nowTimestamp; detectedCrashCount++; if (recoveryInProgress) { if (isDebugEnabled()) Log.d(TAG, "Ignoring bluetooth crash because recovery is already in progress."); } else { startRecovery(); } processStateChange(); } public long getLastBluetoothCrashDetectionTime() { return lastBluetoothCrashDetectionTime; } public int getDetectedCrashCount() { return detectedCrashCount; } public int getRecoveryAttemptCount() { return recoveryAttemptCount; } public boolean isLastRecoverySucceeded() { return lastRecoverySucceeded; } public boolean isRecoveryInProgress() { return recoveryInProgress; } public interface UpdateNotifier { public void dataUpdated(); } public void setUpdateNotifier(UpdateNotifier updateNotifier) { this.updateNotifier = updateNotifier; } /** Used to force a recovery operation */ public void forceFlush() { startRecovery(); processStateChange(); } private boolean isDebugEnabled() { return debugEnabled; } private int getCrashRiskDeviceCount() { // 1990 distinct devices tracked by Bluedroid will cause a crash. But we don't know how many // devices bluedroid is tracking, we only know how many we have seen, which will be smaller // than the number tracked by bluedroid because the number we track does not include its // initial state. 
We therefore assume that there are some devices being tracked by bluedroid // after a recovery operation or on startup return BLUEDROID_MAX_BLUETOOTH_MAC_COUNT-BLUEDROID_POST_DISCOVERY_ESTIMATED_BLUETOOTH_MAC_COUNT; } private void processStateChange() { if (updateNotifier != null) { updateNotifier.dataUpdated(); } if (new Date().getTime() - lastStateSaveTime > MIN_TIME_BETWEEN_STATE_SAVES_MILLIS) { saveState(); } } @TargetApi(17) private void startRecovery() { // The discovery operation will start by clearing out the bluetooth mac list to only the 256 // most recently seen BLE mac addresses. recoveryAttemptCount++; BluetoothAdapter adapter = BluetoothAdapter.getDefaultAdapter(); if (isDebugEnabled()) Log.d(TAG, "about to check if discovery is active"); if (!adapter.isDiscovering()) { Log.w(TAG, "Recovery attempt started"); recoveryInProgress = true; discoveryStartConfirmed = false; if (isDebugEnabled()) Log.d(TAG, "about to command discovery"); if (!adapter.startDiscovery()) { Log.w(TAG, "Can't start discovery. Is bluetooth turned on?"); } if (isDebugEnabled()) Log.d(TAG, "startDiscovery commanded. isDiscovering()="+adapter.isDiscovering()); // We don't actually need to do a discovery -- we just need to kick one off so the // mac list will be pared back to 256. Because discovery is an expensive operation in // terms of battery, we will cancel it. if (TIME_TO_LET_DISCOVERY_RUN_MILLIS > 0 ) { if (isDebugEnabled()) Log.d(TAG, "We will be cancelling this discovery in "+TIME_TO_LET_DISCOVERY_RUN_MILLIS+" milliseconds."); discoveryCanceller.doInBackground(); } else { Log.d(TAG, "We will let this discovery run its course."); } } else { Log.w(TAG, "Already discovering. 
Recovery attempt abandoned."); } } private void finishRecovery() { Log.w(TAG, "Recovery attempt finished"); synchronized(distinctBluetoothAddresses) { distinctBluetoothAddresses.clear(); } recoveryInProgress = false; } private final BroadcastReceiver receiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { String action = intent.getAction(); if (action.equals(BluetoothAdapter.ACTION_DISCOVERY_FINISHED)) { if (recoveryInProgress) { if (isDebugEnabled()) Log.d(TAG, "Bluetooth discovery finished"); finishRecovery(); } else { if (isDebugEnabled()) Log.d(TAG, "Bluetooth discovery finished (external)"); } } if (action.equals(BluetoothAdapter.ACTION_DISCOVERY_STARTED)) { if (recoveryInProgress) { discoveryStartConfirmed = true; if (isDebugEnabled()) Log.d(TAG, "Bluetooth discovery started"); } else { if (isDebugEnabled()) Log.d(TAG, "Bluetooth discovery started (external)"); } } if (action.equals(BluetoothAdapter.ACTION_STATE_CHANGED)) { final int state = intent.getIntExtra(BluetoothAdapter.EXTRA_STATE, BluetoothAdapter.ERROR); switch (state) { case BluetoothAdapter.ERROR: if (isDebugEnabled()) Log.d(TAG, "Bluetooth state is ERROR"); break; case BluetoothAdapter.STATE_OFF: if (isDebugEnabled()) Log.d(TAG, "Bluetooth state is OFF"); lastBluetoothOffTime = new Date().getTime(); break; case BluetoothAdapter.STATE_TURNING_OFF: break; case BluetoothAdapter.STATE_ON: if (isDebugEnabled()) Log.d(TAG, "Bluetooth state is ON"); if (isDebugEnabled()) Log.d(TAG, "Bluetooth was turned off for "+(lastBluetoothTurningOnTime - lastBluetoothOffTime)+" milliseconds"); if (lastBluetoothTurningOnTime - lastBluetoothOffTime < SUSPICIOUSLY_SHORT_BLUETOOTH_OFF_INTERVAL_MILLIS) { crashDetected(); } break; case BluetoothAdapter.STATE_TURNING_ON: lastBluetoothTurningOnTime = new Date().getTime(); if (isDebugEnabled()) Log.d(TAG, "Bluetooth state is TURNING_ON"); break; } } } }; private void saveState() { FileOutputStream outputStream = null; 
OutputStreamWriter writer = null; lastStateSaveTime = new Date().getTime(); try { outputStream = context.openFileOutput(DISTINCT_BLUETOOTH_ADDRESSES_FILE, Context.MODE_PRIVATE); writer = new OutputStreamWriter(outputStream); writer.write(lastBluetoothCrashDetectionTime+"\n"); writer.write(detectedCrashCount+"\n"); writer.write(recoveryAttemptCount+"\n"); writer.write(lastRecoverySucceeded ? "1\n" : "0\n"); synchronized (distinctBluetoothAddresses) { for (String mac : distinctBluetoothAddresses) { writer.write(mac); writer.write("\n"); } } } catch (IOException e) { Log.w(TAG, "Can't write macs to "+DISTINCT_BLUETOOTH_ADDRESSES_FILE); } finally { if (writer != null) { try { writer.close(); } catch (IOException e1) { } } } if (isDebugEnabled()) Log.d(TAG, "Wrote "+distinctBluetoothAddresses.size()+" bluetooth addresses"); } private void loadState() { FileInputStream inputStream = null; BufferedReader reader = null; try { inputStream = context.openFileInput(DISTINCT_BLUETOOTH_ADDRESSES_FILE); reader = new BufferedReader(new InputStreamReader(inputStream)); String line; line = reader.readLine(); if (line != null) { lastBluetoothCrashDetectionTime = Long.parseLong(line); } line = reader.readLine(); if (line != null) { detectedCrashCount = Integer.parseInt(line); } line = reader.readLine(); if (line != null) { recoveryAttemptCount = Integer.parseInt(line); } line = reader.readLine(); if (line != null) { lastRecoverySucceeded = false; if (line.equals("1")) { lastRecoverySucceeded = true; } } String mac; while ((mac = reader.readLine()) != null) { distinctBluetoothAddresses.add(mac); } } catch (IOException e) { Log.w(TAG, "Can't read macs from "+DISTINCT_BLUETOOTH_ADDRESSES_FILE); } catch (NumberFormatException e) { Log.w(TAG, "Can't parse file "+DISTINCT_BLUETOOTH_ADDRESSES_FILE); } finally { if (reader != null) { try { reader.close(); } catch (IOException e1) { } } } if (isDebugEnabled()) Log.d(TAG, "Read "+distinctBluetoothAddresses.size()+" bluetooth addresses"); } 
private class DiscoveryCanceller extends AsyncTask<Void, Void, Void> { @Override protected Void doInBackground(Void... params) { try { Thread.sleep(TIME_TO_LET_DISCOVERY_RUN_MILLIS); if (!discoveryStartConfirmed) { Log.w(TAG, "BluetoothAdapter.ACTION_DISCOVERY_STARTED never received. Recovery may fail."); } final BluetoothAdapter adapter = BluetoothAdapter.getDefaultAdapter(); if (adapter.isDiscovering()) { if (isDebugEnabled()) Log.d(TAG, "Cancelling discovery"); adapter.cancelDiscovery(); } else { if (isDebugEnabled()) Log.d(TAG, "Discovery not running. Won't cancel it"); } } catch (InterruptedException e) { if (isDebugEnabled()) Log.d(TAG, "DiscoveryCanceller sleep interrupted."); } return null; } @Override protected void onPostExecute(Void result) { } @Override protected void onPreExecute() { } @Override protected void onProgressUpdate(Void... values) { } } }
yangjae/android-beacon-library
src/main/java/org/altbeacon/bluetooth/BluetoothCrashResolver.java
Java
apache-2.0
19,332
[ 30522, 7427, 8917, 1012, 12456, 4783, 22684, 2078, 1012, 2630, 19392, 1025, 12324, 11924, 1012, 5754, 17287, 3508, 1012, 4539, 9331, 2072, 1025, 12324, 11924, 1012, 2630, 19392, 1012, 2630, 19392, 8447, 13876, 2121, 1025, 12324, 11924, 1012, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
#------------------------------------------------------------------------------ # Copyright (c) 2013 The University of Manchester, UK. # # BSD Licenced. See LICENCE.rdoc for details. # # Taverna Player was developed in the BioVeL project, funded by the European # Commission 7th Framework Programme (FP7), through grant agreement # number 283359. # # Author: Robert Haines #------------------------------------------------------------------------------ class TavernaPlayer::RunsController < ApplicationController # Do not remove the next line. include TavernaPlayer::Concerns::Controllers::RunsController # Extend the RunsController here. private alias_method :old_find_run, :find_run def update_params params.require(:run).permit(:name, :policy_attributes => [:id, :public_permissions => []]) end def run_params params.require(:run).permit( :create_time, :delayed_job, :embedded, :finish_time, :inputs_attributes, :log, :name, :parent_id, :results, :run_id, :start_time, :status_message_key, :user_id, :workflow_id, :inputs_attributes => [:depth, :file, :metadata, :name, :value], :policy_attributes => [:id, :public_permissions => []] ) end def find_runs select = { :embedded => false } select[:workflow_id] = params[:workflow_id] if params[:workflow_id] @runs = TavernaPlayer::Run.where(select).order("created_at DESC").with_permissions(current_user, :view).page(params[:page]) end def find_run old_find_run authorize(@run.can?(current_user, action_name)) end end
Samhane/taverna-player-portal
app/controllers/taverna_player/runs_controller.rb
Ruby
mit
1,563
[ 30522, 1001, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 101...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
# Bauhinia khasiana var. tomentella T.Chen VARIETY #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Fabales/Fabaceae/Bauhinia/Bauhinia khasiana/Bauhinia khasiana tomentella/README.md
Markdown
apache-2.0
198
[ 30522, 1001, 8670, 27225, 23309, 1047, 14949, 11410, 13075, 1012, 21269, 10111, 4571, 1056, 1012, 8802, 3528, 1001, 1001, 1001, 1001, 3570, 3970, 1001, 1001, 1001, 1001, 2429, 2000, 1996, 10161, 1997, 2166, 1010, 3822, 2254, 2249, 1001, 100...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
// Uncomment the following to provide samples for PageResult<T>. Must also add the Microsoft.AspNet.WebApi.OData // package to your project. ////#define Handle_PageResultOfT using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Net.Http.Headers; using System.Reflection; using System.Web; using System.Web.Http; #if Handle_PageResultOfT using System.Web.Http.OData; #endif namespace TheBigCatProject.Server.Areas.HelpPage { /// <summary> /// Use this class to customize the Help Page. /// For example you can set a custom <see cref="System.Web.Http.Description.IDocumentationProvider"/> to supply the documentation /// or you can provide the samples for the requests/responses. /// </summary> public static class HelpPageConfig { [SuppressMessage("Microsoft.Globalization", "CA1303:Do not pass literals as localized parameters", MessageId = "TheBigCatProject.Server.Areas.HelpPage.TextSample.#ctor(System.String)", Justification = "End users may choose to merge this string with existing localized resources.")] [SuppressMessage("Microsoft.Naming", "CA2204:Literals should be spelled correctly", MessageId = "bsonspec", Justification = "Part of a URI.")] public static void Register(HttpConfiguration config) { //// Uncomment the following to use the documentation from XML documentation file. //config.SetDocumentationProvider(new XmlDocumentationProvider(HttpContext.Current.Server.MapPath("~/App_Data/XmlDocument.xml"))); //// Uncomment the following to use "sample string" as the sample for all actions that have string as the body parameter or return type. //// Also, the string arrays will be used for IEnumerable<string>. The sample objects will be serialized into different media type //// formats by the available formatters. 
//config.SetSampleObjects(new Dictionary<Type, object> //{ // {typeof(string), "sample string"}, // {typeof(IEnumerable<string>), new string[]{"sample 1", "sample 2"}} //}); // Extend the following to provide factories for types not handled automatically (those lacking parameterless // constructors) or for which you prefer to use non-default property values. Line below provides a fallback // since automatic handling will fail and GeneratePageResult handles only a single type. #if Handle_PageResultOfT config.GetHelpPageSampleGenerator().SampleObjectFactories.Add(GeneratePageResult); #endif // Extend the following to use a preset object directly as the sample for all actions that support a media // type, regardless of the body parameter or return type. The lines below avoid display of binary content. // The BsonMediaTypeFormatter (if available) is not used to serialize the TextSample object. config.SetSampleForMediaType( new TextSample("Binary JSON content. See http://bsonspec.org for details."), new MediaTypeHeaderValue("application/bson")); //// Uncomment the following to use "[0]=foo&[1]=bar" directly as the sample for all actions that support form URL encoded format //// and have IEnumerable<string> as the body parameter or return type. //config.SetSampleForType("[0]=foo&[1]=bar", new MediaTypeHeaderValue("application/x-www-form-urlencoded"), typeof(IEnumerable<string>)); //// Uncomment the following to use "1234" directly as the request sample for media type "text/plain" on the controller named "Values" //// and action named "Put". //config.SetSampleRequest("1234", new MediaTypeHeaderValue("text/plain"), "Values", "Put"); //// Uncomment the following to use the image on "../images/aspNetHome.png" directly as the response sample for media type "image/png" //// on the controller named "Values" and action named "Get" with parameter "id". 
//config.SetSampleResponse(new ImageSample("../images/aspNetHome.png"), new MediaTypeHeaderValue("image/png"), "Values", "Get", "id"); //// Uncomment the following to correct the sample request when the action expects an HttpRequestMessage with ObjectContent<string>. //// The sample will be generated as if the controller named "Values" and action named "Get" were having string as the body parameter. //config.SetActualRequestType(typeof(string), "Values", "Get"); //// Uncomment the following to correct the sample response when the action returns an HttpResponseMessage with ObjectContent<string>. //// The sample will be generated as if the controller named "Values" and action named "Post" were returning a string. //config.SetActualResponseType(typeof(string), "Values", "Post"); } #if Handle_PageResultOfT private static object GeneratePageResult(HelpPageSampleGenerator sampleGenerator, Type type) { if (type.IsGenericType) { Type openGenericType = type.GetGenericTypeDefinition(); if (openGenericType == typeof(PageResult<>)) { // Get the T in PageResult<T> Type[] typeParameters = type.GetGenericArguments(); Debug.Assert(typeParameters.Length == 1); // Create an enumeration to pass as the first parameter to the PageResult<T> constuctor Type itemsType = typeof(List<>).MakeGenericType(typeParameters); object items = sampleGenerator.GetSampleObject(itemsType); // Fill in the other information needed to invoke the PageResult<T> constuctor Type[] parameterTypes = new Type[] { itemsType, typeof(Uri), typeof(long?), }; object[] parameters = new object[] { items, null, (long)ObjectGenerator.DefaultCollectionSize, }; // Call PageResult(IEnumerable<T> items, Uri nextPageLink, long? count) constructor ConstructorInfo constructor = type.GetConstructor(parameterTypes); return constructor.Invoke(parameters); } } return null; } #endif } }
EmilMitev/Telerik-Academy
Single Page Applications/07. AngularJS Workshop/TheBigCatProject/TheBigCatProject.Server/Areas/HelpPage/App_Start/HelpPageConfig.cs
C#
mit
6,517
[ 30522, 1013, 1013, 4895, 9006, 3672, 1996, 2206, 2000, 3073, 8168, 2005, 3931, 6072, 11314, 1026, 1056, 1028, 1012, 2442, 2036, 5587, 1996, 7513, 1012, 2004, 2361, 7159, 1012, 4773, 9331, 2072, 1012, 1051, 2850, 2696, 1013, 1013, 7427, 20...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
package ch.epfl.yinyang package transformers import ch.epfl.yinyang._ import ch.epfl.yinyang.transformers._ import scala.reflect.macros.blackbox.Context import language.experimental.macros import scala.collection.mutable import scala.collection.mutable.ArrayBuffer /** * Converts captured variables to holes, which will be passed to the generated * code at runtime as arguments to the apply method. Exposes all holes in the * holeTable, which maps from holeIds to symbolIds. * * Features covered are: * - identifiers -> `hole[T](classTag[T], holeId)` * - fields (TODO) * - no parameter methods (TODO) * - no parameter functions (TODO) */ trait HoleTransformation extends MacroModule with TransformationUtils { def holeMethod: String import c.universe._ /** SymbolIds indexed by holeIds. */ val holeTable = new ArrayBuffer[Int] object HoleTransformer { def apply(toHoles: List[Symbol] = Nil, className: String)(tree: Tree) = { val t = new HoleTransformer(toHoles map symbolId).transform(tree) log("holeTransformed (transforming " + toHoles + "): " + code(t), 2) log("holeTable (holeId -> symbolId): " + holeTable, 2) t } } /** * Transforms all identifiers with symbolIds in `toHoles` to * `hole[T](classTag[T], holeId)` and builds the holeTable mapping from * holeIds to symbolIds. */ class HoleTransformer(toHoles: List[Int]) extends Transformer { override def transform(tree: Tree): Tree = tree match { case i @ Ident(s) if toHoles contains symbolId(i.symbol) => { val index = { val sId = symbolId(i.symbol) if (holeTable.contains(sId)) holeTable.indexOf(sId) else { holeTable += symbolId(i.symbol) holeTable.size - 1 } } Apply( Select(This(typeNames.EMPTY), TermName(holeMethod)), List( TypeApply( Select(This(typeNames.EMPTY), TermName("runtimeType")), List(TypeTree(i.tpe.widen))), Literal(Constant(index)))) } case _ => super.transform(tree) } } }
vjovanov/scala-yinyang
components/core/src/transformers/HoleTransformation.scala
Scala
bsd-3-clause
2,133
[ 30522, 7427, 10381, 1012, 4958, 10258, 1012, 18208, 12198, 7427, 19081, 12324, 10381, 1012, 4958, 10258, 1012, 18208, 12198, 1012, 1035, 12324, 10381, 1012, 4958, 10258, 1012, 18208, 12198, 1012, 19081, 1012, 1035, 12324, 26743, 1012, 8339, 1...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
// Seriously awesome GLSL noise functions. (C) Credits and kudos go to // Copyright (C) Stefan Gustavson, Ian McEwan Ashima Arts // MIT License. define(function(require, exports){ exports.permute1 = function(x){ return mod((34.0 * x + 1.0) * x, 289.0) } exports.permute3 = function(x){ return mod((34.0 * x + 1.0) * x, 289.0) } exports.permute4 = function(x){ return mod((34.0 * x + 1.0) * x, 289.0) } exports.isqrtT1 = function(r){ return 1.79284291400159 - 0.85373472095314 * r } exports.isqrtT4 = function(r){ return vec4(1.79284291400159 - 0.85373472095314 * r) } exports.snoise2 = function(x, y){ return snoise2v(vec2(x,y,z)) } exports.noise2d = exports.s2d = exports.snoise2v = function(v){ var C = vec4(0.211324865405187,0.366025403784439,-0.577350269189626,0.024390243902439) var i = floor(v + dot(v, C.yy) ) var x0 = v - i + dot(i, C.xx) var i1 = (x0.x > x0.y) ? vec2(1.0, 0.0) : vec2(0.0, 1.0) var x12 = x0.xyxy + C.xxzz x12.xy -= i1 i = mod(i, 289.0) // Avoid truncation effects in permutation var p = permute3(permute3(i.y + vec3(0.0, i1.y, 1.0)) + i.x + vec3(0.0, i1.x, 1.0 )) var m = max(0.5 - vec3(dot(x0,x0), dot(x12.xy,x12.xy), dot(x12.zw,x12.zw)), 0.0) m = m*m m = m*m var x = 2.0 * fract(p * C.www) - 1.0 var h = abs(x) - 0.5 var ox = floor(x + 0.5) var a0 = x - ox m *= (1.79284291400159 - 0.85373472095314 * ( a0*a0 + h*h )) var g = vec3() g.x = a0.x * x0.x + h.x * x0.y g.yz = a0.yz * x12.xz + h.yz * x12.yw return 130.0 * dot(m, g) } exports.snoise3 = function(x, y, z){ return snoise3v(vec3(x,y,z)) } exports.noise3d = exports.snoise3v = function(v){ var C = vec2(1.0/6.0, 1.0/3.0) var D = vec4(0.0, 0.5, 1.0, 2.0) // First corner var i = floor(v + dot(v, C.yyy)) var x0 = v - i + dot(i, C.xxx) var g = step(x0.yzx, x0.xyz) var l = 1.0 - g var i1 = min(g.xyz, l.zxy) var i2 = max(g.xyz, l.zxy) var x1 = x0 - i1 + 1.0 * C.xxx var x2 = x0 - i2 + 2.0 * C.xxx var x3 = x0 - 1. 
+ 3.0 * C.xxx // Permutations i = mod(i, 289.0) var p = permute4(permute4(permute4( i.z + vec4(0.0, i1.z, i2.z, 1.0)) + i.y + vec4(0.0, i1.y, i2.y, 1.0)) + i.x + vec4(0.0, i1.x, i2.x, 1.0)) // ( N*N points uniformly over a square, mapped onto an octahedron.) var n_ = 1.0/7.0 var ns = n_ * D.wyz - D.xzx var j = p - 49.0 * floor(p * ns.z *ns.z) var x_ = floor(j * ns.z) var y_ = floor(j - 7.0 * x_) var x = x_ * ns.x + ns.yyyy var y = y_ * ns.x + ns.yyyy var h = 1.0 - abs(x) - abs(y) var b0 = vec4( x.xy, y.xy ) var b1 = vec4( x.zw, y.zw ) var s0 = floor(b0)*2.0 + 1.0 var s1 = floor(b1)*2.0 + 1.0 var sh = -step(h, vec4(0.0)) var a0 = b0.xzyw + s0.xzyw*sh.xxyy var a1 = b1.xzyw + s1.xzyw*sh.zzww var p0 = vec3(a0.xy, h.x) var p1 = vec3(a0.zw, h.y) var p2 = vec3(a1.xy, h.z) var p3 = vec3(a1.zw, h.w) //Normalise gradients var norm = isqrtT4(vec4(dot(p0,p0), dot(p1,p1), dot(p2, p2), dot(p3,p3))) p0 *= norm.x; p1 *= norm.y; p2 *= norm.z; p3 *= norm.w; // Mix final noise value var m = max(0.6 - vec4(dot(x0,x0), dot(x1,x1), dot(x2,x2), dot(x3,x3)), 0.0) m = m * m return 42.0 * dot( m*m, vec4( dot(p0,x0), dot(p1,x1), dot(p2,x2), dot(p3,x3) ) ) } exports.snoise4_g = function(j, ip){ var p = vec4() p.xyz = floor( fract (vec3(j) * ip.xyz) * 7.0) * ip.z - 1.0 p.w = 1.5 - dot(abs(p.xyz), vec3(1.0,1.0,1.0)) var s = vec4(lessThan(p, vec4(0.0))) p.xyz = p.xyz + (s.xyz*2.0 - 1.0) * s.www return p } exports.snoise4 = function(x, y, z, w){ return snoise4v(vec4(x,y,z,w)) } exports.snoise4v = function(v){ var C = vec4(0.138196601125011,0.276393202250021,0.414589803375032,-0.447213595499958) // First corner var i = floor(v + dot(v, vec4(0.309016994374947451)) ) var x0 = v - i + dot(i, C.xxxx) var i0 = vec4() var isX = step( x0.yzw, x0.xxx ) var isYZ = step( x0.zww, x0.yyz ) i0.x = isX.x + isX.y + isX.z i0.yzw = 1.0 - isX i0.y += isYZ.x + isYZ.y i0.zw += 1.0 - isYZ.xy i0.z += isYZ.z i0.w += 1.0 - isYZ.z var i3 = clamp( i0, 0.0, 1.0 ) var i2 = clamp( i0-1.0, 0.0, 1.0 ) var i1 = clamp( i0-2.0, 
0.0, 1.0 ) var x1 = x0 - i1 + C.xxxx var x2 = x0 - i2 + C.yyyy var x3 = x0 - i3 + C.zzzz var x4 = x0 + C.wwww // Permutations i = mod(i, 289.0 ) var j0 = permute1( permute1( permute1( permute1(i.w) + i.z) + i.y) + i.x) var j1 = permute4( permute4( permute4( permute4( i.w + vec4(i1.w, i2.w, i3.w, 1.0 )) + i.z + vec4(i1.z, i2.z, i3.z, 1.0 )) + i.y + vec4(i1.y, i2.y, i3.y, 1.0 )) + i.x + vec4(i1.x, i2.x, i3.x, 1.0 )) // Gradients: 7x7x6 points over a cube, mapped onto a 4-cross polytope // 7*7*6 = 294, which is close to the ring size 17*17 = 289. var ip = vec4(1.0/294.0, 1.0/49.0, 1.0/7.0, 0.0) var p0 = snoise4_g(j0, ip) var p1 = snoise4_g(j1.x, ip) var p2 = snoise4_g(j1.y, ip) var p3 = snoise4_g(j1.z, ip) var p4 = snoise4_g(j1.w, ip) // Normalise gradients var nr = isqrtT4(vec4(dot(p0,p0), dot(p1,p1), dot(p2, p2), dot(p3,p3))) p0 *= nr.x p1 *= nr.y p2 *= nr.z p3 *= nr.w p4 *= isqrtT1(dot(p4,p4)) // Mix contributions from the five corners var m0 = max(0.6 - vec3(dot(x0,x0), dot(x1,x1), dot(x2,x2)), 0.0) var m1 = max(0.6 - vec2(dot(x3,x3), dot(x4,x4)), 0.0) m0 = m0 * m0 m1 = m1 * m1 return 49.0 * (dot(m0*m0, vec3(dot( p0, x0 ), dot(p1, x1), dot(p2, x2))) + dot(m1*m1, vec2( dot(p3, x3), dot(p4, x4)))) } exports.cell2v = function(v){ return cell3v(vec3(v.x, v.y,0)) } exports.cell3v = function(P){ var K = 0.142857142857 // 1/7 var Ko = 0.428571428571 // 1/2-K/2 var K2 = 0.020408163265306 // 1/(7*7) var Kz = 0.166666666667 // 1/6 var Kzo = 0.416666666667 // 1/2-1/6*2 var ji = 0.8 // smaller jitter gives less errors in F2 var Pi = mod(floor(P), 289.0) var Pf = fract(P) var Pfx = Pf.x + vec4(0.0, -1.0, 0.0, -1.0) var Pfy = Pf.y + vec4(0.0, 0.0, -1.0, -1.0) var p = permute4(Pi.x + vec4(0.0, 1.0, 0.0, 1.0)) p = permute4(p + Pi.y + vec4(0.0, 0.0, 1.0, 1.0)) var p1 = permute4(p + Pi.z) // z+0 var p2 = permute4(p + Pi.z + vec4(1.0)) // z+1 var ox1 = fract(p1*K) - Ko var oy1 = mod(floor(p1*K), 7.0)*K - Ko var oz1 = floor(p1*K2)*Kz - Kzo // p1 < 289 guaranteed var ox2 = fract(p2*K) 
- Ko var oy2 = mod(floor(p2*K), 7.0)*K - Ko var oz2 = floor(p2*K2)*Kz - Kzo var dx1 = Pfx + ji*ox1 var dy1 = Pfy + ji*oy1 var dz1 = Pf.z + ji*oz1 var dx2 = Pfx + ji*ox2 var dy2 = Pfy + ji*oy2 var dz2 = Pf.z - 1.0 + ji*oz2 var d1 = dx1 * dx1 + dy1 * dy1 + dz1 * dz1 // z+0 var d2 = dx2 * dx2 + dy2 * dy2 + dz2 * dz2 // z+1 var d = min(d1,d2) // F1 is now in d d2 = max(d1,d2) // Make sure we keep all candidates for F2 d.xy = (d.x < d.y) ? d.xy : d.yx // Swap smallest to d.x d.xz = (d.x < d.z) ? d.xz : d.zx d.xw = (d.x < d.w) ? d.xw : d.wx // F1 is now in d.x d.yzw = min(d.yzw, d2.yzw) // F2 now not in d2.yzw d.y = min(d.y, d.z) // nor in d.z d.y = min(d.y, d.w) // nor in d.w d.y = min(d.y, d2.x) // F2 is now in d.y return sqrt(d.xy) // F1 and F2 }, exports.cell3w = function(P){ var K = 0.142857142857 var Ko = 0.428571428571 // 1/2-K/2 var K2 = 0.020408163265306// 1/(7*7) var Kz = 0.166666666667// 1/6 var Kzo = 0.416666666667// 1/2-1/6*2 var ji = 1.0// smaller jitter gives more regular pattern var Pi = mod(floor(P), 289.0) var Pf = fract(P) - 0.5 var Pfx = Pf.x + vec3(1.0, 0.0, -1.0) var Pfy = Pf.y + vec3(1.0, 0.0, -1.0) var Pfz = Pf.z + vec3(1.0, 0.0, -1.0) var p = permute3(Pi.x + vec3(-1.0, 0.0, 1.0)) var p1 = permute3(p + Pi.y - 1.0) var p2 = permute3(p + Pi.y) var p3 = permute3(p + Pi.y + 1.0) var p11 = permute3(p1 + Pi.z - 1.0) var p12 = permute3(p1 + Pi.z) var p13 = permute3(p1 + Pi.z + 1.0) var p21 = permute3(p2 + Pi.z - 1.0) var p22 = permute3(p2 + Pi.z) var p23 = permute3(p2 + Pi.z + 1.0) var p31 = permute3(p3 + Pi.z - 1.0) var p32 = permute3(p3 + Pi.z) var p33 = permute3(p3 + Pi.z + 1.0) var ox11 = fract(p11*K) - Ko var oy11 = mod(floor(p11*K), 7.0)*K - Ko var oz11 = floor(p11*K2)*Kz - Kzo // p11 < 289 guaranteed var ox12 = fract(p12*K) - Ko var oy12 = mod(floor(p12*K), 7.0)*K - Ko var oz12 = floor(p12*K2)*Kz - Kzo var ox13 = fract(p13*K) - Ko var oy13 = mod(floor(p13*K), 7.0)*K - Ko var oz13 = floor(p13*K2)*Kz - Kzo var ox21 = fract(p21*K) - Ko var oy21 = 
mod(floor(p21*K), 7.0)*K - Ko var oz21 = floor(p21*K2)*Kz - Kzo var ox22 = fract(p22*K) - Ko var oy22 = mod(floor(p22*K), 7.0)*K - Ko var oz22 = floor(p22*K2)*Kz - Kzo var ox23 = fract(p23*K) - Ko var oy23 = mod(floor(p23*K), 7.0)*K - Ko var oz23 = floor(p23*K2)*Kz - Kzo var ox31 = fract(p31*K) - Ko var oy31 = mod(floor(p31*K), 7.0)*K - Ko var oz31 = floor(p31*K2)*Kz - Kzo var ox32 = fract(p32*K) - Ko var oy32 = mod(floor(p32*K), 7.0)*K - Ko var oz32 = floor(p32*K2)*Kz - Kzo var ox33 = fract(p33*K) - Ko var oy33 = mod(floor(p33*K), 7.0)*K - Ko var oz33 = floor(p33*K2)*Kz - Kzo var dx11 = Pfx + ji*ox11 var dy11 = Pfy.x + ji*oy11 var dz11 = Pfz.x + ji*oz11 var dx12 = Pfx + ji*ox12 var dy12 = Pfy.x + ji*oy12 var dz12 = Pfz.y + ji*oz12 var dx13 = Pfx + ji*ox13 var dy13 = Pfy.x + ji*oy13 var dz13 = Pfz.z + ji*oz13 var dx21 = Pfx + ji*ox21 var dy21 = Pfy.y + ji*oy21 var dz21 = Pfz.x + ji*oz21 var dx22 = Pfx + ji*ox22 var dy22 = Pfy.y + ji*oy22 var dz22 = Pfz.y + ji*oz22 var dx23 = Pfx + ji*ox23 var dy23 = Pfy.y + ji*oy23 var dz23 = Pfz.z + ji*oz23 var dx31 = Pfx + ji*ox31 var dy31 = Pfy.z + ji*oy31 var dz31 = Pfz.x + ji*oz31 var dx32 = Pfx + ji*ox32 var dy32 = Pfy.z + ji*oy32 var dz32 = Pfz.y + ji*oz32 var dx33 = Pfx + ji*ox33 var dy33 = Pfy.z + ji*oy33 var dz33 = Pfz.z + ji*oz33 var d11 = dx11 * dx11 + dy11 * dy11 + dz11 * dz11 var d12 = dx12 * dx12 + dy12 * dy12 + dz12 * dz12 var d13 = dx13 * dx13 + dy13 * dy13 + dz13 * dz13 var d21 = dx21 * dx21 + dy21 * dy21 + dz21 * dz21 var d22 = dx22 * dx22 + dy22 * dy22 + dz22 * dz22 var d23 = dx23 * dx23 + dy23 * dy23 + dz23 * dz23 var d31 = dx31 * dx31 + dy31 * dy31 + dz31 * dz31 var d32 = dx32 * dx32 + dy32 * dy32 + dz32 * dz32 var d33 = dx33 * dx33 + dy33 * dy33 + dz33 * dz33 var d1a = min(d11, d12) d12 = max(d11, d12) d11 = min(d1a, d13) // Smallest now not in d12 or d13 d13 = max(d1a, d13) d12 = min(d12, d13) // 2nd smallest now not in d13 var d2a = min(d21, d22) d22 = max(d21, d22) d21 = min(d2a, d23) // Smallest now not 
in d22 or d23 d23 = max(d2a, d23) d22 = min(d22, d23) // 2nd smallest now not in d23 var d3a = min(d31, d32) d32 = max(d31, d32) d31 = min(d3a, d33) // Smallest now not in d32 or d33 d33 = max(d3a, d33) d32 = min(d32, d33) // 2nd smallest now not in d33 var da = min(d11, d21) d21 = max(d11, d21) d11 = min(da, d31) // Smallest now in d11 d31 = max(da, d31) // 2nd smallest now not in d31 d11.xy = (d11.x < d11.y) ? d11.xy : d11.yx d11.xz = (d11.x < d11.z) ? d11.xz : d11.zx // d11.x now smallest d12 = min(d12, d21) // 2nd smallest now not in d21 d12 = min(d12, d22) // nor in d22 d12 = min(d12, d31) // nor in d31 d12 = min(d12, d32) // nor in d32 d11.yz = min(d11.yz, d12.xy) // nor in d12.yz d11.y = min(d11.y, d12.z) // Only two more to go d11.y = min(d11.y, d11.z) // Done! (Phew!) return sqrt(d11.xy) // F1, F2 } })
teem2/dreem2.1
core/gl/glnoise.js
JavaScript
mit
11,369
[ 30522, 1013, 1013, 5667, 12476, 1043, 4877, 2140, 5005, 4972, 1012, 1006, 1039, 1007, 6495, 1998, 13970, 12269, 2175, 2000, 1013, 1013, 9385, 1006, 1039, 1007, 8852, 13430, 3385, 1010, 4775, 11338, 7974, 2319, 6683, 9581, 2840, 1013, 1013, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/**************************************************************************** ** ** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies). ** Contact: http://qt.digia.com/contact-us ** ** This file is part of the Enginio Qt Client Library. ** ** Commercial License Usage ** Licensees holding valid commercial Qt licenses may use this file in ** accordance with the commercial license agreement provided with the ** Software or, alternatively, in accordance with the terms contained in ** a written agreement between you and Digia. For licensing terms and ** conditions see http://qt.digia.com/licensing. For further information ** use the contact form at http://qt.digia.com/contact-us. ** ** GNU Lesser General Public License Usage ** Alternatively, this file may be used under the terms of the GNU Lesser ** General Public License version 2.1 as published by the Free Software ** Foundation and appearing in the file LICENSE.LGPL included in the ** packaging of this file. Please review the following information to ** ensure the GNU Lesser General Public License version 2.1 requirements ** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. ** ** In addition, as a special exception, Digia gives you certain additional ** rights. These rights are described in the Digia Qt LGPL Exception ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. ** ** GNU General Public License Usage ** Alternatively, this file may be used under the terms of the GNU ** General Public License version 3.0 as published by the Free Software ** Foundation and appearing in the file LICENSE.GPL included in the ** packaging of this file. Please review the following information to ** ensure the GNU General Public License version 3.0 requirements will be ** met: http://www.gnu.org/copyleft/gpl.html. 
** ****************************************************************************/ #include "enginioacl.h" #include "enginioplugin.h" #include "enginioqmlacloperation.h" #include "enginioqmlclient.h" #include "enginioqmlfileoperation.h" #include "enginioqmlidentityauthoperation.h" #include "enginioqmlobjectmodel.h" #include "enginioqmlobjectoperation.h" #include "enginioqmlqueryoperation.h" #include "enginioqmlusergroupoperation.h" #include "enginioerror.h" #include <qqml.h> /*! * \qmlmodule enginio-plugin * \title Enginio QML Plugin * * The Enginio QML plugin provides access to the Enginio service through a set of * QML types. */ /*! * \qmltype Error * \instantiates EnginioError * \inqmlmodule enginio-plugin * \brief Used to describe errors that occur during Enginio operations */ /*! * \qmlproperty enumeration Error::error * \list * \li Error.NoError - No errors. * \li Error.UnknownError - Something went wrong but we don't know what. * \li Error.NetworkError - Enginio service is unavailable or can't handle request. * \li Error.RequestError - Request or reply is invalid. * \li Error.InternalError - Enginio service is malfunctioning. * \endlist */ /*! * \qmltype Acl * \instantiates EnginioAcl * \inqmlmodule enginio-plugin * \brief Access control list for Enginio objects. * * Access control list contains a list of subjects and set of permissions * granted for each subject. * * Subjects can be: * \list * \li Users * \li Usergroups * \li Well known subjects like "everyone" * \endlist * * And possible permissions in ACL for objects are: * \list * \li "read" * \li "update" * \li "delete" * \li "admin" * \endlist * * Subjects are presented as objects with \c id and \c objectType and * permissions as Acl::Permission type enumerations. * * Well known subjects are constants which identify generic Users or Usergroups. * For example 'everyone' subject presents all Users (even anonymous) and its * value is \c {{ "id": "*", "objectType": "aclSubject" }}. */ /*! 
* \qmlproperty enumeration Acl::Permission * \list * \li Acl.ReadPermission - Permission to read object data * \li Acl.UpdatePermission - Permission to update object data * \li Acl.DeletePermission - Permission to delete object * \li Acl.AdminPermission - Permission to read, update and delete object and to * read and change object permissions * \endlist */ /*! * \qmlproperty object Acl::readPermissions * List of subjects that have "read" permission. */ /*! * \qmlproperty object Acl::updatePermissions * List of subjects that have "update" permission. */ /*! * \qmlproperty object Acl::deletePermissions * List of subjects that have "delete" permission. */ /*! * \qmlproperty object Acl::adminPermissions * List of subjects that have "admin" permission. */ QQmlEngine *g_qmlEngine = 0; void EnginioPlugin::initializeEngine(QQmlEngine *engine, const char *uri) { Q_UNUSED(uri); g_qmlEngine = engine; } void EnginioPlugin::registerTypes(const char *uri) { // @uri io.engin qmlRegisterType<EnginioQmlClient>(uri, 1, 0, "Client"); qmlRegisterType<EnginioQmlObjectModel>(uri, 1, 0, "ObjectModel"); qmlRegisterType<EnginioQmlObjectOperation>(uri, 1, 0, "ObjectOperation"); qmlRegisterType<EnginioQmlQueryOperation>(uri, 1, 0, "QueryOperation"); qmlRegisterType<EnginioQmlIdentityAuthOperation>(uri, 1, 0, "IdentityAuthOperation"); qmlRegisterType<EnginioQmlAclOperation>(uri, 1, 0, "AclOperation"); qmlRegisterType<EnginioQmlFileOperation>(uri, 1, 0, "FileOperation"); qmlRegisterType<EnginioQmlUsergroupOperation>(uri, 1, 0, "UsergroupOperation"); qmlRegisterType<EnginioError>(uri, 1, 0, "Error"); qmlRegisterType<EnginioAcl>(uri, 1, 0, "Acl"); }
inadsan/enginio-qt
src/enginio_plugin/enginioplugin.cpp
C++
gpl-3.0
5,607
[ 30522, 1013, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 305...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
Intermediate PHP & MySQL ======================== ![PHP](images/php.png "PHP") ![MySQL](images/mysql.png "MySQL") ![Symfony](images/symfony.png "Symfony") > This course is designed for students with a fundamental understanding of programming. > We will spend some time reviewing PHP basics, for students who are familiar with another language. > We will then move on to commonly used linguistic constructs, talk about the web, learn Object Oriented principles, apply some of the most commonly used design patterns, learn about the MySQL database server and learn Symfony. > We will then spend the rest of the class working on a project, which will ultimately become a part of our portfolio. Week 1 - [PHP Basics](syllabus/01 PHP Basics.md) [:notebook_with_decorative_cover:](syllabus/homework/01_count_types.md) Week 2 - [Functions, Arrays & Strings](syllabus/02 Strings Functions Arrays.md) [:notebook_with_decorative_cover:](syllabus/homework/02_card_game.md) Week 3 - [Web Programming](syllabus/03 Web Programming.md) [:notebook_with_decorative_cover:](syllabus/homework/03_countries_on_earth.md) Week 4 - [Object Oriented Programming](syllabus/04 Object Oriented Programming.md) [:notebook_with_decorative_cover:](syllabus/homework/04_OO_card_game.md) Week 5 - [Design Patterns](syllabus/05 Design Patterns.md) [:notebook_with_decorative_cover:](syllabus/homework/05_simon_says.md) Week 6 - [MySQL Fundamentals](syllabus/06 MySQL Fundamentals.md) Week 7 - [Introduction to Symfony](syllabus/07 Introduction to Symfony.md) Week 8 - [ACAShop - Capstone Project Kickoff](syllabus/08 ACAShop Capstone Project Kickoff.md) Week 9 - In class coding and project completion Week 10 - Continue in class coding for ACAShop, Student Q&A, the state of PHP and the job market. #### Required Software Here are some applications you will need installed. 
- [VirtualBox](https://www.virtualbox.org/) - Create and run a virtual development environment - [Vagrant](https://www.vagrantup.com/) - Provision a virtual machine - [ansible](http://docs.ansible.com/intro_installation.html) - Configure the VM - [PHPStorm](https://www.jetbrains.com/phpstorm/download/) - State of the art PHP IDE (we will be providing everyone student licenses) - [git](http://git-scm.com/) - Version control system - [SourceTree](http://www.sourcetreeapp.com/) - Free git GUI client #### Developer Environment #### Virtual Machine We have created a seperate repository that contains instructions on how to setup and configure your VM. Clone [VirtualMachines](https://github.com/AustinCodingAcademy/VirtualMachines) and follow the instructions. *Note: We will host workshops, prior to class, to help students setup their machines.* #### Book [The Symfony Book](http://symfony.com/doc/current/book/index.html) - The Symfony bible, written and maintained by the core team #### Reference - [Helpful Links](Links.md) - [git Commands](GitCommands.md) *** `Instructor`: [Samir Patel](http://samirpatel.me) `Phone`: (512) 745-7846 `Email`: samir at austincodingacademy dot com `Office Hours`: 30 minutes after class
wes596/PHPIntermediate
README.md
Markdown
gpl-2.0
3,113
[ 30522, 7783, 25718, 1004, 2026, 2015, 4160, 2140, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 1027, 999, 1031, 25718, 1033, 1006, 4871, 1013, 25...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<?php class Neostrada { const API_HOST = 'https://api.neostrada.nl/'; private $_key; private $_secret; public function __construct($key, $secret) { $this->_key = $key; $this->_secret = $secret; } public function domain($domain) { return new Neostrada_Domain($this, $domain); } public function save(Neostrada_Domain $domain) { $data = []; foreach ($domain->getRecords() as $record) { $data[$record->neostradaDnsId] = $record->toNeostradaFormat(); } $this->request($domain, 'dns', [ 'dnsdata' => serialize($data), ]); return $this; } public function request(Neostrada_Domain $domain, $action, array $rawParams = []) { $params = [ 'domain' => $domain->getName(), 'extension' => $domain->getExtension(), ] + $rawParams; $params['api_sig'] = $this->_calculateSignature($action, $params); $params['action'] = $action; $params['api_key'] = $this->_key; $url = self::API_HOST . '?' . http_build_query($params, '', '&'); $c = curl_init(); if ($c === false) { throw new \RuntimeException('Could not initialize cURL'); } curl_setopt($c, CURLOPT_SSL_VERIFYPEER, 0); curl_setopt($c, CURLOPT_SSL_VERIFYHOST, 0); curl_setopt($c, CURLOPT_URL, $url); curl_setopt($c, CURLOPT_HEADER, 0); curl_setopt($c, CURLOPT_RETURNTRANSFER, 1); $rawData = curl_exec($c); if ($rawData === false) { throw new \RuntimeException('Could not complete cURL request: ' . curl_error($c)); } curl_close($c); $oldUseErrors = libxml_use_internal_errors(true); $xml = simplexml_load_string($rawData); if ($xml === false) { $message = libxml_get_errors()[0]->message; libxml_use_internal_errors($oldUseErrors); throw new \RuntimeException('Invalid XML: ' . $message); } libxml_use_internal_errors($oldUseErrors); $this->_validateResponse($xml); return $xml; } private function _validateResponse(SimpleXMLElement $xml) { if ((string) $xml->code !== '200') { throw new \UnexpectedValueException('Request failed [' . $xml->code . ']: ' . 
$xml->description); } } private function _calculateSignature($action, array $params = []) { $signature = $this->_secret . $this->_key . 'action' . $action; foreach ($params as $key => $value) { $signature .= $key . $value; } return md5($signature); } }
justim/neostrada-api-client
src/Neostrada.php
PHP
mit
2,315
[ 30522, 1026, 1029, 25718, 2465, 9253, 20528, 2850, 1063, 9530, 3367, 17928, 1035, 3677, 1027, 1005, 16770, 1024, 1013, 1013, 17928, 1012, 9253, 20528, 2850, 1012, 17953, 1013, 1005, 1025, 2797, 1002, 1035, 3145, 1025, 2797, 1002, 1035, 3595...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.client.documentation; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.MachineLearningGetResultsIT; import org.elasticsearch.client.MachineLearningIT; import org.elasticsearch.client.MlTestStateCleaner; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.ml.CloseJobRequest; import org.elasticsearch.client.ml.CloseJobResponse; import org.elasticsearch.client.ml.DeleteCalendarEventRequest; import org.elasticsearch.client.ml.DeleteCalendarJobRequest; import org.elasticsearch.client.ml.DeleteCalendarRequest; import 
org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest; import org.elasticsearch.client.ml.DeleteDatafeedRequest; import org.elasticsearch.client.ml.DeleteExpiredDataRequest; import org.elasticsearch.client.ml.DeleteExpiredDataResponse; import org.elasticsearch.client.ml.DeleteFilterRequest; import org.elasticsearch.client.ml.DeleteForecastRequest; import org.elasticsearch.client.ml.DeleteJobRequest; import org.elasticsearch.client.ml.DeleteJobResponse; import org.elasticsearch.client.ml.DeleteModelSnapshotRequest; import org.elasticsearch.client.ml.DeleteTrainedModelRequest; import org.elasticsearch.client.ml.EstimateModelMemoryRequest; import org.elasticsearch.client.ml.EstimateModelMemoryResponse; import org.elasticsearch.client.ml.EvaluateDataFrameRequest; import org.elasticsearch.client.ml.EvaluateDataFrameResponse; import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsRequest; import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsResponse; import org.elasticsearch.client.ml.FindFileStructureRequest; import org.elasticsearch.client.ml.FindFileStructureResponse; import org.elasticsearch.client.ml.FlushJobRequest; import org.elasticsearch.client.ml.FlushJobResponse; import org.elasticsearch.client.ml.ForecastJobRequest; import org.elasticsearch.client.ml.ForecastJobResponse; import org.elasticsearch.client.ml.GetBucketsRequest; import org.elasticsearch.client.ml.GetBucketsResponse; import org.elasticsearch.client.ml.GetCalendarEventsRequest; import org.elasticsearch.client.ml.GetCalendarEventsResponse; import org.elasticsearch.client.ml.GetCalendarsRequest; import org.elasticsearch.client.ml.GetCalendarsResponse; import org.elasticsearch.client.ml.GetCategoriesRequest; import org.elasticsearch.client.ml.GetCategoriesResponse; import org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest; import org.elasticsearch.client.ml.GetDataFrameAnalyticsResponse; import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsRequest; import 
org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsResponse; import org.elasticsearch.client.ml.GetDatafeedRequest; import org.elasticsearch.client.ml.GetDatafeedResponse; import org.elasticsearch.client.ml.GetDatafeedStatsRequest; import org.elasticsearch.client.ml.GetDatafeedStatsResponse; import org.elasticsearch.client.ml.GetFiltersRequest; import org.elasticsearch.client.ml.GetFiltersResponse; import org.elasticsearch.client.ml.GetInfluencersRequest; import org.elasticsearch.client.ml.GetInfluencersResponse; import org.elasticsearch.client.ml.GetJobRequest; import org.elasticsearch.client.ml.GetJobResponse; import org.elasticsearch.client.ml.GetJobStatsRequest; import org.elasticsearch.client.ml.GetJobStatsResponse; import org.elasticsearch.client.ml.GetModelSnapshotsRequest; import org.elasticsearch.client.ml.GetModelSnapshotsResponse; import org.elasticsearch.client.ml.GetOverallBucketsRequest; import org.elasticsearch.client.ml.GetOverallBucketsResponse; import org.elasticsearch.client.ml.GetRecordsRequest; import org.elasticsearch.client.ml.GetRecordsResponse; import org.elasticsearch.client.ml.GetTrainedModelsRequest; import org.elasticsearch.client.ml.GetTrainedModelsResponse; import org.elasticsearch.client.ml.GetTrainedModelsStatsRequest; import org.elasticsearch.client.ml.GetTrainedModelsStatsResponse; import org.elasticsearch.client.ml.MlInfoRequest; import org.elasticsearch.client.ml.MlInfoResponse; import org.elasticsearch.client.ml.OpenJobRequest; import org.elasticsearch.client.ml.OpenJobResponse; import org.elasticsearch.client.ml.PostCalendarEventRequest; import org.elasticsearch.client.ml.PostCalendarEventResponse; import org.elasticsearch.client.ml.PostDataRequest; import org.elasticsearch.client.ml.PostDataResponse; import org.elasticsearch.client.ml.PreviewDatafeedRequest; import org.elasticsearch.client.ml.PreviewDatafeedResponse; import org.elasticsearch.client.ml.PutCalendarJobRequest; import 
org.elasticsearch.client.ml.PutCalendarRequest; import org.elasticsearch.client.ml.PutCalendarResponse; import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest; import org.elasticsearch.client.ml.PutDataFrameAnalyticsResponse; import org.elasticsearch.client.ml.PutDatafeedRequest; import org.elasticsearch.client.ml.PutDatafeedResponse; import org.elasticsearch.client.ml.PutFilterRequest; import org.elasticsearch.client.ml.PutFilterResponse; import org.elasticsearch.client.ml.PutJobRequest; import org.elasticsearch.client.ml.PutJobResponse; import org.elasticsearch.client.ml.PutTrainedModelRequest; import org.elasticsearch.client.ml.PutTrainedModelResponse; import org.elasticsearch.client.ml.RevertModelSnapshotRequest; import org.elasticsearch.client.ml.RevertModelSnapshotResponse; import org.elasticsearch.client.ml.SetUpgradeModeRequest; import org.elasticsearch.client.ml.StartDataFrameAnalyticsRequest; import org.elasticsearch.client.ml.StartDataFrameAnalyticsResponse; import org.elasticsearch.client.ml.StartDatafeedRequest; import org.elasticsearch.client.ml.StartDatafeedResponse; import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest; import org.elasticsearch.client.ml.StopDataFrameAnalyticsResponse; import org.elasticsearch.client.ml.StopDatafeedRequest; import org.elasticsearch.client.ml.StopDatafeedResponse; import org.elasticsearch.client.ml.UpdateDatafeedRequest; import org.elasticsearch.client.ml.UpdateFilterRequest; import org.elasticsearch.client.ml.UpdateJobRequest; import org.elasticsearch.client.ml.UpdateModelSnapshotRequest; import org.elasticsearch.client.ml.UpdateModelSnapshotResponse; import org.elasticsearch.client.ml.calendars.Calendar; import org.elasticsearch.client.ml.calendars.ScheduledEvent; import org.elasticsearch.client.ml.calendars.ScheduledEventTests; import org.elasticsearch.client.ml.datafeed.ChunkingConfig; import org.elasticsearch.client.ml.datafeed.DatafeedConfig; import 
org.elasticsearch.client.ml.datafeed.DatafeedStats; import org.elasticsearch.client.ml.datafeed.DatafeedUpdate; import org.elasticsearch.client.ml.datafeed.DelayedDataCheckConfig; import org.elasticsearch.client.ml.dataframe.Classification; import org.elasticsearch.client.ml.dataframe.DataFrameAnalysis; import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsDest; import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsSource; import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsState; import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsStats; import org.elasticsearch.client.ml.dataframe.OutlierDetection; import org.elasticsearch.client.ml.dataframe.QueryConfig; import org.elasticsearch.client.ml.dataframe.Regression; import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation; import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; import org.elasticsearch.client.ml.dataframe.evaluation.classification.AccuracyMetric; import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric; import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric.ActualClass; import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric.PredictedClass; import org.elasticsearch.client.ml.dataframe.evaluation.regression.MeanSquaredErrorMetric; import org.elasticsearch.client.ml.dataframe.evaluation.regression.RSquaredMetric; import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.AucRocMetric; import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.BinarySoftClassification; import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric; import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric.ConfusionMatrix; import 
org.elasticsearch.client.ml.dataframe.evaluation.softclassification.PrecisionMetric; import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.RecallMetric; import org.elasticsearch.client.ml.dataframe.explain.FieldSelection; import org.elasticsearch.client.ml.dataframe.explain.MemoryEstimation; import org.elasticsearch.client.ml.filestructurefinder.FileStructure; import org.elasticsearch.client.ml.inference.InferenceToXContentCompressor; import org.elasticsearch.client.ml.inference.MlInferenceNamedXContentProvider; import org.elasticsearch.client.ml.inference.TrainedModelConfig; import org.elasticsearch.client.ml.inference.TrainedModelDefinition; import org.elasticsearch.client.ml.inference.TrainedModelDefinitionTests; import org.elasticsearch.client.ml.inference.TrainedModelInput; import org.elasticsearch.client.ml.inference.TrainedModelStats; import org.elasticsearch.client.ml.inference.trainedmodel.RegressionConfig; import org.elasticsearch.client.ml.inference.trainedmodel.TargetType; import org.elasticsearch.client.ml.job.config.AnalysisConfig; import org.elasticsearch.client.ml.job.config.AnalysisLimits; import org.elasticsearch.client.ml.job.config.DataDescription; import org.elasticsearch.client.ml.job.config.DetectionRule; import org.elasticsearch.client.ml.job.config.Detector; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.client.ml.job.config.JobUpdate; import org.elasticsearch.client.ml.job.config.MlFilter; import org.elasticsearch.client.ml.job.config.ModelPlotConfig; import org.elasticsearch.client.ml.job.config.Operator; import org.elasticsearch.client.ml.job.config.RuleCondition; import org.elasticsearch.client.ml.job.process.DataCounts; import org.elasticsearch.client.ml.job.process.ModelSnapshot; import org.elasticsearch.client.ml.job.results.AnomalyRecord; import org.elasticsearch.client.ml.job.results.Bucket; import org.elasticsearch.client.ml.job.results.CategoryDefinition; import 
org.elasticsearch.client.ml.job.results.Influencer; import org.elasticsearch.client.ml.job.results.OverallBucket; import org.elasticsearch.client.ml.job.stats.JobStats; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.tasks.TaskId; import org.junit.After; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.core.Is.is; public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { @After public void cleanUp() throws IOException { new MlTestStateCleaner(logger, highLevelClient().machineLearning()).clearMlMetadata(); } public 
void testCreateJob() throws Exception {
    RestHighLevelClient client = highLevelClient();
    // tag::put-job-detector
    Detector.Builder detectorBuilder = new Detector.Builder()
        .setFunction("sum")                                    // <1>
        .setFieldName("total")                                 // <2>
        .setDetectorDescription("Sum of total");               // <3>
    // end::put-job-detector
    // tag::put-job-analysis-config
    List<Detector> detectors = Collections.singletonList(detectorBuilder.build()); // <1>
    AnalysisConfig.Builder analysisConfigBuilder = new AnalysisConfig.Builder(detectors) // <2>
        .setBucketSpan(TimeValue.timeValueMinutes(10));        // <3>
    // end::put-job-analysis-config
    // tag::put-job-data-description
    DataDescription.Builder dataDescriptionBuilder = new DataDescription.Builder()
        .setTimeField("timestamp");                            // <1>
    // end::put-job-data-description
    {
        String id = "job_1";
        // tag::put-job-config
        Job.Builder jobBuilder = new Job.Builder(id)           // <1>
            .setAnalysisConfig(analysisConfigBuilder)          // <2>
            .setDataDescription(dataDescriptionBuilder)        // <3>
            .setDescription("Total sum of requests");          // <4>
        // end::put-job-config
        // tag::put-job-request
        PutJobRequest request = new PutJobRequest(jobBuilder.build()); // <1>
        // end::put-job-request
        // tag::put-job-execute
        PutJobResponse response = client.machineLearning().putJob(request, RequestOptions.DEFAULT);
        // end::put-job-execute
        // tag::put-job-response
        Date createTime = response.getResponse().getCreateTime(); // <1>
        // end::put-job-response
        assertThat(createTime.getTime(), greaterThan(0L));
    }
    {
        // Second job is only used to demonstrate the async variant below.
        String id = "job_2";
        Job.Builder jobBuilder = new Job.Builder(id)
            .setAnalysisConfig(analysisConfigBuilder)
            .setDataDescription(dataDescriptionBuilder)
            .setDescription("Total sum of requests");
        PutJobRequest request = new PutJobRequest(jobBuilder.build());
        // tag::put-job-execute-listener
        ActionListener<PutJobResponse> listener = new ActionListener<PutJobResponse>() {
            @Override
            public void onResponse(PutJobResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::put-job-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::put-job-execute-async
        client.machineLearning().putJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::put-job-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// Doc-snippet test for the Get Job API (tag::/end:: regions are presumably
// extracted into the HLRC reference docs -- verify against the docs build).
public void testGetJob() throws Exception {
    RestHighLevelClient client = highLevelClient();
    Job job = MachineLearningIT.buildJob("get-machine-learning-job1");
    client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
    Job secondJob = MachineLearningIT.buildJob("get-machine-learning-job2");
    client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
    {
        // tag::get-job-request
        GetJobRequest request = new GetJobRequest("get-machine-learning-job1", "get-machine-learning-job*"); // <1>
        request.setAllowNoJobs(true); // <2>
        // end::get-job-request
        // tag::get-job-execute
        GetJobResponse response = client.machineLearning().getJob(request, RequestOptions.DEFAULT);
        // end::get-job-execute
        // tag::get-job-response
        long numberOfJobs = response.count(); // <1>
        List<Job> jobs = response.jobs(); // <2>
        // end::get-job-response
        assertEquals(2, response.count());
        assertThat(response.jobs(), hasSize(2));
        assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()),
            containsInAnyOrder(job.getId(), secondJob.getId()));
    }
    {
        GetJobRequest request = new GetJobRequest("get-machine-learning-job1", "get-machine-learning-job*");
        // tag::get-job-execute-listener
        ActionListener<GetJobResponse> listener = new ActionListener<GetJobResponse>() {
            @Override
            public void onResponse(GetJobResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::get-job-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new
LatchedActionListener<>(listener, latch);
        // tag::get-job-execute-async
        client.machineLearning().getJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::get-job-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// Doc-snippet test for the Delete Job API (sync, force/wait options, and async variants).
public void testDeleteJob() throws Exception {
    RestHighLevelClient client = highLevelClient();
    String jobId = "my-first-machine-learning-job";
    Job job = MachineLearningIT.buildJob(jobId);
    client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
    Job secondJob = MachineLearningIT.buildJob("my-second-machine-learning-job");
    client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
    {
        //tag::delete-job-request
        DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-first-machine-learning-job"); // <1>
        //end::delete-job-request
        //tag::delete-job-request-force
        deleteJobRequest.setForce(false); // <1>
        //end::delete-job-request-force
        //tag::delete-job-request-wait-for-completion
        deleteJobRequest.setWaitForCompletion(true); // <1>
        //end::delete-job-request-wait-for-completion
        //tag::delete-job-execute
        DeleteJobResponse deleteJobResponse = client.machineLearning().deleteJob(deleteJobRequest, RequestOptions.DEFAULT);
        //end::delete-job-execute
        //tag::delete-job-response
        Boolean isAcknowledged = deleteJobResponse.getAcknowledged(); // <1>
        TaskId task = deleteJobResponse.getTask(); // <2>
        //end::delete-job-response
        // With wait-for-completion the call is acknowledged and no task id is returned.
        assertTrue(isAcknowledged);
        assertNull(task);
    }
    {
        //tag::delete-job-execute-listener
        ActionListener<DeleteJobResponse> listener = new ActionListener<DeleteJobResponse>() {
            @Override
            public void onResponse(DeleteJobResponse deleteJobResponse) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::delete-job-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-second-machine-learning-job");
        // tag::delete-job-execute-async
        client.machineLearning().deleteJobAsync(deleteJobRequest, RequestOptions.DEFAULT, listener); // <1>
        // end::delete-job-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// Doc-snippet test for the Open Job API.
public void testOpenJob() throws Exception {
    RestHighLevelClient client = highLevelClient();
    Job job = MachineLearningIT.buildJob("opening-my-first-machine-learning-job");
    client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
    Job secondJob = MachineLearningIT.buildJob("opening-my-second-machine-learning-job");
    client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
    {
        // tag::open-job-request
        OpenJobRequest openJobRequest = new OpenJobRequest("opening-my-first-machine-learning-job"); // <1>
        openJobRequest.setTimeout(TimeValue.timeValueMinutes(10)); // <2>
        // end::open-job-request
        // tag::open-job-execute
        OpenJobResponse openJobResponse = client.machineLearning().openJob(openJobRequest, RequestOptions.DEFAULT);
        // end::open-job-execute
        // tag::open-job-response
        boolean isOpened = openJobResponse.isOpened(); // <1>
        String node = openJobResponse.getNode(); // <2>
        // end::open-job-response
        assertThat(node, notNullValue());
    }
    {
        // tag::open-job-execute-listener
        ActionListener<OpenJobResponse> listener = new ActionListener<OpenJobResponse>() {
            @Override
            public void onResponse(OpenJobResponse openJobResponse) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::open-job-execute-listener
        OpenJobRequest openJobRequest = new OpenJobRequest("opening-my-second-machine-learning-job");
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::open-job-execute-async
        client.machineLearning().openJobAsync(openJobRequest, RequestOptions.DEFAULT, listener); // <1>
        // end::open-job-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// Doc-snippet test for the Close Job API.
public void testCloseJob() throws Exception {
    RestHighLevelClient client = highLevelClient();
    {
        Job job = MachineLearningIT.buildJob("closing-my-first-machine-learning-job");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
        // tag::close-job-request
        CloseJobRequest closeJobRequest = new CloseJobRequest("closing-my-first-machine-learning-job", "otherjobs*"); // <1>
        closeJobRequest.setForce(false); // <2>
        closeJobRequest.setAllowNoJobs(true); // <3>
        closeJobRequest.setTimeout(TimeValue.timeValueMinutes(10)); // <4>
        // end::close-job-request
        // tag::close-job-execute
        CloseJobResponse closeJobResponse = client.machineLearning().closeJob(closeJobRequest, RequestOptions.DEFAULT);
        // end::close-job-execute
        // tag::close-job-response
        boolean isClosed = closeJobResponse.isClosed(); // <1>
        // end::close-job-response
    }
    {
        Job job = MachineLearningIT.buildJob("closing-my-second-machine-learning-job");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
        // tag::close-job-execute-listener
        ActionListener<CloseJobResponse> listener = new ActionListener<CloseJobResponse>() {
            @Override
            public void onResponse(CloseJobResponse closeJobResponse) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::close-job-execute-listener
        CloseJobRequest closeJobRequest = new CloseJobRequest("closing-my-second-machine-learning-job");
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::close-job-execute-async
        client.machineLearning().closeJobAsync(closeJobRequest, RequestOptions.DEFAULT, listener); // <1>
        // end::close-job-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// Doc-snippet test for the Update Job API, covering detector updates and all
// top-level job update options.
public void testUpdateJob() throws Exception {
    RestHighLevelClient client = highLevelClient();
    String jobId = "test-update-job";
    Job tempJob = MachineLearningIT.buildJob(jobId);
    Job job = new Job.Builder(tempJob)
        .setAnalysisConfig(new AnalysisConfig.Builder(tempJob.getAnalysisConfig())
            .setCategorizationFieldName("categorization-field")
            .setDetector(0, new Detector.Builder().setFieldName("total")
                .setFunction("sum")
                .setPartitionFieldName("mlcategory")
                .setDetectorDescription(randomAlphaOfLength(10))
                .build()))
        .build();
    client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
    {
        List<DetectionRule> detectionRules = Arrays.asList(
            new DetectionRule.Builder(Arrays.asList(RuleCondition.createTime(Operator.GT, 100L))).build());
        Map<String, Object> customSettings = new HashMap<>();
        customSettings.put("custom-setting-1", "custom-value");
        // tag::update-job-detector-options
        JobUpdate.DetectorUpdate detectorUpdate = new JobUpdate.DetectorUpdate(0, // <1>
            "detector description", // <2>
            detectionRules); // <3>
        // end::update-job-detector-options
        // tag::update-job-options
        JobUpdate update = new JobUpdate.Builder(jobId) // <1>
            .setDescription("My description") // <2>
            .setAnalysisLimits(new AnalysisLimits(1000L, null)) // <3>
            .setBackgroundPersistInterval(TimeValue.timeValueHours(3)) // <4>
            .setCategorizationFilters(Arrays.asList("categorization-filter")) // <5>
            .setDetectorUpdates(Arrays.asList(detectorUpdate)) // <6>
            .setGroups(Arrays.asList("job-group-1")) // <7>
            .setResultsRetentionDays(10L) // <8>
            .setModelPlotConfig(new ModelPlotConfig(true, null, true)) // <9>
            .setModelSnapshotRetentionDays(7L) // <10>
            .setCustomSettings(customSettings) // <11>
            .setRenormalizationWindowDays(3L) // <12>
            .build();
        // end::update-job-options
        // tag::update-job-request
        UpdateJobRequest updateJobRequest = new UpdateJobRequest(update); // <1>
        // end::update-job-request
        // tag::update-job-execute
        PutJobResponse updateJobResponse =
client.machineLearning().updateJob(updateJobRequest, RequestOptions.DEFAULT);
        // end::update-job-execute
        // tag::update-job-response
        Job updatedJob = updateJobResponse.getResponse(); // <1>
        // end::update-job-response
        assertEquals(update.getDescription(), updatedJob.getDescription());
    }
    {
        // tag::update-job-execute-listener
        ActionListener<PutJobResponse> listener = new ActionListener<PutJobResponse>() {
            @Override
            public void onResponse(PutJobResponse updateJobResponse) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::update-job-execute-listener
        UpdateJobRequest updateJobRequest = new UpdateJobRequest(new JobUpdate.Builder(jobId).build());
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::update-job-execute-async
        client.machineLearning().updateJobAsync(updateJobRequest, RequestOptions.DEFAULT, listener); // <1>
        // end::update-job-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// Doc-snippet test for the Put Datafeed API and each of its configuration setters.
public void testPutDatafeed() throws Exception {
    RestHighLevelClient client = highLevelClient();
    {
        // We need to create a job for the datafeed request to be valid
        String jobId = "put-datafeed-job-1";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        String id = "datafeed-1";
        // tag::put-datafeed-config
        DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder(id, jobId) // <1>
            .setIndices("index_1", "index_2");  // <2>
        // end::put-datafeed-config
        AggregatorFactories.Builder aggs = AggregatorFactories.builder();
        // tag::put-datafeed-config-set-aggregations
        datafeedBuilder.setAggregations(aggs); // <1>
        // end::put-datafeed-config-set-aggregations
        // Clearing aggregation to avoid complex validation rules
        datafeedBuilder.setAggregations((String) null);
        // tag::put-datafeed-config-set-chunking-config
        datafeedBuilder.setChunkingConfig(ChunkingConfig.newAuto()); // <1>
        // end::put-datafeed-config-set-chunking-config
        // tag::put-datafeed-config-set-frequency
        datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(30)); // <1>
        // end::put-datafeed-config-set-frequency
        // tag::put-datafeed-config-set-query
        datafeedBuilder.setQuery(QueryBuilders.matchAllQuery()); // <1>
        // end::put-datafeed-config-set-query
        // tag::put-datafeed-config-set-query-delay
        datafeedBuilder.setQueryDelay(TimeValue.timeValueMinutes(1)); // <1>
        // end::put-datafeed-config-set-query-delay
        // tag::put-datafeed-config-set-delayed-data-check-config
        datafeedBuilder.setDelayedDataCheckConfig(DelayedDataCheckConfig
            .enabledDelayedDataCheckConfig(TimeValue.timeValueHours(1))); // <1>
        // end::put-datafeed-config-set-delayed-data-check-config
        // no need to accidentally trip internal validations due to job bucket size
        datafeedBuilder.setDelayedDataCheckConfig(null);
        List<SearchSourceBuilder.ScriptField> scriptFields = Collections.emptyList();
        // tag::put-datafeed-config-set-script-fields
        datafeedBuilder.setScriptFields(scriptFields); // <1>
        // end::put-datafeed-config-set-script-fields
        // tag::put-datafeed-config-set-scroll-size
        datafeedBuilder.setScrollSize(1000); // <1>
        // end::put-datafeed-config-set-scroll-size
        // tag::put-datafeed-request
        PutDatafeedRequest request = new PutDatafeedRequest(datafeedBuilder.build()); // <1>
        // end::put-datafeed-request
        // tag::put-datafeed-execute
        PutDatafeedResponse response = client.machineLearning().putDatafeed(request, RequestOptions.DEFAULT);
        // end::put-datafeed-execute
        // tag::put-datafeed-response
        DatafeedConfig datafeed = response.getResponse(); // <1>
        // end::put-datafeed-response
        assertThat(datafeed.getId(), equalTo("datafeed-1"));
    }
    {
        // We need to create a job for the datafeed request to be valid
        String jobId = "put-datafeed-job-2";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job),
RequestOptions.DEFAULT);
        String id = "datafeed-2";
        DatafeedConfig datafeed = new DatafeedConfig.Builder(id, jobId).setIndices("index_1", "index_2").build();
        PutDatafeedRequest request = new PutDatafeedRequest(datafeed);
        // tag::put-datafeed-execute-listener
        ActionListener<PutDatafeedResponse> listener = new ActionListener<PutDatafeedResponse>() {
            @Override
            public void onResponse(PutDatafeedResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::put-datafeed-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::put-datafeed-execute-async
        client.machineLearning().putDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::put-datafeed-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// Doc-snippet test for the Update Datafeed API.
public void testUpdateDatafeed() throws Exception {
    RestHighLevelClient client = highLevelClient();
    Job job = MachineLearningIT.buildJob("update-datafeed-job");
    client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
    String datafeedId = job.getId() + "-feed";
    DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId()).setIndices("foo").build();
    client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
    {
        AggregatorFactories.Builder aggs = AggregatorFactories.builder();
        List<SearchSourceBuilder.ScriptField> scriptFields = Collections.emptyList();
        // tag::update-datafeed-config
        DatafeedUpdate.Builder datafeedUpdateBuilder = new DatafeedUpdate.Builder(datafeedId) // <1>
            .setAggregations(aggs) // <2>
            .setIndices("index_1", "index_2") // <3>
            .setChunkingConfig(ChunkingConfig.newAuto()) // <4>
            .setFrequency(TimeValue.timeValueSeconds(30)) // <5>
            .setQuery(QueryBuilders.matchAllQuery()) // <6>
            .setQueryDelay(TimeValue.timeValueMinutes(1)) // <7>
            .setScriptFields(scriptFields) // <8>
            .setScrollSize(1000); // <9>
        // end::update-datafeed-config
        // Clearing aggregation to avoid complex validation rules
        datafeedUpdateBuilder.setAggregations((String) null);
        // tag::update-datafeed-request
        UpdateDatafeedRequest request = new UpdateDatafeedRequest(datafeedUpdateBuilder.build()); // <1>
        // end::update-datafeed-request
        // tag::update-datafeed-execute
        PutDatafeedResponse response = client.machineLearning().updateDatafeed(request, RequestOptions.DEFAULT);
        // end::update-datafeed-execute
        // tag::update-datafeed-response
        DatafeedConfig updatedDatafeed = response.getResponse(); // <1>
        // end::update-datafeed-response
        assertThat(updatedDatafeed.getId(), equalTo(datafeedId));
    }
    {
        DatafeedUpdate datafeedUpdate = new DatafeedUpdate.Builder(datafeedId).setIndices("index_1", "index_2").build();
        UpdateDatafeedRequest request = new UpdateDatafeedRequest(datafeedUpdate);
        // tag::update-datafeed-execute-listener
        ActionListener<PutDatafeedResponse> listener = new ActionListener<PutDatafeedResponse>() {
            @Override
            public void onResponse(PutDatafeedResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::update-datafeed-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::update-datafeed-execute-async
        client.machineLearning().updateDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::update-datafeed-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// Doc-snippet test for the Get Datafeed API.
public void testGetDatafeed() throws Exception {
    RestHighLevelClient client = highLevelClient();
    Job job = MachineLearningIT.buildJob("get-datafeed-job");
    client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
    String datafeedId = job.getId() + "-feed";
    DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId()).setIndices("foo").build();
    client.machineLearning().putDatafeed(new
PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
    {
        // tag::get-datafeed-request
        GetDatafeedRequest request = new GetDatafeedRequest(datafeedId); // <1>
        request.setAllowNoDatafeeds(true); // <2>
        // end::get-datafeed-request
        // tag::get-datafeed-execute
        GetDatafeedResponse response = client.machineLearning().getDatafeed(request, RequestOptions.DEFAULT);
        // end::get-datafeed-execute
        // tag::get-datafeed-response
        long numberOfDatafeeds = response.count(); // <1>
        List<DatafeedConfig> datafeeds = response.datafeeds(); // <2>
        // end::get-datafeed-response
        assertEquals(1, numberOfDatafeeds);
        assertEquals(1, datafeeds.size());
    }
    {
        GetDatafeedRequest request = new GetDatafeedRequest(datafeedId);
        // tag::get-datafeed-execute-listener
        ActionListener<GetDatafeedResponse> listener = new ActionListener<GetDatafeedResponse>() {
            @Override
            public void onResponse(GetDatafeedResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::get-datafeed-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::get-datafeed-execute-async
        client.machineLearning().getDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::get-datafeed-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// Doc-snippet test for the Delete Datafeed API; the datafeed is re-created
// between the sync and async demonstrations so both deletions succeed.
public void testDeleteDatafeed() throws Exception {
    RestHighLevelClient client = highLevelClient();
    String jobId = "test-delete-datafeed-job";
    Job job = MachineLearningIT.buildJob(jobId);
    client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
    String datafeedId = "test-delete-datafeed";
    DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId).setIndices("foo").build();
    client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
    {
        // tag::delete-datafeed-request
        DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId);
        deleteDatafeedRequest.setForce(false); // <1>
        // end::delete-datafeed-request
        // tag::delete-datafeed-execute
        AcknowledgedResponse deleteDatafeedResponse = client.machineLearning().deleteDatafeed(
            deleteDatafeedRequest, RequestOptions.DEFAULT);
        // end::delete-datafeed-execute
        // tag::delete-datafeed-response
        boolean isAcknowledged = deleteDatafeedResponse.isAcknowledged(); // <1>
        // end::delete-datafeed-response
    }
    // Recreate datafeed to allow second deletion
    client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
    {
        // tag::delete-datafeed-execute-listener
        ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
            @Override
            public void onResponse(AcknowledgedResponse acknowledgedResponse) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::delete-datafeed-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId);
        // tag::delete-datafeed-execute-async
        client.machineLearning().deleteDatafeedAsync(deleteDatafeedRequest, RequestOptions.DEFAULT, listener); // <1>
        // end::delete-datafeed-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// Doc-snippet test for the Preview Datafeed API.
public void testPreviewDatafeed() throws Exception {
    RestHighLevelClient client = highLevelClient();
    Job job = MachineLearningIT.buildJob("preview-datafeed-job");
    client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
    String datafeedId = job.getId() + "-feed";
    String indexName = "preview_data_2";
    createIndex(indexName);
    DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId())
        .setIndices(indexName)
        .build();
    client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
    {
        // tag::preview-datafeed-request
PreviewDatafeedRequest request = new PreviewDatafeedRequest(datafeedId); // <1>
        // end::preview-datafeed-request
        // tag::preview-datafeed-execute
        PreviewDatafeedResponse response = client.machineLearning().previewDatafeed(request, RequestOptions.DEFAULT);
        // end::preview-datafeed-execute
        // tag::preview-datafeed-response
        BytesReference rawPreview = response.getPreview(); // <1>
        List<Map<String, Object>> semiParsedPreview = response.getDataList(); // <2>
        // end::preview-datafeed-response
        // The backing index was just created and holds no documents.
        assertTrue(semiParsedPreview.isEmpty());
    }
    {
        PreviewDatafeedRequest request = new PreviewDatafeedRequest(datafeedId);
        // tag::preview-datafeed-execute-listener
        ActionListener<PreviewDatafeedResponse> listener = new ActionListener<PreviewDatafeedResponse>() {
            @Override
            public void onResponse(PreviewDatafeedResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::preview-datafeed-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::preview-datafeed-execute-async
        client.machineLearning().previewDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::preview-datafeed-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// Doc-snippet test for the Start Datafeed API.
public void testStartDatafeed() throws Exception {
    RestHighLevelClient client = highLevelClient();
    Job job = MachineLearningIT.buildJob("start-datafeed-job");
    client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
    String datafeedId = job.getId() + "-feed";
    String indexName = "start_data_2";
    createIndex(indexName);
    DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId())
        .setIndices(indexName)
        .build();
    client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
    client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
    {
        // tag::start-datafeed-request
        StartDatafeedRequest request = new StartDatafeedRequest(datafeedId); // <1>
        // end::start-datafeed-request
        // tag::start-datafeed-request-options
        request.setEnd("2018-08-21T00:00:00Z"); // <1>
        request.setStart("2018-08-20T00:00:00Z"); // <2>
        request.setTimeout(TimeValue.timeValueMinutes(10)); // <3>
        // end::start-datafeed-request-options
        // tag::start-datafeed-execute
        StartDatafeedResponse response = client.machineLearning().startDatafeed(request, RequestOptions.DEFAULT);
        // end::start-datafeed-execute
        // tag::start-datafeed-response
        boolean started = response.isStarted(); // <1>
        String node = response.getNode(); // <2>
        // end::start-datafeed-response
        assertTrue(started);
        assertThat(node, notNullValue());
    }
    {
        StartDatafeedRequest request = new StartDatafeedRequest(datafeedId);
        // tag::start-datafeed-execute-listener
        ActionListener<StartDatafeedResponse> listener = new ActionListener<StartDatafeedResponse>() {
            @Override
            public void onResponse(StartDatafeedResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::start-datafeed-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::start-datafeed-execute-async
        client.machineLearning().startDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::start-datafeed-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// Doc-snippet test for the Stop Datafeed API.
public void testStopDatafeed() throws Exception {
    RestHighLevelClient client = highLevelClient();
    {
        // tag::stop-datafeed-request
        StopDatafeedRequest request = new StopDatafeedRequest("datafeed_id1", "datafeed_id*"); // <1>
        // end::stop-datafeed-request
        request = StopDatafeedRequest.stopAllDatafeedsRequest();
        // tag::stop-datafeed-request-options
        request.setAllowNoDatafeeds(true); // <1>
        request.setForce(true); // <2>
        request.setTimeout(TimeValue.timeValueMinutes(10)); // <3>
        // end::stop-datafeed-request-options
        // tag::stop-datafeed-execute
        StopDatafeedResponse response = client.machineLearning().stopDatafeed(request, RequestOptions.DEFAULT);
        // end::stop-datafeed-execute
        // tag::stop-datafeed-response
        boolean stopped = response.isStopped(); // <1>
        // end::stop-datafeed-response
        assertTrue(stopped);
    }
    {
        StopDatafeedRequest request = StopDatafeedRequest.stopAllDatafeedsRequest();
        // tag::stop-datafeed-execute-listener
        ActionListener<StopDatafeedResponse> listener = new ActionListener<StopDatafeedResponse>() {
            @Override
            public void onResponse(StopDatafeedResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::stop-datafeed-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::stop-datafeed-execute-async
        client.machineLearning().stopDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::stop-datafeed-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// Doc-snippet test for the Get Datafeed Stats API.
public void testGetDatafeedStats() throws Exception {
    RestHighLevelClient client = highLevelClient();
    Job job = MachineLearningIT.buildJob("get-machine-learning-datafeed-stats1");
    client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
    Job secondJob = MachineLearningIT.buildJob("get-machine-learning-datafeed-stats2");
    client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
    String datafeedId1 = job.getId() + "-feed";
    String indexName = "datafeed_stats_data_2";
    createIndex(indexName);
    DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId1, job.getId())
        .setIndices(indexName)
        .build();
    client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
    String datafeedId2 = secondJob.getId() + "-feed";
    DatafeedConfig secondDatafeed = DatafeedConfig.builder(datafeedId2, secondJob.getId())
.setIndices(indexName)
        .build();
    client.machineLearning().putDatafeed(new PutDatafeedRequest(secondDatafeed), RequestOptions.DEFAULT);
    {
        //tag::get-datafeed-stats-request
        GetDatafeedStatsRequest request =
            new GetDatafeedStatsRequest("get-machine-learning-datafeed-stats1-feed", "get-machine-learning-datafeed*"); // <1>
        request.setAllowNoDatafeeds(true); // <2>
        //end::get-datafeed-stats-request
        //tag::get-datafeed-stats-execute
        GetDatafeedStatsResponse response = client.machineLearning().getDatafeedStats(request, RequestOptions.DEFAULT);
        //end::get-datafeed-stats-execute
        //tag::get-datafeed-stats-response
        long numberOfDatafeedStats = response.count(); // <1>
        List<DatafeedStats> datafeedStats = response.datafeedStats(); // <2>
        //end::get-datafeed-stats-response
        assertEquals(2, response.count());
        assertThat(response.datafeedStats(), hasSize(2));
        assertThat(response.datafeedStats().stream().map(DatafeedStats::getDatafeedId).collect(Collectors.toList()),
            containsInAnyOrder(datafeed.getId(), secondDatafeed.getId()));
    }
    {
        GetDatafeedStatsRequest request = new GetDatafeedStatsRequest("*");
        // tag::get-datafeed-stats-execute-listener
        ActionListener<GetDatafeedStatsResponse> listener = new ActionListener<GetDatafeedStatsResponse>() {
            @Override
            public void onResponse(GetDatafeedStatsResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::get-datafeed-stats-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::get-datafeed-stats-execute-async
        client.machineLearning().getDatafeedStatsAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::get-datafeed-stats-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// Doc-snippet test for the Get Buckets API; a result document is indexed
// directly into the shared ML results index so the query has something to find.
public void testGetBuckets() throws IOException, InterruptedException {
    RestHighLevelClient client = highLevelClient();
    String jobId = "test-get-buckets";
    Job job = MachineLearningIT.buildJob(jobId);
    client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
    // Let us index a bucket
    IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
    indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
    indexRequest.source("{\"job_id\":\"test-get-buckets\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," +
        "\"bucket_span\": 600,\"is_interim\": false, \"anomaly_score\": 80.0}", XContentType.JSON);
    client.index(indexRequest, RequestOptions.DEFAULT);
    {
        // tag::get-buckets-request
        GetBucketsRequest request = new GetBucketsRequest(jobId); // <1>
        // end::get-buckets-request
        // tag::get-buckets-timestamp
        request.setTimestamp("2018-08-17T00:00:00Z"); // <1>
        // end::get-buckets-timestamp
        // Set timestamp to null as it is incompatible with other args
        request.setTimestamp(null);
        // tag::get-buckets-anomaly-score
        request.setAnomalyScore(75.0); // <1>
        // end::get-buckets-anomaly-score
        // tag::get-buckets-desc
        request.setDescending(true); // <1>
        // end::get-buckets-desc
        // tag::get-buckets-end
        request.setEnd("2018-08-21T00:00:00Z"); // <1>
        // end::get-buckets-end
        // tag::get-buckets-exclude-interim
        request.setExcludeInterim(true); // <1>
        // end::get-buckets-exclude-interim
        // tag::get-buckets-expand
        request.setExpand(true); // <1>
        // end::get-buckets-expand
        // tag::get-buckets-page
        request.setPageParams(new PageParams(100, 200)); // <1>
        // end::get-buckets-page
        // Set page params back to null so the response contains the bucket we indexed
        request.setPageParams(null);
        // tag::get-buckets-sort
        request.setSort("anomaly_score"); // <1>
        // end::get-buckets-sort
        // tag::get-buckets-start
        request.setStart("2018-08-01T00:00:00Z"); // <1>
        // end::get-buckets-start
        // tag::get-buckets-execute
        GetBucketsResponse response = client.machineLearning().getBuckets(request, RequestOptions.DEFAULT);
        // end::get-buckets-execute
        // tag::get-buckets-response
        long count = response.count(); // <1>
        List<Bucket>
buckets = response.buckets(); // <2>
            // end::get-buckets-response

            assertEquals(1, buckets.size());
        }
        {
            GetBucketsRequest request = new GetBucketsRequest(jobId);

            // tag::get-buckets-execute-listener
            ActionListener<GetBucketsResponse> listener = new ActionListener<GetBucketsResponse>() {
                @Override
                public void onResponse(GetBucketsResponse getBucketsResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-buckets-execute-listener

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::get-buckets-execute-async
            client.machineLearning().getBucketsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-buckets-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    // Doc-snippet test for the Flush Job API: flushes one open job synchronously
    // (showing all request options) and a second job asynchronously.
    public void testFlushJob() throws Exception {
        RestHighLevelClient client = highLevelClient();

        Job job = MachineLearningIT.buildJob("flushing-my-first-machine-learning-job");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);

        Job secondJob = MachineLearningIT.buildJob("flushing-my-second-machine-learning-job");
        client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
        client.machineLearning().openJob(new OpenJobRequest(secondJob.getId()), RequestOptions.DEFAULT);

        {
            // tag::flush-job-request
            FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-first-machine-learning-job"); // <1>
            // end::flush-job-request

            // tag::flush-job-request-options
            flushJobRequest.setCalcInterim(true); // <1>
            flushJobRequest.setAdvanceTime("2018-08-31T16:35:07+00:00"); // <2>
            flushJobRequest.setStart("2018-08-31T16:35:17+00:00"); // <3>
            flushJobRequest.setEnd("2018-08-31T16:35:27+00:00"); // <4>
            flushJobRequest.setSkipTime("2018-08-31T16:35:00+00:00"); // <5>
            // end::flush-job-request-options

            // tag::flush-job-execute
            FlushJobResponse flushJobResponse = client.machineLearning().flushJob(flushJobRequest, RequestOptions.DEFAULT);
            // end::flush-job-execute

            // tag::flush-job-response
            boolean isFlushed = flushJobResponse.isFlushed(); // <1>
            Date lastFinalizedBucketEnd = flushJobResponse.getLastFinalizedBucketEnd(); // <2>
            // end::flush-job-response
        }
        {
            // tag::flush-job-execute-listener
            ActionListener<FlushJobResponse> listener = new ActionListener<FlushJobResponse>() {
                @Override
                public void onResponse(FlushJobResponse FlushJobResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::flush-job-execute-listener

            FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-second-machine-learning-job");

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::flush-job-execute-async
            client.machineLearning().flushJobAsync(flushJobRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::flush-job-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    // Doc-snippet test for the Delete Forecast API: creates and opens a job,
    // posts 30 documents, runs a forecast, waits until it finishes, deletes it.
    public void testDeleteForecast() throws Exception {
        RestHighLevelClient client = highLevelClient();

        Job job = MachineLearningIT.buildJob("deleting-forecast-for-job");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
        PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder();
        for(int i = 0; i < 30; i++) {
            Map<String, Object> hashMap = new HashMap<>();
            hashMap.put("total", randomInt(1000));
            hashMap.put("timestamp", (i+1)*1000);
            builder.addDoc(hashMap);
        }

        PostDataRequest postDataRequest = new PostDataRequest(job.getId(), builder);
        client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT);
        client.machineLearning().flushJob(new FlushJobRequest(job.getId()), RequestOptions.DEFAULT);

        ForecastJobResponse
forecastJobResponse = client.machineLearning().
            forecastJob(new ForecastJobRequest(job.getId()), RequestOptions.DEFAULT);
        String forecastId = forecastJobResponse.getForecastId();

        GetRequest request = new GetRequest(".ml-anomalies-" + job.getId());
        request.id(job.getId() + "_model_forecast_request_stats_" + forecastId);
        // Wait until the forecast document reports completion before deleting it
        assertBusy(() -> {
            GetResponse getResponse = highLevelClient().get(request, RequestOptions.DEFAULT);
            assertTrue(getResponse.isExists());
            assertTrue(getResponse.getSourceAsString().contains("finished"));
        }, 30, TimeUnit.SECONDS);

        {
            // tag::delete-forecast-request
            DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest("deleting-forecast-for-job"); // <1>
            // end::delete-forecast-request

            // tag::delete-forecast-request-options
            deleteForecastRequest.setForecastIds(forecastId); // <1>
            deleteForecastRequest.timeout("30s"); // <2>
            deleteForecastRequest.setAllowNoForecasts(true); // <3>
            // end::delete-forecast-request-options

            // tag::delete-forecast-execute
            AcknowledgedResponse deleteForecastResponse = client.machineLearning().deleteForecast(deleteForecastRequest,
                RequestOptions.DEFAULT);
            // end::delete-forecast-execute

            // tag::delete-forecast-response
            boolean isAcknowledged = deleteForecastResponse.isAcknowledged(); // <1>
            // end::delete-forecast-response
        }
        {
            // tag::delete-forecast-execute-listener
            ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse DeleteForecastResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::delete-forecast-execute-listener

            DeleteForecastRequest deleteForecastRequest = DeleteForecastRequest.deleteAllForecasts(job.getId());
            deleteForecastRequest.setAllowNoForecasts(true);

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::delete-forecast-execute-async
            client.machineLearning().deleteForecastAsync(deleteForecastRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::delete-forecast-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    // Doc-snippet test for the Get Job Stats API: creates two jobs, fetches
    // their stats via an id pattern, and verifies both are matched.
    public void testGetJobStats() throws Exception {
        RestHighLevelClient client = highLevelClient();

        Job job = MachineLearningIT.buildJob("get-machine-learning-job-stats1");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);

        Job secondJob = MachineLearningIT.buildJob("get-machine-learning-job-stats2");
        client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);

        {
            // tag::get-job-stats-request
            GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*"); // <1>
            request.setAllowNoJobs(true); // <2>
            // end::get-job-stats-request

            // tag::get-job-stats-execute
            GetJobStatsResponse response = client.machineLearning().getJobStats(request, RequestOptions.DEFAULT);
            // end::get-job-stats-execute

            // tag::get-job-stats-response
            long numberOfJobStats = response.count(); // <1>
            List<JobStats> jobStats = response.jobStats(); // <2>
            // end::get-job-stats-response

            assertEquals(2, response.count());
            assertThat(response.jobStats(), hasSize(2));
            assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()),
                containsInAnyOrder(job.getId(), secondJob.getId()));
        }
        {
            GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*");

            // tag::get-job-stats-execute-listener
            ActionListener<GetJobStatsResponse> listener = new ActionListener<GetJobStatsResponse>() {
                @Override
                public void onResponse(GetJobStatsResponse response) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-job-stats-execute-listener

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::get-job-stats-execute-async
            client.machineLearning().getJobStatsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-job-stats-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    // Doc-snippet test for the Forecast Job API: posts data to an open job,
    // flushes it, then requests a forecast with all available options.
    public void testForecastJob() throws Exception {
        RestHighLevelClient client = highLevelClient();

        Job job = MachineLearningIT.buildJob("forecasting-my-first-machine-learning-job");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);

        PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder();
        for(int i = 0; i < 30; i++) {
            Map<String, Object> hashMap = new HashMap<>();
            hashMap.put("total", randomInt(1000));
            hashMap.put("timestamp", (i+1)*1000);
            builder.addDoc(hashMap);
        }
        PostDataRequest postDataRequest = new PostDataRequest(job.getId(), builder);
        client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT);
        client.machineLearning().flushJob(new FlushJobRequest(job.getId()), RequestOptions.DEFAULT);

        {
            // tag::forecast-job-request
            ForecastJobRequest forecastJobRequest = new ForecastJobRequest("forecasting-my-first-machine-learning-job"); // <1>
            // end::forecast-job-request

            // tag::forecast-job-request-options
            forecastJobRequest.setExpiresIn(TimeValue.timeValueHours(48)); // <1>
            forecastJobRequest.setDuration(TimeValue.timeValueHours(24)); // <2>
            forecastJobRequest.setMaxModelMemory(new ByteSizeValue(30, ByteSizeUnit.MB)); // <3>
            // end::forecast-job-request-options

            // tag::forecast-job-execute
            ForecastJobResponse forecastJobResponse = client.machineLearning().forecastJob(forecastJobRequest, RequestOptions.DEFAULT);
            // end::forecast-job-execute

            // tag::forecast-job-response
            boolean isAcknowledged = forecastJobResponse.isAcknowledged(); // <1>
            String forecastId = forecastJobResponse.getForecastId(); // <2>
            // end::forecast-job-response
            assertTrue(isAcknowledged);
            assertNotNull(forecastId);
        }
        {
            // tag::forecast-job-execute-listener
            ActionListener<ForecastJobResponse> listener = new ActionListener<ForecastJobResponse>() {
                @Override
                public void onResponse(ForecastJobResponse forecastJobResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::forecast-job-execute-listener

            ForecastJobRequest forecastJobRequest = new ForecastJobRequest("forecasting-my-first-machine-learning-job");

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::forecast-job-execute-async
            client.machineLearning().forecastJobAsync(forecastJobRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::forecast-job-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    // Doc-snippet test for the Get Overall Buckets API: indexes one bucket per
    // job (scores 60 and 100), then queries overall buckets across both jobs.
    public void testGetOverallBuckets() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();

        String jobId1 = "test-get-overall-buckets-1";
        String jobId2 = "test-get-overall-buckets-2";
        Job job1 = MachineLearningGetResultsIT.buildJob(jobId1);
        Job job2 = MachineLearningGetResultsIT.buildJob(jobId2);
        client.machineLearning().putJob(new PutJobRequest(job1), RequestOptions.DEFAULT);
        client.machineLearning().putJob(new PutJobRequest(job2), RequestOptions.DEFAULT);

        // Let us index some buckets
        BulkRequest bulkRequest = new BulkRequest();
        bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        {
            IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
            indexRequest.source("{\"job_id\":\"test-get-overall-buckets-1\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," +
                "\"bucket_span\": 600,\"is_interim\": false, \"anomaly_score\": 60.0}", XContentType.JSON);
            bulkRequest.add(indexRequest);
        }
        {
            IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
            indexRequest.source("{\"job_id\":\"test-get-overall-buckets-2\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," +
                "\"bucket_span\": 3600,\"is_interim\": false, \"anomaly_score\": 100.0}", XContentType.JSON);
            bulkRequest.add(indexRequest);
        }
        client.bulk(bulkRequest, RequestOptions.DEFAULT);

        {
            // tag::get-overall-buckets-request
            GetOverallBucketsRequest request = new GetOverallBucketsRequest(jobId1, jobId2); // <1>
            // end::get-overall-buckets-request

            // tag::get-overall-buckets-bucket-span
            request.setBucketSpan(TimeValue.timeValueHours(24)); // <1>
            // end::get-overall-buckets-bucket-span

            // tag::get-overall-buckets-end
            request.setEnd("2018-08-21T00:00:00Z"); // <1>
            // end::get-overall-buckets-end

            // tag::get-overall-buckets-exclude-interim
            request.setExcludeInterim(true); // <1>
            // end::get-overall-buckets-exclude-interim

            // tag::get-overall-buckets-overall-score
            request.setOverallScore(75.0); // <1>
            // end::get-overall-buckets-overall-score

            // tag::get-overall-buckets-start
            request.setStart("2018-08-01T00:00:00Z"); // <1>
            // end::get-overall-buckets-start

            // tag::get-overall-buckets-top-n
            request.setTopN(2); // <1>
            // end::get-overall-buckets-top-n

            // tag::get-overall-buckets-execute
            GetOverallBucketsResponse response = client.machineLearning().getOverallBuckets(request, RequestOptions.DEFAULT);
            // end::get-overall-buckets-execute

            // tag::get-overall-buckets-response
            long count = response.count(); // <1>
            List<OverallBucket> overallBuckets = response.overallBuckets(); // <2>
            // end::get-overall-buckets-response

            assertEquals(1, overallBuckets.size());
            // Overall score is the mean of the two bucket scores: (60 + 100) / 2
            assertThat(overallBuckets.get(0).getOverallScore(), is(closeTo(80.0, 0.001)));
        }
        {
            GetOverallBucketsRequest request = new GetOverallBucketsRequest(jobId1, jobId2);

            // tag::get-overall-buckets-execute-listener
            ActionListener<GetOverallBucketsResponse> listener = new ActionListener<GetOverallBucketsResponse>() {
                @Override
                public void onResponse(GetOverallBucketsResponse getOverallBucketsResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-overall-buckets-execute-listener

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::get-overall-buckets-execute-async
            client.machineLearning().getOverallBucketsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-overall-buckets-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    // Doc-snippet test for the Get Records API: indexes one anomaly record,
    // demonstrates every request option, then executes sync and async.
    public void testGetRecords() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();

        String jobId = "test-get-records";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);

        // Let us index a record
        IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
        indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        indexRequest.source("{\"job_id\":\"test-get-records\", \"result_type\":\"record\", \"timestamp\": 1533081600000," +
            "\"bucket_span\": 600,\"is_interim\": false, \"record_score\": 80.0}", XContentType.JSON);
        client.index(indexRequest, RequestOptions.DEFAULT);

        {
            // tag::get-records-request
            GetRecordsRequest request = new GetRecordsRequest(jobId); // <1>
            // end::get-records-request

            // tag::get-records-desc
            request.setDescending(true); // <1>
            // end::get-records-desc

            // tag::get-records-end
            request.setEnd("2018-08-21T00:00:00Z"); // <1>
            // end::get-records-end

            // tag::get-records-exclude-interim
            request.setExcludeInterim(true); // <1>
            // end::get-records-exclude-interim

            // tag::get-records-page
            request.setPageParams(new PageParams(100, 200)); // <1>
            // end::get-records-page

            // Set page params back to null so the response contains the record we indexed
            request.setPageParams(null);

            // tag::get-records-record-score
            request.setRecordScore(75.0); // <1>
            // end::get-records-record-score

            // tag::get-records-sort
            request.setSort("probability"); // <1>
            // end::get-records-sort

            // tag::get-records-start
            request.setStart("2018-08-01T00:00:00Z"); // <1>
            // end::get-records-start

            // tag::get-records-execute
            GetRecordsResponse response = client.machineLearning().getRecords(request, RequestOptions.DEFAULT);
            // end::get-records-execute

            // tag::get-records-response
            long count = response.count(); // <1>
            List<AnomalyRecord> records = response.records(); // <2>
            // end::get-records-response
            assertEquals(1, records.size());
        }
        {
            GetRecordsRequest request = new GetRecordsRequest(jobId);

            // tag::get-records-execute-listener
            ActionListener<GetRecordsResponse> listener = new ActionListener<GetRecordsResponse>() {
                @Override
                public void onResponse(GetRecordsResponse getRecordsResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-records-execute-listener

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::get-records-execute-async
            client.machineLearning().getRecordsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-records-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    // Doc-snippet test for the Post Data API: streams two documents to an open
    // job via the JsonBuilder, showing the reset-range request options.
    public void testPostData() throws Exception {
        RestHighLevelClient client = highLevelClient();

        Job job = MachineLearningIT.buildJob("test-post-data");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);

        {
            // tag::post-data-request
            PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder(); // <1>
            Map<String, Object> mapData = new HashMap<>();
            mapData.put("total", 109);
            jsonBuilder.addDoc(mapData); // <2>
            jsonBuilder.addDoc("{\"total\":1000}"); // <3>
            PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder); // <4>
            // end::post-data-request

            // tag::post-data-request-options
            postDataRequest.setResetStart("2018-08-31T16:35:07+00:00"); // <1>
            postDataRequest.setResetEnd("2018-08-31T16:35:17+00:00"); // <2>
            // end::post-data-request-options
postDataRequest.setResetEnd(null);
            postDataRequest.setResetStart(null);

            // tag::post-data-execute
            PostDataResponse postDataResponse = client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT);
            // end::post-data-execute

            // tag::post-data-response
            DataCounts dataCounts = postDataResponse.getDataCounts(); // <1>
            // end::post-data-response
            assertEquals(2, dataCounts.getInputRecordCount());
        }
        {
            // tag::post-data-execute-listener
            ActionListener<PostDataResponse> listener = new ActionListener<PostDataResponse>() {
                @Override
                public void onResponse(PostDataResponse postDataResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::post-data-execute-listener

            PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder();
            Map<String, Object> mapData = new HashMap<>();
            mapData.put("total", 109);
            jsonBuilder.addDoc(mapData);
            PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder); // <1>

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::post-data-execute-async
            client.machineLearning().postDataAsync(postDataRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::post-data-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    // Doc-snippet test for the Find File Structure API: writes a two-line
    // NDJSON sample file and asks the API to analyse its structure.
    public void testFindFileStructure() throws Exception {
        RestHighLevelClient client = highLevelClient();

        Path anInterestingFile = createTempFile();
        String contents = "{\"logger\":\"controller\",\"timestamp\":1478261151445,\"level\":\"INFO\"," +
            "\"pid\":42,\"thread\":\"0x7fff7d2a8000\",\"message\":\"message 1\",\"class\":\"ml\"," +
            "\"method\":\"core::SomeNoiseMaker\",\"file\":\"Noisemaker.cc\",\"line\":333}\n" +
            "{\"logger\":\"controller\",\"timestamp\":1478261151445," +
            "\"level\":\"INFO\",\"pid\":42,\"thread\":\"0x7fff7d2a8000\",\"message\":\"message 2\",\"class\":\"ml\"," +
            "\"method\":\"core::SomeNoiseMaker\",\"file\":\"Noisemaker.cc\",\"line\":333}\n";
        Files.write(anInterestingFile, Collections.singleton(contents), StandardCharsets.UTF_8);

        {
            // tag::find-file-structure-request
            FindFileStructureRequest findFileStructureRequest = new FindFileStructureRequest(); // <1>
            findFileStructureRequest.setSample(Files.readAllBytes(anInterestingFile)); // <2>
            // end::find-file-structure-request

            // tag::find-file-structure-request-options
            findFileStructureRequest.setLinesToSample(500); // <1>
            findFileStructureRequest.setExplain(true); // <2>
            // end::find-file-structure-request-options

            // tag::find-file-structure-execute
            FindFileStructureResponse findFileStructureResponse =
                client.machineLearning().findFileStructure(findFileStructureRequest, RequestOptions.DEFAULT);
            // end::find-file-structure-execute

            // tag::find-file-structure-response
            FileStructure structure = findFileStructureResponse.getFileStructure(); // <1>
            // end::find-file-structure-response

            assertEquals(2, structure.getNumLinesAnalyzed());
        }
        {
            // tag::find-file-structure-execute-listener
            ActionListener<FindFileStructureResponse> listener = new ActionListener<FindFileStructureResponse>() {
                @Override
                public void onResponse(FindFileStructureResponse findFileStructureResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::find-file-structure-execute-listener

            FindFileStructureRequest findFileStructureRequest = new FindFileStructureRequest();
            findFileStructureRequest.setSample(Files.readAllBytes(anInterestingFile));

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::find-file-structure-execute-async
            client.machineLearning().findFileStructureAsync(findFileStructureRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::find-file-structure-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    // Doc-snippet test for the Get Influencers API.
    public void
testGetInfluencers() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();

        String jobId = "test-get-influencers";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);

        // Let us index a record
        IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
        indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        indexRequest.source("{\"job_id\":\"test-get-influencers\", \"result_type\":\"influencer\", \"timestamp\": 1533081600000," +
            "\"bucket_span\": 600,\"is_interim\": false, \"influencer_score\": 80.0, \"influencer_field_name\": \"my_influencer\"," +
            "\"influencer_field_value\":\"foo\"}", XContentType.JSON);
        client.index(indexRequest, RequestOptions.DEFAULT);

        {
            // tag::get-influencers-request
            GetInfluencersRequest request = new GetInfluencersRequest(jobId); // <1>
            // end::get-influencers-request

            // tag::get-influencers-desc
            request.setDescending(true); // <1>
            // end::get-influencers-desc

            // tag::get-influencers-end
            request.setEnd("2018-08-21T00:00:00Z"); // <1>
            // end::get-influencers-end

            // tag::get-influencers-exclude-interim
            request.setExcludeInterim(true); // <1>
            // end::get-influencers-exclude-interim

            // tag::get-influencers-influencer-score
            request.setInfluencerScore(75.0); // <1>
            // end::get-influencers-influencer-score

            // tag::get-influencers-page
            request.setPageParams(new PageParams(100, 200)); // <1>
            // end::get-influencers-page

            // Set page params back to null so the response contains the influencer we indexed
            request.setPageParams(null);

            // tag::get-influencers-sort
            request.setSort("probability"); // <1>
            // end::get-influencers-sort

            // tag::get-influencers-start
            request.setStart("2018-08-01T00:00:00Z"); // <1>
            // end::get-influencers-start

            // tag::get-influencers-execute
            GetInfluencersResponse response = client.machineLearning().getInfluencers(request, RequestOptions.DEFAULT);
            // end::get-influencers-execute

            // tag::get-influencers-response
            long count = response.count(); // <1>
            List<Influencer> influencers = response.influencers(); // <2>
            // end::get-influencers-response
            assertEquals(1, influencers.size());
        }
        {
            GetInfluencersRequest request = new GetInfluencersRequest(jobId);

            // tag::get-influencers-execute-listener
            ActionListener<GetInfluencersResponse> listener = new ActionListener<GetInfluencersResponse>() {
                @Override
                public void onResponse(GetInfluencersResponse getInfluencersResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-influencers-execute-listener

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::get-influencers-execute-async
            client.machineLearning().getInfluencersAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-influencers-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    // Doc-snippet test for the Get Categories API: indexes one category
    // definition, then retrieves it by id and via paging.
    public void testGetCategories() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();

        String jobId = "test-get-categories";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);

        // Let us index a category
        IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
        indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        indexRequest.source("{\"job_id\": \"test-get-categories\", \"category_id\": 1, \"terms\": \"AAL\"," +
            " \"regex\": \".*?AAL.*\", \"max_matching_length\": 3, \"examples\": [\"AAL\"]}", XContentType.JSON);
        client.index(indexRequest, RequestOptions.DEFAULT);

        {
            // tag::get-categories-request
            GetCategoriesRequest request = new GetCategoriesRequest(jobId); // <1>
            // end::get-categories-request

            // tag::get-categories-category-id
            request.setCategoryId(1L); // <1>
            // end::get-categories-category-id

            // tag::get-categories-page
            request.setPageParams(new PageParams(100, 200)); // <1>
            // end::get-categories-page

            // Set page params back to null so the response contains the category we indexed
            request.setPageParams(null);

            // tag::get-categories-execute
            GetCategoriesResponse response = client.machineLearning().getCategories(request, RequestOptions.DEFAULT);
            // end::get-categories-execute

            // tag::get-categories-response
            long count = response.count(); // <1>
            List<CategoryDefinition> categories = response.categories(); // <2>
            // end::get-categories-response
            assertEquals(1, categories.size());
        }
        {
            GetCategoriesRequest request = new GetCategoriesRequest(jobId);

            // tag::get-categories-execute-listener
            ActionListener<GetCategoriesResponse> listener = new ActionListener<GetCategoriesResponse>() {
                @Override
                public void onResponse(GetCategoriesResponse getcategoriesResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-categories-execute-listener

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::get-categories-execute-async
            client.machineLearning().getCategoriesAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-categories-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    // Doc-snippet test for the Delete Expired Data API.
    public void testDeleteExpiredData() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();

        String jobId = "test-delete-expired-data";
        MachineLearningIT.buildJob(jobId);

        {
            // tag::delete-expired-data-request
            DeleteExpiredDataRequest request = new DeleteExpiredDataRequest( // <1>
                null, // <2>
                1000.0f, // <3>
                TimeValue.timeValueHours(12) // <4>
            );
            // end::delete-expired-data-request

            // tag::delete-expired-data-execute
            DeleteExpiredDataResponse response = client.machineLearning().deleteExpiredData(request, RequestOptions.DEFAULT);
            // end::delete-expired-data-execute

            // tag::delete-expired-data-response
            boolean deleted =
response.getDeleted(); // <1>
            // end::delete-expired-data-response
            assertTrue(deleted);
        }
        {
            // tag::delete-expired-data-execute-listener
            ActionListener<DeleteExpiredDataResponse> listener = new ActionListener<DeleteExpiredDataResponse>() {
                @Override
                public void onResponse(DeleteExpiredDataResponse deleteExpiredDataResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::delete-expired-data-execute-listener

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            DeleteExpiredDataRequest deleteExpiredDataRequest = new DeleteExpiredDataRequest();

            // tag::delete-expired-data-execute-async
            client.machineLearning().deleteExpiredDataAsync(deleteExpiredDataRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::delete-expired-data-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    // Doc-snippet test for the Delete Model Snapshot API: indexes a snapshot
    // document (re-indexed before each sub-test since deletion removes it),
    // then deletes it synchronously and asynchronously.
    public void testDeleteModelSnapshot() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();

        String jobId = "test-delete-model-snapshot";
        String snapshotId = "1541587919";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);

        // Let us index a snapshot
        IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
        indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        indexRequest.source("{\"job_id\":\"" + jobId + "\", \"timestamp\":1541587919000, " +
            "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
            "\"snapshot_id\":\"" + snapshotId + "\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
            "\"job_id\":\"" + jobId + "\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
            "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
            "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
            "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
            "\"latest_result_time_stamp\":1519930800000, \"retain\":false}", XContentType.JSON);

        {
            client.index(indexRequest, RequestOptions.DEFAULT);

            // tag::delete-model-snapshot-request
            DeleteModelSnapshotRequest request = new DeleteModelSnapshotRequest(jobId, snapshotId); // <1>
            // end::delete-model-snapshot-request

            // tag::delete-model-snapshot-execute
            AcknowledgedResponse response = client.machineLearning().deleteModelSnapshot(request, RequestOptions.DEFAULT);
            // end::delete-model-snapshot-execute

            // tag::delete-model-snapshot-response
            boolean isAcknowledged = response.isAcknowledged(); // <1>
            // end::delete-model-snapshot-response

            assertTrue(isAcknowledged);
        }
        {
            client.index(indexRequest, RequestOptions.DEFAULT);

            // tag::delete-model-snapshot-execute-listener
            ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse acknowledgedResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::delete-model-snapshot-execute-listener

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            DeleteModelSnapshotRequest deleteModelSnapshotRequest = new DeleteModelSnapshotRequest(jobId, "1541587919");

            // tag::delete-model-snapshot-execute-async
            client.machineLearning().deleteModelSnapshotAsync(deleteModelSnapshotRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::delete-model-snapshot-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    // Doc-snippet test for the Get Model Snapshots API.
    public void testGetModelSnapshots() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();

        String jobId = "test-get-model-snapshots";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);

        // Let us index a snapshot
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
        indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        indexRequest.source("{\"job_id\":\"test-get-model-snapshots\", \"timestamp\":1541587919000, " +
            "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
            "\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
            "\"job_id\":\"test-get-model-snapshots\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
            "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
            "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
            "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
            "\"latest_result_time_stamp\":1519930800000, \"retain\":false}", XContentType.JSON);
        client.index(indexRequest, RequestOptions.DEFAULT);

        {
            // tag::get-model-snapshots-request
            GetModelSnapshotsRequest request = new GetModelSnapshotsRequest(jobId); // <1>
            // end::get-model-snapshots-request

            // tag::get-model-snapshots-snapshot-id
            request.setSnapshotId("1541587919"); // <1>
            // end::get-model-snapshots-snapshot-id

            // Set snapshot id to null as it is incompatible with other args
            request.setSnapshotId(null);

            // tag::get-model-snapshots-desc
            request.setDesc(true); // <1>
            // end::get-model-snapshots-desc

            // tag::get-model-snapshots-end
            request.setEnd("2018-11-07T21:00:00Z"); // <1>
            // end::get-model-snapshots-end

            // tag::get-model-snapshots-page
            request.setPageParams(new PageParams(100, 200)); // <1>
            // end::get-model-snapshots-page

            // Set page params back to null so the response contains the snapshot we indexed
            request.setPageParams(null);

            // tag::get-model-snapshots-sort
            request.setSort("latest_result_time_stamp"); // <1>
            // end::get-model-snapshots-sort

            // tag::get-model-snapshots-start
            request.setStart("2018-11-07T00:00:00Z"); // <1>
            // end::get-model-snapshots-start

            // tag::get-model-snapshots-execute
            GetModelSnapshotsResponse response = client.machineLearning().getModelSnapshots(request, RequestOptions.DEFAULT);
            // end::get-model-snapshots-execute

            // tag::get-model-snapshots-response
            long count = response.count(); // <1>
            List<ModelSnapshot> modelSnapshots = response.snapshots(); // <2>
            // end::get-model-snapshots-response

            assertEquals(1, modelSnapshots.size());
        }
        {
            GetModelSnapshotsRequest request = new GetModelSnapshotsRequest(jobId);

            // tag::get-model-snapshots-execute-listener
            ActionListener<GetModelSnapshotsResponse> listener = new ActionListener<GetModelSnapshotsResponse>() {
                @Override
                public void onResponse(GetModelSnapshotsResponse getModelSnapshotsResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-model-snapshots-execute-listener

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::get-model-snapshots-execute-async
            client.machineLearning().getModelSnapshotsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-model-snapshots-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    // Doc-snippet test for the Revert Model Snapshot API: indexes a snapshot
    // document (with quantiles) under its canonical id, then reverts to it.
    public void testRevertModelSnapshot() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();

        String jobId = "test-revert-model-snapshot";
        String snapshotId = "1541587919";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);

        // Let us index a snapshot
        String documentId = jobId + "_model_snapshot_" + snapshotId;
        IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared").id(documentId);
        indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        indexRequest.source("{\"job_id\":\"test-revert-model-snapshot\", \"timestamp\":1541587919000, " +
            "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
"\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
        "\"job_id\":\"test-revert-model-snapshot\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
        "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
        "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
        "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
        "\"latest_result_time_stamp\":1519930800000, \"retain\":false, " +
        "\"quantiles\":{\"job_id\":\"test-revert-model-snapshot\", \"timestamp\":1541587919000, " +
        "\"quantile_state\":\"state\"}}", XContentType.JSON);
    client.index(indexRequest, RequestOptions.DEFAULT);
    {
        // tag::revert-model-snapshot-request
        RevertModelSnapshotRequest request = new RevertModelSnapshotRequest(jobId, snapshotId); // <1>
        // end::revert-model-snapshot-request
        // tag::revert-model-snapshot-delete-intervening-results
        request.setDeleteInterveningResults(true); // <1>
        // end::revert-model-snapshot-delete-intervening-results
        // tag::revert-model-snapshot-execute
        RevertModelSnapshotResponse response = client.machineLearning().revertModelSnapshot(request, RequestOptions.DEFAULT);
        // end::revert-model-snapshot-execute
        // tag::revert-model-snapshot-response
        ModelSnapshot modelSnapshot = response.getModel(); // <1>
        // end::revert-model-snapshot-response

        assertEquals(snapshotId, modelSnapshot.getSnapshotId());
        assertEquals("State persisted due to job close at 2018-11-07T10:51:59+0000", modelSnapshot.getDescription());
        assertEquals(51722, modelSnapshot.getModelSizeStats().getModelBytes());
    }
    {
        RevertModelSnapshotRequest request = new RevertModelSnapshotRequest(jobId, snapshotId);

        // tag::revert-model-snapshot-execute-listener
        ActionListener<RevertModelSnapshotResponse> listener = new ActionListener<RevertModelSnapshotResponse>() {
            @Override
            public void onResponse(RevertModelSnapshotResponse revertModelSnapshotResponse) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::revert-model-snapshot-execute-listener

        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::revert-model-snapshot-execute-async
        client.machineLearning().revertModelSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::revert-model-snapshot-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// NOTE(review): documentation IT for the ML update-model-snapshot API; the
// tag::/end:: regions below are extracted verbatim into the reference docs.
public void testUpdateModelSnapshot() throws IOException, InterruptedException {
    RestHighLevelClient client = highLevelClient();
    String jobId = "test-update-model-snapshot";
    String snapshotId = "1541587919";
    String documentId = jobId + "_model_snapshot_" + snapshotId;
    Job job = MachineLearningIT.buildJob(jobId);
    client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);

    // Let us index a snapshot
    IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared").id(documentId);
    indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
    indexRequest.source("{\"job_id\":\"test-update-model-snapshot\", \"timestamp\":1541587919000, " +
        "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
        "\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
        "\"job_id\":\"test-update-model-snapshot\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
        "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
        "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
        "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
        "\"latest_result_time_stamp\":1519930800000, \"retain\":false}", XContentType.JSON);
    client.index(indexRequest, RequestOptions.DEFAULT);
    {
        // tag::update-model-snapshot-request
        UpdateModelSnapshotRequest request = new UpdateModelSnapshotRequest(jobId, snapshotId);
// <1>
        // end::update-model-snapshot-request
        // tag::update-model-snapshot-description
        request.setDescription("My Snapshot"); // <1>
        // end::update-model-snapshot-description
        // tag::update-model-snapshot-retain
        request.setRetain(true); // <1>
        // end::update-model-snapshot-retain
        // tag::update-model-snapshot-execute
        UpdateModelSnapshotResponse response = client.machineLearning().updateModelSnapshot(request, RequestOptions.DEFAULT);
        // end::update-model-snapshot-execute
        // tag::update-model-snapshot-response
        boolean acknowledged = response.getAcknowledged(); // <1>
        ModelSnapshot modelSnapshot = response.getModel(); // <2>
        // end::update-model-snapshot-response

        assertTrue(acknowledged);
        assertEquals("My Snapshot", modelSnapshot.getDescription());
    }
    {
        UpdateModelSnapshotRequest request = new UpdateModelSnapshotRequest(jobId, snapshotId);

        // tag::update-model-snapshot-execute-listener
        ActionListener<UpdateModelSnapshotResponse> listener = new ActionListener<UpdateModelSnapshotResponse>() {
            @Override
            public void onResponse(UpdateModelSnapshotResponse updateModelSnapshotResponse) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::update-model-snapshot-execute-listener

        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::update-model-snapshot-execute-async
        client.machineLearning().updateModelSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::update-model-snapshot-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// NOTE(review): documentation IT for the ML put-calendar API; the tag::/end::
// regions below are extracted verbatim into the reference docs.
public void testPutCalendar() throws IOException, InterruptedException {
    RestHighLevelClient client = highLevelClient();

    // tag::put-calendar-request
    Calendar calendar = new Calendar("public_holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
    PutCalendarRequest request = new PutCalendarRequest(calendar); // <1>
    // end::put-calendar-request
    // tag::put-calendar-execute
    PutCalendarResponse response = client.machineLearning().putCalendar(request, RequestOptions.DEFAULT);
    // end::put-calendar-execute
    // tag::put-calendar-response
    Calendar newCalendar = response.getCalendar(); // <1>
    // end::put-calendar-response

    assertThat(newCalendar.getId(), equalTo("public_holidays"));

    // tag::put-calendar-execute-listener
    ActionListener<PutCalendarResponse> listener = new ActionListener<PutCalendarResponse>() {
        @Override
        public void onResponse(PutCalendarResponse response) {
            // <1>
        }

        @Override
        public void onFailure(Exception e) {
            // <2>
        }
    };
    // end::put-calendar-execute-listener

    // Replace the empty listener by a blocking listener in test
    final CountDownLatch latch = new CountDownLatch(1);
    listener = new LatchedActionListener<>(listener, latch);

    // tag::put-calendar-execute-async
    client.machineLearning().putCalendarAsync(request, RequestOptions.DEFAULT, listener); // <1>
    // end::put-calendar-execute-async

    assertTrue(latch.await(30L, TimeUnit.SECONDS));
}

// NOTE(review): documentation IT for the ML put-calendar-job API; the
// tag::/end:: regions below are extracted verbatim into the reference docs.
public void testPutCalendarJob() throws IOException, InterruptedException {
    RestHighLevelClient client = highLevelClient();
    Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
    PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
    client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
    {
        // tag::put-calendar-job-request
        PutCalendarJobRequest request = new PutCalendarJobRequest("holidays", // <1>
            "job_2", "job_group_1"); // <2>
        // end::put-calendar-job-request
        // tag::put-calendar-job-execute
        PutCalendarResponse response = client.machineLearning().putCalendarJob(request, RequestOptions.DEFAULT);
        // end::put-calendar-job-execute
        // tag::put-calendar-job-response
        Calendar updatedCalendar = response.getCalendar(); // <1>
        // end::put-calendar-job-response

        assertThat(updatedCalendar.getJobIds(), containsInAnyOrder("job_1", "job_2", "job_group_1"));
    }
    {
        PutCalendarJobRequest request =
new PutCalendarJobRequest("holidays", "job_4");

        // tag::put-calendar-job-execute-listener
        ActionListener<PutCalendarResponse> listener = new ActionListener<PutCalendarResponse>() {
            @Override
            public void onResponse(PutCalendarResponse putCalendarsResponse) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::put-calendar-job-execute-listener

        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::put-calendar-job-execute-async
        client.machineLearning().putCalendarJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::put-calendar-job-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// NOTE(review): documentation IT for the ML delete-calendar-job API; the
// tag::/end:: regions below are extracted verbatim into the reference docs.
public void testDeleteCalendarJob() throws IOException, InterruptedException {
    RestHighLevelClient client = highLevelClient();
    Calendar calendar = new Calendar("holidays", Arrays.asList("job_1", "job_group_1", "job_2"), "A calendar for public holidays");
    PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
    client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
    {
        // tag::delete-calendar-job-request
        DeleteCalendarJobRequest request = new DeleteCalendarJobRequest("holidays", // <1>
            "job_1", "job_group_1"); // <2>
        // end::delete-calendar-job-request
        // tag::delete-calendar-job-execute
        PutCalendarResponse response = client.machineLearning().deleteCalendarJob(request, RequestOptions.DEFAULT);
        // end::delete-calendar-job-execute
        // tag::delete-calendar-job-response
        Calendar updatedCalendar = response.getCalendar(); // <1>
        // end::delete-calendar-job-response

        assertThat(updatedCalendar.getJobIds(), containsInAnyOrder("job_2"));
    }
    {
        DeleteCalendarJobRequest request = new DeleteCalendarJobRequest("holidays", "job_2");

        // tag::delete-calendar-job-execute-listener
        ActionListener<PutCalendarResponse> listener = new ActionListener<PutCalendarResponse>() {
            @Override
            public void onResponse(PutCalendarResponse deleteCalendarsResponse) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::delete-calendar-job-execute-listener

        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::delete-calendar-job-execute-async
        client.machineLearning().deleteCalendarJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::delete-calendar-job-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// NOTE(review): documentation IT for the ML get-calendars API; the tag::/end::
// regions below are extracted verbatim into the reference docs.
public void testGetCalendar() throws IOException, InterruptedException {
    RestHighLevelClient client = highLevelClient();
    Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
    PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
    client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
    {
        // tag::get-calendars-request
        GetCalendarsRequest request = new GetCalendarsRequest(); // <1>
        // end::get-calendars-request
        // tag::get-calendars-id
        request.setCalendarId("holidays"); // <1>
        // end::get-calendars-id
        // tag::get-calendars-page
        request.setPageParams(new PageParams(10, 20)); // <1>
        // end::get-calendars-page

        // reset page params
        request.setPageParams(null);

        // tag::get-calendars-execute
        GetCalendarsResponse response = client.machineLearning().getCalendars(request, RequestOptions.DEFAULT);
        // end::get-calendars-execute
        // tag::get-calendars-response
        long count = response.count(); // <1>
        List<Calendar> calendars = response.calendars(); // <2>
        // end::get-calendars-response
        assertEquals(1, calendars.size());
    }
    {
        GetCalendarsRequest request = new GetCalendarsRequest("holidays");

        // tag::get-calendars-execute-listener
        ActionListener<GetCalendarsResponse> listener = new ActionListener<GetCalendarsResponse>() {
            @Override
            public void onResponse(GetCalendarsResponse getCalendarsResponse) {
                // <1>
            }

            @Override
            public void
onFailure(Exception e) {
                // <2>
            }
        };
        // end::get-calendars-execute-listener

        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::get-calendars-execute-async
        client.machineLearning().getCalendarsAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::get-calendars-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// NOTE(review): documentation IT for the ML delete-calendar API; the
// tag::/end:: regions below are extracted verbatim into the reference docs.
public void testDeleteCalendar() throws IOException, InterruptedException {
    RestHighLevelClient client = highLevelClient();
    Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
    PutCalendarRequest putCalendarRequest = new PutCalendarRequest(calendar);
    client.machineLearning().putCalendar(putCalendarRequest, RequestOptions.DEFAULT);

    // tag::delete-calendar-request
    DeleteCalendarRequest request = new DeleteCalendarRequest("holidays"); // <1>
    // end::delete-calendar-request
    // tag::delete-calendar-execute
    AcknowledgedResponse response = client.machineLearning().deleteCalendar(request, RequestOptions.DEFAULT);
    // end::delete-calendar-execute
    // tag::delete-calendar-response
    boolean isAcknowledged = response.isAcknowledged(); // <1>
    // end::delete-calendar-response

    assertTrue(isAcknowledged);

    // tag::delete-calendar-execute-listener
    ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
        @Override
        public void onResponse(AcknowledgedResponse response) {
            // <1>
        }

        @Override
        public void onFailure(Exception e) {
            // <2>
        }
    };
    // end::delete-calendar-execute-listener

    // Replace the empty listener by a blocking listener in test
    final CountDownLatch latch = new CountDownLatch(1);
    listener = new LatchedActionListener<>(listener, latch);

    // tag::delete-calendar-execute-async
    client.machineLearning().deleteCalendarAsync(request, RequestOptions.DEFAULT, listener); // <1>
    // end::delete-calendar-execute-async

    assertTrue(latch.await(30L, TimeUnit.SECONDS));
}

// NOTE(review): documentation IT for the ML get-calendar-events API; the
// tag::/end:: regions below are extracted verbatim into the reference docs.
public void testGetCalendarEvent() throws IOException, InterruptedException {
    RestHighLevelClient client = highLevelClient();
    Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
    PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
    client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
    List<ScheduledEvent> events = Collections.singletonList(ScheduledEventTests.testInstance(calendar.getId(), null));
    client.machineLearning().postCalendarEvent(new PostCalendarEventRequest("holidays", events), RequestOptions.DEFAULT);
    {
        // tag::get-calendar-events-request
        GetCalendarEventsRequest request = new GetCalendarEventsRequest("holidays"); // <1>
        // end::get-calendar-events-request
        // tag::get-calendar-events-page
        request.setPageParams(new PageParams(10, 20)); // <1>
        // end::get-calendar-events-page
        // tag::get-calendar-events-start
        request.setStart("2018-08-01T00:00:00Z"); // <1>
        // end::get-calendar-events-start
        // tag::get-calendar-events-end
        request.setEnd("2018-08-02T00:00:00Z"); // <1>
        // end::get-calendar-events-end
        // tag::get-calendar-events-jobid
        request.setJobId("job_1"); // <1>
        // end::get-calendar-events-jobid

        // reset params
        request.setPageParams(null);
        request.setJobId(null);
        request.setStart(null);
        request.setEnd(null);

        // tag::get-calendar-events-execute
        GetCalendarEventsResponse response = client.machineLearning().getCalendarEvents(request, RequestOptions.DEFAULT);
        // end::get-calendar-events-execute
        // tag::get-calendar-events-response
        long count = response.count(); // <1>
        List<ScheduledEvent> scheduledEvents = response.events(); // <2>
        // end::get-calendar-events-response
        assertEquals(1, scheduledEvents.size());
    }
    {
        GetCalendarEventsRequest request = new GetCalendarEventsRequest("holidays");

        // tag::get-calendar-events-execute-listener
        ActionListener<GetCalendarEventsResponse> listener = new
ActionListener<GetCalendarEventsResponse>() {
            @Override
            public void onResponse(GetCalendarEventsResponse getCalendarsResponse) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::get-calendar-events-execute-listener

        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::get-calendar-events-execute-async
        client.machineLearning().getCalendarEventsAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::get-calendar-events-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// NOTE(review): documentation IT for the ML post-calendar-event API; the
// tag::/end:: regions below are extracted verbatim into the reference docs.
public void testPostCalendarEvent() throws IOException, InterruptedException {
    RestHighLevelClient client = highLevelClient();
    Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
    PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
    client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
    {
        List<ScheduledEvent> events = Collections.singletonList(ScheduledEventTests.testInstance(calendar.getId(), null));

        // tag::post-calendar-event-request
        PostCalendarEventRequest request = new PostCalendarEventRequest("holidays", // <1>
            events); // <2>
        // end::post-calendar-event-request
        // tag::post-calendar-event-execute
        PostCalendarEventResponse response = client.machineLearning().postCalendarEvent(request, RequestOptions.DEFAULT);
        // end::post-calendar-event-execute
        // tag::post-calendar-event-response
        List<ScheduledEvent> scheduledEvents = response.getScheduledEvents(); // <1>
        // end::post-calendar-event-response

        assertEquals(1, scheduledEvents.size());
    }
    {
        List<ScheduledEvent> events = Collections.singletonList(ScheduledEventTests.testInstance());
        PostCalendarEventRequest request = new PostCalendarEventRequest("holidays", events); // <1>

        // tag::post-calendar-event-execute-listener
        ActionListener<PostCalendarEventResponse> listener = new
ActionListener<PostCalendarEventResponse>() {
            @Override
            public void onResponse(PostCalendarEventResponse postCalendarsResponse) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::post-calendar-event-execute-listener

        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::post-calendar-event-execute-async
        client.machineLearning().postCalendarEventAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::post-calendar-event-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// NOTE(review): documentation IT for the ML delete-calendar-event API; the
// tag::/end:: regions below are extracted verbatim into the reference docs.
public void testDeleteCalendarEvent() throws IOException, InterruptedException {
    RestHighLevelClient client = highLevelClient();
    Calendar calendar = new Calendar("holidays", Arrays.asList("job_1", "job_group_1", "job_2"), "A calendar for public holidays");
    PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
    client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
    List<ScheduledEvent> events = Arrays.asList(ScheduledEventTests.testInstance(calendar.getId(), null),
        ScheduledEventTests.testInstance(calendar.getId(), null));
    client.machineLearning().postCalendarEvent(new PostCalendarEventRequest("holidays", events), RequestOptions.DEFAULT);
    // Fetch the generated event ids; the docs snippet below uses a placeholder id
    GetCalendarEventsResponse getCalendarEventsResponse =
        client.machineLearning().getCalendarEvents(new GetCalendarEventsRequest("holidays"), RequestOptions.DEFAULT);
    {
        // tag::delete-calendar-event-request
        DeleteCalendarEventRequest request = new DeleteCalendarEventRequest("holidays", // <1>
            "EventId"); // <2>
        // end::delete-calendar-event-request

        request = new DeleteCalendarEventRequest("holidays", getCalendarEventsResponse.events().get(0).getEventId());

        // tag::delete-calendar-event-execute
        AcknowledgedResponse response = client.machineLearning().deleteCalendarEvent(request, RequestOptions.DEFAULT);
        // end::delete-calendar-event-execute
        // tag::delete-calendar-event-response
        boolean acknowledged = response.isAcknowledged(); // <1>
        // end::delete-calendar-event-response

        assertThat(acknowledged, is(true));
    }
    {
        DeleteCalendarEventRequest request = new DeleteCalendarEventRequest("holidays",
            getCalendarEventsResponse.events().get(1).getEventId());

        // tag::delete-calendar-event-execute-listener
        ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
            @Override
            public void onResponse(AcknowledgedResponse deleteCalendarEventResponse) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::delete-calendar-event-execute-listener

        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::delete-calendar-event-execute-async
        client.machineLearning().deleteCalendarEventAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::delete-calendar-event-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// NOTE(review): documentation IT for the get-data-frame-analytics API; the
// tag::/end:: regions below are extracted verbatim into the reference docs.
public void testGetDataFrameAnalytics() throws Exception {
    createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);

    RestHighLevelClient client = highLevelClient();
    client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
    {
        // tag::get-data-frame-analytics-request
        GetDataFrameAnalyticsRequest request = new GetDataFrameAnalyticsRequest("my-analytics-config"); // <1>
        // end::get-data-frame-analytics-request
        // tag::get-data-frame-analytics-execute
        GetDataFrameAnalyticsResponse response = client.machineLearning().getDataFrameAnalytics(request, RequestOptions.DEFAULT);
        // end::get-data-frame-analytics-execute
        // tag::get-data-frame-analytics-response
        List<DataFrameAnalyticsConfig> configs = response.getAnalytics();
        // end::get-data-frame-analytics-response

        assertThat(configs, hasSize(1));
    }
    {
        GetDataFrameAnalyticsRequest request = new
GetDataFrameAnalyticsRequest("my-analytics-config");

        // tag::get-data-frame-analytics-execute-listener
        ActionListener<GetDataFrameAnalyticsResponse> listener = new ActionListener<>() {
            @Override
            public void onResponse(GetDataFrameAnalyticsResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::get-data-frame-analytics-execute-listener

        // Replace the empty listener by a blocking listener in test
        CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::get-data-frame-analytics-execute-async
        client.machineLearning().getDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::get-data-frame-analytics-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// NOTE(review): documentation IT for the get-data-frame-analytics-stats API;
// the tag::/end:: regions below are extracted verbatim into the reference docs.
public void testGetDataFrameAnalyticsStats() throws Exception {
    createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);

    RestHighLevelClient client = highLevelClient();
    client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
    {
        // tag::get-data-frame-analytics-stats-request
        GetDataFrameAnalyticsStatsRequest request = new GetDataFrameAnalyticsStatsRequest("my-analytics-config"); // <1>
        // end::get-data-frame-analytics-stats-request
        // tag::get-data-frame-analytics-stats-execute
        GetDataFrameAnalyticsStatsResponse response = client.machineLearning().getDataFrameAnalyticsStats(request, RequestOptions.DEFAULT);
        // end::get-data-frame-analytics-stats-execute
        // tag::get-data-frame-analytics-stats-response
        List<DataFrameAnalyticsStats> stats = response.getAnalyticsStats();
        // end::get-data-frame-analytics-stats-response

        assertThat(stats, hasSize(1));
    }
    {
        GetDataFrameAnalyticsStatsRequest request = new GetDataFrameAnalyticsStatsRequest("my-analytics-config");

        // tag::get-data-frame-analytics-stats-execute-listener
        ActionListener<GetDataFrameAnalyticsStatsResponse> listener = new ActionListener<>() {
            @Override
            public void onResponse(GetDataFrameAnalyticsStatsResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::get-data-frame-analytics-stats-execute-listener

        // Replace the empty listener by a blocking listener in test
        CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::get-data-frame-analytics-stats-execute-async
        client.machineLearning().getDataFrameAnalyticsStatsAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::get-data-frame-analytics-stats-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// NOTE(review): documentation IT for the put-data-frame-analytics API; the
// tag::/end:: regions below are extracted verbatim into the reference docs.
public void testPutDataFrameAnalytics() throws Exception {
    createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);

    RestHighLevelClient client = highLevelClient();
    {
        // tag::put-data-frame-analytics-query-config
        QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
        // end::put-data-frame-analytics-query-config
        // tag::put-data-frame-analytics-source-config
        DataFrameAnalyticsSource sourceConfig = DataFrameAnalyticsSource.builder() // <1>
            .setIndex("put-test-source-index") // <2>
            .setQueryConfig(queryConfig) // <3>
            .setSourceFiltering(new FetchSourceContext(true,
                new String[] { "included_field_1", "included_field_2" },
                new String[] { "excluded_field" })) // <4>
            .build();
        // end::put-data-frame-analytics-source-config
        // tag::put-data-frame-analytics-dest-config
        DataFrameAnalyticsDest destConfig = DataFrameAnalyticsDest.builder() // <1>
            .setIndex("put-test-dest-index") // <2>
            .build();
        // end::put-data-frame-analytics-dest-config
        // tag::put-data-frame-analytics-outlier-detection-default
        DataFrameAnalysis outlierDetection = OutlierDetection.createDefault(); // <1>
        // end::put-data-frame-analytics-outlier-detection-default
        // tag::put-data-frame-analytics-outlier-detection-customized
        DataFrameAnalysis outlierDetectionCustomized = OutlierDetection.builder() // <1>
            .setMethod(OutlierDetection.Method.DISTANCE_KNN) // <2>
            .setNNeighbors(5) // <3>
.setFeatureInfluenceThreshold(0.1) // <4>
            .setComputeFeatureInfluence(true) // <5>
            .setOutlierFraction(0.05) // <6>
            .setStandardizationEnabled(true) // <7>
            .build();
        // end::put-data-frame-analytics-outlier-detection-customized
        // tag::put-data-frame-analytics-classification
        DataFrameAnalysis classification = Classification.builder("my_dependent_variable") // <1>
            .setLambda(1.0) // <2>
            .setGamma(5.5) // <3>
            .setEta(5.5) // <4>
            .setMaxTrees(50) // <5>
            .setFeatureBagFraction(0.4) // <6>
            .setNumTopFeatureImportanceValues(3) // <7>
            .setPredictionFieldName("my_prediction_field_name") // <8>
            .setTrainingPercent(50.0) // <9>
            .setRandomizeSeed(1234L) // <10>
            .setClassAssignmentObjective(Classification.ClassAssignmentObjective.MAXIMIZE_ACCURACY) // <11>
            .setNumTopClasses(1) // <12>
            .build();
        // end::put-data-frame-analytics-classification
        // tag::put-data-frame-analytics-regression
        DataFrameAnalysis regression = org.elasticsearch.client.ml.dataframe.Regression.builder("my_dependent_variable") // <1>
            .setLambda(1.0) // <2>
            .setGamma(5.5) // <3>
            .setEta(5.5) // <4>
            .setMaxTrees(50) // <5>
            .setFeatureBagFraction(0.4) // <6>
            .setNumTopFeatureImportanceValues(3) // <7>
            .setPredictionFieldName("my_prediction_field_name") // <8>
            .setTrainingPercent(50.0) // <9>
            .setRandomizeSeed(1234L) // <10>
            .setLossFunction(Regression.LossFunction.MSE) // <11>
            .setLossFunctionParameter(1.0) // <12>
            .build();
        // end::put-data-frame-analytics-regression
        // tag::put-data-frame-analytics-analyzed-fields
        FetchSourceContext analyzedFields = new FetchSourceContext(
            true,
            new String[] { "included_field_1", "included_field_2" },
            new String[] { "excluded_field" });
        // end::put-data-frame-analytics-analyzed-fields
        // tag::put-data-frame-analytics-config
        DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder()
            .setId("my-analytics-config") // <1>
            .setSource(sourceConfig) // <2>
            .setDest(destConfig) // <3>
            .setAnalysis(outlierDetection) // <4>
            .setAnalyzedFields(analyzedFields) // <5>
            .setModelMemoryLimit(new ByteSizeValue(5, ByteSizeUnit.MB)) // <6>
            .setDescription("this is an example description") // <7>
            .build();
        // end::put-data-frame-analytics-config
        // tag::put-data-frame-analytics-request
        PutDataFrameAnalyticsRequest request = new PutDataFrameAnalyticsRequest(config); // <1>
        // end::put-data-frame-analytics-request
        // tag::put-data-frame-analytics-execute
        PutDataFrameAnalyticsResponse response = client.machineLearning().putDataFrameAnalytics(request, RequestOptions.DEFAULT);
        // end::put-data-frame-analytics-execute
        // tag::put-data-frame-analytics-response
        DataFrameAnalyticsConfig createdConfig = response.getConfig();
        // end::put-data-frame-analytics-response

        assertThat(createdConfig.getId(), equalTo("my-analytics-config"));
    }
    {
        PutDataFrameAnalyticsRequest request = new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG);

        // tag::put-data-frame-analytics-execute-listener
        ActionListener<PutDataFrameAnalyticsResponse> listener = new ActionListener<>() {
            @Override
            public void onResponse(PutDataFrameAnalyticsResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::put-data-frame-analytics-execute-listener

        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::put-data-frame-analytics-execute-async
        client.machineLearning().putDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::put-data-frame-analytics-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// NOTE(review): documentation IT for the delete-data-frame-analytics API; the
// tag::/end:: regions below are extracted verbatim into the reference docs.
public void testDeleteDataFrameAnalytics() throws Exception {
    createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);

    RestHighLevelClient client = highLevelClient();
    client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
    {
        // tag::delete-data-frame-analytics-request
        DeleteDataFrameAnalyticsRequest request = new
DeleteDataFrameAnalyticsRequest("my-analytics-config"); // <1>
        // end::delete-data-frame-analytics-request
        //tag::delete-data-frame-analytics-request-options
        request.setForce(false); // <1>
        request.setTimeout(TimeValue.timeValueMinutes(1)); // <2>
        //end::delete-data-frame-analytics-request-options
        // tag::delete-data-frame-analytics-execute
        AcknowledgedResponse response = client.machineLearning().deleteDataFrameAnalytics(request, RequestOptions.DEFAULT);
        // end::delete-data-frame-analytics-execute
        // tag::delete-data-frame-analytics-response
        boolean acknowledged = response.isAcknowledged();
        // end::delete-data-frame-analytics-response

        assertThat(acknowledged, is(true));
    }
    // Re-create the config so the async variant has something to delete as well
    client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
    {
        DeleteDataFrameAnalyticsRequest request = new DeleteDataFrameAnalyticsRequest("my-analytics-config");

        // tag::delete-data-frame-analytics-execute-listener
        ActionListener<AcknowledgedResponse> listener = new ActionListener<>() {
            @Override
            public void onResponse(AcknowledgedResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::delete-data-frame-analytics-execute-listener

        // Replace the empty listener by a blocking listener in test
        CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::delete-data-frame-analytics-execute-async
        client.machineLearning().deleteDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::delete-data-frame-analytics-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}

// NOTE(review): documentation IT for the start-data-frame-analytics API; the
// tag::/end:: regions below are extracted verbatim into the reference docs.
public void testStartDataFrameAnalytics() throws Exception {
    createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);
    // Seed one source document so the analytics task has data to process
    highLevelClient().index(
        new IndexRequest(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]).source(XContentType.JSON, "total", 10000)
            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), RequestOptions.DEFAULT);
    RestHighLevelClient client = highLevelClient();
    client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
    {
        // tag::start-data-frame-analytics-request
        StartDataFrameAnalyticsRequest request = new StartDataFrameAnalyticsRequest("my-analytics-config"); // <1>
        // end::start-data-frame-analytics-request
        // tag::start-data-frame-analytics-execute
        StartDataFrameAnalyticsResponse response = client.machineLearning().startDataFrameAnalytics(request, RequestOptions.DEFAULT);
        // end::start-data-frame-analytics-execute
        // tag::start-data-frame-analytics-response
        boolean acknowledged = response.isAcknowledged();
        String node = response.getNode(); // <1>
        // end::start-data-frame-analytics-response

        assertThat(acknowledged, is(true));
        assertThat(node, notNullValue());
    }
    // Wait for the task started above to run to completion before starting again
    assertBusy(
        () -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
        30, TimeUnit.SECONDS);
    {
        StartDataFrameAnalyticsRequest request = new StartDataFrameAnalyticsRequest("my-analytics-config");

        // tag::start-data-frame-analytics-execute-listener
        ActionListener<StartDataFrameAnalyticsResponse> listener = new ActionListener<>() {
            @Override
            public void onResponse(StartDataFrameAnalyticsResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::start-data-frame-analytics-execute-listener

        // Replace the empty listener by a blocking listener in test
        CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::start-data-frame-analytics-execute-async
        client.machineLearning().startDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::start-data-frame-analytics-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
    assertBusy(
        () -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
        30, TimeUnit.SECONDS);
}

public void testStopDataFrameAnalytics() throws Exception {
    createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);
    // Seed one source document so the analytics task has data to process
    highLevelClient().index(
        new IndexRequest(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]).source(XContentType.JSON, "total", 10000)
            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), RequestOptions.DEFAULT);
    RestHighLevelClient client = highLevelClient();
    client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
    {
        // tag::stop-data-frame-analytics-request
        StopDataFrameAnalyticsRequest request = new StopDataFrameAnalyticsRequest("my-analytics-config"); // <1>
        request.setForce(false); // <2>
        // end::stop-data-frame-analytics-request
        // tag::stop-data-frame-analytics-execute
        StopDataFrameAnalyticsResponse response = client.machineLearning().stopDataFrameAnalytics(request, RequestOptions.DEFAULT);
        // end::stop-data-frame-analytics-execute
        // tag::stop-data-frame-analytics-response
        boolean acknowledged = response.isStopped();
        // end::stop-data-frame-analytics-response

        assertThat(acknowledged, is(true));
    }
    assertBusy(
        () -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
        30, TimeUnit.SECONDS);
    {
        StopDataFrameAnalyticsRequest request = new StopDataFrameAnalyticsRequest("my-analytics-config");

        // tag::stop-data-frame-analytics-execute-listener
        ActionListener<StopDataFrameAnalyticsResponse> listener = new ActionListener<>() {
            @Override
            public void onResponse(StopDataFrameAnalyticsResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::stop-data-frame-analytics-execute-listener

        // Replace the empty listener by a blocking listener in test
        CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::stop-data-frame-analytics-execute-async
        client.machineLearning().stopDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // 
end::stop-data-frame-analytics-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } assertBusy( () -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)), 30, TimeUnit.SECONDS); } public void testEvaluateDataFrame() throws Exception { String indexName = "evaluate-test-index"; CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName) .mapping(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("label") .field("type", "keyword") .endObject() .startObject("p") .field("type", "double") .endObject() .endObject() .endObject()); BulkRequest bulkRequest = new BulkRequest(indexName) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.1)) // #0 .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.2)) // #1 .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.3)) // #2 .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.4)) // #3 .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.7)) // #4 .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.2)) // #5 .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.3)) // #6 .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.4)) // #7 .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.8)) // #8 .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.9)); // #9 RestHighLevelClient client = highLevelClient(); client.indices().create(createIndexRequest, RequestOptions.DEFAULT); client.bulk(bulkRequest, RequestOptions.DEFAULT); { // tag::evaluate-data-frame-evaluation-softclassification Evaluation 
evaluation = new BinarySoftClassification( // <1> "label", // <2> "p", // <3> // Evaluation metrics // <4> PrecisionMetric.at(0.4, 0.5, 0.6), // <5> RecallMetric.at(0.5, 0.7), // <6> ConfusionMatrixMetric.at(0.5), // <7> AucRocMetric.withCurve()); // <8> // end::evaluate-data-frame-evaluation-softclassification // tag::evaluate-data-frame-request EvaluateDataFrameRequest request = new EvaluateDataFrameRequest( // <1> indexName, // <2> new QueryConfig(QueryBuilders.termQuery("dataset", "blue")), // <3> evaluation); // <4> // end::evaluate-data-frame-request // tag::evaluate-data-frame-execute EvaluateDataFrameResponse response = client.machineLearning().evaluateDataFrame(request, RequestOptions.DEFAULT); // end::evaluate-data-frame-execute // tag::evaluate-data-frame-response List<EvaluationMetric.Result> metrics = response.getMetrics(); // <1> // end::evaluate-data-frame-response // tag::evaluate-data-frame-results-softclassification PrecisionMetric.Result precisionResult = response.getMetricByName(PrecisionMetric.NAME); // <1> double precision = precisionResult.getScoreByThreshold("0.4"); // <2> ConfusionMatrixMetric.Result confusionMatrixResult = response.getMetricByName(ConfusionMatrixMetric.NAME); // <3> ConfusionMatrix confusionMatrix = confusionMatrixResult.getScoreByThreshold("0.5"); // <4> // end::evaluate-data-frame-results-softclassification assertThat( metrics.stream().map(EvaluationMetric.Result::getMetricName).collect(Collectors.toList()), containsInAnyOrder(PrecisionMetric.NAME, RecallMetric.NAME, ConfusionMatrixMetric.NAME, AucRocMetric.NAME)); assertThat(precision, closeTo(0.6, 1e-9)); assertThat(confusionMatrix.getTruePositives(), equalTo(2L)); // docs #8 and #9 assertThat(confusionMatrix.getFalsePositives(), equalTo(1L)); // doc #4 assertThat(confusionMatrix.getTrueNegatives(), equalTo(4L)); // docs #0, #1, #2 and #3 assertThat(confusionMatrix.getFalseNegatives(), equalTo(3L)); // docs #5, #6 and #7 } { EvaluateDataFrameRequest request = new 
EvaluateDataFrameRequest( indexName, new QueryConfig(QueryBuilders.termQuery("dataset", "blue")), new BinarySoftClassification( "label", "p", PrecisionMetric.at(0.4, 0.5, 0.6), RecallMetric.at(0.5, 0.7), ConfusionMatrixMetric.at(0.5), AucRocMetric.withCurve())); // tag::evaluate-data-frame-execute-listener ActionListener<EvaluateDataFrameResponse> listener = new ActionListener<>() { @Override public void onResponse(EvaluateDataFrameResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::evaluate-data-frame-execute-listener // Replace the empty listener by a blocking listener in test CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::evaluate-data-frame-execute-async client.machineLearning().evaluateDataFrameAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::evaluate-data-frame-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testEvaluateDataFrame_Classification() throws Exception { String indexName = "evaluate-classification-test-index"; CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName) .mapping(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("actual_class") .field("type", "keyword") .endObject() .startObject("predicted_class") .field("type", "keyword") .endObject() .endObject() .endObject()); BulkRequest bulkRequest = new BulkRequest(indexName) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "cat")) // #0 .add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "cat")) // #1 .add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "cat")) // #2 .add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "dog")) // #3 .add(new IndexRequest().source(XContentType.JSON, 
"actual_class", "cat", "predicted_class", "fox")) // #4 .add(new IndexRequest().source(XContentType.JSON, "actual_class", "dog", "predicted_class", "cat")) // #5 .add(new IndexRequest().source(XContentType.JSON, "actual_class", "dog", "predicted_class", "dog")) // #6 .add(new IndexRequest().source(XContentType.JSON, "actual_class", "dog", "predicted_class", "dog")) // #7 .add(new IndexRequest().source(XContentType.JSON, "actual_class", "dog", "predicted_class", "dog")) // #8 .add(new IndexRequest().source(XContentType.JSON, "actual_class", "ant", "predicted_class", "cat")); // #9 RestHighLevelClient client = highLevelClient(); client.indices().create(createIndexRequest, RequestOptions.DEFAULT); client.bulk(bulkRequest, RequestOptions.DEFAULT); { // tag::evaluate-data-frame-evaluation-classification Evaluation evaluation = new org.elasticsearch.client.ml.dataframe.evaluation.classification.Classification( // <1> "actual_class", // <2> "predicted_class", // <3> // Evaluation metrics // <4> new AccuracyMetric(), // <5> new org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric(), // <6> new org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric(), // <7> new MulticlassConfusionMatrixMetric(3)); // <8> // end::evaluate-data-frame-evaluation-classification EvaluateDataFrameRequest request = new EvaluateDataFrameRequest(indexName, null, evaluation); EvaluateDataFrameResponse response = client.machineLearning().evaluateDataFrame(request, RequestOptions.DEFAULT); // tag::evaluate-data-frame-results-classification AccuracyMetric.Result accuracyResult = response.getMetricByName(AccuracyMetric.NAME); // <1> double accuracy = accuracyResult.getOverallAccuracy(); // <2> org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric.Result precisionResult = response.getMetricByName(org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric.NAME); // <3> double precision = 
precisionResult.getAvgPrecision(); // <4> org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric.Result recallResult = response.getMetricByName(org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric.NAME); // <5> double recall = recallResult.getAvgRecall(); // <6> MulticlassConfusionMatrixMetric.Result multiclassConfusionMatrix = response.getMetricByName(MulticlassConfusionMatrixMetric.NAME); // <7> List<ActualClass> confusionMatrix = multiclassConfusionMatrix.getConfusionMatrix(); // <8> long otherClassesCount = multiclassConfusionMatrix.getOtherActualClassCount(); // <9> // end::evaluate-data-frame-results-classification assertThat(accuracyResult.getMetricName(), equalTo(AccuracyMetric.NAME)); assertThat(accuracy, equalTo(0.6)); assertThat( precisionResult.getMetricName(), equalTo(org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric.NAME)); assertThat(precision, equalTo(0.675)); assertThat( recallResult.getMetricName(), equalTo(org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric.NAME)); assertThat(recall, equalTo(0.45)); assertThat(multiclassConfusionMatrix.getMetricName(), equalTo(MulticlassConfusionMatrixMetric.NAME)); assertThat( confusionMatrix, equalTo( List.of( new ActualClass( "ant", 1L, List.of(new PredictedClass("ant", 0L), new PredictedClass("cat", 1L), new PredictedClass("dog", 0L)), 0L), new ActualClass( "cat", 5L, List.of(new PredictedClass("ant", 0L), new PredictedClass("cat", 3L), new PredictedClass("dog", 1L)), 1L), new ActualClass( "dog", 4L, List.of(new PredictedClass("ant", 0L), new PredictedClass("cat", 1L), new PredictedClass("dog", 3L)), 0L)))); assertThat(otherClassesCount, equalTo(0L)); } } public void testEvaluateDataFrame_Regression() throws Exception { String indexName = "evaluate-classification-test-index"; CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName) .mapping(XContentFactory.jsonBuilder().startObject() 
.startObject("properties") .startObject("actual_value") .field("type", "double") .endObject() .startObject("predicted_value") .field("type", "double") .endObject() .endObject() .endObject()); BulkRequest bulkRequest = new BulkRequest(indexName) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.0, "predicted_value", 1.0)) // #0 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.0, "predicted_value", 0.9)) // #1 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 2.0, "predicted_value", 2.0)) // #2 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.5, "predicted_value", 1.4)) // #3 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.2, "predicted_value", 1.3)) // #4 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.7, "predicted_value", 2.0)) // #5 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 2.1, "predicted_value", 2.1)) // #6 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 2.5, "predicted_value", 2.7)) // #7 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 0.8, "predicted_value", 1.0)) // #8 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 2.5, "predicted_value", 2.4)); // #9 RestHighLevelClient client = highLevelClient(); client.indices().create(createIndexRequest, RequestOptions.DEFAULT); client.bulk(bulkRequest, RequestOptions.DEFAULT); { // tag::evaluate-data-frame-evaluation-regression Evaluation evaluation = new org.elasticsearch.client.ml.dataframe.evaluation.regression.Regression( // <1> "actual_value", // <2> "predicted_value", // <3> // Evaluation metrics // <4> new MeanSquaredErrorMetric(), // <5> new RSquaredMetric()); // <6> // end::evaluate-data-frame-evaluation-regression EvaluateDataFrameRequest request = new EvaluateDataFrameRequest(indexName, null, evaluation); EvaluateDataFrameResponse response = 
client.machineLearning().evaluateDataFrame(request, RequestOptions.DEFAULT); // tag::evaluate-data-frame-results-regression MeanSquaredErrorMetric.Result meanSquaredErrorResult = response.getMetricByName(MeanSquaredErrorMetric.NAME); // <1> double meanSquaredError = meanSquaredErrorResult.getError(); // <2> RSquaredMetric.Result rSquaredResult = response.getMetricByName(RSquaredMetric.NAME); // <3> double rSquared = rSquaredResult.getValue(); // <4> // end::evaluate-data-frame-results-regression assertThat(meanSquaredError, closeTo(0.021, 1e-3)); assertThat(rSquared, closeTo(0.941, 1e-3)); } } public void testExplainDataFrameAnalytics() throws Exception { createIndex("explain-df-test-source-index"); BulkRequest bulkRequest = new BulkRequest("explain-df-test-source-index") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); for (int i = 0; i < 10; ++i) { bulkRequest.add(new IndexRequest().source(XContentType.JSON, "timestamp", 123456789L, "total", 10L)); } RestHighLevelClient client = highLevelClient(); client.bulk(bulkRequest, RequestOptions.DEFAULT); { // tag::explain-data-frame-analytics-id-request ExplainDataFrameAnalyticsRequest request = new ExplainDataFrameAnalyticsRequest("existing_job_id"); // <1> // end::explain-data-frame-analytics-id-request // tag::explain-data-frame-analytics-config-request DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder() .setSource(DataFrameAnalyticsSource.builder().setIndex("explain-df-test-source-index").build()) .setAnalysis(OutlierDetection.createDefault()) .build(); request = new ExplainDataFrameAnalyticsRequest(config); // <1> // end::explain-data-frame-analytics-config-request // tag::explain-data-frame-analytics-execute ExplainDataFrameAnalyticsResponse response = client.machineLearning().explainDataFrameAnalytics(request, RequestOptions.DEFAULT); // end::explain-data-frame-analytics-execute // tag::explain-data-frame-analytics-response List<FieldSelection> fieldSelection = 
response.getFieldSelection(); // <1> MemoryEstimation memoryEstimation = response.getMemoryEstimation(); // <2> // end::explain-data-frame-analytics-response assertThat(fieldSelection.size(), equalTo(2)); assertThat(fieldSelection.stream().map(FieldSelection::getName).collect(Collectors.toList()), contains("timestamp", "total")); ByteSizeValue expectedMemoryWithoutDisk = memoryEstimation.getExpectedMemoryWithoutDisk(); // <1> ByteSizeValue expectedMemoryWithDisk = memoryEstimation.getExpectedMemoryWithDisk(); // <2> // We are pretty liberal here as this test does not aim at verifying concrete numbers but rather end-to-end user workflow. ByteSizeValue lowerBound = new ByteSizeValue(1, ByteSizeUnit.KB); ByteSizeValue upperBound = new ByteSizeValue(1, ByteSizeUnit.GB); assertThat(expectedMemoryWithoutDisk, allOf(greaterThan(lowerBound), lessThan(upperBound))); assertThat(expectedMemoryWithDisk, allOf(greaterThan(lowerBound), lessThan(upperBound))); } { DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder() .setSource(DataFrameAnalyticsSource.builder().setIndex("explain-df-test-source-index").build()) .setAnalysis(OutlierDetection.createDefault()) .build(); ExplainDataFrameAnalyticsRequest request = new ExplainDataFrameAnalyticsRequest(config); // tag::explain-data-frame-analytics-execute-listener ActionListener<ExplainDataFrameAnalyticsResponse> listener = new ActionListener<>() { @Override public void onResponse(ExplainDataFrameAnalyticsResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::explain-data-frame-analytics-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::explain-data-frame-analytics-execute-async client.machineLearning().explainDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::explain-data-frame-analytics-execute-async 
assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testGetTrainedModels() throws Exception { putTrainedModel("my-trained-model"); RestHighLevelClient client = highLevelClient(); { // tag::get-trained-models-request GetTrainedModelsRequest request = new GetTrainedModelsRequest("my-trained-model") // <1> .setPageParams(new PageParams(0, 1)) // <2> .setIncludeDefinition(false) // <3> .setDecompressDefinition(false) // <4> .setAllowNoMatch(true) // <5> .setTags("regression") // <6> .setForExport(false); // <7> // end::get-trained-models-request request.setTags((List<String>)null); // tag::get-trained-models-execute GetTrainedModelsResponse response = client.machineLearning().getTrainedModels(request, RequestOptions.DEFAULT); // end::get-trained-models-execute // tag::get-trained-models-response List<TrainedModelConfig> models = response.getTrainedModels(); // end::get-trained-models-response assertThat(models, hasSize(1)); } { GetTrainedModelsRequest request = new GetTrainedModelsRequest("my-trained-model"); // tag::get-trained-models-execute-listener ActionListener<GetTrainedModelsResponse> listener = new ActionListener<>() { @Override public void onResponse(GetTrainedModelsResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-trained-models-execute-listener // Replace the empty listener by a blocking listener in test CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-trained-models-execute-async client.machineLearning().getTrainedModelsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-trained-models-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testPutTrainedModel() throws Exception { TrainedModelDefinition definition = TrainedModelDefinitionTests.createRandomBuilder(TargetType.REGRESSION).build(); // tag::put-trained-model-config TrainedModelConfig trainedModelConfig = 
TrainedModelConfig.builder() .setDefinition(definition) // <1> .setCompressedDefinition(InferenceToXContentCompressor.deflate(definition)) // <2> .setModelId("my-new-trained-model") // <3> .setInput(new TrainedModelInput("col1", "col2", "col3", "col4")) // <4> .setDescription("test model") // <5> .setMetadata(new HashMap<>()) // <6> .setTags("my_regression_models") // <7> .setInferenceConfig(new RegressionConfig("value", 0)) // <8> .build(); // end::put-trained-model-config trainedModelConfig = TrainedModelConfig.builder() .setDefinition(definition) .setInferenceConfig(new RegressionConfig(null, null)) .setModelId("my-new-trained-model") .setInput(new TrainedModelInput("col1", "col2", "col3", "col4")) .setDescription("test model") .setMetadata(new HashMap<>()) .setTags("my_regression_models") .build(); RestHighLevelClient client = highLevelClient(); { // tag::put-trained-model-request PutTrainedModelRequest request = new PutTrainedModelRequest(trainedModelConfig); // <1> // end::put-trained-model-request // tag::put-trained-model-execute PutTrainedModelResponse response = client.machineLearning().putTrainedModel(request, RequestOptions.DEFAULT); // end::put-trained-model-execute // tag::put-trained-model-response TrainedModelConfig model = response.getResponse(); // end::put-trained-model-response assertThat(model.getModelId(), equalTo(trainedModelConfig.getModelId())); highLevelClient().machineLearning() .deleteTrainedModel(new DeleteTrainedModelRequest("my-new-trained-model"), RequestOptions.DEFAULT); } { PutTrainedModelRequest request = new PutTrainedModelRequest(trainedModelConfig); // tag::put-trained-model-execute-listener ActionListener<PutTrainedModelResponse> listener = new ActionListener<>() { @Override public void onResponse(PutTrainedModelResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::put-trained-model-execute-listener // Replace the empty listener by a blocking listener in test CountDownLatch latch = 
new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::put-trained-model-execute-async client.machineLearning().putTrainedModelAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::put-trained-model-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); highLevelClient().machineLearning() .deleteTrainedModel(new DeleteTrainedModelRequest("my-new-trained-model"), RequestOptions.DEFAULT); } } public void testGetTrainedModelsStats() throws Exception { putTrainedModel("my-trained-model"); RestHighLevelClient client = highLevelClient(); { // tag::get-trained-models-stats-request GetTrainedModelsStatsRequest request = new GetTrainedModelsStatsRequest("my-trained-model") // <1> .setPageParams(new PageParams(0, 1)) // <2> .setAllowNoMatch(true); // <3> // end::get-trained-models-stats-request // tag::get-trained-models-stats-execute GetTrainedModelsStatsResponse response = client.machineLearning().getTrainedModelsStats(request, RequestOptions.DEFAULT); // end::get-trained-models-stats-execute // tag::get-trained-models-stats-response List<TrainedModelStats> models = response.getTrainedModelStats(); // end::get-trained-models-stats-response assertThat(models, hasSize(1)); } { GetTrainedModelsStatsRequest request = new GetTrainedModelsStatsRequest("my-trained-model"); // tag::get-trained-models-stats-execute-listener ActionListener<GetTrainedModelsStatsResponse> listener = new ActionListener<>() { @Override public void onResponse(GetTrainedModelsStatsResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-trained-models-stats-execute-listener // Replace the empty listener by a blocking listener in test CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-trained-models-stats-execute-async client.machineLearning() .getTrainedModelsStatsAsync(request, RequestOptions.DEFAULT, listener); // <1> // 
end::get-trained-models-stats-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testDeleteTrainedModel() throws Exception { RestHighLevelClient client = highLevelClient(); { putTrainedModel("my-trained-model"); // tag::delete-trained-model-request DeleteTrainedModelRequest request = new DeleteTrainedModelRequest("my-trained-model"); // <1> // end::delete-trained-model-request // tag::delete-trained-model-execute AcknowledgedResponse response = client.machineLearning().deleteTrainedModel(request, RequestOptions.DEFAULT); // end::delete-trained-model-execute // tag::delete-trained-model-response boolean deleted = response.isAcknowledged(); // end::delete-trained-model-response assertThat(deleted, is(true)); } { putTrainedModel("my-trained-model"); DeleteTrainedModelRequest request = new DeleteTrainedModelRequest("my-trained-model"); // tag::delete-trained-model-execute-listener ActionListener<AcknowledgedResponse> listener = new ActionListener<>() { @Override public void onResponse(AcknowledgedResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::delete-trained-model-execute-listener // Replace the empty listener by a blocking listener in test CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::delete-trained-model-execute-async client.machineLearning().deleteTrainedModelAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::delete-trained-model-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testCreateFilter() throws Exception { RestHighLevelClient client = highLevelClient(); { // tag::put-filter-config MlFilter.Builder filterBuilder = MlFilter.builder("my_safe_domains") // <1> .setDescription("A list of safe domains") // <2> .setItems("*.google.com", "wikipedia.org"); // <3> // end::put-filter-config // tag::put-filter-request PutFilterRequest request = new PutFilterRequest(filterBuilder.build()); 
// <1> // end::put-filter-request // tag::put-filter-execute PutFilterResponse response = client.machineLearning().putFilter(request, RequestOptions.DEFAULT); // end::put-filter-execute // tag::put-filter-response MlFilter createdFilter = response.getResponse(); // <1> // end::put-filter-response assertThat(createdFilter.getId(), equalTo("my_safe_domains")); } { MlFilter.Builder filterBuilder = MlFilter.builder("safe_domains_async") .setDescription("A list of safe domains") .setItems("*.google.com", "wikipedia.org"); PutFilterRequest request = new PutFilterRequest(filterBuilder.build()); // tag::put-filter-execute-listener ActionListener<PutFilterResponse> listener = new ActionListener<PutFilterResponse>() { @Override public void onResponse(PutFilterResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::put-filter-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::put-filter-execute-async client.machineLearning().putFilterAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::put-filter-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testGetFilters() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); String filterId = "get-filter-doc-test"; MlFilter.Builder filterBuilder = MlFilter.builder(filterId).setDescription("test").setItems("*.google.com", "wikipedia.org"); client.machineLearning().putFilter(new PutFilterRequest(filterBuilder.build()), RequestOptions.DEFAULT); { // tag::get-filters-request GetFiltersRequest request = new GetFiltersRequest(); // <1> // end::get-filters-request // tag::get-filters-filter-id request.setFilterId("get-filter-doc-test"); // <1> // end::get-filters-filter-id // tag::get-filters-page-params request.setFrom(100); // <1> request.setSize(200); // <2> // 
end::get-filters-page-params request.setFrom(null); request.setSize(null); // tag::get-filters-execute GetFiltersResponse response = client.machineLearning().getFilter(request, RequestOptions.DEFAULT); // end::get-filters-execute // tag::get-filters-response long count = response.count(); // <1> List<MlFilter> filters = response.filters(); // <2> // end::get-filters-response assertEquals(1, filters.size()); } { GetFiltersRequest request = new GetFiltersRequest(); request.setFilterId(filterId); // tag::get-filters-execute-listener ActionListener<GetFiltersResponse> listener = new ActionListener<GetFiltersResponse>() { @Override public void onResponse(GetFiltersResponse getfiltersResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-filters-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-filters-execute-async client.machineLearning().getFilterAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-filters-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testUpdateFilter() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); String filterId = "update-filter-doc-test"; MlFilter.Builder filterBuilder = MlFilter.builder(filterId).setDescription("test").setItems("*.google.com", "wikipedia.org"); client.machineLearning().putFilter(new PutFilterRequest(filterBuilder.build()), RequestOptions.DEFAULT); { // tag::update-filter-request UpdateFilterRequest request = new UpdateFilterRequest(filterId); // <1> // end::update-filter-request // tag::update-filter-description request.setDescription("my new description"); // <1> // end::update-filter-description // tag::update-filter-add-items request.setAddItems(Arrays.asList("*.bing.com", "*.elastic.co")); // <1> // end::update-filter-add-items // 
tag::update-filter-remove-items request.setRemoveItems(Arrays.asList("*.google.com")); // <1> // end::update-filter-remove-items // tag::update-filter-execute PutFilterResponse response = client.machineLearning().updateFilter(request, RequestOptions.DEFAULT); // end::update-filter-execute // tag::update-filter-response MlFilter updatedFilter = response.getResponse(); // <1> // end::update-filter-response assertEquals(request.getDescription(), updatedFilter.getDescription()); } { UpdateFilterRequest request = new UpdateFilterRequest(filterId); // tag::update-filter-execute-listener ActionListener<PutFilterResponse> listener = new ActionListener<PutFilterResponse>() { @Override public void onResponse(PutFilterResponse putFilterResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::update-filter-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::update-filter-execute-async client.machineLearning().updateFilterAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::update-filter-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testDeleteFilter() throws Exception { RestHighLevelClient client = highLevelClient(); String filterId = createFilter(client); { // tag::delete-filter-request DeleteFilterRequest request = new DeleteFilterRequest(filterId); // <1> // end::delete-filter-request // tag::delete-filter-execute AcknowledgedResponse response = client.machineLearning().deleteFilter(request, RequestOptions.DEFAULT); // end::delete-filter-execute // tag::delete-filter-response boolean isAcknowledged = response.isAcknowledged(); // <1> // end::delete-filter-response assertTrue(isAcknowledged); } filterId = createFilter(client); { DeleteFilterRequest request = new DeleteFilterRequest(filterId); // tag::delete-filter-execute-listener 
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() { @Override public void onResponse(AcknowledgedResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::delete-filter-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::delete-filter-execute-async client.machineLearning().deleteFilterAsync(request, RequestOptions.DEFAULT, listener); //<1> // end::delete-filter-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testGetMlInfo() throws Exception { RestHighLevelClient client = highLevelClient(); { // tag::get-ml-info-request MlInfoRequest request = new MlInfoRequest(); // <1> // end::get-ml-info-request // tag::get-ml-info-execute MlInfoResponse response = client.machineLearning() .getMlInfo(request, RequestOptions.DEFAULT); // end::get-ml-info-execute // tag::get-ml-info-response final Map<String, Object> info = response.getInfo();// <1> // end::get-ml-info-response assertTrue(info.containsKey("defaults")); assertTrue(info.containsKey("limits")); } { MlInfoRequest request = new MlInfoRequest(); // tag::get-ml-info-execute-listener ActionListener<MlInfoResponse> listener = new ActionListener<MlInfoResponse>() { @Override public void onResponse(MlInfoResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-ml-info-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-ml-info-execute-async client.machineLearning() .getMlInfoAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-ml-info-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testSetUpgradeMode() throws Exception { RestHighLevelClient 
client = highLevelClient(); { // tag::set-upgrade-mode-request SetUpgradeModeRequest request = new SetUpgradeModeRequest(true); // <1> request.setTimeout(TimeValue.timeValueMinutes(10)); // <2> // end::set-upgrade-mode-request // Set to false so that the cluster setting does not have to be unset at the end of the test. request.setEnabled(false); // tag::set-upgrade-mode-execute AcknowledgedResponse acknowledgedResponse = client.machineLearning().setUpgradeMode(request, RequestOptions.DEFAULT); // end::set-upgrade-mode-execute // tag::set-upgrade-mode-response boolean acknowledged = acknowledgedResponse.isAcknowledged(); // <1> // end::set-upgrade-mode-response assertThat(acknowledged, is(true)); } { SetUpgradeModeRequest request = new SetUpgradeModeRequest(false); // tag::set-upgrade-mode-execute-listener ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() { @Override public void onResponse(AcknowledgedResponse acknowledgedResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::set-upgrade-mode-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::set-upgrade-mode-execute-async client.machineLearning() .setUpgradeModeAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::set-upgrade-mode-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testEstimateModelMemory() throws Exception { RestHighLevelClient client = highLevelClient(); { // tag::estimate-model-memory-request Detector.Builder detectorBuilder = new Detector.Builder() .setFunction("count") .setPartitionFieldName("status"); AnalysisConfig.Builder analysisConfigBuilder = new AnalysisConfig.Builder(Collections.singletonList(detectorBuilder.build())) .setBucketSpan(TimeValue.timeValueMinutes(10)) .setInfluencers(Collections.singletonList("src_ip")); 
EstimateModelMemoryRequest request = new EstimateModelMemoryRequest(analysisConfigBuilder.build()); // <1> request.setOverallCardinality(Collections.singletonMap("status", 50L)); // <2> request.setMaxBucketCardinality(Collections.singletonMap("src_ip", 30L)); // <3> // end::estimate-model-memory-request // tag::estimate-model-memory-execute EstimateModelMemoryResponse estimateModelMemoryResponse = client.machineLearning().estimateModelMemory(request, RequestOptions.DEFAULT); // end::estimate-model-memory-execute // tag::estimate-model-memory-response ByteSizeValue modelMemoryEstimate = estimateModelMemoryResponse.getModelMemoryEstimate(); // <1> long estimateInBytes = modelMemoryEstimate.getBytes(); // end::estimate-model-memory-response assertThat(estimateInBytes, greaterThan(10000000L)); } { AnalysisConfig analysisConfig = AnalysisConfig.builder(Collections.singletonList(Detector.builder().setFunction("count").build())).build(); EstimateModelMemoryRequest request = new EstimateModelMemoryRequest(analysisConfig); // tag::estimate-model-memory-execute-listener ActionListener<EstimateModelMemoryResponse> listener = new ActionListener<EstimateModelMemoryResponse>() { @Override public void onResponse(EstimateModelMemoryResponse estimateModelMemoryResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::estimate-model-memory-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::estimate-model-memory-execute-async client.machineLearning() .estimateModelMemoryAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::estimate-model-memory-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } private String createFilter(RestHighLevelClient client) throws IOException { MlFilter.Builder filterBuilder = MlFilter.builder("my_safe_domains") .setDescription("A list of safe domains") 
.setItems("*.google.com", "wikipedia.org"); PutFilterRequest putFilterRequest = new PutFilterRequest(filterBuilder.build()); PutFilterResponse putFilterResponse = client.machineLearning().putFilter(putFilterRequest, RequestOptions.DEFAULT); MlFilter createdFilter = putFilterResponse.getResponse(); assertThat(createdFilter.getId(), equalTo("my_safe_domains")); return createdFilter.getId(); } private void createIndex(String indexName) throws IOException { CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName); createIndexRequest.mapping(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("timestamp") .field("type", "date") .endObject() .startObject("total") .field("type", "long") .endObject() .endObject() .endObject()); highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT); } private DataFrameAnalyticsState getAnalyticsState(String configId) throws IOException { GetDataFrameAnalyticsStatsResponse statsResponse = highLevelClient().machineLearning().getDataFrameAnalyticsStats( new GetDataFrameAnalyticsStatsRequest(configId), RequestOptions.DEFAULT); assertThat(statsResponse.getAnalyticsStats(), hasSize(1)); DataFrameAnalyticsStats stats = statsResponse.getAnalyticsStats().get(0); return stats.getState(); } private void putTrainedModel(String modelId) throws IOException { TrainedModelDefinition definition = TrainedModelDefinitionTests.createRandomBuilder(TargetType.REGRESSION).build(); TrainedModelConfig trainedModelConfig = TrainedModelConfig.builder() .setDefinition(definition) .setModelId(modelId) .setInferenceConfig(new RegressionConfig("value", 0)) .setInput(new TrainedModelInput(Arrays.asList("col1", "col2", "col3", "col4"))) .setDescription("test model") .build(); highLevelClient().machineLearning().putTrainedModel(new PutTrainedModelRequest(trainedModelConfig), RequestOptions.DEFAULT); } @Override protected NamedXContentRegistry xContentRegistry() { return new NamedXContentRegistry(new 
MlInferenceNamedXContentProvider().getNamedXContentParsers()); } private static final DataFrameAnalyticsConfig DF_ANALYTICS_CONFIG = DataFrameAnalyticsConfig.builder() .setId("my-analytics-config") .setSource(DataFrameAnalyticsSource.builder() .setIndex("put-test-source-index") .build()) .setDest(DataFrameAnalyticsDest.builder() .setIndex("put-test-dest-index") .build()) .setAnalysis(OutlierDetection.createDefault()) .build(); }
uschindler/elasticsearch
client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
Java
apache-2.0
199,205
[ 30522, 1013, 1008, 1008, 7000, 2000, 21274, 17310, 11140, 2104, 2028, 2030, 2062, 12130, 1008, 6105, 10540, 1012, 2156, 1996, 5060, 5371, 5500, 2007, 1008, 2023, 2147, 2005, 3176, 2592, 4953, 9385, 1008, 6095, 1012, 21274, 17310, 11140, 159...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<?php /** * CakePHP(tm) : Rapid Development Framework (http://cakephp.org) * Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org) * * Licensed under The MIT License * For full copyright and license information, please see the LICENSE.txt * Redistributions of files must retain the above copyright notice. * * @copyright Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org) * @link http://cakephp.org CakePHP(tm) Project * @since 0.1.0 * @license http://www.opensource.org/licenses/mit-license.php MIT License */ namespace Bake\Shell\Task; use Cake\Console\Shell; use Cake\Core\Configure; use Cake\Database\Exception; use Cake\Database\Schema\Table; use Cake\Datasource\ConnectionManager; use Cake\ORM\TableRegistry; use Cake\Utility\Inflector; use Cake\Utility\Text; use DateTimeInterface; /** * Task class for creating and updating fixtures files. * * @property \Bake\Shell\Task\BakeTemplateTask $BakeTemplate * @property \Bake\Shell\Task\ModelTask $Model */ class FixtureTask extends BakeTask { /** * Tasks to be loaded by this Task * * @var array */ public $tasks = [ 'Bake.Model', 'Bake.BakeTemplate' ]; /** * Get the file path. * * @return string */ public function getPath() { $dir = 'Fixture/'; $path = defined('TESTS') ? TESTS . $dir : ROOT . DS . 'tests' . DS . $dir; if (isset($this->plugin)) { $path = $this->_pluginPath($this->plugin) . 'tests/' . $dir; } return str_replace('/', DS, $path); } /** * Gets the option parser instance and configures it. * * @return \Cake\Console\ConsoleOptionParser */ public function getOptionParser() { $parser = parent::getOptionParser(); $parser = $parser->setDescription( 'Generate fixtures for use with the test suite. You can use `bake fixture all` to bake all fixtures.' )->addArgument('name', [ 'help' => 'Name of the fixture to bake (without the `Fixture` suffix). ' . 'You can use Plugin.name to bake plugin fixtures.' 
])->addOption('table', [ 'help' => 'The table name if it does not follow conventions.', ])->addOption('count', [ 'help' => 'When using generated data, the number of records to include in the fixture(s).', 'short' => 'n', 'default' => 1 ])->addOption('schema', [ 'help' => 'Create a fixture that imports schema, instead of dumping a schema snapshot into the fixture.', 'short' => 's', 'boolean' => true ])->addOption('records', [ 'help' => 'Generate a fixture with records from the non-test database.' . ' Used with --count and --conditions to limit which records are added to the fixture.', 'short' => 'r', 'boolean' => true ])->addOption('conditions', [ 'help' => 'The SQL snippet to use when importing records.', 'default' => '1=1', ])->addSubcommand('all', [ 'help' => 'Bake all fixture files for tables in the chosen connection.' ]); return $parser; } /** * Execution method always used for tasks * Handles dispatching to interactive, named, or all processes. * * @param string|null $name The name of the fixture to bake. * @return null|bool */ public function main($name = null) { parent::main(); $name = $this->_getName($name); if (empty($name)) { $this->out('Choose a fixture to bake from the following:'); foreach ($this->Model->listUnskipped() as $table) { $this->out('- ' . $this->_camelize($table)); } return true; } $table = null; if (isset($this->params['table'])) { $table = $this->params['table']; } $model = $this->_camelize($name); $this->bake($model, $table); } /** * Bake All the Fixtures at once. Will only bake fixtures for models that exist. * * @return void */ public function all() { $tables = $this->Model->listUnskipped($this->connection, false); foreach ($tables as $table) { $this->main($table); } } /** * Assembles and writes a Fixture file * * @param string $model Name of model to bake. * @param string|null $useTable Name of table to use. 
* @return string Baked fixture content * @throws \RuntimeException */ public function bake($model, $useTable = null) { $table = $schema = $records = $import = $modelImport = null; if (!$useTable) { $useTable = Inflector::tableize($model); } elseif ($useTable !== Inflector::tableize($model)) { $table = $useTable; } $importBits = []; if (!empty($this->params['schema'])) { $modelImport = true; $importBits[] = "'table' => '{$useTable}'"; } if (!empty($importBits) && $this->connection !== 'default') { $importBits[] = "'connection' => '{$this->connection}'"; } if (!empty($importBits)) { $import = sprintf("[%s]", implode(', ', $importBits)); } $connection = ConnectionManager::get($this->connection); if (!method_exists($connection, 'schemaCollection')) { throw new \RuntimeException( 'Cannot generate fixtures for connections that do not implement schemaCollection()' ); } $schemaCollection = $connection->schemaCollection(); try { $data = $schemaCollection->describe($useTable); } catch (Exception $e) { $useTable = Inflector::underscore($model); $table = $useTable; $data = $schemaCollection->describe($useTable); } if ($modelImport === null) { $schema = $this->_generateSchema($data); } if (empty($this->params['records'])) { $recordCount = 1; if (isset($this->params['count'])) { $recordCount = $this->params['count']; } $records = $this->_makeRecordString($this->_generateRecords($data, $recordCount)); } if (!empty($this->params['records'])) { $records = $this->_makeRecordString($this->_getRecordsFromTable($model, $useTable)); } return $this->generateFixtureFile($model, compact('records', 'table', 'schema', 'import')); } /** * Generate the fixture file, and write to disk * * @param string $model name of the model being generated * @param array $otherVars Contents of the fixture file. * @return string Content saved into fixture file. 
*/ public function generateFixtureFile($model, array $otherVars) { $defaults = [ 'name' => $model, 'table' => null, 'schema' => null, 'records' => null, 'import' => null, 'fields' => null, 'namespace' => Configure::read('App.namespace') ]; if ($this->plugin) { $defaults['namespace'] = $this->_pluginNamespace($this->plugin); } $vars = $otherVars + $defaults; $path = $this->getPath(); $filename = $vars['name'] . 'Fixture.php'; $this->BakeTemplate->set('model', $model); $this->BakeTemplate->set($vars); $content = $this->BakeTemplate->generate('tests/fixture'); $this->out("\n" . sprintf('Baking test fixture for %s...', $model), 1, Shell::QUIET); $this->createFile($path . $filename, $content); $emptyFile = $path . 'empty'; $this->_deleteEmptyFile($emptyFile); return $content; } /** * Generates a string representation of a schema. * * @param \Cake\Database\Schema\Table $table Table schema * @return string fields definitions */ protected function _generateSchema(Table $table) { $cols = $indexes = $constraints = []; foreach ($table->columns() as $field) { $fieldData = $table->column($field); $properties = implode(', ', $this->_values($fieldData)); $cols[] = " '$field' => [$properties],"; } foreach ($table->indexes() as $index) { $fieldData = $table->index($index); $properties = implode(', ', $this->_values($fieldData)); $indexes[] = " '$index' => [$properties],"; } foreach ($table->constraints() as $index) { $fieldData = $table->constraint($index); $properties = implode(', ', $this->_values($fieldData)); $constraints[] = " '$index' => [$properties],"; } $options = $this->_values($table->options()); $content = implode("\n", $cols) . "\n"; if (!empty($indexes)) { $content .= " '_indexes' => [\n" . implode("\n", $indexes) . "\n ],\n"; } if (!empty($constraints)) { $content .= " '_constraints' => [\n" . implode("\n", $constraints) . "\n ],\n"; } if (!empty($options)) { foreach ($options as &$option) { $option = ' ' . $option; } $content .= " '_options' => [\n" . 
implode(",\n", $options) . "\n ],\n"; } return "[\n$content ]"; } /** * Formats Schema columns from Model Object * * @param array $values options keys(type, null, default, key, length, extra) * @return array Formatted values */ protected function _values($values) { $vals = []; if (!is_array($values)) { return $vals; } foreach ($values as $key => $val) { if (is_array($val)) { $vals[] = "'{$key}' => [" . implode(", ", $this->_values($val)) . "]"; } else { $val = var_export($val, true); if ($val === 'NULL') { $val = 'null'; } if (!is_numeric($key)) { $vals[] = "'{$key}' => {$val}"; } else { $vals[] = "{$val}"; } } } return $vals; } /** * Generate String representation of Records * * @param \Cake\Database\Schema\Table $table Table schema array * @param int $recordCount The number of records to generate. * @return array Array of records to use in the fixture. */ protected function _generateRecords(Table $table, $recordCount = 1) { $records = []; for ($i = 0; $i < $recordCount; $i++) { $record = []; foreach ($table->columns() as $field) { $fieldInfo = $table->column($field); $insert = ''; switch ($fieldInfo['type']) { case 'decimal': $insert = $i + 1.5; break; case 'biginteger': case 'integer': case 'float': case 'smallinteger': case 'tinyinteger': $insert = $i + 1; break; case 'string': case 'binary': $isPrimary = in_array($field, $table->primaryKey()); if ($isPrimary) { $insert = Text::uuid(); } else { $insert = "Lorem ipsum dolor sit amet"; if (!empty($fieldInfo['length'])) { $insert = substr($insert, 0, (int)$fieldInfo['length'] - 2); } } break; case 'timestamp': $insert = time(); break; case 'datetime': $insert = date('Y-m-d H:i:s'); break; case 'date': $insert = date('Y-m-d'); break; case 'time': $insert = date('H:i:s'); break; case 'boolean': $insert = 1; break; case 'text': $insert = "Lorem ipsum dolor sit amet, aliquet feugiat."; $insert .= " Convallis morbi fringilla gravida,"; $insert .= " phasellus feugiat dapibus velit nunc, pulvinar eget sollicitudin"; 
$insert .= " venenatis cum nullam, vivamus ut a sed, mollitia lectus. Nulla"; $insert .= " vestibulum massa neque ut et, id hendrerit sit,"; $insert .= " feugiat in taciti enim proin nibh, tempor dignissim, rhoncus"; $insert .= " duis vestibulum nunc mattis convallis."; break; case 'uuid': $insert = Text::uuid(); break; } $record[$field] = $insert; } $records[] = $record; } return $records; } /** * Convert a $records array into a string. * * @param array $records Array of records to be converted to string * @return string A string value of the $records array. */ protected function _makeRecordString($records) { $out = "[\n"; foreach ($records as $record) { $values = []; foreach ($record as $field => $value) { if ($value instanceof DateTimeInterface) { $value = $value->format('Y-m-d H:i:s'); } $val = var_export($value, true); if ($val === 'NULL') { $val = 'null'; } $values[] = " '$field' => $val"; } $out .= " [\n"; $out .= implode(",\n", $values); $out .= "\n ],\n"; } $out .= " ]"; return $out; } /** * Interact with the user to get a custom SQL condition and use that to extract data * to build a fixture. * * @param string $modelName name of the model to take records from. * @param string|null $useTable Name of table to use. * @return array Array of records. */ protected function _getRecordsFromTable($modelName, $useTable = null) { $recordCount = (isset($this->params['count']) ? $this->params['count'] : 10); $conditions = (isset($this->params['conditions']) ? $this->params['conditions'] : '1=1'); if (TableRegistry::exists($modelName)) { $model = TableRegistry::get($modelName); } else { $model = TableRegistry::get($modelName, [ 'table' => $useTable, 'connection' => ConnectionManager::get($this->connection) ]); } $records = $model->find('all') ->where($conditions) ->limit($recordCount) ->enableHydration(false); return $records; } }
JayWalker512/CrueltyGame
cruelty/vendor/cakephp/bake/src/Shell/Task/FixtureTask.php
PHP
mit
15,668
[ 30522, 1026, 1029, 25718, 1013, 1008, 1008, 1008, 9850, 8458, 2361, 1006, 1056, 2213, 1007, 1024, 5915, 2458, 7705, 1006, 8299, 1024, 1013, 1013, 9850, 8458, 2361, 1012, 8917, 1007, 1008, 9385, 1006, 1039, 1007, 9850, 4007, 3192, 1010, 42...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
# Lithophyllum sigi Lemoine SPECIES #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Rhodophyta/Florideophyceae/Corallinales/Corallinaceae/Lithophyllum/Lithophyllum sigi/README.md
Markdown
apache-2.0
183
[ 30522, 1001, 5507, 18471, 27750, 9033, 5856, 3393, 5302, 3170, 2427, 1001, 1001, 1001, 1001, 3570, 3970, 1001, 1001, 1001, 1001, 2429, 2000, 1996, 10161, 1997, 2166, 1010, 3822, 2254, 2249, 1001, 1001, 1001, 1001, 2405, 1999, 19701, 1001, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
function flashMessage(type, message) { var flashContainer = $('#flash-message'); var flash = null; if (message.title) { flash = $('<div class="alert alert-block alert-' + type + '"><h4 class="alert-heading">' + message.title + '</h4><p>' + message.message + '</p></div>'); } else { flash = $('<div class="alert alert-block alert-' + type + '"><p>' + message + '</p></div>'); } flashContainer.append(flash); setupFlash.call(flash); } function setupFlash() { var flash = $(this); if (flash.html() != '') { var timeout = flash.data('timeout'); if (timeout) { clearTimeout(timeout); } if (!flash.hasClass('alert-danger')) { flash.data('timeout', setTimeout(function() { flash.fadeOut(400, function() { $(this).remove(); }); }, 5000)); } flash.fadeIn(); } } function showFlashMessage() { var flashes = $('#flash-message .alert'); flashes.each(setupFlash); } function initFlashMessage(){ $('#flash-message').on('click', '.alert', function() { $(this).fadeOut(400, function() { $(this).remove(); }) ; }); showFlashMessage(); }
marcos-sandim/phalcon-proj
public/js/app.js
JavaScript
mit
1,175
[ 30522, 3853, 5956, 7834, 3736, 3351, 1006, 2828, 1010, 4471, 1007, 1063, 13075, 5956, 8663, 18249, 2121, 1027, 1002, 1006, 1005, 1001, 5956, 1011, 4471, 1005, 1007, 1025, 13075, 5956, 1027, 19701, 1025, 2065, 1006, 4471, 1012, 2516, 1007, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
# Diceratium prostratum Lag. SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Brassicales/Brassicaceae/Notoceras/Notoceras bicorne/ Syn. Diceratium prostratum/README.md
Markdown
apache-2.0
183
[ 30522, 1001, 18740, 8609, 5007, 4013, 20528, 11667, 2474, 2290, 1012, 2427, 1001, 1001, 1001, 1001, 3570, 10675, 1001, 1001, 1001, 1001, 2429, 2000, 1996, 10161, 1997, 2166, 1010, 3822, 2254, 2249, 1001, 1001, 1001, 1001, 2405, 1999, 19701,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/* * JBoss, Home of Professional Open Source * Copyright 2012, Red Hat, Inc., and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.weld.tests.contexts.request.startup.ejb; import static org.junit.Assert.assertEquals; import jakarta.inject.Inject; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.arquillian.junit.Arquillian; import org.jboss.shrinkwrap.api.Archive; import org.jboss.shrinkwrap.api.BeanArchive; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.weld.test.util.Utils; import org.jboss.weld.tests.category.Integration; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; @RunWith(Arquillian.class) @Category(Integration.class) public class RequestScopeActiveDuringSingletonStartupTest { @Inject private Initializer initializer; @Deployment public static Archive<?> getDeployment() { return ShrinkWrap.create(BeanArchive.class, Utils.getDeploymentNameAsHash(RequestScopeActiveDuringSingletonStartupTest.class)).addPackage(RequestScopeActiveDuringSingletonStartupTest.class.getPackage()); } @Test public void testRequestScopeActiveDuringSingletonStartup() { assertEquals("pong", initializer.getResponse()); } }
weld/core
tests-arquillian/src/test/java/org/jboss/weld/tests/contexts/request/startup/ejb/RequestScopeActiveDuringSingletonStartupTest.java
Java
apache-2.0
1,934
[ 30522, 1013, 1008, 1008, 1046, 15853, 2015, 1010, 2188, 1997, 2658, 2330, 3120, 1008, 9385, 2262, 1010, 2417, 6045, 1010, 4297, 1012, 1010, 1998, 3265, 16884, 1008, 2011, 1996, 1030, 6048, 6415, 1012, 2156, 1996, 9385, 1012, 19067, 2102, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
module AssemblyAndServiceOperationsMixin # Commands used from new dtk client def check_if_instance_running(node_address, port, path) endpoint = node_address + ":" + port response = request_response(path, {}, 'get', endpoint); response.code == 200 end def get_node_by_name(service_instance_name, node_name) nodes_response = send_request("/rest/api/v1/services/#{service_instance_name}/nodes", {}, 'get') nodes = nodes_response['data'] if nodes.empty? puts "No nodes found"; return false end selected_node_arr = nodes.select { |node| node['display_name'] == node_name } if nodes if selected_node_arr.empty? || selected_node_arr.length > 1 puts "Expected only one node, but found: #{selected_node_arr}" return false end node = selected_node_arr.first puts "Found requested node: #{node}" node end def verify_service_instance_nodes_terminated(service_instance_name) require 'aws-sdk-ec2' puts "Verify service instance nodes have been terminated", "-----------------------------------------------------" nodes_terminated = true ec2 = Aws::EC2::Client.new(region: 'us-east-1') ec2_instance = ec2.describe_instances(filters:[{ name: 'tag:Name', values: ["*" + service_instance_name + "*"] }]) ec2_instance.reservations.each do |status| puts "Instance details: #{status}" if status.instances.first.state.name == "running" nodes_terminated = false puts "Service instance: #{service_instance_name} nodes have not been terminated" end end puts "" puts "Service instance: #{service_instance_name} nodes have been terminated" if nodes_terminated nodes_terminated end def check_if_service_instance_exists(service_instance_name) puts "Check if service instance exists", "-----------------------------------" service_instance_exists = false service_instances_list = send_request("/rest/api/v1/services/list", {}, 'get') ap service_instances_list if service_instances_list['status'] == 'ok' && !service_instances_list['data'].empty? 
service_instances_list['data'].each do |instance| if instance['display_name'] == service_instance_name puts "Service instance: #{service_instance_name} found!" service_instance_exists = true end end else puts "Service instance #{service_instance_name} is not found!" end puts "Service instance #{service_instance_name} is not found!" unless service_instance_exists puts "" service_instance_exists end def check_if_node_exists_in_service_instance(service_instance_name, node_name) puts "Check if node exists in service instance", "---------------------------------------" node_exists = false nodes_list = send_request("/rest/api/v1/services/#{service_instance_name}/nodes", {}, 'get') ap nodes_list if nodes_list['status'] == 'ok' && !nodes_list['data'].empty? nodes_list['data'].each do |node| if node['display_name'] == node_name puts "Node: #{node_name} found!" node_exists = true end end else puts "Node #{node_name} is not found in #{service_instance_name}" end puts "Node #{node_name} is not found in #{service_instance_name}" unless node_exists puts "" node_exists end def check_if_node_group_exists_in_service_instance(service_instance_name, node_group_name, cardinality) puts "Check if node group exists in service instance", "-------------------------------------------" node_group_exist = false nodes_list = send_request("/rest/api/v1/services/#{service_instance_name}/nodes", {}, 'get') ap nodes_list if nodes_list['status'] == 'ok' && !nodes_list['data'].empty? node_group_members = [] nodes_list['data'].each do |node| if node['display_name'].include? 
node_group_name + ":" # indicator it is node group member node_group_members << node['display_name'] end end if node_group_members.size == cardinality puts "Node group #{node_group_name} is found in #{service_instance_name}" node_group_exist = true end else puts "Node group #{node_group_name} is not found in #{service_instance_name}" end puts "Node group #{node_group_name} is not found in #{service_instance_name}" unless node_group_exist puts "" node_group_exist end def check_if_component_exists_in_service_instance(service_instance_name, component_name) puts "Check if component exists in service instance", "-----------------------------------------" component_exists = false components_list = send_request("/rest/api/v1/services/#{service_instance_name}/components", {}, 'get') ap components_list if components_list['status'] == 'ok' && !components_list['data'].empty? components_list['data'].each do |cmp| if cmp['display_name'] == component_name puts "Component: #{component_name} found!" component_exists = true end end else puts "Component #{component_name} is not found in #{service_instance_name}" end puts "Component #{component_name} is not found in #{service_instance_name}" unless component_exists puts "" component_exists end def check_if_action_exists_in_service_instance(service_instance_name, action_to_check) puts "Check if action exists in service instance", "------------------------------------------" action_exists = false list_of_actions = send_request("/rest/api/v1/services/#{service_instance_name}/actions", {}, 'get') ap list_of_actions if list_of_actions['status'] == 'ok' && !list_of_actions['data'].empty? list_of_actions['data'].each do |action| if action['display_name'] == action_to_check puts "Action: #{action_to_check} found!" 
action_exists = true end end else puts "Action #{action_to_check} is not found in #{service_instance_name}" end puts "Action #{action_to_check} is not found in #{service_instance_name}" unless action_exists puts "" action_exists end def check_if_attributes_exists_in_service_instance(service_instance_name, attributes_to_check) puts "Check if attributes exist and are correct in service instance", "---------------------------------------------------" attributes_exist = false attributes_list = send_request("/rest/api/v1/services/#{service_instance_name}/attributes?all&format=yaml", {}, 'get') puts "Attributes to check:" ap attributes_to_check puts "" puts "Attributes on service instance:" ap attributes_list puts "" if attributes_list['status'] == 'ok' && !attributes_list['data'].empty? attributes_exist_and_values_correct = [] attributes_list['data'].each do |attr| if (attributes_to_check.keys.include? attr['name']) && (attributes_to_check.values.include? attr['value']) attributes_exist_and_values_correct << true end end if (attributes_exist_and_values_correct.count == attributes_to_check.count) && (!attributes_exist_and_values_correct.include? false) puts "All attributes: #{attributes_to_check} are verified and exist on service instance" attributes_exist = true else puts "Some attributes are missing or they don't have expected values on service instance" end else puts "Attributes #{attributes_to_check} are not found in #{service_instance_name}" end puts "" attributes_exist end def check_task_status(service_instance_name) puts "Check task status", "----------------" service_converged = { pass: false, error: nil } end_loop = false count = 0 max_num_of_retries = 80 while (count < max_num_of_retries) sleep 10 count += 1 task_status_response = send_request("/rest/api/v1/services/#{service_instance_name}/task_status", {}, 'get') if task_status_response['status'] == 'ok' if task_status_response['data'].first['status'] == 'succeeded' puts "Service was converged successfully!" 
service_converged[:pass] = true break elsif task_status_response['data'].first['status'] == 'failed' puts 'Service was not converged successfully!' ap task_status_response['data'] service_converged[:error] = task_status_response['data'] break end else ap task_status_response['data'] service_converged[:error] = task_status_response['data'] puts "Service was not converged successfully!" break end end puts '' service_converged end def check_task_status_with_breakpoint(service_instance_name, subtask_name_with_breakpoint) puts "Check task status with breakpoint", "---------------------------------" debug_passed = false end_loop = false count = 0 max_num_of_retries = 30 while (count < max_num_of_retries) sleep 10 count += 1 task_status_response = send_request("/rest/api/v1/services/#{service_instance_name}/task_status", {}, 'get') ap task_status_response if task_status_response['status'] == 'ok' if task_status_response['data'].first['status'] == 'debugging' subtask = task_status_response['data'].select { |subtask| subtask['type'].include? subtask_name_with_breakpoint }.first debug_command = subtask['info']['message'].match(/(byebug -R.+)'/)[1] debug_execution = `echo c | #{debug_command}` puts debug_execution if debug_execution.include? "Connected" debug_passed = true break else debug_passed = false break end end else ap task_status_response['data'] puts "Service was not converged successfully! Debug cannot proceed" break end end puts '' debug_passed end def check_delete_task_status(service_instance_name) puts "Check delete task status", "------------------------" service_deleted = { pass: false, error: nil } end_loop = false count = 0 max_num_of_retries = 50 while (count < max_num_of_retries) sleep 10 count += 1 task_status_response = send_request("/rest/api/v1/services/#{service_instance_name}/task_status", {}, 'get') if task_status_response['status'] == 'ok' if task_status_response['data'].first['status'] == 'succeeded' puts "Service was deleted successfully!" 
service_deleted[:pass] = true break elsif task_status_response['data'].first['status'] == 'failed' puts 'Service was not deleted successfully!' ap task_status_response['data'] service_deleted[:error] = task_status_response['data'] break end else ap task_status_response if task_status_response['errors'].first['message'] == "No context with the name '#{service_instance_name}' exists" puts "Service was deleted successfully!" service_deleted[:pass] = true else puts "Service was not deleted successfully!" service_deleted[:error] = task_status_response['data'] end break end end puts '' service_deleted end def stage_service_instance(service_instance_name, context = nil) #Get list of assemblies, extract selected assembly, stage service and return its id puts "Stage service:", "--------------" service_id = nil extract_id_regex = /id: (\d+)/ assembly_list = send_request('/rest/assembly/list', {:subtype=>'template'}) puts "List of avaliable assemblies: " pretty_print_JSON(assembly_list) test_template = assembly_list['data'].select { |x| x['display_name'] == @assembly }.first if (!test_template.nil?) puts "Assembly #{@assembly} found!" assembly_id = test_template['id'] puts "Assembly id: #{assembly_id}" if @is_context stage_service_response = send_request('/rest/assembly/stage', {:assembly_id=>assembly_id, :name=>@service_name, :service_module_name => service_instance_name, :is_context => @is_context}) else unless context stage_service_response = send_request('/rest/assembly/stage', {:assembly_id=>assembly_id, :name=>@service_name, :service_module_name => service_instance_name}) else stage_service_response = send_request('/rest/assembly/stage', {:assembly_id=>assembly_id, :name=>@service_name, :service_module_name => service_instance_name, :context_id=>context}) end end pretty_print_JSON(stage_service_response) if (stage_service_response['data'].include? "name: #{@service_name}") puts "Stage of #{@service_name} assembly completed successfully!" 
service_id_match = stage_service_response['data'].match(extract_id_regex) self.service_id = service_id_match[1].to_i puts "Service id for a staged service: #{self.service_id}" else puts "Stage service didnt pass!" end else puts "Assembly #{@service_name} not found!" end puts "" end # Commands used from old dtk client def stage_service(context = nil) #Get list of assemblies, extract selected assembly, stage service and return its id puts "Stage service:", "--------------" service_id = nil extract_id_regex = /id: (\d+)/ assembly_list = send_request('/rest/assembly/list', {:subtype=>'template'}) puts "List of avaliable assemblies: " pretty_print_JSON(assembly_list) test_template = assembly_list['data'].select { |x| x['display_name'] == @assembly }.first if (!test_template.nil?) puts "Assembly #{@assembly} found!" assembly_id = test_template['id'] puts "Assembly id: #{assembly_id}" if @is_context stage_service_response = send_request('/rest/assembly/stage', {:assembly_id=>assembly_id, :name=>@service_name, :is_context => @is_context}) else unless context stage_service_response = send_request('/rest/assembly/stage', {:assembly_id=>assembly_id, :name=>@service_name}) else stage_service_response = send_request('/rest/assembly/stage', {:assembly_id=>assembly_id, :name=>@service_name, :context_id=>context}) end end pretty_print_JSON(stage_service_response) if (stage_service_response['data'].include? "name: #{@service_name}") puts "Stage of #{@service_name} assembly completed successfully!" service_id_match = stage_service_response['data'].match(extract_id_regex) self.service_id = service_id_match[1].to_i puts "Service id for a staged service: #{self.service_id}" else puts "Stage service didnt pass!" end else puts "Assembly #{@service_name} not found!" 
end puts "" end def get_components_versions(service_id) puts "Get all component versions from service:", "-----------------------------" components_list = send_request('/rest/assembly/info_about', {:assembly_id=>service_id, :node_id => nil, :component_id => nil, :subtype=>'instance', :about=>'components'}) components_list = components_list['data'].map! { |c| c['version'] } puts "" return components_list end def get_default_context_service puts "Get default context service instance id:", "---------------------------------------" service_id = nil default_context_service_response = send_request('/rest/assembly/get_default_context', {}) if default_context_service_response['status'] == 'ok' puts "Default context service instance succesfully found." service_id = default_context_service_response['data']['id'] else puts "Default context service was not succesfully found." end puts '' service_id end def stage_service_with_namespace(namespace) #Get list of assemblies, extract selected assembly, stage service and return its id puts "Stage service:", "--------------" service_id = nil extract_id_regex = /id: (\d+)/ assembly_list = send_request('/rest/assembly/list', {:subtype=>'template'}) puts "List of avaliable assemblies: " pretty_print_JSON(assembly_list) test_template = assembly_list['data'].select { |x| x['display_name'] == @assembly && x['namespace'] == namespace }.first if (!test_template.nil?) puts "Assembly #{@assembly} from namespace #{namespace} found!" assembly_id = test_template['id'] puts "Assembly id: #{assembly_id}" stage_service_response = send_request('/rest/assembly/stage', {:assembly_id=>assembly_id, :name=>@service_name}) pretty_print_JSON(stage_service_response) if (stage_service_response['data'].include? "name: #{@service_name}") puts "Stage of #{@service_name} assembly completed successfully!" 
service_id_match = stage_service_response['data'].match(extract_id_regex) self.service_id = service_id_match[1].to_i puts "Service id for a staged service: #{self.service_id}" else puts "Stage service didnt pass!" end else puts "Assembly #{@service_name} not found!" end puts "" end def check_service_info(service_id, info_to_check) puts "Show service info:", "------------------" info_exist = false service_info_response = send_request('/rest/assembly/info', {:assembly_id=>service_id, :subtype=>:instance}) pretty_print_JSON(service_info_response) if service_info_response['data'].include? info_to_check puts "#{info_to_check} exists in info output!" info_exist = true else puts "#{info_to_check} does not exist in info output!" end puts "" return info_exist end def rename_service(service_id, new_service_name) puts "Rename service:", "---------------" service_renamed = false service_list = send_request('/rest/assembly/list', {:detail_level=>'nodes', :subtype=>'instance'}) service_name = service_list['data'].select { |x| x['id'] == service_id } if service_name.any? puts "Old service name is: #{service_name}. Proceed with renaming it to #{new_service_name}..." rename_status = send_request('/rest/assembly/rename', {:assembly_id=>service_id, :assembly_name=>service_name, :new_assembly_name=>new_service_name}) if rename_status['status'] == 'ok' puts "Service #{service_name} renamed to #{new_service_name} successfully!" service_renamed = true else puts "Service #{service_name} was not renamed to #{new_service_name} successfully!" end else puts "Service with id #{service_id} does not exist!" 
end puts "" return service_renamed end def create_attribute(service_id, attribute_name) #Create attribute puts "Create attribute:", "-----------------" attributes_created = false create_attribute_response = send_request('/rest/assembly/set_attributes', {:assembly_id=>service_id, :create=>true, :pattern=>attribute_name}) puts "List of service attributes:" service_attributes = send_request('/rest/assembly/info_about', {:about=>'attributes', :filter=>nil, :subtype=>'instance', :assembly_id=>service_id}) pretty_print_JSON(service_attributes) extract_attribute = service_attributes['data'].select { |x| x['display_name'].include? attribute_name }.first['display_name'] if (extract_attribute == attribute_name) puts "Creating #{attribute_name} attribute completed successfully!" attributes_created = true end puts "" return attributes_created end def check_if_attribute_exists(service_id, attribute_name) puts "Check if attribute exists:", "--------------------------" attribute_exists = false puts "List of service attributes:" service_attributes = send_request('/rest/assembly/info_about', {:about=>'attributes', :filter=>nil, :subtype=>'instance', :assembly_id=>service_id}) pretty_print_JSON(service_attributes) extract_attribute = service_attributes['data'].select { |x| x['display_name'].include? attribute_name }.first['display_name'] if (extract_attribute == attribute_name) puts "#{attribute_name} attribute exists!" attribute_exists = true else puts "#{attribute_name} attribute does not exist!" 
end puts "" return attribute_exists end def link_attributes(service_id, source_attribute, context_attribute) puts "Link attributes:", "----------------" attributes_linked = false link_attributes_response = send_request('/rest/assembly/add_ad_hoc_attribute_links', {:assembly_id=>service_id, :context_attribute_term=>context_attribute, :source_attribute_term=>"$#{source_attribute}"}) pretty_print_JSON(link_attributes_response) if link_attributes_response['status'] == 'ok' puts "Link between #{source_attribute} attribute and #{context_attribute} attribute is established!" attributes_linked = true else puts "Link between #{source_attribute} attribute and #{context_attribute} attribute is not established!" end puts "" return attributes_linked end def get_service_id_by_name(service_name) puts "Get service instance id by its name", "-----------------------------------" service_list = send_request('/rest/assembly/list', {:detail_level=>'nodes', :subtype=>'instance'}) puts "List of all services and its content:" service_instance = nil filtered_services = service_list['data'].select { |x| x['display_name'] == service_name } if filtered_services.length == 1 puts "Service instance with name #{service_name} exists: " pretty_print_JSON(filtered_services) service_instance = filtered_services[0] elsif filtered_services.length.zero? puts "Service instance with name #{service_name} does not exist." else puts "Multiple service instances with name #{service_name} exist." 
end end def check_if_service_exists(service_id) #Get list of existing services and check if staged service exists puts "Check if service exists:", "------------------------" service_exists = false service_list = send_request('/rest/assembly/list', {:detail_level=>'nodes', :subtype=>'instance'}) puts "List of all services and its content:" pretty_print_JSON(service_list) test_service = service_list['data'].select { |x| x['id'] == service_id } puts "Service with id #{service_id}: " pretty_print_JSON(test_service) if (test_service.any?) extract_service_id = test_service.first['id'] execution_status = test_service.first['execution_status'] if ((extract_service_id == service_id) && (execution_status == 'staged')) puts "Service with id #{service_id} exists!" service_exists = true end else puts "Service with id #{service_id} does not exist!" end puts "" return service_exists end def list_specific_success_service(service_name) puts "List success services:", "------------------------" service_list = send_request('/rest/assembly/list', {:subtype=>'instance', :detail_level => 'nodes'}) success_services = service_list['data'].select { |x| x['display_name'] == service_name && x['execution_status'] == 'succeeded' } pretty_print_JSON(success_services) return success_services end def list_matched_success_service(service_name) puts "List success services:", "------------------------" service_list = send_request('/rest/assembly/list', {:subtype=>'instance', :detail_level => 'nodes'}) success_services = service_list['data'].select { |x| (x['display_name'].include? 
service_name) && (x['execution_status'] == 'succeeded') } pretty_print_JSON(success_services) return success_services end def list_specific_failed_service(service_name) puts "List failed services:", "-------------------------" service_list = send_request('/rest/assembly/list', {:subtype=>'instance', :detail_level => 'nodes'}) failed_services = service_list['data'].select { |x| x['display_name'] == service_name && x['execution_status'] == 'failed' } pretty_print_JSON(failed_services) return failed_services end def list_matched_failed_service(service_name) puts "List failed services:", "-------------------------" service_list = send_request('/rest/assembly/list', {:subtype=>'instance', :detail_level => 'nodes'}) failed_services = service_list['data'].select { |x| (x['display_name'].include? service_name) && (x['execution_status'] == 'failed') } pretty_print_JSON(failed_services) return failed_services end def check_service_status(service_id, status_to_check) #Get list of services and check if service exists and its status puts "Check service status:", "---------------------" service_exists = false end_loop = false count = 0 max_num_of_retries = 50 while (end_loop == false) sleep 5 count += 1 service_list = send_request('/rest/assembly/list', {:subtype=>'instance'}) service = service_list['data'].select { |x| x['id'] == service_id }.first if (!service.nil?) 
test_service = send_request('/rest/assembly/info', {:assembly_id=>service_id,:subtype=>:instance}) op_status = test_service['data']['op_status'] extract_service_id = service['id'] if ((extract_service_id == service_id) && (op_status == status_to_check)) puts "Service with id #{extract_service_id} has current op status: #{status_to_check}" service_exists = true end_loop = true else puts "Service with id #{extract_service_id} still does not have current op status: #{status_to_check}" end else puts "Service with id #{service_id} not found in list" end_loop = true end if (count > max_num_of_retries) puts "Max number of retries reached..." end_loop = true end end puts "" return service_exists end def set_attribute(service_id, attribute_name, attribute_value) #Set attribute on particular service puts "Set attribute:", "--------------" is_attributes_set = false service_attributes = send_request('/rest/assembly/info_about', {:about=>'attributes', :filter=>nil, :subtype=>'instance', :assembly_id=>service_id}) attribute_id = service_attributes['data'].select { |x| x['display_name'].include? attribute_name } if attribute_id.empty? set_attribute_value_response = send_request('/rest/assembly/set_attributes', {:assembly_id=>service_id, :value=>attribute_value, :pattern=>attribute_name}) if set_attribute_value_response['status'] == 'ok' puts "Setting of attribute #{attribute_name} completed successfully!" is_attributes_set = true end else set_attribute_value_response = send_request('/rest/assembly/set_attributes', {:assembly_id=>service_id, :value=>attribute_value, :pattern=>attribute_id.first['id']}) service_attributes = send_request('/rest/assembly/info_about', {:about=>'attributes', :filter=>nil, :subtype=>'instance', :assembly_id=>service_id}) extract_attribute_value = service_attributes['data'].select { |x| x['value'] == attribute_value }.first['value'] if extract_attribute_value != nil puts "Setting of attribute #{attribute_name} completed successfully!" 
is_attributes_set = true end end puts "" return is_attributes_set end def set_attribute_on_service_level_component(service_id, attribute_name, attribute_value) #Set attribute on particular service puts "Set attribute:", "--------------" is_attributes_set = false #Get attribute id for which value will be set service_attributes = send_request('/rest/assembly/info_about', {:about=>'attributes', :filter=>nil, :subtype=>'instance', :assembly_id=>service_id}) attribute_id = service_attributes['data'].select { |x| x['display_name'].include? attribute_name }.first['id'] #Set attribute value for given attribute id set_attribute_value_response = send_request('/rest/assembly/set_attributes', {:assembly_id=>service_id, :value=>attribute_value, :pattern=>attribute_id}) service_attributes = send_request('/rest/assembly/info_about', {:about=>'attributes', :filter=>nil, :subtype=>'instance', :assembly_id=>service_id}) extract_attribute_value = attribute_id = service_attributes['data'].select { |x| x['display_name'].include? attribute_name }.first['value'] if extract_attribute_value == attribute_value puts "Setting of attribute #{attribute_name} completed successfully!" is_attributes_set = true end puts "" return is_attributes_set end def get_attribute_value(service_id, node_name, component_name, attribute_name) puts "Get attribute value by name:", "----------------------------" puts "List of service attributes:" service_attributes = send_request('/rest/assembly/info_about', {:assembly_id=>service_id, :filter=>nil, :about=>'attributes', :subtype=>'instance'}) pretty_print_JSON(service_attributes) attributes = service_attributes['data'].select { |x| x['display_name'] == "#{node_name}/#{component_name}/#{attribute_name}" }.first if !attributes.nil? 
attribute_value = service_attributes['data'].select { |x| x['display_name'] == "#{node_name}/#{component_name}/#{attribute_name}" }.first['value'] puts "Attribute value is: #{attribute_value}" else puts "Some of the input parameters is incorrect or missing. Node name: #{node_name}, Component name: #{component_name}, Attribute name: #{attribute_name}" end puts "" return attribute_value end # new client def check_component_depedency(service_instance_name, source_component, dependency_component, type) puts "Check component dependency:", "---------------------------" dependency_found = false puts "List service components with dependencies:" components_list = send_request("/rest/api/v1/services/#{service_instance_name}/component_links", {}, 'get') component = components_list['data'].select { |x| x['base_component'] == source_component} if (!component.nil?) puts "Component #{source_component} exists. Check its dependencies..." component.each do |deps| if (deps['dependent_component'] == dependency_component) && (deps['type'] == type) dependency_found = true puts "Component #{source_component} has expected dependency component #{dependency_component} with type #{type}" else puts "Component #{source_component} does not have expected dependency component #{dependency_component} with type #{type}" end end else puts "Component #{source_component} does not exist and therefore it does not have any dependencies" end puts "" return dependency_found end def converge_service(service_id, max_num_of_retries=15) puts "Converge service:", "-----------------" service_converged = false puts "Converge process for service with id #{service_id} started!" 
find_violations = send_request('/rest/assembly/find_violations', {'assembly_id' => service_id}) create_task_response = send_request('/rest/assembly/create_task', {'assembly_id' => service_id}) if (@error_message == "") task_id = create_task_response['data']['task_id'] puts "Task id: #{task_id}" task_execute_response = send_request('/rest/task/execute', {'task_id' => task_id}) end_loop = false count = 0 task_status = 'executing' while ((task_status.include? 'executing') && (end_loop == false)) sleep 20 count += 1 response_task_status = send_request('/rest/assembly/task_status', {'assembly_id'=> service_id}) status = response_task_status['data'].first['status'] unless status.nil? if (status.include? 'succeeded') service_converged = true puts "Task execution status: #{status}" puts "Converge process finished successfully!" end_loop = true elsif (status.include? 'failed') puts "Error details on subtasks:" ap response_task_status['data'] response_task_status['data'].each do |error_message| unless error_message['errors'].nil? puts error_message['errors']['message'] puts error_message['errors']['type'] end end puts "Task execution status: #{status}" puts "Converge process was not finished successfully! Some tasks failed!" end_loop = true end puts "Task execution status: #{status}" end if (count > max_num_of_retries) puts "Max number of retries reached..." puts "Converge process was not finished successfully!" end_loop = true end end else puts "Service was not converged successfully!" end puts "" return service_converged end def stop_running_service(service_id) puts "Stop running service:", "---------------------" service_stopped = false stop_service_response = send_request('/rest/assembly/stop', {:assembly_id => service_id}) if (stop_service_response['status'] == "ok") puts "Service stopped successfully!" service_stopped = true else puts "Service was not stopped successfully!" 
end puts "" return service_stopped end def create_assembly_from_service(service_id, service_module_name, assembly_name, namespace=nil) puts "Create assembly from service:", "-----------------------------" assembly_created = false create_assembly_response = send_request('/rest/assembly/promote_to_template', {:service_module_name=>service_module_name, :mode=>:create, :assembly_id=>service_id, :assembly_template_name=>assembly_name, :namespace=>namespace}) if (create_assembly_response['status'] == 'ok') puts "Assembly #{assembly_name} created in service module #{service_module_name}" assembly_created = true else puts "Assembly #{assembly_name} was not created in service module #{service_module_name}" end puts "" return assembly_created end def netstats_check(service_id, port) puts "Netstats check:", "---------------" netstats_check = false end_loop = false count = 0 max_num_of_retries = 15 while (end_loop == false) sleep 10 count += 1 if (count > max_num_of_retries) puts "Max number of retries for getting netstats reached..." end_loop = true end response = send_request('/rest/assembly/initiate_get_netstats', {:node_id=>nil, :assembly_id=>service_id}) pretty_print_JSON(response) action_results_id = response['data']['action_results_id'] 5.downto(1) do |i| sleep 1 response = send_request('/rest/assembly/get_action_results', {:disable_post_processing=>false, :return_only_if_complete=>true, :action_results_id=>action_results_id, :sort_key=>"port"}) puts "Netstats check:" pretty_print_JSON(response) if response['data']['is_complete'] port_to_check = response['data']['results'].select { |x| x['port'] == port}.first if (!port_to_check.nil?) puts "Netstats check completed! Port #{port} available!" netstats_check = true end_loop = true break else puts "Netstats check completed! Port #{port} is not available!" 
netstats_check = false break end end end end puts "" return netstats_check end def start_running_service(service_id) puts "Start service:", "--------------" service_started = false response = send_request('/rest/assembly/start', {:assembly_id => service_id, :node_pattern=>nil}) pretty_print_JSON(response) task_id = response['data']['task_id'] response = send_request('/rest/task/execute', {:task_id=>task_id}) if (response['status'] == 'ok') end_loop = false count = 0 max_num_of_retries = 30 while (end_loop == false) sleep 10 count += 1 response = send_request('/rest/assembly/info_about', {:assembly_id => service_id, :subtype => 'instance', :about => 'tasks'}) puts "Start instance check:" status = response['data'].select { |x| x['status'] == 'executing'}.first pretty_print_JSON(status) if (count > max_num_of_retries) puts "Max number of retries for starting instance reached..." end_loop = true elsif (status.nil?) puts "Instance started!" service_started = true end_loop = true end end else puts "Start instance is not completed successfully!" end puts "" return service_started end def add_component_by_name_to_service_node(service_id, node_name, component_name) puts "Add component to service:", "--------------------------" component_added = false service_nodes = send_request('/rest/assembly/info_about', {:assembly_id=>service_id, :filter=>nil, :about=>'nodes', :subtype=>'instance'}) if (service_nodes['data'].select { |x| x['display_name'] == node_name }.first) puts "Node #{node_name} exists in service. Get node id..." node_id = service_nodes['data'].select { |x| x['display_name'] == node_name }.first['id'] component_add_response = send_request('/rest/assembly/add_component', {:node_id=>node_id, :component_template_id=>component_name.split(":").last, :assembly_id=>service_id, :namespace=>component_name.split(":").first}) if (component_add_response['status'] == 'ok') puts "Component #{component_name} added to service!" 
component_added = true end else component_add_response = send_request('/rest/assembly/add_component', {:node_id=>nil, :component_template_id=>component_name.split(":").last, :assembly_id=>service_id, :namespace=>component_name.split(":").first}) if (component_add_response['status'] == 'ok') puts "Component #{component_name} added to service!" component_added = true end end puts "" return component_added end def delete_and_destroy_service(service_id) puts "Delete and destroy service:", "---------------------------" service_deleted = false delete_service_response = send_request('/rest/assembly/delete', {:assembly_id=>service_id}) if (delete_service_response['status'] == "ok") puts "Service deleted successfully!" service_deleted = true else puts "Service was not deleted successfully!" end puts "" return service_deleted end def delete_task_status(service_id, component_to_delete, delete_type, check_component_in_task_status=true) service_deleted = false end_loop = false count = 0 max_num_of_retries = 50 task_status = 'executing' while ((task_status.include? 'executing') && (end_loop == false)) sleep 2 count += 1 response_task_status = send_request('/rest/assembly/task_status', {'assembly_id'=> service_id}) delete_status = response_task_status['data'].first['status'] if !delete_status.nil? if check_component_in_task_status component_delete_status = response_task_status['data'].select { |x| x['type'].include? component_to_delete }.first['status'] else # case when performing delete action on staged service component_delete_status = 'succeeded' end if (delete_status.include? "succeeded") && (component_delete_status.include? "succeeded") service_deleted = true task_status = delete_status puts "Task execution status: #{delete_status}" puts "#{delete_type} finished successfully!" end_loop = true end if (delete_status.include? 'failed') puts "Error details:" ap response_task_status['data'] response_task_status['data'].each do |error_message| unless error_message['errors'].nil? 
puts error_message['errors']['message'] puts error_message['errors']['type'] end end puts "Task execution status: #{delete_status}" puts "#{delete_type} with workflow did not finish successfully!" task_status = delete_status end_loop = true end puts "Task execution status: #{delete_status}" else if delete_type == 'delete_service' # This is set to true only in case when we delete service instance # Reason: we cannot get task status details on instance that does not exist anymore service_deleted = true break end end if (count > max_num_of_retries) puts "Max number of retries reached..." puts "#{delete_type} with workflow did not finish successfully!" break end end service_deleted end def delete_service_with_workflow(service_id, component_to_delete, check_component_in_task_status) puts "Delete and destroy service with workflow:", "-----------------------------------------" service_deleted_successfully = false delete_service_response = send_request('/rest/assembly/delete_using_workflow', {:assembly_id=>service_id, :subtype => :instance}) if delete_service_response['status'] == 'ok' service_deleted_successfully = delete_task_status(service_id, component_to_delete, 'delete_service', check_component_in_task_status) puts "Service was deleted successfully!" else puts "Service was not deleted successfully!" end puts "" return service_deleted_successfully end def delete_node_with_workflow(service_id, node_name, component_to_delete, check_component_in_task_status) puts "Delete node with workflow:", "----------------------------------" node_deleted_successfully = false delete_node_response = send_request('/rest/assembly/delete_node_using_workflow', {:assembly_id=>service_id, :subtype => :instance, :node_id => node_name}) if delete_node_response['status'] == 'ok' node_deleted_successfully = delete_task_status(service_id, component_to_delete, 'delete_node', check_component_in_task_status) puts "Node: #{node_name} was deleted successfully!" 
else puts "Node: #{node_name} was not deleted successfully!" end puts "" return node_deleted_successfully end def delete_component_with_workflow(service_id, node_name, component_to_delete, check_component_in_task_status) puts "Delete component with workflow:", "---------------------------------------" component_deleted_successfully = false delete_component_response = send_request('/rest/assembly/delete_component_using_workflow', {:assembly_id=>service_id, :task_action => "#{component_to_delete}.delete", :task_params => { "node" => node_name }, :component_id => component_to_delete, :noop_if_no_action => nil, :cmp_full_name => "#{node_name}/#{component_to_delete}", :node_id => node_name }) if delete_component_response['status'] == 'ok' component_deleted_successfully = delete_task_status(service_id, component_to_delete, 'delete_component', check_component_in_task_status) puts "Component: #{component_to_delete} was deleted successfully!" else puts "Component: #{component_to_delete} was not deleted successfully!" end puts "" return component_deleted_successfully end def delete_context(context_name) puts "Delete context:", "-----------------" context_deleted = false delete_context_service_response = send_request('/rest/assembly/delete', {:assembly_id=>context_name}) if (delete_context_service_response['status'] == "ok") puts "context service deleted successfully!" context_deleted = true else puts "context service was not deleted successfully!" 
end puts "" return context_deleted end def push_assembly_updates(service_id, service_module) puts "Push assembly updates:", "---------------------" assembly_updated = false response = send_request('/rest/assembly/promote_to_template', {:assembly_id=>service_id, :mode => 'update', :use_module_namespace => true }) pretty_print_JSON(response) if response['status'] == 'ok' && response['data']['full_module_name'] == service_module assembly_updated = true end puts "" return assembly_updated end def push_component_module_updates_without_changes(service_id, component_module) puts "Push component module updates:", "-------------------------------" response = send_request('/rest/assembly/promote_module_updates', {:assembly_id=>service_id, :module_name => component_module, :module_type => "component_module" }) return response end def get_nodes(service_id) puts "Get all nodes from service:", "-----------------------------" nodes_list = send_request('/rest/assembly/info_about', {:assembly_id=>service_id, :node_id => nil, :component_id => nil, :subtype=>'instance', :about=>'nodes'}) nodes_list = nodes_list['data'].map! { |c| c['display_name'] } pretty_print_JSON(nodes_list) puts "" return nodes_list end def get_components(service_id) puts "Get all components from service:", "-----------------------------" components_list = send_request('/rest/assembly/info_about', {:assembly_id=>service_id, :node_id => nil, :component_id => nil, :subtype=>'instance', :about=>'components'}) components_list = components_list['data'].map! 
{ |c| c['display_name'] } puts "" return components_list end def get_cardinality(service_id, node_name) puts "Get cardinality from service:", "-----------------------------" cardinality = send_request('/rest/assembly/info_about', {:assembly_id=>service_id, :node_id => nil, :component_id => nil, :subtype=>'instance', :about=>'attributes', :format=>'yaml'}) content = YAML.load(cardinality['data']) puts content attributes = (content["nodes"]["#{node_name}/"]||{})['attributes']||{} puts "" return attributes['cardinality'] && attributes['cardinality'].to_i end def get_workflow_info(service_id) puts "Get workflow info:", "----------------------" workflow_info = send_request('/rest/assembly/info_about_task', {:assembly_id=>service_id, :subtype => 'instance'}) content = YAML.load(workflow_info['data']) puts content puts "" return content end def grant_access(service_id, system_user, rsa_pub_name, ssh_key) puts "Grant access:", "-----------------" response = send_request('/rest/assembly/initiate_ssh_pub_access', {:agent_action => :grant_access, :assembly_id=>service_id, :system_user => system_user, :rsa_pub_name => rsa_pub_name, :rsa_pub_key => ssh_key}) pretty_print_JSON(response) puts "" return response end def revoke_access(service_id, system_user, rsa_pub_name, ssh_key) puts "Revoke access:", "-----------------" resp = send_request('/rest/assembly/initiate_ssh_pub_access', {:agent_action => :revoke_access, :assembly_id=>service_id, :system_user => system_user, :rsa_pub_name => rsa_pub_name, :rsa_pub_key => ssh_key}) pretty_print_JSON(resp) response = nil if resp['status'] != 'notok' response = send_request('/rest/assembly/get_action_results', {:action_results_id => resp['data']['action_results_id'], :return_only_if_complete => true, :disable_post_processing => true}) puts response else response = resp end puts "" return response end def list_services_by_property(key, value) # Get list of existing workspace service instances in a specific context puts "List service 
instances with #{value} value for #{key} property:", "----------------------------------------------------------------------------------" service_instance_list = send_request('/rest/assembly/list', {:detail_level=>'nodes', :subtype=>'instance', :include_namespaces => true}) filtered_services = nil if service_instance_list['status'] == 'ok' filtered_services = service_instance_list['data'].select{ |x| x[key].include? value } if filtered_services.length.zero? puts "No service instances with #{value} value for #{key} property been found." filtered_services = nil else puts "#{filtered_services.length} service instances with #{value} value for #{key} property found: " end else puts "Could not get service instance list." end puts '' filtered_services end def list_ssh_access(service_id, system_user, rsa_pub_name, nodes) puts "List ssh access:", "---------------------" sleep 5 response = send_request('/rest/assembly/list_ssh_access', {:assembly_id=>service_id}) pretty_print_JSON(response) list = response['data'].select { |x| x['attributes']['linux_user'] == system_user && x['attributes']['key_name'] == rsa_pub_name && (nodes.include? x['node_name']) } puts "" return list.map! { |x| x['attributes']['key_name']} end def get_task_action_output(service_id, action_id) puts "Get task action output:", "------------------------" response = send_request('/rest/assembly/task_action_detail', {:assembly_id=>service_id, :message_id=>action_id}) pretty_print_JSON(response) runs = {} if response['status'] == "ok" output = response['data'] output.gsub!("=","") if response['data'].include? "=" runs = output.split(/\n \n\n|\n\n\n|\n\n/) else puts "Task action details were not retrieved successfully!" 
end puts "" return runs end def verify_flags(service_id, component_module_name, update_flag, update_saved_flag) puts "Verify update and update saved flags:", "---------------------------------" flags_verified = false response = send_request('/rest/assembly/info_about', {:assembly_id=>service_id, :subtype=>:instance, :about=>'modules', :detail_to_include=>[:version_info]}) pretty_print_JSON(response) component_module_details = response['data'].select { |x| x['display_name'] == component_module_name }.first if !component_module_details.nil? puts "Component module found! Check flags..." pretty_print_JSON(component_module_details) unless component_module_details.key?('local_copy') || component_module_details.key?('update_saved') puts "Flags dont not exist in the output" end if component_module_details['local_copy'] == update_flag && component_module_details['update_saved'] == update_saved_flag puts "Update and update saved flags match the comparison" flags_verified = true else puts "Update and update saved flags does not match the comparison" end else puts "Component module was not found!" end puts "" flags_verified end end
dtk/dtk-server
test/functional/rspec/lib/mixins/assembly_and_service_operations_mixin.rb
Ruby
apache-2.0
51,221
[ 30522, 11336, 3320, 29560, 2121, 7903, 8780, 29487, 6491, 7646, 2378, 1001, 10954, 2109, 2013, 2047, 26718, 2243, 7396, 13366, 4638, 1035, 2065, 1035, 6013, 1035, 2770, 1006, 13045, 1035, 4769, 1010, 3417, 1010, 4130, 1007, 2203, 8400, 1027...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
// // buffered_write_stream.hpp // ~~~~~~~~~~~~~~~~~~~~~~~~~ // // Copyright (c) 2003-2013 Christopher M. Kohlhoff (chris at kohlhoff dot com) // // Distributed under the Boost Software License, Version 1.0. (See accompanying // file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // #ifndef BOOST_ASIO_BUFFERED_WRITE_STREAM_HPP #define BOOST_ASIO_BUFFERED_WRITE_STREAM_HPP #if defined(_MSC_VER) && (_MSC_VER >= 1200) # pragma once #endif // defined(_MSC_VER) && (_MSC_VER >= 1200) #include <boost/asio/detail/config.hpp> #include <cstddef> #include <boost/asio/buffered_write_stream_fwd.hpp> #include <boost/asio/buffer.hpp> #include <boost/asio/completion_condition.hpp> #include <boost/asio/detail/bind_handler.hpp> #include <boost/asio/detail/buffered_stream_storage.hpp> #include <boost/asio/detail/noncopyable.hpp> #include <boost/asio/detail/type_traits.hpp> #include <boost/asio/error.hpp> #include <boost/asio/io_service.hpp> #include <boost/asio/write.hpp> #include <boost/asio/detail/push_options.hpp> namespace pdalboost {} namespace boost = pdalboost; namespace pdalboost { namespace asio { /// Adds buffering to the write-related operations of a stream. /** * The buffered_write_stream class template can be used to add buffering to the * synchronous and asynchronous write operations of a stream. * * @par Thread Safety * @e Distinct @e objects: Safe.@n * @e Shared @e objects: Unsafe. * * @par Concepts: * AsyncReadStream, AsyncWriteStream, Stream, SyncReadStream, SyncWriteStream. */ template <typename Stream> class buffered_write_stream : private noncopyable { public: /// The type of the next layer. typedef typename remove_reference<Stream>::type next_layer_type; /// The type of the lowest layer. typedef typename next_layer_type::lowest_layer_type lowest_layer_type; #if defined(GENERATING_DOCUMENTATION) /// The default buffer size. 
static const std::size_t default_buffer_size = implementation_defined; #else BOOST_ASIO_STATIC_CONSTANT(std::size_t, default_buffer_size = 1024); #endif /// Construct, passing the specified argument to initialise the next layer. template <typename Arg> explicit buffered_write_stream(Arg& a) : next_layer_(a), storage_(default_buffer_size) { } /// Construct, passing the specified argument to initialise the next layer. template <typename Arg> buffered_write_stream(Arg& a, std::size_t buffer_size) : next_layer_(a), storage_(buffer_size) { } /// Get a reference to the next layer. next_layer_type& next_layer() { return next_layer_; } /// Get a reference to the lowest layer. lowest_layer_type& lowest_layer() { return next_layer_.lowest_layer(); } /// Get a const reference to the lowest layer. const lowest_layer_type& lowest_layer() const { return next_layer_.lowest_layer(); } /// Get the io_service associated with the object. pdalboost::asio::io_service& get_io_service() { return next_layer_.get_io_service(); } /// Close the stream. void close() { next_layer_.close(); } /// Close the stream. pdalboost::system::error_code close(pdalboost::system::error_code& ec) { return next_layer_.close(ec); } /// Flush all data from the buffer to the next layer. Returns the number of /// bytes written to the next layer on the last write operation. Throws an /// exception on failure. std::size_t flush(); /// Flush all data from the buffer to the next layer. Returns the number of /// bytes written to the next layer on the last write operation, or 0 if an /// error occurred. std::size_t flush(pdalboost::system::error_code& ec); /// Start an asynchronous flush. template <typename WriteHandler> BOOST_ASIO_INITFN_RESULT_TYPE(WriteHandler, void (pdalboost::system::error_code, std::size_t)) async_flush(BOOST_ASIO_MOVE_ARG(WriteHandler) handler); /// Write the given data to the stream. Returns the number of bytes written. /// Throws an exception on failure. 
template <typename ConstBufferSequence> std::size_t write_some(const ConstBufferSequence& buffers); /// Write the given data to the stream. Returns the number of bytes written, /// or 0 if an error occurred and the error handler did not throw. template <typename ConstBufferSequence> std::size_t write_some(const ConstBufferSequence& buffers, pdalboost::system::error_code& ec); /// Start an asynchronous write. The data being written must be valid for the /// lifetime of the asynchronous operation. template <typename ConstBufferSequence, typename WriteHandler> BOOST_ASIO_INITFN_RESULT_TYPE(WriteHandler, void (pdalboost::system::error_code, std::size_t)) async_write_some(const ConstBufferSequence& buffers, BOOST_ASIO_MOVE_ARG(WriteHandler) handler); /// Read some data from the stream. Returns the number of bytes read. Throws /// an exception on failure. template <typename MutableBufferSequence> std::size_t read_some(const MutableBufferSequence& buffers) { return next_layer_.read_some(buffers); } /// Read some data from the stream. Returns the number of bytes read or 0 if /// an error occurred. template <typename MutableBufferSequence> std::size_t read_some(const MutableBufferSequence& buffers, pdalboost::system::error_code& ec) { return next_layer_.read_some(buffers, ec); } /// Start an asynchronous read. The buffer into which the data will be read /// must be valid for the lifetime of the asynchronous operation. 
template <typename MutableBufferSequence, typename ReadHandler> BOOST_ASIO_INITFN_RESULT_TYPE(ReadHandler, void (pdalboost::system::error_code, std::size_t)) async_read_some(const MutableBufferSequence& buffers, BOOST_ASIO_MOVE_ARG(ReadHandler) handler) { detail::async_result_init< ReadHandler, void (pdalboost::system::error_code, std::size_t)> init( BOOST_ASIO_MOVE_CAST(ReadHandler)(handler)); next_layer_.async_read_some(buffers, BOOST_ASIO_MOVE_CAST(BOOST_ASIO_HANDLER_TYPE(ReadHandler, void (pdalboost::system::error_code, std::size_t)))(init.handler)); return init.result.get(); } /// Peek at the incoming data on the stream. Returns the number of bytes read. /// Throws an exception on failure. template <typename MutableBufferSequence> std::size_t peek(const MutableBufferSequence& buffers) { return next_layer_.peek(buffers); } /// Peek at the incoming data on the stream. Returns the number of bytes read, /// or 0 if an error occurred. template <typename MutableBufferSequence> std::size_t peek(const MutableBufferSequence& buffers, pdalboost::system::error_code& ec) { return next_layer_.peek(buffers, ec); } /// Determine the amount of data that may be read without blocking. std::size_t in_avail() { return next_layer_.in_avail(); } /// Determine the amount of data that may be read without blocking. std::size_t in_avail(pdalboost::system::error_code& ec) { return next_layer_.in_avail(ec); } private: /// Copy data into the internal buffer from the specified source buffer. /// Returns the number of bytes copied. template <typename ConstBufferSequence> std::size_t copy(const ConstBufferSequence& buffers); /// The next layer. Stream next_layer_; // The data in the buffer. detail::buffered_stream_storage storage_; }; } // namespace asio } // namespace pdalboost #include <boost/asio/detail/pop_options.hpp> #include <boost/asio/impl/buffered_write_stream.hpp> #endif // BOOST_ASIO_BUFFERED_WRITE_STREAM_HPP
verma/PDAL
boost/boost/asio/buffered_write_stream.hpp
C++
bsd-3-clause
7,635
[ 30522, 1013, 1013, 1013, 1013, 17698, 2098, 1035, 4339, 1035, 5460, 1012, 6522, 2361, 1013, 1013, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 1066, 10...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
package main import ( "encoding/json" "errors" "fmt" "os" "github.com/idahobean/npm-resource/check" "github.com/idahobean/npm-resource/npm" ) func main() { NPM := npm.NewNPM() command := check.NewCommand(NPM) var request check.Request if err := json.NewDecoder(os.Stdin).Decode(&request); err != nil { fatal("reading request from stdin", err) } var err error if request.Source.PackageName == "" { err = errors.New("package_name") } if err != nil { fatal("parameter required", err) } response, err := command.Run(request) if err != nil { fatal("running command", err) } if err := json.NewEncoder(os.Stdout).Encode(response); err != nil { fatal("writing response to stdout", err) } } func fatal(message string, err error) { fmt.Fprintf(os.Stderr, "error %s: %s\n", message, err) os.Exit(1) }
idahobean/npm-resource
check/cmd/check/main.go
GO
mit
832
[ 30522, 7427, 2364, 12324, 1006, 1000, 17181, 1013, 1046, 3385, 1000, 1000, 10697, 1000, 1000, 4718, 2102, 1000, 1000, 9808, 1000, 1000, 21025, 2705, 12083, 1012, 4012, 1013, 9795, 4783, 2319, 1013, 27937, 2213, 1011, 7692, 1013, 4638, 1000,...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html data-context="Build Apps" xmlns="http://www.w3.org/1999/xhtml"> <head> <meta content="text/html; charset=utf-8" http-equiv="Content-Type"/> <title>Leaf Workspace Manager - Legato Docs</title> <meta content="legato™ is an open source Linux-based embedded platform designed to simplify connected IoT application development" name="description"/> <meta content="legato, iot" name="keywords"/> <meta content="width=device-width, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, user-scalable=no" name="viewport"/> <meta content="21.05.0" name="legato-version"/> <link href="resources/images/legato.ico" rel="shortcut icon"/> <link href="resources/images/legato.ico" rel="icon" type="image/x-icon"/> <link href="resources/images/legato.ico" rel="shortcut icon" type="image/x-icon"/> <link href="resources/images/legato.ico" rel="apple-touch-icon" type="image/x-icon"/> <link href="resources/css/style.css" media="screen" rel="stylesheet" type="text/css"/> <link href="resources/css/font-awesome.css" rel="stylesheet" type="text/css"/> <!--[if IE]> <script src="resources/js/html5shiv.js"></script> <script src="resources/js/respond.js"></script> <![endif]--> <script src="https://code.jquery.com/jquery-2.2.3.min.js"></script> <script src="resources/js/main.js"></script> <script src="tocs/Build_Apps_Get_Started.json"></script> </head> <body> <noscript> <input class="modal-closing-trick" id="modal-closing-trick" type="checkbox"/> <div id="nojs"> <label for="modal-closing-trick"> <span>You seem to not have Javascript <a href="http://enable-javascript.com">enabled</a>, so site functionality like the search and navigation tree won't work.</span> </label> </div> </noscript> <div class="wrapper"> <div class="fa fa-bars documentation" id="menu-trigger"></div> <div id="top"> <header> <nav> <a class="navlink" href="/">Introduction</a><a class="navlink selected" 
href="buildAppsMain.html">Build Apps</a><a class="navlink" href="buildPlatformMain.html">Build Platform</a><a class="navlink" href="aboutMain.html">About</a> </nav> </header> </div> <div class="white" id="menudocumentation"> <header> <a href="/"> <img alt="Back to Legato Homepage" id="logo" src="resources/images/legato_logo.png"/></a> <h2>/ Build Apps</h2> <nav class="secondary"> <a class="link-selected" href="getStarted.html">Get Started</a><a href="concepts.html">Concepts</a><a href="apiGuidesMain.html">API Guides</a><a href="tools.html">Tools</a><a href="howToMain.html">How To</a><a href="experimentalMain.html">Experimental Features</a><a href="migrationGuide.html">Linux 4.14 Migration Guide</a> </nav> <nav class="ui-front"> <i class="fa fa-search" id="search-icon"></i> <input id="searchbox" placeholder="Search"/> </nav> </header> </div> <div id="resizable"> <div id="left"> <div id="tree1"></div> </div> </div> <div class="content"> <div class="header"> <div class="headertitle"> <h1 class="title">Leaf Workspace Manager </h1> </div> </div><div class="contents"> <div class="textblock"><p>Leaf is a workspace manager that will download, install and configure the required software packages for a Legato development environment.</p> <p>This tutorial will walk you through how to:</p><ul> <li>Install Leaf</li> <li>Set up a remote to point to a package repository</li> <li>Search for a package to install</li> <li>Set up your development environment</li> <li>Use the built in shell to access the development tools</li> <li>Set up your workspace to start development</li> </ul> <p>The basic workflow that should be followed to download and set up a development environment for your target. 
These tutorials use the packages that have been created for the Legato project as examples.</p> <h1><a class="anchor" id="confLeafInstall"></a> Install Leaf</h1> <p>Leaf is hosted in the Sierra Wireless debian tools repository, and is provided as a <code></code>.deb package to be installed through <code>apt</code>. We have also provided <a class="el" href="confLeaf_Install.html">alternative install instructions</a> for older Ubuntu Distributions (14.04 and below) and instructions for manually installing from a tarball.</p> <p>Install leaf download and set up our debian package from the Sierra Wireless tools repository: </p><pre class="fragment">$ wget https://downloads.sierrawireless.com/tools/leaf/leaf_latest.deb -O /tmp/leaf_latest.deb &amp;&amp; sudo apt install /tmp/leaf_latest.deb </pre><p>Leaf installs tools, images and packages into the <code>~/</code>.leaf/ directory; all configuration is stored in <code>~/</code>.config/leaf/</p> <p>Before searching and installing your first SDK it is recommended to make a separate leaf workspace directory to store all of your custom Legato development. For this site and tutorials we will be setting up the leaf workspace in <code>$HOME/myWorkspace</code>.</p> <pre class="fragment">$ mkdir ~/myWorkspace; cd ~/myWorkspace </pre><h1><a class="anchor" id="confLeafsearch"></a> Search for Packages</h1> <p>Now that you have leaf installed, you can now search through a repository to find the packages to download, install and configure your Development Environment.</p> <dl class="section warning"><dt>Warning</dt><dd>You are able to search leaf from anywhere on your computer; running <code>leaf</code> <code>setup</code> will add config in the directory that you are currently in. 
It is recommended to create a workspace dir and set up leaf packages from within your workspace.</dd></dl> <p><b>leaf</b> <b>search</b> </p> <p>Using <code>leaf</code> <code>search</code> with no filters will list every package in any repository that you have enabled (it may be a huge list). It is better to search with filters that return a smaller list that is specific for your target.</p> <h2><a class="anchor" id="confLeafsearchTarget"></a> Search for a Target</h2> <pre class="fragment">$ leaf search -t &lt;target&gt; (i.e.; leaf search -t wp76xx will bring back all packages for the wp76 target) </pre><p>The search results will return a package identifier (the package name of the package to install), a high level description of what is in the package, and the tags of the package. You are also able to search filter the search results by tag using the -t flag. </p><pre class="fragment">$ leaf search -t latest (returns all the newest/latest published packages for all targets) $ leaf search -t wp76xx -t latest (returns the latest packages for the wp76 target) </pre><p>To see exactly what is contained in the package perform a search with a <code>-v</code> flag (verbose). This will list the details of what each package contains including the release date and the list of versions of all sub-packages listed as dependencies. </p><pre class="fragment">$ leaf search -t wp76xx -t latest -v (returns the details of the latest package for the wp76 target) </pre><p>For details on each of the components of the package visit the vendors page. 
Firmware Details (including Yocto distribution, toolchain and firmware): <a href="https://source.sierrawireless.com">source.sierrawireless.com</a> Legato Details: <a href="https://legato.io/releases">Releases</a></p> <h1><a class="anchor" id="confLeafSetup"></a> Set up Development Environment</h1> <p>Now that you know which package that you want to install on your development machine, the next step is to run <code>leaf</code> <code>setup</code>. The <code>setup</code> command will prepare your directory as a workspace and download, install and configure your workspace with a profile (settings specific to your target and version) preparing you to start developing for your target.</p> <pre class="fragment">$ leaf setup &lt;profile name&gt; -p &lt;package identifier&gt; $ leaf setup wp76stable -p swi-wp76_1.0.0 (downloads and installs the swi-wp76_1.0.0 package in the wp76stable profile) </pre><dl class="section note"><dt>Note</dt><dd>Downloading and installing the package may take a few minutes. Leaf configures everything that is needed for you to start developing for your target including the toolchain, Legato application framework and other development tools. It will also take care of installing any apt dependencies. The apt dependencies will require sudo and you will be prompted for your password for sudo privileges.</dd></dl> <p>After installation a new directory (<code>leaf-data</code>) and a new configuration file (<code>leaf-workspace.json</code>) will be created in your workspace directory. The directory contains symbolic links to all the resources needed for development and the leaf tools know how to find the resources for development.</p> <p>You will now be able to use leaf commands to view your environment and use the resources that you just downloaded and installed. For detailed help on the leaf sub-commands see <a class="el" href="toolsLeaf.html">Leaf</a>. 
</p><pre class="fragment">$ leaf status - displays the profile list and information about the profiles $ leaf select - lets you select different profiles (if you have more then one installed) $ leaf profile delete &lt;profile name&gt; $ leaf profile rename &lt;old name&gt; &lt;new name&gt; - renames your profile </pre><h1><a class="anchor" id="confLeafSetup2nd"></a> Set up a 2nd Profile</h1> <p>To set up a second profile (if you wish to use multiple targets, or multiple versions) run the <code>leaf</code> <code>setup</code> command again and choose a new profile name. </p><pre class="fragment">$ leaf setup &lt;2nd profile&gt; -p &lt;2nd package identifier&gt; $ leaf setup wp76dev -p swi-wp76_1.0.0 </pre><p>To see the two profiles set up use <code>leaf</code> <code>status</code> and <code>leaf</code> <code>select</code> to switch between profiles. </p><pre class="fragment">$ leaf status $ leaf select &lt;profile name&gt;. </pre><h1><a class="anchor" id="confLeafShell"></a> Leaf Shell</h1> <p><code>leaf</code> <code>shell</code> provides an interactive shell that is <b>profile</b> aware to run all of your tools for your development environment. If you need to switch to a different profile (different target or version of software) the shell environment will update all environment variables to point to the version that matches the profile that you are working with.</p> <dl class="section warning"><dt>Warning</dt><dd><code>shell</code> is $PWD dependant; if you switch to another directory outside of your workspace you will lose your <code>leaf</code> environment variables, and it will not be profile aware (automatically switch to the toolchain and tools that match the profile you are using).</dd></dl> <p>i.e.; Using <code>mksys</code> from within the leaf shell will use the version of the tool that is configured for that specific profile and will also build a Legato System with the correct toolchain. 
If you switch profiles and run <code>mksys</code> again it will use the version configured with the second profile and use the toolchain configured for the second profile environment.</p> <h1><a class="anchor" id="confLeafWorkspace"></a> Set up Workspace</h1> <p>Now that you have all your development environment set up and configured you are now able to start development.</p> <p>All leaf packages are downloaded to <code>$HOME/</code>.leaf by default (see <code>leaf</code> <code>config</code> to <code>update</code>) and are used as references to be included in to your workspace via environment variables.</p> <p>Any new environment variables that you would like added to your development environment can be added with <code>leaf</code> <code>env</code>. See <code>leaf</code> <code>help</code> <code>env</code> for details on adding new environment variables to either a profile or a workspace. </p><pre class="fragment">$ leaf env profile --set MYVAR=1 (sets the environment variable MYVAR to 1 for the current profile) </pre><h1><a class="anchor" id="confLeafDevelopment"></a> Legato Development</h1> <p>Leaf enables a new style of Legato development that allows you to create your component, apps and systems in your own workspace instead of working directly in the Legato directory. This will keep your custom code separate and still allow a full build of Legato Systems. Any changes that you do make directly to the Legato Application Framework will be reflected in your system when you run <code>mksys</code> from within the leaf shell in your workspace.</p> <p>If you do wish to use the Git tracked source code for Legato you are able to check-out the source code for Legato. This version requires you to have an account on GitHub. 
Use the command <a class="el" href="confLeafSrc.html">leaf getsrc legato</a> to checkout the version of Legato that matches your profile.</p> <h2><a class="anchor" id="confLeafDevelopmentLegato"></a> Legato Workflow Changes</h2> <ul> <li>The version of Legato that you install is pre-built for your module, meaning that there is no need to run make, or set-up the toolchain and other configuration tasks.</li> <li>You do not need to run <code>bin/legs</code> or source <code>bin/configlegatoenv</code> in your bash.rc file. The leaf shell makes sure that all environment variables are set up and are aware of the specific version of Legato that you are using within each profile.</li> <li>Do not add your apps and settings to default.sdef you are now able to <code>#include</code> <code>default.sdef</code> in an sdef in your workspace and build not only your settings but all the default legato apps and configuration.</li> </ul> <dl class="section note"><dt>Note</dt><dd>Because you are now working with a pre-built version of Legato, any changes that you do make to the Legato Application Framework are not tracked, if you wish to modify the framework and build from source code see <code>leaf</code> <code>help</code> <code>legato-source</code> to download and connect tracked Legato source code.</dd></dl> <h2><a class="anchor" id="confLeafDevelopmentSDEF"></a> Set-up SDEF</h2> <p>Using your own <code></code>.sdef file is easy to set up and maintain. 
Using this method leaves all the Legato configuration in <code>default.sdef</code> and allows you to quickly see and work with your customization to your Legato System.</p> <p>Create a new <code></code>.sdef file in your leaf workspace: </p><pre class="fragment">$ vim mySystem.sdef (or the editor of your choice) </pre><p>In <code>mySystem.sdef</code> use the following line to include all the default Legato settings: </p><pre class="fragment">#include $LEGATO_ROOT/default.sdef </pre><p>You are also able to include any other <code></code>.sdef files you wish using the same method.</p> <p>A couple of very useful environment variables that are set up in Legato:</p><ul> <li><code>$LEGATO_ROOT</code> - resolves to the location of the Legato Application Framework for your profile</li> <li><code>$CURDIR</code> - resolves to the directory where you run the mktools from (i.e.; add <code>$CURDIR/path/to/your/app</code> to the apps section of your .sdef and then run mksys from your workspace directory to build your apps into the update file)</li> </ul> <p>To build your system you no longer need to re-make the build. Run <code>mksys</code> and point it at your <code></code>.sdef. To build a Legato System using your custom sdef run: </p><pre class="fragment">$ mksys -t $LEGATO_TARGET &lt;sdef&gt; (i.e.; mksys -t wp76xx mySystem.sdef from your leaf workspace directory) </pre><h2><a class="anchor" id="confLeafDevelopmentWorkspace"></a> Workspace Layout</h2> <p>Because you are not working directly in the Legato directory anymore, we recommend setting up a directory structure that will be easy to use and organize your apps, kernel modules and other settings for the Legato Application Framework. Remember to use <code>$CURDIR</code> to reference the workspace folder in your .sdef.</p> <p>Example directory structure using the helloWorld app: </p><pre class="fragment"> . 
├── apps │   └── helloWorld │   ├── CMakeLists.txt │   ├── helloComponent │   │   ├── Component.cdef │   │   └── helloWorld.c │   └── helloWorld.adef ├── components | └── ... (a directory for each component) ├── drivers | └── ... (a directory for each kernel module) ├── interfaces | └── ... (all apis that your apps export) ├── leaf-data │   └── ... (leaf symbolic links, do not edit) ├── leaf-workspace.json └── mySystem.sdef </pre><p>This is just an example of how you could set up your directory structure, it is up to you and how you connect all of your components, apps and system in your workspace. Working out of the workspace directory lets you easily work with different profiles and switch to a new profile (target and/or version of Legato) and continue to use your same components, apps and/or system to build for your target devices.</p> <h2><a class="anchor" id="confLeafWorkflows"></a> Workflows</h2> <p>The Leaf workflow design includes a hierarchy and is set up with the first install of a leaf package:</p><ul> <li>USER (universal config for all workspace and profiles)</li> <li>WORKSPACE (the working directory for your code and your customization)</li> <li>PROFILE (target/version specific configuration and settings)</li> </ul> <p>See <a class="el" href="confLeafWS.html">leaf help legato-workflow</a> for more details on the relationship between profiles, workspaces and users.</p> <p class="copyright">Copyright (C) Sierra Wireless Inc. </p> </div></div> <br clear="left"/> </div> </div> <link href="resources/css/jqtree.css" rel="stylesheet" type="text/css"/> <script src="resources/js/tree.jquery.js" type="text/javascript"></script> <script src="resources/js/jquery.cookie.js"></script> <script src="https://code.jquery.com/ui/1.11.4/jquery-ui.min.js"></script> <link href="resources/css/perfect-scrollbar.min.css" rel="stylesheet"/> <script src="resources/js/perfect-scrollbar.jquery.min.js"></script> </body> </html>
legatoproject/legato-docs
21_05/confLeaf.html
HTML
mpl-2.0
18,099
[ 30522, 1026, 999, 9986, 13874, 16129, 2270, 1000, 1011, 1013, 1013, 1059, 2509, 2278, 1013, 1013, 26718, 2094, 1060, 11039, 19968, 1015, 1012, 1014, 17459, 1013, 1013, 4372, 1000, 1000, 8299, 1024, 1013, 1013, 7479, 1012, 1059, 2509, 1012, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/* TEMPLATE GENERATED TESTCASE FILE Filename: CWE78_OS_Command_Injection__char_console_w32_execvp_12.c Label Definition File: CWE78_OS_Command_Injection.strings.label.xml Template File: sources-sink-12.tmpl.c */ /* * @description * CWE: 78 OS Command Injection * BadSource: console Read input from the console * GoodSource: Fixed string * Sink: w32_execvp * BadSink : execute command with execvp * Flow Variant: 12 Control flow: if(globalReturnsTrueOrFalse()) * * */ #include "std_testcase.h" #include <wchar.h> #ifdef _WIN32 #define COMMAND_INT_PATH "%WINDIR%\\system32\\cmd.exe" #define COMMAND_INT "cmd.exe" #define COMMAND_ARG1 "/c" #define COMMAND_ARG2 "dir" #define COMMAND_ARG3 data #else /* NOT _WIN32 */ #include <unistd.h> #define COMMAND_INT_PATH "/bin/sh" #define COMMAND_INT "sh" #define COMMAND_ARG1 "ls" #define COMMAND_ARG2 "-la" #define COMMAND_ARG3 data #endif #include <process.h> #define EXECVP _execvp #ifndef OMITBAD void CWE78_OS_Command_Injection__char_console_w32_execvp_12_bad() { char * data; char dataBuffer[100] = ""; data = dataBuffer; if(globalReturnsTrueOrFalse()) { { /* Read input from the console */ size_t dataLen = strlen(data); /* if there is room in data, read into it from the console */ if (100-dataLen > 1) { /* POTENTIAL FLAW: Read data from the console */ if (fgets(data+dataLen, (int)(100-dataLen), stdin) != NULL) { /* The next few lines remove the carriage return from the string that is * inserted by fgets() */ dataLen = strlen(data); if (dataLen > 0 && data[dataLen-1] == '\n') { data[dataLen-1] = '\0'; } } else { printLine("fgets() failed"); /* Restore NUL terminator if fgets fails */ data[dataLen] = '\0'; } } } } else { /* FIX: Append a fixed string to data (not user / external input) */ strcat(data, "*.*"); } { char *args[] = {COMMAND_INT_PATH, COMMAND_ARG1, COMMAND_ARG2, COMMAND_ARG3, NULL}; /* execvp - searches for the location of the command among * the directories specified by the PATH environment variable */ /* POTENTIAL FLAW: 
Execute command without validating input possibly leading to command injection */ EXECVP(COMMAND_INT, args); } } #endif /* OMITBAD */ #ifndef OMITGOOD /* goodG2B() - use goodsource and badsink by changing the "if" so that * both branches use the GoodSource */ static void goodG2B() { char * data; char dataBuffer[100] = ""; data = dataBuffer; if(globalReturnsTrueOrFalse()) { /* FIX: Append a fixed string to data (not user / external input) */ strcat(data, "*.*"); } else { /* FIX: Append a fixed string to data (not user / external input) */ strcat(data, "*.*"); } { char *args[] = {COMMAND_INT_PATH, COMMAND_ARG1, COMMAND_ARG2, COMMAND_ARG3, NULL}; /* execvp - searches for the location of the command among * the directories specified by the PATH environment variable */ /* POTENTIAL FLAW: Execute command without validating input possibly leading to command injection */ EXECVP(COMMAND_INT, args); } } void CWE78_OS_Command_Injection__char_console_w32_execvp_12_good() { goodG2B(); } #endif /* OMITGOOD */ /* Below is the main(). It is only used when building this testcase on * its own for testing or for building a binary to use in testing binary * analysis tools. It is not used when compiling all the testcases as one * application, which is how source code analysis tools are tested. */ #ifdef INCLUDEMAIN int main(int argc, char * argv[]) { /* seed randomness */ srand( (unsigned)time(NULL) ); #ifndef OMITGOOD printLine("Calling good()..."); CWE78_OS_Command_Injection__char_console_w32_execvp_12_good(); printLine("Finished good()"); #endif /* OMITGOOD */ #ifndef OMITBAD printLine("Calling bad()..."); CWE78_OS_Command_Injection__char_console_w32_execvp_12_bad(); printLine("Finished bad()"); #endif /* OMITBAD */ return 0; } #endif
maurer/tiamat
samples/Juliet/testcases/CWE78_OS_Command_Injection/s02/CWE78_OS_Command_Injection__char_console_w32_execvp_12.c
C
mit
4,586
[ 30522, 1013, 1008, 23561, 7013, 3231, 18382, 5371, 5371, 18442, 1024, 19296, 2063, 2581, 2620, 1035, 9808, 1035, 3094, 1035, 13341, 1035, 1035, 25869, 1035, 10122, 1035, 1059, 16703, 1035, 4654, 8586, 2615, 2361, 1035, 2260, 1012, 1039, 383...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
package net.tropicraft.world.genlayer; import net.minecraft.world.gen.layer.IntCache; public class GenLayerTropiVoronoiZoom extends GenLayerTropicraft { public enum Mode { CARTESIAN, MANHATTAN; } public Mode zoomMode; public GenLayerTropiVoronoiZoom(long seed, GenLayerTropicraft parent, Mode zoomMode) { super(seed); super.parent = parent; this.zoomMode = zoomMode; this.setZoom(1); } /** * Returns a list of integer values generated by this layer. These may be interpreted as temperatures, rainfall * amounts, or biomeList[] indices based on the particular GenLayer subclass. */ public int[] getInts(int x, int y, int width, int length) { final int randomResolution = 1024; final double half = 0.5D; final double almostTileSize = 3.6D; final double tileSize = 4D; x -= 2; y -= 2; int scaledX = x >> 2; int scaledY = y >> 2; int scaledWidth = (width >> 2) + 2; int scaledLength = (length >> 2) + 2; int[] parentValues = this.parent.getInts(scaledX, scaledY, scaledWidth, scaledLength); int bitshiftedWidth = scaledWidth - 1 << 2; int bitshiftedLength = scaledLength - 1 << 2; int[] aint1 = IntCache.getIntCache(bitshiftedWidth * bitshiftedLength); int i; for(int j = 0; j < scaledLength - 1; ++j) { i = 0; int baseValue = parentValues[i + 0 + (j + 0) * scaledWidth]; for(int advancedValueJ = parentValues[i + 0 + (j + 1) * scaledWidth]; i < scaledWidth - 1; ++i) { this.initChunkSeed((long)(i + scaledX << 2), (long)(j + scaledY << 2)); double offsetY = ((double)this.nextInt(randomResolution) / randomResolution - half) * almostTileSize; double offsetX = ((double)this.nextInt(randomResolution) / randomResolution - half) * almostTileSize; this.initChunkSeed((long)(i + scaledX + 1 << 2), (long)(j + scaledY << 2)); double offsetYY = ((double)this.nextInt(randomResolution) / randomResolution - half) * almostTileSize + tileSize; double offsetXY = ((double)this.nextInt(randomResolution) / randomResolution - half) * almostTileSize; this.initChunkSeed((long)(i + scaledX << 2), (long)(j + scaledY 
+ 1 << 2)); double offsetYX = ((double)this.nextInt(randomResolution) / randomResolution - half) * almostTileSize; double offsetXX = ((double)this.nextInt(randomResolution) / randomResolution - half) * almostTileSize + tileSize; this.initChunkSeed((long)(i + scaledX + 1 << 2), (long)(j + scaledY + 1 << 2)); double offsetYXY = ((double)this.nextInt(randomResolution) / randomResolution - half) * almostTileSize + tileSize; double offsetXXY = ((double)this.nextInt(randomResolution) / randomResolution - half) * almostTileSize + tileSize; int advancedValueI = parentValues[i + 1 + (j + 0) * scaledWidth] & 255; int advancedValueIJ = parentValues[i + 1 + (j + 1) * scaledWidth] & 255; for(int innerX = 0; innerX < 4; ++innerX) { int index = ((j << 2) + innerX) * bitshiftedWidth + (i << 2); for(int innerY = 0; innerY < 4; ++innerY) { double baseDistance; double distanceY; double distanceX; double distanceXY; switch(zoomMode) { case CARTESIAN: baseDistance = ((double)innerX - offsetX) * ((double)innerX - offsetX) + ((double)innerY - offsetY) * ((double)innerY - offsetY); distanceY = ((double)innerX - offsetXY) * ((double)innerX - offsetXY) + ((double)innerY - offsetYY) * ((double)innerY - offsetYY); distanceX = ((double)innerX - offsetXX) * ((double)innerX - offsetXX) + ((double)innerY - offsetYX) * ((double)innerY - offsetYX); distanceXY = ((double)innerX - offsetXXY) * ((double)innerX - offsetXXY) + ((double)innerY - offsetYXY) * ((double)innerY - offsetYXY); break; case MANHATTAN: baseDistance = Math.abs(innerX - offsetX) + Math.abs(innerY - offsetY); distanceY = Math.abs(innerX - offsetXY) + Math.abs(innerY - offsetYY); distanceX = Math.abs(innerX - offsetXX) + Math.abs(innerY - offsetYX); distanceXY = Math.abs(innerX - offsetXXY) + Math.abs(innerY - offsetYXY); break; default: baseDistance = ((double)innerX - offsetX) * ((double)innerX - offsetX) + ((double)innerY - offsetY) * ((double)innerY - offsetY); distanceY = ((double)innerX - offsetXY) * ((double)innerX - offsetXY) 
+ ((double)innerY - offsetYY) * ((double)innerY - offsetYY); distanceX = ((double)innerX - offsetXX) * ((double)innerX - offsetXX) + ((double)innerY - offsetYX) * ((double)innerY - offsetYX); distanceXY = ((double)innerX - offsetXXY) * ((double)innerX - offsetXXY) + ((double)innerY - offsetYXY) * ((double)innerY - offsetYXY); } if(baseDistance < distanceY && baseDistance < distanceX && baseDistance < distanceXY) { aint1[index++] = baseValue; } else if(distanceY < baseDistance && distanceY < distanceX && distanceY < distanceXY) { aint1[index++] = advancedValueI; } else if(distanceX < baseDistance && distanceX < distanceY && distanceX < distanceXY) { aint1[index++] = advancedValueJ; } else { aint1[index++] = advancedValueIJ; } } } baseValue = advancedValueI; advancedValueJ = advancedValueIJ; } } int[] aint2 = IntCache.getIntCache(width * length); for(i = 0; i < length; ++i) { System.arraycopy(aint1, (i + (y & 3)) * bitshiftedWidth + (x & 3), aint2, i * width, width); } return aint2; } @Override public void setZoom(int zoom) { this.zoom = zoom; parent.setZoom(zoom * 4); } }
Vexatos/Tropicraft
src/main/java/net/tropicraft/world/genlayer/GenLayerTropiVoronoiZoom.java
Java
mpl-2.0
6,674
[ 30522, 7427, 5658, 1012, 19817, 7361, 2594, 27528, 2102, 1012, 2088, 1012, 8991, 24314, 1025, 12324, 5658, 1012, 3067, 10419, 1012, 2088, 1012, 8991, 1012, 6741, 1012, 20014, 3540, 5403, 1025, 2270, 2465, 8991, 24314, 13181, 8197, 14550, 17...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<?php /** * @package JMAP::SITEMAP::components::com_jmap * @subpackage views * @subpackage sitemap * @subpackage tmpl * @author Joomla! Extensions Store * @copyright (C) 2014 - Joomla! Extensions Store * @license GNU/GPLv2 http://www.gnu.org/licenses/gpl-2.0.html */ defined ( '_JEXEC' ) or die ( 'Restricted access' ); $sourceTitle = $this->sourceparams->get ( 'title', $this->source->name ); $showtitle = $this->sourceparams->get ( 'showtitle', 1 ); $openTarget = $this->sourceparams->get ( 'opentarget', $this->cparams->get ('opentarget') ); if (! $showtitle) { $sourceTitle = '&nbsp;'; } // Include common template init include 'default_common_user.php'; if (count ( $this->source->data )) { // If categorization detected for datasource elements according to adiacency/multi adiacency setup, Feature Detection if(isset($this->source->catRecursion) && isset($this->source->itemsByCat) && isset($this->source->catChildrenByCat)) { echo '<ul class="jmap_filetree"><li><span class="folder">' . $sourceTitle. 
'</span>'; // Start building tree recurseCats(0, $this->source->itemsByCat, $this->source->catChildrenByCat, 0, $this->asCategoryTitleField, $this->liveSite, $targetOption, $targetView, $targetViewName, $additionalQueryStringParams, $openTarget, $arrayKeysDiff, $titleIdentifier, $idIdentifier, $idURLFilter, $catidIdentifier, $catidURLFilter, $supportedRouterHelperAdapters, $guessItemid, $mainTable); echo '</ul></li></ul></li></ul>'; } // If categorization detected for datasource elements group by categories elseif(isset($this->source->data[0]->{$this->asCategoryTitleField}) || isset($this->source->data[0]->jsitemap_level)){ $first = true; $catsave = null; $catRecursion = false; $liIndent = null; $hasValidCategoryTitleField = (bool)(isset($this->asCategoryTitleField) && $this->asCategoryTitleField); // Manage levels, Feature Detection if(isset($this->source->catRecursion) && isset($this->source->data[0]->jsitemap_level) && $this->source->recursionType == 'level') { $catRecursion = true; } echo '<ul class="jmap_filetree"><li><span class="folder">' . $sourceTitle. '</span>'; foreach ( $this->source->data as $elm ) { // Calculate element indentation, Feature Detection $indent = $catRecursion ? ($elm->jsitemap_level - 1) * 15 : 0; // Subitems with categorization in multilevel mode if($hasValidCategoryTitleField) { if ($elm->{$this->asCategoryTitleField} != $catsave && ! $first) { echo '</ul></li></ul>'; echo '<ul class="jmap_filetree" style="margin-left:' . $indent . 'px"><li><span class="folder">' . $elm->{$this->asCategoryTitleField} . '</span>'; echo '<ul>'; $catsave = $elm->{$this->asCategoryTitleField}; } else { if ($first) { echo '<ul class="jmap_filetree" style="margin-left:' . $indent . 'px"><li><span class="folder">' . $elm->{$this->asCategoryTitleField} . '</span>'; echo '<ul>'; $first = false; $catsave = $elm->{$this->asCategoryTitleField}; } } } else { // Final categories items with categorization in multilevel mode if (! 
$first) { echo '</ul>'; echo '<ul>'; } else { if ($first) { echo '<ul>'; $first = false; } } } $title = isset($titleIdentifier) && $titleIdentifier != '' ? $elm->{$titleIdentifier} : null; // Additional fields $additionalParamsQueryString = null; $objectVars = array_diff_key(get_object_vars($elm), $arrayKeysDiff); // Filter URL safe alias fields id/catid if(isset($objectVars[$idIdentifier]) && $idURLFilter) { $objectVars[$idIdentifier] = JFilterOutput::stringURLSafe($objectVars[$idIdentifier]); } if(isset($objectVars[$catidIdentifier]) && $catidURLFilter) { $objectVars[$catidIdentifier] = JFilterOutput::stringURLSafe($objectVars[$catidIdentifier]); } if(is_array($objectVars) && count($objectVars)) { $additionalQueryStringFromObjectProp = '&' . http_build_query($objectVars); } if(isset($supportedRouterHelperAdapters[$targetOption]) && $supportedRouterHelperAdapters[$targetOption]) { include 'adapters/'.$targetOption.'.php'; } else { $guessedItemid = null; if($guessItemid) { $guessedItemid = JMapRouteHelper::getItemRoute($targetOption, $targetViewName, $elm->{$idIdentifier}, $elm, $mainTable); if($guessedItemid) { $guessedItemid = '&Itemid=' . $guessedItemid; } } $seflink = JRoute::_ ( 'index.php?option=' . $targetOption . $targetView . $additionalQueryStringFromObjectProp . $additionalQueryStringParams . $guessedItemid); } if(!$hasValidCategoryTitleField && $catRecursion) { $liIndent = ' style="margin-left:' . ($indent + 15) . 'px"'; } echo '<li' . $liIndent .'>' . '<a target="' . $openTarget . '" href="' . $this->liveSite . $seflink . '" >' . $title . '</a></li>'; } if($hasValidCategoryTitleField) { echo '</ul></li></ul>'; } else { echo '</ul>'; } echo '</li></ul>'; } else { // No categorization detected for datasource elements echo '<ul class="jmap_filetree"><li><span class="folder">' . $sourceTitle. '</span><ul>'; foreach ( $this->source->data as $elm ) { $title = isset($titleIdentifier) && $titleIdentifier != '' ? 
$elm->{$titleIdentifier} : null; // Additional fields $additionalQueryStringFromObjectProp = null; $objectVars = array_diff_key(get_object_vars($elm), $arrayKeysDiff); // Filter URL safe alias fields id/catid if(isset($objectVars[$idIdentifier]) && $idURLFilter) { $objectVars[$idIdentifier] = JFilterOutput::stringURLSafe($objectVars[$idIdentifier]); } if(isset($objectVars[$catidIdentifier]) && $catidURLFilter) { $objectVars[$catidIdentifier] = JFilterOutput::stringURLSafe($objectVars[$catidIdentifier]); } if(is_array($objectVars) && count($objectVars)) { $additionalQueryStringFromObjectProp = '&' . http_build_query($objectVars); } if(isset($supportedRouterHelperAdapters[$targetOption]) && $supportedRouterHelperAdapters[$targetOption]) { include 'adapters/'.$targetOption.'.php'; } else { $guessedItemid = null; if($guessItemid) { $guessedItemid = JMapRouteHelper::getItemRoute($targetOption, $targetViewName, $elm->{$idIdentifier}, $elm, $mainTable); if($guessedItemid) { $guessedItemid = '&Itemid=' . $guessedItemid; } } $seflink = JRoute::_ ( 'index.php?option=' . $targetOption . $targetView . $additionalQueryStringFromObjectProp . $additionalQueryStringParams . $guessedItemid); } echo '<li>' . '<a target="' . $openTarget . '" href="' . $this->liveSite . $seflink . '" >' . $title . '</a></li>'; } echo '</ul></li></ul>'; } }
neoandrew1000/crao_journal
components/com_jmap/views/sitemap/tmpl/default_html_user.php
PHP
gpl-2.0
6,817
[ 30522, 1026, 1029, 25718, 1013, 1008, 1008, 1008, 1030, 7427, 1046, 2863, 2361, 1024, 1024, 2609, 2863, 2361, 1024, 1024, 6177, 1024, 1024, 4012, 1035, 1046, 2863, 2361, 1008, 1030, 4942, 23947, 4270, 5328, 1008, 1030, 4942, 23947, 4270, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
#!/bin/bash # + # Convenient shortcuts. # - alias ..='cd ..; pwd' # cd .. alias ...='cd ../..; pwd' # cd ../.. alias cdvw='cd "$VW_DIR"; pwd' # cd vw dir # one liners chcount () { "$VW_DIR/tools/chcount.py" "$@" | pr -4t ; } # character count cpo () { cp "$@" "$OLDPWD" ; } # copy to $OLDPWD findext() { find . -name "*.$1" -print ; } # find by extension h() { fc -l $* ; } # history llt() { ls -lgo -t "$@" | head ; } # ls latest lsc() { ls -bC $* ; } # printable chars mo() { less -c $* ; } # less -c r() { fc -s $* ; } # redo root() { sudo bash ; } # be admin t() { cat $* ; } # cat vimrc() { vi ~/.vimrc ; } # edit .vimrc don () # do something a number of times { # + # For example, use `don 3 echo` to get 3 blank lines. Default # repetition is `3` and default command is `echo` so acutually, # just `don` does the same. # - local n=3 ((1$1 > 10)) &> /dev/null && n=$1 && shift while (($n > 0)) do ${*:-echo} let n=n-1 done } xv () # trace execution of bash script or function { # print separation don 5 # set verbosity and trap restoration test -f $1 && bash -xv "$@" && return set -xv "$@" set +xv } textbelt() # text phone using textbelt { local TB_=/tmp/textbelt$$ local REPLY=$(num textbelt | sed -e 's/ .*//') test -z "$REPLY" && read -p 'phone number? ' curl http://textbelt.com/text -d number=$REPLY -d message="$*" &> $TB_ grep -q 'success.:true' $TB_ && num -a $REPLY textbelt grep success $TB_ rm $TB_ } ea() # echo all { # + # Actually echoes just as many file names as will fit on one line. # Good for getting a quick idea of the file population of a folder # without spamming your screen. Prints `+nn` to show number of # files that were not listed. # - local EATMP=/tmp/ea.$$ MAXCHAR=${COLUMNS:-80} let MAXCHAR=$MAXCHAR-6 test "$*" || \ls -b > $EATMP.1 test "$*" && \ls -bd "$@" > $EATMP.1 2> /dev/null test -s $EATMP.1 && ( head -c $MAXCHAR $EATMP.1 > $EATMP if ! 
cmp -s $EATMP $EATMP.1 ; then # remove last (probably incomplete) file name sed '$d' $EATMP > $EATMP.2 # + number of unlisted files echo $(cat $EATMP.2) +$(comm -23 $EATMP.[12] | wc -l) > $EATMP fi echo $(cat $EATMP) ) rm -f $EATMP* } num() # phone numbers { local NUMS="$VW_DIR/tools/data/num.db" case $1 in -a) shift # append new info grep -v "$*" "$NUMS" > "$NUMS".tmp echo $* >> "$NUMS".tmp mv "$NUMS".tmp "$NUMS" ;; -e) vi "$NUMS" # edit db ;; *) grep -i "$1" "$NUMS" # search db ;; esac } fm() # fm with history and sceen width { history -a HISTFILE=$HISTFILE COLUMNS=$COLUMNS fm.py "$@" }
evanvliet/vw
base/ea.sh
Shell
mit
2,813
[ 30522, 1001, 999, 1013, 8026, 1013, 24234, 1001, 1009, 1001, 14057, 2460, 12690, 2015, 1012, 1001, 1011, 14593, 1012, 1012, 1027, 1005, 3729, 1012, 1012, 1025, 1052, 21724, 1005, 1001, 3729, 1012, 1012, 14593, 1012, 1012, 1012, 1027, 1005, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
package net.minecraft.block; import cpw.mods.fml.relauncher.Side; import cpw.mods.fml.relauncher.SideOnly; import java.util.List; import java.util.Random; import net.minecraft.block.Block; import net.minecraft.block.material.Material; import net.minecraft.entity.Entity; import net.minecraft.util.AxisAlignedBB; import net.minecraft.util.Facing; import net.minecraft.world.IBlockAccess; import net.minecraft.world.World; public abstract class BlockHalfSlab extends Block { protected final boolean field_72242_a; public BlockHalfSlab(int p_i2208_1_, boolean p_i2208_2_, Material p_i2208_3_) { super(p_i2208_1_, p_i2208_3_); this.field_72242_a = p_i2208_2_; if(p_i2208_2_) { field_71970_n[p_i2208_1_] = true; } else { this.func_71905_a(0.0F, 0.0F, 0.0F, 1.0F, 0.5F, 1.0F); } this.func_71868_h(255); } public void func_71902_a(IBlockAccess p_71902_1_, int p_71902_2_, int p_71902_3_, int p_71902_4_) { if(this.field_72242_a) { this.func_71905_a(0.0F, 0.0F, 0.0F, 1.0F, 1.0F, 1.0F); } else { boolean var5 = (p_71902_1_.func_72805_g(p_71902_2_, p_71902_3_, p_71902_4_) & 8) != 0; if(var5) { this.func_71905_a(0.0F, 0.5F, 0.0F, 1.0F, 1.0F, 1.0F); } else { this.func_71905_a(0.0F, 0.0F, 0.0F, 1.0F, 0.5F, 1.0F); } } } public void func_71919_f() { if(this.field_72242_a) { this.func_71905_a(0.0F, 0.0F, 0.0F, 1.0F, 1.0F, 1.0F); } else { this.func_71905_a(0.0F, 0.0F, 0.0F, 1.0F, 0.5F, 1.0F); } } public void func_71871_a(World p_71871_1_, int p_71871_2_, int p_71871_3_, int p_71871_4_, AxisAlignedBB p_71871_5_, List p_71871_6_, Entity p_71871_7_) { this.func_71902_a(p_71871_1_, p_71871_2_, p_71871_3_, p_71871_4_); super.func_71871_a(p_71871_1_, p_71871_2_, p_71871_3_, p_71871_4_, p_71871_5_, p_71871_6_, p_71871_7_); } public boolean func_71926_d() { return this.field_72242_a; } public int func_85104_a(World p_85104_1_, int p_85104_2_, int p_85104_3_, int p_85104_4_, int p_85104_5_, float p_85104_6_, float p_85104_7_, float p_85104_8_, int p_85104_9_) { return 
this.field_72242_a?p_85104_9_:(p_85104_5_ != 0 && (p_85104_5_ == 1 || (double)p_85104_7_ <= 0.5D)?p_85104_9_:p_85104_9_ | 8); } public int func_71925_a(Random p_71925_1_) { return this.field_72242_a?2:1; } public int func_71899_b(int p_71899_1_) { return p_71899_1_ & 7; } public boolean func_71886_c() { return this.field_72242_a; } @SideOnly(Side.CLIENT) public boolean func_71877_c(IBlockAccess p_71877_1_, int p_71877_2_, int p_71877_3_, int p_71877_4_, int p_71877_5_) { if(this.field_72242_a) { return super.func_71877_c(p_71877_1_, p_71877_2_, p_71877_3_, p_71877_4_, p_71877_5_); } else if(p_71877_5_ != 1 && p_71877_5_ != 0 && !super.func_71877_c(p_71877_1_, p_71877_2_, p_71877_3_, p_71877_4_, p_71877_5_)) { return false; } else { int var6 = p_71877_2_ + Facing.field_71586_b[Facing.field_71588_a[p_71877_5_]]; int var7 = p_71877_3_ + Facing.field_71587_c[Facing.field_71588_a[p_71877_5_]]; int var8 = p_71877_4_ + Facing.field_71585_d[Facing.field_71588_a[p_71877_5_]]; boolean var9 = (p_71877_1_.func_72805_g(var6, var7, var8) & 8) != 0; return var9?(p_71877_5_ == 0?true:(p_71877_5_ == 1 && super.func_71877_c(p_71877_1_, p_71877_2_, p_71877_3_, p_71877_4_, p_71877_5_)?true:!func_72241_e(p_71877_1_.func_72798_a(p_71877_2_, p_71877_3_, p_71877_4_)) || (p_71877_1_.func_72805_g(p_71877_2_, p_71877_3_, p_71877_4_) & 8) == 0)):(p_71877_5_ == 1?true:(p_71877_5_ == 0 && super.func_71877_c(p_71877_1_, p_71877_2_, p_71877_3_, p_71877_4_, p_71877_5_)?true:!func_72241_e(p_71877_1_.func_72798_a(p_71877_2_, p_71877_3_, p_71877_4_)) || (p_71877_1_.func_72805_g(p_71877_2_, p_71877_3_, p_71877_4_) & 8) != 0)); } } @SideOnly(Side.CLIENT) private static boolean func_72241_e(int p_72241_0_) { return p_72241_0_ == Block.field_72079_ak.field_71990_ca || p_72241_0_ == Block.field_72092_bO.field_71990_ca; } public abstract String func_72240_d(int var1); public int func_71873_h(World p_71873_1_, int p_71873_2_, int p_71873_3_, int p_71873_4_) { return super.func_71873_h(p_71873_1_, 
p_71873_2_, p_71873_3_, p_71873_4_) & 7; } @SideOnly(Side.CLIENT) public int func_71922_a(World p_71922_1_, int p_71922_2_, int p_71922_3_, int p_71922_4_) { return func_72241_e(this.field_71990_ca)?this.field_71990_ca:(this.field_71990_ca == Block.field_72085_aj.field_71990_ca?Block.field_72079_ak.field_71990_ca:(this.field_71990_ca == Block.field_72090_bN.field_71990_ca?Block.field_72092_bO.field_71990_ca:Block.field_72079_ak.field_71990_ca)); } }
HATB0T/RuneCraftery
forge/mcp/temp/src/minecraft/net/minecraft/block/BlockHalfSlab.java
Java
lgpl-3.0
4,843
[ 30522, 7427, 5658, 1012, 3067, 10419, 1012, 3796, 1025, 12324, 18133, 2860, 1012, 16913, 2015, 1012, 4718, 2140, 1012, 2128, 17298, 26091, 2099, 1012, 2217, 1025, 12324, 18133, 2860, 1012, 16913, 2015, 1012, 4718, 2140, 1012, 2128, 17298, 2...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<!DOCTYPE html> <html> <head> <meta charset="utf-8"> <base data-ice="baseUrl" href="../../../"> <title data-ice="title">CondaExecutable | API Document</title> <link type="text/css" rel="stylesheet" href="css/style.css"> <link type="text/css" rel="stylesheet" href="css/prettify-tomorrow.css"> <script src="script/prettify/prettify.js"></script> <script src="script/manual.js"></script> </head> <body class="layout-container" data-ice="rootContainer"> <header> <a href="./">Home</a> <a href="identifiers.html">Reference</a> <a href="source.html">Source</a> <a data-ice="repoURL" href="https://github.com/jsoma/mcpyver" class="repo-url-github">Repository</a> <div class="search-box"> <span> <img src="./image/search.png"> <span class="search-input-edge"></span><input class="search-input"><span class="search-input-edge"></span> </span> <ul class="search-result"></ul> </div> </header> <nav class="navigation" data-ice="nav"><div> <ul> <li data-ice="doc"><span data-ice="kind" class="kind-function">F</span><span data-ice="name"><span><a href="function/index.html#static-function-clear">clear</a></span></span></li> <li data-ice="doc"><span data-ice="kind" class="kind-function">F</span><span data-ice="name"><span><a href="function/index.html#static-function-exec">exec</a></span></span></li> <li data-ice="doc"><span data-ice="kind" class="kind-function">F</span><span data-ice="name"><span><a href="function/index.html#static-function-getConda">getConda</a></span></span></li> <li data-ice="doc"><span data-ice="kind" class="kind-function">F</span><span data-ice="name"><span><a href="function/index.html#static-function-getJupyter">getJupyter</a></span></span></li> <li data-ice="doc"><span data-ice="kind" class="kind-function">F</span><span data-ice="name"><span><a href="function/index.html#static-function-getJupyterList">getJupyterList</a></span></span></li> <li data-ice="doc"><span data-ice="kind" class="kind-function">F</span><span data-ice="name"><span><a 
href="function/index.html#static-function-getPipList">getPipList</a></span></span></li> <li data-ice="doc"><span data-ice="kind" class="kind-function">F</span><span data-ice="name"><span><a href="function/index.html#static-function-getPythonList">getPythonList</a></span></span></li> <li data-ice="doc"><span data-ice="kind" class="kind-function">F</span><span data-ice="name"><span><a href="function/index.html#static-function-getVirtualEnv">getVirtualEnv</a></span></span></li> <li data-ice="doc"><div data-ice="dirPath" class="nav-dir-path">environments</div><span data-ice="kind" class="kind-class">C</span><span data-ice="name"><span><a href="class/src/environments/condaenv.js~CondaEnv.html">CondaEnv</a></span></span></li> <li data-ice="doc"><span data-ice="kind" class="kind-class">C</span><span data-ice="name"><span><a href="class/src/environments/environment.js~Environment.html">Environment</a></span></span></li> <li data-ice="doc"><span data-ice="kind" class="kind-class">C</span><span data-ice="name"><span><a href="class/src/environments/virtualenv.js~VirtualEnv.html">VirtualEnv</a></span></span></li> <li data-ice="doc"><div data-ice="dirPath" class="nav-dir-path">executables</div><span data-ice="kind" class="kind-class">C</span><span data-ice="name"><span><a href="class/src/executables/conda.js~CondaExecutable.html">CondaExecutable</a></span></span></li> <li data-ice="doc"><span data-ice="kind" class="kind-class">C</span><span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html">Executable</a></span></span></li> <li data-ice="doc"><span data-ice="kind" class="kind-class">C</span><span data-ice="name"><span><a href="class/src/executables/executable_collection.js~ExecutableCollection.html">ExecutableCollection</a></span></span></li> <li data-ice="doc"><span data-ice="kind" class="kind-class">C</span><span data-ice="name"><span><a href="class/src/executables/jupyter.js~JupyterExecutable.html">JupyterExecutable</a></span></span></li> <li 
data-ice="doc"><span data-ice="kind" class="kind-class">C</span><span data-ice="name"><span><a href="class/src/executables/pip.js~PipExecutable.html">PipExecutable</a></span></span></li> <li data-ice="doc"><span data-ice="kind" class="kind-class">C</span><span data-ice="name"><span><a href="class/src/executables/python.js~PythonExecutable.html">PythonExecutable</a></span></span></li> <li data-ice="doc"><span data-ice="kind" class="kind-class">C</span><span data-ice="name"><span><a href="class/src/executables/virtualenv.js~VirtualEnvExecutable.html">VirtualEnvExecutable</a></span></span></li> </ul> </div> </nav> <div class="content" data-ice="content"><div class="header-notice"> <div data-ice="importPath" class="import-path"><pre class="prettyprint"><code data-ice="importPathCode">import CondaExecutable from &apos;<span><a href="file/src/executables/conda.js.html#lineNumber5">mcpyver/src/executables/conda.js</a></span>&apos;</code></pre></div> <span data-ice="access">public</span> <span data-ice="kind">class</span> <span data-ice="source">| <span><a href="file/src/executables/conda.js.html#lineNumber5">source</a></span></span> </div> <div class="self-detail detail"> <h1 data-ice="name">CondaExecutable</h1> <div class="flat-list" data-ice="extendsChain"><h4>Extends:</h4><div><span><a href="class/src/executables/executable.js~Executable.html">Executable</a></span> &#x2192; CondaExecutable</div></div> </div> <div data-ice="memberSummary"><h2>Member Summary</h2><table class="summary" data-ice="summary"> <thead><tr><td data-ice="title" colspan="3">Public Members</td></tr></thead> <tbody> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/conda.js~CondaExecutable.html#instance-member-details">details</a></span></span><span data-ice="signature">: <span>*</span></span> </p> </div> <div> </div> </td> <td> </td> </tr> 
<tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/conda.js~CondaExecutable.html#instance-member-environments">environments</a></span></span><span data-ice="signature">: <span>*</span></span> </p> </div> <div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/conda.js~CondaExecutable.html#instance-member-version">version</a></span></span><span data-ice="signature">: <span>*</span></span> </p> </div> <div> </div> </td> <td> </td> </tr> </tbody> </table> </div> <div data-ice="methodSummary"><h2>Method Summary</h2><table class="summary" data-ice="summary"> <thead><tr><td data-ice="title" colspan="3">Public Methods</td></tr></thead> <tbody> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/conda.js~CondaExecutable.html#instance-method-cleanVersion">cleanVersion</a></span></span><span data-ice="signature">()</span> </p> </div> <div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/conda.js~CondaExecutable.html#instance-method-getDetails">getDetails</a></span></span><span data-ice="signature">(): <span>*</span></span> </p> </div> <div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a 
href="class/src/executables/conda.js~CondaExecutable.html#instance-method-populateEnvironments">populateEnvironments</a></span></span><span data-ice="signature">(): <span>*</span></span> </p> </div> <div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/conda.js~CondaExecutable.html#instance-method-setDetails">setDetails</a></span></span><span data-ice="signature">(): <span>*</span></span> </p> </div> <div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/conda.js~CondaExecutable.html#instance-method-setEnvironments">setEnvironments</a></span></span><span data-ice="signature">(): <span>*</span></span> </p> </div> <div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/conda.js~CondaExecutable.html#instance-method-setExtras">setExtras</a></span></span><span data-ice="signature">(): <span>*</span></span> </p> </div> <div> </div> </td> <td> </td> </tr> </tbody> </table> </div> <div class="inherited-summary" data-ice="inheritedSummary"><h2>Inherited Summary</h2><table class="summary" data-ice="summary"> <thead><tr><td data-ice="title" colspan="3"><span class="toggle closed"></span> From class <span><a href="class/src/executables/executable.js~Executable.html">Executable</a></span></td></tr></thead> <tbody> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span data-ice="static">static</span> <span class="kind" data-ice="kind">get</span> <span class="override" 
data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#static-get-searchPaths">searchPaths</a></span></span><span data-ice="signature">: <span><span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span><span>[]</span></span></span> </p> </div> <div> <div data-ice="description"><p>Paths to manually search in for executables</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span data-ice="static">static</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#static-method-findAll">findAll</a></span></span><span data-ice="signature">(command: <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span> | <span><span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span><span>[]</span></span>): <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise">Promise</a></span>&lt;<span><a href="class/src/executables/executable_collection.js~ExecutableCollection.html">ExecutableCollection</a></span>&gt;</span> </p> </div> <div> <div data-ice="description"><p>Given a command name or list of commands, returns an ExecutableCollection of all the executables with that name your computer might know about Looks in the path as well as looking in common paths.</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span data-ice="static">static</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a 
href="class/src/executables/executable.js~Executable.html#static-method-findAllWithoutMerge">findAllWithoutMerge</a></span></span><span data-ice="signature">(command: <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span>): <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise">Promise</a></span>&lt;<span><a href="class/src/executables/executable_collection.js~ExecutableCollection.html">ExecutableCollection</a></span>&gt;</span> </p> </div> <div> <div data-ice="description"><p>Given a command name or list of commands, returns an ExecutableCollection of all the executables with that name your computer might know about Looks in the path as well as looking in common paths.</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span data-ice="static">static</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#static-method-findByPaths">findByPaths</a></span></span><span data-ice="signature">(command: <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span>): <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise">Promise</a></span>&lt;<span><a href="class/src/executables/executable_collection.js~ExecutableCollection.html">ExecutableCollection</a></span>&gt;</span> </p> </div> <div> <div data-ice="description"><p>Manually searches paths to find executables with a given name</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span data-ice="static">static</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a 
href="class/src/executables/executable.js~Executable.html#static-method-findByWhich">findByWhich</a></span></span><span data-ice="signature">(command: <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span>): <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise">Promise</a></span>&lt;<span><a href="class/src/executables/executable_collection.js~ExecutableCollection.html">ExecutableCollection</a></span>&gt;</span> </p> </div> <div> <div data-ice="description"><p>Uses which to find all of the paths for a given command</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span data-ice="static">static</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#static-method-findOne">findOne</a></span></span><span data-ice="signature">(command: <span>*</span>): <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise">Promise</a></span>&lt;<span><a href="class/src/executables/executable.js~Executable.html">Executable</a></span>&gt;</span> </p> </div> <div> <div data-ice="description"><p>Given a command name, creates an Executable from the executable file that would have been run had you typed the command in (e.g.</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="kind" data-ice="kind">get</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-get-mergeField">mergeField</a></span></span><span data-ice="signature">: <span><a 
href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span>: <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span></span> </p> </div> <div> <div data-ice="description"><p>When merging an ExecutableCollection, this is what you group the executables by.</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="kind" data-ice="kind">get</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-get-path">path</a></span></span><span data-ice="signature">: <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span>: <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span></span> </p> </div> <div> <div data-ice="description"><p>When you&apos;re looking for a path, but don&apos;t necessarily care if it&apos;s the symlinked one or the non-symlinked on?</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="kind" data-ice="kind">get</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-get-realpath">realpath</a></span></span><span data-ice="signature">: <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span>: <span>*</span></span> </p> </div> <div> <div data-ice="description"><p>The path of the executable, or the target of a symlinked executable</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span 
class="kind" data-ice="kind">set</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-set-realpath">realpath</a></span></span><span data-ice="signature">(the: <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span>): <span>*</span></span> </p> </div> <div> <div data-ice="description"><p>Set the realpath</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-member-atime">atime</a></span></span><span data-ice="signature">: <span>*</span></span> </p> </div> <div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-member-ctime">ctime</a></span></span><span data-ice="signature">: <span>*</span></span> </p> </div> <div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-member-defaultCommands">defaultCommands</a></span></span><span data-ice="signature">: <span><span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span><span>[]</span></span></span> </p> </div> <div> <div data-ice="description"><p>The commands for which this executable is first in line to run e.g., running which returns it</p> </div> </div> </td> <td> </td> </tr> <tr 
data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-member-errors">errors</a></span></span><span data-ice="signature">: <span><span>*</span><span>[]</span></span></span> </p> </div> <div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-member-isDefault">isDefault</a></span></span><span data-ice="signature">: <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Boolean">boolean</a></span></span> </p> </div> <div> <div data-ice="description"><p>Is this executable the default for any commands?</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-member-mtime">mtime</a></span></span><span data-ice="signature">: <span>*</span></span> </p> </div> <div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-member-paths">paths</a></span></span><span data-ice="signature">: <span><span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span><span>[]</span></span></span> </p> </div> <div> <div data-ice="description"><p>Any paths that you can find this executable at (symlinked or otherwise)</p> 
</div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-member-rawVersion">rawVersion</a></span></span><span data-ice="signature">: <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span></span> </p> </div> <div> <div data-ice="description"><p>The raw output from stdout/stderr of --version</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-member-version">version</a></span></span><span data-ice="signature">: <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span></span> </p> </div> <div> <div data-ice="description"><p>The version number of the program, typically cleaned up in a subclass</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-method-addCommand">addCommand</a></span></span><span data-ice="signature">(command: <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span>)</span> </p> </div> <div> <div data-ice="description"><p>Add a command that this executable is first in line for, e.g.</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> 
<div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-method-addError">addError</a></span></span><span data-ice="signature">(error: <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error">Error</a></span>)</span> </p> </div> <div> <div data-ice="description"><p>Take any rescued error and attach it to the object, e.g.</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-method-addPath">addPath</a></span></span><span data-ice="signature">(path: <span>*</span>)</span> </p> </div> <div> <div data-ice="description"><p>Adds a known path to this executable (e.g.</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-method-cleanVersion">cleanVersion</a></span></span><span data-ice="signature">()</span> </p> </div> <div> <div data-ice="description"><p>Clean up the rawVersion, pulling out the actual version number</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-method-populate">populate</a></span></span><span data-ice="signature">(): <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise">Promise</a></span></span> </p> </div> <div> <div data-ice="description"><p>Fills in all of the details of the executable</p> </div> 
</div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-method-requestVersion">requestVersion</a></span></span><span data-ice="signature">(): <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise">Promise</a></span>&lt;<span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span>&gt;</span> </p> </div> <div> <div data-ice="description"><p>Gets the version of the executable by shelling out and running --version</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="abstract" data-ice="abstract">abstract</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-method-setExtras">setExtras</a></span></span><span data-ice="signature">(): <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise">Promise</a></span></span> </p> </div> <div> <div data-ice="description"><p>Subclasses that need extra details (lists of packages, etc) override this method</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-method-setRawVersion">setRawVersion</a></span></span><span data-ice="signature">(version: <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String">string</a></span>)</span> </p> </div> <div> <div 
data-ice="description"><p>Given a version-y string, do slight cleanup and set the executable&apos;s rawVersion. Typically comes from stdout/stderr.</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-method-setStats">setStats</a></span></span><span data-ice="signature">(): <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise">Promise</a></span></span> </p> </div> <div> <div data-ice="description"><p>Query for the executable file&apos;s creation/modification/access time and save it to the object</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-method-setVersion">setVersion</a></span></span><span data-ice="signature">(): <span><a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise">Promise</a></span></span> </p> </div> <div> <div data-ice="description"><p>Query for and set the rawVersion and version</p> </div> </div> </td> <td> </td> </tr> <tr data-ice="target"> <td> <span class="access" data-ice="access">public</span> <span class="override" data-ice="override"></span> </td> <td> <div> <p> <span data-ice="name"><span><a href="class/src/executables/executable.js~Executable.html#instance-method-toJSON">toJSON</a></span></span><span data-ice="signature">(): <span>*</span></span> </p> </div> <div> <div data-ice="description"><p>Converts the executable&apos;s data to a JSON-friendly object it&apos;s mostly so we can rename _realpath to realpath</p> </div> </div> </td> <td> </td> </tr> </tbody> </table> 
</div> <div data-ice="memberDetails"><h2 data-ice="title">Public Members</h2> <div class="detail" data-ice="detail"> <h3 data-ice="anchor" id="instance-member-details"> <span class="access" data-ice="access">public</span> <span data-ice="name">details</span><span data-ice="signature">: <span>*</span></span> <span class="right-info"> <span data-ice="source"><span><a href="file/src/executables/conda.js.html#lineNumber41">source</a></span></span> </span> </h3> <div data-ice="properties"> </div> </div> <div class="detail" data-ice="detail"> <h3 data-ice="anchor" id="instance-member-environments"> <span class="access" data-ice="access">public</span> <span data-ice="name">environments</span><span data-ice="signature">: <span>*</span></span> <span class="right-info"> <span data-ice="source"><span><a href="file/src/executables/conda.js.html#lineNumber16">source</a></span></span> </span> </h3> <div data-ice="properties"> </div> </div> <div class="detail" data-ice="detail"> <h3 data-ice="anchor" id="instance-member-version"> <span class="access" data-ice="access">public</span> <span data-ice="name">version</span><span data-ice="signature">: <span>*</span></span> <span class="right-info"> <span data-ice="source"><span><a href="file/src/executables/conda.js.html#lineNumber47">source</a></span></span> </span> </h3> <div data-ice="description"><p>The version number of the program, typically cleaned up in a subclass</p> </div> <div data-ice="override"><h4>Override:</h4><span><a href="class/src/executables/executable.js~Executable.html#instance-member-version">Executable#version</a></span></div> <div data-ice="properties"> </div> </div> </div> <div data-ice="methodDetails"><h2 data-ice="title">Public Methods</h2> <div class="detail" data-ice="detail"> <h3 data-ice="anchor" id="instance-method-cleanVersion"> <span class="access" data-ice="access">public</span> <span data-ice="name">cleanVersion</span><span data-ice="signature">()</span> <span class="right-info"> <span 
data-ice="source"><span><a href="file/src/executables/conda.js.html#lineNumber46">source</a></span></span> </span> </h3> <div data-ice="description"><p>Clean up the rawVersion, pulling out the actual version number</p> </div> <div data-ice="override"><h4>Override:</h4><span><a href="class/src/executables/executable.js~Executable.html#instance-method-cleanVersion">Executable#cleanVersion</a></span></div> <div data-ice="properties"> </div> </div> <div class="detail" data-ice="detail"> <h3 data-ice="anchor" id="instance-method-getDetails"> <span class="access" data-ice="access">public</span> <span data-ice="name">getDetails</span><span data-ice="signature">(): <span>*</span></span> <span class="right-info"> <span data-ice="source"><span><a href="file/src/executables/conda.js.html#lineNumber26">source</a></span></span> </span> </h3> <div data-ice="properties"> </div> <div class="return-params" data-ice="returnParams"> <h4>Return:</h4> <table> <tbody> <tr> <td class="return-type" data-ice="returnType"><span>*</span></td> </tr> </tbody> </table> <div data-ice="returnProperties"> </div> </div> </div> <div class="detail" data-ice="detail"> <h3 data-ice="anchor" id="instance-method-populateEnvironments"> <span class="access" data-ice="access">public</span> <span data-ice="name">populateEnvironments</span><span data-ice="signature">(): <span>*</span></span> <span class="right-info"> <span data-ice="source"><span><a href="file/src/executables/conda.js.html#lineNumber21">source</a></span></span> </span> </h3> <div data-ice="properties"> </div> <div class="return-params" data-ice="returnParams"> <h4>Return:</h4> <table> <tbody> <tr> <td class="return-type" data-ice="returnType"><span>*</span></td> </tr> </tbody> </table> <div data-ice="returnProperties"> </div> </div> </div> <div class="detail" data-ice="detail"> <h3 data-ice="anchor" id="instance-method-setDetails"> <span class="access" data-ice="access">public</span> <span data-ice="name">setDetails</span><span 
data-ice="signature">(): <span>*</span></span> <span class="right-info"> <span data-ice="source"><span><a href="file/src/executables/conda.js.html#lineNumber38">source</a></span></span> </span> </h3> <div data-ice="properties"> </div> <div class="return-params" data-ice="returnParams"> <h4>Return:</h4> <table> <tbody> <tr> <td class="return-type" data-ice="returnType"><span>*</span></td> </tr> </tbody> </table> <div data-ice="returnProperties"> </div> </div> </div> <div class="detail" data-ice="detail"> <h3 data-ice="anchor" id="instance-method-setEnvironments"> <span class="access" data-ice="access">public</span> <span data-ice="name">setEnvironments</span><span data-ice="signature">(): <span>*</span></span> <span class="right-info"> <span data-ice="source"><span><a href="file/src/executables/conda.js.html#lineNumber14">source</a></span></span> </span> </h3> <div data-ice="properties"> </div> <div class="return-params" data-ice="returnParams"> <h4>Return:</h4> <table> <tbody> <tr> <td class="return-type" data-ice="returnType"><span>*</span></td> </tr> </tbody> </table> <div data-ice="returnProperties"> </div> </div> </div> <div class="detail" data-ice="detail"> <h3 data-ice="anchor" id="instance-method-setExtras"> <span class="access" data-ice="access">public</span> <span data-ice="name">setExtras</span><span data-ice="signature">(): <span>*</span></span> <span class="right-info"> <span data-ice="source"><span><a href="file/src/executables/conda.js.html#lineNumber7">source</a></span></span> </span> </h3> <div data-ice="description"><p>Subclasses that need extra details (lists of packages, etc) override this method</p> </div> <div data-ice="override"><h4>Override:</h4><span><a href="class/src/executables/executable.js~Executable.html#instance-method-setExtras">Executable#setExtras</a></span></div> <div data-ice="properties"> </div> <div class="return-params" data-ice="returnParams"> <h4>Return:</h4> <table> <tbody> <tr> <td class="return-type" 
data-ice="returnType"><span>*</span></td> </tr> </tbody> </table> <div data-ice="returnProperties"> </div> </div> </div> </div> </div> <footer class="footer"> Generated by <a href="https://esdoc.org">ESDoc<span data-ice="esdocVersion">(0.5.2)</span><img src="./image/esdoc-logo-mini-black.png"></a> </footer> <script src="script/search_index.js"></script> <script src="script/search.js"></script> <script src="script/pretty-print.js"></script> <script src="script/inherited-summary.js"></script> <script src="script/test-summary.js"></script> <script src="script/inner-link.js"></script> <script src="script/patch-for-local.js"></script> </body> </html>
jsoma/mcpyver
dist/esdoc/class/src/executables/conda.js~CondaExecutable.html
HTML
mit
44,514
[ 30522, 1026, 999, 9986, 13874, 16129, 1028, 1026, 16129, 1028, 1026, 2132, 1028, 1026, 18804, 25869, 13462, 1027, 1000, 21183, 2546, 1011, 1022, 1000, 1028, 1026, 2918, 2951, 1011, 3256, 1027, 1000, 2918, 3126, 2140, 1000, 17850, 12879, 102...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
/************************************************************************** * * Copyright 2009 Younes Manton. * Copyright 2011 Christian König. * All Rights Reserved. * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sub license, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice (including the * next paragraph) shall be included in all copies or substantial portions * of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * **************************************************************************/ #ifndef vl_decoder_h #define vl_decoder_h #include "pipe/p_video_codec.h" /** * check if a given profile is supported with shader based decoding */ bool vl_profile_supported(struct pipe_screen *screen, enum pipe_video_profile profile, enum pipe_video_entrypoint entrypoint); /** * get the maximum supported level for the given profile with shader based decoding */ int vl_level_supported(struct pipe_screen *screen, enum pipe_video_profile profile); /** * standard implementation of pipe->create_video_codec */ struct pipe_video_codec * vl_create_decoder(struct pipe_context *pipe, const struct pipe_video_codec *templat); #endif /* vl_decoder_h */
execunix/vinos
xsrc/external/mit/MesaLib/dist/src/gallium/auxiliary/vl/vl_decoder.h
C
apache-2.0
2,061
[ 30522, 1013, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 1008, 100...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
<?php /** * Zym * * LICENSE * * This source file is subject to the new BSD license that is bundled * with this package in the file LICENSE.txt. * * @author Robin Skoglund * @category Zym * @package Zym_Navigation * @copyright Copyright (c) 2008 Zym. (http://www.zym-project.com/) * @license http://www.zym-project.com/license New BSD License */ /** * @see Zend_Config */ require_once 'Zend/Config.php'; /** * Zym_Navigation_Container * * Container class for Zym_Navigation_Page classes. * * @author Robin Skoglund * @category Zym * @package Zym_Navigation * @copyright Copyright (c) 2008 Zym. (http://www.zym-project.com/) * @license http://www.zym-project.com/license New BSD License */ abstract class Zym_Navigation_Container implements RecursiveIterator, Countable { /** * Contains sub pages * * @var array */ protected $_pages = array(); /** * Order in which to display and iterate pages * * @var array */ protected $_order = array(); /** * Whether internal order has been updated * * @var bool */ protected $_orderUpdated = false; /** * Parent container * * @var Zym_Navigation_Container */ protected $_parent = null; // Internal methods: /** * Sort pages according to their given positions * * @return void */ protected function _sort() { if ($this->_orderUpdated) { $newOrder = array(); $index = 0; foreach ($this->_pages as $hash => $page) { $pos = $page->getPosition(); if ($pos === null) { $newOrder[$hash] = $index; $index++; } else { $newOrder[$hash] = $pos; } } asort($newOrder); $this->_order = $newOrder; $this->_orderUpdated = false; } } // Public methods: /** * Notifies container that the order of pages are updated * * @return void */ public function notifyOrderUpdated() { $this->_orderUpdated = true; } /** * Adds a page to the container * * @param Zym_Navigation_Page|array|Zend_Config $page page to add * @return Zym_Navigation_Container * @throws InvalidArgumentException if invalid page is given */ public function addPage($page) { if (is_array($page) || $page instanceof 
Zend_Config) { require_once 'Zym/Navigation/Page.php'; $page = Zym_Navigation_Page::factory($page); } elseif (!$page instanceof Zym_Navigation_Page) { $msg = '$page must be Zym_Navigation_Page|array|Zend_Config'; throw new InvalidArgumentException($msg); } $id = spl_object_hash($page); if (array_key_exists($id, $this->_order)) { return $this; } $this->_pages[$id] = $page; $this->_order[$id] = $page->getPosition(); $this->_orderUpdated = true; $page->setParent($this); return $this; } /** * Adds several pages at once * * @param array|Zend_Config $pages pages to add * @return Zym_Navigation_Container * @throws InvalidArgumentException if $pages is not array or Zend_Config */ public function addPages($pages) { if ($pages instanceof Zend_Config) { $pages = $pages->toArray(); } if (!is_array($pages)) { $msg = '$pages must be an array or a Zend_Config object'; throw new InvalidArgumentException($msg); } foreach ($pages as $page) { $this->addPage($page); } return $this; } /** * Sets pages this container should have, clearing existing ones * * @param array $pages pages to set * @return Zym_Navigation_Container */ public function setPages(array $pages) { $this->removePages(); return $this->addPages($pages); } /** * Removes the given page from the container * * @param int|Zym_Navigation_Page $page page to remove, either * position or instance * @return bool indicating whether the removal was successful */ public function removePage($page) { $this->_sort(); if (is_int($page)) { $hash = array_search($page, $this->_order); } elseif ($page instanceof Zym_Navigation_Page) { $hash = spl_object_hash($page); } else { return false; } if (isset($this->_order[$hash])) { unset($this->_order[$hash]); unset($this->_pages[$hash]); $this->_orderUpdated = true; return true; } return false; } /** * Removes all pages in container * * @return Zym_Navigation_Container_Abstract */ public function removePages() { $this->_pages = array(); $this->_order = array(); return $this; } /** * Checks if the 
container has the given page * * @param Zym_Navigation_Page $page * @param bool $recursive [optional] defaults to false * @return bool */ public function hasPage(Zym_Navigation_Page $page, $recursive = false) { $hash = spl_object_hash($page); if (array_key_exists($hash, $this->_order)) { return true; } elseif ($recursive) { foreach ($this->_pages as $childPage) { if ($childPage->hasPage($page, true)) { return true; } } } return false; } /** * Returns true if container contains any pages * * @return bool */ public function hasPages() { return count($this->_order) > 0; } /** * Sets parent container * * @param Zym_Navigation_Container $parent [optional] new parent to set, * defaults to null which will set * no parent * @return Zym_Navigation_Page */ public function setParent(Zym_Navigation_Container $parent = null) { // return if the given parent already is parent if ($parent === $this->_parent) { return $this; } // remove from old parent if page if (null !== $this->_parent && $this instanceof Zym_Navigation_Page) { $this->_parent->removePage($this); } // set new parent $this->_parent = $parent; // add to parent if page and not already a child if (null !== $this->_parent && $this instanceof Zym_Navigation_Page) { $this->_parent->addPage($this); } return $this; } /** * Returns parent container * * @return Zym_Navigation_Container|null */ public function getParent() { return $this->_parent; } /** * Returns a child page matching $property == $value, or null if not found * * @param string $property name of property to match against * @param mixed $value value to match property against * @return Zym_Navigation_Page|null matching page or null */ public function findOneBy($property, $value) { $iterator = new RecursiveIteratorIterator($this, RecursiveIteratorIterator::SELF_FIRST); foreach ($iterator as $page) { if ($page->get($property) == $value) { return $page; } } return null; } /** * Returns all child pages matching $property == $value, or an empty array * if not found * 
* @param string $property name of property to match against * @param mixed $value value to match property against * @return array containing only Zym_Navigation_Page elements */ public function findAllBy($property, $value) { $found = array(); $iterator = new RecursiveIteratorIterator($this, RecursiveIteratorIterator::SELF_FIRST); foreach ($iterator as $page) { if ($page->get($property) == $value) { $found[] = $page; } } return $found; } /** * Returns page(s) matching $property == $value * * @param string $property name of property to match against * @param mixed $value value to match property against * @param bool $all [optional] whether an array of all matching * pages should be returned, or only the first. * If true, an array will be returned, even if not * matching pages are found. If false, null will be * returned if no matching page is found. Default * is false. */ public function findBy($property, $value, $all = false) { if ($all) { return $this->findAllBy($property, $value); } else { return $this->findOneBy($property, $value); } } /** * Magic overload: Proxy calls to finder methods * * Examples of finder calls: * <code> * // METHOD // SAME AS * $nav->findByLabel('foo'); // $nav->findOneBy('label', 'foo'); * $nav->findOneByLabel('foo'); // $nav->findOneBy('label', 'foo'); * $nav->findAllById('foo'); // $nav->findAllBy('id', 'foo'); * </code> * * @param string $method method name * @param array $arguments method arguments * @throws BadMethodCallException if method does not exist */ public function __call($method, $arguments) { if (@preg_match('/(find(?:One|All)?By)(.+)/', $method, $match)) { return $this->{$match[1]}($match[2], $arguments[0]); } $msg = sprintf('Unknown method %s::%s', get_class($this), $method); throw new BadMethodCallException($msg); } /** * Returns an array representation of all pages in container * * @return array */ public function toArray() { $pages = array(); foreach ($this->_pages as $page) { $pages[] = $page->toArray(); } return 
$pages; } // RecursiveIterator interface: /** * RecursiveIterator: Returns current page * * @return Zym_Navigation_Page * @throws OutOfBoundsException if the index is invalid */ public function current() { $this->_sort(); current($this->_order); $key = key($this->_order); if (isset($this->_pages[$key])) { return $this->_pages[$key]; } else { $msg = 'Corruption detected in container; ' . 'invalid key found in internal iterator'; throw new OutOfBoundsException($msg); } } /** * RecursiveIterator: Returns current page id * * @return string */ public function key() { $this->_sort(); return key($this->_order); } /** * RecursiveIterator: Move pointer to next page in container * * @return void */ public function next() { $this->_sort(); next($this->_order); } /** * RecursiveIterator: Moves pointer to beginning of container * * @return void */ public function rewind() { $this->_sort(); reset($this->_order); } /** * RecursiveIterator: Determines if container is valid * * @return bool */ public function valid() { $this->_sort(); return (current($this->_order) !== false); } /** * RecursiveIterator: Proxy to hasPages() * * @return bool */ public function hasChildren() { return $this->hasPages(); } /** * RecursiveIterator: Returns pages * * @return Zym_Navigation_Page|null */ public function getChildren() { $key = key($this->_order); if (isset($this->_pages[$key])) { return $this->_pages[$key]; } return null; } // Countable interface: /** * Countable: Count of pages that are iterable * * @return int */ public function count() { return count($this->_order); } }
robinsk/zym
library/Zym/Navigation/Container.php
PHP
bsd-3-clause
13,043
[ 30522, 1026, 1029, 25718, 1013, 1008, 1008, 1008, 1062, 24335, 1008, 1008, 6105, 1008, 1008, 2023, 3120, 5371, 2003, 3395, 2000, 1996, 2047, 18667, 2094, 6105, 2008, 2003, 24378, 1008, 2007, 2023, 7427, 1999, 1996, 5371, 6105, 1012, 19067, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...
#ifndef PLAYER_H #define PLAYER_H #include "Vector2d.h" #include "Polygon.h" #include "CollisionController.h" #include <vector> const float WALKING_ACCEL = 0.5; const float MAX_WALKING_SPEED = 11.0; const float FALLING_ACCEL = 1.2; const float MAX_FALLING_SPEED = 20.0; const float JUMPING_SPEED = -25.0; class Player { public: Player(Vector2d, std::vector<Polygon*>*); ~Player(); void control(); void update(); void render(); bool verticalCollision(const Polygon&); bool horizontalCollision(const Polygon&); void setPosition(const Vector2d); void setVelocity(const Vector2d); void setAcceleration(const Vector2d); Vector2d getPosition() const; Vector2d getVelocity() const; Vector2d getAcceleration() const; private: bool grounded; bool groundedLatch; bool jumping; //Position and derivatives for movement. Vector2d m_Position; Vector2d m_Velocity; Vector2d m_Acceleration; //Polygon for rendering. Polygon m_Polygon; std::vector<Polygon*> *m_Platforms; void enableGrounded(); }; #endif
Slabity/Senior-Project-2013
src/Player.h
C
gpl-2.0
1,098
[ 30522, 1001, 2065, 13629, 2546, 2447, 1035, 1044, 1001, 9375, 2447, 1035, 1044, 1001, 2421, 1000, 9207, 2475, 2094, 1012, 1044, 1000, 1001, 2421, 1000, 26572, 7446, 1012, 1044, 1000, 1001, 2421, 1000, 12365, 8663, 13181, 10820, 1012, 1044, ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100...