text
stringlengths
2
1.04M
meta
dict
Changelog --------- Version 0.3.0 ````````````` *In development* - Added support for factory pattern usage. - Added :ref:`differences` section to the documentation. Version 0.2.1 ````````````` - Fixed bug in the HTML build of the documentation that could cause the title to overlap with the text in some cases. Version 0.2.0 ````````````` - Added support for globbing instead of regular expressions, using the `MAKESTATIC_FILEPATTERN_FORMAT` configuration variable. - Increased lowest supported version of Flask to 0.10, which is the first release supporting Python 3.x. Version 0.1.1 ````````````` - Fixed a typo in the documentation. Version 0.1.0 ````````````` Initial release.
{ "content_hash": "6b523a3d117da31f2185f97736bd0405", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 77, "avg_line_length": 20.529411764705884, "alnum_prop": 0.6790830945558739, "repo_name": "DasIch/Flask-MakeStatic", "id": "3cba1634062293af2987bd6f6ec9776b10b0f0e0", "size": "698", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "CHANGELOG.rst", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "84" }, { "name": "Python", "bytes": "43714" }, { "name": "Shell", "bytes": "6721" } ], "symlink_target": "" }
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=US-ASCII"> <title>raw_socket_service::connect</title> <link rel="stylesheet" href="../../../../../doc/src/boostbook.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.76.1"> <link rel="home" href="../../../boost_asio.html" title="Boost.Asio"> <link rel="up" href="../raw_socket_service.html" title="raw_socket_service"> <link rel="prev" href="close.html" title="raw_socket_service::close"> <link rel="next" href="construct.html" title="raw_socket_service::construct"> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr> <td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../boost.png"></td> <td align="center"><a href="../../../../../index.html">Home</a></td> <td align="center"><a href="../../../../../libs/libraries.htm">Libraries</a></td> <td align="center"><a href="http://www.boost.org/users/people.html">People</a></td> <td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td> <td align="center"><a href="../../../../../more/index.htm">More</a></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="close.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../raw_socket_service.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="construct.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a> </div> <div class="section"> <div class="titlepage"><div><div><h4 class="title"> <a name="boost_asio.reference.raw_socket_service.connect"></a><a class="link" href="connect.html" title="raw_socket_service::connect">raw_socket_service::connect</a> </h4></div></div></div> <p> <a class="indexterm" name="idp90980528"></a> 
Connect the raw socket to the specified endpoint. </p> <pre class="programlisting"><span class="identifier">boost</span><span class="special">::</span><span class="identifier">system</span><span class="special">::</span><span class="identifier">error_code</span> <span class="identifier">connect</span><span class="special">(</span> <span class="identifier">implementation_type</span> <span class="special">&amp;</span> <span class="identifier">impl</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">endpoint_type</span> <span class="special">&amp;</span> <span class="identifier">peer_endpoint</span><span class="special">,</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">system</span><span class="special">::</span><span class="identifier">error_code</span> <span class="special">&amp;</span> <span class="identifier">ec</span><span class="special">);</span> </pre> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright &#169; 2003-2015 Christopher M. Kohlhoff<p> Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>) </p> </div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="close.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../raw_socket_service.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="construct.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a> </div> </body> </html>
{ "content_hash": "e16e457cbc7d3e981ae49c16ae419a1a", "timestamp": "", "source": "github", "line_count": 53, "max_line_length": 430, "avg_line_length": 74.9245283018868, "alnum_prop": 0.6426592797783933, "repo_name": "yinchunlong/abelkhan-1", "id": "da6b6ad00c9843d68317593cb3c54c1481931b52", "size": "3971", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "ext/c++/thirdpart/c++/boost/libs/asio/doc/html/boost_asio/reference/raw_socket_service/connect.html", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "118649" }, { "name": "Assembly", "bytes": "223360" }, { "name": "Batchfile", "bytes": "32410" }, { "name": "C", "bytes": "2956993" }, { "name": "C#", "bytes": "219949" }, { "name": "C++", "bytes": "184617089" }, { "name": "CMake", "bytes": "125437" }, { "name": "CSS", "bytes": "427629" }, { "name": "Cuda", "bytes": "52444" }, { "name": "DIGITAL Command Language", "bytes": "6246" }, { "name": "FORTRAN", "bytes": "1856" }, { "name": "Groff", "bytes": "5189" }, { "name": "HTML", "bytes": "234939732" }, { "name": "IDL", "bytes": "14" }, { "name": "JavaScript", "bytes": "682223" }, { "name": "Lex", "bytes": "1231" }, { "name": "M4", "bytes": "29689" }, { "name": "Makefile", "bytes": "1083341" }, { "name": "Max", "bytes": "36857" }, { "name": "Objective-C", "bytes": "11406" }, { "name": "Objective-C++", "bytes": "630" }, { "name": "PHP", "bytes": "59030" }, { "name": "Perl", "bytes": "38649" }, { "name": "Perl6", "bytes": "2053" }, { "name": "Python", "bytes": "1780184" }, { "name": "QML", "bytes": "593" }, { "name": "QMake", "bytes": "16692" }, { "name": "Rebol", "bytes": "354" }, { "name": "Ruby", "bytes": "5532" }, { "name": "Shell", "bytes": "354720" }, { "name": "Tcl", "bytes": "1172" }, { "name": "TeX", "bytes": "32117" }, { "name": "XSLT", "bytes": "552736" }, { "name": "Yacc", "bytes": "19623" } ], "symlink_target": "" }
package org.drools.testcoverage.regression; public class NumberRestriction { private Number value; public void setValue(Number number) { this.value = number; } public boolean isInt() { return value instanceof Integer; } public Number getValue() { return value; } public String getValueType() { return value.getClass().getName(); } }
{ "content_hash": "33fe8534763ae7afd3bbdce37b1412b5", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 43, "avg_line_length": 17.73913043478261, "alnum_prop": 0.6274509803921569, "repo_name": "lanceleverich/drools", "id": "9c8d3539951d93de37c5bb43412b94dba60ff8d1", "size": "1028", "binary": false, "copies": "15", "ref": "refs/heads/master", "path": "drools-test-coverage/test-suite/src/test/java/org/drools/testcoverage/regression/NumberRestriction.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "15980" }, { "name": "Batchfile", "bytes": "2554" }, { "name": "CSS", "bytes": "1412" }, { "name": "GAP", "bytes": "197299" }, { "name": "HTML", "bytes": "6163" }, { "name": "Java", "bytes": "35717579" }, { "name": "Python", "bytes": "4555" }, { "name": "Ruby", "bytes": "491" }, { "name": "Shell", "bytes": "1120" }, { "name": "XSLT", "bytes": "24302" } ], "symlink_target": "" }
/* eslint-disable */ 'use strict'; /*:: import type { ReaderFragment } from 'relay-runtime'; import type { FragmentReference } from "relay-runtime"; declare export opaque type RelayConcreteVariablesTest8Fragment$ref: FragmentReference; declare export opaque type RelayConcreteVariablesTest8Fragment$fragmentType: RelayConcreteVariablesTest8Fragment$ref; export type RelayConcreteVariablesTest8Fragment = {| +profilePicture: ?{| +uri: ?string, |}, +$refType: RelayConcreteVariablesTest8Fragment$ref, |}; export type RelayConcreteVariablesTest8Fragment$data = RelayConcreteVariablesTest8Fragment; export type RelayConcreteVariablesTest8Fragment$key = { +$data?: RelayConcreteVariablesTest8Fragment$data, +$fragmentRefs: RelayConcreteVariablesTest8Fragment$ref, ... }; */ var node/*: ReaderFragment*/ = { "argumentDefinitions": [ { "defaultValue": 42, "kind": "LocalArgument", "name": "size" } ], "kind": "Fragment", "metadata": null, "name": "RelayConcreteVariablesTest8Fragment", "selections": [ { "alias": null, "args": [ { "kind": "Variable", "name": "size", "variableName": "size" } ], "concreteType": "Image", "kind": "LinkedField", "name": "profilePicture", "plural": false, "selections": [ { "alias": null, "args": null, "kind": "ScalarField", "name": "uri", "storageKey": null } ], "storageKey": null } ], "type": "User", "abstractKey": null }; if (__DEV__) { (node/*: any*/).hash = "5ff9a88277cf3070772076b880c3b3c6"; } module.exports = node;
{ "content_hash": "2e73fb302315da56db5a6c03d430a297", "timestamp": "", "source": "github", "line_count": 71, "max_line_length": 117, "avg_line_length": 23.929577464788732, "alnum_prop": 0.6215420835785757, "repo_name": "yungsters/relay", "id": "c4d17aa5c21b1a7b05b39e23417626d158979fc3", "size": "1999", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "packages/relay-runtime/store/__tests__/__generated__/RelayConcreteVariablesTest8Fragment.graphql.js", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "21802" }, { "name": "HTML", "bytes": "308" }, { "name": "JavaScript", "bytes": "2070265" }, { "name": "Shell", "bytes": "396" } ], "symlink_target": "" }
// Package install installs the experimental API group, making it available as // an option to all of the API encoding/decoding machinery. package install import ( "k8s.io/apimachinery/pkg/apimachinery/announced" "k8s.io/apimachinery/pkg/apimachinery/registered" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/util/sets" "k8s.io/kubernetes/pkg/api" "k8s.io/kubernetes/pkg/apis/authentication" "k8s.io/kubernetes/pkg/apis/authentication/v1beta1" ) func init() { Install(api.Registry, api.Scheme) } // Install registers the API group and adds types to a scheme func Install(registry *registered.APIRegistrationManager, scheme *runtime.Scheme) { if err := announced.NewGroupMetaFactory( &announced.GroupMetaFactoryArgs{ GroupName: authentication.GroupName, VersionPreferenceOrder: []string{v1beta1.SchemeGroupVersion.Version}, ImportPrefix: "k8s.io/kubernetes/pkg/apis/authentication", RootScopedKinds: sets.NewString("TokenReview"), AddInternalObjectsToScheme: authentication.AddToScheme, }, announced.VersionToSchemeFunc{ v1beta1.SchemeGroupVersion.Version: v1beta1.AddToScheme, }, ).Announce().RegisterAndEnable(registry, scheme); err != nil { panic(err) } }
{ "content_hash": "8faef82aa7ac4df8a7652df05596eb48", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 83, "avg_line_length": 34.08108108108108, "alnum_prop": 0.7486122125297383, "repo_name": "kzwang/kubernetes", "id": "14e4cf902f28ff5aa9f928ceaed656be81d58fc5", "size": "1830", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "pkg/apis/authentication/install/install.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "2539" }, { "name": "Go", "bytes": "41032838" }, { "name": "HTML", "bytes": "2592459" }, { "name": "Makefile", "bytes": "76634" }, { "name": "Nginx", "bytes": "1608" }, { "name": "Protocol Buffer", "bytes": "593304" }, { "name": "Python", "bytes": "1323642" }, { "name": "SaltStack", "bytes": "55371" }, { "name": "Shell", "bytes": "1664037" } ], "symlink_target": "" }
<?php namespace SebastianBergmann\Comparator; /** * Compares arrays for equality. */ class ArrayComparator extends Comparator { /** * Returns whether the comparator can compare two values. * * @param mixed $expected The first value to compare * @param mixed $actual The second value to compare * @return bool */ public function accepts($expected, $actual) { return is_array($expected) && is_array($actual); } /** * Asserts that two values are equal. * * @param mixed $expected The first value to compare * @param mixed $actual The second value to compare * @param float $delta The allowed numerical distance between two values to * consider them equal * @param bool $canonicalize If set to TRUE, arrays are sorted before * comparison * @param bool $ignoreCase If set to TRUE, upper- and lowercasing is * ignored when comparing string values * @param array $processed * @throws ComparisonFailure Thrown when the comparison * fails. Contains information about the * specific alertas that lead to the failure. 
*/ public function assertEquals($expected, $actual, $delta = 0.0, $canonicalize = false, $ignoreCase = false, array &$processed = array()) { if ($canonicalize) { sort($expected); sort($actual); } $remaining = $actual; $expString = $actString = "Array (\n"; $equal = true; foreach ($expected as $key => $value) { unset($remaining[$key]); if (!array_key_exists($key, $actual)) { $expString .= sprintf( " %s => %s\n", $this->exporter->export($key), $this->exporter->shortenedExport($value) ); $equal = false; continue; } try { $comparator = $this->factory->getComparatorFor($value, $actual[$key]); $comparator->assertEquals($value, $actual[$key], $delta, $canonicalize, $ignoreCase, $processed); $expString .= sprintf( " %s => %s\n", $this->exporter->export($key), $this->exporter->shortenedExport($value) ); $actString .= sprintf( " %s => %s\n", $this->exporter->export($key), $this->exporter->shortenedExport($actual[$key]) ); } catch (ComparisonFailure $e) { $expString .= sprintf( " %s => %s\n", $this->exporter->export($key), $e->getExpectedAsString() ? $this->indent($e->getExpectedAsString()) : $this->exporter->shortenedExport($e->getExpected()) ); $actString .= sprintf( " %s => %s\n", $this->exporter->export($key), $e->getActualAsString() ? $this->indent($e->getActualAsString()) : $this->exporter->shortenedExport($e->getActual()) ); $equal = false; } } foreach ($remaining as $key => $value) { $actString .= sprintf( " %s => %s\n", $this->exporter->export($key), $this->exporter->shortenedExport($value) ); $equal = false; } $expString .= ')'; $actString .= ')'; if (!$equal) { throw new ComparisonFailure( $expected, $actual, $expString, $actString, false, 'Failed asserting that two arrays are equal.' ); } } protected function indent($lines) { return trim(str_replace("\n", "\n ", $lines)); } }
{ "content_hash": "821071698b2fea1d68e672384054134c", "timestamp": "", "source": "github", "line_count": 129, "max_line_length": 139, "avg_line_length": 33.72093023255814, "alnum_prop": 0.44850574712643676, "repo_name": "fran-bravo/social-cocktail", "id": "cc6b1e79a0d9b42557b213e54a53362d173981d1", "size": "4586", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vendor/sebastian/comparator/src/ArrayComparator.php", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "553" }, { "name": "CSS", "bytes": "2259" }, { "name": "HTML", "bytes": "2067424" }, { "name": "JavaScript", "bytes": "2953688" }, { "name": "PHP", "bytes": "201098" } ], "symlink_target": "" }
import argparse from ..basic.runner import PygameRunner __author__ = 'fyabc' class AntSpider(PygameRunner): def main_loop(self): pass def draw(self): pass def draw_background(self): pass def real_main(options): pass def build_parser(): parser = argparse.ArgumentParser(prog='antspider', description='A simple game of ants and spiders.') return parser def main(): parser = build_parser() options = parser.parse_args() real_main(options)
{ "content_hash": "e8c7877d0b81ec0ad3517087ff85125f", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 104, "avg_line_length": 15.029411764705882, "alnum_prop": 0.6477495107632094, "repo_name": "fyabc/MiniGames", "id": "fed9779e156cc93bd0c6a93bbde5c2e13a7b8d43", "size": "555", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "GamePack/GamePack/AntSpider/antspider.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "821180" } ], "symlink_target": "" }
ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
{ "content_hash": "653287053a7fb76b88b79588515d98ab", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.307692307692308, "alnum_prop": 0.6940298507462687, "repo_name": "mdoering/backbone", "id": "0a73fe8fcb28ddacaa211e4f6b7e7b190007ef71", "size": "203", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Liliopsida/Asparagales/Orchidaceae/Schizochilus/Schizochilus cecilii/Schizochilus cecilii transvaalensis/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <shape xmlns:android="http://schemas.android.com/apk/res/android" android:shape="rectangle" > <corners android:radius="2dp" /> <solid android:color="@color/colorPrimary" /> </shape>
{ "content_hash": "3a94a893d736b74d027858f695c3be0e", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 65, "avg_line_length": 39.5, "alnum_prop": 0.6666666666666666, "repo_name": "ZhangZhenghao/Material-WeCenter", "id": "5f685aa93ec16b8ff1bec1b0784a4ae02dc2517e", "size": "237", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "app/src/main/res/drawable/stroker.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "26660" }, { "name": "Java", "bytes": "231397" } ], "symlink_target": "" }
IMPLEMENT_CONOBJECT(BehaviorInstance); //----------------------------------------------------------------------------- BehaviorInstance::BehaviorInstance( BehaviorTemplate* pTemplate ) : mTemplate( pTemplate ), mBehaviorOwner( NULL ), mBehaviorId( 0 ) { if ( pTemplate != NULL ) { // Fetch field prototype count. const U32 fieldCount = pTemplate->getBehaviorFieldCount(); // Set field prototypes. for( U32 index = 0; index < fieldCount; ++index ) { // Fetch fields. BehaviorTemplate::BehaviorField* pField = pTemplate->getBehaviorField( index ); // Set cloned field. setDataField( pField->mName, NULL, pField->mDefaultValue ); } } } //----------------------------------------------------------------------------- bool BehaviorInstance::onAdd() { if(! Parent::onAdd()) return false; // Store this object's namespace mNameSpace = Namespace::global()->find( getTemplateName() ); return true; } //----------------------------------------------------------------------------- void BehaviorInstance::onRemove() { Parent::onRemove(); } //----------------------------------------------------------------------------- void BehaviorInstance::initPersistFields() { addGroup("Behavior"); addField("template", TypeSimObjectName, Offset(mTemplate, BehaviorInstance), "Template this instance was created from."); addProtectedField( "Owner", TypeSimObjectPtr, Offset(mBehaviorOwner, BehaviorInstance), &setOwner, &defaultProtectedGetFn, "Behavior component owner." ); endGroup("Behavior"); Parent::initPersistFields(); } //----------------------------------------------------------------------------- const char* BehaviorInstance::getTemplateName( void ) { return mTemplate ? mTemplate->getName() : NULL; } // Get template. const char* BehaviorInstance::getTemplate(void* obj, const char* data) { return static_cast<BehaviorInstance*>(obj)->getTemplate()->getIdString(); }
{ "content_hash": "9555e3b7721097769542911b97a863cb", "timestamp": "", "source": "github", "line_count": 70, "max_line_length": 159, "avg_line_length": 29.3, "alnum_prop": 0.5363237445148707, "repo_name": "ktotheoz/Torque6", "id": "8eafc12cadb521e132941b5c5bf1816b47f623ad", "size": "3661", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/component/behaviors/behaviorInstance.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "29795" }, { "name": "Awk", "bytes": "3962" }, { "name": "Batchfile", "bytes": "428" }, { "name": "C", "bytes": "14722722" }, { "name": "C#", "bytes": "97913" }, { "name": "C++", "bytes": "23581326" }, { "name": "CMake", "bytes": "60458" }, { "name": "Groff", "bytes": "30763" }, { "name": "HTML", "bytes": "1117410" }, { "name": "Java", "bytes": "354" }, { "name": "JavaScript", "bytes": "2047" }, { "name": "Lex", "bytes": "15284" }, { "name": "Logos", "bytes": "577807" }, { "name": "Lua", "bytes": "28195" }, { "name": "Makefile", "bytes": "865213" }, { "name": "Mathematica", "bytes": "8776" }, { "name": "Module Management System", "bytes": "13253" }, { "name": "Objective-C", "bytes": "1765091" }, { "name": "Objective-C++", "bytes": "775131" }, { "name": "Perl", "bytes": "69" }, { "name": "Python", "bytes": "355429" }, { "name": "SAS", "bytes": "13756" }, { "name": "Scala", "bytes": "551" }, { "name": "Shell", "bytes": "1262449" }, { "name": "Smalltalk", "bytes": "1353" }, { "name": "SuperCollider", "bytes": "69054" }, { "name": "Yacc", "bytes": "15489" } ], "symlink_target": "" }
SecurityAssistant =================
{ "content_hash": "a5566e3d285576b688b7d9f3d90ed3fb", "timestamp": "", "source": "github", "line_count": 2, "max_line_length": 17, "avg_line_length": 18, "alnum_prop": 0.4722222222222222, "repo_name": "suphy2009/SecurityAssistant", "id": "be1567d62dc8a6cecf4c79190c930270f4c85361", "size": "36", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
""" sqldiff.py - Prints the (approximated) difference between models and database TODO: - better support for relations - better support for constraints (mainly postgresql?) - support for table spaces with postgresql - when a table is not managed (meta.managed==False) then only do a one-way sqldiff ? show differences from db->table but not the other way around since it's not managed. KNOWN ISSUES: - MySQL has by far the most problems with introspection. Please be carefull when using MySQL with sqldiff. - Booleans are reported back as Integers, so there's know way to know if there was a real change. - Varchar sizes are reported back without unicode support so their size may change in comparison to the real length of the varchar. - Some of the 'fixes' to counter these problems might create false positives or false negatives. """ from django.core.management.base import BaseCommand from django.core.management import sql as _sql from django.core.management import CommandError from django.core.management.color import no_style from django.db import transaction, connection from django.db.models.fields import IntegerField from optparse import make_option ORDERING_FIELD = IntegerField('_order', null=True) def flatten(l, ltypes=(list, tuple)): ltype = type(l) l = list(l) i = 0 while i < len(l): while isinstance(l[i], ltypes): if not l[i]: l.pop(i) i -= 1 break else: l[i:i + 1] = l[i] i += 1 return ltype(l) def all_local_fields(meta): all_fields = [] if meta.managed: if meta.proxy: for parent in meta.parents: all_fields.extend(all_local_fields(parent._meta)) else: for f in meta.local_fields: col_type = f.db_type(connection=connection) if col_type is None: continue all_fields.append(f) return all_fields class SQLDiff(object): DATA_TYPES_REVERSE_OVERRIDE = {} DIFF_TYPES = [ 'error', 'comment', 'table-missing-in-db', 'field-missing-in-db', 'field-missing-in-model', 'fkey-missing-in-db', 'fkey-missing-in-model', 'index-missing-in-db', 'index-missing-in-model', 
'unique-missing-in-db', 'unique-missing-in-model', 'field-type-differ', 'field-parameter-differ', 'notnull-differ', ] DIFF_TEXTS = { 'error': 'error: %(0)s', 'comment': 'comment: %(0)s', 'table-missing-in-db': "table '%(0)s' missing in database", 'field-missing-in-db': "field '%(1)s' defined in model but missing in database", 'field-missing-in-model': "field '%(1)s' defined in database but missing in model", 'fkey-missing-in-db': "field '%(1)s' FOREIGN KEY defined in model but missing in database", 'fkey-missing-in-model': "field '%(1)s' FOREIGN KEY defined in database but missing in model", 'index-missing-in-db': "field '%(1)s' INDEX defined in model but missing in database", 'index-missing-in-model': "field '%(1)s' INDEX defined in database schema but missing in model", 'unique-missing-in-db': "field '%(1)s' UNIQUE defined in model but missing in database", 'unique-missing-in-model': "field '%(1)s' UNIQUE defined in database schema but missing in model", 'field-type-differ': "field '%(1)s' not of same type: db='%(3)s', model='%(2)s'", 'field-parameter-differ': "field '%(1)s' parameters differ: db='%(3)s', model='%(2)s'", 'notnull-differ': "field '%(1)s' null differ: db='%(3)s', model='%(2)s'", } SQL_FIELD_MISSING_IN_DB = lambda self, style, qn, args: "%s %s\n\t%s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ADD COLUMN'), style.SQL_FIELD(qn(args[1])), ' '.join(style.SQL_COLTYPE(a) if i == 0 else style.SQL_KEYWORD(a) for i, a in enumerate(args[2:]))) SQL_FIELD_MISSING_IN_MODEL = lambda self, style, qn, args: "%s %s\n\t%s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('DROP COLUMN'), style.SQL_FIELD(qn(args[1]))) SQL_FKEY_MISSING_IN_DB = lambda self, style, qn, args: "%s %s\n\t%s %s %s %s %s (%s)%s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ADD COLUMN'), style.SQL_FIELD(qn(args[1])), ' '.join(style.SQL_COLTYPE(a) if i == 0 else 
style.SQL_KEYWORD(a) for i, a in enumerate(args[4:])), style.SQL_KEYWORD('REFERENCES'), style.SQL_TABLE(qn(args[2])), style.SQL_FIELD(qn(args[3])), connection.ops.deferrable_sql()) SQL_INDEX_MISSING_IN_DB = lambda self, style, qn, args: "%s %s\n\t%s %s (%s%s);" % (style.SQL_KEYWORD('CREATE INDEX'), style.SQL_TABLE(qn("%s" % '_'.join(a for a in args[0:3] if a))), style.SQL_KEYWORD('ON'), style.SQL_TABLE(qn(args[0])), style.SQL_FIELD(qn(args[1])), style.SQL_KEYWORD(args[3])) # FIXME: need to lookup index name instead of just appending _idx to table + fieldname SQL_INDEX_MISSING_IN_MODEL = lambda self, style, qn, args: "%s %s;" % (style.SQL_KEYWORD('DROP INDEX'), style.SQL_TABLE(qn("%s" % '_'.join(a for a in args[0:3] if a)))) SQL_UNIQUE_MISSING_IN_DB = lambda self, style, qn, args: "%s %s\n\t%s %s (%s);" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ADD COLUMN'), style.SQL_KEYWORD('UNIQUE'), style.SQL_FIELD(qn(args[1]))) # FIXME: need to lookup unique constraint name instead of appending _key to table + fieldname SQL_UNIQUE_MISSING_IN_MODEL = lambda self, style, qn, args: "%s %s\n\t%s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('DROP'), style.SQL_KEYWORD('CONSTRAINT'), style.SQL_TABLE(qn("%s_key" % ('_'.join(args[:2]))))) SQL_FIELD_TYPE_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD("MODIFY"), style.SQL_FIELD(qn(args[1])), style.SQL_COLTYPE(args[2])) SQL_FIELD_PARAMETER_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD("MODIFY"), style.SQL_FIELD(qn(args[1])), style.SQL_COLTYPE(args[2])) SQL_NOTNULL_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('MODIFY'), style.SQL_FIELD(qn(args[1])), 
style.SQL_KEYWORD(args[2]), style.SQL_KEYWORD('NOT NULL')) SQL_ERROR = lambda self, style, qn, args: style.NOTICE('-- Error: %s' % style.ERROR(args[0])) SQL_COMMENT = lambda self, style, qn, args: style.NOTICE('-- Comment: %s' % style.SQL_TABLE(args[0])) SQL_TABLE_MISSING_IN_DB = lambda self, style, qn, args: style.NOTICE('-- Table missing: %s' % args[0]) can_detect_notnull_differ = False def __init__(self, app_models, options): self.app_models = app_models self.options = options self.dense = options.get('dense_output', False) try: self.introspection = connection.introspection except AttributeError: from django.db import get_introspection_module self.introspection = get_introspection_module() self.cursor = connection.cursor() self.django_tables = self.get_django_tables(options.get('only_existing', True)) self.db_tables = self.introspection.get_table_list(self.cursor) self.differences = [] self.unknown_db_fields = {} self.new_db_fields = set() self.null = {} self.DIFF_SQL = { 'error': self.SQL_ERROR, 'comment': self.SQL_COMMENT, 'table-missing-in-db': self.SQL_TABLE_MISSING_IN_DB, 'field-missing-in-db': self.SQL_FIELD_MISSING_IN_DB, 'field-missing-in-model': self.SQL_FIELD_MISSING_IN_MODEL, 'fkey-missing-in-db': self.SQL_FKEY_MISSING_IN_DB, 'fkey-missing-in-model': self.SQL_FIELD_MISSING_IN_MODEL, 'index-missing-in-db': self.SQL_INDEX_MISSING_IN_DB, 'index-missing-in-model': self.SQL_INDEX_MISSING_IN_MODEL, 'unique-missing-in-db': self.SQL_UNIQUE_MISSING_IN_DB, 'unique-missing-in-model': self.SQL_UNIQUE_MISSING_IN_MODEL, 'field-type-differ': self.SQL_FIELD_TYPE_DIFFER, 'field-parameter-differ': self.SQL_FIELD_PARAMETER_DIFFER, 'notnull-differ': self.SQL_NOTNULL_DIFFER, } if self.can_detect_notnull_differ: self.load_null() def load_null(self): raise NotImplementedError("load_null functions must be implemented if diff backend has 'can_detect_notnull_differ' set to True") def add_app_model_marker(self, app_label, model_name): self.differences.append((app_label, 
model_name, [])) def add_difference(self, diff_type, *args): assert diff_type in self.DIFF_TYPES, 'Unknown difference type' self.differences[-1][-1].append((diff_type, args)) def get_django_tables(self, only_existing): try: django_tables = self.introspection.django_table_names(only_existing=only_existing) except AttributeError: # backwards compatibility for before introspection refactoring (r8296) try: django_tables = _sql.django_table_names(only_existing=only_existing) except AttributeError: # backwards compatibility for before svn r7568 django_tables = _sql.django_table_list(only_existing=only_existing) return django_tables def sql_to_dict(self, query, param): """ sql_to_dict(query, param) -> list of dicts code from snippet at http://www.djangosnippets.org/snippets/1383/ """ cursor = connection.cursor() cursor.execute(query, param) fieldnames = [name[0] for name in cursor.description] result = [] for row in cursor.fetchall(): rowset = [] for field in zip(fieldnames, row): rowset.append(field) result.append(dict(rowset)) return result def get_field_model_type(self, field): return field.db_type(connection=connection) def get_field_db_type(self, description, field=None, table_name=None): from django.db import models # DB-API cursor.description #(name, type_code, display_size, internal_size, precision, scale, null_ok) = description type_code = description[1] if type_code in self.DATA_TYPES_REVERSE_OVERRIDE: reverse_type = self.DATA_TYPES_REVERSE_OVERRIDE[type_code] else: try: try: reverse_type = self.introspection.data_types_reverse[type_code] except AttributeError: # backwards compatibility for before introspection refactoring (r8296) reverse_type = self.introspection.DATA_TYPES_REVERSE.get(type_code) except KeyError: reverse_type = self.get_field_db_type_lookup(type_code) if not reverse_type: # type_code not found in data_types_reverse map key = (self.differences[-1][:2], description[:2]) if key not in self.unknown_db_fields: self.unknown_db_fields[key] = 1 
self.add_difference('comment', "Unknown database type for field '%s' (%s)" % (description[0], type_code)) return None kwargs = {} if isinstance(reverse_type, tuple): kwargs.update(reverse_type[1]) reverse_type = reverse_type[0] if reverse_type == "CharField" and description[3]: kwargs['max_length'] = description[3] if reverse_type == "DecimalField": kwargs['max_digits'] = description[4] kwargs['decimal_places'] = description[5] and abs(description[5]) or description[5] if description[6]: kwargs['blank'] = True if not reverse_type in ('TextField', 'CharField'): kwargs['null'] = True if '.' in reverse_type: from django.utils import importlib # TODO: when was importlib added to django.utils ? and do we # need to add backwards compatibility code ? module_path, package_name = reverse_type.rsplit('.', 1) module = importlib.import_module(module_path) field_db_type = getattr(module, package_name)(**kwargs).db_type(connection=connection) else: field_db_type = getattr(models, reverse_type)(**kwargs).db_type(connection=connection) return field_db_type def get_field_db_type_lookup(self, type_code): return None def get_field_db_nullable(self, field, table_name): tablespace = field.db_tablespace if tablespace == "": tablespace = "public" return self.null.get((tablespace, table_name, field.attname), 'fixme') def strip_parameters(self, field_type): if field_type and field_type != 'double precision': return field_type.split(" ")[0].split("(")[0].lower() return field_type def find_unique_missing_in_db(self, meta, table_indexes, table_name): for field in all_local_fields(meta): if field.unique: attname = field.db_column or field.attname if attname in table_indexes and table_indexes[attname]['unique']: continue self.add_difference('unique-missing-in-db', table_name, attname) def find_unique_missing_in_model(self, meta, table_indexes, table_name): # TODO: Postgresql does not list unique_togethers in table_indexes # MySQL does fields = dict([(field.db_column or field.name, field.unique) 
for field in all_local_fields(meta)]) for att_name, att_opts in table_indexes.iteritems(): if att_opts['unique'] and att_name in fields and not fields[att_name]: if att_name in flatten(meta.unique_together): continue self.add_difference('unique-missing-in-model', table_name, att_name) def find_index_missing_in_db(self, meta, table_indexes, table_name): for field in all_local_fields(meta): if field.db_index: attname = field.db_column or field.attname if not attname in table_indexes: self.add_difference('index-missing-in-db', table_name, attname, '', '') db_type = field.db_type(connection=connection) if db_type.startswith('varchar'): self.add_difference('index-missing-in-db', table_name, attname, 'like', ' varchar_pattern_ops') if db_type.startswith('text'): self.add_difference('index-missing-in-db', table_name, attname, 'like', ' text_pattern_ops') def find_index_missing_in_model(self, meta, table_indexes, table_name): fields = dict([(field.name, field) for field in all_local_fields(meta)]) for att_name, att_opts in table_indexes.iteritems(): if att_name in fields: field = fields[att_name] if field.db_index: continue if att_opts['primary_key'] and field.primary_key: continue if att_opts['unique'] and field.unique: continue if att_opts['unique'] and att_name in flatten(meta.unique_together): continue self.add_difference('index-missing-in-model', table_name, att_name) db_type = field.db_type(connection=connection) if db_type.startswith('varchar') or db_type.startswith('text'): self.add_difference('index-missing-in-model', table_name, att_name, 'like') def find_field_missing_in_model(self, fieldmap, table_description, table_name): for row in table_description: if row[0] not in fieldmap: self.add_difference('field-missing-in-model', table_name, row[0]) def find_field_missing_in_db(self, fieldmap, table_description, table_name): db_fields = [row[0] for row in table_description] for field_name, field in fieldmap.iteritems(): if field_name not in db_fields: field_output = 
[] if field.rel: field_output.extend([field.rel.to._meta.db_table, field.rel.to._meta.get_field(field.rel.field_name).column]) op = 'fkey-missing-in-db' else: op = 'field-missing-in-db' field_output.append(field.db_type(connection=connection)) if not field.null: field_output.append('NOT NULL') self.add_difference(op, table_name, field_name, *field_output) self.new_db_fields.add((table_name, field_name)) def find_field_type_differ(self, meta, table_description, table_name, func=None): db_fields = dict([(row[0], row) for row in table_description]) for field in all_local_fields(meta): if field.name not in db_fields: continue description = db_fields[field.name] model_type = self.get_field_model_type(field) db_type = self.get_field_db_type(description, field) # use callback function if defined if func: model_type, db_type = func(field, description, model_type, db_type) if not self.strip_parameters(db_type) == self.strip_parameters(model_type): self.add_difference('field-type-differ', table_name, field.name, model_type, db_type) def find_field_parameter_differ(self, meta, table_description, table_name, func=None): db_fields = dict([(row[0], row) for row in table_description]) for field in all_local_fields(meta): if field.name not in db_fields: continue description = db_fields[field.name] model_type = self.get_field_model_type(field) db_type = self.get_field_db_type(description, field, table_name) if not self.strip_parameters(model_type) == self.strip_parameters(db_type): continue # use callback function if defined if func: model_type, db_type = func(field, description, model_type, db_type) if not model_type == db_type: self.add_difference('field-parameter-differ', table_name, field.name, model_type, db_type) def find_field_notnull_differ(self, meta, table_description, table_name): if not self.can_detect_notnull_differ: return for field in all_local_fields(meta): if (table_name, field.attname) in self.new_db_fields: continue null = self.get_field_db_nullable(field, 
table_name) if field.null != null: action = field.null and 'DROP' or 'SET' self.add_difference('notnull-differ', table_name, field.attname, action) @transaction.commit_manually def find_differences(self): cur_app_label = None for app_model in self.app_models: meta = app_model._meta table_name = meta.db_table app_label = meta.app_label if cur_app_label != app_label: # Marker indicating start of difference scan for this table_name self.add_app_model_marker(app_label, app_model.__name__) #if not table_name in self.django_tables: if not table_name in self.db_tables: # Table is missing from database self.add_difference('table-missing-in-db', table_name) continue table_indexes = self.introspection.get_indexes(self.cursor, table_name) fieldmap = dict([(field.db_column or field.get_attname(), field) for field in all_local_fields(meta)]) # add ordering field if model uses order_with_respect_to if meta.order_with_respect_to: fieldmap['_order'] = ORDERING_FIELD try: table_description = self.introspection.get_table_description(self.cursor, table_name) except Exception as e: self.add_difference('error', 'unable to introspect table: %s' % str(e).strip()) transaction.rollback() # reset transaction continue else: transaction.commit() # Fields which are defined in database but not in model # 1) find: 'unique-missing-in-model' self.find_unique_missing_in_model(meta, table_indexes, table_name) # 2) find: 'index-missing-in-model' self.find_index_missing_in_model(meta, table_indexes, table_name) # 3) find: 'field-missing-in-model' self.find_field_missing_in_model(fieldmap, table_description, table_name) # Fields which are defined in models but not in database # 4) find: 'field-missing-in-db' self.find_field_missing_in_db(fieldmap, table_description, table_name) # 5) find: 'unique-missing-in-db' self.find_unique_missing_in_db(meta, table_indexes, table_name) # 6) find: 'index-missing-in-db' self.find_index_missing_in_db(meta, table_indexes, table_name) # Fields which have a different 
type or parameters # 7) find: 'type-differs' self.find_field_type_differ(meta, table_description, table_name) # 8) find: 'type-parameter-differs' self.find_field_parameter_differ(meta, table_description, table_name) # 9) find: 'field-notnull' self.find_field_notnull_differ(meta, table_description, table_name) def print_diff(self, style=no_style()): """ print differences to stdout """ if self.options.get('sql', True): self.print_diff_sql(style) else: self.print_diff_text(style) def print_diff_text(self, style): if not self.can_detect_notnull_differ: print(style.NOTICE("# Detecting notnull changes not implemented for this database backend")) print("") cur_app_label = None for app_label, model_name, diffs in self.differences: if not diffs: continue if not self.dense and cur_app_label != app_label: print("%s %s" % (style.NOTICE("+ Application:"), style.SQL_TABLE(app_label))) cur_app_label = app_label if not self.dense: print("%s %s" % (style.NOTICE("|-+ Differences for model:"), style.SQL_TABLE(model_name))) for diff in diffs: diff_type, diff_args = diff text = self.DIFF_TEXTS[diff_type] % dict((str(i), style.SQL_TABLE(e)) for i, e in enumerate(diff_args)) text = "'".join(i % 2 == 0 and style.ERROR(e) or e for i, e in enumerate(text.split("'"))) if not self.dense: print("%s %s" % (style.NOTICE("|--+"), text)) else: print("%s %s %s %s %s" % (style.NOTICE("App"), style.SQL_TABLE(app_label), style.NOTICE('Model'), style.SQL_TABLE(model_name), text)) def print_diff_sql(self, style): if not self.can_detect_notnull_differ: print(style.NOTICE("-- Detecting notnull changes not implemented for this database backend")) print("") cur_app_label = None qn = connection.ops.quote_name has_differences = max([len(diffs) for app_label, model_name, diffs in self.differences]) if not has_differences: if not self.dense: print(style.SQL_KEYWORD("-- No differences")) else: print(style.SQL_KEYWORD("BEGIN;")) for app_label, model_name, diffs in self.differences: if not diffs: continue if not 
self.dense and cur_app_label != app_label: print(style.NOTICE("-- Application: %s" % style.SQL_TABLE(app_label))) cur_app_label = app_label if not self.dense: print(style.NOTICE("-- Model: %s" % style.SQL_TABLE(model_name))) for diff in diffs: diff_type, diff_args = diff text = self.DIFF_SQL[diff_type](style, qn, diff_args) if self.dense: text = text.replace("\n\t", " ") print(text) print(style.SQL_KEYWORD("COMMIT;")) class GenericSQLDiff(SQLDiff): can_detect_notnull_differ = False class MySQLDiff(SQLDiff): can_detect_notnull_differ = False # All the MySQL hacks together create something of a problem # Fixing one bug in MySQL creates another issue. So just keep in mind # that this is way unreliable for MySQL atm. def get_field_db_type(self, description, field=None, table_name=None): from MySQLdb.constants import FIELD_TYPE # weird bug? in mysql db-api where it returns three times the correct value for field length # if i remember correctly it had something todo with unicode strings # TODO: Fix this is a more meaningful and better understood manner description = list(description) if description[1] not in [FIELD_TYPE.TINY, FIELD_TYPE.SHORT]: # exclude tinyints from conversion. description[3] = description[3] / 3 description[4] = description[4] / 3 db_type = super(MySQLDiff, self).get_field_db_type(description) if not db_type: return if field: if field.primary_key and (db_type == 'integer' or db_type == 'bigint'): db_type += ' AUTO_INCREMENT' # MySQL isn't really sure about char's and varchar's like sqlite field_type = self.get_field_model_type(field) # Fix char/varchar inconsistencies if self.strip_parameters(field_type) == 'char' and self.strip_parameters(db_type) == 'varchar': db_type = db_type.lstrip("var") # They like to call 'bool's 'tinyint(1)' and introspection makes that a integer # just convert it back to it's proper type, a bool is a bool and nothing else. 
if db_type == 'integer' and description[1] == FIELD_TYPE.TINY and description[4] == 1: db_type = 'bool' if db_type == 'integer' and description[1] == FIELD_TYPE.SHORT: db_type = 'smallint UNSIGNED' # FIXME: what about if it's not UNSIGNED ? return db_type class SqliteSQLDiff(SQLDiff): can_detect_notnull_differ = True def load_null(self): for table_name in self.db_tables: # sqlite does not support tablespaces tablespace = "public" # index, column_name, column_type, nullable, default_value # see: http://www.sqlite.org/pragma.html#pragma_table_info for table_info in self.sql_to_dict("PRAGMA table_info(%s);" % table_name, []): key = (tablespace, table_name, table_info['name']) self.null[key] = not table_info['notnull'] # Unique does not seem to be implied on Sqlite for Primary_key's # if this is more generic among databases this might be usefull # to add to the superclass's find_unique_missing_in_db method def find_unique_missing_in_db(self, meta, table_indexes, table_name): for field in all_local_fields(meta): if field.unique: attname = field.db_column or field.attname if attname in table_indexes and table_indexes[attname]['unique']: continue if attname in table_indexes and table_indexes[attname]['primary_key']: continue self.add_difference('unique-missing-in-db', table_name, attname) # Finding Indexes by using the get_indexes dictionary doesn't seem to work # for sqlite. 
def find_index_missing_in_db(self, meta, table_indexes, table_name): pass def find_index_missing_in_model(self, meta, table_indexes, table_name): pass def get_field_db_type(self, description, field=None, table_name=None): db_type = super(SqliteSQLDiff, self).get_field_db_type(description) if not db_type: return if field: field_type = self.get_field_model_type(field) # Fix char/varchar inconsistencies if self.strip_parameters(field_type) == 'char' and self.strip_parameters(db_type) == 'varchar': db_type = db_type.lstrip("var") return db_type class PostgresqlSQLDiff(SQLDiff): can_detect_notnull_differ = True DATA_TYPES_REVERSE_OVERRIDE = { 1042: 'CharField', # postgis types (TODO: support is very incomplete) 17506: 'django.contrib.gis.db.models.fields.PointField', 55902: 'django.contrib.gis.db.models.fields.MultiPolygonField', } DATA_TYPES_REVERSE_NAME = { 'hstore': 'django_hstore.hstore.DictionaryField', } # Hopefully in the future we can add constraint checking and other more # advanced checks based on this database. 
SQL_LOAD_CONSTRAINTS = """ SELECT nspname, relname, conname, attname, pg_get_constraintdef(pg_constraint.oid) FROM pg_constraint INNER JOIN pg_attribute ON pg_constraint.conrelid = pg_attribute.attrelid AND pg_attribute.attnum = any(pg_constraint.conkey) INNER JOIN pg_class ON conrelid=pg_class.oid INNER JOIN pg_namespace ON pg_namespace.oid=pg_class.relnamespace ORDER BY CASE WHEN contype='f' THEN 0 ELSE 1 END,contype,nspname,relname,conname; """ SQL_LOAD_NULL = """ SELECT nspname, relname, attname, attnotnull FROM pg_attribute INNER JOIN pg_class ON attrelid=pg_class.oid INNER JOIN pg_namespace ON pg_namespace.oid=pg_class.relnamespace; """ SQL_FIELD_TYPE_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ALTER'), style.SQL_FIELD(qn(args[1])), style.SQL_KEYWORD("TYPE"), style.SQL_COLTYPE(args[2])) SQL_FIELD_PARAMETER_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ALTER'), style.SQL_FIELD(qn(args[1])), style.SQL_KEYWORD("TYPE"), style.SQL_COLTYPE(args[2])) SQL_NOTNULL_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ALTER COLUMN'), style.SQL_FIELD(qn(args[1])), style.SQL_KEYWORD(args[2]), style.SQL_KEYWORD('NOT NULL')) def __init__(self, app_models, options): SQLDiff.__init__(self, app_models, options) self.check_constraints = {} self.load_constraints() def load_null(self): for dct in self.sql_to_dict(self.SQL_LOAD_NULL, []): key = (dct['nspname'], dct['relname'], dct['attname']) self.null[key] = not dct['attnotnull'] def load_constraints(self): for dct in self.sql_to_dict(self.SQL_LOAD_CONSTRAINTS, []): key = (dct['nspname'], dct['relname'], dct['attname']) if 'CHECK' in dct['pg_get_constraintdef']: self.check_constraints[key] = dct def get_field_db_type(self, 
description, field=None, table_name=None): db_type = super(PostgresqlSQLDiff, self).get_field_db_type(description) if not db_type: return if field: if field.primary_key: if db_type == 'integer': db_type = 'serial' elif db_type == 'bigint': db_type = 'bigserial' if table_name: tablespace = field.db_tablespace if tablespace == "": tablespace = "public" check_constraint = self.check_constraints.get((tablespace, table_name, field.attname), {}).get('pg_get_constraintdef', None) if check_constraint: check_constraint = check_constraint.replace("((", "(") check_constraint = check_constraint.replace("))", ")") check_constraint = '("'.join([')' in e and '" '.join(p.strip('"') for p in e.split(" ", 1)) or e for e in check_constraint.split("(")]) # TODO: might be more then one constraint in definition ? db_type += ' ' + check_constraint return db_type @transaction.autocommit def get_field_db_type_lookup(self, type_code): try: name = self.sql_to_dict("SELECT typname FROM pg_type WHERE typelem=%s;", [type_code])[0]['typname'] return self.DATA_TYPES_REVERSE_NAME.get(name.strip('_')) except (IndexError, KeyError): pass """ def find_field_type_differ(self, meta, table_description, table_name): def callback(field, description, model_type, db_type): if field.primary_key and db_type=='integer': db_type = 'serial' return model_type, db_type super(PostgresqlSQLDiff, self).find_field_type_differ(meta, table_description, table_name, callback) """ DATABASE_SQLDIFF_CLASSES = { 'postgis': PostgresqlSQLDiff, 'postgresql_psycopg2': PostgresqlSQLDiff, 'postgresql': PostgresqlSQLDiff, 'mysql': MySQLDiff, 'sqlite3': SqliteSQLDiff, 'oracle': GenericSQLDiff } class Command(BaseCommand): option_list = BaseCommand.option_list + ( make_option('--all-applications', '-a', action='store_true', dest='all_applications', help="Automaticly include all application from INSTALLED_APPS."), make_option('--not-only-existing', '-e', action='store_false', dest='only_existing', help="Check all tables that exist in 
the database, not only tables that should exist based on models."), make_option('--dense-output', '-d', action='store_true', dest='dense_output', help="Shows the output in dense format, normally output is spreaded over multiple lines."), make_option('--output_text', '-t', action='store_false', dest='sql', default=True, help="Outputs the differences as descriptive text instead of SQL"), ) help = """Prints the (approximated) difference between models and fields in the database for the given app name(s). It indicates how columns in the database are different from the sql that would be generated by Django. This command is not a database migration tool. (Though it can certainly help) It's purpose is to show the current differences as a way to check/debug ur models compared to the real database tables and columns.""" output_transaction = False args = '<appname appname ...>' def handle(self, *app_labels, **options): from django import VERSION if VERSION[:2] < (1, 0): raise CommandError("SQLDiff only support Django 1.0 or higher!") from django.db import models from django.conf import settings engine = None if hasattr(settings, 'DATABASES'): engine = settings.DATABASES['default']['ENGINE'] else: engine = settings.DATABASE_ENGINE if engine == 'dummy': # This must be the "dummy" database backend, which means the user # hasn't set DATABASE_ENGINE. raise CommandError("""Django doesn't know which syntax to use for your SQL statements, because you haven't specified the DATABASE_ENGINE setting. Edit your settings file and change DATABASE_ENGINE to something like 'postgresql' or 'mysql'.""") if options.get('all_applications', False): app_models = models.get_models(include_auto_created=True) else: if not app_labels: raise CommandError('Enter at least one appname.') try: app_list = [models.get_app(app_label) for app_label in app_labels] except (models.ImproperlyConfigured, ImportError) as e: raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" 
% e) app_models = [] for app in app_list: app_models.extend(models.get_models(app, include_auto_created=True)) ## remove all models that are not managed by Django #app_models = [model for model in app_models if getattr(model._meta, 'managed', True)] if not app_models: raise CommandError('Unable to execute sqldiff no models founds.') if not engine: engine = connection.__module__.split('.')[-2] if '.' in engine: engine = engine.split('.')[-1] cls = DATABASE_SQLDIFF_CLASSES.get(engine, GenericSQLDiff) sqldiff_instance = cls(app_models, options) sqldiff_instance.find_differences() sqldiff_instance.print_diff(self.style) return
{ "content_hash": "4c219312337523cfd6209c735d0c26b0", "timestamp": "", "source": "github", "line_count": 760, "max_line_length": 448, "avg_line_length": 49.40657894736842, "alnum_prop": 0.6047830834376415, "repo_name": "bop/bauhaus", "id": "33c8749474489b4b38d98d1816b3423961cc4038", "size": "37549", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/python2.7/site-packages/django_extensions/management/commands/sqldiff.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "145210" }, { "name": "Groff", "bytes": "22" }, { "name": "HTML", "bytes": "1013469" }, { "name": "JavaScript", "bytes": "267371" }, { "name": "Python", "bytes": "6660999" }, { "name": "Shell", "bytes": "4317" } ], "symlink_target": "" }
.documentableElement { background-color: var(--action-primary-background-default-solid); padding: calc(3 * var(--base-spacing)); border-radius: 4px; margin-bottom: calc(3 * var(--base-spacing)); color: var(--text-primary); position: relative; } .documentableElement:last-child { margin-bottom: 0; } .documentableElement .signature { margin-right: calc(3 * var(--base-spacing)); line-height: 1.5; } .documentableElement:hover { cursor: pointer; } .documentableElement .documentableBrief { color: var(--text-secondary); } .documentableElement .annotations { display: none; } .documentableElement > div .cover { display: none; } .documentableElement.expand > div .cover { display: block; } .documentableElement.expand .annotations { display: inline-block; } .documentableElement.expand .documentableBrief { display: none; } .documentableElement .icon-button { position: absolute; top: calc(3 * var(--base-spacing)); right: calc(3 * var(--base-spacing)); display: none; } .documentableElement:hover .icon-button { display: block; } [t="k"] { color: var(--code-method-highlighting-keyword); } [t="t"] { color: var(--code-method-highlighting-type); } #content a[t="n"] { color: var(--code-method-highlighting-link-sig-fig); } #content a[t="t"] { color: var(--code-method-highlighting-type-link); }
{ "content_hash": "50dab41f59015544d8364852b10710c7", "timestamp": "", "source": "github", "line_count": 72, "max_line_length": 67, "avg_line_length": 18.84722222222222, "alnum_prop": 0.6897568165070007, "repo_name": "sjrd/dotty", "id": "74ed724232927ca1b3aaa3f93dae93614833fe15", "size": "1357", "binary": false, "copies": "3", "ref": "refs/heads/main", "path": "scaladoc/resources/dotty_res/styles/theme/components/api-member.css", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "836" }, { "name": "CSS", "bytes": "136099" }, { "name": "HTML", "bytes": "3463" }, { "name": "Java", "bytes": "227667" }, { "name": "JavaScript", "bytes": "153556" }, { "name": "Scala", "bytes": "18018431" }, { "name": "Shell", "bytes": "23230" }, { "name": "TypeScript", "bytes": "8378" } ], "symlink_target": "" }
'use strict'; module.exports.fileExists = fileExists; var fs = require('fs'); /** * @param {string} absolutePath * @return {boolean} if the path exists and is a file. */ function fileExists(absolutePath){ return fs.existsSync(absolutePath) && fs.statSync(absolutePath).isFile(); }
{ "content_hash": "3b5186c7cdbc1cb6698100ef82346a61", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 75, "avg_line_length": 22.153846153846153, "alnum_prop": 0.7118055555555556, "repo_name": "bifodus/node-easy-cluster", "id": "e1fcc972ca07589b23c3bc412c1b078aae64dd93", "size": "288", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "lib/util/fsHelpers.js", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
title: Scarica product: remmina-plugin-folder depth: 1 ---
{ "content_hash": "dd0c5dbfdeec5a2d670135ec4e2257e2", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 30, "avg_line_length": 12, "alnum_prop": 0.7333333333333333, "repo_name": "muflone/grav-muflone", "id": "a5ab3b38c9dc255173df8dfad6570ffa872faa26", "size": "64", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "user/pages/12.remmina-plugin-folder/02.download/download.it.md", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "3982" }, { "name": "CSS", "bytes": "468085" }, { "name": "HTML", "bytes": "257967" }, { "name": "JavaScript", "bytes": "221067" }, { "name": "Logos", "bytes": "816" }, { "name": "Nginx", "bytes": "1443" }, { "name": "PHP", "bytes": "1393025" }, { "name": "Shell", "bytes": "643" }, { "name": "XSLT", "bytes": "827" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <!-- Copyright 2004 The Apache Software Foundation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <customer xmlns="http://openuri.org/xstypes/test" gender="male" hexAtt="474749515457594747" base64Att=" VGhpcyBzdHJpbmcgaXMgYmFzZTY0QmluYXJ5IGVuY29kZWQh " anyuriAtt="http://dmoz.org/World/Français/" qnameAtt="pref:localname" xmlns:pref="some_uri" notationAtt="JPEG" > <firstname>Howdy</firstname> <number>436</number> <number>123</number> <birthday>1998-08-26Z</birthday> <number>44</number> <number>933</number> <birthday>2000-08-06-08:00</birthday> <hex>454749515457595A4A</hex> <base64>VGhpcyBzdHJpbmcgaXMgYmFzZTY0QmluYXJ5IGVuY29kZWQh</base64> <anyuri>http://3space.org/space%20space/</anyuri> <qname>openuri_org_localname</qname> <notation>GIF</notation> </customer>
{ "content_hash": "64a6c5f812fd2adca68a2ef53f894b9f", "timestamp": "", "source": "github", "line_count": 38, "max_line_length": 77, "avg_line_length": 36.36842105263158, "alnum_prop": 0.7279305354558611, "repo_name": "apache/xmlbeans", "id": "5f724f02d8ea7498f8bbcf1545d68abfe4aa9940", "size": "1383", "binary": false, "copies": "2", "ref": "refs/heads/trunk", "path": "src/test/resources/xbean/xmlobject/person.xml", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "60552" }, { "name": "CSS", "bytes": "1961" }, { "name": "HTML", "bytes": "2640" }, { "name": "Java", "bytes": "7628118" }, { "name": "Shell", "bytes": "37436" }, { "name": "XQuery", "bytes": "2172" }, { "name": "XS", "bytes": "6502" }, { "name": "XSLT", "bytes": "78459" } ], "symlink_target": "" }
define(function(){ quickforms.domElements = []; quickforms.DomElement = function(dom) // Create one of these for every control { var me = this; this.dom = dom; if(dom) { this.name = dom.attr("name"); this.id = dom.attr("id"); if(dom.attr("remember")=="") { dom.on('change',function(){ setCookie(me.parent.id+me.id,$(this).val(),quickforms.rememberLength); }); } if(dom.is('[qf-users]') && dom.attr('qf-users').indexOf(getCookie('userRole'))<0) { dom.attr("disabled", "disabled"); dom.parent().attr('style','display:none'); $('label[for="'+dom[0].id+'"]').attr('style','display:none'); } } this.addedData = []; this.onChange = function(){}; this.onBlur = function(){}; quickforms.domElements.push(this); this.summary = function(){return this.name;} } });
{ "content_hash": "40157fb0a581d4c1493988ee541a98bc", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 83, "avg_line_length": 26, "alnum_prop": 0.5973557692307693, "repo_name": "uoForms/quickforms3", "id": "41994457b2d745d3cba88504bdaa62b39d7c373c", "size": "1920", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "NetBeansQFProject/web/js/dom/dom.js", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "17156" }, { "name": "CSS", "bytes": "1422627" }, { "name": "HTML", "bytes": "1911010" }, { "name": "Java", "bytes": "257081" }, { "name": "JavaScript", "bytes": "1914119" }, { "name": "SQLPL", "bytes": "4247" }, { "name": "Visual Basic", "bytes": "356" } ], "symlink_target": "" }
cask "with-conditional-caveats" do version "1.2.3" sha256 "67cdb8a02803ef37fdbf7e0be205863172e41a561ca446cd84f0d7ab35a99d94" url "file://#{TEST_FIXTURE_DIR}/cask/caffeine.zip" homepage "https://brew.sh/" app "Caffeine.app" # a do block may print and use a DSL caveats do puts "This caveat is conditional" if false # rubocop:disable Lint/LiteralAsCondition end end
{ "content_hash": "6ab15a4d26fd6afd36a943ced799cea7", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 88, "avg_line_length": 27.642857142857142, "alnum_prop": 0.7441860465116279, "repo_name": "mahori/brew", "id": "265bd7757d7e444e4b43dec9e286b71b37e2808e", "size": "387", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Library/Homebrew/test/support/fixtures/cask/Casks/with-conditional-caveats.rb", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Dockerfile", "bytes": "1460" }, { "name": "HTML", "bytes": "12438" }, { "name": "PostScript", "bytes": "485" }, { "name": "Roff", "bytes": "72771" }, { "name": "Ruby", "bytes": "2188228" }, { "name": "Shell", "bytes": "124249" }, { "name": "Swift", "bytes": "1788" } ], "symlink_target": "" }
package com.xh.jwt.config.auth;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * Request body for login/registration authentication by SMS code.
 * <p>
 * Getters, setters, {@code equals}/{@code hashCode} and both constructors are
 * generated by Lombok; the Swagger annotations document the JSON payload.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
@ApiModel("登录注册请求对象")
public class JwtAuthenticationRequest {

    /** Mobile phone number the verification code was sent to. */
    @ApiModelProperty(value = "手机号", required = true, example = "+852-88888888")
    private String mobile;

    /** One-time verification code entered by the user. */
    @ApiModelProperty(value = "验证码", required = true, example = "1234")
    private String verifycode;
}
{ "content_hash": "daed8bcc84194aa3edbe1eee2679f736", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 80, "avg_line_length": 25.333333333333332, "alnum_prop": 0.768796992481203, "repo_name": "376453716/spring-demo", "id": "ebfa43d100aa8d09c96f940a6674db1289832d30", "size": "560", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "springboot-demo/jwt-demo/src/main/java/com/xh/jwt/config/auth/JwtAuthenticationRequest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "29595" } ], "symlink_target": "" }
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd"> <html> <head> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <meta http-equiv="Content-Style-Type" content="text/css"> <style type="text/css"> p, li {margin: 0px 0px 0px 0px; font: 12px Helvetica} </style> <title>The MacPorts Project's License</title> </head> <body> <p>Copyright (c) 2002 - 2003, Apple Inc.<br>Copyright (c) 2004 - 2012, The MacPorts Project.</p> <p>All rights reserved.</p> <p><br></p> <p>Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:</p> <ol> <li>Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.</li> <li>Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.</li> <li>Neither the name of Apple Inc., The MacPorts Project nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.</li> </ol> <p><br></p> <p>THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.</p> </body> </html>
{ "content_hash": "24bf3ce40a62b70dbfb45990cceea80a", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 146, "avg_line_length": 53.7906976744186, "alnum_prop": 0.6740164288802422, "repo_name": "cooljeanius/MacPorts-fork", "id": "b5767455a3acd22de91861b6208026e2f5e8ffb4", "size": "2313", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "portmgr/dmg/License.html", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "1249005" }, { "name": "C++", "bytes": "11686" }, { "name": "HTML", "bytes": "466" }, { "name": "M4", "bytes": "753334" }, { "name": "Makefile", "bytes": "253078" }, { "name": "Objective-C", "bytes": "25256" }, { "name": "Perl", "bytes": "18162" }, { "name": "Shell", "bytes": "370415" }, { "name": "Tcl", "bytes": "1449326" } ], "symlink_target": "" }
/**
 * Persist a key/value pair in the browser's localStorage.
 * Silently does nothing when localStorage is unavailable.
 */
function saveValue(key, value) {
    if (!('localStorage' in window)) {
        return;
    }
    window.localStorage.setItem(key, value);
}

/**
 * Read a previously saved value from localStorage.
 * Returns undefined when localStorage is unavailable or the stored
 * value is missing/falsy.
 */
function loadValue(key) {
    if (!('localStorage' in window)) {
        return;
    }
    var stored = window.localStorage.getItem(key);
    if (stored) {
        return stored;
    }
}

/**
 * Extract a query-string parameter from the current page URL.
 * Returns "" when the parameter is absent; "+" decodes to a space.
 */
function getParameterByName(name) {
    name = name.replace(/[\[]/, "\\\[").replace(/[\]]/, "\\\]");
    var matcher = new RegExp("[\\?&]" + name + "=([^&#]*)");
    var found = matcher.exec(window.location.search);
    if (found == null) {
        return "";
    }
    return decodeURIComponent(found[1].replace(/\+/g, " "));
}
{ "content_hash": "cc6714dce8c935341fa185a948176c41", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 69, "avg_line_length": 21.606060606060606, "alnum_prop": 0.6044880785413744, "repo_name": "peterkinmond/mazely", "id": "fe00853aed5c95491c1c0fd27862158f3e902c59", "size": "713", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "demos/maze/storage_helper.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "JavaScript", "bytes": "744615" }, { "name": "Python", "bytes": "5134" } ], "symlink_target": "" }
namespace atom { class NativeBrowserViewViews : public NativeBrowserView { public: explicit NativeBrowserViewViews( brightray::InspectableWebContentsView* web_contents_view); ~NativeBrowserViewViews() override; uint8_t GetAutoResizeFlags() { return auto_resize_flags_; } void SetAutoResizeFlags(uint8_t flags) override { auto_resize_flags_ = flags; } void SetBounds(const gfx::Rect& bounds) override; void SetBackgroundColor(SkColor color) override; private: uint8_t auto_resize_flags_; DISALLOW_COPY_AND_ASSIGN(NativeBrowserViewViews); }; } // namespace atom #endif // ATOM_BROWSER_NATIVE_BROWSER_VIEW_VIEWS_H_
{ "content_hash": "6322306e83f65fc08964dc821310a371", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 64, "avg_line_length": 27.083333333333332, "alnum_prop": 0.7523076923076923, "repo_name": "wan-qy/electron", "id": "5dcda13447cde098fccaf8782fefb97d861d47f4", "size": "934", "binary": false, "copies": "10", "ref": "refs/heads/master", "path": "atom/browser/native_browser_view_views.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "4055" }, { "name": "C++", "bytes": "2718789" }, { "name": "HTML", "bytes": "14554" }, { "name": "JavaScript", "bytes": "786278" }, { "name": "Objective-C", "bytes": "50166" }, { "name": "Objective-C++", "bytes": "265661" }, { "name": "PowerShell", "bytes": "99" }, { "name": "Python", "bytes": "201139" }, { "name": "Shell", "bytes": "3439" } ], "symlink_target": "" }
// Root Angular module for the stream-control UI.  Pulls in routing,
// socket.io bindings, Bootstrap widgets and the app's own sub-modules.
var App = angular.module('streamCtrl', [
    'ngRoute',
    'btford.socket-io',
    'ui.bootstrap',
    'streamCtrlDirectives',
    'streamCtrlControllers',
    'streamCtrlFilters',
])

// Route table: the front page is the only route; everything else
// redirects back to '/'.
.config(['$routeProvider', function($routeProvider) {
    $routeProvider.when('/', {
        templateUrl: 'partials/frontpage.html',
        controller: 'mainCtrl',
    });
    $routeProvider.otherwise({redirectTo: '/'});
}])

// Shared socket.io connection, injectable as `ctrlSocket`.
.factory('ctrlSocket', function (socketFactory) {
    return socketFactory();
});
{ "content_hash": "35832977ee6a24ef656c034137d1cfe2", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 53, "avg_line_length": 25.894736842105264, "alnum_prop": 0.6402439024390244, "repo_name": "bastibeckr/bmdStreamer", "id": "aa0c874dfa8fac09576f4ab1d5ce2610641164d9", "size": "493", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "frontend-src/js/app.js", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "79257" }, { "name": "JavaScript", "bytes": "69440" } ], "symlink_target": "" }
<iframe id=i></iframe>
<script>
// Crash/regression test: clone the document of an iframe, detach the
// iframe (making the clone's context inactive), then append a media
// element to the clone.  The page passes simply by not crashing.
var doc = i.contentDocument.cloneNode();
i.remove();
doc.appendChild(document.createElement("audio"));
</script>
{ "content_hash": "95b39d75131c7641dc7e4547e6bf8849", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 49, "avg_line_length": 24.166666666666668, "alnum_prop": 0.7241379310344828, "repo_name": "scheib/chromium", "id": "33d52ca89946298d02349877164e5439626232df", "size": "145", "binary": false, "copies": "23", "ref": "refs/heads/main", "path": "third_party/blink/web_tests/external/wpt/html/semantics/embedded-content/the-audio-element/audio-appendChild-to-inactive-document-crash.html", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
# Trim known_hosts down to just its second line.
# The previous version piped head/tail output straight back into
# known_hosts; the shell truncates the redirect target concurrently with
# the pipeline, so `head` could read an already-emptied file and wipe it.
# Expanding the command substitution first forces the read to finish
# before the truncation happens.
alias hosts='printf "%s\n" "$(head -2 ~/.ssh/known_hosts | tail -1)" > ~/.ssh/known_hosts'

# Pipe my public key to my clipboard. Fuck you, pay me.
# NOTE(review): assumes a DSA key at ~/.ssh/id_dsa.pub — most modern setups
# use id_ed25519.pub or id_rsa.pub; confirm before relying on this alias.
alias pubkey="more ~/.ssh/id_dsa.pub | pbcopy | echo '=> Public key copied to pasteboard.'"
{ "content_hash": "0ef794bc1932103da486cb781ab0bb28", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 91, "avg_line_length": 55.25, "alnum_prop": 0.6742081447963801, "repo_name": "zenom/dotfiles-new", "id": "72d81b6689b4b06f75dbfe669ada9f6c2f6bde4e", "size": "329", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "system/keys.zsh", "mode": "33188", "license": "mit", "language": [ { "name": "Perl", "bytes": "14407" }, { "name": "Ruby", "bytes": "4798" }, { "name": "Shell", "bytes": "17492" }, { "name": "VimL", "bytes": "8941" } ], "symlink_target": "" }
layout: "fluid/docs_base" version: "3.4.2" versionHref: "/docs/v3/3.4.2" path: "" category: api id: "fablist" title: "FabList" header_sub_title: "Ionic API Documentation" doc: "FabList" docType: "class" show_preview_device: true preview_device_url: "/docs/v3/demos/src/fab/www/" angular_controller: APIDemoCtrl --- <h1 class="api-title"> <a class="anchor" name="fab-list" href="#fab-list"></a> FabList <h3><code>ion-fab-list</code></h3> </h1> <a class="improve-v2-docs" href="http://github.com/ionic-team/ionic/edit/v3/src/components/fab/fab-list.ts#L6"> Improve this doc </a> <p><code>ion-fab-list</code> is a container for multiple FAB buttons. They are components of <code>ion-fab</code> and allow you to specificy the buttons position, left, right, top, bottom.</p> <!-- @usage tag --> <h2><a class="anchor" name="usage" href="#usage"></a>Usage</h2> <pre><code class="lang-html">&lt;ion-fab bottom right &gt; &lt;button ion-fab&gt;Share&lt;/button&gt; &lt;ion-fab-list side=&quot;top&quot;&gt; &lt;button ion-fab&gt;Facebook&lt;/button&gt; &lt;button ion-fab&gt;Twitter&lt;/button&gt; &lt;button ion-fab&gt;Youtube&lt;/button&gt; &lt;/ion-fab-list&gt; &lt;ion-fab-list side=&quot;left&quot;&gt; &lt;button ion-fab&gt;Vimeo&lt;/button&gt; &lt;/ion-fab-list&gt; &lt;/ion-fab&gt; </code></pre> <!-- @property tags --> <!-- instance methods on the class --> <h2 id="sass-variable-header"><a class="anchor" name="sass-variables" href="#sass-variables"></a>Sass Variables</h2> <div id="sass-variables" ng-controller="SassToggleCtrl"> <div class="sass-platform-toggle"> <a ng-init="setSassPlatform('base')" ng-class="{ active: active === 'base' }" ng-click="setSassPlatform('base')" >All</a> <a ng-class="{ active: active === 'ios' }" ng-click="setSassPlatform('ios')">iOS</a> <a ng-class="{ active: active === 'md' }" ng-click="setSassPlatform('md')">Material Design</a> <a ng-class="{ active: active === 'wp' }" ng-click="setSassPlatform('wp')">Windows Platform</a> </div> <table ng-show="active === 
'base'" id="sass-base" class="table param-table" style="margin:0;"> <thead> <tr> <th>Property</th> <th>Default</th> <th>Description</th> </tr> </thead> <tbody> <tr> <td><code>$fab-size</code></td> <td><code>56px</code></td> <td><p>Width and height of the FAB button</p> </td> </tr> <tr> <td><code>$fab-mini-size</code></td> <td><code>40px</code></td> <td><p>Width and height of the FAB button mini</p> </td> </tr> <tr> <td><code>$fab-content-margin</code></td> <td><code>10px</code></td> <td><p>Margin of the FAB Container</p> </td> </tr> <tr> <td><code>$fab-list-margin</code></td> <td><code>10px</code></td> <td><p>Margin of the FAB List</p> </td> </tr> <tr> <td><code>$fab-list-button-background-color</code></td> <td><code>#f4f4f4</code></td> <td><p>Background color of the button in a list</p> </td> </tr> </tbody> </table> <table ng-show="active === 'ios'" id="sass-ios" class="table param-table" style="margin:0;"> <thead> <tr> <th>Property</th> <th>Default</th> <th>Description</th> </tr> </thead> <tbody> <tr> <td><code>$fab-ios-background-color</code></td> <td><code>color($colors-ios, primary)</code></td> <td><p>Background color of the button</p> </td> </tr> <tr> <td><code>$fab-ios-text-color</code></td> <td><code>color-contrast($colors-ios, $fab-ios-background-color)</code></td> <td><p>Text color of the button</p> </td> </tr> <tr> <td><code>$fab-ios-background-color-activated</code></td> <td><code>color-shade($fab-ios-background-color)</code></td> <td><p>Background color of the activated button</p> </td> </tr> <tr> <td><code>$fab-ios-list-button-background-color</code></td> <td><code>$fab-list-button-background-color</code></td> <td><p>Background color of the button in a list</p> </td> </tr> <tr> <td><code>$fab-ios-list-button-text-color</code></td> <td><code>color-contrast($colors-ios, $fab-ios-list-button-background-color)</code></td> <td><p>Text color of the button in a list</p> </td> </tr> <tr> <td><code>$fab-ios-list-button-background-color-activated</code></td> 
<td><code>color-shade($fab-ios-list-button-background-color)</code></td> <td><p>Background color of the activated button in a list</p> </td> </tr> <tr> <td><code>$fab-ios-list-button-transition-duration</code></td> <td><code>200ms</code></td> <td><p>Transition duration of the transform and opacity of the button in a list</p> </td> </tr> <tr> <td><code>$fab-ios-list-button-transition-timing-function</code></td> <td><code>ease</code></td> <td><p>Speed curve of the transition of the transform and opacity of the button in a list</p> </td> </tr> <tr> <td><code>$fab-ios-list-button-transition-delay</code></td> <td><code>10ms</code></td> <td><p>Transition delay of the transform and opacity of the button in a list</p> </td> </tr> </tbody> </table> <table ng-show="active === 'md'" id="sass-md" class="table param-table" style="margin:0;"> <thead> <tr> <th>Property</th> <th>Default</th> <th>Description</th> </tr> </thead> <tbody> <tr> <td><code>$fab-md-box-shadow</code></td> <td><code>0 4px 6px 0 rgba(0, 0, 0, .14), 0 4px 5px rgba(0, 0, 0, .1)</code></td> <td><p>Box shadow of the FAB button</p> </td> </tr> <tr> <td><code>$fab-md-box-shadow-activated</code></td> <td><code>0 5px 15px 0 rgba(0, 0, 0, .4), 0 4px 7px 0 rgba(0, 0, 0, .1)</code></td> <td><p>Box shadow of the activated FAB button</p> </td> </tr> <tr> <td><code>$fab-md-background-color</code></td> <td><code>color($colors-md, primary)</code></td> <td><p>Background color of the button</p> </td> </tr> <tr> <td><code>$fab-md-text-color</code></td> <td><code>color-contrast($colors-md, $fab-md-background-color)</code></td> <td><p>Text color of the button</p> </td> </tr> <tr> <td><code>$fab-md-background-color-activated</code></td> <td><code>color-shade($fab-md-background-color)</code></td> <td><p>Background color of the activated button</p> </td> </tr> <tr> <td><code>$fab-md-list-button-background-color</code></td> <td><code>$fab-list-button-background-color</code></td> <td><p>Background color of the button in a list</p> 
</td> </tr> <tr> <td><code>$fab-md-list-button-text-color</code></td> <td><code>color-contrast($colors-md, $fab-md-list-button-background-color)</code></td> <td><p>Text color of the button in a list</p> </td> </tr> <tr> <td><code>$fab-md-list-button-background-color-activated</code></td> <td><code>color-shade($fab-md-list-button-background-color)</code></td> <td><p>Background color of the activated button in a list</p> </td> </tr> <tr> <td><code>$fab-md-list-button-transition-duration</code></td> <td><code>200ms</code></td> <td><p>Transition duration of the transform and opacity of the button in a list</p> </td> </tr> <tr> <td><code>$fab-md-list-button-transition-timing-function</code></td> <td><code>ease</code></td> <td><p>Speed curve of the transition of the transform and opacity of the button in a list</p> </td> </tr> <tr> <td><code>$fab-md-list-button-transition-delay</code></td> <td><code>10ms</code></td> <td><p>Transition delay of the transform and opacity of the button in a list</p> </td> </tr> </tbody> </table> <table ng-show="active === 'wp'" id="sass-wp" class="table param-table" style="margin:0;"> <thead> <tr> <th>Property</th> <th>Default</th> <th>Description</th> </tr> </thead> <tbody> <tr> <td><code>$fab-wp-background-color</code></td> <td><code>color($colors-wp, primary)</code></td> <td><p>Background color of the button</p> </td> </tr> <tr> <td><code>$fab-wp-text-color</code></td> <td><code>color-contrast($colors-wp, $fab-wp-background-color)</code></td> <td><p>Text color of the button</p> </td> </tr> <tr> <td><code>$fab-wp-background-color-activated</code></td> <td><code>color-shade($fab-wp-background-color)</code></td> <td><p>Background color of the activated button</p> </td> </tr> <tr> <td><code>$fab-wp-list-button-background-color</code></td> <td><code>$fab-list-button-background-color</code></td> <td><p>Background color of the button in a list</p> </td> </tr> <tr> <td><code>$fab-wp-list-button-text-color</code></td> 
<td><code>color-contrast($colors-wp, $fab-wp-list-button-background-color)</code></td> <td><p>Text color of the button in a list</p> </td> </tr> <tr> <td><code>$fab-wp-list-button-background-color-activated</code></td> <td><code>color-shade($fab-wp-list-button-background-color)</code></td> <td><p>Background color of the activated button in a list</p> </td> </tr> <tr> <td><code>$fab-wp-list-button-transition-duration</code></td> <td><code>200ms</code></td> <td><p>Transition duration of the transform and opacity of the button in a list</p> </td> </tr> <tr> <td><code>$fab-wp-list-button-transition-timing-function</code></td> <td><code>ease</code></td> <td><p>Speed curve of the transition of the transform and opacity of the button in a list</p> </td> </tr> <tr> <td><code>$fab-wp-list-button-transition-delay</code></td> <td><code>10ms</code></td> <td><p>Transition delay of the transform and opacity of the button in a list</p> </td> </tr> </tbody> </table> </div> <!-- related link --> <h2><a class="anchor" name="related" href="#related"></a>Related</h2> <a href="/docs/v3/components#fab">Fab Component Docs</a><!-- end content block --> <!-- end body block -->
{ "content_hash": "8ee987b4012e534da4fe8692ccadd949", "timestamp": "", "source": "github", "line_count": 474, "max_line_length": 192, "avg_line_length": 22.957805907172997, "alnum_prop": 0.5600992464620475, "repo_name": "driftyco/ionic-site", "id": "f54d88d0d90c018beaf859453f0c981ccbd485bf", "size": "10886", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "content/docs/v3/3.4.2/api/components/fab/FabList/index.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "31121294" }, { "name": "HTML", "bytes": "886190" }, { "name": "JavaScript", "bytes": "116961330" }, { "name": "Ruby", "bytes": "1066" }, { "name": "Shell", "bytes": "2018" }, { "name": "TypeScript", "bytes": "667842" } ], "symlink_target": "" }
require 'pathname'
require Pathname.new(__FILE__).dirname.dirname.expand_path + 'corosync'

Puppet::Type.type(:cs_order).provide(:crm, :parent => Puppet::Provider::Corosync) do
  desc 'Specific provider for a rather specific type since I currently have no
        plan to abstract corosync/pacemaker vs. keepalived. This provider will
        check the state of current primitive start orders on the system; add,
        delete, or adjust various aspects.'

  # Path to the crm binary for interacting with the cluster configuration.
  commands :cibadmin => 'cibadmin'
  commands :crm_shadow => 'crm_shadow'
  commands :crm => 'crm'
  commands :crm_diff => 'crm_diff'
  commands :crm_attribute => 'crm_attribute'

  # Enumerate every rsc_order constraint currently in the CIB.  Each
  # constraint becomes a provider instance; a first-action/then-action
  # attribute is folded into the primitive name as "primitive:action".
  def self.instances

    block_until_ready

    instances = []

    raw, status = dump_cib
    doc = REXML::Document.new(raw)

    doc.root.elements['configuration'].elements['constraints'].each_element('rsc_order') do |e|
      items = e.attributes
      if items['first-action']
        first = "#{items['first']}:#{items['first-action']}"
      else
        first = items['first']
      end
      if items['then-action']
        second = "#{items['then']}:#{items['then-action']}"
      else
        second = items['then']
      end
      order_instance = {
        :name     => items['id'],
        :ensure   => :present,
        :first    => first,
        :second   => second,
        :score    => items['score'],
        :provider => self.name
      }
      instances << new(order_instance)
    end
    instances
  end

  # Create just adds our resource to the property_hash and flush will take care
  # of actually doing the work.
  def create
    @property_hash = {
      :name   => @resource[:name],
      :ensure => :present,
      :first  => @resource[:first],
      :second => @resource[:second],
      :score  => @resource[:score],
      :cib    => @resource[:cib],
    }
  end

  # Unlike create we actually immediately delete the item.
  def destroy
    debug('Removing order directive') # typo fix: was 'Revmoving'
    crm('configure', 'delete', @resource[:name])
    @property_hash.clear
  end

  # Getters that obtain the first and second primitives and score in our
  # ordering definition that have been populated by prefetch or instances
  # (depends on if you're using puppet resource or not).
  def first
    @property_hash[:first]
  end

  def second
    @property_hash[:second]
  end

  def score
    @property_hash[:score]
  end

  # Our setters for the first and second primitives and score.  Setters are
  # used when the resource already exists so we just update the current value
  # in the property hash and doing this marks it to be flushed.
  def first=(should)
    @property_hash[:first] = should
  end

  def second=(should)
    @property_hash[:second] = should
  end

  def score=(should)
    @property_hash[:score] = should
  end

  # Flush is triggered on anything that has been detected as being
  # modified in the property_hash.  It generates a temporary file with
  # the updates that need to be made.  The temporary file is then used
  # as stdin for the crm command.
  def flush
    unless @property_hash.empty?
      self.class.block_until_ready
      updated = 'order '
      updated << "#{@property_hash[:name]} #{@property_hash[:score]}: "
      updated << "#{@property_hash[:first]} #{@property_hash[:second]}"
      Tempfile.open('puppet_crm_update') do |tmpfile|
        tmpfile.write(updated.rstrip)
        tmpfile.flush
        apply_changes(@resource[:name], tmpfile, 'order')
      end
    end
  end
end
{ "content_hash": "f9223fe103c8e3f365227a9f46bff44c", "timestamp": "", "source": "github", "line_count": 123, "max_line_length": 95, "avg_line_length": 29.617886178861788, "alnum_prop": 0.6310732912434807, "repo_name": "Axam/nsx-library", "id": "9232d3cab639baa2a778b41e134379260a9dae83", "size": "3643", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "deployment/puppet/corosync/lib/puppet/provider/cs_order/crm.rb", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Perl", "bytes": "41847" }, { "name": "Puppet", "bytes": "1457443" }, { "name": "Python", "bytes": "216705" }, { "name": "Ruby", "bytes": "2174332" }, { "name": "Shell", "bytes": "99430" } ], "symlink_target": "" }
# Print the current local time twice, one second apart, demonstrating
# time.strftime / time.localtime / time.sleep.
# print() is called as a function so this runs unchanged under both
# Python 2 (single-argument form) and Python 3.
import time

print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
time.sleep(1)
print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
{ "content_hash": "b33aaa692916431fb7e400925cd634c7", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 69, "avg_line_length": 21.25, "alnum_prop": 0.6352941176470588, "repo_name": "yjwx0017/test", "id": "265631a227b037702747c63d8bc53d2fbb50708f", "size": "247", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "python-codes/100-exercises/example10.py", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "37624" }, { "name": "CMake", "bytes": "1077" }, { "name": "CSS", "bytes": "600" }, { "name": "HTML", "bytes": "1724" }, { "name": "Python", "bytes": "16528" }, { "name": "QMake", "bytes": "878" } ], "symlink_target": "" }
// Top-level router for the resource API surface: project routes (with
// owner-scoped shortcuts) and user routes.
var ResourceRouter = require('express').Router({ mergeParams: true });

var controllers = require('../../controllers');
var ProjectController = controllers.Resources.ProjectController;
var UserController = controllers.Resources.UserController;
var ProjectStack = require('./projectStack');

// Resolve route parameters before any handler runs.
// :owner is a unique username.
ResourceRouter.param('owner', UserController.load);
// :project is a unique project name.
ResourceRouter.param('project', ProjectController.load);

// Shortcut routes without the /users prefix, and without the /projects
// segment where it makes sense.
ResourceRouter.use('/:owner/:project/hubs', ProjectStack.HubRouter);
ResourceRouter.use('/:owner/:project/bundles', ProjectStack.BundleRouter);
ResourceRouter.use('/:owner/:project/matches', ProjectStack.MatchRouter);
ResourceRouter.use('/:owner/projects', require('./project'));

// Paginated project listing.  When finished this should support complete
// filtering and double as a feed.
ResourceRouter.get('/projects', ProjectController.list);

// Fully expanded REST-style routes (also supports operations on users).
ResourceRouter.use('/users', require('./user'));

module.exports = ResourceRouter;
{ "content_hash": "89421659b23ccf89574192031369ad67", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 81, "avg_line_length": 43.333333333333336, "alnum_prop": 0.7630769230769231, "repo_name": "tenevdev/idiot", "id": "9cb283c926ce52c467836c86c67a955f6cad1ea5", "size": "1300", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "server/routes/resources/index.js", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "24139" }, { "name": "CSS", "bytes": "1526990" }, { "name": "CoffeeScript", "bytes": "3263" }, { "name": "Groff", "bytes": "160" }, { "name": "HTML", "bytes": "121733" }, { "name": "JavaScript", "bytes": "666815" }, { "name": "Makefile", "bytes": "745" } ], "symlink_target": "" }
import warnings from tencentcloud.common.abstract_model import AbstractModel class AddNodesRequest(AbstractModel): """AddNodes请求参数结构体 """ def __init__(self): r""" :param Placement: 集群中实例所在的位置。 :type Placement: :class:`tencentcloud.thpc.v20220401.models.Placement` :param ClusterId: 集群ID。 :type ClusterId: str :param ImageId: 指定有效的[镜像](https://cloud.tencent.com/document/product/213/4940)ID,格式形如`img-xxx`。目前仅支持公有镜。 :type ImageId: str :param VirtualPrivateCloud: 私有网络相关信息配置。 :type VirtualPrivateCloud: :class:`tencentcloud.thpc.v20220401.models.VirtualPrivateCloud` :param Count: 添加节点数量。 :type Count: int :param InstanceChargeType: 节点[计费类型](https://cloud.tencent.com/document/product/213/2180)。<br><li>PREPAID:预付费,即包年包月<br><li>POSTPAID_BY_HOUR:按小时后付费<br><li>SPOTPAID:竞价付费<br>默认值:POSTPAID_BY_HOUR。 :type InstanceChargeType: str :param InstanceChargePrepaid: 预付费模式,即包年包月相关参数设置。通过该参数可以指定包年包月节点的购买时长、是否设置自动续费等属性。若指定节点的付费模式为预付费则该参数必传。 :type InstanceChargePrepaid: :class:`tencentcloud.thpc.v20220401.models.InstanceChargePrepaid` :param InstanceType: 节点机型。不同实例机型指定了不同的资源规格。<br><li>具体取值可通过调用接口[DescribeInstanceTypeConfigs](https://cloud.tencent.com/document/api/213/15749)来获得最新的规格表或参见[实例规格](https://cloud.tencent.com/document/product/213/11518)描述。 :type InstanceType: str :param SystemDisk: 节点系统盘配置信息。若不指定该参数,则按照系统默认值进行分配。 :type SystemDisk: list of SystemDisk :param DataDisks: 节点数据盘配置信息。若不指定该参数,则默认不购买数据盘。支持购买的时候指定21块数据盘,其中最多包含1块LOCAL_BASIC数据盘或者LOCAL_SSD数据盘,最多包含20块CLOUD_BASIC数据盘、CLOUD_PREMIUM数据盘或者CLOUD_SSD数据盘。 :type DataDisks: list of DataDisk :param InternetAccessible: 公网带宽相关信息设置。若不指定该参数,则默认公网带宽为0Mbps。 :type InternetAccessible: :class:`tencentcloud.thpc.v20220401.models.InternetAccessible` :param InstanceName: 节点显示名称。 不指定节点显示名称则默认显示‘未命名’。 最多支持60个字符。 :type InstanceName: str :param LoginSettings: 集群登录设置。 :type LoginSettings: :class:`tencentcloud.thpc.v20220401.models.LoginSettings` :param SecurityGroupIds: 集群中实例所属安全组。该参数可以通过调用 
[DescribeSecurityGroups](https://cloud.tencent.com/document/api/215/15808) 的返回值中的sgId字段来获取。若不指定该参数,则绑定默认安全组。 :type SecurityGroupIds: list of str :param ClientToken: 用于保证请求幂等性的字符串。该字符串由客户生成,需保证不同请求之间唯一,最大值不超过64个ASCII字符。若不指定该参数,则无法保证请求的幂等性。 :type ClientToken: str :param QueueName: 队列名称。 :type QueueName: str :param NodeRole: 添加节点类型。默认值:Compute<br><li>Compute:计算节点。<br><li>Login:登录节点。 :type NodeRole: str :param DryRun: 是否只预检此次请求。 true:发送检查请求,不会创建实例。检查项包括是否填写了必需参数,请求格式,业务限制和云服务器库存。 如果检查不通过,则返回对应错误码; 如果检查通过,则返回RequestId. false(默认):发送正常请求,通过检查后直接创建实例 :type DryRun: bool """ self.Placement = None self.ClusterId = None self.ImageId = None self.VirtualPrivateCloud = None self.Count = None self.InstanceChargeType = None self.InstanceChargePrepaid = None self.InstanceType = None self.SystemDisk = None self.DataDisks = None self.InternetAccessible = None self.InstanceName = None self.LoginSettings = None self.SecurityGroupIds = None self.ClientToken = None self.QueueName = None self.NodeRole = None self.DryRun = None def _deserialize(self, params): if params.get("Placement") is not None: self.Placement = Placement() self.Placement._deserialize(params.get("Placement")) self.ClusterId = params.get("ClusterId") self.ImageId = params.get("ImageId") if params.get("VirtualPrivateCloud") is not None: self.VirtualPrivateCloud = VirtualPrivateCloud() self.VirtualPrivateCloud._deserialize(params.get("VirtualPrivateCloud")) self.Count = params.get("Count") self.InstanceChargeType = params.get("InstanceChargeType") if params.get("InstanceChargePrepaid") is not None: self.InstanceChargePrepaid = InstanceChargePrepaid() self.InstanceChargePrepaid._deserialize(params.get("InstanceChargePrepaid")) self.InstanceType = params.get("InstanceType") if params.get("SystemDisk") is not None: self.SystemDisk = [] for item in params.get("SystemDisk"): obj = SystemDisk() obj._deserialize(item) self.SystemDisk.append(obj) if params.get("DataDisks") is not None: self.DataDisks = [] for item in 
params.get("DataDisks"): obj = DataDisk() obj._deserialize(item) self.DataDisks.append(obj) if params.get("InternetAccessible") is not None: self.InternetAccessible = InternetAccessible() self.InternetAccessible._deserialize(params.get("InternetAccessible")) self.InstanceName = params.get("InstanceName") if params.get("LoginSettings") is not None: self.LoginSettings = LoginSettings() self.LoginSettings._deserialize(params.get("LoginSettings")) self.SecurityGroupIds = params.get("SecurityGroupIds") self.ClientToken = params.get("ClientToken") self.QueueName = params.get("QueueName") self.NodeRole = params.get("NodeRole") self.DryRun = params.get("DryRun") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class AddNodesResponse(AbstractModel): """AddNodes返回参数结构体 """ def __init__(self): r""" :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.RequestId = None def _deserialize(self, params): self.RequestId = params.get("RequestId") class BindAutoScalingGroupRequest(AbstractModel): """BindAutoScalingGroup请求参数结构体 """ def __init__(self): r""" :param ClusterId: 集群ID。 :type ClusterId: str :param LaunchConfigurationId: 弹性伸缩启动配置ID。 :type LaunchConfigurationId: str :param AutoScalingGroupId: 弹性伸缩组ID。 :type AutoScalingGroupId: str :param QueueName: 队列名称。 :type QueueName: str :param ExpansionBusyTime: 任务连续等待时间,队列的任务处于连续等待的时间。单位秒。默认值120。 :type ExpansionBusyTime: int :param ShrinkIdleTime: 节点连续空闲(未运行作业)时间,一个节点连续处于空闲状态时间。单位秒。默认值300。 :type ShrinkIdleTime: int :param EnableAutoExpansion: 是否开启自动扩容,默认值true。 :type EnableAutoExpansion: bool :param EnableAutoShrink: 是否开启自动缩容,默认值true。 :type EnableAutoShrink: bool :param DryRun: 是否只预检此次请求。 true:发送检查请求,不会绑定弹性伸缩组。检查项包括是否填写了必需参数,请求格式,业务限制。 如果检查不通过,则返回对应错误码; 如果检查通过,则返回RequestId。 false(默认):发送正常请求,通过检查后直接绑定弹性伸缩组。 :type DryRun: bool """ self.ClusterId 
= None self.LaunchConfigurationId = None self.AutoScalingGroupId = None self.QueueName = None self.ExpansionBusyTime = None self.ShrinkIdleTime = None self.EnableAutoExpansion = None self.EnableAutoShrink = None self.DryRun = None def _deserialize(self, params): self.ClusterId = params.get("ClusterId") self.LaunchConfigurationId = params.get("LaunchConfigurationId") self.AutoScalingGroupId = params.get("AutoScalingGroupId") self.QueueName = params.get("QueueName") self.ExpansionBusyTime = params.get("ExpansionBusyTime") self.ShrinkIdleTime = params.get("ShrinkIdleTime") self.EnableAutoExpansion = params.get("EnableAutoExpansion") self.EnableAutoShrink = params.get("EnableAutoShrink") self.DryRun = params.get("DryRun") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class BindAutoScalingGroupResponse(AbstractModel): """BindAutoScalingGroup返回参数结构体 """ def __init__(self): r""" :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.RequestId = None def _deserialize(self, params): self.RequestId = params.get("RequestId") class CFSOption(AbstractModel): """描述CFS文件系统版本和挂载信息 """ def __init__(self): r""" :param LocalPath: 文件系统本地挂载路径 :type LocalPath: str :param RemotePath: 文件系统远程挂载ip及路径 :type RemotePath: str :param Protocol: 文件系统协议类型,默认值NFS 3.0。 <li>NFS 3.0。 <li>NFS 4.0。 <li>TURBO。 :type Protocol: str :param StorageType: 文件系统存储类型,默认值SD;其中 SD 为通用标准型标准型存储, HP为通用性能型存储, TB为turbo标准型, TP 为turbo性能型。 :type StorageType: str """ self.LocalPath = None self.RemotePath = None self.Protocol = None self.StorageType = None def _deserialize(self, params): self.LocalPath = params.get("LocalPath") self.RemotePath = params.get("RemotePath") self.Protocol = params.get("Protocol") self.StorageType = params.get("StorageType") memeber_set = set(params.keys()) for name, value in vars(self).items(): if 
name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class ClusterOverview(AbstractModel): """集群概览信息。 """ def __init__(self): r""" :param ClusterId: 集群ID。 :type ClusterId: str :param ClusterStatus: 集群状态。取值范围:<br><li>PENDING:创建中<br><li>INITING:初始化中<br><li>INIT_FAILED:初始化失败<br><li>RUNNING:运行中<br><li>TERMINATING:销毁中 :type ClusterStatus: str :param ClusterName: 集群名称。 :type ClusterName: str :param Placement: 集群位置信息。 :type Placement: :class:`tencentcloud.thpc.v20220401.models.Placement` :param CreateTime: 集群创建时间。 :type CreateTime: str :param SchedulerType: 集群调度器。 :type SchedulerType: str :param ComputeNodeCount: 计算节点数量。 :type ComputeNodeCount: int :param ComputeNodeSet: 计算节点概览。 :type ComputeNodeSet: list of ComputeNodeOverview :param ManagerNodeCount: 管控节点数量。 :type ManagerNodeCount: int :param ManagerNodeSet: 管控节点概览。 :type ManagerNodeSet: list of ManagerNodeOverview :param LoginNodeSet: 登录节点概览。 :type LoginNodeSet: list of LoginNodeOverview :param LoginNodeCount: 登录节点数量。 :type LoginNodeCount: int :param VpcId: 集群所属私有网络ID。 :type VpcId: str """ self.ClusterId = None self.ClusterStatus = None self.ClusterName = None self.Placement = None self.CreateTime = None self.SchedulerType = None self.ComputeNodeCount = None self.ComputeNodeSet = None self.ManagerNodeCount = None self.ManagerNodeSet = None self.LoginNodeSet = None self.LoginNodeCount = None self.VpcId = None def _deserialize(self, params): self.ClusterId = params.get("ClusterId") self.ClusterStatus = params.get("ClusterStatus") self.ClusterName = params.get("ClusterName") if params.get("Placement") is not None: self.Placement = Placement() self.Placement._deserialize(params.get("Placement")) self.CreateTime = params.get("CreateTime") self.SchedulerType = params.get("SchedulerType") self.ComputeNodeCount = params.get("ComputeNodeCount") if params.get("ComputeNodeSet") is not None: self.ComputeNodeSet = [] for item in 
params.get("ComputeNodeSet"): obj = ComputeNodeOverview() obj._deserialize(item) self.ComputeNodeSet.append(obj) self.ManagerNodeCount = params.get("ManagerNodeCount") if params.get("ManagerNodeSet") is not None: self.ManagerNodeSet = [] for item in params.get("ManagerNodeSet"): obj = ManagerNodeOverview() obj._deserialize(item) self.ManagerNodeSet.append(obj) if params.get("LoginNodeSet") is not None: self.LoginNodeSet = [] for item in params.get("LoginNodeSet"): obj = LoginNodeOverview() obj._deserialize(item) self.LoginNodeSet.append(obj) self.LoginNodeCount = params.get("LoginNodeCount") self.VpcId = params.get("VpcId") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class ComputeNode(AbstractModel): """计算节点信息。 """ def __init__(self): r""" :param InstanceChargeType: 节点[计费类型](https://cloud.tencent.com/document/product/213/2180)。<br><li>PREPAID:预付费,即包年包月<br><li>POSTPAID_BY_HOUR:按小时后付费<br><li>SPOTPAID:竞价付费<br>默认值:POSTPAID_BY_HOUR。 :type InstanceChargeType: str :param InstanceChargePrepaid: 预付费模式,即包年包月相关参数设置。通过该参数可以指定包年包月节点的购买时长、是否设置自动续费等属性。若指定节点的付费模式为预付费则该参数必传。 :type InstanceChargePrepaid: :class:`tencentcloud.thpc.v20220401.models.InstanceChargePrepaid` :param InstanceType: 节点机型。不同实例机型指定了不同的资源规格。 <br><li>具体取值可通过调用接口[DescribeInstanceTypeConfigs](https://cloud.tencent.com/document/api/213/15749)来获得最新的规格表或参见[实例规格](https://cloud.tencent.com/document/product/213/11518)描述。 :type InstanceType: str :param SystemDisk: 节点系统盘配置信息。若不指定该参数,则按照系统默认值进行分配。 :type SystemDisk: :class:`tencentcloud.thpc.v20220401.models.SystemDisk` :param DataDisks: 节点数据盘配置信息。若不指定该参数,则默认不购买数据盘。支持购买的时候指定21块数据盘,其中最多包含1块LOCAL_BASIC数据盘或者LOCAL_SSD数据盘,最多包含20块CLOUD_BASIC数据盘、CLOUD_PREMIUM数据盘或者CLOUD_SSD数据盘。 :type DataDisks: list of DataDisk :param InternetAccessible: 公网带宽相关信息设置。若不指定该参数,则默认公网带宽为0Mbps。 :type InternetAccessible: 
:class:`tencentcloud.thpc.v20220401.models.InternetAccessible` :param InstanceName: 节点显示名称。<br><li> 不指定节点显示名称则默认显示‘未命名’。 最多支持60个字符。 :type InstanceName: str """ self.InstanceChargeType = None self.InstanceChargePrepaid = None self.InstanceType = None self.SystemDisk = None self.DataDisks = None self.InternetAccessible = None self.InstanceName = None def _deserialize(self, params): self.InstanceChargeType = params.get("InstanceChargeType") if params.get("InstanceChargePrepaid") is not None: self.InstanceChargePrepaid = InstanceChargePrepaid() self.InstanceChargePrepaid._deserialize(params.get("InstanceChargePrepaid")) self.InstanceType = params.get("InstanceType") if params.get("SystemDisk") is not None: self.SystemDisk = SystemDisk() self.SystemDisk._deserialize(params.get("SystemDisk")) if params.get("DataDisks") is not None: self.DataDisks = [] for item in params.get("DataDisks"): obj = DataDisk() obj._deserialize(item) self.DataDisks.append(obj) if params.get("InternetAccessible") is not None: self.InternetAccessible = InternetAccessible() self.InternetAccessible._deserialize(params.get("InternetAccessible")) self.InstanceName = params.get("InstanceName") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class ComputeNodeOverview(AbstractModel): """计算节点概览。 """ def __init__(self): r""" :param NodeId: 计算节点ID。 注意:此字段可能返回 null,表示取不到有效值。 :type NodeId: str """ self.NodeId = None def _deserialize(self, params): self.NodeId = params.get("NodeId") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class CreateClusterRequest(AbstractModel): """CreateCluster请求参数结构体 """ def __init__(self): r""" :param Placement: 集群中实例所在的位置。 :type Placement: :class:`tencentcloud.thpc.v20220401.models.Placement` :param ManagerNode: 指定管理节点。 :type ManagerNode: :class:`tencentcloud.thpc.v20220401.models.ManagerNode` :param ManagerNodeCount: 指定管理节点的数量。默认取值:1。取值范围:1~2。 :type ManagerNodeCount: int :param ComputeNode: 指定计算节点。 :type ComputeNode: :class:`tencentcloud.thpc.v20220401.models.ComputeNode` :param ComputeNodeCount: 指定计算节点的数量。默认取值:0。 :type ComputeNodeCount: int :param SchedulerType: 调度器类型。默认取值:SLURM。<br><li>SGE:SGE调度器。<br><li>SLURM:SLURM调度器。 :type SchedulerType: str :param ImageId: 指定有效的[镜像](https://cloud.tencent.com/document/product/213/4940)ID,格式形如`img-xxx`。目前仅支持公有镜像。 :type ImageId: str :param VirtualPrivateCloud: 私有网络相关信息配置。 :type VirtualPrivateCloud: :class:`tencentcloud.thpc.v20220401.models.VirtualPrivateCloud` :param LoginSettings: 集群登录设置。 :type LoginSettings: :class:`tencentcloud.thpc.v20220401.models.LoginSettings` :param SecurityGroupIds: 集群中实例所属安全组。该参数可以通过调用 [DescribeSecurityGroups](https://cloud.tencent.com/document/api/215/15808) 的返回值中的sgId字段来获取。若不指定该参数,则绑定默认安全组。 :type SecurityGroupIds: list of str :param ClientToken: 用于保证请求幂等性的字符串。该字符串由客户生成,需保证不同请求之间唯一,最大值不超过64个ASCII字符。若不指定该参数,则无法保证请求的幂等性。 :type ClientToken: str :param DryRun: 是否只预检此次请求。 true:发送检查请求,不会创建实例。检查项包括是否填写了必需参数,请求格式,业务限制和云服务器库存。 如果检查不通过,则返回对应错误码; 如果检查通过,则返回RequestId. 
false(默认):发送正常请求,通过检查后直接创建实例 :type DryRun: bool :param AccountType: 域名字服务类型。默认取值:NIS。 <li>NIS:NIS域名字服务。 :type AccountType: str :param ClusterName: 集群显示名称。 :type ClusterName: str :param StorageOption: 集群存储选项 :type StorageOption: :class:`tencentcloud.thpc.v20220401.models.StorageOption` :param LoginNode: 指定登录节点。 :type LoginNode: :class:`tencentcloud.thpc.v20220401.models.LoginNode` :param LoginNodeCount: 指定登录节点的数量。默认取值:0。取值范围:0~10。 :type LoginNodeCount: int :param Tags: 创建集群时同时绑定的标签对说明。 :type Tags: list of Tag :param AutoScalingType: 弹性伸缩类型。<br><li>AS:集群自动扩缩容由[弹性伸缩](https://cloud.tencent.com/document/product/377/3154)产品实现。<br><li>THPC_AS:集群自动扩缩容由THPC产品内部实现。 :type AutoScalingType: str """ self.Placement = None self.ManagerNode = None self.ManagerNodeCount = None self.ComputeNode = None self.ComputeNodeCount = None self.SchedulerType = None self.ImageId = None self.VirtualPrivateCloud = None self.LoginSettings = None self.SecurityGroupIds = None self.ClientToken = None self.DryRun = None self.AccountType = None self.ClusterName = None self.StorageOption = None self.LoginNode = None self.LoginNodeCount = None self.Tags = None self.AutoScalingType = None def _deserialize(self, params): if params.get("Placement") is not None: self.Placement = Placement() self.Placement._deserialize(params.get("Placement")) if params.get("ManagerNode") is not None: self.ManagerNode = ManagerNode() self.ManagerNode._deserialize(params.get("ManagerNode")) self.ManagerNodeCount = params.get("ManagerNodeCount") if params.get("ComputeNode") is not None: self.ComputeNode = ComputeNode() self.ComputeNode._deserialize(params.get("ComputeNode")) self.ComputeNodeCount = params.get("ComputeNodeCount") self.SchedulerType = params.get("SchedulerType") self.ImageId = params.get("ImageId") if params.get("VirtualPrivateCloud") is not None: self.VirtualPrivateCloud = VirtualPrivateCloud() self.VirtualPrivateCloud._deserialize(params.get("VirtualPrivateCloud")) if params.get("LoginSettings") is not None: 
self.LoginSettings = LoginSettings() self.LoginSettings._deserialize(params.get("LoginSettings")) self.SecurityGroupIds = params.get("SecurityGroupIds") self.ClientToken = params.get("ClientToken") self.DryRun = params.get("DryRun") self.AccountType = params.get("AccountType") self.ClusterName = params.get("ClusterName") if params.get("StorageOption") is not None: self.StorageOption = StorageOption() self.StorageOption._deserialize(params.get("StorageOption")) if params.get("LoginNode") is not None: self.LoginNode = LoginNode() self.LoginNode._deserialize(params.get("LoginNode")) self.LoginNodeCount = params.get("LoginNodeCount") if params.get("Tags") is not None: self.Tags = [] for item in params.get("Tags"): obj = Tag() obj._deserialize(item) self.Tags.append(obj) self.AutoScalingType = params.get("AutoScalingType") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class CreateClusterResponse(AbstractModel): """CreateCluster返回参数结构体 """ def __init__(self): r""" :param ClusterId: 集群ID。 注意:此字段可能返回 null,表示取不到有效值。 :type ClusterId: str :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.ClusterId = None self.RequestId = None def _deserialize(self, params): self.ClusterId = params.get("ClusterId") self.RequestId = params.get("RequestId") class DataDisk(AbstractModel): """描述了数据盘的信息 """ def __init__(self): r""" :param DiskSize: 数据盘大小,单位:GB。最小调整步长为10G,不同数据盘类型取值范围不同,具体限制详见:[存储概述](https://cloud.tencent.com/document/product/213/4952)。默认值为0,表示不购买数据盘。更多限制详见产品文档。 :type DiskSize: int :param DiskType: 数据盘类型。数据盘类型限制详见[存储概述](https://cloud.tencent.com/document/product/213/4952)。取值范围:<br><li>LOCAL_BASIC:本地硬盘<br><li>LOCAL_SSD:本地SSD硬盘<br><li>LOCAL_NVME:本地NVME硬盘,与InstanceType强相关,不支持指定<br><li>LOCAL_PRO:本地HDD硬盘,与InstanceType强相关,不支持指定<br><li>CLOUD_BASIC:普通云硬盘<br><li>CLOUD_PREMIUM:高性能云硬盘<br><li>CLOUD_SSD:SSD云硬盘<br><li>CLOUD_HSSD:增强型SSD云硬盘<br><li>CLOUD_TSSD:极速型SSD云硬盘<br><br>默认取值:LOCAL_BASIC。 :type DiskType: str """ self.DiskSize = None self.DiskType = None def _deserialize(self, params): self.DiskSize = params.get("DiskSize") self.DiskType = params.get("DiskType") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class DeleteClusterRequest(AbstractModel): """DeleteCluster请求参数结构体 """ def __init__(self): r""" :param ClusterId: 集群ID。 :type ClusterId: str """ self.ClusterId = None def _deserialize(self, params): self.ClusterId = params.get("ClusterId") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class DeleteClusterResponse(AbstractModel): """DeleteCluster返回参数结构体 """ def __init__(self): r""" :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.RequestId = None def _deserialize(self, params): self.RequestId = params.get("RequestId") class DeleteNodesRequest(AbstractModel): """DeleteNodes请求参数结构体 """ def __init__(self): r""" :param ClusterId: 集群ID。 :type ClusterId: str :param NodeIds: 节点ID。 :type NodeIds: list of str """ self.ClusterId = None self.NodeIds = None def _deserialize(self, params): self.ClusterId = params.get("ClusterId") self.NodeIds = params.get("NodeIds") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class DeleteNodesResponse(AbstractModel): """DeleteNodes返回参数结构体 """ def __init__(self): r""" :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.RequestId = None def _deserialize(self, params): self.RequestId = params.get("RequestId") class DescribeClustersRequest(AbstractModel): """DescribeClusters请求参数结构体 """ def __init__(self): r""" :param ClusterIds: 集群ID列表。通过该参数可以指定需要查询信息的集群列表。<br>如果您不指定该参数,则返回Limit数量以内的集群信息。 :type ClusterIds: list of str :param Offset: 偏移量,默认为0。关于`Offset`的更进一步介绍请参考 API [简介](https://cloud.tencent.com/document/api/213/15688)中的相关小节。 :type Offset: int :param Limit: 返回数量,默认为20,最大值为100。关于`Limit`的更进一步介绍请参考 API [简介](https://cloud.tencent.com/document/api/213/15688)中的相关小节。 :type Limit: int """ self.ClusterIds = None self.Offset = None self.Limit = None def _deserialize(self, params): self.ClusterIds = params.get("ClusterIds") self.Offset = params.get("Offset") self.Limit = params.get("Limit") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class DescribeClustersResponse(AbstractModel): """DescribeClusters返回参数结构体 """ def __init__(self): r""" :param ClusterSet: 集群概览信息列表。 :type ClusterSet: list of ClusterOverview :param TotalCount: 集群数量。 :type TotalCount: int :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.ClusterSet = None self.TotalCount = None self.RequestId = None def _deserialize(self, params): if params.get("ClusterSet") is not None: self.ClusterSet = [] for item in params.get("ClusterSet"): obj = ClusterOverview() obj._deserialize(item) self.ClusterSet.append(obj) self.TotalCount = params.get("TotalCount") self.RequestId = params.get("RequestId") class ExpansionNodeConfig(AbstractModel): """弹性扩容节点配置信息。 """ def __init__(self): r""" :param Placement: 扩容实例所在的位置。 :type Placement: :class:`tencentcloud.thpc.v20220401.models.Placement` :param InstanceChargeType: 节点[计费类型](https://cloud.tencent.com/document/product/213/2180)。<br><li>PREPAID:预付费,即包年包月<br><li>POSTPAID_BY_HOUR:按小时后付费<br><li>SPOTPAID:竞价付费<br>默认值:POSTPAID_BY_HOUR。 :type InstanceChargeType: str :param InstanceChargePrepaid: 预付费模式,即包年包月相关参数设置。通过该参数可以指定包年包月节点的购买时长、是否设置自动续费等属性。若指定节点的付费模式为预付费则该参数必传。 :type InstanceChargePrepaid: :class:`tencentcloud.thpc.v20220401.models.InstanceChargePrepaid` :param InstanceType: 节点机型。不同实例机型指定了不同的资源规格。 <br><li>具体取值可通过调用接口[DescribeInstanceTypeConfigs](https://cloud.tencent.com/document/api/213/15749)来获得最新的规格表或参见[实例规格](https://cloud.tencent.com/document/product/213/11518)描述。 :type InstanceType: str :param VirtualPrivateCloud: 私有网络相关信息配置。 :type VirtualPrivateCloud: :class:`tencentcloud.thpc.v20220401.models.VirtualPrivateCloud` """ self.Placement = None self.InstanceChargeType = None self.InstanceChargePrepaid = None self.InstanceType = None self.VirtualPrivateCloud = None def _deserialize(self, params): if params.get("Placement") is not None: self.Placement = Placement() self.Placement._deserialize(params.get("Placement")) self.InstanceChargeType = 
params.get("InstanceChargeType") if params.get("InstanceChargePrepaid") is not None: self.InstanceChargePrepaid = InstanceChargePrepaid() self.InstanceChargePrepaid._deserialize(params.get("InstanceChargePrepaid")) self.InstanceType = params.get("InstanceType") if params.get("VirtualPrivateCloud") is not None: self.VirtualPrivateCloud = VirtualPrivateCloud() self.VirtualPrivateCloud._deserialize(params.get("VirtualPrivateCloud")) memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class GooseFSOption(AbstractModel): """描述GooseFS挂载信息 """ def __init__(self): r""" :param LocalPath: 文件系统本地挂载路径 :type LocalPath: str :param RemotePath: 文件系统远程挂载路径 :type RemotePath: str :param Masters: 文件系统master的ip和端口 :type Masters: list of str """ self.LocalPath = None self.RemotePath = None self.Masters = None def _deserialize(self, params): self.LocalPath = params.get("LocalPath") self.RemotePath = params.get("RemotePath") self.Masters = params.get("Masters") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class InstanceChargePrepaid(AbstractModel): """描述了实例的计费模式 """ def __init__(self): r""" :param Period: 购买实例的时长,单位:月。取值范围:1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 24, 36, 48, 60。 :type Period: int :param RenewFlag: 自动续费标识。取值范围: NOTIFY_AND_AUTO_RENEW:通知过期且自动续费 NOTIFY_AND_MANUAL_RENEW:通知过期不自动续费 DISABLE_NOTIFY_AND_MANUAL_RENEW:不通知过期不自动续费 默认取值:NOTIFY_AND_MANUAL_RENEW。若该参数指定为NOTIFY_AND_AUTO_RENEW,在账户余额充足的情况下,实例到期后将按月自动续费。 :type RenewFlag: str """ self.Period = None self.RenewFlag = None def _deserialize(self, params): self.Period = params.get("Period") self.RenewFlag = params.get("RenewFlag") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class InternetAccessible(AbstractModel): """描述了实例的公网可访问性,声明了实例的公网使用计费模式,最大带宽等 """ def __init__(self): r""" :param InternetChargeType: 网络计费类型。取值范围: BANDWIDTH_PREPAID:预付费按带宽结算 TRAFFIC_POSTPAID_BY_HOUR:流量按小时后付费 BANDWIDTH_POSTPAID_BY_HOUR:带宽按小时后付费 BANDWIDTH_PACKAGE:带宽包用户 默认取值:非带宽包用户默认与子机付费类型保持一致。 :type InternetChargeType: str :param InternetMaxBandwidthOut: 公网出带宽上限,单位:Mbps。默认值:0Mbps。不同机型带宽上限范围不一致,具体限制详见购买网络带宽。 :type InternetMaxBandwidthOut: int """ self.InternetChargeType = None self.InternetMaxBandwidthOut = None def _deserialize(self, params): self.InternetChargeType = params.get("InternetChargeType") self.InternetMaxBandwidthOut = params.get("InternetMaxBandwidthOut") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class LoginNode(AbstractModel): """登录节点信息。 """ def __init__(self): r""" :param InstanceChargeType: 节点[计费类型](https://cloud.tencent.com/document/product/213/2180)。<br><li>PREPAID:预付费,即包年包月<br><li>POSTPAID_BY_HOUR:按小时后付费<br>默认值:POSTPAID_BY_HOUR。 :type InstanceChargeType: str :param InstanceChargePrepaid: 预付费模式,即包年包月相关参数设置。通过该参数可以指定包年包月节点的购买时长、是否设置自动续费等属性。若指定节点的付费模式为预付费则该参数必传。 :type InstanceChargePrepaid: :class:`tencentcloud.thpc.v20220401.models.InstanceChargePrepaid` :param InstanceType: 节点机型。不同实例机型指定了不同的资源规格。 <br><li>具体取值可通过调用接口[DescribeInstanceTypeConfigs](https://cloud.tencent.com/document/api/213/15749)来获得最新的规格表或参见[实例规格](https://cloud.tencent.com/document/product/213/11518)描述。 :type InstanceType: str :param SystemDisk: 节点系统盘配置信息。若不指定该参数,则按照系统默认值进行分配。 :type SystemDisk: list of SystemDisk :param DataDisks: 节点数据盘配置信息。若不指定该参数,则默认不购买数据盘。支持购买的时候指定21块数据盘,其中最多包含1块LOCAL_BASIC数据盘或者LOCAL_SSD数据盘,最多包含20块CLOUD_BASIC数据盘、CLOUD_PREMIUM数据盘或者CLOUD_SSD数据盘。 :type DataDisks: list of DataDisk :param InternetAccessible: 公网带宽相关信息设置。若不指定该参数,则默认公网带宽为0Mbps。 :type InternetAccessible: list of InternetAccessible :param InstanceName: 节点显示名称。<br><li> 不指定节点显示名称则默认显示‘未命名’。 最多支持60个字符。 :type InstanceName: str """ self.InstanceChargeType = None self.InstanceChargePrepaid = None self.InstanceType = None self.SystemDisk = None self.DataDisks = None self.InternetAccessible = None self.InstanceName = None def _deserialize(self, params): self.InstanceChargeType = params.get("InstanceChargeType") if params.get("InstanceChargePrepaid") is not None: self.InstanceChargePrepaid = InstanceChargePrepaid() self.InstanceChargePrepaid._deserialize(params.get("InstanceChargePrepaid")) self.InstanceType = params.get("InstanceType") if params.get("SystemDisk") is not None: self.SystemDisk = [] for item in params.get("SystemDisk"): obj = SystemDisk() obj._deserialize(item) self.SystemDisk.append(obj) if params.get("DataDisks") is not None: self.DataDisks = [] for item in 
params.get("DataDisks"): obj = DataDisk() obj._deserialize(item) self.DataDisks.append(obj) if params.get("InternetAccessible") is not None: self.InternetAccessible = [] for item in params.get("InternetAccessible"): obj = InternetAccessible() obj._deserialize(item) self.InternetAccessible.append(obj) self.InstanceName = params.get("InstanceName") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class LoginNodeOverview(AbstractModel): """登录节点概览。 """ def __init__(self): r""" :param NodeId: 登录节点ID。 :type NodeId: str """ self.NodeId = None def _deserialize(self, params): self.NodeId = params.get("NodeId") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class LoginSettings(AbstractModel): """描述了实例登录相关配置与信息。 """ def __init__(self): r""" :param Password: 实例登录密码。不同操作系统类型密码复杂度限制不一样,具体如下:<br><li>Linux实例密码必须8到30位,至少包括两项[a-z],[A-Z]、[0-9] 和 [( ) \` ~ ! @ # $ % ^ & * - + = | { } [ ] : ; ' , . ? / ]中的特殊符号。<br><li>Windows实例密码必须12到30位,至少包括三项[a-z],[A-Z],[0-9] 和 [( ) \` ~ ! @ # $ % ^ & * - + = | { } [ ] : ; ' , . ? /]中的特殊符号。<br><br>若不指定该参数,则由系统随机生成密码,并通过站内信方式通知到用户。 :type Password: str """ self.Password = None def _deserialize(self, params): self.Password = params.get("Password") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class ManagerNode(AbstractModel): """管控节点信息 """ def __init__(self): r""" :param InstanceChargeType: 节点[计费类型](https://cloud.tencent.com/document/product/213/2180)。<br><li>PREPAID:预付费,即包年包月<br><li>POSTPAID_BY_HOUR:按小时后付费<br>默认值:POSTPAID_BY_HOUR。 :type InstanceChargeType: str :param InstanceChargePrepaid: 预付费模式,即包年包月相关参数设置。通过该参数可以指定包年包月节点的购买时长、是否设置自动续费等属性。若指定节点的付费模式为预付费则该参数必传。 :type InstanceChargePrepaid: :class:`tencentcloud.thpc.v20220401.models.InstanceChargePrepaid` :param InstanceType: 节点机型。不同实例机型指定了不同的资源规格。 <br><li>对于付费模式为PREPAID或POSTPAID\_BY\_HOUR的实例创建,具体取值可通过调用接口[DescribeInstanceTypeConfigs](https://cloud.tencent.com/document/api/213/15749)来获得最新的规格表或参见[实例规格](https://cloud.tencent.com/document/product/213/11518)描述。 :type InstanceType: str :param SystemDisk: 节点系统盘配置信息。若不指定该参数,则按照系统默认值进行分配。 :type SystemDisk: :class:`tencentcloud.thpc.v20220401.models.SystemDisk` :param DataDisks: 节点数据盘配置信息。若不指定该参数,则默认不购买数据盘。支持购买的时候指定21块数据盘,其中最多包含1块LOCAL_BASIC数据盘或者LOCAL_SSD数据盘,最多包含20块CLOUD_BASIC数据盘、CLOUD_PREMIUM数据盘或者CLOUD_SSD数据盘。 :type DataDisks: list of DataDisk :param InternetAccessible: 公网带宽相关信息设置。若不指定该参数,则默认公网带宽为0Mbps。 :type InternetAccessible: :class:`tencentcloud.thpc.v20220401.models.InternetAccessible` :param InstanceName: 节点显示名称。<br><li> 不指定节点显示名称则默认显示‘未命名’。 </li><li>购买多个节点,如果指定模式串`{R:x}`,表示生成数字[`[x, x+n-1]`,其中`n`表示购买节点的数量,例如`server_{R:3}`,购买1个时,节点显示名称为`server_3`;购买2个时,节点显示名称分别为`server_3`,`server_4`。支持指定多个模式串`{R:x}`。 购买多个节点,如果不指定模式串,则在节点显示名称添加后缀`1、2...n`,其中`n`表示购买节点的数量,例如`server_`,购买2个时,节点显示名称分别为`server_1`,`server_2`。</li><li> 最多支持60个字符(包含模式串)。 :type InstanceName: str """ self.InstanceChargeType = None self.InstanceChargePrepaid = None self.InstanceType = None self.SystemDisk = None self.DataDisks = None self.InternetAccessible = None self.InstanceName = None def _deserialize(self, params): self.InstanceChargeType = params.get("InstanceChargeType") if params.get("InstanceChargePrepaid") is not None: self.InstanceChargePrepaid = 
InstanceChargePrepaid() self.InstanceChargePrepaid._deserialize(params.get("InstanceChargePrepaid")) self.InstanceType = params.get("InstanceType") if params.get("SystemDisk") is not None: self.SystemDisk = SystemDisk() self.SystemDisk._deserialize(params.get("SystemDisk")) if params.get("DataDisks") is not None: self.DataDisks = [] for item in params.get("DataDisks"): obj = DataDisk() obj._deserialize(item) self.DataDisks.append(obj) if params.get("InternetAccessible") is not None: self.InternetAccessible = InternetAccessible() self.InternetAccessible._deserialize(params.get("InternetAccessible")) self.InstanceName = params.get("InstanceName") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class ManagerNodeOverview(AbstractModel): """管控节点概览。 """ def __init__(self): r""" :param NodeId: 管控节点ID。 注意:此字段可能返回 null,表示取不到有效值。 :type NodeId: str """ self.NodeId = None def _deserialize(self, params): self.NodeId = params.get("NodeId") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class Placement(AbstractModel): """描述了实例的抽象位置 """ def __init__(self): r""" :param Zone: 实例所属的可用区名称。该参数可以通过调用 [DescribeZones](https://cloud.tencent.com/document/product/213/15707) 的返回值中的Zone字段来获取。 :type Zone: str """ self.Zone = None def _deserialize(self, params): self.Zone = params.get("Zone") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class QueueConfig(AbstractModel): """扩容队列配置。 """ def __init__(self): r""" :param QueueName: 队列名称。 :type QueueName: str :param MinSize: 队列中弹性节点数量最小值。取值范围0~200。 :type MinSize: int :param MaxSize: 队列中弹性节点数量最大值。取值范围0~200。 :type MaxSize: int :param EnableAutoExpansion: 是否开启自动扩容。 :type EnableAutoExpansion: bool :param EnableAutoShrink: 是否开启自动缩容。 :type EnableAutoShrink: bool :param ImageId: 指定有效的[镜像](https://cloud.tencent.com/document/product/213/4940)ID,格式形如`img-xxx`。目前仅支持公有镜和特定自定义镜像。 :type ImageId: str :param SystemDisk: 节点系统盘配置信息。若不指定该参数,则按照系统默认值进行分配。 :type SystemDisk: :class:`tencentcloud.thpc.v20220401.models.SystemDisk` :param DataDisks: 节点数据盘配置信息。若不指定该参数,则默认不购买数据盘。支持购买的时候指定21块数据盘,其中最多包含1块LOCAL_BASIC数据盘或者LOCAL_SSD数据盘,最多包含20块CLOUD_BASIC数据盘、CLOUD_PREMIUM数据盘或者CLOUD_SSD数据盘。 :type DataDisks: list of DataDisk :param InternetAccessible: 公网带宽相关信息设置。若不指定该参数,则默认公网带宽为0Mbps。 :type InternetAccessible: :class:`tencentcloud.thpc.v20220401.models.InternetAccessible` :param ExpansionNodeConfigs: 扩容节点配置信息。 :type ExpansionNodeConfigs: list of ExpansionNodeConfig """ self.QueueName = None self.MinSize = None self.MaxSize = None self.EnableAutoExpansion = None self.EnableAutoShrink = None self.ImageId = None self.SystemDisk = None self.DataDisks = None self.InternetAccessible = None self.ExpansionNodeConfigs = None def _deserialize(self, params): self.QueueName = params.get("QueueName") self.MinSize = params.get("MinSize") self.MaxSize = params.get("MaxSize") self.EnableAutoExpansion = params.get("EnableAutoExpansion") self.EnableAutoShrink = params.get("EnableAutoShrink") self.ImageId = params.get("ImageId") if params.get("SystemDisk") is not None: self.SystemDisk = SystemDisk() self.SystemDisk._deserialize(params.get("SystemDisk")) if params.get("DataDisks") is not None: self.DataDisks = [] for item in params.get("DataDisks"): obj = DataDisk() obj._deserialize(item) self.DataDisks.append(obj) if params.get("InternetAccessible") is not None: 
self.InternetAccessible = InternetAccessible() self.InternetAccessible._deserialize(params.get("InternetAccessible")) if params.get("ExpansionNodeConfigs") is not None: self.ExpansionNodeConfigs = [] for item in params.get("ExpansionNodeConfigs"): obj = ExpansionNodeConfig() obj._deserialize(item) self.ExpansionNodeConfigs.append(obj) memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class SetAutoScalingConfigurationRequest(AbstractModel): """SetAutoScalingConfiguration请求参数结构体 """ def __init__(self): r""" :param ClusterId: 集群ID。 :type ClusterId: str :param ExpansionBusyTime: 任务连续等待时间,队列的任务处于连续等待的时间。单位秒。默认值120。 :type ExpansionBusyTime: int :param ShrinkIdleTime: 节点连续空闲(未运行作业)时间,一个节点连续处于空闲状态时间。单位秒。默认值300。 :type ShrinkIdleTime: int :param QueueConfigs: 扩容队列配置列表。 :type QueueConfigs: list of QueueConfig :param DryRun: 是否只预检此次请求。 true:发送检查请求,不会绑定弹性伸缩组。检查项包括是否填写了必需参数,请求格式,业务限制。 如果检查不通过,则返回对应错误码; 如果检查通过,则返回RequestId。 false(默认):发送正常请求,通过检查后直接绑定弹性伸缩组。 :type DryRun: bool """ self.ClusterId = None self.ExpansionBusyTime = None self.ShrinkIdleTime = None self.QueueConfigs = None self.DryRun = None def _deserialize(self, params): self.ClusterId = params.get("ClusterId") self.ExpansionBusyTime = params.get("ExpansionBusyTime") self.ShrinkIdleTime = params.get("ShrinkIdleTime") if params.get("QueueConfigs") is not None: self.QueueConfigs = [] for item in params.get("QueueConfigs"): obj = QueueConfig() obj._deserialize(item) self.QueueConfigs.append(obj) self.DryRun = params.get("DryRun") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class SetAutoScalingConfigurationResponse(AbstractModel): """SetAutoScalingConfiguration返回参数结构体 """ def __init__(self): r""" :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.RequestId = None def _deserialize(self, params): self.RequestId = params.get("RequestId") class StorageOption(AbstractModel): """描述集群文件系统选项 """ def __init__(self): r""" :param CFSOptions: 集群挂载CFS文件系统选项 :type CFSOptions: list of CFSOption :param GooseFSOptions: 集群挂在GooseFS文件系统选项 :type GooseFSOptions: list of GooseFSOption """ self.CFSOptions = None self.GooseFSOptions = None def _deserialize(self, params): if params.get("CFSOptions") is not None: self.CFSOptions = [] for item in params.get("CFSOptions"): obj = CFSOption() obj._deserialize(item) self.CFSOptions.append(obj) if params.get("GooseFSOptions") is not None: self.GooseFSOptions = [] for item in params.get("GooseFSOptions"): obj = GooseFSOption() obj._deserialize(item) self.GooseFSOptions.append(obj) memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class SystemDisk(AbstractModel): """描述了操作系统所在块设备即系统盘的信息 """ def __init__(self): r""" :param DiskType: 系统盘类型。系统盘类型限制详见存储概述。取值范围: LOCAL_BASIC:本地硬盘 LOCAL_SSD:本地SSD硬盘 CLOUD_BASIC:普通云硬盘 CLOUD_SSD:SSD云硬盘 CLOUD_PREMIUM:高性能云硬盘 默认取值:当前有库存的硬盘类型。 :type DiskType: str :param DiskSize: 系统盘大小,单位:GB。默认值为 50 :type DiskSize: int """ self.DiskType = None self.DiskSize = None def _deserialize(self, params): self.DiskType = params.get("DiskType") self.DiskSize = params.get("DiskSize") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." 
% ",".join(memeber_set)) class Tag(AbstractModel): """标签键值对。 """ def __init__(self): r""" :param Key: 标签键 :type Key: str :param Value: 标签值 :type Value: str """ self.Key = None self.Value = None def _deserialize(self, params): self.Key = params.get("Key") self.Value = params.get("Value") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set)) class VirtualPrivateCloud(AbstractModel): """描述了VPC相关信息 """ def __init__(self): r""" :param VpcId: 私有网络ID,形如`vpc-xxx`。有效的VpcId可通过登录[控制台](https://console.cloud.tencent.com/vpc/vpc?rid=1)查询;也可以调用接口 [DescribeVpcEx](/document/api/215/1372) ,从接口返回中的`unVpcId`字段获取。若在创建子机时VpcId与SubnetId同时传入`DEFAULT`,则强制使用默认vpc网络。 :type VpcId: str :param SubnetId: 私有网络子网ID,形如`subnet-xxx`。有效的私有网络子网ID可通过登录[控制台](https://console.cloud.tencent.com/vpc/subnet?rid=1)查询;也可以调用接口 [DescribeSubnets](/document/api/215/15784) ,从接口返回中的`unSubnetId`字段获取。若在创建子机时SubnetId与VpcId同时传入`DEFAULT`,则强制使用默认vpc网络。 :type SubnetId: str """ self.VpcId = None self.SubnetId = None def _deserialize(self, params): self.VpcId = params.get("VpcId") self.SubnetId = params.get("SubnetId") memeber_set = set(params.keys()) for name, value in vars(self).items(): if name in memeber_set: memeber_set.remove(name) if len(memeber_set) > 0: warnings.warn("%s fileds are useless." % ",".join(memeber_set))
{ "content_hash": "1d3809e67bc59aec8ef95969087b35a5", "timestamp": "", "source": "github", "line_count": 1391, "max_line_length": 410, "avg_line_length": 36.835370237239395, "alnum_prop": 0.6278933603965806, "repo_name": "tzpBingo/github-trending", "id": "8c5467709a71be68068a6ce638ab09243c620b5f", "size": "61482", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "codespace/python/tencentcloud/thpc/v20220401/models.py", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "11470" }, { "name": "HTML", "bytes": "1543" }, { "name": "Python", "bytes": "49985109" }, { "name": "Shell", "bytes": "18039" } ], "symlink_target": "" }
<!doctype html> <html> <title>npm-build</title> <meta http-equiv="content-type" value="text/html;utf-8"> <link rel="stylesheet" type="text/css" href="../../static/style.css"> <link rel="canonical" href="https://www.npmjs.org/doc/cli/npm-build.html"> <script async=true src="../../static/toc.js"></script> <body> <div id="wrapper"> <h1><a href="../cli/npm-build.html">npm-build</a></h1> <p>Build a package</p> <h2 id="synopsis">SYNOPSIS</h2> <pre><code>npm build [&lt;package-folder&gt;] </code></pre><ul> <li><code>&lt;package-folder&gt;</code>: A folder containing a <code>package.json</code> file in its root.</li> </ul> <h2 id="description">DESCRIPTION</h2> <p>This is the plumbing command called by <code>npm link</code> and <code>npm install</code>.</p> <p>It should generally be called during installation, but if you need to run it directly, run:</p> <pre><code>npm run-script build </code></pre><h2 id="see-also">SEE ALSO</h2> <ul> <li><a href="../cli/npm-install.html">npm-install(1)</a></li> <li><a href="../cli/npm-link.html">npm-link(1)</a></li> <li><a href="../misc/npm-scripts.html">npm-scripts(7)</a></li> <li><a href="../files/package.json.html">package.json(5)</a></li> </ul> </div> <table border=0 cellspacing=0 cellpadding=0 id=npmlogo> <tr><td style="width:180px;height:10px;background:rgb(237,127,127)" colspan=18>&nbsp;</td></tr> <tr><td rowspan=4 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td><td style="width:40px;height:10px;background:#fff" colspan=4>&nbsp;</td><td style="width:10px;height:10px;background:rgb(237,127,127)" rowspan=4>&nbsp;</td><td style="width:40px;height:10px;background:#fff" colspan=4>&nbsp;</td><td rowspan=4 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td><td colspan=6 style="width:60px;height:10px;background:#fff">&nbsp;</td><td style="width:10px;height:10px;background:rgb(237,127,127)" rowspan=4>&nbsp;</td></tr> <tr><td colspan=2 style="width:20px;height:30px;background:#fff" 
rowspan=3>&nbsp;</td><td style="width:10px;height:10px;background:rgb(237,127,127)" rowspan=3>&nbsp;</td><td style="width:10px;height:10px;background:#fff" rowspan=3>&nbsp;</td><td style="width:20px;height:10px;background:#fff" rowspan=4 colspan=2>&nbsp;</td><td style="width:10px;height:20px;background:rgb(237,127,127)" rowspan=2>&nbsp;</td><td style="width:10px;height:10px;background:#fff" rowspan=3>&nbsp;</td><td style="width:20px;height:10px;background:#fff" rowspan=3 colspan=2>&nbsp;</td><td style="width:10px;height:10px;background:rgb(237,127,127)" rowspan=3>&nbsp;</td><td style="width:10px;height:10px;background:#fff" rowspan=3>&nbsp;</td><td style="width:10px;height:10px;background:rgb(237,127,127)" rowspan=3>&nbsp;</td></tr> <tr><td style="width:10px;height:10px;background:#fff" rowspan=2>&nbsp;</td></tr> <tr><td style="width:10px;height:10px;background:#fff">&nbsp;</td></tr> <tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr> <tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr> </table> <p id="footer">npm-build &mdash; npm@3.3.12</p>
{ "content_hash": "28eb1e5a47f45ad46bada9bc79af1eb3", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 807, "avg_line_length": 76.61363636363636, "alnum_prop": 0.6997923464847227, "repo_name": "alexander-lee/storm-alert", "id": "b630367628840996409dc9dc2c688d566af66be8", "size": "3371", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "node_modules/npm/html/doc/cli/npm-build.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "1334" }, { "name": "JavaScript", "bytes": "933523" } ], "symlink_target": "" }
package main import ( "flag" "fmt" "os" "runtime/pprof" "sort" "time" "golang.org/x/net/context" log "github.com/golang/glog" "github.com/youtube/vitess/go/exit" "github.com/youtube/vitess/go/rpcplus" "github.com/youtube/vitess/go/rpcwrap/bsonrpc" "github.com/youtube/vitess/go/sync2" "github.com/youtube/vitess/go/vt/logutil" "github.com/youtube/vitess/go/vt/topo" pb "github.com/youtube/vitess/go/vt/proto/topodata" ) var ( usage = ` Queries the topo server, for test purposes. ` mode = flag.String("mode", "get", "which operation to run on the node (getSrvKeyspaceNames, getSrvKeyspace, getEndPoints, qps)") server = flag.String("server", "localhost:3801", "topo server to dial") timeout = flag.Duration("timeout", 5*time.Second, "connection timeout") // flag can't overlap with servenv's cpu_profile cpuProfile = flag.String("zkclient_cpu_profile", "", "write cpu profile to file") ) func init() { flag.Usage = func() { fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0]) flag.PrintDefaults() fmt.Fprintf(os.Stderr, usage) } } func connect() *rpcplus.Client { rpcClient, err := bsonrpc.DialHTTP("tcp", *server, *timeout) if err != nil { log.Fatalf("Can't connect to topo server: %v", err) } return rpcClient } func getSrvKeyspaceNames(ctx context.Context, rpcClient *rpcplus.Client, cell string, verbose bool) { req := &topo.GetSrvKeyspaceNamesArgs{ Cell: cell, } reply := &topo.SrvKeyspaceNames{} if err := rpcClient.Call(ctx, "TopoReader.GetSrvKeyspaceNames", req, reply); err != nil { log.Fatalf("TopoReader.GetSrvKeyspaceNames error: %v", err) } if verbose { for i, entry := range reply.Entries { println(fmt.Sprintf("KeyspaceNames[%v] = %v", i, entry)) } } } func getSrvKeyspace(ctx context.Context, rpcClient *rpcplus.Client, cell, keyspace string, verbose bool) { req := &topo.GetSrvKeyspaceArgs{ Cell: cell, Keyspace: keyspace, } reply := &topo.SrvKeyspace{} if err := rpcClient.Call(ctx, "TopoReader.GetSrvKeyspace", req, reply); err != nil { log.Fatalf("TopoReader.GetSrvKeyspace 
error: %v", err) } if verbose { tabletTypes := make([]string, 0, len(reply.Partitions)) for t := range reply.Partitions { tabletTypes = append(tabletTypes, string(t)) } sort.Strings(tabletTypes) for _, t := range tabletTypes { println(fmt.Sprintf("Partitions[%v] =", t)) for i, s := range reply.Partitions[topo.TabletType(t)].ShardReferences { println(fmt.Sprintf(" ShardReferences[%v]=%v", i, s.KeyRange.String())) } } } } func getEndPoints(ctx context.Context, rpcClient *rpcplus.Client, cell, keyspace, shard, tabletType string, verbose bool) { req := &topo.GetEndPointsArgs{ Cell: cell, Keyspace: keyspace, Shard: shard, TabletType: topo.TabletType(tabletType), } reply := &pb.EndPoints{} if err := rpcClient.Call(ctx, "TopoReader.GetEndPoints", req, reply); err != nil { log.Fatalf("TopoReader.GetEndPoints error: %v", err) } if verbose { for i, e := range reply.Entries { println(fmt.Sprintf("Entries[%v] = %v %v", i, e.Uid, e.Host)) } } } // qps is a function used by tests to run a vtgate load check. // It will get the same srvKeyspaces as fast as possible and display the QPS. 
func qps(ctx context.Context, cell string, keyspaces []string) { var count sync2.AtomicInt32 for _, keyspace := range keyspaces { for i := 0; i < 10; i++ { go func() { rpcClient := connect() for true { getSrvKeyspace(ctx, rpcClient, cell, keyspace, false) count.Add(1) } }() } } ticker := time.NewTicker(time.Second) i := 0 for _ = range ticker.C { c := count.Get() count.Set(0) println(fmt.Sprintf("QPS = %v", c)) i++ if i == 10 { break } } } func main() { defer exit.Recover() defer logutil.Flush() flag.Parse() args := flag.Args() if len(args) == 0 { flag.Usage() exit.Return(1) } if *cpuProfile != "" { f, err := os.Create(*cpuProfile) if err != nil { log.Error(err) exit.Return(1) } pprof.StartCPUProfile(f) defer pprof.StopCPUProfile() } ctx := context.Background() if *mode == "getSrvKeyspaceNames" { rpcClient := connect() if len(args) == 1 { getSrvKeyspaceNames(ctx, rpcClient, args[0], true) } else { log.Errorf("getSrvKeyspaceNames only takes one argument") exit.Return(1) } } else if *mode == "getSrvKeyspace" { rpcClient := connect() if len(args) == 2 { getSrvKeyspace(ctx, rpcClient, args[0], args[1], true) } else { log.Errorf("getSrvKeyspace only takes two arguments") exit.Return(1) } } else if *mode == "getEndPoints" { rpcClient := connect() if len(args) == 4 { getEndPoints(ctx, rpcClient, args[0], args[1], args[2], args[3], true) } else { log.Errorf("getEndPoints only takes four arguments") exit.Return(1) } } else if *mode == "qps" { qps(ctx, args[0], args[1:]) } else { flag.Usage() log.Errorf("Invalid mode: %v", *mode) exit.Return(1) } }
{ "content_hash": "0ed5ed5cab25d16bf80edfe2fe86fffc", "timestamp": "", "source": "github", "line_count": 194, "max_line_length": 132, "avg_line_length": 25.54123711340206, "alnum_prop": 0.6595358224016146, "repo_name": "mlc0202/vitess", "id": "ba03f1a9bcb45d9f19b99f0298f05ded37d1067d", "size": "5112", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "go/cmd/zkclient2/zkclient2.go", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "40319" }, { "name": "CSS", "bytes": "80739" }, { "name": "Go", "bytes": "4538368" }, { "name": "HTML", "bytes": "86600" }, { "name": "Java", "bytes": "186832" }, { "name": "JavaScript", "bytes": "71420" }, { "name": "Liquid", "bytes": "15797" }, { "name": "Makefile", "bytes": "7867" }, { "name": "PHP", "bytes": "7167" }, { "name": "PLpgSQL", "bytes": "10072" }, { "name": "Protocol Buffer", "bytes": "61194" }, { "name": "Python", "bytes": "964742" }, { "name": "Ruby", "bytes": "465" }, { "name": "Shell", "bytes": "53683" }, { "name": "Yacc", "bytes": "18969" } ], "symlink_target": "" }
package com.esri.terraformer.core; import org.junit.Test; import static org.junit.Assert.assertEquals; public class BaseGeometryTest { @Test public void testNaiveEquals() throws Exception { BaseGeometry<Double> pt = new Point(100d, 0d); BaseGeometry<Double> pt2 = new Point(100d, 0d, 3d); BaseGeometry<Double> pt3 = new Point(0d, 100d); BaseGeometry<Double> pt4 = new Point(100d, 0d); // same as pt BaseGeometry<Point> mp = new MultiPoint((Point)pt, (Point)pt); BaseGeometry<Point> mp2 = new MultiPoint((Point)pt, (Point)pt3); BaseGeometry<Point> mp3 = new MultiPoint((Point)pt3, (Point)pt); assertEquals(false, BaseGeometry.naiveEquals(null, null)); assertEquals(false, BaseGeometry.naiveEquals(null, pt)); assertEquals(false, BaseGeometry.naiveEquals(pt, null)); assertEquals(false, BaseGeometry.naiveEquals(pt, mp)); assertEquals(false, BaseGeometry.naiveEquals(pt, pt2)); assertEquals(true, BaseGeometry.naiveEquals(pt, pt)); assertEquals(true, BaseGeometry.naiveEquals(pt, pt4)); assertEquals(null, BaseGeometry.naiveEquals(mp2, mp3)); } }
{ "content_hash": "50661ed81073e4c2842720377df0f7c1", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 72, "avg_line_length": 43.77777777777778, "alnum_prop": 0.6835871404399323, "repo_name": "esripdx/terraformer-java", "id": "193a9f8135ad374535f9c3c5111af7149f50fac3", "size": "1182", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/test/java/com/esri/terraformer/core/BaseGeometryTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "229347" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.6.0_37) on Mon Mar 25 00:16:21 EDT 2013 --> <TITLE> Uses of Class edu.american.student.stonewall.display.css.property.VerticalAlignmentPropertyTest </TITLE> <META NAME="date" CONTENT="2013-03-25"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class edu.american.student.stonewall.display.css.property.VerticalAlignmentPropertyTest"; } } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <HR> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../edu/american/student/stonewall/display/css/property/VerticalAlignmentPropertyTest.html" title="class in edu.american.student.stonewall.display.css.property"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT 
CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../../../index.html?edu/american/student/stonewall/display/css/property//class-useVerticalAlignmentPropertyTest.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="VerticalAlignmentPropertyTest.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <CENTER> <H2> <B>Uses of Class<br>edu.american.student.stonewall.display.css.property.VerticalAlignmentPropertyTest</B></H2> </CENTER> No usage of edu.american.student.stonewall.display.css.property.VerticalAlignmentPropertyTest <P> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A 
NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../edu/american/student/stonewall/display/css/property/VerticalAlignmentPropertyTest.html" title="class in edu.american.student.stonewall.display.css.property"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../../../index.html?edu/american/student/stonewall/display/css/property//class-useVerticalAlignmentPropertyTest.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="VerticalAlignmentPropertyTest.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT 
type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> </BODY> </HTML>
{ "content_hash": "0349b6384d88f94cdf793a46e4426f85", "timestamp": "", "source": "github", "line_count": 144, "max_line_length": 298, "avg_line_length": 45.1875, "alnum_prop": 0.6213308744429077, "repo_name": "Ccook/Stonewall", "id": "efa632c30a0bb92d8bf17672f453ed26f724db9a", "size": "6507", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "javadoc/edu/american/student/stonewall/display/css/property/class-use/VerticalAlignmentPropertyTest.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "853867" }, { "name": "JavaScript", "bytes": "217" } ], "symlink_target": "" }
package cn.luo.yuan.maze.model.skill.elementalist; import cn.luo.yuan.maze.model.Data; import cn.luo.yuan.maze.model.Hero; import cn.luo.yuan.maze.model.skill.*; import cn.luo.yuan.maze.service.InfoControlInterface; import cn.luo.yuan.maze.utils.Field; /** * Created by luoyuan on 2017/7/16. */ public class ElementModel extends SkillModel { private static final long serialVersionUID = Field.SERVER_VERSION; public ElementModel(Skill skill){ super(skill); } public boolean canEnable(SkillParameter parameter){ InfoControlInterface context = parameter.get("context"); if(isSkillEnable("HeroHit", context) || isSkillEnable("EvilTalent", context)){ return false; }else { return (skill instanceof Elementalist || isSkillEnable("Elementalist", context)) && isEnablePointEnough(parameter); } } public boolean canMount(SkillParameter parameter) { InfoControlInterface context = parameter.get(SkillParameter.CONTEXT); return !isSkillEnable("HeroHit",context)&& !isSkillEnable("EvilTalent", context); } public boolean canUpgrade(SkillParameter parameter){ return skill.isEnable() && isUpgradePointEnough(parameter); } }
{ "content_hash": "7bcf53456c343955ad33f81c3641a7b5", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 127, "avg_line_length": 33.75675675675676, "alnum_prop": 0.7045636509207366, "repo_name": "luoyuan800/NeverEnd", "id": "dfe135cc58baa9bced3045ed793035a2a916f964", "size": "1249", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "dataModel/src/cn/luo/yuan/maze/model/skill/elementalist/ElementModel.java", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "299752" }, { "name": "HTML", "bytes": "86849" }, { "name": "Java", "bytes": "1125483" }, { "name": "JavaScript", "bytes": "81097" }, { "name": "Kotlin", "bytes": "208133" } ], "symlink_target": "" }
<!DOCTYPE HTML> <meta charset="UTF-8"> <title>CSS Toggles: getComputedStyle() for toggle-trigger property</title> <link rel="author" title="L. David Baron" href="https://dbaron.org/"> <link rel="author" title="Google" href="http://www.google.com/"> <link rel="help" href="https://tabatkins.github.io/css-toggle/#toggle-trigger-property"> <script src="/resources/testharness.js"></script> <script src="/resources/testharnessreport.js"></script> <script src="/css/support/computed-testcommon.js"></script> <body> <div id="target"></div> <script> test_computed_value('toggle-trigger', 'none'); test_computed_value('toggle-trigger', 'self set 1'); test_computed_value('toggle-trigger', 'sticky set 1'); test_computed_value('toggle-trigger', 'group set 1'); test_computed_value('toggle-trigger', 'mytoggle'); test_computed_value('toggle-trigger', 'mytoggle set 0'); test_computed_value('toggle-trigger', 'mytoggle set +0', 'mytoggle set 0'); test_computed_value('toggle-trigger', 'mytoggle set -0', 'mytoggle set 0'); test_computed_value('toggle-trigger', 'mytoggle set +5', 'mytoggle set 5'); test_computed_value('toggle-trigger', 'mytoggle, yourtoggle'); test_computed_value('toggle-trigger', 'mytoggle, mytoggle'); test_computed_value('toggle-trigger', 'mytoggle set 1, yourtoggle'); test_computed_value('toggle-trigger', 'mytoggle set 1 , yourtoggle set 1', 'mytoggle set 1, yourtoggle set 1'); test_computed_value('toggle-trigger', 'mytoggle,yourtoggle set 1', 'mytoggle, yourtoggle set 1'); test_computed_value('toggle-trigger', 'mytoggle set calc(-3)', 'mytoggle set 0'); test_computed_value('toggle-trigger', 'mytoggle set calc(6)', 'mytoggle set 6'); test_computed_value('toggle-trigger', 'mytoggle set calc(6.4)', 'mytoggle set 6'); test_computed_value('toggle-trigger', 'mytoggle set calc(6.5)', 'mytoggle set 7'); test_computed_value('toggle-trigger', 'mytoggle set calc(6.6)', 'mytoggle set 7'); test_computed_value('toggle-trigger', 'mytoggle set two'); 
test_computed_value('toggle-trigger', 'mytoggle next 1', 'mytoggle'); test_computed_value('toggle-trigger', 'mytoggle next 2'); test_computed_value('toggle-trigger', 'mytoggle prev 1', 'mytoggle prev'); test_computed_value('toggle-trigger', 'mytoggle prev 2'); test_computed_value('toggle-trigger', 'mytoggle next calc(-3)', 'mytoggle'); test_computed_value('toggle-trigger', 'mytoggle prev calc(-3)', 'mytoggle prev'); test_computed_value('toggle-trigger', 'mytoggle next calc(6.4)', 'mytoggle next 6'); test_computed_value('toggle-trigger', 'mytoggle next calc(6.5)', 'mytoggle next 7'); test_computed_value('toggle-trigger', 'mytoggle prev calc(6.4)', 'mytoggle prev 6'); test_computed_value('toggle-trigger', 'mytoggle prev calc(6.5)', 'mytoggle prev 7'); </script>
{ "content_hash": "b2913a23a1a6b7fd76020e5db71fee54", "timestamp": "", "source": "github", "line_count": 46, "max_line_length": 111, "avg_line_length": 59.47826086956522, "alnum_prop": 0.7240497076023392, "repo_name": "chromium/chromium", "id": "4dab2b604545f1826a4e9d07bc8edd757559cad4", "size": "2736", "binary": false, "copies": "13", "ref": "refs/heads/main", "path": "third_party/blink/web_tests/external/wpt/css/css-toggle/parsing/toggle-trigger-computed.tentative.html", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
layout: model title: English RobertaForTokenClassification Cased model (from ml6team) author: John Snow Labs name: roberta_ner_keyphrase_extraction_kbir_semeval2017 date: 2022-08-10 tags: [bert, ner, open_source, en] task: Named Entity Recognition language: en edition: Spark NLP 4.1.0 spark_version: 3.0 supported: true annotator: RoBertaForTokenClassification article_header: type: cover use_language_switcher: "Python-Scala-Java" --- ## Description Pretrained RobertaForTokenClassification model, adapted from Hugging Face and curated to provide scalability and production-readiness using Spark NLP. `keyphrase-extraction-kbir-semeval2017` is a English model originally trained by `ml6team`. ## Predicted Entities `KEY` {:.btn-box} <button class="button button-orange" disabled>Live Demo</button> <button class="button button-orange" disabled>Open in Colab</button> [Download](https://s3.amazonaws.com/auxdata.johnsnowlabs.com/public/models/roberta_ner_keyphrase_extraction_kbir_semeval2017_en_4.1.0_3.0_1660140317706.zip){:.button.button-orange.button-orange-trans.arr.button-icon} ## How to use <div class="tabs-box" markdown="1"> {% include programmingLanguageSelectScalaPythonNLU.html %} ```python documentAssembler = DocumentAssembler() \ .setInputCol("text") \ .setOutputCol("document") sentenceDetector = SentenceDetectorDLModel.pretrained("sentence_detector_dl", "xx")\ .setInputCols(["document"])\ .setOutputCol("sentence") tokenizer = Tokenizer() \ .setInputCols("sentence") \ .setOutputCol("token") tokenClassifier = BertForTokenClassification.pretrained("roberta_ner_keyphrase_extraction_kbir_semeval2017","en") \ .setInputCols(["sentence", "token"]) \ .setOutputCol("ner") pipeline = Pipeline(stages=[documentAssembler, sentenceDetector, tokenizer, tokenClassifier]) data = spark.createDataFrame([["PUT YOUR STRING HERE"]]).toDF("text") result = pipeline.fit(data).transform(data) ``` ```scala val documentAssembler = new DocumentAssembler() .setInputCol("text") 
.setOutputCol("document") val sentenceDetector = SentenceDetectorDLModel.pretrained("sentence_detector_dl", "xx") .setInputCols(Array("document")) .setOutputCol("sentence") val tokenizer = new Tokenizer() .setInputCols(Array("sentence")) .setOutputCol("token") val tokenClassifier = BertForTokenClassification.pretrained("roberta_ner_keyphrase_extraction_kbir_semeval2017","en") .setInputCols(Array("sentence", "token")) .setOutputCol("ner") val pipeline = new Pipeline().setStages(Array(documentAssembler,sentenceDetector, tokenizer, tokenClassifier)) val data = Seq("PUT YOUR STRING HERE").toDF("text") val result = pipeline.fit(data).transform(data) ``` </div> {:.model-param} ## Model Information {:.table-model} |---|---| |Model Name:|roberta_ner_keyphrase_extraction_kbir_semeval2017| |Compatibility:|Spark NLP 4.1.0+| |License:|Open Source| |Edition:|Official| |Input Labels:|[document, token]| |Output Labels:|[ner]| |Language:|en| |Size:|1.3 GB| |Case sensitive:|true| |Max sentence length:|128| ## References - https://huggingface.co/ml6team/keyphrase-extraction-kbir-semeval2017 - https://paperswithcode.com/sota?task=Keyphrase+Extraction&dataset=semeval2017 - https://arxiv.org/abs/2112.08547 - https://arxiv.org/abs/1704.02853
{ "content_hash": "fc9276237772aa4a57b140b8e2e50ced", "timestamp": "", "source": "github", "line_count": 106, "max_line_length": 242, "avg_line_length": 31.566037735849058, "alnum_prop": 0.7453676031081888, "repo_name": "JohnSnowLabs/spark-nlp", "id": "23d1373f1992135ec86f5388e5d4979660747b30", "size": "3350", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/_posts/gadde5300/2022-08-10-roberta_ner_keyphrase_extraction_kbir_semeval2017_en_3_0.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "14452" }, { "name": "Java", "bytes": "223289" }, { "name": "Makefile", "bytes": "819" }, { "name": "Python", "bytes": "1694517" }, { "name": "Scala", "bytes": "4116435" }, { "name": "Shell", "bytes": "5286" } ], "symlink_target": "" }
<?php namespace Illuminate\Database\Eloquent\Relations; use Illuminate\Database\Eloquent\Model; use Illuminate\Database\Eloquent\Builder; use Illuminate\Database\Query\Expression; use Illuminate\Database\Eloquent\Collection; use Illuminate\Database\Eloquent\ModelNotFoundException; class BelongsToMany extends Relation { /** * The intermediate table for the relation. * * @var string */ protected $table; /** * The foreign key of the parent model. * * @var string */ protected $foreignKey; /** * The associated key of the relation. * * @var string */ protected $otherKey; /** * The "name" of the relationship. * * @var string */ protected $relationName; /** * The pivot table columns to retrieve. * * @var array */ protected $pivotColumns = array(); /** * Create a new has many relationship instance. * * @param \Illuminate\Database\Eloquent\Builder $query * @param \Illuminate\Database\Eloquent\Model $parent * @param string $table * @param string $foreignKey * @param string $otherKey * @param string $relationName * @return void */ public function __construct(Builder $query, Model $parent, $table, $foreignKey, $otherKey, $relationName = null) { $this->table = $table; $this->otherKey = $otherKey; $this->foreignKey = $foreignKey; $this->relationName = $relationName; parent::__construct($query, $parent); } /** * Get the results of the relationship. * * @return mixed */ public function getResults() { return $this->get(); } /** * Set a where clause for a pivot table column. * * @param string $column * @param string $operator * @param mixed $value * @param string $boolean * @return \Illuminate\Database\Eloquent\Relations\BelongsToMany */ public function wherePivot($column, $operator = null, $value = null, $boolean = 'and') { return $this->where($this->table.'.'.$column, $operator, $value, $boolean); } /** * Set an or where clause for a pivot table column. 
* * @param string $column * @param string $operator * @param mixed $value * @return \Illuminate\Database\Eloquent\Relations\BelongsToMany */ public function orWherePivot($column, $operator = null, $value = null) { return $this->wherePivot($column, $operator, $value, 'or'); } /** * Execute the query and get the first result. * * @param array $columns * @return mixed */ public function first($columns = array('*')) { $results = $this->take(1)->get($columns); return count($results) > 0 ? $results->first() : null; } /** * Execute the query and get the first result or throw an exception. * * @param array $columns * @return \Illuminate\Database\Eloquent\Model|static * * @throws \Illuminate\Database\Eloquent\ModelNotFoundException */ public function firstOrFail($columns = array('*')) { if ( ! is_null($model = $this->first($columns))) return $model; throw new ModelNotFoundException; } /** * Execute the query as a "select" statement. * * @param array $columns * @return \Illuminate\Database\Eloquent\Collection */ public function get($columns = array('*')) { // First we'll add the proper select columns onto the query so it is run with // the proper columns. Then, we will get the results and hydrate out pivot // models with the result of those columns as a separate model relation. $columns = $this->query->getQuery()->columns ? array() : $columns; $select = $this->getSelectColumns($columns); $models = $this->query->addSelect($select)->getModels(); $this->hydratePivotRelation($models); // If we actually found models we will also eager load any relationships that // have been specified as needing to be eager loaded. This will solve the // n + 1 query problem for the developer and also increase performance. if (count($models) > 0) { $models = $this->query->eagerLoadRelations($models); } return $this->related->newCollection($models); } /** * Get a paginator for the "select" statement. 
* * @param int $perPage * @param array $columns * @return \Illuminate\Pagination\Paginator */ public function paginate($perPage = null, $columns = array('*')) { $this->query->addSelect($this->getSelectColumns($columns)); // When paginating results, we need to add the pivot columns to the query and // then hydrate into the pivot objects once the results have been gathered // from the database since this isn't performed by the Eloquent builder. $pager = $this->query->paginate($perPage, $columns); $this->hydratePivotRelation($pager->getItems()); return $pager; } /** * Hydrate the pivot table relationship on the models. * * @param array $models * @return void */ protected function hydratePivotRelation(array $models) { // To hydrate the pivot relationship, we will just gather the pivot attributes // and create a new Pivot model, which is basically a dynamic model that we // will set the attributes, table, and connections on so it they be used. foreach ($models as $model) { $pivot = $this->newExistingPivot($this->cleanPivotAttributes($model)); $model->setRelation('pivot', $pivot); } } /** * Get the pivot attributes from a model. * * @param \Illuminate\Database\Eloquent\Model $model * @return array */ protected function cleanPivotAttributes(Model $model) { $values = array(); foreach ($model->getAttributes() as $key => $value) { // To get the pivots attributes we will just take any of the attributes which // begin with "pivot_" and add those to this arrays, as well as unsetting // them from the parent's models since they exist in a different table. if (strpos($key, 'pivot_') === 0) { $values[substr($key, 6)] = $value; unset($model->$key); } } return $values; } /** * Set the base constraints on the relation query. * * @return void */ public function addConstraints() { $this->setJoin(); if (static::$constraints) $this->setWhere(); } /** * Add the constraints for a relationship count query. 
* * @param \Illuminate\Database\Eloquent\Builder $query * @param \Illuminate\Database\Eloquent\Builder $parent * @return \Illuminate\Database\Eloquent\Builder */ public function getRelationCountQuery(Builder $query, Builder $parent) { if ($parent->getQuery()->from == $query->getQuery()->from) { return $this->getRelationCountQueryForSelfJoin($query, $parent); } else { $this->setJoin($query); return parent::getRelationCountQuery($query, $parent); } } /** * Add the constraints for a relationship count query on the same table. * * @param \Illuminate\Database\Eloquent\Builder $query * @param \Illuminate\Database\Eloquent\Builder $parent * @return \Illuminate\Database\Eloquent\Builder */ public function getRelationCountQueryForSelfJoin(Builder $query, Builder $parent) { $query->select(new Expression('count(*)')); $tablePrefix = $this->query->getQuery()->getConnection()->getTablePrefix(); $query->from($this->table.' as '.$tablePrefix.$hash = $this->getRelationCountHash()); $key = $this->wrap($this->getQualifiedParentKeyName()); return $query->where($hash.'.'.$this->foreignKey, '=', new Expression($key)); } /** * Get a relationship join table hash. * * @return string */ public function getRelationCountHash() { return 'self_'.md5(microtime(true)); } /** * Set the select clause for the relation query. * * @param array $columns * @return \Illuminate\Database\Eloquent\Relations\BelongsToMany */ protected function getSelectColumns(array $columns = array('*')) { if ($columns == array('*')) { $columns = array($this->related->getTable().'.*'); } return array_merge($columns, $this->getAliasedPivotColumns()); } /** * Get the pivot columns for the relation. 
* * @return array */ protected function getAliasedPivotColumns() { $defaults = array($this->foreignKey, $this->otherKey); // We need to alias all of the pivot columns with the "pivot_" prefix so we // can easily extract them out of the models and put them into the pivot // relationships when they are retrieved and hydrated into the models. $columns = array(); foreach (array_merge($defaults, $this->pivotColumns) as $column) { $columns[] = $this->table.'.'.$column.' as pivot_'.$column; } return array_unique($columns); } /** * Set the join clause for the relation query. * * @param \Illuminate\Database\Eloquent\Builder|null * @return $this */ protected function setJoin($query = null) { $query = $query ?: $this->query; // We need to join to the intermediate table on the related model's primary // key column with the intermediate table's foreign key for the related // model instance. Then we can set the "where" for the parent models. $baseTable = $this->related->getTable(); $key = $baseTable.'.'.$this->related->getKeyName(); $query->join($this->table, $key, '=', $this->getOtherKey()); return $this; } /** * Set the where clause for the relation query. * * @return $this */ protected function setWhere() { $foreign = $this->getForeignKey(); $this->query->where($foreign, '=', $this->parent->getKey()); return $this; } /** * Set the constraints for an eager load of the relation. * * @param array $models * @return void */ public function addEagerConstraints(array $models) { $this->query->whereIn($this->getForeignKey(), $this->getKeys($models)); } /** * Initialize the relation on a set of models. * * @param array $models * @param string $relation * @return array */ public function initRelation(array $models, $relation) { foreach ($models as $model) { $model->setRelation($relation, $this->related->newCollection()); } return $models; } /** * Match the eagerly loaded results to their parents. 
* * @param array $models * @param \Illuminate\Database\Eloquent\Collection $results * @param string $relation * @return array */ public function match(array $models, Collection $results, $relation) { $dictionary = $this->buildDictionary($results); // Once we have an array dictionary of child objects we can easily match the // children back to their parent using the dictionary and the keys on the // the parent models. Then we will return the hydrated models back out. foreach ($models as $model) { if (isset($dictionary[$key = $model->getKey()])) { $collection = $this->related->newCollection($dictionary[$key]); $model->setRelation($relation, $collection); } } return $models; } /** * Build model dictionary keyed by the relation's foreign key. * * @param \Illuminate\Database\Eloquent\Collection $results * @return array */ protected function buildDictionary(Collection $results) { $foreign = $this->foreignKey; // First we will build a dictionary of child models keyed by the foreign key // of the relation so that we will easily and quickly match them to their // parents without having a possibly slow inner loops for every models. $dictionary = array(); foreach ($results as $result) { $dictionary[$result->pivot->$foreign][] = $result; } return $dictionary; } /** * Touch all of the related models for the relationship. * * E.g.: Touch all roles associated with this user. * * @return void */ public function touch() { $key = $this->getRelated()->getKeyName(); $columns = $this->getRelatedFreshUpdate(); // If we actually have IDs for the relation, we will run the query to update all // the related model's timestamps, to make sure these all reflect the changes // to the parent models. This will help us keep any caching synced up here. $ids = $this->getRelatedIds(); if (count($ids) > 0) { $this->getRelated()->newQuery()->whereIn($key, $ids)->update($columns); } } /** * Get all of the IDs for the related models. 
* * @return array */ public function getRelatedIds() { $related = $this->getRelated(); $fullKey = $related->getQualifiedKeyName(); return $this->getQuery()->select($fullKey)->lists($related->getKeyName()); } /** * Save a new model and attach it to the parent model. * * @param \Illuminate\Database\Eloquent\Model $model * @param array $joining * @param bool $touch * @return \Illuminate\Database\Eloquent\Model */ public function save(Model $model, array $joining = array(), $touch = true) { $model->save(array('touch' => false)); $this->attach($model->getKey(), $joining, $touch); return $model; } /** * Save an array of new models and attach them to the parent model. * * @param array $models * @param array $joinings * @return array */ public function saveMany(array $models, array $joinings = array()) { foreach ($models as $key => $model) { $this->save($model, (array) array_get($joinings, $key), false); } $this->touchIfTouching(); return $models; } /** * Create a new instance of the related model. * * @param array $attributes * @param array $joining * @param bool $touch * @return \Illuminate\Database\Eloquent\Model */ public function create(array $attributes, array $joining = array(), $touch = true) { $instance = $this->related->newInstance($attributes); // Once we save the related model, we need to attach it to the base model via // through intermediate table so we'll use the existing "attach" method to // accomplish this which will insert the record and any more attributes. $instance->save(array('touch' => false)); $this->attach($instance->getKey(), $joining, $touch); return $instance; } /** * Create an array of new instances of the related models. 
* * @param array $records * @param array $joinings * @return \Illuminate\Database\Eloquent\Model */ public function createMany(array $records, array $joinings = array()) { $instances = array(); foreach ($records as $key => $record) { $instances[] = $this->create($record, (array) array_get($joinings, $key), false); } $this->touchIfTouching(); return $instances; } /** * Sync the intermediate tables with a list of IDs or collection of models. * * @param $ids * @param bool $detaching * @return array */ public function sync($ids, $detaching = true) { $changes = array( 'attached' => array(), 'detached' => array(), 'updated' => array() ); if ($ids instanceof Collection) $ids = $ids->modelKeys(); // First we need to attach any of the associated models that are not currently // in this joining table. We'll spin through the given IDs, checking to see // if they exist in the array of current ones, and if not we will insert. $current = $this->newPivotQuery()->lists($this->otherKey); $records = $this->formatSyncList($ids); $detach = array_diff($current, array_keys($records)); // Next, we will take the differences of the currents and given IDs and detach // all of the entities that exist in the "current" array but are not in the // the array of the IDs given to the method which will complete the sync. if ($detaching && count($detach) > 0) { $this->detach($detach); $changes['detached'] = (array) array_map(function($v) { return (int) $v; }, $detach); } // Now we are finally ready to attach the new records. Note that we'll disable // touching until after the entire operation is complete so we don't fire a // ton of touch operations until we are totally done syncing the records. $changes = array_merge( $changes, $this->attachNew($records, $current, false) ); if (count($changes['attached']) || count($changes['updated'])) { $this->touchIfTouching(); } return $changes; } /** * Format the sync list so that it is keyed by ID. 
* * @param array $records * @return array */ protected function formatSyncList(array $records) { $results = array(); foreach ($records as $id => $attributes) { if ( ! is_array($attributes)) { list($id, $attributes) = array($attributes, array()); } $results[$id] = $attributes; } return $results; } /** * Attach all of the IDs that aren't in the current array. * * @param array $records * @param array $current * @param bool $touch * @return array */ protected function attachNew(array $records, array $current, $touch = true) { $changes = array('attached' => array(), 'updated' => array()); foreach ($records as $id => $attributes) { // If the ID is not in the list of existing pivot IDs, we will insert a new pivot // record, otherwise, we will just update this existing record on this joining // table, so that the developers will easily update these records pain free. if ( ! in_array($id, $current)) { $this->attach($id, $attributes, $touch); $changes['attached'][] = (int) $id; } // Now we'll try to update an existing pivot record with the attributes that were // given to the method. If the model is actually updated we will add it to the // list of updated pivot records so we return them back out to the consumer. elseif (count($attributes) > 0 && $this->updateExistingPivot($id, $attributes, $touch)) { $changes['updated'][] = (int) $id; } } return $changes; } /** * Update an existing pivot record on the table. * * @param mixed $id * @param array $attributes * @param bool $touch * @return void */ public function updateExistingPivot($id, array $attributes, $touch = true) { if (in_array($this->updatedAt(), $this->pivotColumns)) { $attributes = $this->setTimestampsOnAttach($attributes, true); } $updated = $this->newPivotStatementForId($id)->update($attributes); if ($touch) $this->touchIfTouching(); return $updated; } /** * Attach a model to the parent. 
* * @param mixed $id * @param array $attributes * @param bool $touch * @return void */ public function attach($id, array $attributes = array(), $touch = true) { if ($id instanceof Model) $id = $id->getKey(); $query = $this->newPivotStatement(); $query->insert($this->createAttachRecords((array) $id, $attributes)); if ($touch) $this->touchIfTouching(); } /** * Create an array of records to insert into the pivot table. * * @param array $ids * @param array $attributes * @return array */ protected function createAttachRecords($ids, array $attributes) { $records = array(); $timed = in_array($this->createdAt(), $this->pivotColumns); // To create the attachment records, we will simply spin through the IDs given // and create a new record to insert for each ID. Each ID may actually be a // key in the array, with extra attributes to be placed in other columns. foreach ($ids as $key => $value) { $records[] = $this->attacher($key, $value, $attributes, $timed); } return $records; } /** * Create a full attachment record payload. * * @param int $key * @param mixed $value * @param array $attributes * @param bool $timed * @return array */ protected function attacher($key, $value, $attributes, $timed) { list($id, $extra) = $this->getAttachId($key, $value, $attributes); // To create the attachment records, we will simply spin through the IDs given // and create a new record to insert for each ID. Each ID may actually be a // key in the array, with extra attributes to be placed in other columns. $record = $this->createAttachRecord($id, $timed); return array_merge($record, $extra); } /** * Get the attach record ID and extra attributes. * * @param mixed $key * @param mixed $value * @param array $attributes * @return array */ protected function getAttachId($key, $value, array $attributes) { if (is_array($value)) { return array($key, array_merge($value, $attributes)); } else { return array($value, $attributes); } } /** * Create a new pivot attachment record. 
* * @param int $id * @param bool $timed * @return array */ protected function createAttachRecord($id, $timed) { $record[$this->foreignKey] = $this->parent->getKey(); $record[$this->otherKey] = $id; // If the record needs to have creation and update timestamps, we will make // them by calling the parent model's "freshTimestamp" method which will // provide us with a fresh timestamp in this model's preferred format. if ($timed) { $record = $this->setTimestampsOnAttach($record); } return $record; } /** * Set the creation and update timestamps on an attach record. * * @param array $record * @param bool $exists * @return array */ protected function setTimestampsOnAttach(array $record, $exists = false) { $fresh = $this->parent->freshTimestamp(); if ( ! $exists) $record[$this->createdAt()] = $fresh; $record[$this->updatedAt()] = $fresh; return $record; } /** * Detach models from the relationship. * * @param int|array $ids * @param bool $touch * @return int */ public function detach($ids = array(), $touch = true) { if ($ids instanceof Model) $ids = (array) $ids->getKey(); $query = $this->newPivotQuery(); // If associated IDs were passed to the method we will only delete those // associations, otherwise all of the association ties will be broken. // We'll return the numbers of affected rows when we do the deletes. $ids = (array) $ids; if (count($ids) > 0) { $query->whereIn($this->otherKey, (array) $ids); } if ($touch) $this->touchIfTouching(); // Once we have all of the conditions set on the statement, we are ready // to run the delete on the pivot table. Then, if the touch parameter // is true, we will go ahead and touch all related models to sync. $results = $query->delete(); return $results; } /** * If we're touching the parent model, touch. 
* * @return void */ public function touchIfTouching() { if ($this->touchingParent()) $this->getParent()->touch(); if ($this->getParent()->touches($this->relationName)) $this->touch(); } /** * Determine if we should touch the parent on sync. * * @return bool */ protected function touchingParent() { return $this->getRelated()->touches($this->guessInverseRelation()); } /** * Attempt to guess the name of the inverse of the relation. * * @return string */ protected function guessInverseRelation() { return camel_case(str_plural(class_basename($this->getParent()))); } /** * Create a new query builder for the pivot table. * * @return \Illuminate\Database\Query\Builder */ protected function newPivotQuery() { $query = $this->newPivotStatement(); return $query->where($this->foreignKey, $this->parent->getKey()); } /** * Get a new plain query builder for the pivot table. * * @return \Illuminate\Database\Query\Builder */ public function newPivotStatement() { return $this->query->getQuery()->newQuery()->from($this->table); } /** * Get a new pivot statement for a given "other" ID. * * @param mixed $id * @return \Illuminate\Database\Query\Builder */ public function newPivotStatementForId($id) { $pivot = $this->newPivotStatement(); $key = $this->parent->getKey(); return $pivot->where($this->foreignKey, $key)->where($this->otherKey, $id); } /** * Create a new pivot model instance. * * @param array $attributes * @param bool $exists * @return \Illuminate\Database\Eloquent\Relations\Pivot */ public function newPivot(array $attributes = array(), $exists = false) { $pivot = $this->related->newPivot($this->parent, $attributes, $this->table, $exists); return $pivot->setPivotKeys($this->foreignKey, $this->otherKey); } /** * Create a new existing pivot model instance. 
* * @param array $attributes * @return \Illuminate\Database\Eloquent\Relations\Pivot */ public function newExistingPivot(array $attributes = array()) { return $this->newPivot($attributes, true); } /** * Set the columns on the pivot table to retrieve. * * @param array $columns * @return $this */ public function withPivot($columns) { $columns = is_array($columns) ? $columns : func_get_args(); $this->pivotColumns = array_merge($this->pivotColumns, $columns); return $this; } /** * Specify that the pivot table has creation and update timestamps. * * @param mixed $createdAt * @param mixed $updatedAt * @return \Illuminate\Database\Eloquent\Relations\BelongsToMany */ public function withTimestamps($createdAt = null, $updatedAt = null) { return $this->withPivot($createdAt ?: $this->createdAt(), $updatedAt ?: $this->updatedAt()); } /** * Get the related model's updated at column name. * * @return string */ public function getRelatedFreshUpdate() { return array($this->related->getUpdatedAtColumn() => $this->related->freshTimestamp()); } /** * Get the key for comparing against the parent key in "has" query. * * @return string */ public function getHasCompareKey() { return $this->getForeignKey(); } /** * Get the fully qualified foreign key for the relation. * * @return string */ public function getForeignKey() { return $this->table.'.'.$this->foreignKey; } /** * Get the fully qualified "other key" for the relation. * * @return string */ public function getOtherKey() { return $this->table.'.'.$this->otherKey; } /** * Get the fully qualified parent key name. * * @return string */ protected function getQualifiedParentKeyName() { return $this->parent->getQualifiedKeyName(); } /** * Get the intermediate table for the relationship. * * @return string */ public function getTable() { return $this->table; } /** * Get the relationship name for the relationship. * * @return string */ public function getRelationName() { return $this->relationName; } }
{ "content_hash": "0cde78229e44f65441cbdc1e27fbb165", "timestamp": "", "source": "github", "line_count": 1041, "max_line_length": 113, "avg_line_length": 25.01344860710855, "alnum_prop": 0.6594723299665886, "repo_name": "igrir/jdwlik", "id": "7dd72a5199ee475b6cc493f596fe0cd9444174e3", "size": "26039", "binary": false, "copies": "13", "ref": "refs/heads/master", "path": "vendor/laravel/framework/src/Illuminate/Database/Eloquent/Relations/BelongsToMany.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "382309" }, { "name": "JavaScript", "bytes": "463439" }, { "name": "PHP", "bytes": "475847" } ], "symlink_target": "" }
import expect from 'expect' import React, { Component } from 'react' import { Observable } from 'rxjs' import { mount } from 'enzyme' import connectStore from '../src/decorators/connectStore' import createDispatcher from '../src/createDispatcher' function Child({data}) { return <div>{data}</div> } describe('connectStore', () => { it('passes the correct static observable value', () => { const Tester = connectStore(Observable.of('test'))(Child) const wrapper = mount(<Tester/>) expect(wrapper.text()).toBe('test') }) it('passes selector-applied observable value based on props', () => { const Tester = connectStore((_, { test }) => Observable.of(test))(Child) const wrapper = mount(<Tester test='hello world'/>) expect(wrapper.text()).toBe('hello world') }) it('subscribes to state and updates children', done => { const something = Observable.of({ type: 'DO_SOMETHING' }) const reducer = (state = 'NOTHING', action) => { switch (action.type) { case 'DO_SOMETHING': return state + 'X' default: return state } } const dispatcher = createDispatcher() const Tester = connectStore(() => dispatcher.reduce(reducer))(Child) const wrapper = mount(<Tester/>) dispatcher .reduce(reducer) .subscribe(x => { expect(wrapper.text()).toBe(x) }, err => { throw err }, () => { done() }) dispatcher.next(something.concat(something)) dispatcher.next(something.concat(something)) dispatcher.complete() }) it('recomputes selector-applied observables on changing props', () => { const data = { a: 'a', b: 'b' } const Tester = connectStore((_, { id }) => Observable.of(data[id]))(Child) const wrapper = mount(<Tester id='a'/>) expect(wrapper.text()).toBe('a') wrapper.setProps({ id: 'b' }) expect(wrapper.text()).toBe('b') }) it('renders null if observable state is undefined', () => { const Tester = connectStore(Observable.of(undefined))(Child) const wrapper = mount(<Tester/>) expect(wrapper.isEmpty()).toExist }) })
{ "content_hash": "bf826ad76a05aed4756612200c43c45c", "timestamp": "", "source": "github", "line_count": 73, "max_line_length": 78, "avg_line_length": 29.027397260273972, "alnum_prop": 0.6238791882963662, "repo_name": "philplckthun/fluorine", "id": "642716a62463ace0f69b92dc052a1320a8624cb7", "size": "2119", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "test/connectStore.spec.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "30080" } ], "symlink_target": "" }
1. [Overview](#overview) 2. [Module Description - What the module does and why it is useful](#module-description) 3. [Setup - The basics of getting started with rabbitmq](#setup) * [What rabbitmq affects](#what-rabbitmq-affects) * [Setup requirements](#setup-requirements) * [Beginning with rabbitmq](#beginning-with-rabbitmq) 4. [Usage - Configuration options and additional functionality](#usage) 5. [Reference - An under-the-hood peek at what the module is doing and how](#reference) 5. [Limitations - OS compatibility, etc.](#limitations) * [RedHat module dependencies](#redhat-module-dependecies) 6. [Development - Guide for contributing to the module](#development) ##Overview This module manages RabbitMQ (www.rabbitmq.com) ##Module Description The rabbitmq module sets up rabbitmq and has a number of providers to manage everything from vhosts to exchanges after setup. This module has been tested against 2.7.1 and is known to not support all features against earlier versions. ##Setup ###What rabbitmq affects * rabbitmq repository files. * rabbitmq package. * rabbitmq configuration file. * rabbitmq service. ###Beginning with rabbitmq ```puppet include '::rabbitmq' ``` ##Usage All options and configuration can be done through interacting with the parameters on the main rabbitmq class. These are documented below. ##rabbitmq class To begin with the rabbitmq class controls the installation of rabbitmq. 
In here you can control many parameters relating to the package and service, such as disabling puppet support of the service: ```puppet class { '::rabbitmq': service_manage => false, port => '5672', delete_guest_user => true, } ``` ### Environment Variables To use RabbitMQ Environment Variables, use the parameters `environment_variables` e.g.: ```puppet class { 'rabbitmq': port => '5672', environment_variables => { 'RABBITMQ_NODENAME' => 'node01', 'RABBITMQ_SERVICENAME' => 'RabbitMQ' } } ``` ### Variables Configurable in rabbitmq.config To change RabbitMQ Config Variables in rabbitmq.config, use the parameters `config_variables` e.g.: ```puppet class { 'rabbitmq': port => '5672', config_variables => { 'hipe_compile' => true, 'frame_max' => 131072, 'log_levels' => "[{connection, info}]" } } ``` To change Erlang Kernel Config Variables in rabbitmq.config, use the parameters `config_kernel_variables` e.g.: ```puppet class { 'rabbitmq': port => '5672', config_kernel_variables => { 'inet_dist_listen_min' => 9100, 'inet_dist_listen_max' => 9105, } } ``` ### Clustering To use RabbitMQ clustering facilities, use the rabbitmq parameters `config_cluster`, `cluster_nodes`, and `cluster_node_type`, e.g.: ```puppet class { 'rabbitmq': config_cluster => true, cluster_nodes => ['rabbit1', 'rabbit2'], cluster_node_type => 'ram', } ``` ##Reference ##Classes * rabbitmq: Main class for installation and service management. * rabbitmq::config: Main class for rabbitmq configuration/management. * rabbitmq::install: Handles package installation. * rabbitmq::params: Different configuration data for different systems. * rabbitmq::service: Handles the rabbitmq service. * rabbitmq::repo::apt: Handles apt repo for Debian systems. * rabbitmq::repo::rhel: Handles yum repo for Redhat systems. ###Parameters ####`admin_enable` If enabled sets up the management interface/plugin for RabbitMQ. ####`cluster_disk_nodes` DEPRECATED AND REPLACED BY CLUSTER_NODES. 
####`cluster_node_type` Choose between disk and ram nodes. ####`cluster_nodes` An array of nodes for clustering. ####`config` The file to use as the rabbitmq.config template. ####`config_cluster` Boolean to enable or disable clustering support. ####`config_mirrored_queues` DEPRECATED Configuring queue mirroring should be done by setting the according policy for the queue. You can read more about it [here](http://www.rabbitmq.com/ha.html#genesis) ####`config_path` The path to write the RabbitMQ configuration file to. ####`config_stomp` Boolean to enable or disable stomp. ####`delete_guest_user` Boolean to decide if we should delete the default guest user. ####`env_config` The template file to use for rabbitmq_env.config. ####`env_config_path` The path to write the rabbitmq_env.config file to. ####`erlang_cookie` The erlang cookie to use for clustering - must be the same between all nodes. ####`config_variables` To set config variables in rabbitmq.config ####`node_ip_address` The value of RABBITMQ_NODE_IP_ADDRESS in rabbitmq_env.config ####`environment_variables` RabbitMQ Environment Variables in rabbitmq_env.config ####`package_ensure` Determines the ensure state of the package. Set to installed by default, but could be changed to latest. ####`package_name` The name of the package to install. ####`package_provider` What provider to use to install the package. ####`package_source` Where should the package be installed from? ####`plugin_dir` Location of RabbitMQ plugins. ####`port` The RabbitMQ port. ####`management_port` The port for the RabbitMQ management interface. ####`service_ensure` The state of the service. ####`service_manage` Determines if the service is managed. ####`service_name` The name of the service to manage. ####`ssl` Configures the service for using SSL. ####`ssl_only` Configures the service to only use SSL. No cleartext TCP listeners will be created. Requires that ssl => true also. ####`stomp_port` The port to use for Stomp. 
####`stomp_ensure` Boolean to install the stomp plugin. ####`wipe_db_on_cookie_change` Boolean to determine if we should DESTROY AND DELETE the RabbitMQ database. ####`version` Sets the version to install. ##Native Types ### rabbitmq\_user query all current users: `$ puppet resource rabbitmq_user` ``` rabbitmq_user { 'dan': admin => true, password => 'bar', } ``` Optional parameter tags will set further rabbitmq tags like monitoring, policymaker, etc. To set the administrator tag use admin-flag. ```puppet rabbitmq_user { 'dan': admin => true, password => 'bar', tags => ['monitoring', 'tag1'], } ``` ### rabbitmq\_vhost query all current vhosts: `$ puppet resource rabbitmq_vhost` ```puppet rabbitmq_vhost { 'myhost': ensure => present, } ``` ### rabbitmq\_exchange ```puppet rabbitmq_exchange { 'myexchange@myhost': user => 'dan', password => 'bar', type => 'topic', ensure => present, } ``` ### rabbitmq\_user\_permissions ```puppet rabbitmq_user_permissions { 'dan@myhost': configure_permission => '.*', read_permission => '.*', write_permission => '.*', } ``` ### rabbitmq\_plugin query all currently enabled plugins `$ puppet resource rabbitmq_plugin` ```puppet rabbitmq_plugin {'rabbitmq_stomp': ensure => present, } ``` ### rabbitmq\_federation\_upstream `uri` and `vhost` are required. Other parameters default to the values shown in the example if not provided. ```puppet rabbitmq_federation_upstream { 'myupstream': uri => 'amqp://dan:bar@localhost/myhost', vhost => 'myhost', ack_mode => 'on-confirm', expires => 1000, # defaults to forever if not provided max_hops => 1, message_ttl => 1000, # defaults to forever if not provided prefetch_count => 1000, reconnect_delay => 1, trust_user_id => false, } ``` ###rabbitmq\_federation\_upstreamset `vhost` is required. Do not provide `upstreams` to set to `'all'`. NOTE: It is an error to provide `'all'` in the `upstreams` array. 
```puppet rabbitmq_federation_upstreamset { 'myupstreamset': vhost => 'myhost', upstreams => ['myupstream', 'myupstream1'], } ``` ###rabbitmq\_policy `vhost`, `definition` and `pattern` are required. `definition` must be a non-empty Hash. Other parameters default to the values shown in the example if not provided. ```puppet rabbitmq_policy { 'mypolicy': vhost => 'myhost', definition => {'federation-upstream' => 'myfederationupstream'}, pattern => '^.*$', apply_to => 'all', priority => 0, } ``` ###rabbitmq\_parameter The resource title is parsed as '`vhost` `component` `name`'. All three are required. `value` is also required and must be a non-empty Hash. NOTE: Federation components (federation upstreams and federation upstream sets) cannot be managed with this type. Instead use the rabbitmq_federation_upstream and rabbitmq_federation_upstreamset types. ```puppet rabbitmq_parameter { 'myvhost shovel myparameter': value => {'src-uri' => 'amqp://dan:bar@localhost/', 'src-exchange' => '/', 'src-exchange-key' => 'mykey', 'dest-uri' => 'amqp://dan:bar@localhost/', 'dest-exchange' => '/', 'dest-exchange-key' => 'mykey1', 'add-forward-headers' => false, 'ack-mode' => 'on-confirm', 'delete-after' => 'never'}, } ``` ##Limitations This module has been built on and tested against Puppet 2.7 and higher. The module has been tested on: * RedHat Enterprise Linux 5/6 * Debian 6/7 * CentOS 5/6 * Ubuntu 12.04 Testing on other platforms has been light and cannot be guaranteed. ### Module dependencies To have a suitable erlang version installed on RedHat and Debian systems, you have to install another puppet module from http://forge.puppetlabs.com/garethr/erlang with: puppet module install garethr-erlang This module handles the packages for erlang. 
To use the module, add the following snippet to your site.pp or an appropriate profile class: For RedHat systems: include 'erlang' class { 'erlang': epel_enable => true} For Debian systems: include 'erlang' package { 'erlang-base': ensure => 'latest', } ##Development Puppet Labs modules on the Puppet Forge are open projects, and community contributions are essential for keeping them great. We can’t access the huge number of platforms and myriad of hardware, software, and deployment configurations that Puppet is intended to serve. We want to keep it as easy as possible to contribute changes so that our modules work in your environment. There are a few guidelines that we need contributors to follow so that we can have a chance of keeping on top of things. You can read the complete module contribution guide [on the Puppet Labs wiki.](http://projects.puppetlabs.com/projects/module-site/wiki/Module_contributing) ### Authors * Jeff McCune <jeff@puppetlabs.com> * Dan Bode <dan@puppetlabs.com> * RPM/RHEL packages by Vincent Janelle <randomfrequency@gmail.com> * Puppetlabs Module Team
{ "content_hash": "8f7558f68015d7743d9efe50b0bcf9f6", "timestamp": "", "source": "github", "line_count": 439, "max_line_length": 201, "avg_line_length": 24.471526195899774, "alnum_prop": 0.6941264078935121, "repo_name": "prachetasp/puppetlabs-rabbitmq", "id": "129e65b6abd0ed0fb8b1fe6c780154d5446c5d16", "size": "10779", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Puppet", "bytes": "25624" }, { "name": "Ruby", "bytes": "89282" } ], "symlink_target": "" }
react-lofty
{ "content_hash": "72ce1afda99aae8ced68e08d64afb7cf", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 11, "avg_line_length": 11, "alnum_prop": 0.9090909090909091, "repo_name": "rachardking/react-lofty", "id": "43b19232103e66de1b756ea9350abca6ea7990ef", "size": "11", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33261", "license": "mit", "language": [ { "name": "HTML", "bytes": "1236" }, { "name": "JavaScript", "bytes": "13403" } ], "symlink_target": "" }
/* * GET users listing. */ exports.index = function(req, res){ res.render('user/index', { title: '/ My Apps'}); };
{ "content_hash": "a145f49de1d154e340b97e56b30ec4ee", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 50, "avg_line_length": 17, "alnum_prop": 0.5882352941176471, "repo_name": "shutterfly/Mimo", "id": "0f521e4b1f8961787ea50038a8429b7f199409e4", "size": "119", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "routes/user.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1865" }, { "name": "JavaScript", "bytes": "53203" } ], "symlink_target": "" }
package com.bls.patronage.resources; import com.bls.patronage.api.UserRepresentation; import com.bls.patronage.db.dao.UserDAO; import com.bls.patronage.db.model.User; import io.dropwizard.auth.Auth; import io.dropwizard.jersey.params.UUIDParam; import javax.validation.Valid; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; @Path("/users") @Produces(MediaType.APPLICATION_JSON) public class UserResource { private final UserDAO userDAO; public UserResource(UserDAO userDAO) { this.userDAO = userDAO; } @Path("/{userId: [0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}}") @GET public UserRepresentation getUser( @Valid @PathParam("userId") UUIDParam userId) { return new UserRepresentation(userDAO.getUserById(userId.get())); } @Path("/me") @GET public UserRepresentation logInUser(@Auth User user) { return new UserRepresentation(user); } }
{ "content_hash": "61e159d01f4bcd2daca62048b5ec668e", "timestamp": "", "source": "github", "line_count": 38, "max_line_length": 73, "avg_line_length": 26.710526315789473, "alnum_prop": 0.7054187192118226, "repo_name": "blstream/StudyBox_Backend", "id": "78fada439a26856df832920b6eeb54384388f458", "size": "1015", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "backend/app/src/main/java/com/bls/patronage/resources/UserResource.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "API Blueprint", "bytes": "27455" }, { "name": "Java", "bytes": "236609" } ], "symlink_target": "" }
namespace FakeItEasy { using System; using System.ComponentModel; using System.Diagnostics.CodeAnalysis; /// <summary> /// Hides standard Object members to make fluent interfaces /// easier to read. Found in the source of Autofac: <see cref="!:https://code.google.com/p/autofac/"/> /// Based on blog post here: /// <see cref="!:http://blogs.clariusconsulting.net/kzu/how-to-hide-system-object-members-from-your-interfaces/"/>. /// </summary> [EditorBrowsable(EditorBrowsableState.Never)] public interface IHideObjectMembers { /// <summary> /// Hides the ToString-method. /// </summary> /// <returns>A string representation of the implementing object.</returns> [EditorBrowsable(EditorBrowsableState.Never)] string ToString(); /// <summary> /// Determines whether the specified <see cref="System.Object"/> is equal to this instance. /// </summary> /// <param name="o">The <see cref="System.Object"/> to compare with this instance.</param> /// <returns> /// <c>true</c> if the specified <see cref="System.Object"/> is equal to this instance; otherwise, <c>false</c>. /// </returns> [EditorBrowsable(EditorBrowsableState.Never)] [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "o", Justification = "Uses the same name as the hidden method.")] bool Equals(object o); /// <summary> /// Returns a hash code for this instance. /// </summary> /// <returns> /// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table. /// </returns> [EditorBrowsable(EditorBrowsableState.Never)] [SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate", Justification = "Hides object member.")] int GetHashCode(); /// <summary> /// Gets the type. 
/// </summary> /// <returns>The exact runtime type of the current instance.</returns> [EditorBrowsable(EditorBrowsableState.Never)] [SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate", Justification = "Hides object member.")] [SuppressMessage("Microsoft.Naming", "CA1716:IdentifiersShouldNotMatchKeywords", MessageId = nameof(GetType), Justification = "Uses the name of the method to intercept.")] Type GetType(); } }
{ "content_hash": "ebf4de7f2b87ab8312e15f2d0cbdd433", "timestamp": "", "source": "github", "line_count": 53, "max_line_length": 179, "avg_line_length": 48.20754716981132, "alnum_prop": 0.6344422700587085, "repo_name": "adamralph/FakeItEasy", "id": "fb776987d3199ee7da63915ee95b63f0604acda4", "size": "2555", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/FakeItEasy/IHideObjectMembers.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "517" }, { "name": "C#", "bytes": "1818880" }, { "name": "Shell", "bytes": "109" }, { "name": "Visual Basic .NET", "bytes": "8003" } ], "symlink_target": "" }
const space = require('./space') const asset = require('./asset') const assets = require('./assets') const entry = require('./entry') const entries = require('./entries') const error = require('./error') const emptyArray = require('./emptyArray') module.exports = { space, asset, assets, entry, entries, error, emptyArray }
{ "content_hash": "5910462bd4b2f7ea1ab00294c9428828", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 42, "avg_line_length": 19.941176470588236, "alnum_prop": 0.6666666666666666, "repo_name": "remedyhealth/contentpull", "id": "7fc2149c6d2b91541793a119aa69e9f2ebbbd873", "size": "339", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/stubs/index.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "37129" } ], "symlink_target": "" }
const HOMOLOGS_STATES = { HOMOLOGENE: 'HOMOLOGENE', HOMOLOGENE_RESULT: 'HOMOLOGENE_RESULT', ORTHO_PARA: 'ORTHO_PARA', ORTHO_PARA_RESULT: 'ORTHO_PARA_RESULT' }; export default class ngbHomologsService { homologsServiceMap = {}; currentOrthoParaId; currentHomologeneId; _currentSearch; constructor(dispatcher, projectContext, ngbHomologeneTableService, ngbHomologeneResultService, ngbOrthoParaTableService, ngbOrthoParaResultService ) { Object.assign( this, { dispatcher, projectContext } ); this.homologsServiceMap = { HOMOLOGENE: ngbHomologeneTableService, HOMOLOGENE_RESULT: ngbHomologeneResultService, ORTHO_PARA: ngbOrthoParaTableService, ORTHO_PARA_RESULT: ngbOrthoParaResultService }; this.initEvents(); } get homologsStates() { return HOMOLOGS_STATES; } static instance(dispatcher, projectContext, ngbHomologeneTableService, ngbHomologeneResultService, ngbOrthoParaTableService, ngbOrthoParaResultService) { return new ngbHomologsService(dispatcher, projectContext, ngbHomologeneTableService, ngbHomologeneResultService, ngbOrthoParaTableService, ngbOrthoParaResultService); } get currentSearch() { return this._currentSearch; } set currentSearch(value) { this._currentSearch = value; } initEvents() { this.dispatcher.on('read:show:homologs', data => { this.currentSearch = data; }); } }
{ "content_hash": "fcee9d0313502e81418415d324e229b1", "timestamp": "", "source": "github", "line_count": 59, "max_line_length": 66, "avg_line_length": 28.084745762711865, "alnum_prop": 0.6391068195534098, "repo_name": "epam/NGB", "id": "1a4afbdd7bc46fd32d818525cf3fe6556035676a", "size": "1657", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "client/client/app/components/ngbHomologsPanel/ngbHomologs.service.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "2130" }, { "name": "Dockerfile", "bytes": "2839" }, { "name": "EJS", "bytes": "344" }, { "name": "HTML", "bytes": "455953" }, { "name": "Java", "bytes": "6041257" }, { "name": "JavaScript", "bytes": "3315081" }, { "name": "PLSQL", "bytes": "1182" }, { "name": "SCSS", "bytes": "167986" }, { "name": "Shell", "bytes": "11389" }, { "name": "Smarty", "bytes": "1410" } ], "symlink_target": "" }
package io.netty.testsuite.transport.socket; import io.netty.bootstrap.Bootstrap; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInboundHandlerAdapter; import io.netty.channel.ChannelOption; import io.netty.channel.SimpleChannelInboundHandler; import io.netty.channel.WriteBufferWaterMark; import io.netty.channel.socket.SocketChannel; import io.netty.channel.socket.oio.OioSocketChannel; import org.junit.Test; import java.net.ServerSocket; import java.net.Socket; import java.net.SocketException; import java.nio.channels.ClosedChannelException; import java.util.concurrent.BlockingDeque; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.junit.Assume.assumeFalse; public class SocketShutdownOutputBySelfTest extends AbstractClientSocketTest { @Test(timeout = 30000) public void testShutdownOutput() throws Throwable { run(); } public void testShutdownOutput(Bootstrap cb) throws Throwable { TestHandler h = new TestHandler(); ServerSocket ss = new ServerSocket(); Socket s = null; SocketChannel ch = null; try { ss.bind(newSocketAddress()); ch = (SocketChannel) cb.handler(h).connect(ss.getLocalSocketAddress()).sync().channel(); assertTrue(ch.isActive()); assertFalse(ch.isOutputShutdown()); s = ss.accept(); ch.writeAndFlush(Unpooled.wrappedBuffer(new byte[] { 1 })).sync(); assertEquals(1, s.getInputStream().read()); assertTrue(h.ch.isOpen()); assertTrue(h.ch.isActive()); assertFalse(h.ch.isInputShutdown()); assertFalse(h.ch.isOutputShutdown()); // Make the connection half-closed and ensure 
read() returns -1. ch.shutdownOutput().sync(); assertEquals(-1, s.getInputStream().read()); assertTrue(h.ch.isOpen()); assertTrue(h.ch.isActive()); assertFalse(h.ch.isInputShutdown()); assertTrue(h.ch.isOutputShutdown()); // If half-closed, the peer should be able to write something. s.getOutputStream().write(new byte[] { 1 }); assertEquals(1, (int) h.queue.take()); } finally { if (s != null) { s.close(); } if (ch != null) { ch.close(); } ss.close(); } } @Test(timeout = 30000) public void testShutdownOutputAfterClosed() throws Throwable { run(); } public void testShutdownOutputAfterClosed(Bootstrap cb) throws Throwable { TestHandler h = new TestHandler(); ServerSocket ss = new ServerSocket(); Socket s = null; try { ss.bind(newSocketAddress()); SocketChannel ch = (SocketChannel) cb.handler(h).connect(ss.getLocalSocketAddress()).sync().channel(); assertTrue(ch.isActive()); s = ss.accept(); ch.close().syncUninterruptibly(); try { ch.shutdownInput().syncUninterruptibly(); fail(); } catch (Throwable cause) { checkThrowable(cause); } try { ch.shutdownOutput().syncUninterruptibly(); fail(); } catch (Throwable cause) { checkThrowable(cause); } } finally { if (s != null) { s.close(); } ss.close(); } } @Test(timeout = 30000) public void testWriteAfterShutdownOutputNoWritabilityChange() throws Throwable { run(); } public void testWriteAfterShutdownOutputNoWritabilityChange(Bootstrap cb) throws Throwable { final TestHandler h = new TestHandler(); ServerSocket ss = new ServerSocket(); Socket s = null; SocketChannel ch = null; try { ss.bind(newSocketAddress()); cb.option(ChannelOption.WRITE_BUFFER_WATER_MARK, new WriteBufferWaterMark(2, 4)); ch = (SocketChannel) cb.handler(h).connect(ss.getLocalSocketAddress()).sync().channel(); assumeFalse(ch instanceof OioSocketChannel); assertTrue(ch.isActive()); assertFalse(ch.isOutputShutdown()); s = ss.accept(); byte[] expectedBytes = new byte[]{ 1, 2, 3, 4, 5, 6 }; ChannelFuture writeFuture = 
ch.write(Unpooled.wrappedBuffer(expectedBytes)); h.assertWritability(false); ch.flush(); writeFuture.sync(); h.assertWritability(true); for (int i = 0; i < expectedBytes.length; ++i) { assertEquals(expectedBytes[i], s.getInputStream().read()); } assertTrue(h.ch.isOpen()); assertTrue(h.ch.isActive()); assertFalse(h.ch.isInputShutdown()); assertFalse(h.ch.isOutputShutdown()); // Make the connection half-closed and ensure read() returns -1. ch.shutdownOutput().sync(); assertEquals(-1, s.getInputStream().read()); assertTrue(h.ch.isOpen()); assertTrue(h.ch.isActive()); assertFalse(h.ch.isInputShutdown()); assertTrue(h.ch.isOutputShutdown()); try { // If half-closed, the local endpoint shouldn't be able to write ch.writeAndFlush(Unpooled.wrappedBuffer(new byte[]{ 2 })).sync(); fail(); } catch (Throwable cause) { checkThrowable(cause); } assertNull(h.writabilityQueue.poll()); } finally { if (s != null) { s.close(); } if (ch != null) { ch.close(); } ss.close(); } } @Test(timeout = 30000) public void testShutdownOutputSoLingerNoAssertError() throws Throwable { run(); } public void testShutdownOutputSoLingerNoAssertError(Bootstrap cb) throws Throwable { testShutdownSoLingerNoAssertError0(cb, true); } @Test(timeout = 30000) public void testShutdownSoLingerNoAssertError() throws Throwable { run(); } public void testShutdownSoLingerNoAssertError(Bootstrap cb) throws Throwable { testShutdownSoLingerNoAssertError0(cb, false); } private void testShutdownSoLingerNoAssertError0(Bootstrap cb, boolean output) throws Throwable { ServerSocket ss = new ServerSocket(); Socket s = null; ChannelFuture cf = null; try { ss.bind(newSocketAddress()); cf = cb.option(ChannelOption.SO_LINGER, 1).handler(new ChannelInboundHandlerAdapter()) .connect(ss.getLocalSocketAddress()).sync(); s = ss.accept(); cf.sync(); if (output) { ((SocketChannel) cf.channel()).shutdownOutput().sync(); } else { ((SocketChannel) cf.channel()).shutdown().sync(); } } finally { if (s != null) { s.close(); } if (cf != null) { 
cf.channel().close(); } ss.close(); } } private static void checkThrowable(Throwable cause) throws Throwable { // Depending on OIO / NIO both are ok if (!(cause instanceof ClosedChannelException) && !(cause instanceof SocketException)) { throw cause; } } private static final class TestHandler extends SimpleChannelInboundHandler<ByteBuf> { volatile SocketChannel ch; final BlockingQueue<Byte> queue = new LinkedBlockingQueue<Byte>(); final BlockingDeque<Boolean> writabilityQueue = new LinkedBlockingDeque<Boolean>(); @Override public void channelWritabilityChanged(ChannelHandlerContext ctx) throws Exception { writabilityQueue.add(ctx.channel().isWritable()); } @Override public void channelActive(ChannelHandlerContext ctx) throws Exception { ch = (SocketChannel) ctx.channel(); } @Override public void channelRead0(ChannelHandlerContext ctx, ByteBuf msg) throws Exception { queue.offer(msg.readByte()); } private void drainWritabilityQueue() throws InterruptedException { while ((writabilityQueue.poll(100, TimeUnit.MILLISECONDS)) != null) { // Just drain the queue. } } void assertWritability(boolean isWritable) throws InterruptedException { try { Boolean writability = writabilityQueue.takeLast(); assertEquals(isWritable, writability); // TODO(scott): why do we get multiple writability changes here ... race condition? drainWritabilityQueue(); } catch (Throwable c) { c.printStackTrace(); } } } }
{ "content_hash": "7f07e31e932f1adfcc4a82302d4cedff", "timestamp": "", "source": "github", "line_count": 273, "max_line_length": 114, "avg_line_length": 34.87179487179487, "alnum_prop": 0.5921218487394958, "repo_name": "andsel/netty", "id": "9cda05fe1d8ae8bc8db3255d677c998b37d566c3", "size": "10155", "binary": false, "copies": "2", "ref": "refs/heads/4.1", "path": "testsuite/src/main/java/io/netty/testsuite/transport/socket/SocketShutdownOutputBySelfTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "149620" }, { "name": "C++", "bytes": "1637" }, { "name": "CSS", "bytes": "49" }, { "name": "Groovy", "bytes": "1755" }, { "name": "HTML", "bytes": "1466" }, { "name": "Java", "bytes": "13905690" }, { "name": "Makefile", "bytes": "1577" }, { "name": "Shell", "bytes": "8541" } ], "symlink_target": "" }
package com.aol.cyclops.guava; import javaslang.Function1; import javaslang.control.Option; import com.google.common.base.Function; import com.google.common.base.Optional; public class FromJavaslang { public static <T, R> Function<T, R> f1(Function1<T, R> fn) { return (t) -> fn.apply(t); } public static <T> Optional<T> option(Option<T> o) { if (o.isEmpty()) return Optional.absent(); return Optional.of(o.get()); } }
{ "content_hash": "660d406313512b3ec162028ac3ca61e6", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 61, "avg_line_length": 21.8, "alnum_prop": 0.7041284403669725, "repo_name": "sjfloat/cyclops", "id": "c4f3af20cfe3c5d6e9ee6b674d603752e07fd1d0", "size": "436", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cyclops-guava/src/main/java/com/aol/cyclops/guava/FromJavaslang.java", "mode": "33188", "license": "mit", "language": [ { "name": "Groovy", "bytes": "16212" }, { "name": "Java", "bytes": "1336491" } ], "symlink_target": "" }
var path = require('path'); var webpack = require('webpack'); var WebpackDevServer = require('webpack-dev-server'); var config = require('./webpack.dev.config'); var compiler = webpack(config); var server = new WebpackDevServer(compiler, { hot: true, // display no info to console (only warnings and errors) noInfo: false, inline: true, progress: true, historyApiFallback: true, publicPath: config.output.publicPath, stats: { // With console colors colors: true, // add the hash of the compilation hash: true, // add webpack version information version: false, // add timing information timings: true, // add assets information assets: false, // add chunk information chunks: false, // add built modules information to chunk information chunkModules: false, // add built modules information modules: false, // add also information about cached (not built) modules cached: false, // add information about the reasons why modules are included reasons: false, // add the source code of modules source: false, // add details to errors (like resolving log) errorDetails: true, // add the origins of chunks and chunk merging info chunkOrigins: false, // Add messages from child loaders children: false } }); server.listen(3333, 'localhost', function (err) { if (err) { console.log(err); return; } console.log("Listening at http://localhost:3333. Please wait, I'm building things for you..."); });
{ "content_hash": "fab749d07f4c690e52d3c66112743136", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 97, "avg_line_length": 29.327272727272728, "alnum_prop": 0.6447613143211407, "repo_name": "nsb/jobplanner-grommet", "id": "36a8bf38e94bed2ef4668c19f97c6b9c54aa0baa", "size": "1674", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "server.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "160" }, { "name": "HTML", "bytes": "392" }, { "name": "JavaScript", "bytes": "16911" }, { "name": "TypeScript", "bytes": "9601" } ], "symlink_target": "" }
"use strict"; const path = require('path'); const AdonError = require('./../AdonError'); //get file name without extension let scriptName = path.basename(__filename); scriptName = scriptName.substring(0, scriptName.length-3); class ServerError extends AdonError{ constructor(msg){ super(msg); this.name = scriptName; this.desc = "A server related error"; } } module.exports = ServerError;
{ "content_hash": "3ff7c70228fdc3992959bb92a91ae691", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 58, "avg_line_length": 24.875, "alnum_prop": 0.7261306532663316, "repo_name": "adonisv79/adon-errors", "id": "bf31bc3628299f5053a4298757d49284e54fcffc", "size": "398", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "errors/server/ServerError.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "12366" } ], "symlink_target": "" }
package org.apache.spark.sql.execution.datasources.parquet import java.math.{BigDecimal => JBigDecimal} import java.nio.charset.StandardCharsets import java.sql.{Date, Timestamp} import org.apache.parquet.filter2.predicate.{FilterApi, FilterPredicate, Operators} import org.apache.parquet.filter2.predicate.FilterApi._ import org.apache.parquet.filter2.predicate.Operators.{Column => _, _} import org.apache.spark.sql._ import org.apache.spark.sql.catalyst.dsl.expressions._ import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.planning.PhysicalOperation import org.apache.spark.sql.execution.datasources.{DataSourceStrategy, HadoopFsRelation, LogicalRelation} import org.apache.spark.sql.functions._ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.internal.SQLConf.ParquetOutputTimestampType import org.apache.spark.sql.test.SharedSQLContext import org.apache.spark.sql.types._ import org.apache.spark.util.{AccumulatorContext, AccumulatorV2} /** * A test suite that tests Parquet filter2 API based filter pushdown optimization. * * NOTE: * * 1. `!(a cmp b)` is always transformed to its negated form `a cmp' b` by the * `BooleanSimplification` optimization rule whenever possible. As a result, predicate `!(a < 1)` * results in a `GtEq` filter predicate rather than a `Not`. * * 2. `Tuple1(Option(x))` is used together with `AnyVal` types like `Int` to ensure the inferred * data type is nullable. * * NOTE: * * This file intendedly enables record-level filtering explicitly. If new test cases are * dependent on this configuration, don't forget you better explicitly set this configuration * within the test. 
*/ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContext { private lazy val parquetFilters = new ParquetFilters(conf.parquetFilterPushDownDate, conf.parquetFilterPushDownTimestamp, conf.parquetFilterPushDownDecimal, conf.parquetFilterPushDownStringStartWith, conf.parquetFilterPushDownInFilterThreshold) override def beforeEach(): Unit = { super.beforeEach() // Note that there are many tests here that require record-level filtering set to be true. spark.conf.set(SQLConf.PARQUET_RECORD_FILTER_ENABLED.key, "true") } override def afterEach(): Unit = { try { spark.conf.unset(SQLConf.PARQUET_RECORD_FILTER_ENABLED.key) } finally { super.afterEach() } } private def checkFilterPredicate( df: DataFrame, predicate: Predicate, filterClass: Class[_ <: FilterPredicate], checker: (DataFrame, Seq[Row]) => Unit, expected: Seq[Row]): Unit = { val output = predicate.collect { case a: Attribute => a }.distinct withSQLConf( SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true", SQLConf.PARQUET_FILTER_PUSHDOWN_DATE_ENABLED.key -> "true", SQLConf.PARQUET_FILTER_PUSHDOWN_TIMESTAMP_ENABLED.key -> "true", SQLConf.PARQUET_FILTER_PUSHDOWN_DECIMAL_ENABLED.key -> "true", SQLConf.PARQUET_FILTER_PUSHDOWN_STRING_STARTSWITH_ENABLED.key -> "true", SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> "false") { val query = df .select(output.map(e => Column(e)): _*) .where(Column(predicate)) var maybeRelation: Option[HadoopFsRelation] = None val maybeAnalyzedPredicate = query.queryExecution.optimizedPlan.collect { case PhysicalOperation(_, filters, LogicalRelation(relation: HadoopFsRelation, _, _, _)) => maybeRelation = Some(relation) filters }.flatten.reduceLeftOption(_ && _) assert(maybeAnalyzedPredicate.isDefined, "No filter is analyzed from the given query") val (_, selectedFilters, _) = DataSourceStrategy.selectFilters(maybeRelation.get, maybeAnalyzedPredicate.toSeq) assert(selectedFilters.nonEmpty, "No filter is pushed down") selectedFilters.foreach { pred => val 
maybeFilter = parquetFilters.createFilter( new SparkToParquetSchemaConverter(conf).convert(df.schema), pred) assert(maybeFilter.isDefined, s"Couldn't generate filter predicate for $pred") // Doesn't bother checking type parameters here (e.g. `Eq[Integer]`) maybeFilter.exists(_.getClass === filterClass) } checker(stripSparkFilter(query), expected) } } private def checkFilterPredicate (predicate: Predicate, filterClass: Class[_ <: FilterPredicate], expected: Seq[Row]) (implicit df: DataFrame): Unit = { checkFilterPredicate(df, predicate, filterClass, checkAnswer(_, _: Seq[Row]), expected) } private def checkFilterPredicate[T] (predicate: Predicate, filterClass: Class[_ <: FilterPredicate], expected: T) (implicit df: DataFrame): Unit = { checkFilterPredicate(predicate, filterClass, Seq(Row(expected)))(df) } private def checkBinaryFilterPredicate (predicate: Predicate, filterClass: Class[_ <: FilterPredicate], expected: Seq[Row]) (implicit df: DataFrame): Unit = { def checkBinaryAnswer(df: DataFrame, expected: Seq[Row]) = { assertResult(expected.map(_.getAs[Array[Byte]](0).mkString(",")).sorted) { df.rdd.map(_.getAs[Array[Byte]](0).mkString(",")).collect().toSeq.sorted } } checkFilterPredicate(df, predicate, filterClass, checkBinaryAnswer _, expected) } private def checkBinaryFilterPredicate (predicate: Predicate, filterClass: Class[_ <: FilterPredicate], expected: Array[Byte]) (implicit df: DataFrame): Unit = { checkBinaryFilterPredicate(predicate, filterClass, Seq(Row(expected)))(df) } private def testTimestampPushdown(data: Seq[Timestamp]): Unit = { assert(data.size === 4) val ts1 = data.head val ts2 = data(1) val ts3 = data(2) val ts4 = data(3) withParquetDataFrame(data.map(i => Tuple1(i))) { implicit df => checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row]) checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], data.map(i => Row.apply(i))) checkFilterPredicate('_1 === ts1, classOf[Eq[_]], ts1) checkFilterPredicate('_1 <=> ts1, classOf[Eq[_]], ts1) 
checkFilterPredicate('_1 =!= ts1, classOf[NotEq[_]], Seq(ts2, ts3, ts4).map(i => Row.apply(i))) checkFilterPredicate('_1 < ts2, classOf[Lt[_]], ts1) checkFilterPredicate('_1 > ts1, classOf[Gt[_]], Seq(ts2, ts3, ts4).map(i => Row.apply(i))) checkFilterPredicate('_1 <= ts1, classOf[LtEq[_]], ts1) checkFilterPredicate('_1 >= ts4, classOf[GtEq[_]], ts4) checkFilterPredicate(Literal(ts1) === '_1, classOf[Eq[_]], ts1) checkFilterPredicate(Literal(ts1) <=> '_1, classOf[Eq[_]], ts1) checkFilterPredicate(Literal(ts2) > '_1, classOf[Lt[_]], ts1) checkFilterPredicate(Literal(ts3) < '_1, classOf[Gt[_]], ts4) checkFilterPredicate(Literal(ts1) >= '_1, classOf[LtEq[_]], ts1) checkFilterPredicate(Literal(ts4) <= '_1, classOf[GtEq[_]], ts4) checkFilterPredicate(!('_1 < ts4), classOf[GtEq[_]], ts4) checkFilterPredicate('_1 < ts2 || '_1 > ts3, classOf[Operators.Or], Seq(Row(ts1), Row(ts4))) } } private def testDecimalPushDown(data: DataFrame)(f: DataFrame => Unit): Unit = { withTempPath { file => data.write.parquet(file.getCanonicalPath) readParquetFile(file.toString)(f) } } // This function tests that exactly go through the `canDrop` and `inverseCanDrop`. 
private def testStringStartsWith(dataFrame: DataFrame, filter: String): Unit = { withTempPath { dir => val path = dir.getCanonicalPath dataFrame.write.option("parquet.block.size", 512).parquet(path) Seq(true, false).foreach { pushDown => withSQLConf( SQLConf.PARQUET_FILTER_PUSHDOWN_STRING_STARTSWITH_ENABLED.key -> pushDown.toString) { val accu = new NumRowGroupsAcc sparkContext.register(accu) val df = spark.read.parquet(path).filter(filter) df.foreachPartition((it: Iterator[Row]) => it.foreach(v => accu.add(0))) if (pushDown) { assert(accu.value == 0) } else { assert(accu.value > 0) } AccumulatorContext.remove(accu.id) } } } } test("filter pushdown - boolean") { withParquetDataFrame((true :: false :: Nil).map(b => Tuple1.apply(Option(b)))) { implicit df => checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row]) checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], Seq(Row(true), Row(false))) checkFilterPredicate('_1 === true, classOf[Eq[_]], true) checkFilterPredicate('_1 <=> true, classOf[Eq[_]], true) checkFilterPredicate('_1 =!= true, classOf[NotEq[_]], false) } } test("filter pushdown - tinyint") { withParquetDataFrame((1 to 4).map(i => Tuple1(Option(i.toByte)))) { implicit df => assert(df.schema.head.dataType === ByteType) checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row]) checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], (1 to 4).map(Row.apply(_))) checkFilterPredicate('_1 === 1.toByte, classOf[Eq[_]], 1) checkFilterPredicate('_1 <=> 1.toByte, classOf[Eq[_]], 1) checkFilterPredicate('_1 =!= 1.toByte, classOf[NotEq[_]], (2 to 4).map(Row.apply(_))) checkFilterPredicate('_1 < 2.toByte, classOf[Lt[_]], 1) checkFilterPredicate('_1 > 3.toByte, classOf[Gt[_]], 4) checkFilterPredicate('_1 <= 1.toByte, classOf[LtEq[_]], 1) checkFilterPredicate('_1 >= 4.toByte, classOf[GtEq[_]], 4) checkFilterPredicate(Literal(1.toByte) === '_1, classOf[Eq[_]], 1) checkFilterPredicate(Literal(1.toByte) <=> '_1, classOf[Eq[_]], 1) 
checkFilterPredicate(Literal(2.toByte) > '_1, classOf[Lt[_]], 1) checkFilterPredicate(Literal(3.toByte) < '_1, classOf[Gt[_]], 4) checkFilterPredicate(Literal(1.toByte) >= '_1, classOf[LtEq[_]], 1) checkFilterPredicate(Literal(4.toByte) <= '_1, classOf[GtEq[_]], 4) checkFilterPredicate(!('_1 < 4.toByte), classOf[GtEq[_]], 4) checkFilterPredicate('_1 < 2.toByte || '_1 > 3.toByte, classOf[Operators.Or], Seq(Row(1), Row(4))) } } test("filter pushdown - smallint") { withParquetDataFrame((1 to 4).map(i => Tuple1(Option(i.toShort)))) { implicit df => assert(df.schema.head.dataType === ShortType) checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row]) checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], (1 to 4).map(Row.apply(_))) checkFilterPredicate('_1 === 1.toShort, classOf[Eq[_]], 1) checkFilterPredicate('_1 <=> 1.toShort, classOf[Eq[_]], 1) checkFilterPredicate('_1 =!= 1.toShort, classOf[NotEq[_]], (2 to 4).map(Row.apply(_))) checkFilterPredicate('_1 < 2.toShort, classOf[Lt[_]], 1) checkFilterPredicate('_1 > 3.toShort, classOf[Gt[_]], 4) checkFilterPredicate('_1 <= 1.toShort, classOf[LtEq[_]], 1) checkFilterPredicate('_1 >= 4.toShort, classOf[GtEq[_]], 4) checkFilterPredicate(Literal(1.toShort) === '_1, classOf[Eq[_]], 1) checkFilterPredicate(Literal(1.toShort) <=> '_1, classOf[Eq[_]], 1) checkFilterPredicate(Literal(2.toShort) > '_1, classOf[Lt[_]], 1) checkFilterPredicate(Literal(3.toShort) < '_1, classOf[Gt[_]], 4) checkFilterPredicate(Literal(1.toShort) >= '_1, classOf[LtEq[_]], 1) checkFilterPredicate(Literal(4.toShort) <= '_1, classOf[GtEq[_]], 4) checkFilterPredicate(!('_1 < 4.toShort), classOf[GtEq[_]], 4) checkFilterPredicate('_1 < 2.toShort || '_1 > 3.toShort, classOf[Operators.Or], Seq(Row(1), Row(4))) } } test("filter pushdown - integer") { withParquetDataFrame((1 to 4).map(i => Tuple1(Option(i)))) { implicit df => checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row]) checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], (1 to 
4).map(Row.apply(_))) checkFilterPredicate('_1 === 1, classOf[Eq[_]], 1) checkFilterPredicate('_1 <=> 1, classOf[Eq[_]], 1) checkFilterPredicate('_1 =!= 1, classOf[NotEq[_]], (2 to 4).map(Row.apply(_))) checkFilterPredicate('_1 < 2, classOf[Lt[_]], 1) checkFilterPredicate('_1 > 3, classOf[Gt[_]], 4) checkFilterPredicate('_1 <= 1, classOf[LtEq[_]], 1) checkFilterPredicate('_1 >= 4, classOf[GtEq[_]], 4) checkFilterPredicate(Literal(1) === '_1, classOf[Eq[_]], 1) checkFilterPredicate(Literal(1) <=> '_1, classOf[Eq[_]], 1) checkFilterPredicate(Literal(2) > '_1, classOf[Lt[_]], 1) checkFilterPredicate(Literal(3) < '_1, classOf[Gt[_]], 4) checkFilterPredicate(Literal(1) >= '_1, classOf[LtEq[_]], 1) checkFilterPredicate(Literal(4) <= '_1, classOf[GtEq[_]], 4) checkFilterPredicate(!('_1 < 4), classOf[GtEq[_]], 4) checkFilterPredicate('_1 < 2 || '_1 > 3, classOf[Operators.Or], Seq(Row(1), Row(4))) } } test("filter pushdown - long") { withParquetDataFrame((1 to 4).map(i => Tuple1(Option(i.toLong)))) { implicit df => checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row]) checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], (1 to 4).map(Row.apply(_))) checkFilterPredicate('_1 === 1, classOf[Eq[_]], 1) checkFilterPredicate('_1 <=> 1, classOf[Eq[_]], 1) checkFilterPredicate('_1 =!= 1, classOf[NotEq[_]], (2 to 4).map(Row.apply(_))) checkFilterPredicate('_1 < 2, classOf[Lt[_]], 1) checkFilterPredicate('_1 > 3, classOf[Gt[_]], 4) checkFilterPredicate('_1 <= 1, classOf[LtEq[_]], 1) checkFilterPredicate('_1 >= 4, classOf[GtEq[_]], 4) checkFilterPredicate(Literal(1) === '_1, classOf[Eq[_]], 1) checkFilterPredicate(Literal(1) <=> '_1, classOf[Eq[_]], 1) checkFilterPredicate(Literal(2) > '_1, classOf[Lt[_]], 1) checkFilterPredicate(Literal(3) < '_1, classOf[Gt[_]], 4) checkFilterPredicate(Literal(1) >= '_1, classOf[LtEq[_]], 1) checkFilterPredicate(Literal(4) <= '_1, classOf[GtEq[_]], 4) checkFilterPredicate(!('_1 < 4), classOf[GtEq[_]], 4) checkFilterPredicate('_1 < 2 || 
'_1 > 3, classOf[Operators.Or], Seq(Row(1), Row(4))) } } test("filter pushdown - float") { withParquetDataFrame((1 to 4).map(i => Tuple1(Option(i.toFloat)))) { implicit df => checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row]) checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], (1 to 4).map(Row.apply(_))) checkFilterPredicate('_1 === 1, classOf[Eq[_]], 1) checkFilterPredicate('_1 <=> 1, classOf[Eq[_]], 1) checkFilterPredicate('_1 =!= 1, classOf[NotEq[_]], (2 to 4).map(Row.apply(_))) checkFilterPredicate('_1 < 2, classOf[Lt[_]], 1) checkFilterPredicate('_1 > 3, classOf[Gt[_]], 4) checkFilterPredicate('_1 <= 1, classOf[LtEq[_]], 1) checkFilterPredicate('_1 >= 4, classOf[GtEq[_]], 4) checkFilterPredicate(Literal(1) === '_1, classOf[Eq[_]], 1) checkFilterPredicate(Literal(1) <=> '_1, classOf[Eq[_]], 1) checkFilterPredicate(Literal(2) > '_1, classOf[Lt[_]], 1) checkFilterPredicate(Literal(3) < '_1, classOf[Gt[_]], 4) checkFilterPredicate(Literal(1) >= '_1, classOf[LtEq[_]], 1) checkFilterPredicate(Literal(4) <= '_1, classOf[GtEq[_]], 4) checkFilterPredicate(!('_1 < 4), classOf[GtEq[_]], 4) checkFilterPredicate('_1 < 2 || '_1 > 3, classOf[Operators.Or], Seq(Row(1), Row(4))) } } test("filter pushdown - double") { withParquetDataFrame((1 to 4).map(i => Tuple1(Option(i.toDouble)))) { implicit df => checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row]) checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], (1 to 4).map(Row.apply(_))) checkFilterPredicate('_1 === 1, classOf[Eq[_]], 1) checkFilterPredicate('_1 <=> 1, classOf[Eq[_]], 1) checkFilterPredicate('_1 =!= 1, classOf[NotEq[_]], (2 to 4).map(Row.apply(_))) checkFilterPredicate('_1 < 2, classOf[Lt[_]], 1) checkFilterPredicate('_1 > 3, classOf[Gt[_]], 4) checkFilterPredicate('_1 <= 1, classOf[LtEq[_]], 1) checkFilterPredicate('_1 >= 4, classOf[GtEq[_]], 4) checkFilterPredicate(Literal(1) === '_1, classOf[Eq[_]], 1) checkFilterPredicate(Literal(1) <=> '_1, classOf[Eq[_]], 1) 
checkFilterPredicate(Literal(2) > '_1, classOf[Lt[_]], 1) checkFilterPredicate(Literal(3) < '_1, classOf[Gt[_]], 4) checkFilterPredicate(Literal(1) >= '_1, classOf[LtEq[_]], 1) checkFilterPredicate(Literal(4) <= '_1, classOf[GtEq[_]], 4) checkFilterPredicate(!('_1 < 4), classOf[GtEq[_]], 4) checkFilterPredicate('_1 < 2 || '_1 > 3, classOf[Operators.Or], Seq(Row(1), Row(4))) } } test("filter pushdown - string") { withParquetDataFrame((1 to 4).map(i => Tuple1(i.toString))) { implicit df => checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row]) checkFilterPredicate( '_1.isNotNull, classOf[NotEq[_]], (1 to 4).map(i => Row.apply(i.toString))) checkFilterPredicate('_1 === "1", classOf[Eq[_]], "1") checkFilterPredicate('_1 <=> "1", classOf[Eq[_]], "1") checkFilterPredicate( '_1 =!= "1", classOf[NotEq[_]], (2 to 4).map(i => Row.apply(i.toString))) checkFilterPredicate('_1 < "2", classOf[Lt[_]], "1") checkFilterPredicate('_1 > "3", classOf[Gt[_]], "4") checkFilterPredicate('_1 <= "1", classOf[LtEq[_]], "1") checkFilterPredicate('_1 >= "4", classOf[GtEq[_]], "4") checkFilterPredicate(Literal("1") === '_1, classOf[Eq[_]], "1") checkFilterPredicate(Literal("1") <=> '_1, classOf[Eq[_]], "1") checkFilterPredicate(Literal("2") > '_1, classOf[Lt[_]], "1") checkFilterPredicate(Literal("3") < '_1, classOf[Gt[_]], "4") checkFilterPredicate(Literal("1") >= '_1, classOf[LtEq[_]], "1") checkFilterPredicate(Literal("4") <= '_1, classOf[GtEq[_]], "4") checkFilterPredicate(!('_1 < "4"), classOf[GtEq[_]], "4") checkFilterPredicate('_1 < "2" || '_1 > "3", classOf[Operators.Or], Seq(Row("1"), Row("4"))) } } test("filter pushdown - binary") { implicit class IntToBinary(int: Int) { def b: Array[Byte] = int.toString.getBytes(StandardCharsets.UTF_8) } withParquetDataFrame((1 to 4).map(i => Tuple1(i.b))) { implicit df => checkBinaryFilterPredicate('_1 === 1.b, classOf[Eq[_]], 1.b) checkBinaryFilterPredicate('_1 <=> 1.b, classOf[Eq[_]], 1.b) checkBinaryFilterPredicate('_1.isNull, 
classOf[Eq[_]], Seq.empty[Row]) checkBinaryFilterPredicate( '_1.isNotNull, classOf[NotEq[_]], (1 to 4).map(i => Row.apply(i.b)).toSeq) checkBinaryFilterPredicate( '_1 =!= 1.b, classOf[NotEq[_]], (2 to 4).map(i => Row.apply(i.b)).toSeq) checkBinaryFilterPredicate('_1 < 2.b, classOf[Lt[_]], 1.b) checkBinaryFilterPredicate('_1 > 3.b, classOf[Gt[_]], 4.b) checkBinaryFilterPredicate('_1 <= 1.b, classOf[LtEq[_]], 1.b) checkBinaryFilterPredicate('_1 >= 4.b, classOf[GtEq[_]], 4.b) checkBinaryFilterPredicate(Literal(1.b) === '_1, classOf[Eq[_]], 1.b) checkBinaryFilterPredicate(Literal(1.b) <=> '_1, classOf[Eq[_]], 1.b) checkBinaryFilterPredicate(Literal(2.b) > '_1, classOf[Lt[_]], 1.b) checkBinaryFilterPredicate(Literal(3.b) < '_1, classOf[Gt[_]], 4.b) checkBinaryFilterPredicate(Literal(1.b) >= '_1, classOf[LtEq[_]], 1.b) checkBinaryFilterPredicate(Literal(4.b) <= '_1, classOf[GtEq[_]], 4.b) checkBinaryFilterPredicate(!('_1 < 4.b), classOf[GtEq[_]], 4.b) checkBinaryFilterPredicate( '_1 < 2.b || '_1 > 3.b, classOf[Operators.Or], Seq(Row(1.b), Row(4.b))) } } test("filter pushdown - date") { implicit class StringToDate(s: String) { def date: Date = Date.valueOf(s) } val data = Seq("2018-03-18", "2018-03-19", "2018-03-20", "2018-03-21") withParquetDataFrame(data.map(i => Tuple1(i.date))) { implicit df => checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row]) checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], data.map(i => Row.apply(i.date))) checkFilterPredicate('_1 === "2018-03-18".date, classOf[Eq[_]], "2018-03-18".date) checkFilterPredicate('_1 <=> "2018-03-18".date, classOf[Eq[_]], "2018-03-18".date) checkFilterPredicate('_1 =!= "2018-03-18".date, classOf[NotEq[_]], Seq("2018-03-19", "2018-03-20", "2018-03-21").map(i => Row.apply(i.date))) checkFilterPredicate('_1 < "2018-03-19".date, classOf[Lt[_]], "2018-03-18".date) checkFilterPredicate('_1 > "2018-03-20".date, classOf[Gt[_]], "2018-03-21".date) checkFilterPredicate('_1 <= "2018-03-18".date, 
classOf[LtEq[_]], "2018-03-18".date) checkFilterPredicate('_1 >= "2018-03-21".date, classOf[GtEq[_]], "2018-03-21".date) checkFilterPredicate( Literal("2018-03-18".date) === '_1, classOf[Eq[_]], "2018-03-18".date) checkFilterPredicate( Literal("2018-03-18".date) <=> '_1, classOf[Eq[_]], "2018-03-18".date) checkFilterPredicate( Literal("2018-03-19".date) > '_1, classOf[Lt[_]], "2018-03-18".date) checkFilterPredicate( Literal("2018-03-20".date) < '_1, classOf[Gt[_]], "2018-03-21".date) checkFilterPredicate( Literal("2018-03-18".date) >= '_1, classOf[LtEq[_]], "2018-03-18".date) checkFilterPredicate( Literal("2018-03-21".date) <= '_1, classOf[GtEq[_]], "2018-03-21".date) checkFilterPredicate(!('_1 < "2018-03-21".date), classOf[GtEq[_]], "2018-03-21".date) checkFilterPredicate( '_1 < "2018-03-19".date || '_1 > "2018-03-20".date, classOf[Operators.Or], Seq(Row("2018-03-18".date), Row("2018-03-21".date))) } } test("filter pushdown - timestamp") { // spark.sql.parquet.outputTimestampType = TIMESTAMP_MILLIS val millisData = Seq(Timestamp.valueOf("2018-06-14 08:28:53.123"), Timestamp.valueOf("2018-06-15 08:28:53.123"), Timestamp.valueOf("2018-06-16 08:28:53.123"), Timestamp.valueOf("2018-06-17 08:28:53.123")) withSQLConf(SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE.key -> ParquetOutputTimestampType.TIMESTAMP_MILLIS.toString) { testTimestampPushdown(millisData) } // spark.sql.parquet.outputTimestampType = TIMESTAMP_MICROS val microsData = Seq(Timestamp.valueOf("2018-06-14 08:28:53.123456"), Timestamp.valueOf("2018-06-15 08:28:53.123456"), Timestamp.valueOf("2018-06-16 08:28:53.123456"), Timestamp.valueOf("2018-06-17 08:28:53.123456")) withSQLConf(SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE.key -> ParquetOutputTimestampType.TIMESTAMP_MICROS.toString) { testTimestampPushdown(microsData) } // spark.sql.parquet.outputTimestampType = INT96 doesn't support pushdown withSQLConf(SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE.key -> ParquetOutputTimestampType.INT96.toString) { 
withParquetDataFrame(millisData.map(i => Tuple1(i))) { implicit df => assertResult(None) { parquetFilters.createFilter( new SparkToParquetSchemaConverter(conf).convert(df.schema), sources.IsNull("_1")) } } } } test("filter pushdown - decimal") { Seq(true, false).foreach { legacyFormat => withSQLConf(SQLConf.PARQUET_WRITE_LEGACY_FORMAT.key -> legacyFormat.toString) { Seq( s"a decimal(${Decimal.MAX_INT_DIGITS}, 2)", // 32BitDecimalType s"a decimal(${Decimal.MAX_LONG_DIGITS}, 2)", // 64BitDecimalType "a decimal(38, 18)" // ByteArrayDecimalType ).foreach { schemaDDL => val schema = StructType.fromDDL(schemaDDL) val rdd = spark.sparkContext.parallelize((1 to 4).map(i => Row(new java.math.BigDecimal(i)))) val dataFrame = spark.createDataFrame(rdd, schema) testDecimalPushDown(dataFrame) { implicit df => assert(df.schema === schema) checkFilterPredicate('a.isNull, classOf[Eq[_]], Seq.empty[Row]) checkFilterPredicate('a.isNotNull, classOf[NotEq[_]], (1 to 4).map(Row.apply(_))) checkFilterPredicate('a === 1, classOf[Eq[_]], 1) checkFilterPredicate('a <=> 1, classOf[Eq[_]], 1) checkFilterPredicate('a =!= 1, classOf[NotEq[_]], (2 to 4).map(Row.apply(_))) checkFilterPredicate('a < 2, classOf[Lt[_]], 1) checkFilterPredicate('a > 3, classOf[Gt[_]], 4) checkFilterPredicate('a <= 1, classOf[LtEq[_]], 1) checkFilterPredicate('a >= 4, classOf[GtEq[_]], 4) checkFilterPredicate(Literal(1) === 'a, classOf[Eq[_]], 1) checkFilterPredicate(Literal(1) <=> 'a, classOf[Eq[_]], 1) checkFilterPredicate(Literal(2) > 'a, classOf[Lt[_]], 1) checkFilterPredicate(Literal(3) < 'a, classOf[Gt[_]], 4) checkFilterPredicate(Literal(1) >= 'a, classOf[LtEq[_]], 1) checkFilterPredicate(Literal(4) <= 'a, classOf[GtEq[_]], 4) checkFilterPredicate(!('a < 4), classOf[GtEq[_]], 4) checkFilterPredicate('a < 2 || 'a > 3, classOf[Operators.Or], Seq(Row(1), Row(4))) } } } } } test("Ensure that filter value matched the parquet file schema") { val scale = 2 val schema = StructType(Seq( StructField("cint", 
IntegerType), StructField("cdecimal1", DecimalType(Decimal.MAX_INT_DIGITS, scale)), StructField("cdecimal2", DecimalType(Decimal.MAX_LONG_DIGITS, scale)), StructField("cdecimal3", DecimalType(DecimalType.MAX_PRECISION, scale)) )) val parquetSchema = new SparkToParquetSchemaConverter(conf).convert(schema) val decimal = new JBigDecimal(10).setScale(scale) val decimal1 = new JBigDecimal(10).setScale(scale + 1) assert(decimal.scale() === scale) assert(decimal1.scale() === scale + 1) assertResult(Some(lt(intColumn("cdecimal1"), 1000: Integer))) { parquetFilters.createFilter(parquetSchema, sources.LessThan("cdecimal1", decimal)) } assertResult(None) { parquetFilters.createFilter(parquetSchema, sources.LessThan("cdecimal1", decimal1)) } assertResult(Some(lt(longColumn("cdecimal2"), 1000L: java.lang.Long))) { parquetFilters.createFilter(parquetSchema, sources.LessThan("cdecimal2", decimal)) } assertResult(None) { parquetFilters.createFilter(parquetSchema, sources.LessThan("cdecimal2", decimal1)) } assert(parquetFilters.createFilter( parquetSchema, sources.LessThan("cdecimal3", decimal)).isDefined) assertResult(None) { parquetFilters.createFilter(parquetSchema, sources.LessThan("cdecimal3", decimal1)) } } test("SPARK-6554: don't push down predicates which reference partition columns") { import testImplicits._ withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") { withTempPath { dir => val path = s"${dir.getCanonicalPath}/part=1" (1 to 3).map(i => (i, i.toString)).toDF("a", "b").write.parquet(path) // If the "part = 1" filter gets pushed down, this query will throw an exception since // "part" is not a valid column in the actual Parquet file checkAnswer( spark.read.parquet(dir.getCanonicalPath).filter("part = 1"), (1 to 3).map(i => Row(i, i.toString, 1))) } } } test("SPARK-10829: Filter combine partition key and attribute doesn't work in DataSource scan") { import testImplicits._ withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") { withTempPath 
{ dir => val path = s"${dir.getCanonicalPath}/part=1" (1 to 3).map(i => (i, i.toString)).toDF("a", "b").write.parquet(path) // If the "part = 1" filter gets pushed down, this query will throw an exception since // "part" is not a valid column in the actual Parquet file checkAnswer( spark.read.parquet(dir.getCanonicalPath).filter("a > 0 and (part = 0 or a > 1)"), (2 to 3).map(i => Row(i, i.toString, 1))) } } } test("SPARK-12231: test the filter and empty project in partitioned DataSource scan") { import testImplicits._ withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") { withTempPath { dir => val path = s"${dir.getCanonicalPath}" (1 to 3).map(i => (i, i + 1, i + 2, i + 3)).toDF("a", "b", "c", "d"). write.partitionBy("a").parquet(path) // The filter "a > 1 or b < 2" will not get pushed down, and the projection is empty, // this query will throw an exception since the project from combinedFilter expect // two projection while the val df1 = spark.read.parquet(dir.getCanonicalPath) assert(df1.filter("a > 1 or b < 2").count() == 2) } } } test("SPARK-12231: test the new projection in partitioned DataSource scan") { import testImplicits._ withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") { withTempPath { dir => val path = s"${dir.getCanonicalPath}" (1 to 3).map(i => (i, i + 1, i + 2, i + 3)).toDF("a", "b", "c", "d"). 
write.partitionBy("a").parquet(path) // test the generate new projection case // when projects != partitionAndNormalColumnProjs val df1 = spark.read.parquet(dir.getCanonicalPath) checkAnswer( df1.filter("a > 1 or b > 2").orderBy("a").selectExpr("a", "b", "c", "d"), (2 to 3).map(i => Row(i, i + 1, i + 2, i + 3))) } } } test("Filter applied on merged Parquet schema with new column should work") { import testImplicits._ Seq("true", "false").foreach { vectorized => withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true", SQLConf.PARQUET_SCHEMA_MERGING_ENABLED.key -> "true", SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> vectorized) { withTempPath { dir => val path1 = s"${dir.getCanonicalPath}/table1" (1 to 3).map(i => (i, i.toString)).toDF("a", "b").write.parquet(path1) val path2 = s"${dir.getCanonicalPath}/table2" (1 to 3).map(i => (i, i.toString)).toDF("c", "b").write.parquet(path2) // No matter "c = 1" gets pushed down or not, this query should work without exception. val df = spark.read.parquet(path1, path2).filter("c = 1").selectExpr("c", "b", "a") checkAnswer( df, Row(1, "1", null)) val path3 = s"${dir.getCanonicalPath}/table3" val dfStruct = sparkContext.parallelize(Seq((1, 1))).toDF("a", "b") dfStruct.select(struct("a").as("s")).write.parquet(path3) val path4 = s"${dir.getCanonicalPath}/table4" val dfStruct2 = sparkContext.parallelize(Seq((1, 1))).toDF("c", "b") dfStruct2.select(struct("c").as("s")).write.parquet(path4) // No matter "s.c = 1" gets pushed down or not, this query should work without exception. val dfStruct3 = spark.read.parquet(path3, path4).filter("s.c = 1") .selectExpr("s") checkAnswer(dfStruct3, Row(Row(null, 1))) } } } } // The unsafe row RecordReader does not support row by row filtering so run it with it disabled. 
test("SPARK-11661 Still pushdown filters returned by unhandledFilters") { import testImplicits._ withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") { withSQLConf(SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> "false") { withTempPath { dir => val path = s"${dir.getCanonicalPath}/part=1" (1 to 3).map(i => (i, i.toString)).toDF("a", "b").write.parquet(path) val df = spark.read.parquet(path).filter("a = 2") // The result should be single row. // When a filter is pushed to Parquet, Parquet can apply it to every row. // So, we can check the number of rows returned from the Parquet // to make sure our filter pushdown work. assert(stripSparkFilter(df).count == 1) } } } } test("SPARK-12218: 'Not' is included in Parquet filter pushdown") { import testImplicits._ withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") { withTempPath { dir => val path = s"${dir.getCanonicalPath}/table1" (1 to 5).map(i => (i, (i % 2).toString)).toDF("a", "b").write.parquet(path) checkAnswer( spark.read.parquet(path).where("not (a = 2) or not(b in ('1'))"), (1 to 5).map(i => Row(i, (i % 2).toString))) checkAnswer( spark.read.parquet(path).where("not (a = 2 and b in ('1'))"), (1 to 5).map(i => Row(i, (i % 2).toString))) } } } test("SPARK-12218 Converting conjunctions into Parquet filter predicates") { val schema = StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", StringType, nullable = true), StructField("c", DoubleType, nullable = true) )) val parquetSchema = new SparkToParquetSchemaConverter(conf).convert(schema) assertResult(Some(and( lt(intColumn("a"), 10: Integer), gt(doubleColumn("c"), 1.5: java.lang.Double))) ) { parquetFilters.createFilter( parquetSchema, sources.And( sources.LessThan("a", 10), sources.GreaterThan("c", 1.5D))) } assertResult(None) { parquetFilters.createFilter( parquetSchema, sources.And( sources.LessThan("a", 10), sources.StringContains("b", "prefix"))) } assertResult(None) { parquetFilters.createFilter( 
parquetSchema, sources.Not( sources.And( sources.GreaterThan("a", 1), sources.StringContains("b", "prefix")))) } } test("SPARK-16371 Do not push down filters when inner name and outer name are the same") { withParquetDataFrame((1 to 4).map(i => Tuple1(Tuple1(i)))) { implicit df => // Here the schema becomes as below: // // root // |-- _1: struct (nullable = true) // | |-- _1: integer (nullable = true) // // The inner column name, `_1` and outer column name `_1` are the same. // Obviously this should not push down filters because the outer column is struct. assert(df.filter("_1 IS NOT NULL").count() === 4) } } test("Filters should be pushed down for vectorized Parquet reader at row group level") { import testImplicits._ withSQLConf(SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> "true", SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "false") { withTempPath { dir => val path = s"${dir.getCanonicalPath}/table" (1 to 1024).map(i => (101, i)).toDF("a", "b").write.parquet(path) Seq(true, false).foreach { enablePushDown => withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> enablePushDown.toString) { val accu = new NumRowGroupsAcc sparkContext.register(accu) val df = spark.read.parquet(path).filter("a < 100") df.foreachPartition((it: Iterator[Row]) => it.foreach(v => accu.add(0))) if (enablePushDown) { assert(accu.value == 0) } else { assert(accu.value > 0) } AccumulatorContext.remove(accu.id) } } } } } test("SPARK-17213: Broken Parquet filter push-down for string columns") { Seq(true, false).foreach { vectorizedEnabled => withSQLConf(SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> vectorizedEnabled.toString) { withTempPath { dir => import testImplicits._ val path = dir.getCanonicalPath // scalastyle:off nonascii Seq("a", "é").toDF("name").write.parquet(path) // scalastyle:on nonascii assert(spark.read.parquet(path).where("name > 'a'").count() == 1) assert(spark.read.parquet(path).where("name >= 'a'").count() == 2) // scalastyle:off nonascii 
assert(spark.read.parquet(path).where("name < 'é'").count() == 1) assert(spark.read.parquet(path).where("name <= 'é'").count() == 2) // scalastyle:on nonascii } } } } test("SPARK-20364: Disable Parquet predicate pushdown for fields having dots in the names") { import testImplicits._ Seq(true, false).foreach { vectorized => withSQLConf(SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> vectorized.toString, SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> true.toString, SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> "false") { withTempPath { path => Seq(Some(1), None).toDF("col.dots").write.parquet(path.getAbsolutePath) val readBack = spark.read.parquet(path.getAbsolutePath).where("`col.dots` IS NOT NULL") assert(readBack.count() == 1) } } } } test("Filters should be pushed down for Parquet readers at row group level") { import testImplicits._ withSQLConf( // Makes sure disabling 'spark.sql.parquet.recordFilter' still enables // row group level filtering. SQLConf.PARQUET_RECORD_FILTER_ENABLED.key -> "false", SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true", SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> "false") { withTempPath { path => val data = (1 to 1024) data.toDF("a").coalesce(1) .write.option("parquet.block.size", 512) .parquet(path.getAbsolutePath) val df = spark.read.parquet(path.getAbsolutePath).filter("a == 500") // Here, we strip the Spark side filter and check the actual results from Parquet. val actual = stripSparkFilter(df).collect().length // Since those are filtered at row group level, the result count should be less // than the total length but should not be a single record. // Note that, if record level filtering is enabled, it should be a single record. // If no filter is pushed down to Parquet, it should be the total length of data. 
assert(actual > 1 && actual < data.length) } } } test("SPARK-23852: Broken Parquet push-down for partially-written stats") { withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") { // parquet-1217.parquet contains a single column with values -1, 0, 1, 2 and null. // The row-group statistics include null counts, but not min and max values, which // triggers PARQUET-1217. val df = readResourceParquetFile("test-data/parquet-1217.parquet") // Will return 0 rows if PARQUET-1217 is not fixed. assert(df.where("col > 0").count() === 2) } } test("filter pushdown - StringStartsWith") { withParquetDataFrame((1 to 4).map(i => Tuple1(i + "str" + i))) { implicit df => checkFilterPredicate( '_1.startsWith("").asInstanceOf[Predicate], classOf[UserDefinedByInstance[_, _]], Seq("1str1", "2str2", "3str3", "4str4").map(Row(_))) Seq("2", "2s", "2st", "2str", "2str2").foreach { prefix => checkFilterPredicate( '_1.startsWith(prefix).asInstanceOf[Predicate], classOf[UserDefinedByInstance[_, _]], "2str2") } Seq("2S", "null", "2str22").foreach { prefix => checkFilterPredicate( '_1.startsWith(prefix).asInstanceOf[Predicate], classOf[UserDefinedByInstance[_, _]], Seq.empty[Row]) } checkFilterPredicate( !'_1.startsWith("").asInstanceOf[Predicate], classOf[UserDefinedByInstance[_, _]], Seq().map(Row(_))) Seq("2", "2s", "2st", "2str", "2str2").foreach { prefix => checkFilterPredicate( !'_1.startsWith(prefix).asInstanceOf[Predicate], classOf[UserDefinedByInstance[_, _]], Seq("1str1", "3str3", "4str4").map(Row(_))) } Seq("2S", "null", "2str22").foreach { prefix => checkFilterPredicate( !'_1.startsWith(prefix).asInstanceOf[Predicate], classOf[UserDefinedByInstance[_, _]], Seq("1str1", "2str2", "3str3", "4str4").map(Row(_))) } assertResult(None) { parquetFilters.createFilter( new SparkToParquetSchemaConverter(conf).convert(df.schema), sources.StringStartsWith("_1", null)) } } import testImplicits._ // Test canDrop() has taken effect 
testStringStartsWith(spark.range(1024).map(_.toString).toDF(), "value like 'a%'") // Test inverseCanDrop() has taken effect testStringStartsWith(spark.range(1024).map(c => "100").toDF(), "value not like '10%'") } test("SPARK-17091: Convert IN predicate to Parquet filter push-down") { val schema = StructType(Seq( StructField("a", IntegerType, nullable = false) )) val parquetSchema = new SparkToParquetSchemaConverter(conf).convert(schema) assertResult(Some(FilterApi.eq(intColumn("a"), null: Integer))) { parquetFilters.createFilter(parquetSchema, sources.In("a", Array(null))) } assertResult(Some(FilterApi.eq(intColumn("a"), 10: Integer))) { parquetFilters.createFilter(parquetSchema, sources.In("a", Array(10))) } // Remove duplicates assertResult(Some(FilterApi.eq(intColumn("a"), 10: Integer))) { parquetFilters.createFilter(parquetSchema, sources.In("a", Array(10, 10))) } assertResult(Some(or(or( FilterApi.eq(intColumn("a"), 10: Integer), FilterApi.eq(intColumn("a"), 20: Integer)), FilterApi.eq(intColumn("a"), 30: Integer))) ) { parquetFilters.createFilter(parquetSchema, sources.In("a", Array(10, 20, 30))) } assert(parquetFilters.createFilter(parquetSchema, sources.In("a", Range(0, conf.parquetFilterPushDownInFilterThreshold).toArray)).isDefined) assert(parquetFilters.createFilter(parquetSchema, sources.In("a", Range(0, conf.parquetFilterPushDownInFilterThreshold + 1).toArray)).isEmpty) import testImplicits._ withTempPath { path => val data = 0 to 1024 data.toDF("a").selectExpr("if (a = 1024, null, a) AS a") // convert 1024 to null .coalesce(1).write.option("parquet.block.size", 512) .parquet(path.getAbsolutePath) val df = spark.read.parquet(path.getAbsolutePath) Seq(true, false).foreach { pushEnabled => withSQLConf( SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> pushEnabled.toString) { Seq(1, 5, 10, 11).foreach { count => val filter = s"a in(${Range(0, count).mkString(",")})" assert(df.where(filter).count() === count) val actual = 
stripSparkFilter(df.where(filter)).collect().length if (pushEnabled && count <= conf.parquetFilterPushDownInFilterThreshold) { assert(actual > 1 && actual < data.length) } else { assert(actual === data.length) } } assert(df.where("a in(null)").count() === 0) assert(df.where("a = null").count() === 0) assert(df.where("a is null").count() === 1) } } } } } class NumRowGroupsAcc extends AccumulatorV2[Integer, Integer] { private var _sum = 0 override def isZero: Boolean = _sum == 0 override def copy(): AccumulatorV2[Integer, Integer] = { val acc = new NumRowGroupsAcc() acc._sum = _sum acc } override def reset(): Unit = _sum = 0 override def add(v: Integer): Unit = _sum += v override def merge(other: AccumulatorV2[Integer, Integer]): Unit = other match { case a: NumRowGroupsAcc => _sum += a._sum case _ => throw new UnsupportedOperationException( s"Cannot merge ${this.getClass.getName} with ${other.getClass.getName}") } override def value: Integer = _sum }
{ "content_hash": "915c207ae0e91282bc6586bc8eeae2aa", "timestamp": "", "source": "github", "line_count": 1033, "max_line_length": 105, "avg_line_length": 42.531461761858665, "alnum_prop": 0.6214635256629111, "repo_name": "eyalfa/spark", "id": "be4f498c921abd4b47927e9797b019822edec235", "size": "44738", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "35042" }, { "name": "Batchfile", "bytes": "30285" }, { "name": "C", "bytes": "1493" }, { "name": "CSS", "bytes": "23956" }, { "name": "Dockerfile", "bytes": "7157" }, { "name": "HTML", "bytes": "65141" }, { "name": "HiveQL", "bytes": "1823425" }, { "name": "Java", "bytes": "3372693" }, { "name": "JavaScript", "bytes": "144886" }, { "name": "Makefile", "bytes": "9395" }, { "name": "PLpgSQL", "bytes": "163419" }, { "name": "PowerShell", "bytes": "3756" }, { "name": "Python", "bytes": "2715827" }, { "name": "R", "bytes": "1131137" }, { "name": "Roff", "bytes": "20789" }, { "name": "SQLPL", "bytes": "30039" }, { "name": "Scala", "bytes": "26996418" }, { "name": "Shell", "bytes": "189256" }, { "name": "Thrift", "bytes": "33605" }, { "name": "q", "bytes": "146878" } ], "symlink_target": "" }
/** @file include/my_thread.h Defines to make different thread packages compatible. */ #ifndef MY_THREAD_INCLUDED #define MY_THREAD_INCLUDED #include <errno.h> #include <stdbool.h> #include <stddef.h> #include <mysql/components/services/my_thread_bits.h> #include "my_compiler.h" #include "my_config.h" #include "my_inttypes.h" #include "my_macros.h" #ifndef ETIME #define ETIME ETIMEDOUT /* For FreeBSD */ #endif #ifndef ETIMEDOUT #define ETIMEDOUT 145 /* Win32 doesn't have this */ #endif // Pick a value which is enough for all mtr tests, // on all known/supported platforms. // Currently the largest stack requirement is with // clang with DEBUG and UBSAN -O0 -fno-inline #define DEFAULT_THREAD_STACK (1024UL * 1024UL) static inline int is_timeout(int e) { #if ETIMEDOUT == ETIME return e == ETIMEDOUT; #else return e == ETIMEDOUT || e == ETIME; #endif } #ifdef _WIN32 #define MY_THREAD_CREATE_JOINABLE 0 #define MY_THREAD_CREATE_DETACHED 1 typedef void *(__cdecl *my_start_routine)(void *); #else #define MY_THREAD_CREATE_JOINABLE PTHREAD_CREATE_JOINABLE #define MY_THREAD_CREATE_DETACHED PTHREAD_CREATE_DETACHED typedef void *(*my_start_routine)(void *); #endif static inline my_thread_t my_thread_self() { #ifdef _WIN32 return GetCurrentThreadId(); #else return pthread_self(); #endif } static inline int my_thread_equal(my_thread_t t1, my_thread_t t2) { #ifdef _WIN32 return t1 == t2; #else return pthread_equal(t1, t2); #endif } static inline int my_thread_attr_init(my_thread_attr_t *attr) { #ifdef _WIN32 attr->dwStackSize = 0; /* Set to joinable by default to match Linux */ attr->detachstate = MY_THREAD_CREATE_JOINABLE; return 0; #else return pthread_attr_init(attr); #endif } static inline int my_thread_attr_destroy(my_thread_attr_t *attr) { #ifdef _WIN32 attr->dwStackSize = 0; /* Set to joinable by default to match Linux */ attr->detachstate = MY_THREAD_CREATE_JOINABLE; return 0; #else return pthread_attr_destroy(attr); #endif } static inline int 
my_thread_attr_setstacksize(my_thread_attr_t *attr, size_t stacksize) { #ifdef _WIN32 attr->dwStackSize = (DWORD)stacksize; return 0; #else return pthread_attr_setstacksize(attr, stacksize); #endif } static inline int my_thread_attr_setdetachstate(my_thread_attr_t *attr, int detachstate) { #ifdef _WIN32 attr->detachstate = detachstate; return 0; #else return pthread_attr_setdetachstate(attr, detachstate); #endif } static inline int my_thread_attr_getstacksize(my_thread_attr_t *attr, size_t *stacksize) { #ifdef _WIN32 *stacksize = (size_t)attr->dwStackSize; return 0; #else return pthread_attr_getstacksize(attr, stacksize); #endif } static inline void my_thread_yield() { #ifdef _WIN32 SwitchToThread(); #else sched_yield(); #endif } int my_thread_create(my_thread_handle *thread, const my_thread_attr_t *attr, my_start_routine func, void *arg); int my_thread_join(my_thread_handle *thread, void **value_ptr); int my_thread_cancel(my_thread_handle *thread); void my_thread_exit(void *value_ptr) MY_ATTRIBUTE((noreturn)); /** Sets the name of the thread for system and debugger, if possible. @param name Name to set, must be shorter than SETNAME_MAX_LENGTH, including NULL character. */ void my_thread_self_setname(const char *name); extern bool my_thread_global_init(); extern void my_thread_global_reinit(); extern void my_thread_global_end(); // Need to be extern "C" for the time being, due to memcached. extern "C" bool my_thread_init(); extern "C" void my_thread_end(); #endif /* MY_THREAD_INCLUDED */
{ "content_hash": "3e3d71b213ef445884a1f296ce5c92e5", "timestamp": "", "source": "github", "line_count": 149, "max_line_length": 80, "avg_line_length": 24.993288590604028, "alnum_prop": 0.6952201933404941, "repo_name": "oleghnidets/OHMySQL", "id": "eb57bb0e93a3fc1815b0ae261789591aab2717b5", "size": "4870", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "OHMySQL/lib/MySQL.xcframework/ios-arm64_x86_64-simulator/MySQL.framework/Headers/my_thread.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "16732094" }, { "name": "C++", "bytes": "4287668" }, { "name": "Objective-C", "bytes": "153779" }, { "name": "Ruby", "bytes": "1650" }, { "name": "Swift", "bytes": "55389" } ], "symlink_target": "" }
require 'mina/bundler' require 'mina/rails' require 'mina/git' # require 'mina/rbenv' # for rbenv support. (http://rbenv.org) require 'mina/rvm' # for rvm support. (http://rvm.io) # Basic settings: # domain - The hostname to SSH to. # deploy_to - Path to deploy into. # repository - Git repo to clone from. (needed by mina/git) # branch - Branch name to deploy. (needed by mina/git) set :domains, ['123.57.7.239'] set :domain, '123.57.7.239' set :deploy_to, '/var/www/admin.iqv8' set :repository, 'git@github.com:mumaoxi/admin.iqv8.git' set :branch, 'master' set :keep_releases, 20 set :rails_env, :production # Manually create these paths in shared/ (eg: shared/config/database.yml) in your server. # They will be linked in the 'deploy:link_shared_paths' step. set :shared_paths, ['config/database.yml', 'config/newrelic.yml', 'config/puma.rb', 'config/secrets.yml', 'log', 'tmp','config/initializers/carrierwave.rb',] # mina deploy to=s1 case ENV['to'] when 's1' set :domain, '123.57.7.239' # production 1 end case ENV['for'] when 'master' set :branch, 'master' when 'develop' set :branch, 'develop' else if ENV['for'] set :branch, ENV['for'] end end # Optional settings: set :user, 'root' # Username in the server to SSH to. set :port, '22' # SSH port number. set :rvm_path, '/usr/local/rvm/scripts/rvm' set :app_path, lambda { "#{deploy_to}/#{current_path}" } # This task is the environment that is loaded for most commands, such as # `mina deploy` or `mina rake`. task :environment do # If you're using rbenv, use this to load the rbenv environment. # Be sure to commit your .rbenv-version to your repository. # invoke :'rbenv:load' # For those using RVM, use this to load an RVM version@gemset. invoke :'rvm:use[ruby-2.2.1@global]' end # Put any custom mkdir's in here for when `mina setup` is ran. # For Rails apps, we'll make some of the shared paths that are shared between # all releases. task :setup => :environment do queue! %[mkdir -p "#{deploy_to}/shared/log"] queue! 
%[chmod g+rx,u+rwx "#{deploy_to}/shared/log"] queue! %[mkdir -p "#{deploy_to}/shared/tmp"] queue! %[chmod g+rx,u+rwx "#{deploy_to}/shared/tmp"] queue! %[mkdir -p "#{deploy_to}/shared/tmp/pids"] queue! %[chmod g+rx,u+rwx "#{deploy_to}/shared/tmp/pids"] queue! %[mkdir -p "#{deploy_to}/shared/tmp/sockets"] queue! %[chmod g+rx,u+rwx "#{deploy_to}/shared/tmp/sockets"] queue! %[mkdir -p "#{deploy_to}/shared/config"] queue! %[chmod g+rx,u+rwx "#{deploy_to}/shared/config"] queue! %[touch "#{deploy_to}/shared/config/database.yml"] queue %[echo "-----> Be sure to edit 'shared/config/database.yml'."] queue! %[touch "#{deploy_to}/shared/config/newrelic.yml"] queue %[echo "-----> Be sure to edit 'shared/config/newrelic.yml'."] queue! %[touch "#{deploy_to}/shared/config/secrets.yml"] queue %[echo "-----> Be sure to edit 'shared/config/secrets.yml'."] queue! %[touch "#{deploy_to}/shared/config/puma.rb"] queue %[echo "-----> Be sure to edit 'shared/config/puma.rb'."] queue! %[mkdir "#{deploy_to}/shared/config/initializers"] queue! %[touch "#{deploy_to}/shared/config/initializers/carrierwave.rb"] queue %[echo "-----> Be sure to edit 'shared/config/initializers/carrierwave.rb'."] end # mina deploy:force_unlock deploy # How to use: mina deploy to=s3 for=develop # How to use: mina deploy to=s3 for=master desc "Deploys the current version to the server." task :deploy => :environment do deploy do # Put things that will set up an empty directory into a fully set-up # instance of your project. 
invoke :'git:clone' invoke :'deploy:link_shared_paths' invoke :'bundle:install' invoke :'rails:db_migrate' invoke :'rails:assets_precompile' invoke :'deploy:cleanup' to :launch do queue "cd #{app_path} ; bundle install --without nothing" # invoke :restart # invoke :start end end end desc 'Starts the application' task :start => :environment do queue "cd #{app_path} ; bundle exec puma -C config/puma.rb -e production -d" # queue "cd #{app_path} ; bundle exec puma" # queue "cd #{app_path} ; bundle exec pumactl -F config/puma.rb start" end desc 'Stop the application' task :stop => :environment do queue "cd #{app_path} ; bundle exec pumactl -P #{app_path}/tmp/pids/puma.pid stop" end desc 'Restart the application' task :restart => :environment do # queue "cd #{app_path} ; bundle exec pumactl -P #{app_path}/tmp/pids/puma.pid restart" invoke :stop invoke :start end task :cat_server_log => :environment do queue "tail -n 200 #{app_path}/log/production.log" end task :cat_err_log => :environment do queue "tail -n 200 #{app_path}/log/puma.err.log" end desc "Deploy to all servers" task :deploy_all do isolate do domains.each do |domain| set :domain, domain invoke :deploy run! end end end desc "Restart all servers" task :restart_all do isolate do domains.each do |domain| set :domain, domain invoke :restart run! end end end # For help in making your deploy script, see the Mina documentation: # # - http://nadarei.co/mina # - http://nadarei.co/mina/tasks # - http://nadarei.co/mina/settings # - http://nadarei.co/mina/helpers
{ "content_hash": "157528a4221c8c2e96ceb3cbbde157a9", "timestamp": "", "source": "github", "line_count": 173, "max_line_length": 157, "avg_line_length": 30.589595375722542, "alnum_prop": 0.6681783824640968, "repo_name": "iqv8/admin.iqv8", "id": "fc8a48b8eda1eb044fd3dc3432082eea2fa8fa88", "size": "5331", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "config/deploy.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1288" }, { "name": "HTML", "bytes": "6384" }, { "name": "JavaScript", "bytes": "6727" }, { "name": "Ruby", "bytes": "39031" } ], "symlink_target": "" }
<?php /** * Sequential Theme Customizer * * @package Sequential */ /** * Add postMessage support for site title and description for the Theme Customizer. * * @param WP_Customize_Manager $wp_customize Theme Customizer object. */ function sequential_customize_register( $wp_customize ) { $wp_customize->get_setting( 'blogname' )->transport = 'postMessage'; $wp_customize->get_setting( 'blogdescription' )->transport = 'postMessage'; $wp_customize->get_setting( 'header_textcolor' )->transport = 'postMessage'; /* Theme Options */ $wp_customize->add_section( 'sequential_theme_options', array( 'title' => __( 'Theme', 'sequential' ), 'priority' => 130, ) ); /* Show Tagline */ $wp_customize->add_setting( 'sequential_tagline', array( 'default' => '', 'sanitize_callback' => 'sequential_sanitize_checkbox', 'transport' => 'postMessage', ) ); $wp_customize->add_control( 'sequential_tagline', array( 'label' => __( 'Show Tagline', 'sequential' ), 'section' => 'sequential_theme_options', 'priority' => 10, 'type' => 'checkbox', ) ); /* Top Area Content */ $wp_customize->add_setting( 'sequential_top_area_content', array( 'default' => '', 'sanitize_callback' => 'wp_kses_post', ) ); $wp_customize->add_control( 'sequential_top_area_content', array( 'label' => __( 'Top Area Content', 'sequential' ), 'section' => 'sequential_theme_options', 'priority' => 20, 'type' => 'textarea', ) ); /* Front Page: Featured Page One */ $wp_customize->add_setting( 'sequential_featured_page_one_front_page', array( 'default' => '', 'sanitize_callback' => 'sequential_sanitize_dropdown_pages', ) ); $wp_customize->add_control( 'sequential_featured_page_one_front_page', array( 'label' => __( 'Front Page: Featured Page One', 'sequential' ), 'section' => 'sequential_theme_options', 'priority' => 30, 'type' => 'dropdown-pages', ) ); /* Front Page: Featured Page Two */ $wp_customize->add_setting( 'sequential_featured_page_two_front_page', array( 'default' => '', 'sanitize_callback' => 
'sequential_sanitize_dropdown_pages', ) ); $wp_customize->add_control( 'sequential_featured_page_two_front_page', array( 'label' => __( 'Front Page: Featured Page Two', 'sequential' ), 'section' => 'sequential_theme_options', 'priority' => 40, 'type' => 'dropdown-pages', ) ); /* Front Page: show title */ $wp_customize->add_setting( 'sequential_title_front_page', array( 'default' => '', 'sanitize_callback' => 'sequential_sanitize_checkbox', ) ); $wp_customize->add_control( 'sequential_title_front_page', array( 'label' => __( 'Front Page: Show Page Titles', 'sequential' ), 'section' => 'sequential_theme_options', 'priority' => 50, 'type' => 'checkbox', ) ); } add_action( 'customize_register', 'sequential_customize_register' ); /** * Sanitize the checkbox. * * @param boolean $input. * @return boolean (true|false). */ function sequential_sanitize_checkbox( $input ) { if ( 1 == $input ) { return true; } else { return false; } } /** * Sanitize the dropdown pages. * * @param interger $input. * @return interger. */ function sequential_sanitize_dropdown_pages( $input ) { if ( is_numeric( $input ) ) { return intval( $input ); } } /** * Binds JS handlers to make Theme Customizer preview reload changes asynchronously. */ function sequential_customize_preview_js() { wp_enqueue_script( 'sequential-customizer', get_template_directory_uri() . '/js/customizer.js', array( 'customize-preview' ), '20141022', true ); } add_action( 'customize_preview_init', 'sequential_customize_preview_js' );
{ "content_hash": "3978d0092c2f65cf697d8b20ea9e95b6", "timestamp": "", "source": "github", "line_count": 119, "max_line_length": 146, "avg_line_length": 32.705882352941174, "alnum_prop": 0.6068859198355602, "repo_name": "Doap/sinkjuice.com", "id": "6ce48dc6bb0dd8d5c85ce6a35c5297bd5a355c48", "size": "3892", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "themes/sequential/inc/customizer.php", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "2903" }, { "name": "CSS", "bytes": "7024597" }, { "name": "CoffeeScript", "bytes": "2134" }, { "name": "HTML", "bytes": "366280" }, { "name": "JavaScript", "bytes": "6153683" }, { "name": "Makefile", "bytes": "1531248" }, { "name": "PHP", "bytes": "63534232" }, { "name": "Perl", "bytes": "1539" }, { "name": "Ruby", "bytes": "7550" }, { "name": "Shell", "bytes": "2092" }, { "name": "Visual Basic", "bytes": "2281" }, { "name": "XSLT", "bytes": "10938" } ], "symlink_target": "" }
// Copyright 2011 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following // disclaimer in the documentation and/or other materials provided // with the distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived // from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. /** \mainpage V8 API Reference Guide * * V8 is Google's open source JavaScript engine. * * This set of documents provides reference material generated from the * V8 header file, include/v8.h. * * For other documentation see http://code.google.com/apis/v8/ */ #ifndef V8_H_ #define V8_H_ #include "v8stdint.h" #ifdef _WIN32 // Setup for Windows DLL export/import. When building the V8 DLL the // BUILDING_V8_SHARED needs to be defined. 
When building a program which uses // the V8 DLL USING_V8_SHARED needs to be defined. When either building the V8 // static library or building a program which uses the V8 static library neither // BUILDING_V8_SHARED nor USING_V8_SHARED should be defined. #if defined(BUILDING_V8_SHARED) && defined(USING_V8_SHARED) #error both BUILDING_V8_SHARED and USING_V8_SHARED are set - please check the\ build configuration to ensure that at most one of these is set #endif #ifdef BUILDING_V8_SHARED #define V8EXPORT __declspec(dllexport) #elif USING_V8_SHARED #define V8EXPORT __declspec(dllimport) #else #define V8EXPORT #endif // BUILDING_V8_SHARED #else // _WIN32 // Setup for Linux shared library export. There is no need to distinguish // between building or using the V8 shared library, but we should not // export symbols when we are building a static library. #if defined(__GNUC__) && (__GNUC__ >= 4) && defined(V8_SHARED) #define V8EXPORT __attribute__ ((visibility("default"))) #else // defined(__GNUC__) && (__GNUC__ >= 4) #define V8EXPORT #endif // defined(__GNUC__) && (__GNUC__ >= 4) #endif // _WIN32 /** * The v8 JavaScript engine. */ namespace v8 { class Context; class String; class StringObject; class Value; class Utils; class Number; class NumberObject; class Object; class Array; class Int32; class Uint32; class External; class Primitive; class Boolean; class BooleanObject; class Integer; class Function; class Date; class ImplementationUtilities; class Signature; template <class T> class Handle; template <class T> class Local; template <class T> class Persistent; class FunctionTemplate; class ObjectTemplate; class Data; class AccessorInfo; class StackTrace; class StackFrame; namespace internal { class Arguments; class Object; class Heap; class HeapObject; class Isolate; } // --- Weak Handles --- /** * A weak reference callback function. 
 *
 * This callback should either explicitly invoke Dispose on |object| if
 * V8 wrapper is not needed anymore, or 'revive' it by invocation of MakeWeak.
 *
 * \param object the weak global object to be reclaimed by the garbage collector
 * \param parameter the value passed in when making the weak global object
 */
typedef void (*WeakReferenceCallback)(Persistent<Value> object,
                                      void* parameter);


// --- Handles ---

// Compile-time check that S* converts to T*: the assignment inside the
// never-executed while(false) body fails to compile for incompatible
// handle types. Has no runtime effect.
#define TYPE_CHECK(T, S)                                       \
  while (false) {                                              \
    *(static_cast<T* volatile*>(0)) = static_cast<S*>(0);      \
  }

/**
 * An object reference managed by the v8 garbage collector.
 *
 * All objects returned from v8 have to be tracked by the garbage
 * collector so that it knows that the objects are still alive. Also,
 * because the garbage collector may move objects, it is unsafe to
 * point directly to an object. Instead, all objects are stored in
 * handles which are known by the garbage collector and updated
 * whenever an object moves. Handles should always be passed by value
 * (except in cases like out-parameters) and they should never be
 * allocated on the heap.
 *
 * There are two types of handles: local and persistent handles.
 * Local handles are light-weight and transient and typically used in
 * local operations. They are managed by HandleScopes. Persistent
 * handles can be used when storing objects across several independent
 * operations and have to be explicitly deallocated when they're no
 * longer used.
 *
 * It is safe to extract the object stored in the handle by
 * dereferencing the handle (for instance, to extract the Object* from
 * a Handle<Object>); the value will still be governed by a handle
 * behind the scenes and the same rules apply to these values as to
 * their handles.
 */
template <class T> class Handle {
 public:
  /**
   * Creates an empty handle.
   */
  inline Handle() : val_(0) {}

  /**
   * Creates a new handle for the specified value.
   */
  inline explicit Handle(T* val) : val_(val) {}

  /**
   * Creates a handle for the contents of the specified handle. This
   * constructor allows you to pass handles as arguments by value and
   * to assign between handles. However, if you try to assign between
   * incompatible handles, for instance from a Handle<String> to a
   * Handle<Number> it will cause a compile-time error. Assigning
   * between compatible handles, for instance assigning a
   * Handle<String> to a variable declared as Handle<Value>, is legal
   * because String is a subclass of Value.
   */
  template <class S> inline Handle(Handle<S> that)
      : val_(reinterpret_cast<T*>(*that)) {
    /**
     * This check fails when trying to convert between incompatible
     * handles. For example, converting from a Handle<String> to a
     * Handle<Number>.
     */
    TYPE_CHECK(T, S);
  }

  /**
   * Returns true if the handle is empty.
   */
  inline bool IsEmpty() const { return val_ == 0; }

  /**
   * Sets the handle to be empty. IsEmpty() will then return true.
   */
  inline void Clear() { val_ = 0; }

  inline T* operator->() const { return val_; }

  inline T* operator*() const { return val_; }

  /**
   * Checks whether two handles are the same.
   * Returns true if both are empty, or if the objects
   * to which they refer are identical.
   * The handles' references are not checked.
   */
  template <class S> inline bool operator==(Handle<S> that) const {
    internal::Object** a = reinterpret_cast<internal::Object**>(**this);
    internal::Object** b = reinterpret_cast<internal::Object**>(*that);
    if (a == 0) return b == 0;
    if (b == 0) return false;
    return *a == *b;
  }

  /**
   * Checks whether two handles are different.
   * Returns true if only one of the handles is empty, or if
   * the objects to which they refer are different.
   * The handles' references are not checked.
   */
  template <class S> inline bool operator!=(Handle<S> that) const {
    return !operator==(that);
  }

  template <class S> static inline Handle<T> Cast(Handle<S> that) {
#ifdef V8_ENABLE_CHECKS
    // If we're going to perform the type check then we have to check
    // that the handle isn't empty before doing the checked cast.
    if (that.IsEmpty()) return Handle<T>();
#endif
    return Handle<T>(T::Cast(*that));
  }

  template <class S> inline Handle<S> As() {
    return Handle<S>::Cast(*this);
  }

 private:
  T* val_;
};


/**
 * A light-weight stack-allocated object handle. All operations
 * that return objects from within v8 return them in local handles. They
 * are created within HandleScopes, and all local handles allocated within a
 * handle scope are destroyed when the handle scope is destroyed. Hence it
 * is not necessary to explicitly deallocate local handles.
 */
template <class T> class Local : public Handle<T> {
 public:
  inline Local();
  template <class S> inline Local(Local<S> that)
      : Handle<T>(reinterpret_cast<T*>(*that)) {
    /**
     * This check fails when trying to convert between incompatible
     * handles. For example, converting from a Handle<String> to a
     * Handle<Number>.
     */
    TYPE_CHECK(T, S);
  }
  template <class S> inline Local(S* that) : Handle<T>(that) { }
  template <class S> static inline Local<T> Cast(Local<S> that) {
#ifdef V8_ENABLE_CHECKS
    // If we're going to perform the type check then we have to check
    // that the handle isn't empty before doing the checked cast.
    if (that.IsEmpty()) return Local<T>();
#endif
    return Local<T>(T::Cast(*that));
  }
  template <class S> inline Local<S> As() {
    return Local<S>::Cast(*this);
  }

  /** Create a local handle for the content of another handle.
   *  The referee is kept alive by the local handle even when
   *  the original handle is destroyed/disposed.
   */
  inline static Local<T> New(Handle<T> that);
};


/**
 * An object reference that is independent of any handle scope. Where
 * a Local handle only lives as long as the HandleScope in which it was
 * allocated, a Persistent handle remains valid until it is explicitly
 * disposed.
 *
 * A persistent handle contains a reference to a storage cell within
 * the v8 engine which holds an object value and which is updated by
 * the garbage collector whenever the object is moved.
A new storage
 * cell can be created using Persistent::New and existing handles can
 * be disposed using Persistent::Dispose. Since persistent handles
 * are passed by value you may have many persistent handle objects
 * that point to the same storage cell. For instance, if you pass a
 * persistent handle as an argument to a function you will not get two
 * different storage cells but rather two references to the same
 * storage cell.
 */
template <class T> class Persistent : public Handle<T> {
 public:
  /**
   * Creates an empty persistent handle that doesn't point to any
   * storage cell.
   */
  inline Persistent();

  /**
   * Creates a persistent handle for the same storage cell as the
   * specified handle. This constructor allows you to pass persistent
   * handles as arguments by value and to assign between persistent
   * handles. However, attempting to assign between incompatible
   * persistent handles, for instance from a Persistent<String> to a
   * Persistent<Number> will cause a compile-time error. Assigning
   * between compatible persistent handles, for instance assigning a
   * Persistent<String> to a variable declared as Persistent<Value>,
   * is allowed as String is a subclass of Value.
   */
  template <class S> inline Persistent(Persistent<S> that)
      : Handle<T>(reinterpret_cast<T*>(*that)) {
    /**
     * This check fails when trying to convert between incompatible
     * handles. For example, converting from a Handle<String> to a
     * Handle<Number>.
     */
    TYPE_CHECK(T, S);
  }

  template <class S> inline Persistent(S* that) : Handle<T>(that) { }

  /**
   * "Casts" a plain handle which is known to be a persistent handle
   * to a persistent handle.
   */
  template <class S> explicit inline Persistent(Handle<S> that)
      : Handle<T>(*that) { }

  template <class S> static inline Persistent<T> Cast(Persistent<S> that) {
#ifdef V8_ENABLE_CHECKS
    // If we're going to perform the type check then we have to check
    // that the handle isn't empty before doing the checked cast.
    if (that.IsEmpty()) return Persistent<T>();
#endif
    return Persistent<T>(T::Cast(*that));
  }

  template <class S> inline Persistent<S> As() {
    return Persistent<S>::Cast(*this);
  }

  /**
   * Creates a new persistent handle for an existing local or
   * persistent handle.
   */
  inline static Persistent<T> New(Handle<T> that);

  /**
   * Releases the storage cell referenced by this persistent handle.
   * Does not remove the reference to the cell from any handles.
   * This handle's reference, and any other references to the storage
   * cell remain and IsEmpty will still return false.
   */
  inline void Dispose();

  /**
   * Make the reference to this object weak. When only weak handles
   * refer to the object, the garbage collector will perform a
   * callback to the given V8::WeakReferenceCallback function, passing
   * it the object reference and the given parameters.
   */
  inline void MakeWeak(void* parameters, WeakReferenceCallback callback);

  /** Clears the weak reference to this object.*/
  inline void ClearWeak();

  /**
   * Marks the reference to this object independent. Garbage collector
   * is free to ignore any object groups containing this object.
   * Weak callback for an independent handle should not
   * assume that it will be preceded by a global GC prologue callback
   * or followed by a global GC epilogue callback.
   */
  inline void MarkIndependent();

  /**
   * Checks if the handle holds the only reference to an object.
   *
   * NOTE(review): the name suggests this tests whether the object is
   * near death (about to be reclaimed); the declaration alone does not
   * show which reading is accurate — confirm against the implementation.
   */
  inline bool IsNearDeath() const;

  /**
   * Returns true if the handle's reference is weak.
   */
  inline bool IsWeak() const;

  /**
   * Assigns a wrapper class ID to the handle. See RetainedObjectInfo
   * interface description in v8-profiler.h for details.
   */
  inline void SetWrapperClassId(uint16_t class_id);

 private:
  friend class ImplementationUtilities;
  friend class ObjectTemplate;
};


/**
 * A stack-allocated class that governs a number of local handles.
 * After a handle scope has been created, all local handles will be
 * allocated within that handle scope until either the handle scope is
 * deleted or another handle scope is created. If there is already a
 * handle scope and a new one is created, all allocations will take
 * place in the new handle scope until it is deleted. After that,
 * new handles will again be allocated in the original handle scope.
 *
 * After the handle scope of a local handle has been deleted the
 * garbage collector will no longer track the object stored in the
 * handle and may deallocate it. The behavior of accessing a handle
 * for which the handle scope has been deleted is undefined.
 */
class V8EXPORT HandleScope {
 public:
  HandleScope();

  ~HandleScope();

  /**
   * Closes the handle scope and returns the value as a handle in the
   * previous scope, which is the new current scope after the call.
   */
  template <class T> Local<T> Close(Handle<T> value);

  /**
   * Counts the number of allocated handles.
   */
  static int NumberOfHandles();

  /**
   * Creates a new handle with the given value.
   */
  static internal::Object** CreateHandle(internal::Object* value);
  // Faster version, uses HeapObject to obtain the current Isolate.
  static internal::Object** CreateHandle(internal::HeapObject* value);

 private:
  // Make it impossible to create heap-allocated or illegal handle
  // scopes by disallowing certain operations.
  HandleScope(const HandleScope&);
  void operator=(const HandleScope&);
  void* operator new(size_t size);
  void operator delete(void*, size_t);

  // This Data class is accessible internally as HandleScopeData through a
  // typedef in the ImplementationUtilities class.
  class V8EXPORT Data {
   public:
    internal::Object** next;
    internal::Object** limit;
    int level;
    // Resets the scope data to the empty state (no handles, level 0).
    inline void Initialize() {
      next = limit = NULL;
      level = 0;
    }
  };

  void Leave();

  internal::Isolate* isolate_;
  internal::Object** prev_next_;
  internal::Object** prev_limit_;

  // Allow for the active closing of HandleScopes which allows to pass a handle
  // from the HandleScope being closed to the next top most HandleScope.
  bool is_closed_;
  internal::Object** RawClose(internal::Object** value);

  friend class ImplementationUtilities;
};


// --- Special objects ---


/**
 * The superclass of values and API object templates.
 */
class V8EXPORT Data {
 private:
  Data();
};


/**
 * Pre-compilation data that can be associated with a script. This
 * data can be calculated for a script in advance of actually
 * compiling it, and can be stored between compilations. When script
 * data is given to the compile method compilation will be faster.
 */
class V8EXPORT ScriptData {  // NOLINT
 public:
  virtual ~ScriptData() { }

  /**
   * Pre-compiles the specified script (context-independent).
   *
   * \param input Pointer to UTF-8 script source code.
   * \param length Length of UTF-8 script source code.
   */
  static ScriptData* PreCompile(const char* input, int length);

  /**
   * Pre-compiles the specified script (context-independent).
   *
   * NOTE: Pre-compilation using this method cannot happen on another thread
   * without using Lockers.
   *
   * \param source Script source code.
   */
  static ScriptData* PreCompile(Handle<String> source);

  /**
   * Load previous pre-compilation data.
   *
   * \param data Pointer to data returned by a call to Data() of a previous
   *   ScriptData. Ownership is not transferred.
   * \param length Length of data.
   */
  static ScriptData* New(const char* data, int length);

  /**
   * Returns the length of Data().
   */
  virtual int Length() = 0;

  /**
   * Returns a serialized representation of this ScriptData that can later be
   * passed to New(). NOTE: Serialized data is platform-dependent.
 */
  virtual const char* Data() = 0;

  /**
   * Returns true if the source code could not be parsed.
   */
  virtual bool HasError() = 0;
};


/**
 * The origin, within a file, of a script.
 */
class ScriptOrigin {
 public:
  inline ScriptOrigin(
      Handle<Value> resource_name,
      Handle<Integer> resource_line_offset = Handle<Integer>(),
      Handle<Integer> resource_column_offset = Handle<Integer>())
      : resource_name_(resource_name),
        resource_line_offset_(resource_line_offset),
        resource_column_offset_(resource_column_offset) { }
  inline Handle<Value> ResourceName() const;
  inline Handle<Integer> ResourceLineOffset() const;
  inline Handle<Integer> ResourceColumnOffset() const;
 private:
  Handle<Value> resource_name_;
  Handle<Integer> resource_line_offset_;
  Handle<Integer> resource_column_offset_;
};


/**
 * A compiled JavaScript script.
 */
class V8EXPORT Script {
 public:
  /**
   * Compiles the specified script (context-independent).
   *
   * \param source Script source code.
   * \param origin Script origin, owned by caller, no references are kept
   *   when New() returns
   * \param pre_data Pre-parsing data, as obtained by ScriptData::PreCompile()
   *   using pre_data speeds compilation if it's done multiple times.
   *   Owned by caller, no references are kept when New() returns.
   * \param script_data Arbitrary data associated with script. Using
   *   this has same effect as calling SetData(), but allows data to be
   *   available to compile event handlers.
   * \return Compiled script object (context independent; when run it
   *   will use the currently entered context).
   */
  static Local<Script> New(Handle<String> source,
                           ScriptOrigin* origin = NULL,
                           ScriptData* pre_data = NULL,
                           Handle<String> script_data = Handle<String>());

  /**
   * Compiles the specified script using the specified file name
   * object (typically a string) as the script's origin.
   *
   * \param source Script source code.
   * \param file_name file name object (typically a string) to be used
   *   as the script's origin.
   * \return Compiled script object (context independent; when run it
   *   will use the currently entered context).
   */
  static Local<Script> New(Handle<String> source,
                           Handle<Value> file_name);

  /**
   * Compiles the specified script (bound to current context).
   *
   * \param source Script source code.
   * \param origin Script origin, owned by caller, no references are kept
   *   when Compile() returns
   * \param pre_data Pre-parsing data, as obtained by ScriptData::PreCompile()
   *   using pre_data speeds compilation if it's done multiple times.
   *   Owned by caller, no references are kept when Compile() returns.
   * \param script_data Arbitrary data associated with script. Using
   *   this has same effect as calling SetData(), but makes data available
   *   earlier (i.e. to compile event handlers).
   * \return Compiled script object, bound to the context that was active
   *   when this function was called. When run it will always use this
   *   context.
   */
  static Local<Script> Compile(Handle<String> source,
                               ScriptOrigin* origin = NULL,
                               ScriptData* pre_data = NULL,
                               Handle<String> script_data = Handle<String>());

  /**
   * Compiles the specified script using the specified file name
   * object (typically a string) as the script's origin.
   *
   * \param source Script source code.
   * \param file_name File name to use as script's origin
   * \param script_data Arbitrary data associated with script. Using
   *   this has same effect as calling SetData(), but makes data available
   *   earlier (i.e. to compile event handlers).
   * \return Compiled script object, bound to the context that was active
   *   when this function was called. When run it will always use this
   *   context.
   */
  static Local<Script> Compile(Handle<String> source,
                               Handle<Value> file_name,
                               Handle<String> script_data = Handle<String>());

  /**
   * Runs the script returning the resulting value. If the script is
   * context independent (created using ::New) it will be run in the
   * currently entered context.
If it is context specific (created
 * using ::Compile) it will be run in the context in which it was
 * compiled.
 */
  Local<Value> Run();

  /**
   * Returns the script id value.
   */
  Local<Value> Id();

  /**
   * Associate an additional data object with the script. This is mainly used
   * with the debugger as this data object is only available through the
   * debugger API.
   */
  void SetData(Handle<String> data);
};


/**
 * An error message.
 */
class V8EXPORT Message {
 public:
  Local<String> Get() const;
  Local<String> GetSourceLine() const;

  /**
   * Returns the resource name for the script from where the function causing
   * the error originates.
   */
  Handle<Value> GetScriptResourceName() const;

  /**
   * Returns the resource data for the script from where the function causing
   * the error originates.
   */
  Handle<Value> GetScriptData() const;

  /**
   * Exception stack trace. By default stack traces are not captured for
   * uncaught exceptions. SetCaptureStackTraceForUncaughtExceptions allows
   * to change this option.
   */
  Handle<StackTrace> GetStackTrace() const;

  /**
   * Returns the number, 1-based, of the line where the error occurred.
   */
  int GetLineNumber() const;

  /**
   * Returns the index within the script of the first character where
   * the error occurred.
   */
  int GetStartPosition() const;

  /**
   * Returns the index within the script of the last character where
   * the error occurred.
   */
  int GetEndPosition() const;

  /**
   * Returns the index within the line of the first character where
   * the error occurred.
   */
  int GetStartColumn() const;

  /**
   * Returns the index within the line of the last character where
   * the error occurred.
   */
  int GetEndColumn() const;

  // TODO(1245381): Print to a string instead of on a FILE.
  static void PrintCurrentStackTrace(FILE* out);

  static const int kNoLineNumberInfo = 0;
  static const int kNoColumnInfo = 0;
};


/**
 * Representation of a JavaScript stack trace. The information collected is a
 * snapshot of the execution stack and the information remains valid after
 * execution continues.
 */
class V8EXPORT StackTrace {
 public:
  /**
   * Flags that determine what information is captured for each
   * StackFrame when grabbing the current stack trace.
   */
  enum StackTraceOptions {
    kLineNumber = 1,
    kColumnOffset = 1 << 1 | kLineNumber,
    kScriptName = 1 << 2,
    kFunctionName = 1 << 3,
    kIsEval = 1 << 4,
    kIsConstructor = 1 << 5,
    kScriptNameOrSourceURL = 1 << 6,
    kOverview = kLineNumber | kColumnOffset | kScriptName | kFunctionName,
    kDetailed = kOverview | kIsEval | kIsConstructor | kScriptNameOrSourceURL
  };

  /**
   * Returns a StackFrame at a particular index.
   */
  Local<StackFrame> GetFrame(uint32_t index) const;

  /**
   * Returns the number of StackFrames.
   */
  int GetFrameCount() const;

  /**
   * Returns StackTrace as a v8::Array that contains StackFrame objects.
   */
  Local<Array> AsArray();

  /**
   * Grab a snapshot of the current JavaScript execution stack.
   *
   * \param frame_limit The maximum number of stack frames we want to capture.
   * \param options Enumerates the set of things we will capture for each
   *   StackFrame.
   */
  static Local<StackTrace> CurrentStackTrace(
      int frame_limit,
      StackTraceOptions options = kOverview);
};


/**
 * A single JavaScript stack frame.
 */
class V8EXPORT StackFrame {
 public:
  /**
   * Returns the number, 1-based, of the line for the associated function call.
   * This method will return Message::kNoLineNumberInfo if it is unable to
   * retrieve the line number, or if kLineNumber was not passed as an option
   * when capturing the StackTrace.
   */
  int GetLineNumber() const;

  /**
   * Returns the 1-based column offset on the line for the associated function
   * call.
   * This method will return Message::kNoColumnInfo if it is unable to retrieve
   * the column number, or if kColumnOffset was not passed as an option when
   * capturing the StackTrace.
   */
  int GetColumn() const;

  /**
   * Returns the name of the resource that contains the script for the
   * function for this StackFrame.
 */
  Local<String> GetScriptName() const;

  /**
   * Returns the name of the resource that contains the script for the
   * function for this StackFrame or sourceURL value if the script name
   * is undefined and its source ends with //@ sourceURL=... string.
   */
  Local<String> GetScriptNameOrSourceURL() const;

  /**
   * Returns the name of the function associated with this stack frame.
   */
  Local<String> GetFunctionName() const;

  /**
   * Returns whether or not the associated function is compiled via a call to
   * eval().
   */
  bool IsEval() const;

  /**
   * Returns whether or not the associated function is called as a
   * constructor via "new".
   */
  bool IsConstructor() const;
};


// --- Value ---


/**
 * The superclass of all JavaScript values and objects.
 */
class Value : public Data {
 public:
  /**
   * Returns true if this value is the undefined value. See ECMA-262
   * 4.3.10.
   */
  V8EXPORT bool IsUndefined() const;

  /**
   * Returns true if this value is the null value. See ECMA-262
   * 4.3.11.
   */
  V8EXPORT bool IsNull() const;

  /**
   * Returns true if this value is true.
   */
  V8EXPORT bool IsTrue() const;

  /**
   * Returns true if this value is false.
   */
  V8EXPORT bool IsFalse() const;

  /**
   * Returns true if this value is an instance of the String type.
   * See ECMA-262 8.4.
   */
  inline bool IsString() const;

  /**
   * Returns true if this value is a function.
   */
  V8EXPORT bool IsFunction() const;

  /**
   * Returns true if this value is an array.
   */
  V8EXPORT bool IsArray() const;

  /**
   * Returns true if this value is an object.
   */
  V8EXPORT bool IsObject() const;

  /**
   * Returns true if this value is boolean.
   */
  V8EXPORT bool IsBoolean() const;

  /**
   * Returns true if this value is a number.
   */
  V8EXPORT bool IsNumber() const;

  /**
   * Returns true if this value is external.
   */
  V8EXPORT bool IsExternal() const;

  /**
   * Returns true if this value is a 32-bit signed integer.
   */
  V8EXPORT bool IsInt32() const;

  /**
   * Returns true if this value is a 32-bit unsigned integer.
   */
  V8EXPORT bool IsUint32() const;

  /**
   * Returns true if this value is a Date.
   */
  V8EXPORT bool IsDate() const;

  /**
   * Returns true if this value is a Boolean object.
   */
  V8EXPORT bool IsBooleanObject() const;

  /**
   * Returns true if this value is a Number object.
   */
  V8EXPORT bool IsNumberObject() const;

  /**
   * Returns true if this value is a String object.
   */
  V8EXPORT bool IsStringObject() const;

  /**
   * Returns true if this value is a NativeError.
   */
  V8EXPORT bool IsNativeError() const;

  /**
   * Returns true if this value is a RegExp.
   */
  V8EXPORT bool IsRegExp() const;

  V8EXPORT Local<Boolean> ToBoolean() const;
  V8EXPORT Local<Number> ToNumber() const;
  V8EXPORT Local<String> ToString() const;
  V8EXPORT Local<String> ToDetailString() const;
  V8EXPORT Local<Object> ToObject() const;
  V8EXPORT Local<Integer> ToInteger() const;
  V8EXPORT Local<Uint32> ToUint32() const;
  V8EXPORT Local<Int32> ToInt32() const;

  /**
   * Attempts to convert a string to an array index.
   * Returns an empty handle if the conversion fails.
   */
  V8EXPORT Local<Uint32> ToArrayIndex() const;

  V8EXPORT bool BooleanValue() const;
  V8EXPORT double NumberValue() const;
  V8EXPORT int64_t IntegerValue() const;
  V8EXPORT uint32_t Uint32Value() const;
  V8EXPORT int32_t Int32Value() const;

  /** Performs the JavaScript '==' comparison. */
  V8EXPORT bool Equals(Handle<Value> that) const;
  /** Performs the JavaScript '===' (strict equality) comparison. */
  V8EXPORT bool StrictEquals(Handle<Value> that) const;

 private:
  inline bool QuickIsString() const;
  V8EXPORT bool FullIsString() const;
};


/**
 * The superclass of primitive values. See ECMA-262 4.3.2.
 */
class Primitive : public Value { };


/**
 * A primitive boolean value (ECMA-262, 4.3.14). Either the true
 * or false value.
 */
class Boolean : public Primitive {
 public:
  V8EXPORT bool Value() const;
  static inline Handle<Boolean> New(bool value);
};


/**
 * A JavaScript string value (ECMA-262, 4.3.17).
 */
class String : public Primitive {
 public:
  /**
   * Returns the number of characters in this string.
   */
  V8EXPORT int Length() const;

  /**
   * Returns the number of bytes in the UTF-8 encoded
   * representation of this string.
*/ V8EXPORT int Utf8Length() const; /** * Write the contents of the string to an external buffer. * If no arguments are given, expects the buffer to be large * enough to hold the entire string and NULL terminator. Copies * the contents of the string and the NULL terminator into the * buffer. * * WriteUtf8 will not write partial UTF-8 sequences, preferring to stop * before the end of the buffer. * * Copies up to length characters into the output buffer. * Only null-terminates if there is enough space in the buffer. * * \param buffer The buffer into which the string will be copied. * \param start The starting position within the string at which * copying begins. * \param length The number of characters to copy from the string. For * WriteUtf8 the number of bytes in the buffer. * \param nchars_ref The number of characters written, can be NULL. * \param options Various options that might affect performance of this or * subsequent operations. * \return The number of characters copied to the buffer excluding the null * terminator. For WriteUtf8: The number of bytes copied to the buffer * including the null terminator (if written). */ enum WriteOptions { NO_OPTIONS = 0, HINT_MANY_WRITES_EXPECTED = 1, NO_NULL_TERMINATION = 2 }; // 16-bit character codes. V8EXPORT int Write(uint16_t* buffer, int start = 0, int length = -1, int options = NO_OPTIONS) const; // ASCII characters. V8EXPORT int WriteAscii(char* buffer, int start = 0, int length = -1, int options = NO_OPTIONS) const; // UTF-8 encoded characters. V8EXPORT int WriteUtf8(char* buffer, int length = -1, int* nchars_ref = NULL, int options = NO_OPTIONS) const; /** * A zero length string. 
*/ V8EXPORT static v8::Local<v8::String> Empty(); /** * Returns true if the string is external */ V8EXPORT bool IsExternal() const; /** * Returns true if the string is both external and ASCII */ V8EXPORT bool IsExternalAscii() const; class V8EXPORT ExternalStringResourceBase { // NOLINT public: virtual ~ExternalStringResourceBase() {} protected: ExternalStringResourceBase() {} /** * Internally V8 will call this Dispose method when the external string * resource is no longer needed. The default implementation will use the * delete operator. This method can be overridden in subclasses to * control how allocated external string resources are disposed. */ virtual void Dispose() { delete this; } private: // Disallow copying and assigning. ExternalStringResourceBase(const ExternalStringResourceBase&); void operator=(const ExternalStringResourceBase&); friend class v8::internal::Heap; }; /** * An ExternalStringResource is a wrapper around a two-byte string * buffer that resides outside V8's heap. Implement an * ExternalStringResource to manage the life cycle of the underlying * buffer. Note that the string data must be immutable. */ class V8EXPORT ExternalStringResource : public ExternalStringResourceBase { public: /** * Override the destructor to manage the life cycle of the underlying * buffer. */ virtual ~ExternalStringResource() {} /** * The string data from the underlying buffer. */ virtual const uint16_t* data() const = 0; /** * The length of the string. That is, the number of two-byte characters. */ virtual size_t length() const = 0; protected: ExternalStringResource() {} }; /** * An ExternalAsciiStringResource is a wrapper around an ASCII * string buffer that resides outside V8's heap. Implement an * ExternalAsciiStringResource to manage the life cycle of the * underlying buffer. 
Note that the string data must be immutable * and that the data must be strict (7-bit) ASCII, not Latin-1 or * UTF-8, which would require special treatment internally in the * engine and, in the case of UTF-8, do not allow efficient indexing. * Use String::New or convert to 16 bit data for non-ASCII. */ class V8EXPORT ExternalAsciiStringResource : public ExternalStringResourceBase { public: /** * Override the destructor to manage the life cycle of the underlying * buffer. */ virtual ~ExternalAsciiStringResource() {} /** The string data from the underlying buffer.*/ virtual const char* data() const = 0; /** The number of ASCII characters in the string.*/ virtual size_t length() const = 0; protected: ExternalAsciiStringResource() {} }; /** * Get the ExternalStringResource for an external string. Returns * NULL if IsExternal() doesn't return true. */ inline ExternalStringResource* GetExternalStringResource() const; /** * Get the ExternalAsciiStringResource for an external ASCII string. * Returns NULL if IsExternalAscii() doesn't return true. */ V8EXPORT ExternalAsciiStringResource* GetExternalAsciiStringResource() const; static inline String* Cast(v8::Value* obj); /** * Allocates a new string from either UTF-8 encoded or ASCII data. * The second parameter 'length' gives the buffer length. * If the data is UTF-8 encoded, the caller must * be careful to supply the length parameter. * If it is not given, the function calls * 'strlen' to determine the buffer length, it might be * wrong if 'data' contains a null character. */ V8EXPORT static Local<String> New(const char* data, int length = -1); /** Allocates a new string from 16-bit character codes.*/ V8EXPORT static Local<String> New(const uint16_t* data, int length = -1); /** Creates a symbol. Returns one if it exists already.*/ V8EXPORT static Local<String> NewSymbol(const char* data, int length = -1); /** * Creates a new string by concatenating the left and the right strings * passed in as parameters. 
*/ V8EXPORT static Local<String> Concat(Handle<String> left, Handle<String>right); /** * Creates a new external string using the data defined in the given * resource. When the external string is no longer live on V8's heap the * resource will be disposed by calling its Dispose method. The caller of * this function should not otherwise delete or modify the resource. Neither * should the underlying buffer be deallocated or modified except through the * destructor of the external string resource. */ V8EXPORT static Local<String> NewExternal(ExternalStringResource* resource); /** * Associate an external string resource with this string by transforming it * in place so that existing references to this string in the JavaScript heap * will use the external string resource. The external string resource's * character contents need to be equivalent to this string. * Returns true if the string has been changed to be an external string. * The string is not modified if the operation fails. See NewExternal for * information on the lifetime of the resource. */ V8EXPORT bool MakeExternal(ExternalStringResource* resource); /** * Creates a new external string using the ASCII data defined in the given * resource. When the external string is no longer live on V8's heap the * resource will be disposed by calling its Dispose method. The caller of * this function should not otherwise delete or modify the resource. Neither * should the underlying buffer be deallocated or modified except through the * destructor of the external string resource. */ V8EXPORT static Local<String> NewExternal( ExternalAsciiStringResource* resource); /** * Associate an external string resource with this string by transforming it * in place so that existing references to this string in the JavaScript heap * will use the external string resource. The external string resource's * character contents need to be equivalent to this string. * Returns true if the string has been changed to be an external string. 
* The string is not modified if the operation fails. See NewExternal for * information on the lifetime of the resource. */ V8EXPORT bool MakeExternal(ExternalAsciiStringResource* resource); /** * Returns true if this string can be made external. */ V8EXPORT bool CanMakeExternal(); /** Creates an undetectable string from the supplied ASCII or UTF-8 data.*/ V8EXPORT static Local<String> NewUndetectable(const char* data, int length = -1); /** Creates an undetectable string from the supplied 16-bit character codes.*/ V8EXPORT static Local<String> NewUndetectable(const uint16_t* data, int length = -1); /** * Converts an object to a UTF-8-encoded character array. Useful if * you want to print the object. If conversion to a string fails * (e.g. due to an exception in the toString() method of the object) * then the length() method returns 0 and the * operator returns * NULL. */ class V8EXPORT Utf8Value { public: explicit Utf8Value(Handle<v8::Value> obj); ~Utf8Value(); char* operator*() { return str_; } const char* operator*() const { return str_; } int length() const { return length_; } private: char* str_; int length_; // Disallow copying and assigning. Utf8Value(const Utf8Value&); void operator=(const Utf8Value&); }; /** * Converts an object to an ASCII string. * Useful if you want to print the object. * If conversion to a string fails (eg. due to an exception in the toString() * method of the object) then the length() method returns 0 and the * operator * returns NULL. */ class V8EXPORT AsciiValue { public: explicit AsciiValue(Handle<v8::Value> obj); ~AsciiValue(); char* operator*() { return str_; } const char* operator*() const { return str_; } int length() const { return length_; } private: char* str_; int length_; // Disallow copying and assigning. AsciiValue(const AsciiValue&); void operator=(const AsciiValue&); }; /** * Converts an object to a two-byte string. * If conversion to a string fails (eg. 
due to an exception in the toString()
   * method of the object) then the length() method returns 0 and the
   * operator * returns NULL.
   */
  class V8EXPORT Value {
   public:
    explicit Value(Handle<v8::Value> obj);
    ~Value();
    uint16_t* operator*() { return str_; }
    const uint16_t* operator*() const { return str_; }
    int length() const { return length_; }
   private:
    uint16_t* str_;
    int length_;

    // Disallow copying and assigning.
    Value(const Value&);
    void operator=(const Value&);
  };

 private:
  V8EXPORT void VerifyExternalStringResource(ExternalStringResource* val) const;
  V8EXPORT static void CheckCast(v8::Value* obj);
};


/**
 * A JavaScript number value (ECMA-262, 4.3.20)
 */
class Number : public Primitive {
 public:
  V8EXPORT double Value() const;
  V8EXPORT static Local<Number> New(double value);
  static inline Number* Cast(v8::Value* obj);
 private:
  // Not constructible by client code; obtain instances via New() or Cast().
  V8EXPORT Number();
  V8EXPORT static void CheckCast(v8::Value* obj);
};


/**
 * A JavaScript value representing a signed integer.
 */
class Integer : public Number {
 public:
  V8EXPORT static Local<Integer> New(int32_t value);
  V8EXPORT static Local<Integer> NewFromUnsigned(uint32_t value);
  V8EXPORT int64_t Value() const;
  static inline Integer* Cast(v8::Value* obj);
 private:
  V8EXPORT Integer();
  V8EXPORT static void CheckCast(v8::Value* obj);
};


/**
 * A JavaScript value representing a 32-bit signed integer.
 */
class Int32 : public Integer {
 public:
  V8EXPORT int32_t Value() const;
 private:
  V8EXPORT Int32();
};


/**
 * A JavaScript value representing a 32-bit unsigned integer.
 */
class Uint32 : public Integer {
 public:
  V8EXPORT uint32_t Value() const;
 private:
  V8EXPORT Uint32();
};


// Attributes of a JavaScript property; values may be or'ed together.
enum PropertyAttribute {
  None       = 0,
  ReadOnly   = 1 << 0,
  DontEnum   = 1 << 1,
  DontDelete = 1 << 2
};

// Element types accepted by Object::SetIndexedPropertiesToExternalArrayData.
enum ExternalArrayType {
  kExternalByteArray = 1,
  kExternalUnsignedByteArray,
  kExternalShortArray,
  kExternalUnsignedShortArray,
  kExternalIntArray,
  kExternalUnsignedIntArray,
  kExternalFloatArray,
  kExternalDoubleArray,
  kExternalPixelArray
};

/**
 * Accessor[Getter|Setter] are used as callback functions when
 * setting|getting a particular property. See Object and ObjectTemplate's
 * method SetAccessor.
 */
typedef Handle<Value> (*AccessorGetter)(Local<String> property,
                                        const AccessorInfo& info);


typedef void (*AccessorSetter)(Local<String> property,
                               Local<Value> value,
                               const AccessorInfo& info);


/**
 * Access control specifications.
 *
 * Some accessors should be accessible across contexts. These
 * accessors have an explicit access control parameter which specifies
 * the kind of cross-context access that should be allowed.
 *
 * Additionally, for security, accessors can prohibit overwriting by
 * accessors defined in JavaScript. For objects that have such
 * accessors either locally or in their prototype chain it is not
 * possible to overwrite the accessor by using __defineGetter__ or
 * __defineSetter__ from JavaScript code.
 */
enum AccessControl {
  DEFAULT               = 0,
  ALL_CAN_READ          = 1,
  ALL_CAN_WRITE         = 1 << 1,
  PROHIBITS_OVERWRITING = 1 << 2
};


/**
 * A JavaScript object (ECMA-262, 4.3.3)
 */
class Object : public Value {
 public:
  V8EXPORT bool Set(Handle<Value> key,
                    Handle<Value> value,
                    PropertyAttribute attribs = None);

  V8EXPORT bool Set(uint32_t index, Handle<Value> value);

  // Sets a local property on this object bypassing interceptors and
  // overriding accessors or read-only properties.
  //
  // Note that if the object has an interceptor the property will be set
  // locally, but since the interceptor takes precedence the local property
  // will only be returned if the interceptor doesn't return a value.
  //
  // Note also that this only works for named properties.
  V8EXPORT bool ForceSet(Handle<Value> key,
                         Handle<Value> value,
                         PropertyAttribute attribs = None);

  V8EXPORT Local<Value> Get(Handle<Value> key);

  V8EXPORT Local<Value> Get(uint32_t index);

  /**
   * Gets the property attributes of a property which can be None or
   * any combination of ReadOnly, DontEnum and DontDelete. Returns
   * None when the property doesn't exist.
   */
  V8EXPORT PropertyAttribute GetPropertyAttributes(Handle<Value> key);

  // TODO(1245389): Replace the type-specific versions of these
  // functions with generic ones that accept a Handle<Value> key.
  V8EXPORT bool Has(Handle<String> key);

  V8EXPORT bool Delete(Handle<String> key);

  // Delete a property on this object bypassing interceptors and
  // ignoring dont-delete attributes.
  V8EXPORT bool ForceDelete(Handle<Value> key);

  V8EXPORT bool Has(uint32_t index);

  V8EXPORT bool Delete(uint32_t index);

  V8EXPORT bool SetAccessor(Handle<String> name,
                            AccessorGetter getter,
                            AccessorSetter setter = 0,
                            Handle<Value> data = Handle<Value>(),
                            AccessControl settings = DEFAULT,
                            PropertyAttribute attribute = None);

  /**
   * Returns an array containing the names of the enumerable properties
   * of this object, including properties from prototype objects. The
   * array returned by this method contains the same values as would
   * be enumerated by a for-in statement over this object.
   */
  V8EXPORT Local<Array> GetPropertyNames();

  /**
   * This function has the same functionality as GetPropertyNames but
   * the returned array doesn't contain the names of properties from
   * prototype objects.
   */
  V8EXPORT Local<Array> GetOwnPropertyNames();

  /**
   * Get the prototype object. This does not skip objects marked to
   * be skipped by __proto__ and it does not consult the security
   * handler.
   */
  V8EXPORT Local<Value> GetPrototype();

  /**
   * Set the prototype object. This does not skip objects marked to
   * be skipped by __proto__ and it does not consult the security
   * handler.
   */
  V8EXPORT bool SetPrototype(Handle<Value> prototype);

  /**
   * Finds an instance of the given function template in the prototype
   * chain.
   */
  V8EXPORT Local<Object> FindInstanceInPrototypeChain(
      Handle<FunctionTemplate> tmpl);

  /**
   * Call builtin Object.prototype.toString on this object.
   * This is different from Value::ToString() that may call
   * user-defined toString function. This one does not.
   */
  V8EXPORT Local<String> ObjectProtoToString();

  /**
   * Returns the name of the function invoked as a constructor for this object.
   */
  V8EXPORT Local<String> GetConstructorName();

  /** Gets the number of internal fields for this Object. */
  V8EXPORT int InternalFieldCount();

  /** Gets the value in an internal field. */
  inline Local<Value> GetInternalField(int index);

  /** Sets the value in an internal field. */
  V8EXPORT void SetInternalField(int index, Handle<Value> value);

  /** Gets a native pointer from an internal field. */
  inline void* GetPointerFromInternalField(int index);

  /** Sets a native pointer in an internal field. */
  V8EXPORT void SetPointerInInternalField(int index, void* value);

  // Testers for local properties.
  V8EXPORT bool HasOwnProperty(Handle<String> key);
  V8EXPORT bool HasRealNamedProperty(Handle<String> key);
  V8EXPORT bool HasRealIndexedProperty(uint32_t index);
  V8EXPORT bool HasRealNamedCallbackProperty(Handle<String> key);

  /**
   * If result.IsEmpty() no real property was located in the prototype chain.
   * This means interceptors in the prototype chain are not called.
   */
  V8EXPORT Local<Value> GetRealNamedPropertyInPrototypeChain(
      Handle<String> key);

  /**
   * If result.IsEmpty() no real property was located on the object or
   * in the prototype chain.
   * This means interceptors in the prototype chain are not called.
   */
  V8EXPORT Local<Value> GetRealNamedProperty(Handle<String> key);

  /** Tests for a named lookup interceptor.*/
  V8EXPORT bool HasNamedLookupInterceptor();

  /** Tests for an index lookup interceptor.*/
  V8EXPORT bool HasIndexedLookupInterceptor();

  /**
   * Turns on access check on the object if the object is an instance of
   * a template that has access check callbacks. If an object has no
   * access check info, the object cannot be accessed by anyone.
   */
  V8EXPORT void TurnOnAccessCheck();

  /**
   * Returns the identity hash for this object. The current implementation
   * uses a hidden property on the object to store the identity hash.
   *
   * The return value will never be 0. Also, it is not guaranteed to be
   * unique.
   */
  V8EXPORT int GetIdentityHash();

  /**
   * Access hidden properties on JavaScript objects. These properties are
   * hidden from the executing JavaScript and only accessible through the V8
   * C++ API. Hidden properties introduced by V8 internally (for example the
   * identity hash) are prefixed with "v8::".
   */
  V8EXPORT bool SetHiddenValue(Handle<String> key, Handle<Value> value);
  V8EXPORT Local<Value> GetHiddenValue(Handle<String> key);
  V8EXPORT bool DeleteHiddenValue(Handle<String> key);

  /**
   * Returns true if this is an instance of an api function (one
   * created from a function created from a function template) and has
   * been modified since it was created. Note that this method is
   * conservative and may return true for objects that haven't actually
   * been modified.
   */
  V8EXPORT bool IsDirty();

  /**
   * Clone this object with a fast but shallow copy. Values will point
   * to the same values as the original object.
   */
  V8EXPORT Local<Object> Clone();

  /**
   * Returns the context in which the object was created.
   */
  V8EXPORT Local<Context> CreationContext();

  /**
   * Set the backing store of the indexed properties to be managed by the
   * embedding layer. Access to the indexed properties will follow the rules
   * spelled out in CanvasPixelArray.
   * Note: The embedding program still owns the data and needs to ensure that
   * the backing store is preserved while V8 has a reference.
   */
  V8EXPORT void SetIndexedPropertiesToPixelData(uint8_t* data, int length);
  V8EXPORT bool HasIndexedPropertiesInPixelData();
  V8EXPORT uint8_t* GetIndexedPropertiesPixelData();
  V8EXPORT int GetIndexedPropertiesPixelDataLength();

  /**
   * Set the backing store of the indexed properties to be managed by the
   * embedding layer. Access to the indexed properties will follow the rules
   * spelled out for the CanvasArray subtypes in the WebGL specification.
   * Note: The embedding program still owns the data and needs to ensure that
   * the backing store is preserved while V8 has a reference.
   */
  V8EXPORT void SetIndexedPropertiesToExternalArrayData(
      void* data,
      ExternalArrayType array_type,
      int number_of_elements);
  V8EXPORT bool HasIndexedPropertiesInExternalArrayData();
  V8EXPORT void* GetIndexedPropertiesExternalArrayData();
  V8EXPORT ExternalArrayType GetIndexedPropertiesExternalArrayDataType();
  V8EXPORT int GetIndexedPropertiesExternalArrayDataLength();

  /**
   * Checks whether a callback is set by the
   * ObjectTemplate::SetCallAsFunctionHandler method.
   * When an Object is callable this method returns true.
   */
  V8EXPORT bool IsCallable();

  /**
   * Call an Object as a function if a callback is set by the
   * ObjectTemplate::SetCallAsFunctionHandler method.
   */
  V8EXPORT Local<Value> CallAsFunction(Handle<Object> recv,
                                       int argc,
                                       Handle<Value> argv[]);

  /**
   * Call an Object as a constructor if a callback is set by the
   * ObjectTemplate::SetCallAsFunctionHandler method.
   * Note: This method behaves like the Function::NewInstance method.
   */
  V8EXPORT Local<Value> CallAsConstructor(int argc, Handle<Value> argv[]);

  V8EXPORT static Local<Object> New();
  static inline Object* Cast(Value* obj);

 private:
  V8EXPORT Object();
  V8EXPORT static void CheckCast(Value* obj);
  V8EXPORT Local<Value> CheckedGetInternalField(int index);
  V8EXPORT void* SlowGetPointerFromInternalField(int index);

  /**
   * If quick access to the internal field is possible this method
   * returns the value. Otherwise an empty handle is returned.
   */
  inline Local<Value> UncheckedGetInternalField(int index);
};


/**
 * An instance of the built-in array constructor (ECMA-262, 15.4.2).
 */
class Array : public Object {
 public:
  V8EXPORT uint32_t Length() const;

  /**
   * Clones an element at index |index|. Returns an empty
   * handle if cloning fails (for any reason).
   */
  V8EXPORT Local<Object> CloneElementAt(uint32_t index);

  /**
   * Creates a JavaScript array with the given length. If the length
   * is negative the returned array will have length 0.
   */
  V8EXPORT static Local<Array> New(int length = 0);

  static inline Array* Cast(Value* obj);

 private:
  V8EXPORT Array();
  V8EXPORT static void CheckCast(Value* obj);
};


/**
 * A JavaScript function object (ECMA-262, 15.3).
 */
class Function : public Object {
 public:
  V8EXPORT Local<Object> NewInstance() const;
  V8EXPORT Local<Object> NewInstance(int argc, Handle<Value> argv[]) const;
  V8EXPORT Local<Value> Call(Handle<Object> recv,
                             int argc,
                             Handle<Value> argv[]);
  V8EXPORT void SetName(Handle<String> name);
  V8EXPORT Handle<Value> GetName() const;

  /**
   * Returns zero based line number of function body and
   * kLineOffsetNotFound if no information available.
   */
  V8EXPORT int GetScriptLineNumber() const;
  V8EXPORT ScriptOrigin GetScriptOrigin() const;
  static inline Function* Cast(Value* obj);
  V8EXPORT static const int kLineOffsetNotFound;

 private:
  V8EXPORT Function();
  V8EXPORT static void CheckCast(Value* obj);
};


/**
 * An instance of the built-in Date constructor (ECMA-262, 15.9).
 */
class Date : public Object {
 public:
  V8EXPORT static Local<Value> New(double time);

  /**
   * A specialization of Value::NumberValue that is more efficient
   * because we know the structure of this object.
   */
  V8EXPORT double NumberValue() const;

  static inline Date* Cast(v8::Value* obj);

  /**
   * Notification that the embedder has changed the time zone,
   * daylight savings time, or other date / time configuration
   * parameters. V8 keeps a cache of various values used for
   * date / time computation. This notification will reset
   * those cached values for the current context so that date /
   * time configuration changes would be reflected in the Date
   * object.
   *
   * This API should not be called more than needed as it will
   * negatively impact the performance of date operations.
   */
  V8EXPORT static void DateTimeConfigurationChangeNotification();

 private:
  V8EXPORT static void CheckCast(v8::Value* obj);
};


/**
 * A Number object (ECMA-262, 4.3.21).
 */
class NumberObject : public Object {
 public:
  V8EXPORT static Local<Value> New(double value);

  /**
   * Returns the Number held by the object.
   */
  V8EXPORT double NumberValue() const;

  static inline NumberObject* Cast(v8::Value* obj);

 private:
  V8EXPORT static void CheckCast(v8::Value* obj);
};


/**
 * A Boolean object (ECMA-262, 4.3.15).
 */
class BooleanObject : public Object {
 public:
  V8EXPORT static Local<Value> New(bool value);

  /**
   * Returns the Boolean held by the object.
   */
  V8EXPORT bool BooleanValue() const;

  static inline BooleanObject* Cast(v8::Value* obj);

 private:
  V8EXPORT static void CheckCast(v8::Value* obj);
};


/**
 * A String object (ECMA-262, 4.3.18).
 */
class StringObject : public Object {
 public:
  V8EXPORT static Local<Value> New(Handle<String> value);

  /**
   * Returns the String held by the object.
   */
  V8EXPORT Local<String> StringValue() const;

  static inline StringObject* Cast(v8::Value* obj);

 private:
  V8EXPORT static void CheckCast(v8::Value* obj);
};


/**
 * An instance of the built-in RegExp constructor (ECMA-262, 15.10).
 */
class RegExp : public Object {
 public:
  /**
   * Regular expression flag bits. They can be or'ed to enable a set
   * of flags.
   */
  enum Flags {
    kNone = 0,
    kGlobal = 1,
    kIgnoreCase = 2,
    kMultiline = 4
  };

  /**
   * Creates a regular expression from the given pattern string and
   * the flags bit field. May throw a JavaScript exception as
   * described in ECMA-262, 15.10.4.1.
   *
   * For example,
   *   RegExp::New(v8::String::New("foo"),
   *               static_cast<RegExp::Flags>(kGlobal | kMultiline))
   * is equivalent to evaluating "/foo/gm".
   */
  V8EXPORT static Local<RegExp> New(Handle<String> pattern, Flags flags);

  /**
   * Returns the value of the source property: a string representing
   * the regular expression.
   */
  V8EXPORT Local<String> GetSource() const;

  /**
   * Returns the flags bit field.
   */
  V8EXPORT Flags GetFlags() const;

  static inline RegExp* Cast(v8::Value* obj);

 private:
  V8EXPORT static void CheckCast(v8::Value* obj);
};


/**
 * A JavaScript value that wraps a C++ void*. This type of value is
 * mainly used to associate C++ data structures with JavaScript
 * objects.
 *
 * The Wrap function V8 will return the most optimal Value object wrapping the
 * C++ void*. The type of the value is not guaranteed to be an External object
 * and no assumptions about its type should be made. To access the wrapped
 * value Unwrap should be used, all other operations on that object will lead
 * to unpredictable results.
 */
class External : public Value {
 public:
  V8EXPORT static Local<Value> Wrap(void* data);
  static inline void* Unwrap(Handle<Value> obj);

  V8EXPORT static Local<External> New(void* value);
  static inline External* Cast(Value* obj);
  V8EXPORT void* Value() const;

 private:
  V8EXPORT External();
  V8EXPORT static void CheckCast(v8::Value* obj);
  static inline void* QuickUnwrap(Handle<v8::Value> obj);
  V8EXPORT static void* FullUnwrap(Handle<v8::Value> obj);
};


// --- Templates ---


/**
 * The superclass of object and function templates.
 */
class V8EXPORT Template : public Data {
 public:
  /** Adds a property to each instance created by this template.*/
  void Set(Handle<String> name, Handle<Data> value,
           PropertyAttribute attributes = None);
  inline void Set(const char* name, Handle<Data> value);

 private:
  Template();

  friend class ObjectTemplate;
  friend class FunctionTemplate;
};


/**
 * The argument information given to function call callbacks. This
 * class provides access to information about the context of the call,
 * including the receiver, the number and values of arguments, and
 * the holder of the function.
 */
class Arguments {
 public:
  inline int Length() const;
  inline Local<Value> operator[](int i) const;
  inline Local<Function> Callee() const;
  inline Local<Object> This() const;
  inline Local<Object> Holder() const;
  inline bool IsConstructCall() const;
  inline Local<Value> Data() const;

 private:
  static const int kDataIndex = 0;
  static const int kCalleeIndex = -1;
  static const int kHolderIndex = -2;

  friend class ImplementationUtilities;
  inline Arguments(internal::Object** implicit_args,
                   internal::Object** values,
                   int length,
                   bool is_construct_call);
  internal::Object** implicit_args_;
  internal::Object** values_;
  int length_;
  bool is_construct_call_;
};


/**
 * The information passed to an accessor callback about the context
 * of the property access.
 */
class V8EXPORT AccessorInfo {
 public:
  inline AccessorInfo(internal::Object** args)
      : args_(args) { }
  inline Local<Value> Data() const;
  inline Local<Object> This() const;
  inline Local<Object> Holder() const;

 private:
  internal::Object** args_;
};


typedef Handle<Value> (*InvocationCallback)(const Arguments& args);

/**
 * NamedProperty[Getter|Setter] are used as interceptors on object.
 * See ObjectTemplate::SetNamedPropertyHandler.
 */
typedef Handle<Value> (*NamedPropertyGetter)(Local<String> property,
                                             const AccessorInfo& info);

/**
 * Returns the value if the setter intercepts the request.
 * Otherwise, returns an empty handle.
 */
typedef Handle<Value> (*NamedPropertySetter)(Local<String> property,
                                             Local<Value> value,
                                             const AccessorInfo& info);

/**
 * Returns a non-empty handle if the interceptor intercepts the request.
 * The result is an integer encoding property attributes (like v8::None,
 * v8::DontEnum, etc.)
 */
typedef Handle<Integer> (*NamedPropertyQuery)(Local<String> property,
                                              const AccessorInfo& info);

/**
 * Returns a non-empty handle if the deleter intercepts the request.
 * The return value is true if the property could be deleted and false
 * otherwise.
 */
typedef Handle<Boolean> (*NamedPropertyDeleter)(Local<String> property,
                                                const AccessorInfo& info);

/**
 * Returns an array containing the names of the properties the named
 * property getter intercepts.
 */
typedef Handle<Array> (*NamedPropertyEnumerator)(const AccessorInfo& info);

/**
 * Returns the value of the property if the getter intercepts the
 * request. Otherwise, returns an empty handle.
 */
typedef Handle<Value> (*IndexedPropertyGetter)(uint32_t index,
                                               const AccessorInfo& info);

/**
 * Returns the value if the setter intercepts the request.
 * Otherwise, returns an empty handle.
 */
typedef Handle<Value> (*IndexedPropertySetter)(uint32_t index,
                                               Local<Value> value,
                                               const AccessorInfo& info);

/**
 * Returns a non-empty handle if the interceptor intercepts the request.
 * The result is an integer encoding property attributes.
 */
typedef Handle<Integer> (*IndexedPropertyQuery)(uint32_t index,
                                                const AccessorInfo& info);

/**
 * Returns a non-empty handle if the deleter intercepts the request.
 * The return value is true if the property could be deleted and false
 * otherwise.
 */
typedef Handle<Boolean> (*IndexedPropertyDeleter)(uint32_t index,
                                                  const AccessorInfo& info);

/**
 * Returns an array containing the indices of the properties the
 * indexed property getter intercepts.
 */
typedef Handle<Array> (*IndexedPropertyEnumerator)(const AccessorInfo& info);


/**
 * Access type specification.
 */
enum AccessType {
  ACCESS_GET,
  ACCESS_SET,
  ACCESS_HAS,
  ACCESS_DELETE,
  ACCESS_KEYS
};


/**
 * Returns true if cross-context access should be allowed to the named
 * property with the given key on the host object.
 */
typedef bool (*NamedSecurityCallback)(Local<Object> host,
                                      Local<Value> key,
                                      AccessType type,
                                      Local<Value> data);


/**
 * Returns true if cross-context access should be allowed to the indexed
 * property with the given index on the host object.
 */
typedef bool (*IndexedSecurityCallback)(Local<Object> host,
                                        uint32_t index,
                                        AccessType type,
                                        Local<Value> data);


/**
 * A FunctionTemplate is used to create functions at runtime. There
 * can only be one function created from a FunctionTemplate in a
 * context. The lifetime of the created function is equal to the
 * lifetime of the context. So in case the embedder needs to create
 * temporary functions that can be collected using Scripts is
 * preferred.
 *
 * A FunctionTemplate can have properties, these properties are added to the
 * function object when it is created.
 *
 * A FunctionTemplate has a corresponding instance template which is
 * used to create object instances when the function is used as a
 * constructor. Properties added to the instance template are added to
 * each object instance.
 *
 * A FunctionTemplate can have a prototype template. The prototype template
 * is used to create the prototype object of the function.
 *
 * The following example shows how to use a FunctionTemplate:
 *
 * \code
 *    v8::Local<v8::FunctionTemplate> t = v8::FunctionTemplate::New();
 *    t->Set("func_property", v8::Number::New(1));
 *
 *    v8::Local<v8::Template> proto_t = t->PrototypeTemplate();
 *    proto_t->Set("proto_method", v8::FunctionTemplate::New(InvokeCallback));
 *    proto_t->Set("proto_const", v8::Number::New(2));
 *
 *    v8::Local<v8::ObjectTemplate> instance_t = t->InstanceTemplate();
 *    instance_t->SetAccessor("instance_accessor", InstanceAccessorCallback);
 *    instance_t->SetNamedPropertyHandler(PropertyHandlerCallback, ...);
 *    instance_t->Set("instance_property", Number::New(3));
 *
 *    v8::Local<v8::Function> function = t->GetFunction();
 *    v8::Local<v8::Object> instance = function->NewInstance();
 * \endcode
 *
 * Let's use "function" as the JS variable name of the function object
 * and "instance" for the instance object created above. The function
 * and the instance will have the following properties:
 *
 * \code
 *   func_property in function == true;
 *   function.func_property == 1;
 *
 *   function.prototype.proto_method() invokes 'InvokeCallback'
 *   function.prototype.proto_const == 2;
 *
 *   instance instanceof function == true;
 *   instance.instance_accessor calls 'InstanceAccessorCallback'
 *   instance.instance_property == 3;
 * \endcode
 *
 * A FunctionTemplate can inherit from another one by calling the
 * FunctionTemplate::Inherit method. The following graph illustrates
 * the semantics of inheritance:
 *
 * \code
 *   FunctionTemplate Parent  -> Parent() . prototype -> { }
 *     ^                                                  ^
 *     | Inherit(Parent)                                  | .__proto__
 *     |                                                  |
 *   FunctionTemplate Child   -> Child()  . prototype -> { }
 * \endcode
 *
 * A FunctionTemplate 'Child' inherits from 'Parent', the prototype
 * object of the Child() function has __proto__ pointing to the
 * Parent() function's prototype object. An instance of the Child
 * function has all properties on Parent's instance templates.
 *
 * Let Parent be the FunctionTemplate initialized in the previous
 * section and create a Child FunctionTemplate by:
 *
 * \code
 *   Local<FunctionTemplate> parent = t;
 *   Local<FunctionTemplate> child = FunctionTemplate::New();
 *   child->Inherit(parent);
 *
 *   Local<Function> child_function = child->GetFunction();
 *   Local<Object> child_instance = child_function->NewInstance();
 * \endcode
 *
 * The Child function and Child instance will have the following
 * properties:
 *
 * \code
 *   child_func.prototype.__proto__ == function.prototype;
 *   child_instance.instance_accessor calls 'InstanceAccessorCallback'
 *   child_instance.instance_property == 3;
 * \endcode
 */
class V8EXPORT FunctionTemplate : public Template {
 public:
  /** Creates a function template.*/
  static Local<FunctionTemplate> New(
      InvocationCallback callback = 0,
      Handle<Value> data = Handle<Value>(),
      Handle<Signature> signature = Handle<Signature>());

  /** Returns the unique function instance in the current execution context.*/
  Local<Function> GetFunction();

  /**
   * Set the call-handler callback for a FunctionTemplate. This
   * callback is called whenever the function created from this
   * FunctionTemplate is called.
   */
  void SetCallHandler(InvocationCallback callback,
                      Handle<Value> data = Handle<Value>());

  /** Get the InstanceTemplate. */
  Local<ObjectTemplate> InstanceTemplate();

  /** Causes the function template to inherit from a parent function template.*/
  void Inherit(Handle<FunctionTemplate> parent);

  /**
   * A PrototypeTemplate is the template used to create the prototype object
   * of the function created by this template.
   */
  Local<ObjectTemplate> PrototypeTemplate();

  /**
   * Set the class name of the FunctionTemplate. This is used for
   * printing objects created with the function created from the
   * FunctionTemplate as its constructor.
   */
  void SetClassName(Handle<String> name);

  /**
   * Determines whether the __proto__ accessor ignores instances of
   * the function template. If instances of the function template are
   * ignored, __proto__ skips all instances and instead returns the
   * next object in the prototype chain.
   *
   * Call with a value of true to make the __proto__ accessor ignore
   * instances of the function template. Call with a value of false
   * to make the __proto__ accessor not ignore instances of the
   * function template. By default, instances of a function template
   * are not ignored.
   */
  void SetHiddenPrototype(bool value);

  /**
   * Sets the ReadOnly flag in the attributes of the 'prototype' property
   * of functions created from this FunctionTemplate to true.
   */
  void ReadOnlyPrototype();

  /**
   * Returns true if the given object is an instance of this function
   * template.
   */
  bool HasInstance(Handle<Value> object);

 private:
  FunctionTemplate();
  void AddInstancePropertyAccessor(Handle<String> name,
                                   AccessorGetter getter,
                                   AccessorSetter setter,
                                   Handle<Value> data,
                                   AccessControl settings,
                                   PropertyAttribute attributes);
  void SetNamedInstancePropertyHandler(NamedPropertyGetter getter,
                                       NamedPropertySetter setter,
                                       NamedPropertyQuery query,
                                       NamedPropertyDeleter remover,
                                       NamedPropertyEnumerator enumerator,
                                       Handle<Value> data);
  void SetIndexedInstancePropertyHandler(IndexedPropertyGetter getter,
                                         IndexedPropertySetter setter,
                                         IndexedPropertyQuery query,
                                         IndexedPropertyDeleter remover,
                                         IndexedPropertyEnumerator enumerator,
                                         Handle<Value> data);
  void SetInstanceCallAsFunctionHandler(InvocationCallback callback,
                                        Handle<Value> data);

  friend class Context;
  friend class ObjectTemplate;
};


/**
 * An ObjectTemplate is used to create objects at runtime.
 *
 * Properties added to an ObjectTemplate are added to each object
 * created from the ObjectTemplate.
 */
class V8EXPORT ObjectTemplate : public Template {
 public:
  /** Creates an ObjectTemplate. */
  static Local<ObjectTemplate> New();

  /** Creates a new instance of this template.*/
  Local<Object> NewInstance();

  /**
   * Sets an accessor on the object template.
* * Whenever the property with the given name is accessed on objects * created from this ObjectTemplate the getter and setter callbacks * are called instead of getting and setting the property directly * on the JavaScript object. * * \param name The name of the property for which an accessor is added. * \param getter The callback to invoke when getting the property. * \param setter The callback to invoke when setting the property. * \param data A piece of data that will be passed to the getter and setter * callbacks whenever they are invoked. * \param settings Access control settings for the accessor. This is a bit * field consisting of one of more of * DEFAULT = 0, ALL_CAN_READ = 1, or ALL_CAN_WRITE = 2. * The default is to not allow cross-context access. * ALL_CAN_READ means that all cross-context reads are allowed. * ALL_CAN_WRITE means that all cross-context writes are allowed. * The combination ALL_CAN_READ | ALL_CAN_WRITE can be used to allow all * cross-context access. * \param attribute The attributes of the property for which an accessor * is added. */ void SetAccessor(Handle<String> name, AccessorGetter getter, AccessorSetter setter = 0, Handle<Value> data = Handle<Value>(), AccessControl settings = DEFAULT, PropertyAttribute attribute = None); /** * Sets a named property handler on the object template. * * Whenever a named property is accessed on objects created from * this object template, the provided callback is invoked instead of * accessing the property directly on the JavaScript object. * * \param getter The callback to invoke when getting a property. * \param setter The callback to invoke when setting a property. * \param query The callback to invoke to check if a property is present, * and if present, get its attributes. * \param deleter The callback to invoke when deleting a property. * \param enumerator The callback to invoke to enumerate all the named * properties of an object. 
* \param data A piece of data that will be passed to the callbacks * whenever they are invoked. */ void SetNamedPropertyHandler(NamedPropertyGetter getter, NamedPropertySetter setter = 0, NamedPropertyQuery query = 0, NamedPropertyDeleter deleter = 0, NamedPropertyEnumerator enumerator = 0, Handle<Value> data = Handle<Value>()); /** * Sets an indexed property handler on the object template. * * Whenever an indexed property is accessed on objects created from * this object template, the provided callback is invoked instead of * accessing the property directly on the JavaScript object. * * \param getter The callback to invoke when getting a property. * \param setter The callback to invoke when setting a property. * \param query The callback to invoke to check if an object has a property. * \param deleter The callback to invoke when deleting a property. * \param enumerator The callback to invoke to enumerate all the indexed * properties of an object. * \param data A piece of data that will be passed to the callbacks * whenever they are invoked. */ void SetIndexedPropertyHandler(IndexedPropertyGetter getter, IndexedPropertySetter setter = 0, IndexedPropertyQuery query = 0, IndexedPropertyDeleter deleter = 0, IndexedPropertyEnumerator enumerator = 0, Handle<Value> data = Handle<Value>()); /** * Sets the callback to be used when calling instances created from * this template as a function. If no callback is set, instances * behave like normal JavaScript objects that cannot be called as a * function. */ void SetCallAsFunctionHandler(InvocationCallback callback, Handle<Value> data = Handle<Value>()); /** * Mark object instances of the template as undetectable. * * In many ways, undetectable objects behave as though they are not * there. They behave like 'undefined' in conditionals and when * printed. However, properties can be accessed and called as on * normal objects. */ void MarkAsUndetectable(); /** * Sets access check callbacks on the object template. 
* * When accessing properties on instances of this object template, * the access check callback will be called to determine whether or * not to allow cross-context access to the properties. * The last parameter specifies whether access checks are turned * on by default on instances. If access checks are off by default, * they can be turned on on individual instances by calling * Object::TurnOnAccessCheck(). */ void SetAccessCheckCallbacks(NamedSecurityCallback named_handler, IndexedSecurityCallback indexed_handler, Handle<Value> data = Handle<Value>(), bool turned_on_by_default = true); /** * Gets the number of internal fields for objects generated from * this template. */ int InternalFieldCount(); /** * Sets the number of internal fields for objects generated from * this template. */ void SetInternalFieldCount(int value); private: ObjectTemplate(); static Local<ObjectTemplate> New(Handle<FunctionTemplate> constructor); friend class FunctionTemplate; }; /** * A Signature specifies which receivers and arguments a function can * legally be called with. */ class V8EXPORT Signature : public Data { public: static Local<Signature> New(Handle<FunctionTemplate> receiver = Handle<FunctionTemplate>(), int argc = 0, Handle<FunctionTemplate> argv[] = 0); private: Signature(); }; /** * A utility for determining the type of objects based on the template * they were constructed from. 
*/ class V8EXPORT TypeSwitch : public Data { public: static Local<TypeSwitch> New(Handle<FunctionTemplate> type); static Local<TypeSwitch> New(int argc, Handle<FunctionTemplate> types[]); int match(Handle<Value> value); private: TypeSwitch(); }; // --- Extensions --- /** * Ignore */ class V8EXPORT Extension { // NOLINT public: Extension(const char* name, const char* source = 0, int dep_count = 0, const char** deps = 0); virtual ~Extension() { } virtual v8::Handle<v8::FunctionTemplate> GetNativeFunction(v8::Handle<v8::String> name) { return v8::Handle<v8::FunctionTemplate>(); } const char* name() { return name_; } const char* source() { return source_; } int dependency_count() { return dep_count_; } const char** dependencies() { return deps_; } void set_auto_enable(bool value) { auto_enable_ = value; } bool auto_enable() { return auto_enable_; } private: const char* name_; const char* source_; int dep_count_; const char** deps_; bool auto_enable_; // Disallow copying and assigning. Extension(const Extension&); void operator=(const Extension&); }; void V8EXPORT RegisterExtension(Extension* extension); /** * Ignore */ class V8EXPORT DeclareExtension { public: inline DeclareExtension(Extension* extension) { RegisterExtension(extension); } }; // --- Statics --- Handle<Primitive> V8EXPORT Undefined(); Handle<Primitive> V8EXPORT Null(); Handle<Boolean> V8EXPORT True(); Handle<Boolean> V8EXPORT False(); /** * A set of constraints that specifies the limits of the runtime's memory use. * You must set the heap size before initializing the VM - the size cannot be * adjusted after the VM is initialized. * * If you are using threads then you should hold the V8::Locker lock while * setting the stack limit and you must set a non-default stack limit separately * for each thread. 
*/ class V8EXPORT ResourceConstraints { public: ResourceConstraints(); int max_young_space_size() const { return max_young_space_size_; } void set_max_young_space_size(int value) { max_young_space_size_ = value; } int max_old_space_size() const { return max_old_space_size_; } void set_max_old_space_size(int value) { max_old_space_size_ = value; } int max_executable_size() { return max_executable_size_; } void set_max_executable_size(int value) { max_executable_size_ = value; } uint32_t* stack_limit() const { return stack_limit_; } // Sets an address beyond which the VM's stack may not grow. void set_stack_limit(uint32_t* value) { stack_limit_ = value; } private: int max_young_space_size_; int max_old_space_size_; int max_executable_size_; uint32_t* stack_limit_; }; bool V8EXPORT SetResourceConstraints(ResourceConstraints* constraints); // --- Exceptions --- typedef void (*FatalErrorCallback)(const char* location, const char* message); typedef void (*MessageCallback)(Handle<Message> message, Handle<Value> data); /** * Schedules an exception to be thrown when returning to JavaScript. When an * exception has been scheduled it is illegal to invoke any JavaScript * operation; the caller must return immediately and only after the exception * has been handled does it become legal to invoke JavaScript operations. */ Handle<Value> V8EXPORT ThrowException(Handle<Value> exception); /** * Create new error objects by calling the corresponding error object * constructor with the message. 
*/ class V8EXPORT Exception { public: static Local<Value> RangeError(Handle<String> message); static Local<Value> ReferenceError(Handle<String> message); static Local<Value> SyntaxError(Handle<String> message); static Local<Value> TypeError(Handle<String> message); static Local<Value> Error(Handle<String> message); }; // --- Counters Callbacks --- typedef int* (*CounterLookupCallback)(const char* name); typedef void* (*CreateHistogramCallback)(const char* name, int min, int max, size_t buckets); typedef void (*AddHistogramSampleCallback)(void* histogram, int sample); // --- Memory Allocation Callback --- enum ObjectSpace { kObjectSpaceNewSpace = 1 << 0, kObjectSpaceOldPointerSpace = 1 << 1, kObjectSpaceOldDataSpace = 1 << 2, kObjectSpaceCodeSpace = 1 << 3, kObjectSpaceMapSpace = 1 << 4, kObjectSpaceLoSpace = 1 << 5, kObjectSpaceAll = kObjectSpaceNewSpace | kObjectSpaceOldPointerSpace | kObjectSpaceOldDataSpace | kObjectSpaceCodeSpace | kObjectSpaceMapSpace | kObjectSpaceLoSpace }; enum AllocationAction { kAllocationActionAllocate = 1 << 0, kAllocationActionFree = 1 << 1, kAllocationActionAll = kAllocationActionAllocate | kAllocationActionFree }; typedef void (*MemoryAllocationCallback)(ObjectSpace space, AllocationAction action, int size); // --- Failed Access Check Callback --- typedef void (*FailedAccessCheckCallback)(Local<Object> target, AccessType type, Local<Value> data); // --- AllowCodeGenerationFromStrings callbacks --- /** * Callback to check if code generation from strings is allowed. See * Context::AllowCodeGenerationFromStrings. */ typedef bool (*AllowCodeGenerationFromStringsCallback)(Local<Context> context); // --- Garbage Collection Callbacks --- /** * Applications can register callback functions which will be called * before and after a garbage collection. 
Allocations are not * allowed in the callback functions, you therefore cannot manipulate * objects (set or delete properties for example) since it is possible * such operations will result in the allocation of objects. */ enum GCType { kGCTypeScavenge = 1 << 0, kGCTypeMarkSweepCompact = 1 << 1, kGCTypeAll = kGCTypeScavenge | kGCTypeMarkSweepCompact }; enum GCCallbackFlags { kNoGCCallbackFlags = 0, kGCCallbackFlagCompacted = 1 << 0 }; typedef void (*GCPrologueCallback)(GCType type, GCCallbackFlags flags); typedef void (*GCEpilogueCallback)(GCType type, GCCallbackFlags flags); typedef void (*GCCallback)(); /** * Collection of V8 heap information. * * Instances of this class can be passed to v8::V8::HeapStatistics to * get heap statistics from V8. */ class V8EXPORT HeapStatistics { public: HeapStatistics(); size_t total_heap_size() { return total_heap_size_; } size_t total_heap_size_executable() { return total_heap_size_executable_; } size_t used_heap_size() { return used_heap_size_; } size_t heap_size_limit() { return heap_size_limit_; } private: void set_total_heap_size(size_t size) { total_heap_size_ = size; } void set_total_heap_size_executable(size_t size) { total_heap_size_executable_ = size; } void set_used_heap_size(size_t size) { used_heap_size_ = size; } void set_heap_size_limit(size_t size) { heap_size_limit_ = size; } size_t total_heap_size_; size_t total_heap_size_executable_; size_t used_heap_size_; size_t heap_size_limit_; friend class V8; }; class RetainedObjectInfo; /** * Isolate represents an isolated instance of the V8 engine. V8 * isolates have completely separate states. Objects from one isolate * must not be used in other isolates. When V8 is initialized a * default isolate is implicitly created and entered. The embedder * can create additional isolates and use them in parallel in multiple * threads. An isolate can be entered by at most one thread at any * given time. The Locker/Unlocker API can be used to synchronize. 
 */
class V8EXPORT Isolate {
 public:
  /**
   * Stack-allocated class which sets the isolate for all operations
   * executed within a local scope.
   */
  class V8EXPORT Scope {
   public:
    explicit Scope(Isolate* isolate) : isolate_(isolate) {
      isolate->Enter();
    }

    ~Scope() { isolate_->Exit(); }

   private:
    Isolate* const isolate_;

    // Prevent copying of Scope objects.
    Scope(const Scope&);
    Scope& operator=(const Scope&);
  };

  /**
   * Creates a new isolate.  Does not change the currently entered
   * isolate.
   *
   * When an isolate is no longer used its resources should be freed
   * by calling Dispose().  Using the delete operator is not allowed.
   */
  static Isolate* New();

  /**
   * Returns the entered isolate for the current thread or NULL in
   * case there is no current isolate.
   */
  static Isolate* GetCurrent();

  /**
   * Methods below this point require holding a lock (using Locker) in
   * a multi-threaded environment.
   */

  /**
   * Sets this isolate as the entered one for the current thread.
   * Saves the previously entered one (if any), so that it can be
   * restored when exiting.  Re-entering an isolate is allowed.
   */
  void Enter();

  /**
   * Exits this isolate by restoring the previously entered one in the
   * current thread.  The isolate may still stay the same, if it was
   * entered more than once.
   *
   * Requires: this == Isolate::GetCurrent().
   */
  void Exit();

  /**
   * Disposes the isolate.  The isolate must not be entered by any
   * thread to be disposable.
   */
  void Dispose();

  /**
   * Associate embedder-specific data with the isolate
   */
  void SetData(void* data);

  /**
   * Retrieve embedder-specific data from the isolate.
   * Returns NULL if SetData has never been called.
*/ void* GetData(); private: Isolate(); Isolate(const Isolate&); ~Isolate(); Isolate& operator=(const Isolate&); void* operator new(size_t size); void operator delete(void*, size_t); }; class StartupData { public: enum CompressionAlgorithm { kUncompressed, kBZip2 }; const char* data; int compressed_size; int raw_size; }; /** * A helper class for driving V8 startup data decompression. It is based on * "CompressedStartupData" API functions from the V8 class. It isn't mandatory * for an embedder to use this class, instead, API functions can be used * directly. * * For an example of the class usage, see the "shell.cc" sample application. */ class V8EXPORT StartupDataDecompressor { // NOLINT public: StartupDataDecompressor(); virtual ~StartupDataDecompressor(); int Decompress(); protected: virtual int DecompressData(char* raw_data, int* raw_data_size, const char* compressed_data, int compressed_data_size) = 0; private: char** raw_data; }; /** * EntropySource is used as a callback function when v8 needs a source * of entropy. */ typedef bool (*EntropySource)(unsigned char* buffer, size_t length); /** * Container class for static utility functions. */ class V8EXPORT V8 { public: /** Set the callback to invoke in case of fatal errors. */ static void SetFatalErrorHandler(FatalErrorCallback that); /** * Set the callback to invoke to check if code generation from * strings should be allowed. */ static void SetAllowCodeGenerationFromStringsCallback( AllowCodeGenerationFromStringsCallback that); /** * Ignore out-of-memory exceptions. * * V8 running out of memory is treated as a fatal error by default. * This means that the fatal error handler is called and that V8 is * terminated. * * IgnoreOutOfMemoryException can be used to not treat an * out-of-memory situation as a fatal error. This way, the contexts * that did not cause the out of memory problem might be able to * continue execution. 
*/ static void IgnoreOutOfMemoryException(); /** * Check if V8 is dead and therefore unusable. This is the case after * fatal errors such as out-of-memory situations. */ static bool IsDead(); /** * The following 4 functions are to be used when V8 is built with * the 'compress_startup_data' flag enabled. In this case, the * embedder must decompress startup data prior to initializing V8. * * This is how interaction with V8 should look like: * int compressed_data_count = v8::V8::GetCompressedStartupDataCount(); * v8::StartupData* compressed_data = * new v8::StartupData[compressed_data_count]; * v8::V8::GetCompressedStartupData(compressed_data); * ... decompress data (compressed_data can be updated in-place) ... * v8::V8::SetDecompressedStartupData(compressed_data); * ... now V8 can be initialized * ... make sure the decompressed data stays valid until V8 shutdown * * A helper class StartupDataDecompressor is provided. It implements * the protocol of the interaction described above, and can be used in * most cases instead of calling these API functions directly. */ static StartupData::CompressionAlgorithm GetCompressedStartupDataAlgorithm(); static int GetCompressedStartupDataCount(); static void GetCompressedStartupData(StartupData* compressed_data); static void SetDecompressedStartupData(StartupData* decompressed_data); /** * Adds a message listener. * * The same message listener can be added more than once and in that * case it will be called more than once for each message. */ static bool AddMessageListener(MessageCallback that, Handle<Value> data = Handle<Value>()); /** * Remove all message listeners from the specified callback function. */ static void RemoveMessageListeners(MessageCallback that); /** * Tells V8 to capture current stack trace when uncaught exception occurs * and report it to the message listeners. The option is off by default. 
*/ static void SetCaptureStackTraceForUncaughtExceptions( bool capture, int frame_limit = 10, StackTrace::StackTraceOptions options = StackTrace::kOverview); /** * Sets V8 flags from a string. */ static void SetFlagsFromString(const char* str, int length); /** * Sets V8 flags from the command line. */ static void SetFlagsFromCommandLine(int* argc, char** argv, bool remove_flags); /** Get the version string. */ static const char* GetVersion(); /** * Enables the host application to provide a mechanism for recording * statistics counters. */ static void SetCounterFunction(CounterLookupCallback); /** * Enables the host application to provide a mechanism for recording * histograms. The CreateHistogram function returns a * histogram which will later be passed to the AddHistogramSample * function. */ static void SetCreateHistogramFunction(CreateHistogramCallback); static void SetAddHistogramSampleFunction(AddHistogramSampleCallback); /** * Enables the computation of a sliding window of states. The sliding * window information is recorded in statistics counters. */ static void EnableSlidingStateWindow(); /** Callback function for reporting failed access checks.*/ static void SetFailedAccessCheckCallbackFunction(FailedAccessCheckCallback); /** * Enables the host application to receive a notification before a * garbage collection. Allocations are not allowed in the * callback function, you therefore cannot manipulate objects (set * or delete properties for example) since it is possible such * operations will result in the allocation of objects. It is possible * to specify the GCType filter for your callback. But it is not possible to * register the same callback function two times with different * GCType filters. */ static void AddGCPrologueCallback( GCPrologueCallback callback, GCType gc_type_filter = kGCTypeAll); /** * This function removes callback which was installed by * AddGCPrologueCallback function. 
*/ static void RemoveGCPrologueCallback(GCPrologueCallback callback); /** * The function is deprecated. Please use AddGCPrologueCallback instead. * Enables the host application to receive a notification before a * garbage collection. Allocations are not allowed in the * callback function, you therefore cannot manipulate objects (set * or delete properties for example) since it is possible such * operations will result in the allocation of objects. */ static void SetGlobalGCPrologueCallback(GCCallback); /** * Enables the host application to receive a notification after a * garbage collection. Allocations are not allowed in the * callback function, you therefore cannot manipulate objects (set * or delete properties for example) since it is possible such * operations will result in the allocation of objects. It is possible * to specify the GCType filter for your callback. But it is not possible to * register the same callback function two times with different * GCType filters. */ static void AddGCEpilogueCallback( GCEpilogueCallback callback, GCType gc_type_filter = kGCTypeAll); /** * This function removes callback which was installed by * AddGCEpilogueCallback function. */ static void RemoveGCEpilogueCallback(GCEpilogueCallback callback); /** * The function is deprecated. Please use AddGCEpilogueCallback instead. * Enables the host application to receive a notification after a * major garbage collection. Allocations are not allowed in the * callback function, you therefore cannot manipulate objects (set * or delete properties for example) since it is possible such * operations will result in the allocation of objects. */ static void SetGlobalGCEpilogueCallback(GCCallback); /** * Enables the host application to provide a mechanism to be notified * and perform custom logging when V8 Allocates Executable Memory. 
*/ static void AddMemoryAllocationCallback(MemoryAllocationCallback callback, ObjectSpace space, AllocationAction action); /** * This function removes callback which was installed by * AddMemoryAllocationCallback function. */ static void RemoveMemoryAllocationCallback(MemoryAllocationCallback callback); /** * Allows the host application to group objects together. If one * object in the group is alive, all objects in the group are alive. * After each garbage collection, object groups are removed. It is * intended to be used in the before-garbage-collection callback * function, for instance to simulate DOM tree connections among JS * wrapper objects. * See v8-profiler.h for RetainedObjectInfo interface description. */ static void AddObjectGroup(Persistent<Value>* objects, size_t length, RetainedObjectInfo* info = NULL); /** * Allows the host application to declare implicit references between * the objects: if |parent| is alive, all |children| are alive too. * After each garbage collection, all implicit references * are removed. It is intended to be used in the before-garbage-collection * callback function. */ static void AddImplicitReferences(Persistent<Object> parent, Persistent<Value>* children, size_t length); /** * Initializes from snapshot if possible. Otherwise, attempts to * initialize from scratch. This function is called implicitly if * you use the API without calling it first. */ static bool Initialize(); /** * Allows the host application to provide a callback which can be used * as a source of entropy for random number generators. */ static void SetEntropySource(EntropySource source); /** * Adjusts the amount of registered external memory. Used to give * V8 an indication of the amount of externally allocated memory * that is kept alive by JavaScript objects. V8 uses this to decide * when to perform global garbage collections. 
Registering
   * externally allocated memory will trigger global garbage
   * collections more often than otherwise in an attempt to garbage
   * collect the JavaScript objects keeping the externally allocated
   * memory alive.
   *
   * \param change_in_bytes the change in externally allocated memory
   *   that is kept alive by JavaScript objects.
   * \returns the adjusted value.
   */
  static int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);

  /**
   * Suspends recording of tick samples in the profiler.
   * When the V8 profiling mode is enabled (usually via command line
   * switches) this function suspends recording of tick samples.
   * Profiling ticks are discarded until ResumeProfiler() is called.
   *
   * See also the --prof and --prof_auto command line switches to
   * enable V8 profiling.
   */
  static void PauseProfiler();

  /**
   * Resumes recording of tick samples in the profiler.
   * See also PauseProfiler().
   */
  static void ResumeProfiler();

  /**
   * Return whether profiler is currently paused.
   */
  static bool IsProfilerPaused();

  /**
   * Retrieve the V8 thread id of the calling thread.
   *
   * The thread id for a thread should only be retrieved after the V8
   * lock has been acquired with a Locker object with that thread.
   */
  static int GetCurrentThreadId();

  /**
   * Forcefully terminate execution of a JavaScript thread.  This can
   * be used to terminate long-running scripts.
   *
   * TerminateExecution should only be called when the V8 lock has
   * been acquired with a Locker object.  Therefore, in order to be
   * able to terminate long-running threads, preemption must be
   * enabled to allow the user of TerminateExecution to acquire the
   * lock.
   *
   * The termination is achieved by throwing an exception that is
   * uncatchable by JavaScript exception handlers.  Termination
   * exceptions act as if they were caught by a C++ TryCatch exception
   * handler.
If forceful termination is used, any C++ TryCatch * exception handler that catches an exception should check if that * exception is a termination exception and immediately return if * that is the case. Returning immediately in that case will * continue the propagation of the termination exception if needed. * * The thread id passed to TerminateExecution must have been * obtained by calling GetCurrentThreadId on the thread in question. * * \param thread_id The thread id of the thread to terminate. */ static void TerminateExecution(int thread_id); /** * Forcefully terminate the current thread of JavaScript execution * in the given isolate. If no isolate is provided, the default * isolate is used. * * This method can be used by any thread even if that thread has not * acquired the V8 lock with a Locker object. * * \param isolate The isolate in which to terminate the current JS execution. */ static void TerminateExecution(Isolate* isolate = NULL); /** * Is V8 terminating JavaScript execution. * * Returns true if JavaScript execution is currently terminating * because of a call to TerminateExecution. In that case there are * still JavaScript frames on the stack and the termination * exception is still active. * * \param isolate The isolate in which to check. */ static bool IsExecutionTerminating(Isolate* isolate = NULL); /** * Releases any resources used by v8 and stops any utility threads * that may be running. Note that disposing v8 is permanent, it * cannot be reinitialized. * * It should generally not be necessary to dispose v8 before exiting * a process, this should happen automatically. It is only necessary * to use if the process needs the resources taken up by v8. */ static bool Dispose(); /** * Get statistics about the heap memory usage. */ static void GetHeapStatistics(HeapStatistics* heap_statistics); /** * Optional notification that the embedder is idle. * V8 uses the notification to reduce memory footprint. 
* This call can be used repeatedly if the embedder remains idle. * Returns true if the embedder should stop calling IdleNotification * until real work has been done. This indicates that V8 has done * as much cleanup as it will be able to do. */ static bool IdleNotification(); /** * Optional notification that the system is running low on memory. * V8 uses these notifications to attempt to free memory. */ static void LowMemoryNotification(); /** * Optional notification that a context has been disposed. V8 uses * these notifications to guide the GC heuristic. Returns the number * of context disposals - including this one - since the last time * V8 had a chance to clean up. */ static int ContextDisposedNotification(); private: V8(); static internal::Object** GlobalizeReference(internal::Object** handle); static void DisposeGlobal(internal::Object** global_handle); static void MakeWeak(internal::Object** global_handle, void* data, WeakReferenceCallback); static void ClearWeak(internal::Object** global_handle); static void MarkIndependent(internal::Object** global_handle); static bool IsGlobalNearDeath(internal::Object** global_handle); static bool IsGlobalWeak(internal::Object** global_handle); static void SetWrapperClassId(internal::Object** global_handle, uint16_t class_id); template <class T> friend class Handle; template <class T> friend class Local; template <class T> friend class Persistent; friend class Context; }; /** * An external exception handler. */ class V8EXPORT TryCatch { public: /** * Creates a new try/catch block and registers it with v8. */ TryCatch(); /** * Unregisters and deletes this try/catch block. */ ~TryCatch(); /** * Returns true if an exception has been caught by this try/catch block. */ bool HasCaught() const; /** * For certain types of exceptions, it makes no sense to continue * execution. 
* * Currently, the only type of exception that can be caught by a * TryCatch handler and for which it does not make sense to continue * is termination exception. Such exceptions are thrown when the * TerminateExecution methods are called to terminate a long-running * script. * * If CanContinue returns false, the correct action is to perform * any C++ cleanup needed and then return. */ bool CanContinue() const; /** * Throws the exception caught by this TryCatch in a way that avoids * it being caught again by this same TryCatch. As with ThrowException * it is illegal to execute any JavaScript operations after calling * ReThrow; the caller must return immediately to where the exception * is caught. */ Handle<Value> ReThrow(); /** * Returns the exception caught by this try/catch block. If no exception has * been caught an empty handle is returned. * * The returned handle is valid until this TryCatch block has been destroyed. */ Local<Value> Exception() const; /** * Returns the .stack property of the thrown object. If no .stack * property is present an empty handle is returned. */ Local<Value> StackTrace() const; /** * Returns the message associated with this exception. If there is * no message associated an empty handle is returned. * * The returned handle is valid until this TryCatch block has been * destroyed. */ Local<v8::Message> Message() const; /** * Clears any exceptions that may have been caught by this try/catch block. * After this method has been called, HasCaught() will return false. * * It is not necessary to clear a try/catch block before using it again; if * another exception is thrown the previously caught exception will just be * overwritten. However, it is often a good idea since it makes it easier * to determine which operation threw a given exception. */ void Reset(); /** * Set verbosity of the external exception handler. * * By default, exceptions that are caught by an external exception * handler are not reported. 
Call SetVerbose with true on an * external exception handler to have exceptions caught by the * handler reported as if they were not caught. */ void SetVerbose(bool value); /** * Set whether or not this TryCatch should capture a Message object * which holds source information about where the exception * occurred. True by default. */ void SetCaptureMessage(bool value); private: v8::internal::Isolate* isolate_; void* next_; void* exception_; void* message_; bool is_verbose_ : 1; bool can_continue_ : 1; bool capture_message_ : 1; bool rethrow_ : 1; friend class v8::internal::Isolate; }; // --- Context --- /** * Ignore */ class V8EXPORT ExtensionConfiguration { public: ExtensionConfiguration(int name_count, const char* names[]) : name_count_(name_count), names_(names) { } private: friend class ImplementationUtilities; int name_count_; const char** names_; }; /** * A sandboxed execution context with its own set of built-in objects * and functions. */ class V8EXPORT Context { public: /** * Returns the global proxy object or global object itself for * detached contexts. * * Global proxy object is a thin wrapper whose prototype points to * actual context's global object with the properties like Object, etc. * This is done that way for security reasons (for more details see * https://wiki.mozilla.org/Gecko:SplitWindow). * * Please note that changes to global proxy object prototype most probably * would break VM---v8 expects only global object as a prototype of * global proxy object. * * If DetachGlobal() has been invoked, Global() would return actual global * object until global is reattached with ReattachGlobal(). */ Local<Object> Global(); /** * Detaches the global object from its context before * the global object can be reused to create a new context. */ void DetachGlobal(); /** * Reattaches a global object to a context. This can be used to * restore the connection between a global object and a context * after DetachGlobal has been called. 
* * \param global_object The global object to reattach to the * context. For this to work, the global object must be the global * object that was associated with this context before a call to * DetachGlobal. */ void ReattachGlobal(Handle<Object> global_object); /** Creates a new context. * * Returns a persistent handle to the newly allocated context. This * persistent handle has to be disposed when the context is no * longer used so the context can be garbage collected. * * \param extensions An optional extension configuration containing * the extensions to be installed in the newly created context. * * \param global_template An optional object template from which the * global object for the newly created context will be created. * * \param global_object An optional global object to be reused for * the newly created context. This global object must have been * created by a previous call to Context::New with the same global * template. The state of the global object will be completely reset * and only object identify will remain. */ static Persistent<Context> New( ExtensionConfiguration* extensions = NULL, Handle<ObjectTemplate> global_template = Handle<ObjectTemplate>(), Handle<Value> global_object = Handle<Value>()); /** Returns the last entered context. */ static Local<Context> GetEntered(); /** Returns the context that is on the top of the stack. */ static Local<Context> GetCurrent(); /** * Returns the context of the calling JavaScript code. That is the * context of the top-most JavaScript frame. If there are no * JavaScript frames an empty handle is returned. */ static Local<Context> GetCalling(); /** * Sets the security token for the context. To access an object in * another context, the security tokens must match. */ void SetSecurityToken(Handle<Value> token); /** Restores the security token to the default value. */ void UseDefaultSecurityToken(); /** Returns the security token of this context.*/ Handle<Value> GetSecurityToken(); /** * Enter this context. 
After entering a context, all code compiled * and run is compiled and run in this context. If another context * is already entered, this old context is saved so it can be * restored when the new context is exited. */ void Enter(); /** * Exit this context. Exiting the current context restores the * context that was in place when entering the current context. */ void Exit(); /** Returns true if the context has experienced an out of memory situation. */ bool HasOutOfMemoryException(); /** Returns true if V8 has a current context. */ static bool InContext(); /** * Associate an additional data object with the context. This is mainly used * with the debugger to provide additional information on the context through * the debugger API. */ void SetData(Handle<String> data); Local<Value> GetData(); /** * Control whether code generation from strings is allowed. Calling * this method with false will disable 'eval' and the 'Function' * constructor for code running in this context. If 'eval' or the * 'Function' constructor are used an exception will be thrown. * * If code generation from strings is not allowed the * V8::AllowCodeGenerationFromStrings callback will be invoked if * set before blocking the call to 'eval' or the 'Function' * constructor. If that callback returns true, the call will be * allowed, otherwise an exception will be thrown. If no callback is * set an exception will be thrown. */ void AllowCodeGenerationFromStrings(bool allow); /** * Stack-allocated class which sets the execution context for all * operations executed within a local scope. */ class Scope { public: explicit inline Scope(Handle<Context> context) : context_(context) { context_->Enter(); } inline ~Scope() { context_->Exit(); } private: Handle<Context> context_; }; private: friend class Value; friend class Script; friend class Object; friend class Function; }; /** * Multiple threads in V8 are allowed, but only one thread at a time * is allowed to use any given V8 isolate. 
See Isolate class * comments. The definition of 'using V8 isolate' includes * accessing handles or holding onto object pointers obtained * from V8 handles while in the particular V8 isolate. It is up * to the user of V8 to ensure (perhaps with locking) that this * constraint is not violated. * * v8::Locker is a scoped lock object. While it's * active (i.e. between its construction and destruction) the current thread is * allowed to use the locked isolate. V8 guarantees that an isolate can be locked * by at most one thread at any time. In other words, the scope of a v8::Locker is * a critical section. * * Sample usage: * \code * ... * { * v8::Locker locker(isolate); * v8::Isolate::Scope isolate_scope(isolate); * ... * // Code using V8 and isolate goes here. * ... * } // Destructor called here * \endcode * * If you wish to stop using V8 in a thread A you can do this either * by destroying the v8::Locker object as above or by constructing a * v8::Unlocker object: * * \code * { * isolate->Exit(); * v8::Unlocker unlocker(isolate); * ... * // Code not using V8 goes here while V8 can run in another thread. * ... * } // Destructor called here. * isolate->Enter(); * \endcode * * The Unlocker object is intended for use in a long-running callback * from V8, where you want to release the V8 lock for other threads to * use. * * The v8::Locker is a recursive lock. That is, you can lock more than * once in a given thread. This can be useful if you have code that can * be called either from code that holds the lock or from code that does * not. The Unlocker is not recursive so you can not have several * Unlockers on the stack at once, and you can not use an Unlocker in a * thread that is not inside a Locker's scope. * * An unlocker will unlock several lockers if it has to and reinstate * the correct depth of locking on its destruction. eg.: * * \code * // V8 not locked. * { * v8::Locker locker(isolate); * Isolate::Scope isolate_scope(isolate); * // V8 locked. 
* { * v8::Locker another_locker(isolate); * // V8 still locked (2 levels). * { * isolate->Exit(); * v8::Unlocker unlocker(isolate); * // V8 not locked. * } * isolate->Enter(); * // V8 locked again (2 levels). * } * // V8 still locked (1 level). * } * // V8 Now no longer locked. * \endcode * * */ class V8EXPORT Unlocker { public: /** * Initialize Unlocker for a given Isolate. NULL means default isolate. */ explicit Unlocker(Isolate* isolate = NULL); ~Unlocker(); private: internal::Isolate* isolate_; }; class V8EXPORT Locker { public: /** * Initialize Locker for a given Isolate. NULL means default isolate. */ explicit Locker(Isolate* isolate = NULL); ~Locker(); /** * Start preemption. * * When preemption is started, a timer is fired every n milliseconds * that will switch between multiple threads that are in contention * for the V8 lock. */ static void StartPreemption(int every_n_ms); /** * Stop preemption. */ static void StopPreemption(); /** * Returns whether or not the locker for a given isolate, or default isolate if NULL is given, * is locked by the current thread. */ static bool IsLocked(Isolate* isolate = NULL); /** * Returns whether v8::Locker is being used by this V8 instance. */ static bool IsActive(); private: bool has_lock_; bool top_level_; internal::Isolate* isolate_; static bool active_; // Disallow copying and assigning. Locker(const Locker&); void operator=(const Locker&); }; /** * An interface for exporting data from V8, using "push" model. */ class V8EXPORT OutputStream { // NOLINT public: enum OutputEncoding { kAscii = 0 // 7-bit ASCII. }; enum WriteResult { kContinue = 0, kAbort = 1 }; virtual ~OutputStream() {} /** Notify about the end of stream. */ virtual void EndOfStream() = 0; /** Get preferred output chunk size. Called only once. */ virtual int GetChunkSize() { return 1024; } /** Get preferred output encoding. Called only once. 
*/ virtual OutputEncoding GetOutputEncoding() { return kAscii; } /** * Writes the next chunk of snapshot data into the stream. Writing * can be stopped by returning kAbort as function result. EndOfStream * will not be called in case writing was aborted. */ virtual WriteResult WriteAsciiChunk(char* data, int size) = 0; }; /** * An interface for reporting progress and controlling long-running * activities. */ class V8EXPORT ActivityControl { // NOLINT public: enum ControlOption { kContinue = 0, kAbort = 1 }; virtual ~ActivityControl() {} /** * Notify about current progress. The activity can be stopped by * returning kAbort as the callback result. */ virtual ControlOption ReportProgressValue(int done, int total) = 0; }; // --- Implementation --- namespace internal { static const int kApiPointerSize = sizeof(void*); // NOLINT static const int kApiIntSize = sizeof(int); // NOLINT // Tag information for HeapObject. const int kHeapObjectTag = 1; const int kHeapObjectTagSize = 2; const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1; // Tag information for Smi. const int kSmiTag = 0; const int kSmiTagSize = 1; const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1; template <size_t ptr_size> struct SmiTagging; // Smi constants for 32-bit systems. template <> struct SmiTagging<4> { static const int kSmiShiftSize = 0; static const int kSmiValueSize = 31; static inline int SmiToInt(internal::Object* value) { int shift_bits = kSmiTagSize + kSmiShiftSize; // Throw away top 32 bits and shift down (requires >> to be sign extending). return static_cast<int>(reinterpret_cast<intptr_t>(value)) >> shift_bits; } // For 32-bit systems any 2 bytes aligned pointer can be encoded as smi // with a plain reinterpret_cast. static const uintptr_t kEncodablePointerMask = 0x1; static const int kPointerToSmiShift = 0; }; // Smi constants for 64-bit systems. 
template <> struct SmiTagging<8> { static const int kSmiShiftSize = 31; static const int kSmiValueSize = 32; static inline int SmiToInt(internal::Object* value) { int shift_bits = kSmiTagSize + kSmiShiftSize; // Shift down and throw away top 32 bits. return static_cast<int>(reinterpret_cast<intptr_t>(value) >> shift_bits); } // To maximize the range of pointers that can be encoded // in the available 32 bits, we require them to be 8 bytes aligned. // This gives 2 ^ (32 + 3) = 32G address space covered. // It might be not enough to cover stack allocated objects on some platforms. static const int kPointerAlignment = 3; static const uintptr_t kEncodablePointerMask = ~(uintptr_t(0xffffffff) << kPointerAlignment); static const int kPointerToSmiShift = kSmiTagSize + kSmiShiftSize - kPointerAlignment; }; typedef SmiTagging<kApiPointerSize> PlatformSmiTagging; const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize; const int kSmiValueSize = PlatformSmiTagging::kSmiValueSize; const uintptr_t kEncodablePointerMask = PlatformSmiTagging::kEncodablePointerMask; const int kPointerToSmiShift = PlatformSmiTagging::kPointerToSmiShift; template <size_t ptr_size> struct InternalConstants; // Internal constants for 32-bit systems. template <> struct InternalConstants<4> { static const int kStringResourceOffset = 3 * kApiPointerSize; }; // Internal constants for 64-bit systems. template <> struct InternalConstants<8> { static const int kStringResourceOffset = 3 * kApiPointerSize; }; /** * This class exports constants and functionality from within v8 that * is necessary to implement inline functions in the v8 api. Don't * depend on functions and constants defined here. */ class Internals { public: // These values match non-compiler-dependent values defined within // the implementation of v8. 
static const int kHeapObjectMapOffset = 0; static const int kMapInstanceTypeOffset = 1 * kApiPointerSize + kApiIntSize; static const int kStringResourceOffset = InternalConstants<kApiPointerSize>::kStringResourceOffset; static const int kForeignAddressOffset = kApiPointerSize; static const int kJSObjectHeaderSize = 3 * kApiPointerSize; static const int kFullStringRepresentationMask = 0x07; static const int kExternalTwoByteRepresentationTag = 0x02; static const int kJSObjectType = 0xa3; static const int kFirstNonstringType = 0x80; static const int kForeignType = 0x85; static inline bool HasHeapObjectTag(internal::Object* value) { return ((reinterpret_cast<intptr_t>(value) & kHeapObjectTagMask) == kHeapObjectTag); } static inline bool HasSmiTag(internal::Object* value) { return ((reinterpret_cast<intptr_t>(value) & kSmiTagMask) == kSmiTag); } static inline int SmiValue(internal::Object* value) { return PlatformSmiTagging::SmiToInt(value); } static inline int GetInstanceType(internal::Object* obj) { typedef internal::Object O; O* map = ReadField<O*>(obj, kHeapObjectMapOffset); return ReadField<uint8_t>(map, kMapInstanceTypeOffset); } static inline void* GetExternalPointerFromSmi(internal::Object* value) { const uintptr_t address = reinterpret_cast<uintptr_t>(value); return reinterpret_cast<void*>(address >> kPointerToSmiShift); } static inline void* GetExternalPointer(internal::Object* obj) { if (HasSmiTag(obj)) { return GetExternalPointerFromSmi(obj); } else if (GetInstanceType(obj) == kForeignType) { return ReadField<void*>(obj, kForeignAddressOffset); } else { return NULL; } } static inline bool IsExternalTwoByteString(int instance_type) { int representation = (instance_type & kFullStringRepresentationMask); return representation == kExternalTwoByteRepresentationTag; } template <typename T> static inline T ReadField(Object* ptr, int offset) { uint8_t* addr = reinterpret_cast<uint8_t*>(ptr) + offset - kHeapObjectTag; return *reinterpret_cast<T*>(addr); } static 
inline bool CanCastToHeapObject(void* o) { return false; } static inline bool CanCastToHeapObject(Context* o) { return true; } static inline bool CanCastToHeapObject(String* o) { return true; } static inline bool CanCastToHeapObject(Object* o) { return true; } static inline bool CanCastToHeapObject(Message* o) { return true; } static inline bool CanCastToHeapObject(StackTrace* o) { return true; } static inline bool CanCastToHeapObject(StackFrame* o) { return true; } }; } // namespace internal template <class T> Local<T>::Local() : Handle<T>() { } template <class T> Local<T> Local<T>::New(Handle<T> that) { if (that.IsEmpty()) return Local<T>(); T* that_ptr = *that; internal::Object** p = reinterpret_cast<internal::Object**>(that_ptr); if (internal::Internals::CanCastToHeapObject(that_ptr)) { return Local<T>(reinterpret_cast<T*>(HandleScope::CreateHandle( reinterpret_cast<internal::HeapObject*>(*p)))); } return Local<T>(reinterpret_cast<T*>(HandleScope::CreateHandle(*p))); } template <class T> Persistent<T> Persistent<T>::New(Handle<T> that) { if (that.IsEmpty()) return Persistent<T>(); internal::Object** p = reinterpret_cast<internal::Object**>(*that); return Persistent<T>(reinterpret_cast<T*>(V8::GlobalizeReference(p))); } template <class T> bool Persistent<T>::IsNearDeath() const { if (this->IsEmpty()) return false; return V8::IsGlobalNearDeath(reinterpret_cast<internal::Object**>(**this)); } template <class T> bool Persistent<T>::IsWeak() const { if (this->IsEmpty()) return false; return V8::IsGlobalWeak(reinterpret_cast<internal::Object**>(**this)); } template <class T> void Persistent<T>::Dispose() { if (this->IsEmpty()) return; V8::DisposeGlobal(reinterpret_cast<internal::Object**>(**this)); } template <class T> Persistent<T>::Persistent() : Handle<T>() { } template <class T> void Persistent<T>::MakeWeak(void* parameters, WeakReferenceCallback callback) { V8::MakeWeak(reinterpret_cast<internal::Object**>(**this), parameters, callback); } template <class T> 
void Persistent<T>::ClearWeak() { V8::ClearWeak(reinterpret_cast<internal::Object**>(**this)); } template <class T> void Persistent<T>::MarkIndependent() { V8::MarkIndependent(reinterpret_cast<internal::Object**>(**this)); } template <class T> void Persistent<T>::SetWrapperClassId(uint16_t class_id) { V8::SetWrapperClassId(reinterpret_cast<internal::Object**>(**this), class_id); } Arguments::Arguments(internal::Object** implicit_args, internal::Object** values, int length, bool is_construct_call) : implicit_args_(implicit_args), values_(values), length_(length), is_construct_call_(is_construct_call) { } Local<Value> Arguments::operator[](int i) const { if (i < 0 || length_ <= i) return Local<Value>(*Undefined()); return Local<Value>(reinterpret_cast<Value*>(values_ - i)); } Local<Function> Arguments::Callee() const { return Local<Function>(reinterpret_cast<Function*>( &implicit_args_[kCalleeIndex])); } Local<Object> Arguments::This() const { return Local<Object>(reinterpret_cast<Object*>(values_ + 1)); } Local<Object> Arguments::Holder() const { return Local<Object>(reinterpret_cast<Object*>( &implicit_args_[kHolderIndex])); } Local<Value> Arguments::Data() const { return Local<Value>(reinterpret_cast<Value*>(&implicit_args_[kDataIndex])); } bool Arguments::IsConstructCall() const { return is_construct_call_; } int Arguments::Length() const { return length_; } template <class T> Local<T> HandleScope::Close(Handle<T> value) { internal::Object** before = reinterpret_cast<internal::Object**>(*value); internal::Object** after = RawClose(before); return Local<T>(reinterpret_cast<T*>(after)); } Handle<Value> ScriptOrigin::ResourceName() const { return resource_name_; } Handle<Integer> ScriptOrigin::ResourceLineOffset() const { return resource_line_offset_; } Handle<Integer> ScriptOrigin::ResourceColumnOffset() const { return resource_column_offset_; } Handle<Boolean> Boolean::New(bool value) { return value ? 
True() : False(); } void Template::Set(const char* name, v8::Handle<Data> value) { Set(v8::String::New(name), value); } Local<Value> Object::GetInternalField(int index) { #ifndef V8_ENABLE_CHECKS Local<Value> quick_result = UncheckedGetInternalField(index); if (!quick_result.IsEmpty()) return quick_result; #endif return CheckedGetInternalField(index); } Local<Value> Object::UncheckedGetInternalField(int index) { typedef internal::Object O; typedef internal::Internals I; O* obj = *reinterpret_cast<O**>(this); if (I::GetInstanceType(obj) == I::kJSObjectType) { // If the object is a plain JSObject, which is the common case, // we know where to find the internal fields and can return the // value directly. int offset = I::kJSObjectHeaderSize + (internal::kApiPointerSize * index); O* value = I::ReadField<O*>(obj, offset); O** result = HandleScope::CreateHandle(value); return Local<Value>(reinterpret_cast<Value*>(result)); } else { return Local<Value>(); } } void* External::Unwrap(Handle<v8::Value> obj) { #ifdef V8_ENABLE_CHECKS return FullUnwrap(obj); #else return QuickUnwrap(obj); #endif } void* External::QuickUnwrap(Handle<v8::Value> wrapper) { typedef internal::Object O; O* obj = *reinterpret_cast<O**>(const_cast<v8::Value*>(*wrapper)); return internal::Internals::GetExternalPointer(obj); } void* Object::GetPointerFromInternalField(int index) { typedef internal::Object O; typedef internal::Internals I; O* obj = *reinterpret_cast<O**>(this); if (I::GetInstanceType(obj) == I::kJSObjectType) { // If the object is a plain JSObject, which is the common case, // we know where to find the internal fields and can return the // value directly. 
int offset = I::kJSObjectHeaderSize + (internal::kApiPointerSize * index); O* value = I::ReadField<O*>(obj, offset); return I::GetExternalPointer(value); } return SlowGetPointerFromInternalField(index); } String* String::Cast(v8::Value* value) { #ifdef V8_ENABLE_CHECKS CheckCast(value); #endif return static_cast<String*>(value); } String::ExternalStringResource* String::GetExternalStringResource() const { typedef internal::Object O; typedef internal::Internals I; O* obj = *reinterpret_cast<O**>(const_cast<String*>(this)); String::ExternalStringResource* result; if (I::IsExternalTwoByteString(I::GetInstanceType(obj))) { void* value = I::ReadField<void*>(obj, I::kStringResourceOffset); result = reinterpret_cast<String::ExternalStringResource*>(value); } else { result = NULL; } #ifdef V8_ENABLE_CHECKS VerifyExternalStringResource(result); #endif return result; } bool Value::IsString() const { #ifdef V8_ENABLE_CHECKS return FullIsString(); #else return QuickIsString(); #endif } bool Value::QuickIsString() const { typedef internal::Object O; typedef internal::Internals I; O* obj = *reinterpret_cast<O**>(const_cast<Value*>(this)); if (!I::HasHeapObjectTag(obj)) return false; return (I::GetInstanceType(obj) < I::kFirstNonstringType); } Number* Number::Cast(v8::Value* value) { #ifdef V8_ENABLE_CHECKS CheckCast(value); #endif return static_cast<Number*>(value); } Integer* Integer::Cast(v8::Value* value) { #ifdef V8_ENABLE_CHECKS CheckCast(value); #endif return static_cast<Integer*>(value); } Date* Date::Cast(v8::Value* value) { #ifdef V8_ENABLE_CHECKS CheckCast(value); #endif return static_cast<Date*>(value); } StringObject* StringObject::Cast(v8::Value* value) { #ifdef V8_ENABLE_CHECKS CheckCast(value); #endif return static_cast<StringObject*>(value); } NumberObject* NumberObject::Cast(v8::Value* value) { #ifdef V8_ENABLE_CHECKS CheckCast(value); #endif return static_cast<NumberObject*>(value); } BooleanObject* BooleanObject::Cast(v8::Value* value) { #ifdef 
V8_ENABLE_CHECKS CheckCast(value); #endif return static_cast<BooleanObject*>(value); } RegExp* RegExp::Cast(v8::Value* value) { #ifdef V8_ENABLE_CHECKS CheckCast(value); #endif return static_cast<RegExp*>(value); } Object* Object::Cast(v8::Value* value) { #ifdef V8_ENABLE_CHECKS CheckCast(value); #endif return static_cast<Object*>(value); } Array* Array::Cast(v8::Value* value) { #ifdef V8_ENABLE_CHECKS CheckCast(value); #endif return static_cast<Array*>(value); } Function* Function::Cast(v8::Value* value) { #ifdef V8_ENABLE_CHECKS CheckCast(value); #endif return static_cast<Function*>(value); } External* External::Cast(v8::Value* value) { #ifdef V8_ENABLE_CHECKS CheckCast(value); #endif return static_cast<External*>(value); } Local<Value> AccessorInfo::Data() const { return Local<Value>(reinterpret_cast<Value*>(&args_[-2])); } Local<Object> AccessorInfo::This() const { return Local<Object>(reinterpret_cast<Object*>(&args_[0])); } Local<Object> AccessorInfo::Holder() const { return Local<Object>(reinterpret_cast<Object*>(&args_[-1])); } /** * \example shell.cc * A simple shell that takes a list of expressions on the * command-line and executes them. */ /** * \example process.cc */ } // namespace v8 #undef V8EXPORT #undef TYPE_CHECK #endif // V8_H_
{ "content_hash": "f6ba816c1fc8646cf7e3918ff61a7f04", "timestamp": "", "source": "github", "line_count": 4210, "max_line_length": 96, "avg_line_length": 31.760570071258908, "alnum_prop": 0.6863482709106139, "repo_name": "davidbetz/netrouter", "id": "4b7f6e735fe7cc98d758dcc8e28d3c21625bd4cd", "size": "133712", "binary": false, "copies": "11", "ref": "refs/heads/master", "path": "_REFERENCE/v8/v8.h", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "78" }, { "name": "C", "bytes": "306561" }, { "name": "C#", "bytes": "374477" }, { "name": "C++", "bytes": "510399" }, { "name": "JavaScript", "bytes": "63463" }, { "name": "Smalltalk", "bytes": "311126" } ], "symlink_target": "" }
package com.nike.cerberus.util; import static org.assertj.core.api.Fail.fail; import java.util.UUID; import org.junit.Before; import org.junit.Test; public class UuidSupplierTest { private UuidSupplier subject; @Before public void setUp() throws Exception { subject = new UuidSupplier(); } @Test public void get_returns_valid_uuid() { final String uuid = subject.get(); try { UUID.fromString(uuid); } catch (IllegalArgumentException iae) { fail("UUID generated unable to be parsed by UUID.fromString()"); } } }
{ "content_hash": "65dbc8236a9f1db0ea195b8724b3dd9e", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 70, "avg_line_length": 18.866666666666667, "alnum_prop": 0.6890459363957597, "repo_name": "Nike-Inc/cerberus", "id": "bfaeeda62fcbb8989751e47d1ad8456d0dbd012c", "size": "1160", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "cerberus-web/src/test/java/com/nike/cerberus/util/UuidSupplierTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "1126" }, { "name": "Groovy", "bytes": "123084" }, { "name": "HTML", "bytes": "2160" }, { "name": "Java", "bytes": "1337217" }, { "name": "JavaScript", "bytes": "276262" }, { "name": "SCSS", "bytes": "73750" }, { "name": "Shell", "bytes": "3122" } ], "symlink_target": "" }
module StowCookbook # Methods to build proper stow command invocation module Command # Wrap most specific stow binary & flags into a method def stow(type = nil) Chef::Log.debug ".stow: #{stow_command(type)} #{stow_command_flags}" "#{stow_command(type)} #{stow_command_flags}" end # package delimiter to try to group packages within stow directory def pkg_delim '-+-' end # Stow target path def stow_target blank?(node['stow']['target']) ? nil : node['stow']['target'] end # Stow directory path def stow_path blank?(node['stow']['path']) ? nil : node['stow']['path'] end # Most specific stow target path def stow_resolved_target blank?(stow_target) ? "#{stow_path}/.." : stow_target end # List all outdated package versions in stow's path directory def old_stow_packages(pkg_name, version) old_versions = [] # Iterate over directories that match name & delimiter Dir.glob("#{stow_path}/#{pkg_name}#{pkg_delim}*") do |pkg_path| old_versions << File.basename(pkg_path) end unless blank?(old_versions) # Remove the up to date package from array if it exists configured_version = ["#{pkg_name}#{pkg_delim}#{version}"] old_versions -= configured_version end Chef::Log.debug ".old_stow_packages: #{old_versions}" old_versions end # Determine if specified package version is already stowed # creates should be a relative path to a file to check existence of # e.g., 'bin/openssl' def package_stowed?(name, version, creates) package_stowed = nil if ::File.exist?("#{stow_resolved_target}/#{creates}") # Determine if the created file points to the proper version of the pkg package_stowed = ::File.realpath(stowed_symlink_path(creates)) .include?("#{name}#{pkg_delim}#{version}") else # Creates file path is not found in the target path, package not stowed package_stowed = false end Chef::Log.debug ".package_stowed?: #{package_stowed}" package_stowed ? 
true : false end # rubocop:disable Metrics/MethodLength # Determine full path for currently stowed package def stowed_symlink_path(creates) stowed_symlink = nil creates_path = creates.split('/') creates_path.clone.each do # Detect lowest level symlink from created_file within stow_path if ::File.symlink?("#{stow_resolved_target}/#{creates_path.join('/')}") stowed_symlink = "#{stow_resolved_target}/#{creates_path.join('/')}" # Symlink found, break and use creates_path for stowed file # stowed_path = creates_path.join('/') break else # Remove lowest path if not a symlink creates_path.pop(1) end end Chef::Log.debug ".stowed_symlink_path: #{stowed_symlink}" stowed_symlink end # Detect which stow command binary to invoke # Order of precedence: -t flag > -d flag > 'stow' def stow_command(type = nil) command = nil if !blank?(stow_target) && stow_target_bin_exists? # Use the target path stow if it exists command = "#{stow_target}/bin/stow" elsif !blank?(stow_path) && stow_path_bin_exists? # Use the parent dir for stow path if it exists & there's no target command = "#{stow_path}/../bin/stow" end # Override to use the buildout path stow if specified command = "#{stow_buildout_path}/bin/stow" if type == 'buildout' # Default to PATH detected stow command ||= 'stow' command end # Set stow command flags def stow_command_flags flags = '' flags += "-t #{stow_target}" unless stow_target.nil? flags += "-d #{stow_path}" unless stow_path.nil? flags end # Stow buildout path - for stow source compile prefix def stow_buildout_path "#{node['stow']['path']}/stow#{pkg_delim}#{node['stow']['version']}" end # Stow target bin file check def stow_target_bin_exists? ::File.exist?("#{stow_target}/bin/stow") end # Stow path bin file check def stow_path_bin_exists? ::File.exist?("#{stow_path}/../bin/stow") end end end
{ "content_hash": "0126be68eb844a8a7cb2b58cb6b7986c", "timestamp": "", "source": "github", "line_count": 128, "max_line_length": 79, "avg_line_length": 33.7109375, "alnum_prop": 0.6155272305909618, "repo_name": "stevenhaddox/cookbook-stow", "id": "ad73e0d79b9adefaac0765ae04382a64f4e98a54", "size": "4315", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "libraries/command.rb", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "240" }, { "name": "Ruby", "bytes": "21061" } ], "symlink_target": "" }
{# basic/sourcelink.html ~~~~~~~~~~~~~~~~~~~~~ Sphinx sidebar template: "show source" link. :copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. #} {%- if show_source and has_source and sourcename %} <div role="note" aria-label="source link"> <h3>{{ _('This Page') }}</h3> <ul class="this-page-menu"> <li><a href="{{ pathto('_sources/' + sourcename, true)|e }}" rel="nofollow">{{ _('Show Source') }}</a></li> </ul> </div> {%- endif %}
{ "content_hash": "7cc29ff44e39bae817887518e88a5ed7", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 68, "avg_line_length": 30.22222222222222, "alnum_prop": 0.5551470588235294, "repo_name": "lmregus/Portfolio", "id": "4ec8fa7c04de15aab5971ae2f5a5d8dca44158e3", "size": "544", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "python/design_patterns/env/lib/python3.7/site-packages/sphinx/themes/basic/sourcelink.html", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "27682" }, { "name": "C++", "bytes": "25458" }, { "name": "CSS", "bytes": "12842" }, { "name": "HTML", "bytes": "49171" }, { "name": "Java", "bytes": "99711" }, { "name": "JavaScript", "bytes": "827" }, { "name": "Python", "bytes": "42857" }, { "name": "Shell", "bytes": "5710" } ], "symlink_target": "" }
package com.cws.esolutions.web.enums; /* * Project: eSolutions_java_source * Package: com.cws.esolutions.web.enums * File: ServiceModificationType.java * * History * * Author Date Comments * ---------------------------------------------------------------------------- * kmhuntly@gmail.com 11/23/2008 22:39:20 Created. */ /** * @author khuntly * @version 1.0 */ public enum ServiceModificationType { DATACENTER, PLATFORM; }
{ "content_hash": "4561d6cc292dffdea4719002976a22c6", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 79, "avg_line_length": 22.727272727272727, "alnum_prop": 0.51, "repo_name": "cwsus/esolutions", "id": "e17bfa6da9461b685a5bcdedba2983720f071e0e", "size": "997", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "web/eSolutions/jars/eSolutions_java_source/src/main/java/com/cws/esolutions/web/enums/ServiceModificationType.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "6692" }, { "name": "HTML", "bytes": "3903" }, { "name": "Java", "bytes": "3529458" }, { "name": "JavaScript", "bytes": "4555" }, { "name": "Jinja", "bytes": "188105" } ], "symlink_target": "" }
package Google::Ads::AdWords::v201402::FeedItemOperation; use strict; use warnings; __PACKAGE__->_set_element_form_qualified(1); sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201402' }; our $XML_ATTRIBUTE_CLASS; undef $XML_ATTRIBUTE_CLASS; sub __get_attr_class { return $XML_ATTRIBUTE_CLASS; } use base qw(Google::Ads::AdWords::v201402::Operation); # Variety: sequence use Class::Std::Fast::Storable constructor => 'none'; use base qw(Google::Ads::SOAP::Typelib::ComplexType); { # BLOCK to scope variables my %operator_of :ATTR(:get<operator>); my %Operation__Type_of :ATTR(:get<Operation__Type>); my %operand_of :ATTR(:get<operand>); __PACKAGE__->_factory( [ qw( operator Operation__Type operand ) ], { 'operator' => \%operator_of, 'Operation__Type' => \%Operation__Type_of, 'operand' => \%operand_of, }, { 'operator' => 'Google::Ads::AdWords::v201402::Operator', 'Operation__Type' => 'SOAP::WSDL::XSD::Typelib::Builtin::string', 'operand' => 'Google::Ads::AdWords::v201402::FeedItem', }, { 'operator' => 'operator', 'Operation__Type' => 'Operation.Type', 'operand' => 'operand', } ); } # end BLOCK 1; =pod =head1 NAME Google::Ads::AdWords::v201402::FeedItemOperation =head1 DESCRIPTION Perl data type class for the XML Schema defined complexType FeedItemOperation from the namespace https://adwords.google.com/api/adwords/cm/v201402. FeedItem service mutate operation. =head2 PROPERTIES The following properties may be accessed using get_PROPERTY / set_PROPERTY methods: =over =item * operand =back =head1 METHODS =head2 new Constructor. The following data structure may be passed to new(): =head1 AUTHOR Generated by SOAP::WSDL =cut
{ "content_hash": "3aafea648e2209e59cc2627b28cfed03", "timestamp": "", "source": "github", "line_count": 111, "max_line_length": 87, "avg_line_length": 16.513513513513512, "alnum_prop": 0.6513911620294599, "repo_name": "gitpan/GOOGLE-ADWORDS-PERL-CLIENT", "id": "165606e72533a24c2bced1c10027ccbe53789cb2", "size": "1833", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/Google/Ads/AdWords/v201402/FeedItemOperation.pm", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Perl", "bytes": "26706635" } ], "symlink_target": "" }
var $ = require('../internals/export'); var parseInt = require('../internals/number-parse-int'); // `Number.parseInt` method // https://tc39.es/ecma262/#sec-number.parseint // eslint-disable-next-line es/no-number-parseint -- required for testing $({ target: 'Number', stat: true, forced: Number.parseInt != parseInt }, { parseInt: parseInt });
{ "content_hash": "0b073437f018b9e70750e94749aad150", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 74, "avg_line_length": 38.666666666666664, "alnum_prop": 0.6954022988505747, "repo_name": "cloudfoundry-community/asp.net5-buildpack", "id": "7c6773da62875e50630b78b5c405e91b9e0a7560", "size": "348", "binary": false, "copies": "12", "ref": "refs/heads/master", "path": "fixtures/node_apps/angular_dotnet/ClientApp/node_modules/core-js/modules/es.number.parse-int.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Ruby", "bytes": "61792" } ], "symlink_target": "" }
package org.jetbrains.plugins.groovy.util; import com.intellij.codeInsight.TailType; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.psi.codeStyle.CommonCodeStyleSettings; /** * @author Sergey Evdokimov */ public class FieldInitializerTailTypes extends TailType { public static final TailType EQ_CLOSURE = new FieldInitializerTailTypes("{}", 1); public static final TailType EQ_ARRAY = new FieldInitializerTailTypes("[]", 1); public static final TailType EQ_STRING_ARRAY = new FieldInitializerTailTypes("['']", 2); public static final TailType EQ_STRING = new FieldInitializerTailTypes("\"\"", 1); private final String myText; private final int myPosition; public FieldInitializerTailTypes(String text, int position) { myText = text; myPosition = position; } @Override public int processTail(Editor editor, int tailOffset) { CommonCodeStyleSettings styleSettings = getLocalCodeStyleSettings(editor, tailOffset); Document document = editor.getDocument(); CharSequence chars = document.getCharsSequence(); int textLength = chars.length(); if (tailOffset < textLength - 1 && chars.charAt(tailOffset) == ' ' && chars.charAt(tailOffset + 1) == '='){ return moveCaret(editor, tailOffset, 2); } if (tailOffset < textLength && chars.charAt(tailOffset) == '='){ return moveCaret(editor, tailOffset, 1); } if (styleSettings.SPACE_AROUND_ASSIGNMENT_OPERATORS){ document.insertString(tailOffset, " ="); tailOffset = moveCaret(editor, tailOffset, 2); } else{ document.insertString(tailOffset, "="); tailOffset = moveCaret(editor, tailOffset, 1); } if (styleSettings.SPACE_AROUND_ASSIGNMENT_OPERATORS){ tailOffset = insertChar(editor, tailOffset, ' '); } document.insertString(tailOffset, myText); return moveCaret(editor, tailOffset, myPosition); } }
{ "content_hash": "1fb572fcc398deae5c6d136f7b3b2f00", "timestamp": "", "source": "github", "line_count": 56, "max_line_length": 111, "avg_line_length": 34.75, "alnum_prop": 0.7153134635149023, "repo_name": "jk1/intellij-community", "id": "4e0fb697f846411f173036b02a01d064972e57e9", "size": "1946", "binary": false, "copies": "9", "ref": "refs/heads/master", "path": "plugins/groovy/src/org/jetbrains/plugins/groovy/util/FieldInitializerTailTypes.java", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/cx/v3/deployment.proto package com.google.cloud.dialogflow.cx.v3; public interface ListDeploymentsRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:google.cloud.dialogflow.cx.v3.ListDeploymentsRequest) com.google.protobuf.MessageOrBuilder { /** * * * <pre> * Required. The [Environment][google.cloud.dialogflow.cx.v3.Environment] to list all environments for. * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent * ID&gt;/environments/&lt;Environment ID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ java.lang.String getParent(); /** * * * <pre> * Required. The [Environment][google.cloud.dialogflow.cx.v3.Environment] to list all environments for. * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent * ID&gt;/environments/&lt;Environment ID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ com.google.protobuf.ByteString getParentBytes(); /** * * * <pre> * The maximum number of items to return in a single page. By default 20 and * at most 100. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ int getPageSize(); /** * * * <pre> * The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ java.lang.String getPageToken(); /** * * * <pre> * The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ com.google.protobuf.ByteString getPageTokenBytes(); }
{ "content_hash": "414724c93739fa0ee900446876aaebbe", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 107, "avg_line_length": 25.44578313253012, "alnum_prop": 0.6273674242424242, "repo_name": "googleapis/java-dialogflow-cx", "id": "7a416f1e5f535838f9244b5dd2cb59315f96ab0a", "size": "2706", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "proto-google-cloud-dialogflow-cx-v3/src/main/java/com/google/cloud/dialogflow/cx/v3/ListDeploymentsRequestOrBuilder.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "801" }, { "name": "Java", "bytes": "31858414" }, { "name": "Python", "bytes": "1229" }, { "name": "Shell", "bytes": "20462" } ], "symlink_target": "" }
{% extends 'parkmap/base.html' %} {% block title %}{% endblock %} {% block meta %}{% endblock %} {% block bodyclass %} <body class="neighborhood"> {% endblock %} {% block content %} <div> {{ story.text }} </div> {% endblock %}
{ "content_hash": "208e26b3457596591400c56b918d4f37", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 97, "avg_line_length": 14.3, "alnum_prop": 0.47202797202797203, "repo_name": "MAPC/bostonparks", "id": "35bb46e98e9c9851a1b12f03084eb106474c2e92", "size": "286", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "parkmap/templates/parkmap/story.html", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "JavaScript", "bytes": "116604" }, { "name": "Python", "bytes": "61825" } ], "symlink_target": "" }
<?php defined('BASEPATH') OR exit('No direct script access allowed'); class Welcome extends CI_Controller{ public function index(){ $query = $this->db->query('SELECT * FROM images LIMIT 20'); $this->load->view('welcome_message', array( 'resultset' => $query->result_array() )); } }
{ "content_hash": "057e6caac2828a21fee764a486b41429", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 63, "avg_line_length": 26.727272727272727, "alnum_prop": 0.6666666666666666, "repo_name": "gftools/collage-board", "id": "6b8f87b30e06795baa14beee7d0ceddfdb9931f7", "size": "294", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "application/controllers/Welcome.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "240" }, { "name": "HTML", "bytes": "8456" }, { "name": "PHP", "bytes": "1758739" } ], "symlink_target": "" }
import {SECURITY_SCHEMA} from '@angular/compiler/src/schema/dom_security_schema'; import {LView} from '@angular/core/src/render3/interfaces/view'; import {enterView, leaveView} from '@angular/core/src/render3/state'; import {bypassSanitizationTrustHtml, bypassSanitizationTrustResourceUrl, bypassSanitizationTrustScript, bypassSanitizationTrustStyle, bypassSanitizationTrustUrl} from '../../src/sanitization/bypass'; import {getUrlSanitizer, ɵɵsanitizeHtml, ɵɵsanitizeResourceUrl, ɵɵsanitizeScript, ɵɵsanitizeStyle, ɵɵsanitizeUrl, ɵɵsanitizeUrlOrResourceUrl} from '../../src/sanitization/sanitization'; import {SecurityContext} from '../../src/sanitization/security'; function fakeLView(): LView { return [null, {}] as LView; } describe('sanitization', () => { beforeEach(() => enterView(fakeLView(), null)); afterEach(() => leaveView()); class Wrap { constructor(private value: string) {} toString() { return this.value; } } it('should sanitize html', () => { expect(ɵɵsanitizeHtml('<div></div>')).toEqual('<div></div>'); expect(ɵɵsanitizeHtml(new Wrap('<div></div>'))).toEqual('<div></div>'); expect(ɵɵsanitizeHtml('<img src="javascript:true">')) .toEqual('<img src="unsafe:javascript:true">'); expect(ɵɵsanitizeHtml(new Wrap('<img src="javascript:true">'))) .toEqual('<img src="unsafe:javascript:true">'); expect(() => ɵɵsanitizeHtml(bypassSanitizationTrustUrl('<img src="javascript:true">'))) .toThrowError(/Required a safe HTML, got a URL/); expect(ɵɵsanitizeHtml(bypassSanitizationTrustHtml('<img src="javascript:true">'))) .toEqual('<img src="javascript:true">'); }); it('should sanitize url', () => { expect(ɵɵsanitizeUrl('http://server')).toEqual('http://server'); expect(ɵɵsanitizeUrl(new Wrap('http://server'))).toEqual('http://server'); expect(ɵɵsanitizeUrl('javascript:true')).toEqual('unsafe:javascript:true'); expect(ɵɵsanitizeUrl(new Wrap('javascript:true'))).toEqual('unsafe:javascript:true'); expect(() => ɵɵsanitizeUrl(bypassSanitizationTrustHtml('javascript:true'))) 
.toThrowError(/Required a safe URL, got a HTML/); expect(ɵɵsanitizeUrl(bypassSanitizationTrustUrl('javascript:true'))).toEqual('javascript:true'); }); it('should sanitize resourceUrl', () => { const ERROR = 'unsafe value used in a resource URL context (see http://g.co/ng/security#xss)'; expect(() => ɵɵsanitizeResourceUrl('http://server')).toThrowError(ERROR); expect(() => ɵɵsanitizeResourceUrl('javascript:true')).toThrowError(ERROR); expect(() => ɵɵsanitizeResourceUrl(bypassSanitizationTrustHtml('javascript:true'))) .toThrowError(/Required a safe ResourceURL, got a HTML/); expect(ɵɵsanitizeResourceUrl(bypassSanitizationTrustResourceUrl('javascript:true'))) .toEqual('javascript:true'); }); it('should sanitize style', () => { expect(ɵɵsanitizeStyle('red')).toEqual('red'); expect(ɵɵsanitizeStyle(new Wrap('red'))).toEqual('red'); expect(ɵɵsanitizeStyle('url("http://server")')).toEqual('unsafe'); expect(ɵɵsanitizeStyle(new Wrap('url("http://server")'))).toEqual('unsafe'); expect(() => ɵɵsanitizeStyle(bypassSanitizationTrustHtml('url("http://server")'))) .toThrowError(/Required a safe Style, got a HTML/); expect(ɵɵsanitizeStyle(bypassSanitizationTrustStyle('url("http://server")'))) .toEqual('url("http://server")'); }); it('should sanitize script', () => { const ERROR = 'unsafe value used in a script context'; expect(() => ɵɵsanitizeScript('true')).toThrowError(ERROR); expect(() => ɵɵsanitizeScript('true')).toThrowError(ERROR); expect(() => ɵɵsanitizeScript(bypassSanitizationTrustHtml('true'))) .toThrowError(/Required a safe Script, got a HTML/); expect(ɵɵsanitizeScript(bypassSanitizationTrustScript('true'))).toEqual('true'); }); it('should select correct sanitizer for URL props', () => { // making sure security schema we have on compiler side is in sync with the `getUrlSanitizer` // runtime function definition const schema = SECURITY_SCHEMA(); const contextsByProp: Map<string, Set<number>> = new Map(); const sanitizerNameByContext: Map<number, Function> = new Map([ 
[SecurityContext.URL, ɵɵsanitizeUrl], [SecurityContext.RESOURCE_URL, ɵɵsanitizeResourceUrl] ]); Object.keys(schema).forEach(key => { const context = schema[key]; if (context === SecurityContext.URL || SecurityContext.RESOURCE_URL) { const [tag, prop] = key.split('|'); const contexts = contextsByProp.get(prop) || new Set<number>(); contexts.add(context); contextsByProp.set(prop, contexts); // check only in case a prop can be a part of both URL contexts if (contexts.size === 2) { expect(getUrlSanitizer(tag, prop)).toEqual(sanitizerNameByContext.get(context) !); } } }); }); it('should sanitize resourceUrls via sanitizeUrlOrResourceUrl', () => { const ERROR = 'unsafe value used in a resource URL context (see http://g.co/ng/security#xss)'; expect(() => ɵɵsanitizeUrlOrResourceUrl('http://server', 'iframe', 'src')).toThrowError(ERROR); expect(() => ɵɵsanitizeUrlOrResourceUrl('javascript:true', 'iframe', 'src')) .toThrowError(ERROR); expect( () => ɵɵsanitizeUrlOrResourceUrl( bypassSanitizationTrustHtml('javascript:true'), 'iframe', 'src')) .toThrowError(/Required a safe ResourceURL, got a HTML/); expect(ɵɵsanitizeUrlOrResourceUrl( bypassSanitizationTrustResourceUrl('javascript:true'), 'iframe', 'src')) .toEqual('javascript:true'); }); it('should sanitize urls via sanitizeUrlOrResourceUrl', () => { expect(ɵɵsanitizeUrlOrResourceUrl('http://server', 'a', 'href')).toEqual('http://server'); expect(ɵɵsanitizeUrlOrResourceUrl(new Wrap('http://server'), 'a', 'href')) .toEqual('http://server'); expect(ɵɵsanitizeUrlOrResourceUrl('javascript:true', 'a', 'href')) .toEqual('unsafe:javascript:true'); expect(ɵɵsanitizeUrlOrResourceUrl(new Wrap('javascript:true'), 'a', 'href')) .toEqual('unsafe:javascript:true'); expect( () => ɵɵsanitizeUrlOrResourceUrl(bypassSanitizationTrustHtml('javascript:true'), 'a', 'href')) .toThrowError(/Required a safe URL, got a HTML/); expect(ɵɵsanitizeUrlOrResourceUrl(bypassSanitizationTrustUrl('javascript:true'), 'a', 'href')) .toEqual('javascript:true'); 
}); });
{ "content_hash": "f7648c0ef9331d129b7e049daf596244", "timestamp": "", "source": "github", "line_count": 128, "max_line_length": 199, "avg_line_length": 50.984375, "alnum_prop": 0.6788231688630095, "repo_name": "snaptech/angular", "id": "2c7a8d2650c61de2eb74eb090da8518512212b4f", "size": "6816", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "packages/core/test/sanitization/sanitization_spec.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "345100" }, { "name": "Dockerfile", "bytes": "11884" }, { "name": "HTML", "bytes": "484280" }, { "name": "JSONiq", "bytes": "619" }, { "name": "JavaScript", "bytes": "2440732" }, { "name": "PHP", "bytes": "7222" }, { "name": "PowerShell", "bytes": "2904" }, { "name": "Python", "bytes": "404839" }, { "name": "Shell", "bytes": "92021" }, { "name": "TypeScript", "bytes": "21577713" } ], "symlink_target": "" }
#ifndef QUECTEL_BC95_CELLULAR_POWER_H_ #define QUECTEL_BC95_CELLULAR_POWER_H_ #include "AT_CellularPower.h" namespace mbed { class QUECTEL_BC95_CellularPower : public AT_CellularPower { public: QUECTEL_BC95_CellularPower(ATHandler &atHandler); virtual ~QUECTEL_BC95_CellularPower(); public: //from CellularPower virtual nsapi_error_t set_at_mode(); virtual nsapi_error_t reset(); }; } // namespace mbed #endif // QUECTEL_BC95_CELLULAR_POWER_H_
{ "content_hash": "bfd880d94921c07c20165e6b45f1d3ca", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 60, "avg_line_length": 20.391304347826086, "alnum_prop": 0.7313432835820896, "repo_name": "betzw/mbed-os", "id": "a10ab3e5d36532c439fd2e66f18a03d7d6848de7", "size": "1123", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "features/cellular/framework/targets/QUECTEL/BC95/QUECTEL_BC95_CellularPower.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "AMPL", "bytes": "10950" }, { "name": "Assembly", "bytes": "7623507" }, { "name": "Batchfile", "bytes": "22" }, { "name": "C", "bytes": "374441509" }, { "name": "C++", "bytes": "14180257" }, { "name": "CMake", "bytes": "22983" }, { "name": "HTML", "bytes": "1421788" }, { "name": "Makefile", "bytes": "119198" }, { "name": "Objective-C", "bytes": "74923" }, { "name": "Perl", "bytes": "2589" }, { "name": "Python", "bytes": "1347030" }, { "name": "Shell", "bytes": "88415" }, { "name": "XSLT", "bytes": "8394" } ], "symlink_target": "" }
import java.util.function.Function; public class ValDelegateMethodReference { public void config() { final Column<Entity, java.lang.String> column = createColumn(Entity::getValue); } private <V> Column<Entity, V> createColumn(Function<Entity, V> func) { return new Column<>(func); } } class Column<T, V> { public Column(Function<T, V> vp) { } } class Entity { private MyDelegate innerDelegate; @java.lang.SuppressWarnings("all") public java.lang.String getValue() { return this.innerDelegate.getValue(); } @java.lang.SuppressWarnings("all") public java.lang.Boolean getABoolean() { return this.innerDelegate.getABoolean(); } @java.lang.SuppressWarnings("all") public void setValue(final java.lang.String value) { this.innerDelegate.setValue(value); } @java.lang.SuppressWarnings("all") public void setABoolean(final java.lang.Boolean aBoolean) { this.innerDelegate.setABoolean(aBoolean); } } class MyDelegate { private String value; private Boolean aBoolean; @java.lang.SuppressWarnings("all") public String getValue() { return this.value; } @java.lang.SuppressWarnings("all") public Boolean getABoolean() { return this.aBoolean; } @java.lang.SuppressWarnings("all") public void setValue(final String value) { this.value = value; } @java.lang.SuppressWarnings("all") public void setABoolean(final Boolean aBoolean) { this.aBoolean = aBoolean; } }
{ "content_hash": "a1c7ed2bd5d601018083fe0662be85fb", "timestamp": "", "source": "github", "line_count": 65, "max_line_length": 81, "avg_line_length": 21.723076923076924, "alnum_prop": 0.7322946175637394, "repo_name": "Lekanich/lombok", "id": "186e2aaa37396257ecc76416ea36434cc058a7de", "size": "1412", "binary": false, "copies": "1", "ref": "refs/heads/master2", "path": "test/transform/resource/after-delombok/ValDelegateMethodReference.java", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "1905" }, { "name": "CSS", "bytes": "5532" }, { "name": "HTML", "bytes": "219607" }, { "name": "Java", "bytes": "2661787" }, { "name": "JavaScript", "bytes": "4148" } ], "symlink_target": "" }
<resources> <!-- Base application theme. --> <style name="AppTheme" parent="Base.Theme.AppCompat.Light.DarkActionBar"> <!-- Customize your theme here. --> </style> </resources>
{ "content_hash": "37f0261c5add9a2a4fb02edf5c0ad227", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 77, "avg_line_length": 23.22222222222222, "alnum_prop": 0.5933014354066986, "repo_name": "koustuvsinha/androidSensorsDemo", "id": "491af76e275955f3447a675e6bb0c0b0e4c3276b", "size": "209", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/res/values/styles.xml", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "36086" } ], "symlink_target": "" }
package bsh; import java.io.*; import java.util.*; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; /** This class is an implementation of the ClassGenerator interface which contains generally bsh related code. The actual bytecode generation is done by ClassGeneratorUtil. @author Pat Niemeyer (pat@pat.net) */ public class ClassGeneratorImpl extends ClassGenerator { public Class generateClass( String name, Modifiers modifiers, Class [] interfaces, Class superClass, BSHBlock block, boolean isInterface, CallStack callstack, Interpreter interpreter ) throws EvalError { // Delegate to the static method return generateClassImpl( name, modifiers, interfaces, superClass, block, isInterface, callstack, interpreter ); } public Object invokeSuperclassMethod( BshClassManager bcm, Object instance, String methodName, Object [] args ) throws UtilEvalError, ReflectError, InvocationTargetException { // Delegate to the static method return invokeSuperclassMethodImpl( bcm, instance, methodName, args ); } /** Change the parent of the class instance namespace. This is currently used for inner class support. Note: This method will likely be removed in the future. */ // This could be static public void setInstanceNameSpaceParent( Object instance, String className, NameSpace parent ) { This ithis = ClassGeneratorUtil.getClassInstanceThis( instance, className ); ithis.getNameSpace().setParent( parent ); } /** If necessary, parse the BSHBlock for for the class definition and generate the class using ClassGeneratorUtil. This method also initializes the static block namespace and sets it in the class. */ public static Class generateClassImpl( String name, Modifiers modifiers, Class [] interfaces, Class superClass, BSHBlock block, boolean isInterface, CallStack callstack, Interpreter interpreter ) throws EvalError { // Scripting classes currently requires accessibility // This can be eliminated with a bit more work. 
try { Capabilities.setAccessibility( true ); } catch ( Capabilities.Unavailable e ) { throw new EvalError( "Defining classes currently requires reflective Accessibility.", block, callstack ); } NameSpace enclosingNameSpace = callstack.top(); String packageName = enclosingNameSpace.getPackage(); String className = enclosingNameSpace.isClass ? ( enclosingNameSpace.getName()+"$"+name ) : name; String fqClassName = packageName == null ? className : packageName + "." + className; String bshStaticFieldName = ClassGeneratorUtil.BSHSTATIC+className; BshClassManager bcm = interpreter.getClassManager(); // Race condition here... bcm.definingClass( fqClassName ); // Create the class static namespace NameSpace classStaticNameSpace = new NameSpace( enclosingNameSpace, className); classStaticNameSpace.isClass = true; callstack.push( classStaticNameSpace ); // Evaluate any inner class class definitions in the block // effectively recursively call this method for contained classes first block.evalBlock( callstack, interpreter, true/*override*/, ClassNodeFilter.CLASSCLASSES ); // Generate the type for our class Variable [] variables = getDeclaredVariables( block, callstack, interpreter, packageName ); DelayedEvalBshMethod [] methods = getDeclaredMethods( block, callstack, interpreter, packageName ); // Create the class generator, which encapsulates all knowledge of the // structure of the class ClassGeneratorUtil classGenerator = new ClassGeneratorUtil( modifiers, className, packageName, superClass, interfaces, variables, methods, isInterface ); // Check for existing class (saved class file) Class clas = bcm.getAssociatedClass( fqClassName ); // If the class isn't there then generate it. // Else just let it be initialized below. 
if ( clas == null ) { // generate bytecode, optionally with static init hooks to // bootstrap the interpreter byte [] code = classGenerator.generateClass( Interpreter.getSaveClasses()/*init code*/ ); if ( Interpreter.getSaveClasses() ) saveClasses( className, code ); else clas = bcm.defineClass( fqClassName, code ); } // If we're just saving clases then don't actually execute the static // code for the class here. if ( !Interpreter.getSaveClasses() ) { // Let the class generator install hooks relating to the structure of // the class into the class static namespace. e.g. the constructor // array. This is necessary whether we are generating code or just // reinitializing a previously generated class. classGenerator.initStaticNameSpace( classStaticNameSpace, block/*instance initializer*/ ); // import the unqualified class name into parent namespace enclosingNameSpace.importClass( fqClassName.replace('$','.') ); // Give the static space its class static import // important to do this after all classes are defined classStaticNameSpace.setClassStatic( clas ); // evaluate the static portion of the block in the static space block.evalBlock( callstack, interpreter, true/*override*/, ClassNodeFilter.CLASSSTATIC ); if ( !clas.isInterface() ) installStaticBlock( clas, bshStaticFieldName, classStaticNameSpace, interpreter ); } callstack.pop(); bcm.doneDefiningClass( fqClassName ); return clas; } private static void installStaticBlock( Class genClass, String bshStaticFieldName, NameSpace classStaticNameSpace, Interpreter interpreter ) { // Set the static bsh This callback try { LHS lhs = Reflect.getLHSStaticField( genClass, bshStaticFieldName ); lhs.assign( classStaticNameSpace.getThis( interpreter ), false/*strict*/ ); } catch ( Exception e ) { throw new InterpreterError("Error in class gen setup: "+e ); } } private static void saveClasses( String className, byte[] code ) { String dir = Interpreter.getSaveClassesDir(); if ( dir != null ) try { FileOutputStream out= new 
FileOutputStream( dir+"/"+className+".class" ); out.write(code); out.close(); } catch ( IOException e ) { e.printStackTrace(); } } static Variable [] getDeclaredVariables( BSHBlock body, CallStack callstack, Interpreter interpreter, String defaultPackage ) { List vars = new ArrayList(); for( int child=0; child<body.jjtGetNumChildren(); child++ ) { SimpleNode node = (SimpleNode)body.jjtGetChild(child); if ( node instanceof BSHTypedVariableDeclaration ) { BSHTypedVariableDeclaration tvd = (BSHTypedVariableDeclaration)node; Modifiers modifiers = tvd.modifiers; String type = tvd.getTypeDescriptor( callstack, interpreter, defaultPackage ); BSHVariableDeclarator [] vardec = tvd.getDeclarators(); for( int i = 0; i< vardec.length; i++) { String name = vardec[i].name; try { Variable var = new Variable( name, type, null/*value*/, modifiers ); vars.add( var ); } catch ( UtilEvalError e ) { // value error shouldn't happen } } } } return (Variable [])vars.toArray( new Variable[0] ); } static DelayedEvalBshMethod [] getDeclaredMethods( BSHBlock body, CallStack callstack, Interpreter interpreter, String defaultPackage ) { List methods = new ArrayList(); for( int child=0; child<body.jjtGetNumChildren(); child++ ) { SimpleNode node = (SimpleNode)body.jjtGetChild(child); if ( node instanceof BSHMethodDeclaration ) { BSHMethodDeclaration md = (BSHMethodDeclaration)node; md.insureNodesParsed(); Modifiers modifiers = md.modifiers; String name = md.name; String returnType = md.getReturnTypeDescriptor( callstack, interpreter, defaultPackage ); BSHReturnType returnTypeNode = md.getReturnTypeNode(); BSHFormalParameters paramTypesNode = md.paramsNode; String [] paramTypes = paramTypesNode.getTypeDescriptors( callstack, interpreter, defaultPackage ); DelayedEvalBshMethod bm = new DelayedEvalBshMethod( name, returnType, returnTypeNode, md.paramsNode.getParamNames(), paramTypes, paramTypesNode, md.blockNode, null/*declaringNameSpace*/, modifiers, callstack, interpreter ); methods.add( bm ); } 
} return (DelayedEvalBshMethod [])methods.toArray( new DelayedEvalBshMethod[0] ); } /** A node filter that filters nodes for either a class body static initializer or instance initializer. In the static case only static members are passed, etc. */ static class ClassNodeFilter implements BSHBlock.NodeFilter { public static final int STATIC=0, INSTANCE=1, CLASSES=2; public static ClassNodeFilter CLASSSTATIC = new ClassNodeFilter( STATIC ); public static ClassNodeFilter CLASSINSTANCE = new ClassNodeFilter( INSTANCE ); public static ClassNodeFilter CLASSCLASSES = new ClassNodeFilter( CLASSES ); int context; private ClassNodeFilter( int context ) { this.context = context; } public boolean isVisible( SimpleNode node ) { if ( context == CLASSES ) return node instanceof BSHClassDeclaration; // Only show class decs in CLASSES if ( node instanceof BSHClassDeclaration ) return false; if ( context == STATIC ) return isStatic( node ); if ( context == INSTANCE ) return !isStatic( node ); // ALL return true; } boolean isStatic( SimpleNode node ) { if ( node instanceof BSHTypedVariableDeclaration ) return ((BSHTypedVariableDeclaration)node).modifiers != null && ((BSHTypedVariableDeclaration)node).modifiers .hasModifier("static"); if ( node instanceof BSHMethodDeclaration ) return ((BSHMethodDeclaration)node).modifiers != null && ((BSHMethodDeclaration)node).modifiers .hasModifier("static"); // need to add static block here if ( node instanceof BSHBlock) return ((BSHBlock)node).isStatic; return false; } } public static Object invokeSuperclassMethodImpl( BshClassManager bcm, Object instance, String methodName, Object [] args ) throws UtilEvalError, ReflectError, InvocationTargetException { String superName = ClassGeneratorUtil.BSHSUPER+methodName; // look for the specially named super delegate method Class clas = instance.getClass(); Method superMethod = Reflect.resolveJavaMethod( bcm, clas, superName, Types.getTypes(args), false/*onlyStatic*/ ); if ( superMethod != null ) return 
Reflect.invokeMethod( superMethod, instance, args ); // No super method, try to invoke regular method // could be a superfluous "super." which is legal. Class superClass = clas.getSuperclass(); superMethod = Reflect.resolveExpectedJavaMethod( bcm, superClass, instance, methodName, args, false/*onlyStatic*/ ); return Reflect.invokeMethod( superMethod, instance, args ); } }
{ "content_hash": "71e3a8c8af45a74589c26df0416f6cc8", "timestamp": "", "source": "github", "line_count": 352, "max_line_length": 73, "avg_line_length": 31.025568181818183, "alnum_prop": 0.7230107133046424, "repo_name": "neoautus/lucidj", "id": "f0682944c93a417b11c7f3a24d19b7269d3de44b", "size": "12896", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modules/beanshell/src/bsh/ClassGeneratorImpl.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "43472" }, { "name": "CSS", "bytes": "19181" }, { "name": "HTML", "bytes": "61234" }, { "name": "Java", "bytes": "3093869" }, { "name": "JavaScript", "bytes": "9182" }, { "name": "Shell", "bytes": "54440" } ], "symlink_target": "" }
import React, { Component } from 'react'; import { Field, reduxForm } from 'redux-form'; import { Link } from 'react-router-dom'; import { connect } from 'react-redux'; import { createPost } from '../actions'; class PostsNew extends Component { renderField(field) { const { meta: { touched, error} } = field; const className = `form-group ${touched && error ? 'has-danger' : ''}`; return ( <div className={className}> <label>{field.label}</label> <input className="form-control" type="text" {...field.input} /> <div className="text-help"> {touched ? error : ''} </div> </div> ); } onSubmit(values) { // go to root route this.props.createPost(values, () => { this.props.history.push('/'); }); } render() { const { handleSubmit } = this.props; return ( <form onSubmit={handleSubmit(this.onSubmit.bind(this))}> <Field label="Title for Post" name="title" component={this.renderField} /> <Field label="Categories" name="categories" component={this.renderField} /> <Field label="Post Content" name="content" component={this.renderField} /> <button type="submit" className="btn btn-primary">Submit</button> <Link to="/" className="btn btn-danger">Cancel</Link> </form> ); } } function validate(values) { const errors = {}; // validate the inputs from 'values' if (!values.title) { errors.title = "Enter a title!"; } if (!values.categories) { errors.categories = "Enter some categories"; } if (!values.content) { errors.content = "Enter some content please"; } // If error is empty, the form is fine to submit // If error has any properties, redux form assumes form is invalid return errors; } export default reduxForm({ validate, form: 'PostsNewForm' })( connect(null, { createPost })(PostsNew) );
{ "content_hash": "2e86e8035a111a484dc885f6d705fb0e", "timestamp": "", "source": "github", "line_count": 85, "max_line_length": 81, "avg_line_length": 28.376470588235293, "alnum_prop": 0.486318407960199, "repo_name": "Ian8829/ReactRouter-ReduxForm", "id": "53dbdca4b10479ddbf4c34f7f1d81ef27f2cab61", "size": "2412", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/components/posts_new.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "33" }, { "name": "HTML", "bytes": "474" }, { "name": "JavaScript", "bytes": "7918" } ], "symlink_target": "" }
#ifndef __MEDIA_RECORDEROBSERVERWORKER_H #define __MEDIA_RECORDEROBSERVERWORKER_H #include <thread> #include <mutex> #include <condition_variable> #include <queue> #include <atomic> #include <functional> #include <iostream> #include <fstream> #include <tinyalsa/tinyalsa.h> #include <media/MediaRecorder.h> #include "MediaWorker.h" #include "MediaRecorderImpl.h" using namespace std; namespace media { class RecorderObserverWorker : public MediaWorker { public: static RecorderObserverWorker& getWorker(); private: RecorderObserverWorker(); virtual ~RecorderObserverWorker(); int entry() override; }; } // namespace media #endif
{ "content_hash": "9baaac1de08ac991c7e4b060b65ddac5", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 49, "avg_line_length": 19.363636363636363, "alnum_prop": 0.7715179968701096, "repo_name": "btheosam/TizenRT", "id": "4b8a1e971ce44e6088987b0d8fc411e125f49407", "size": "1399", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "framework/src/media/RecorderObserverWorker.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "233969" }, { "name": "Batchfile", "bytes": "39014" }, { "name": "C", "bytes": "23535956" }, { "name": "C++", "bytes": "811127" }, { "name": "HTML", "bytes": "2990" }, { "name": "Makefile", "bytes": "547491" }, { "name": "Objective-C", "bytes": "22417" }, { "name": "Perl", "bytes": "4361" }, { "name": "Python", "bytes": "96889" }, { "name": "Shell", "bytes": "152911" }, { "name": "Tcl", "bytes": "163693" } ], "symlink_target": "" }
package com.instaclick.pentaho.plugin.amqp.initializer; import com.instaclick.pentaho.plugin.amqp.listener.ConfirmationAckListener; import com.instaclick.pentaho.plugin.amqp.listener.ConfirmationRejectListener; import com.instaclick.pentaho.plugin.amqp.AMQPPlugin; import com.instaclick.pentaho.plugin.amqp.AMQPPluginData; import com.instaclick.pentaho.plugin.amqp.listener.ConfirmationRowStepListener; import com.rabbitmq.client.Channel; import java.io.IOException; import java.util.ArrayList; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.trans.step.StepInterface; public class ActiveConvirmationInitializer implements Initializer { public static final ActiveConvirmationInitializer INSTANCE = new ActiveConvirmationInitializer(); protected ConfirmationAckListener ackDelivery(final Channel channel, final AMQPPlugin plugin, final AMQPPluginData data) { return new ConfirmationAckListener() { @Override public void ackDelivery(long deliveryTag) throws IOException { if ( ! data.isTransactional) { plugin.logDebug("Immidiate ack message " + deliveryTag); channel.basicAck(deliveryTag, false); data.ack++; return; } plugin.logDebug("Postponed ack message " + deliveryTag); data.ackMsgInTransaction.add(deliveryTag); } }; } protected ConfirmationRejectListener rejectDelivery(final Channel channel, final AMQPPlugin plugin, final AMQPPluginData data) { return new ConfirmationRejectListener() { @Override public void rejectDelivery(long deliveryTag) throws IOException { plugin.incrementLinesRejected(); if ( ! 
data.isTransactional) { plugin.logDebug("Immidiate reject message " + deliveryTag); channel.basicNack(deliveryTag, false, false); data.rejected++; return; } plugin.logDebug("Postponed reject message " + deliveryTag); data.rejectedMsgInTransaction.add(deliveryTag); } }; } @Override public void initialize(final Channel channel, final AMQPPlugin plugin, final AMQPPluginData data) throws IOException, KettleStepException { //bind to step with acknowledge rows on input stream if ( ! Const.isEmpty(data.ackStepName) ) { final StepInterface si = plugin.getTrans().getStepInterface( data.ackStepName, 0); if (si == null) { throw new KettleStepException("Can not find step : " + data.ackStepName ); } if (plugin.getTrans().getStepInterface( data.ackStepName, 1 ) != null) { throw new KettleStepException("Only SINGLE INSTANCE Steps supported : " + data.ackStepName ); } si.addRowListener(new ConfirmationRowStepListener(data.ackStepDeliveryTagField, ackDelivery(channel, plugin, data))); if (data.isTransactional) { data.ackMsgInTransaction = new ArrayList<Long>(); } } //bind to step with rejected rows on input stream if ( ! Const.isEmpty(data.rejectStepName) ) { final StepInterface si = plugin.getTrans().getStepInterface( data.rejectStepName, 0 ); if (si == null) { throw new KettleStepException("Can not find step : " + data.rejectStepName ); } if (plugin.getTrans().getStepInterface( data.rejectStepName, 1 ) != null) { throw new KettleStepException("Only SINGLE INSTANCE Steps supported : " + data.rejectStepName ); } si.addRowListener(new ConfirmationRowStepListener(data.rejectStepDeliveryTagField, rejectDelivery(channel, plugin, data))); if (data.isTransactional) { data.rejectedMsgInTransaction = new ArrayList<Long>(); } } } }
{ "content_hash": "3e7e38b69313dccbdc96c1c94ad67322", "timestamp": "", "source": "github", "line_count": 104, "max_line_length": 141, "avg_line_length": 39.95192307692308, "alnum_prop": 0.6397111913357401, "repo_name": "instaclick/PDI-Plugin-Step-AMQP", "id": "eed6f511d3b2a292de725a0e6b1c9ebfb902f81c", "size": "4155", "binary": false, "copies": "1", "ref": "refs/heads/2.0.x", "path": "src/main/java/com/instaclick/pentaho/plugin/amqp/initializer/ActiveConvirmationInitializer.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "181078" } ], "symlink_target": "" }
import { NgModule } from '@angular/core'; import { RouterModule, Routes } from '@angular/router'; import { TowersComponent } from './towers-list/towers-list.component'; import { TowerDetailComponent } from './tower-detail/tower-detail.component'; import { HeroesComponent } from './heroes-list/heroes-list.component'; import { HeroDetailComponent } from './hero-detail/hero-detail.component'; const routes: Routes = [ { path: '', redirectTo: '/towers', pathMatch: 'full' }, { path: 'towers', component: TowersComponent }, { path: 'towers/:id/heroes', component: HeroesComponent }, { path: 'towers/:id', component: TowerDetailComponent }, { path: 'towers/:id/heroes/:idhero', component: HeroDetailComponent }, { path: 'heroes', component: HeroesComponent } ]; @NgModule({ imports: [ RouterModule.forRoot(routes) ], exports: [ RouterModule ] }) export class AppRoutingModule {}
{ "content_hash": "5003cfbe5b8c449d08823479730dae1b", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 79, "avg_line_length": 40.130434782608695, "alnum_prop": 0.6890574214517876, "repo_name": "ssadiks/form-builder", "id": "5393970d89269cf28eecde82253e076d5bc35145", "size": "923", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/components/heroes/app-routing.module.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "36434" }, { "name": "HTML", "bytes": "15938" }, { "name": "JavaScript", "bytes": "33429" }, { "name": "TypeScript", "bytes": "19010" } ], "symlink_target": "" }
window.mafiaSystem.gangs =[ { type:"gang", title:"Corleone", isInSystem:true, leader:{ name:"Vito Corleone", sex:"male", isInSystem:true }, children:[ { type:"white-business", title:"Tomorrow bar", isInSystem: true, leader:{ name:"Adams Corleone", sex:"male", isInSystem:true } }, { type:"white-business", title:"Galaxy Fortune", isInSystem:true, leader:{ name:"David Corleone", sex:"male", isInSystem:true }, children:[ { type:"white-business", title:"Gamble Team", isInSystem:true } ] }, { type:"black-business", title:"Sisters in Shadow", isInSystem:true, leader:{ name: "Diana Venom", sex:"female", isInSystem:true }, children:[ { name: "Lili Venom", sex:"female", isInSystem:true }, { name: "Roxanne Venom", sex:"female", isInSystem:true } ] } ] } ]; window.mafiaSystem.ratingLevelOptions = ["Bad", "Moderate", "Good"]; window.mafiaSystem.freeEntities.push({ type:"white-business", title:"Galaxy Restaurant", leader:null, children:[] }); updateGangsInfo();
{ "content_hash": "e748cf3e95e7709ccf75c77680df1f68", "timestamp": "", "source": "github", "line_count": 76, "max_line_length": 68, "avg_line_length": 25.973684210526315, "alnum_prop": 0.34295845997973656, "repo_name": "alexzhaosheng/knot.js", "id": "1740cd525522bf993878af06d0000fead8550517", "size": "1974", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "example/js/mafia.data.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "18486" }, { "name": "HTML", "bytes": "74851" }, { "name": "JavaScript", "bytes": "333811" } ], "symlink_target": "" }
package org.jboss.pnc.termdbuilddriver.websockets; /** * @author <a href="mailto:matejonnet@gmail.com">Matej Lazar</a> */ public interface ClientMessageHandler { public void onMessage(byte[] bytes); public void onMessage(String message); }
{ "content_hash": "09004225758cbc77fbca361307b7a85f", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 63, "avg_line_length": 22.818181818181817, "alnum_prop": 0.7370517928286853, "repo_name": "pgier/pnc", "id": "2096b0b20a0a85b17b2e8d578312049469f1a47a", "size": "954", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "termd-build-driver/src/main/java/org/jboss/pnc/termdbuilddriver/websockets/ClientMessageHandler.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "102097" }, { "name": "Groovy", "bytes": "884" }, { "name": "HTML", "bytes": "204866" }, { "name": "Java", "bytes": "1948630" }, { "name": "JavaScript", "bytes": "2724526" }, { "name": "Shell", "bytes": "7028" } ], "symlink_target": "" }
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=US-ASCII"> <title>posix::basic_stream_descriptor::io_control (1 of 2 overloads)</title> <link rel="stylesheet" href="../../../../../../doc/src/boostbook.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.76.1"> <link rel="home" href="../../../../boost_asio.html" title="Boost.Asio"> <link rel="up" href="../io_control.html" title="posix::basic_stream_descriptor::io_control"> <link rel="prev" href="../io_control.html" title="posix::basic_stream_descriptor::io_control"> <link rel="next" href="overload2.html" title="posix::basic_stream_descriptor::io_control (2 of 2 overloads)"> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr> <td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../../boost.png"></td> <td align="center"><a href="../../../../../../index.html">Home</a></td> <td align="center"><a href="../../../../../../libs/libraries.htm">Libraries</a></td> <td align="center"><a href="http://www.boost.org/users/people.html">People</a></td> <td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td> <td align="center"><a href="../../../../../../more/index.htm">More</a></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="../io_control.html"><img src="../../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../io_control.html"><img src="../../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../../boost_asio.html"><img src="../../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="overload2.html"><img src="../../../../../../doc/src/images/next.png" alt="Next"></a> </div> <div class="section"> <div class="titlepage"><div><div><h5 class="title"> <a name="boost_asio.reference.posix__basic_stream_descriptor.io_control.overload1"></a><a class="link" 
href="overload1.html" title="posix::basic_stream_descriptor::io_control (1 of 2 overloads)">posix::basic_stream_descriptor::io_control (1 of 2 overloads)</a> </h5></div></div></div> <p> <span class="emphasis"><em>Inherited from posix::basic_descriptor.</em></span> </p> <p> Perform an IO control command on the descriptor. </p> <pre class="programlisting"><span class="keyword">template</span><span class="special">&lt;</span> <span class="keyword">typename</span> <a class="link" href="../../IoControlCommand.html" title="I/O control command requirements">IoControlCommand</a><span class="special">&gt;</span> <span class="keyword">void</span> <span class="identifier">io_control</span><span class="special">(</span> <span class="identifier">IoControlCommand</span> <span class="special">&amp;</span> <span class="identifier">command</span><span class="special">);</span> </pre> <p> This function is used to execute an IO control command on the descriptor. </p> <h6> <a name="boost_asio.reference.posix__basic_stream_descriptor.io_control.overload1.h0"></a> <span><a name="boost_asio.reference.posix__basic_stream_descriptor.io_control.overload1.parameters"></a></span><a class="link" href="overload1.html#boost_asio.reference.posix__basic_stream_descriptor.io_control.overload1.parameters">Parameters</a> </h6> <div class="variablelist"> <p class="title"><b></b></p> <dl> <dt><span class="term">command</span></dt> <dd><p> The IO control command to be performed on the descriptor. 
</p></dd> </dl> </div> <h6> <a name="boost_asio.reference.posix__basic_stream_descriptor.io_control.overload1.h1"></a> <span><a name="boost_asio.reference.posix__basic_stream_descriptor.io_control.overload1.exceptions"></a></span><a class="link" href="overload1.html#boost_asio.reference.posix__basic_stream_descriptor.io_control.overload1.exceptions">Exceptions</a> </h6> <div class="variablelist"> <p class="title"><b></b></p> <dl> <dt><span class="term">boost::system::system_error</span></dt> <dd><p> Thrown on failure. </p></dd> </dl> </div> <h6> <a name="boost_asio.reference.posix__basic_stream_descriptor.io_control.overload1.h2"></a> <span><a name="boost_asio.reference.posix__basic_stream_descriptor.io_control.overload1.example"></a></span><a class="link" href="overload1.html#boost_asio.reference.posix__basic_stream_descriptor.io_control.overload1.example">Example</a> </h6> <p> Getting the number of bytes ready to read: </p> <pre class="programlisting"><span class="identifier">boost</span><span class="special">::</span><span class="identifier">asio</span><span class="special">::</span><span class="identifier">posix</span><span class="special">::</span><span class="identifier">stream_descriptor</span> <span class="identifier">descriptor</span><span class="special">(</span><span class="identifier">io_service</span><span class="special">);</span> <span class="special">...</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">asio</span><span class="special">::</span><span class="identifier">posix</span><span class="special">::</span><span class="identifier">stream_descriptor</span><span class="special">::</span><span class="identifier">bytes_readable</span> <span class="identifier">command</span><span class="special">;</span> <span class="identifier">descriptor</span><span class="special">.</span><span class="identifier">io_control</span><span class="special">(</span><span class="identifier">command</span><span 
class="special">);</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">size_t</span> <span class="identifier">bytes_readable</span> <span class="special">=</span> <span class="identifier">command</span><span class="special">.</span><span class="identifier">get</span><span class="special">();</span> </pre> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright &#169; 2003-2015 Christopher M. Kohlhoff<p> Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>) </p> </div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="../io_control.html"><img src="../../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../io_control.html"><img src="../../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../../boost_asio.html"><img src="../../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="overload2.html"><img src="../../../../../../doc/src/images/next.png" alt="Next"></a> </div> </body> </html>
{ "content_hash": "c9ab8930a1768e0199a1f09149546d8d", "timestamp": "", "source": "github", "line_count": 98, "max_line_length": 445, "avg_line_length": 72.01020408163265, "alnum_prop": 0.6516933541164801, "repo_name": "yinchunlong/abelkhan-1", "id": "0e7e6df00df75a4083e4b2c278c82e062cb9517c", "size": "7057", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "ext/c++/thirdpart/c++/boost/libs/asio/doc/html/boost_asio/reference/posix__basic_stream_descriptor/io_control/overload1.html", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "118649" }, { "name": "Assembly", "bytes": "223360" }, { "name": "Batchfile", "bytes": "32410" }, { "name": "C", "bytes": "2956993" }, { "name": "C#", "bytes": "219949" }, { "name": "C++", "bytes": "184617089" }, { "name": "CMake", "bytes": "125437" }, { "name": "CSS", "bytes": "427629" }, { "name": "Cuda", "bytes": "52444" }, { "name": "DIGITAL Command Language", "bytes": "6246" }, { "name": "FORTRAN", "bytes": "1856" }, { "name": "Groff", "bytes": "5189" }, { "name": "HTML", "bytes": "234939732" }, { "name": "IDL", "bytes": "14" }, { "name": "JavaScript", "bytes": "682223" }, { "name": "Lex", "bytes": "1231" }, { "name": "M4", "bytes": "29689" }, { "name": "Makefile", "bytes": "1083341" }, { "name": "Max", "bytes": "36857" }, { "name": "Objective-C", "bytes": "11406" }, { "name": "Objective-C++", "bytes": "630" }, { "name": "PHP", "bytes": "59030" }, { "name": "Perl", "bytes": "38649" }, { "name": "Perl6", "bytes": "2053" }, { "name": "Python", "bytes": "1780184" }, { "name": "QML", "bytes": "593" }, { "name": "QMake", "bytes": "16692" }, { "name": "Rebol", "bytes": "354" }, { "name": "Ruby", "bytes": "5532" }, { "name": "Shell", "bytes": "354720" }, { "name": "Tcl", "bytes": "1172" }, { "name": "TeX", "bytes": "32117" }, { "name": "XSLT", "bytes": "552736" }, { "name": "Yacc", "bytes": "19623" } ], "symlink_target": "" }
""" Test functions for models.GLM """ import os import warnings import numpy as np from numpy.testing import ( assert_, assert_allclose, assert_almost_equal, assert_array_less, assert_equal, assert_raises, ) import pandas as pd from pandas.testing import assert_series_equal import pytest from scipy import stats import statsmodels.api as sm from statsmodels.datasets import cpunish, longley from statsmodels.discrete import discrete_model as discrete from statsmodels.genmod.generalized_linear_model import GLM, SET_USE_BIC_LLF from statsmodels.tools.numdiff import ( approx_fprime, approx_fprime_cs, approx_hess, approx_hess_cs, ) from statsmodels.tools.sm_exceptions import ( DomainWarning, PerfectSeparationError, ValueWarning, ) from statsmodels.tools.tools import add_constant # Test Precisions DECIMAL_4 = 4 DECIMAL_3 = 3 DECIMAL_2 = 2 DECIMAL_1 = 1 DECIMAL_0 = 0 pdf_output = False if pdf_output: from matplotlib.backends.backend_pdf import PdfPages pdf = PdfPages("test_glm.pdf") else: pdf = None def close_or_save(pdf, fig): if pdf_output: pdf.savefig(fig) def teardown_module(): if pdf_output: pdf.close() @pytest.fixture(scope="module") def iris(): cur_dir = os.path.dirname(os.path.abspath(__file__)) return np.genfromtxt(os.path.join(cur_dir, 'results', 'iris.csv'), delimiter=",", skip_header=1) class CheckModelResultsMixin: ''' res2 should be either the results from RModelWrap or the results as defined in model_results_data ''' decimal_params = DECIMAL_4 def test_params(self): assert_almost_equal(self.res1.params, self.res2.params, self.decimal_params) decimal_bse = DECIMAL_4 def test_standard_errors(self): assert_allclose(self.res1.bse, self.res2.bse, atol=10**(-self.decimal_bse), rtol=1e-5) decimal_resids = DECIMAL_4 def test_residuals(self): # fix incorrect numbers in resid_working results # residuals for Poisson are also tested in test_glm_weights.py import copy # new numpy would have copy method resid2 = copy.copy(self.res2.resids) resid2[:, 2] *= 
self.res1.family.link.deriv(self.res1.mu)**2 atol = 10**(-self.decimal_resids) resid_a = self.res1.resid_anscombe_unscaled resids = np.column_stack((self.res1.resid_pearson, self.res1.resid_deviance, self.res1.resid_working, resid_a, self.res1.resid_response)) assert_allclose(resids, resid2, rtol=1e-6, atol=atol) decimal_aic_R = DECIMAL_4 def test_aic_R(self): # R includes the estimation of the scale as a lost dof # Does not with Gamma though if self.res1.scale != 1: dof = 2 else: dof = 0 if isinstance(self.res1.model.family, (sm.families.NegativeBinomial)): llf = self.res1.model.family.loglike(self.res1.model.endog, self.res1.mu, self.res1.model.var_weights, self.res1.model.freq_weights, scale=1) aic = (-2*llf+2*(self.res1.df_model+1)) else: aic = self.res1.aic assert_almost_equal(aic+dof, self.res2.aic_R, self.decimal_aic_R) decimal_aic_Stata = DECIMAL_4 def test_aic_Stata(self): # Stata uses the below llf for aic definition for these families if isinstance(self.res1.model.family, (sm.families.Gamma, sm.families.InverseGaussian, sm.families.NegativeBinomial)): llf = self.res1.model.family.loglike(self.res1.model.endog, self.res1.mu, self.res1.model.var_weights, self.res1.model.freq_weights, scale=1) aic = (-2*llf+2*(self.res1.df_model+1))/self.res1.nobs else: aic = self.res1.aic/self.res1.nobs assert_almost_equal(aic, self.res2.aic_Stata, self.decimal_aic_Stata) decimal_deviance = DECIMAL_4 def test_deviance(self): assert_almost_equal(self.res1.deviance, self.res2.deviance, self.decimal_deviance) decimal_scale = DECIMAL_4 def test_scale(self): assert_almost_equal(self.res1.scale, self.res2.scale, self.decimal_scale) decimal_loglike = DECIMAL_4 def test_loglike(self): # Stata uses the below llf for these families # We differ with R for them if isinstance(self.res1.model.family, (sm.families.Gamma, sm.families.InverseGaussian, sm.families.NegativeBinomial)): llf = self.res1.model.family.loglike(self.res1.model.endog, self.res1.mu, self.res1.model.var_weights, 
self.res1.model.freq_weights, scale=1) else: llf = self.res1.llf assert_almost_equal(llf, self.res2.llf, self.decimal_loglike) decimal_null_deviance = DECIMAL_4 def test_null_deviance(self): with warnings.catch_warnings(): warnings.simplefilter("ignore", DomainWarning) assert_almost_equal(self.res1.null_deviance, self.res2.null_deviance, self.decimal_null_deviance) decimal_bic = DECIMAL_4 def test_bic(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") assert_almost_equal(self.res1.bic, self.res2.bic_Stata, self.decimal_bic) def test_degrees(self): assert_equal(self.res1.model.df_resid,self.res2.df_resid) decimal_fittedvalues = DECIMAL_4 def test_fittedvalues(self): assert_almost_equal(self.res1.fittedvalues, self.res2.fittedvalues, self.decimal_fittedvalues) def test_tpvalues(self): # test comparing tvalues and pvalues with normal implementation # make sure they use normal distribution (inherited in results class) params = self.res1.params tvalues = params / self.res1.bse pvalues = stats.norm.sf(np.abs(tvalues)) * 2 half_width = stats.norm.isf(0.025) * self.res1.bse conf_int = np.column_stack((params - half_width, params + half_width)) if isinstance(tvalues, pd.Series): assert_series_equal(self.res1.tvalues, tvalues) else: assert_almost_equal(self.res1.tvalues, tvalues) assert_almost_equal(self.res1.pvalues, pvalues) assert_almost_equal(self.res1.conf_int(), conf_int) def test_pearson_chi2(self): if hasattr(self.res2, 'pearson_chi2'): assert_allclose(self.res1.pearson_chi2, self.res2.pearson_chi2, atol=1e-6, rtol=1e-6) def test_prsquared(self): if hasattr(self.res2, 'prsquared'): assert_allclose(self.res1.pseudo_rsquared(kind="mcf"), self.res2.prsquared, rtol=0.05) if hasattr(self.res2, 'prsquared_cox_snell'): assert_allclose(float(self.res1.pseudo_rsquared(kind="cs")), self.res2.prsquared_cox_snell, rtol=0.05) @pytest.mark.smoke def test_summary(self): self.res1.summary() @pytest.mark.smoke def test_summary2(self): with 
warnings.catch_warnings(): warnings.simplefilter("ignore", DomainWarning) self.res1.summary2() def test_get_distribution(self): res1 = self.res1 if not hasattr(res1.model.family, "get_distribution"): # only Tweedie has not get_distribution pytest.skip("get_distribution not available") if isinstance(res1.model.family, sm.families.NegativeBinomial): res_scale = 1 # QMLE scale can differ from 1 else: res_scale = res1.scale distr = res1.model.family.get_distribution(res1.fittedvalues, res_scale) var_endog = res1.model.family.variance(res1.fittedvalues) * res_scale m, v = distr.stats() assert_allclose(res1.fittedvalues, m, rtol=1e-13) assert_allclose(var_endog, v, rtol=1e-13) # check model method distr2 = res1.model.get_distribution(res1.params, res_scale) for k in distr2.kwds: assert_allclose(distr.kwds[k], distr2.kwds[k], rtol=1e-13) class CheckComparisonMixin: def test_compare_discrete(self): res1 = self.res1 resd = self.resd assert_allclose(res1.llf, resd.llf, rtol=1e-10) score_obs1 = res1.model.score_obs(res1.params * 0.98) score_obsd = resd.model.score_obs(resd.params * 0.98) assert_allclose(score_obs1, score_obsd, rtol=1e-10) # score score1 = res1.model.score(res1.params * 0.98) assert_allclose(score1, score_obs1.sum(0), atol=1e-20) score0 = res1.model.score(res1.params) assert_allclose(score0, np.zeros(score_obs1.shape[1]), atol=5e-7) hessian1 = res1.model.hessian(res1.params * 0.98, observed=False) hessiand = resd.model.hessian(resd.params * 0.98) assert_allclose(hessian1, hessiand, rtol=1e-10) hessian1 = res1.model.hessian(res1.params * 0.98, observed=True) hessiand = resd.model.hessian(resd.params * 0.98) assert_allclose(hessian1, hessiand, rtol=1e-9) def test_score_test(self): res1 = self.res1 # fake example, should be zero, k_constraint should be 0 st, pv, df = res1.model.score_test(res1.params, k_constraints=1) assert_allclose(st, 0, atol=1e-20) assert_allclose(pv, 1, atol=1e-10) assert_equal(df, 1) st, pv, df = res1.model.score_test(res1.params, 
k_constraints=0) assert_allclose(st, 0, atol=1e-20) assert_(np.isnan(pv), msg=repr(pv)) assert_equal(df, 0) # TODO: no verified numbers largely SMOKE test exog_extra = res1.model.exog[:,1]**2 st, pv, df = res1.model.score_test(res1.params, exog_extra=exog_extra) assert_array_less(0.1, st) assert_array_less(0.1, pv) assert_equal(df, 1) def test_get_prediction(self): pred1 = self.res1.get_prediction() # GLM predd = self.resd.get_prediction() # discrete class assert_allclose(predd.predicted, pred1.predicted_mean, rtol=1e-11) assert_allclose(predd.se, pred1.se_mean, rtol=1e-6) assert_allclose(predd.summary_frame().values, pred1.summary_frame().values, rtol=1e-6) class TestGlmGaussian(CheckModelResultsMixin): @classmethod def setup_class(cls): ''' Test Gaussian family with canonical identity link ''' # Test Precisions cls.decimal_resids = DECIMAL_3 cls.decimal_params = DECIMAL_2 cls.decimal_bic = DECIMAL_0 cls.decimal_bse = DECIMAL_3 from statsmodels.datasets.longley import load cls.data = load() cls.data.endog = np.asarray(cls.data.endog) cls.data.exog = np.asarray(cls.data.exog) cls.data.exog = add_constant(cls.data.exog, prepend=False) cls.res1 = GLM(cls.data.endog, cls.data.exog, family=sm.families.Gaussian()).fit() from .results.results_glm import Longley cls.res2 = Longley() def test_compare_OLS(self): res1 = self.res1 # OLS does not define score_obs from statsmodels.regression.linear_model import OLS resd = OLS(self.data.endog, self.data.exog).fit() self.resd = resd # attach to access from the outside assert_allclose(res1.llf, resd.llf, rtol=1e-10) score_obs1 = res1.model.score_obs(res1.params, scale=None) score_obsd = resd.resid[:, None] / resd.scale * resd.model.exog # low precision because of badly scaled exog assert_allclose(score_obs1, score_obsd, rtol=1e-8) score_obs1 = res1.model.score_obs(res1.params, scale=1) score_obsd = resd.resid[:, None] * resd.model.exog assert_allclose(score_obs1, score_obsd, rtol=1e-8) hess_obs1 = res1.model.hessian(res1.params, 
scale=None) hess_obsd = -1. / resd.scale * resd.model.exog.T.dot(resd.model.exog) # low precision because of badly scaled exog assert_allclose(hess_obs1, hess_obsd, rtol=1e-8) # FIXME: enable or delete # def setup_method(self): # if skipR: # raise SkipTest, "Rpy not installed." # Gauss = r.gaussian # self.res2 = RModel(self.data.endog, self.data.exog, r.glm, family=Gauss) # self.res2.resids = np.array(self.res2.resid)[:,None]*np.ones((1,5)) # self.res2.null_deviance = 185008826 # taken from R. Rpy bug? class TestGlmGaussianGradient(TestGlmGaussian): @classmethod def setup_class(cls): ''' Test Gaussian family with canonical identity link ''' # Test Precisions cls.decimal_resids = DECIMAL_3 cls.decimal_params = DECIMAL_2 cls.decimal_bic = DECIMAL_0 cls.decimal_bse = DECIMAL_2 from statsmodels.datasets.longley import load cls.data = load() cls.data.endog = np.asarray(cls.data.endog) cls.data.exog = np.asarray(cls.data.exog) cls.data.exog = add_constant(cls.data.exog, prepend=False) cls.res1 = GLM(cls.data.endog, cls.data.exog, family=sm.families.Gaussian()).fit(method='newton') from .results.results_glm import Longley cls.res2 = Longley() class TestGaussianLog(CheckModelResultsMixin): @classmethod def setup_class(cls): # Test Precision cls.decimal_aic_R = DECIMAL_0 cls.decimal_aic_Stata = DECIMAL_2 cls.decimal_loglike = DECIMAL_0 cls.decimal_null_deviance = DECIMAL_1 nobs = 100 x = np.arange(nobs) np.random.seed(54321) # y = 1.0 - .02*x - .001*x**2 + 0.001 * np.random.randn(nobs) cls.X = np.c_[np.ones((nobs,1)),x,x**2] cls.lny = np.exp(-(-1.0 + 0.02*x + 0.0001*x**2)) +\ 0.001 * np.random.randn(nobs) GaussLog_Model = GLM(cls.lny, cls.X, family=sm.families.Gaussian(sm.families.links.log())) cls.res1 = GaussLog_Model.fit() from .results.results_glm import GaussianLog cls.res2 = GaussianLog() # FIXME: enable or delete # def setup(cls): # if skipR: # raise SkipTest, "Rpy not installed" # GaussLogLink = r.gaussian(link = "log") # GaussLog_Res_R = RModel(cls.lny, cls.X, 
r.glm, family=GaussLogLink) # cls.res2 = GaussLog_Res_R class TestGaussianInverse(CheckModelResultsMixin): @classmethod def setup_class(cls): # Test Precisions cls.decimal_bic = DECIMAL_1 cls.decimal_aic_R = DECIMAL_1 cls.decimal_aic_Stata = DECIMAL_3 cls.decimal_loglike = DECIMAL_1 cls.decimal_resids = DECIMAL_3 nobs = 100 x = np.arange(nobs) np.random.seed(54321) y = 1.0 + 2.0 * x + x**2 + 0.1 * np.random.randn(nobs) cls.X = np.c_[np.ones((nobs,1)),x,x**2] cls.y_inv = (1. + .02*x + .001*x**2)**-1 + .001 * np.random.randn(nobs) InverseLink_Model = GLM(cls.y_inv, cls.X, family=sm.families.Gaussian(sm.families.links.inverse_power())) InverseLink_Res = InverseLink_Model.fit() cls.res1 = InverseLink_Res from .results.results_glm import GaussianInverse cls.res2 = GaussianInverse() # FIXME: enable or delete # def setup(cls): # if skipR: # raise SkipTest, "Rpy not installed." # InverseLink = r.gaussian(link = "inverse") # InverseLink_Res_R = RModel(cls.y_inv, cls.X, r.glm, family=InverseLink) # cls.res2 = InverseLink_Res_R class TestGlmBinomial(CheckModelResultsMixin): @classmethod def setup_class(cls): ''' Test Binomial family with canonical logit link using star98 dataset. 
        '''
        # Test Precisions
        cls.decimal_resids = DECIMAL_1
        cls.decimal_bic = DECIMAL_2

        from statsmodels.datasets.star98 import load
        from .results.results_glm import Star98
        data = load()
        data.endog = np.asarray(data.endog)
        data.exog = np.asarray(data.exog)
        data.exog = add_constant(data.exog, prepend=False)
        cls.res1 = GLM(data.endog, data.exog,
                       family=sm.families.Binomial()).fit()
        # NOTE: if you want to replicate with RModel
        # res2 = RModel(data.endog[:,0]/trials, data.exog, r.glm,
        #        family=r.binomial, weights=trials)
        cls.res2 = Star98()

    def test_endog_dtype(self):
        # Binomial fit results must not depend on the integer vs. float
        # dtype of the (success, failure) endog columns.
        from statsmodels.datasets.star98 import load
        data = load()
        data.exog = add_constant(data.exog, prepend=False)
        endog = data.endog.astype(int)
        res2 = GLM(endog, data.exog, family=sm.families.Binomial()).fit()
        assert_allclose(res2.params, self.res1.params)
        endog = data.endog.astype(np.double)
        res3 = GLM(endog, data.exog, family=sm.families.Binomial()).fit()
        assert_allclose(res3.params, self.res1.params)

    def test_invalid_endog(self, reset_randomstate):
        # GH2733 inspired check: Binomial endog with more than two
        # columns must raise at model construction, not during fit.
        endog = np.random.randint(0, 100, size=(1000, 3))
        exog = np.random.standard_normal((1000, 2))
        with pytest.raises(ValueError, match='endog has more than 2 columns'):
            GLM(endog, exog, family=sm.families.Binomial())

    def test_invalid_endog_formula(self, reset_randomstate):
        # GH2733: a string-valued response expands to multiple columns
        # under the formula interface and must be rejected.
        n = 200
        exog = np.random.normal(size=(n, 2))
        endog = np.random.randint(0, 3, size=n).astype(str)
        # formula interface
        data = pd.DataFrame({"y": endog, "x1": exog[:, 0],
                             "x2": exog[:, 1]})
        with pytest.raises(ValueError, match='array with multiple columns'):
            sm.GLM.from_formula("y ~ x1 + x2", data,
                                family=sm.families.Binomial())

    def test_get_distribution_binom_count(self):
        # test for binomial counts with n_trials > 1
        res1 = self.res1
        res_scale = 1  # QMLE scale can differ from 1

        mu_prob = res1.fittedvalues
        n = res1.model.n_trials
        distr = res1.model.family.get_distribution(mu_prob, res_scale,
                                                   n_trials=n)
        var_endog = res1.model.family.variance(mu_prob) * res_scale

        m, v = distr.stats()
assert_allclose(mu_prob * n, m, rtol=1e-13) assert_allclose(var_endog * n, v, rtol=1e-13) # check model method distr2 = res1.model.get_distribution(res1.params, res_scale, n_trials=n) for k in distr2.kwds: assert_allclose(distr.kwds[k], distr2.kwds[k], rtol=1e-13) # FIXME: enable/xfail/skip or delete # TODO: # Non-Canonical Links for the Binomial family require the algorithm to be # slightly changed # class TestGlmBinomialLog(CheckModelResultsMixin): # pass # class TestGlmBinomialLogit(CheckModelResultsMixin): # pass # class TestGlmBinomialProbit(CheckModelResultsMixin): # pass # class TestGlmBinomialCloglog(CheckModelResultsMixin): # pass # class TestGlmBinomialPower(CheckModelResultsMixin): # pass # class TestGlmBinomialLoglog(CheckModelResultsMixin): # pass # class TestGlmBinomialLogc(CheckModelResultsMixin): # TODO: need include logc link # pass class TestGlmBernoulli(CheckModelResultsMixin, CheckComparisonMixin): @classmethod def setup_class(cls): from .results.results_glm import Lbw cls.res2 = Lbw() cls.res1 = GLM(cls.res2.endog, cls.res2.exog, family=sm.families.Binomial()).fit() modd = discrete.Logit(cls.res2.endog, cls.res2.exog) cls.resd = modd.fit(start_params=cls.res1.params * 0.9, disp=False) def test_score_r(self): res1 = self.res1 res2 = self.res2 st, pv, df = res1.model.score_test(res1.params, exog_extra=res1.model.exog[:, 1]**2) st_res = 0.2837680293459376 # (-0.5326988167303712)**2 assert_allclose(st, st_res, rtol=1e-4) st, pv, df = res1.model.score_test(res1.params, exog_extra=res1.model.exog[:, 0]**2) st_res = 0.6713492821514992 # (-0.8193590679009413)**2 assert_allclose(st, st_res, rtol=1e-4) select = list(range(9)) select.pop(7) res1b = GLM(res2.endog, res2.exog.iloc[:, select], family=sm.families.Binomial()).fit() tres = res1b.model.score_test(res1b.params, exog_extra=res1.model.exog[:, -2]) tres = np.asarray(tres[:2]).ravel() tres_r = (2.7864148487452, 0.0950667) assert_allclose(tres, tres_r, rtol=1e-4) cmd_r = """\ data = 
read.csv("...statsmodels\\statsmodels\\genmod\\tests\\results\\stata_lbw_glm.csv") data["race_black"] = data["race"] == "black" data["race_other"] = data["race"] == "other" mod = glm(low ~ age + lwt + race_black + race_other + smoke + ptl + ht + ui, family=binomial, data=data) options(digits=16) anova(mod, test="Rao") library(statmod) s = glm.scoretest(mod, data["age"]**2) s**2 s = glm.scoretest(mod, data["lwt"]**2) s**2 """ # class TestGlmBernoulliIdentity(CheckModelResultsMixin): # pass # class TestGlmBernoulliLog(CheckModelResultsMixin): # pass # class TestGlmBernoulliProbit(CheckModelResultsMixin): # pass # class TestGlmBernoulliCloglog(CheckModelResultsMixin): # pass # class TestGlmBernoulliPower(CheckModelResultsMixin): # pass # class TestGlmBernoulliLoglog(CheckModelResultsMixin): # pass # class test_glm_bernoulli_logc(CheckModelResultsMixin): # pass class TestGlmGamma(CheckModelResultsMixin): @classmethod def setup_class(cls): ''' Tests Gamma family with canonical inverse link (power -1) ''' # Test Precisions cls.decimal_aic_R = -1 #TODO: off by about 1, we are right with Stata cls.decimal_resids = DECIMAL_2 from statsmodels.datasets.scotland import load from .results.results_glm import Scotvote data = load() data.exog = add_constant(data.exog, prepend=False) with warnings.catch_warnings(): warnings.simplefilter("ignore") res1 = GLM(data.endog, data.exog, family=sm.families.Gamma()).fit() cls.res1 = res1 # res2 = RModel(data.endog, data.exog, r.glm, family=r.Gamma) res2 = Scotvote() res2.aic_R += 2 # R does not count degree of freedom for scale with gamma cls.res2 = res2 class TestGlmGammaLog(CheckModelResultsMixin): @classmethod def setup_class(cls): # Test Precisions cls.decimal_resids = DECIMAL_3 cls.decimal_aic_R = DECIMAL_0 cls.decimal_fittedvalues = DECIMAL_3 from .results.results_glm import CancerLog res2 = CancerLog() cls.res1 = GLM(res2.endog, res2.exog, family=sm.families.Gamma(link=sm.families.links.log())).fit() cls.res2 = res2 # FIXME: enable 
or delete # def setup(cls): # if skipR: # raise SkipTest, "Rpy not installed." # cls.res2 = RModel(cls.data.endog, cls.data.exog, r.glm, # family=r.Gamma(link="log")) # cls.res2.null_deviance = 27.92207137420696 # From R (bug in rpy) # cls.res2.bic = -154.1582089453923 # from Stata class TestGlmGammaIdentity(CheckModelResultsMixin): @classmethod def setup_class(cls): # Test Precisions cls.decimal_resids = -100 #TODO Very off from Stata? cls.decimal_params = DECIMAL_2 cls.decimal_aic_R = DECIMAL_0 cls.decimal_loglike = DECIMAL_1 from .results.results_glm import CancerIdentity res2 = CancerIdentity() with warnings.catch_warnings(): warnings.simplefilter("ignore") fam = sm.families.Gamma(link=sm.families.links.identity()) cls.res1 = GLM(res2.endog, res2.exog, family=fam).fit() cls.res2 = res2 # FIXME: enable or delete # def setup(cls): # if skipR: # raise SkipTest, "Rpy not installed." # cls.res2 = RModel(cls.data.endog, cls.data.exog, r.glm, # family=r.Gamma(link="identity")) # cls.res2.null_deviance = 27.92207137420696 # from R, Rpy bug class TestGlmPoisson(CheckModelResultsMixin, CheckComparisonMixin): @classmethod def setup_class(cls): ''' Tests Poisson family with canonical log link. Test results were obtained by R. 
''' from .results.results_glm import Cpunish cls.data = cpunish.load() cls.data.endog = np.asarray(cls.data.endog) cls.data.exog = np.asarray(cls.data.exog) cls.data.exog[:, 3] = np.log(cls.data.exog[:, 3]) cls.data.exog = add_constant(cls.data.exog, prepend=False) cls.res1 = GLM(cls.data.endog, cls.data.exog, family=sm.families.Poisson()).fit() cls.res2 = Cpunish() # compare with discrete, start close to save time modd = discrete.Poisson(cls.data.endog, cls.data.exog) cls.resd = modd.fit(start_params=cls.res1.params * 0.9, disp=False) #class TestGlmPoissonIdentity(CheckModelResultsMixin): # pass #class TestGlmPoissonPower(CheckModelResultsMixin): # pass class TestGlmInvgauss(CheckModelResultsMixin): @classmethod def setup_class(cls): ''' Tests the Inverse Gaussian family in GLM. Notes ----- Used the rndivgx.ado file provided by Hardin and Hilbe to generate the data. Results are read from model_results, which were obtained by running R_ig.s ''' # Test Precisions cls.decimal_aic_R = DECIMAL_0 cls.decimal_loglike = DECIMAL_0 from .results.results_glm import InvGauss res2 = InvGauss() res1 = GLM(res2.endog, res2.exog, family=sm.families.InverseGaussian()).fit() cls.res1 = res1 cls.res2 = res2 def test_get_distribution(self): res1 = self.res1 distr = res1.model.family.get_distribution(res1.fittedvalues, res1.scale) var_endog = res1.model.family.variance(res1.fittedvalues) * res1.scale m, v = distr.stats() assert_allclose(res1.fittedvalues, m, rtol=1e-13) assert_allclose(var_endog, v, rtol=1e-13) class TestGlmInvgaussLog(CheckModelResultsMixin): @classmethod def setup_class(cls): # Test Precisions cls.decimal_aic_R = -10 # Big difference vs R. cls.decimal_resids = DECIMAL_3 from .results.results_glm import InvGaussLog res2 = InvGaussLog() cls.res1 = GLM(res2.endog, res2.exog, family=sm.families.InverseGaussian( link=sm.families.links.log())).fit() cls.res2 = res2 # FIXME: enable or delete # def setup(cls): # if skipR: # raise SkipTest, "Rpy not installed." 
# cls.res2 = RModel(cls.data.endog, cls.data.exog, r.glm, # family=r.inverse_gaussian(link="log")) # cls.res2.null_deviance = 335.1539777981053 # from R, Rpy bug # cls.res2.llf = -12162.72308 # from Stata, R's has big rounding diff class TestGlmInvgaussIdentity(CheckModelResultsMixin): @classmethod def setup_class(cls): # Test Precisions cls.decimal_aic_R = -10 #TODO: Big difference vs R cls.decimal_fittedvalues = DECIMAL_3 cls.decimal_params = DECIMAL_3 from .results.results_glm import Medpar1 data = Medpar1() with warnings.catch_warnings(): warnings.simplefilter("ignore") cls.res1 = GLM(data.endog, data.exog, family=sm.families.InverseGaussian( link=sm.families.links.identity())).fit() from .results.results_glm import InvGaussIdentity cls.res2 = InvGaussIdentity() # FIXME: enable or delete # def setup(cls): # if skipR: # raise SkipTest, "Rpy not installed." # cls.res2 = RModel(cls.data.endog, cls.data.exog, r.glm, # family=r.inverse_gaussian(link="identity")) # cls.res2.null_deviance = 335.1539777981053 # from R, Rpy bug # cls.res2.llf = -12163.25545 # from Stata, big diff with R class TestGlmNegbinomial(CheckModelResultsMixin): @classmethod def setup_class(cls): ''' Test Negative Binomial family with log link ''' # Test Precision cls.decimal_resid = DECIMAL_1 cls.decimal_params = DECIMAL_3 cls.decimal_resids = -1 # 1 % mismatch at 0 cls.decimal_fittedvalues = DECIMAL_1 from statsmodels.datasets.committee import load cls.data = load() cls.data.endog = np.asarray(cls.data.endog) cls.data.exog = np.asarray(cls.data.exog) cls.data.exog[:,2] = np.log(cls.data.exog[:,2]) interaction = cls.data.exog[:,2]*cls.data.exog[:,1] cls.data.exog = np.column_stack((cls.data.exog,interaction)) cls.data.exog = add_constant(cls.data.exog, prepend=False) with warnings.catch_warnings(): warnings.simplefilter("ignore", category=DomainWarning) fam = sm.families.NegativeBinomial() cls.res1 = GLM(cls.data.endog, cls.data.exog, family=fam).fit(scale='x2') from .results.results_glm import 
Committee res2 = Committee() res2.aic_R += 2 # They do not count a degree of freedom for the scale cls.res2 = res2 # FIXME: enable or delete # def setup_method(self): # if skipR: # raise SkipTest, "Rpy not installed" # r.library('MASS') # this does not work when done in rmodelwrap? # self.res2 = RModel(self.data.endog, self.data.exog, r.glm, # family=r.negative_binomial(1)) # self.res2.null_deviance = 27.8110469364343 # FIXME: enable/xfail/skip or delete #class TestGlmNegbinomial_log(CheckModelResultsMixin): # pass # FIXME: enable/xfail/skip or delete #class TestGlmNegbinomial_power(CheckModelResultsMixin): # pass # FIXME: enable/xfail/skip or delete #class TestGlmNegbinomial_nbinom(CheckModelResultsMixin): # pass class TestGlmPoissonOffset(CheckModelResultsMixin): @classmethod def setup_class(cls): from .results.results_glm import Cpunish_offset cls.decimal_params = DECIMAL_4 cls.decimal_bse = DECIMAL_4 cls.decimal_aic_R = 3 data = cpunish.load() data.endog = np.asarray(data.endog) data.exog = np.asarray(data.exog) data.exog[:, 3] = np.log(data.exog[:, 3]) data.exog = add_constant(data.exog, prepend=True) exposure = [100] * len(data.endog) cls.data = data cls.exposure = exposure cls.res1 = GLM(data.endog, data.exog, family=sm.families.Poisson(), exposure=exposure).fit() cls.res2 = Cpunish_offset() def test_missing(self): # make sure offset is dropped correctly endog = self.data.endog.copy() endog[[2,4,6,8]] = np.nan mod = GLM(endog, self.data.exog, family=sm.families.Poisson(), exposure=self.exposure, missing='drop') assert_equal(mod.exposure.shape[0], 13) def test_offset_exposure(self): # exposure=x and offset=log(x) should have the same effect np.random.seed(382304) endog = np.random.randint(0, 10, 100) exog = np.random.normal(size=(100,3)) exposure = np.random.uniform(1, 2, 100) offset = np.random.uniform(1, 2, 100) mod1 = GLM(endog, exog, family=sm.families.Poisson(), offset=offset, exposure=exposure).fit() offset2 = offset + np.log(exposure) mod2 = GLM(endog, 
exog, family=sm.families.Poisson(), offset=offset2).fit() assert_almost_equal(mod1.params, mod2.params) assert_allclose(mod1.null, mod2.null, rtol=1e-10) # test recreating model mod1_ = mod1.model kwds = mod1_._get_init_kwds() assert_allclose(kwds['exposure'], exposure, rtol=1e-14) assert_allclose(kwds['offset'], mod1_.offset, rtol=1e-14) mod3 = mod1_.__class__(mod1_.endog, mod1_.exog, **kwds) assert_allclose(mod3.exposure, mod1_.exposure, rtol=1e-14) assert_allclose(mod3.offset, mod1_.offset, rtol=1e-14) # test fit_regularized exposure, see #4605 resr1 = mod1.model.fit_regularized() resr2 = mod2.model.fit_regularized() assert_allclose(resr1.params, resr2.params, rtol=1e-10) def test_predict(self): np.random.seed(382304) endog = np.random.randint(0, 10, 100) exog = np.random.normal(size=(100,3)) exposure = np.random.uniform(1, 2, 100) mod1 = GLM(endog, exog, family=sm.families.Poisson(), exposure=exposure).fit() exog1 = np.random.normal(size=(10,3)) exposure1 = np.random.uniform(1, 2, 10) # Doubling exposure time should double expected response pred1 = mod1.predict(exog=exog1, exposure=exposure1) pred2 = mod1.predict(exog=exog1, exposure=2*exposure1) assert_almost_equal(pred2, 2*pred1) # Check exposure defaults pred3 = mod1.predict() pred4 = mod1.predict(exposure=exposure) pred5 = mod1.predict(exog=exog, exposure=exposure) assert_almost_equal(pred3, pred4) assert_almost_equal(pred4, pred5) # Check offset defaults offset = np.random.uniform(1, 2, 100) mod2 = GLM(endog, exog, offset=offset, family=sm.families.Poisson()).fit() pred1 = mod2.predict() pred2 = mod2.predict(offset=offset) pred3 = mod2.predict(exog=exog, offset=offset) assert_almost_equal(pred1, pred2) assert_almost_equal(pred2, pred3) # Check that offset shifts the linear predictor mod3 = GLM(endog, exog, family=sm.families.Poisson()).fit() offset = np.random.uniform(1, 2, 10) pred1 = mod3.predict(exog=exog1, offset=offset, linear=True) pred2 = mod3.predict(exog=exog1, offset=2*offset, linear=True) 
assert_almost_equal(pred2, pred1+offset) # Passing exposure as a pandas series should not effect output type assert isinstance( mod1.predict(exog=exog1, exposure=pd.Series(exposure1)), np.ndarray ) def test_perfect_pred(iris): y = iris[:, -1] X = iris[:, :-1] X = X[y != 2] y = y[y != 2] X = add_constant(X, prepend=True) glm = GLM(y, X, family=sm.families.Binomial()) with warnings.catch_warnings(): warnings.simplefilter("ignore", category=RuntimeWarning) assert_raises(PerfectSeparationError, glm.fit) def test_score_test_ols(): # nicer example than Longley from statsmodels.regression.linear_model import OLS np.random.seed(5) nobs = 100 sige = 0.5 x = np.random.uniform(0, 1, size=(nobs, 5)) x[:, 0] = 1 beta = 1. / np.arange(1., x.shape[1] + 1) y = x.dot(beta) + sige * np.random.randn(nobs) res_ols = OLS(y, x).fit() res_olsc = OLS(y, x[:, :-2]).fit() co = res_ols.compare_lm_test(res_olsc, demean=False) res_glm = GLM(y, x[:, :-2], family=sm.families.Gaussian()).fit() co2 = res_glm.model.score_test(res_glm.params, exog_extra=x[:, -2:]) # difference in df_resid versus nobs in scale see #1786 assert_allclose(co[0] * 97 / 100., co2[0], rtol=1e-13) def test_attribute_writable_resettable(): # Regression test for mutables and class constructors. data = sm.datasets.longley.load() endog, exog = data.endog, data.exog glm_model = sm.GLM(endog, exog) assert_equal(glm_model.family.link.power, 1.0) glm_model.family.link.power = 2. 
    assert_equal(glm_model.family.link.power, 2.0)

    # Mutating one model's family/link must not leak into a freshly
    # constructed model (guards against shared mutable class state).
    glm_model2 = sm.GLM(endog, exog)
    assert_equal(glm_model2.family.link.power, 1.0)


class TestStartParams(CheckModelResultsMixin):
    @classmethod
    def setup_class(cls):
        '''
        Test Gaussian family with canonical identity link,
        fit from user-supplied OLS start_params.
        '''
        # Test Precisions
        cls.decimal_resids = DECIMAL_3
        cls.decimal_params = DECIMAL_2
        cls.decimal_bic = DECIMAL_0
        cls.decimal_bse = DECIMAL_3

        from statsmodels.datasets.longley import load
        cls.data = load()
        cls.data.exog = add_constant(cls.data.exog, prepend=False)
        # OLS solution is the exact optimum for Gaussian-identity GLM
        params = sm.OLS(cls.data.endog, cls.data.exog).fit().params
        cls.res1 = GLM(cls.data.endog, cls.data.exog,
                       family=sm.families.Gaussian()).fit(start_params=params)
        from .results.results_glm import Longley
        cls.res2 = Longley()


def test_glm_start_params():
    # see 1604
    # Badly scaled exog; fit must still converge from the given
    # start_params to the reference solution.
    y2 = np.array('0 1 0 0 0 1'.split(), int)
    wt = np.array([50,1,50,1,5,10])
    y2 = np.repeat(y2, wt)
    x2 = np.repeat([0,0,0.001,100,-1,-1], wt)
    mod = sm.GLM(y2, sm.add_constant(x2), family=sm.families.Binomial())
    res = mod.fit(start_params=[-4, -5])
    np.testing.assert_almost_equal(res.params, [-4.60305022, -5.29634545], 6)


def test_loglike_no_opt():
    # see 1728
    # With maxiter=0 the reported llf must equal the Bernoulli
    # log-likelihood evaluated directly at start_params.
    y = np.asarray([0, 1, 0, 0, 1, 1, 0, 1, 1, 1])
    x = np.arange(10, dtype=np.float64)

    def llf(params):
        # hand-computed Bernoulli log-likelihood for a logit model
        lin_pred = params[0] + params[1]*x
        pr = 1 / (1 + np.exp(-lin_pred))
        return np.sum(y*np.log(pr) + (1-y)*np.log(1-pr))

    for params in [0,0], [0,1], [0.5,0.5]:
        mod = sm.GLM(y, sm.add_constant(x), family=sm.families.Binomial())
        res = mod.fit(start_params=params, maxiter=0)
        like = llf(params)
        assert_almost_equal(like, res.llf)


def test_formula_missing_exposure():
    # see 2083
    # exposure passed alongside a formula must survive the NaN-row
    # dropping and come back as an ndarray.
    import statsmodels.formula.api as smf

    d = {'Foo': [1, 2, 10, 149], 'Bar': [1, 2, 3, np.nan],
         'constant': [1] * 4, 'exposure': np.random.uniform(size=4),
         'x': [1, 3, 2, 1.5]}
    df = pd.DataFrame(d)

    family = sm.families.Gaussian(link=sm.families.links.log())

    mod = smf.glm("Foo ~ Bar", data=df, exposure=df.exposure,
                  family=family)
    assert_(type(mod.exposure) is np.ndarray,
msg='Exposure is not ndarray') exposure = pd.Series(np.random.uniform(size=5)) df.loc[3, 'Bar'] = 4 # nan not relevant for Valueerror for shape mismatch assert_raises(ValueError, smf.glm, "Foo ~ Bar", data=df, exposure=exposure, family=family) assert_raises(ValueError, GLM, df.Foo, df[['constant', 'Bar']], exposure=exposure, family=family) @pytest.mark.matplotlib def test_plots(close_figures): np.random.seed(378) n = 200 exog = np.random.normal(size=(n, 2)) lin_pred = exog[:, 0] + exog[:, 1]**2 prob = 1 / (1 + np.exp(-lin_pred)) endog = 1 * (np.random.uniform(size=n) < prob) model = sm.GLM(endog, exog, family=sm.families.Binomial()) result = model.fit() import pandas as pd from statsmodels.graphics.regressionplots import add_lowess # array interface for j in 0,1: fig = result.plot_added_variable(j) add_lowess(fig.axes[0], frac=0.5) close_or_save(pdf, fig) fig = result.plot_partial_residuals(j) add_lowess(fig.axes[0], frac=0.5) close_or_save(pdf, fig) fig = result.plot_ceres_residuals(j) add_lowess(fig.axes[0], frac=0.5) close_or_save(pdf, fig) # formula interface data = pd.DataFrame({"y": endog, "x1": exog[:, 0], "x2": exog[:, 1]}) model = sm.GLM.from_formula("y ~ x1 + x2", data, family=sm.families.Binomial()) result = model.fit() for j in 0,1: xname = ["x1", "x2"][j] fig = result.plot_added_variable(xname) add_lowess(fig.axes[0], frac=0.5) close_or_save(pdf, fig) fig = result.plot_partial_residuals(xname) add_lowess(fig.axes[0], frac=0.5) close_or_save(pdf, fig) fig = result.plot_ceres_residuals(xname) add_lowess(fig.axes[0], frac=0.5) close_or_save(pdf, fig) def gen_endog(lin_pred, family_class, link, binom_version=0): np.random.seed(872) fam = sm.families mu = link().inverse(lin_pred) if family_class == fam.Binomial: if binom_version == 0: endog = 1*(np.random.uniform(size=len(lin_pred)) < mu) else: endog = np.empty((len(lin_pred), 2)) n = 10 endog[:, 0] = (np.random.uniform(size=(len(lin_pred), n)) < mu[:, None]).sum(1) endog[:, 1] = n - endog[:, 0] elif 
family_class == fam.Poisson: endog = np.random.poisson(mu) elif family_class == fam.Gamma: endog = np.random.gamma(2, mu) elif family_class == fam.Gaussian: endog = mu + 2 * np.random.normal(size=len(lin_pred)) elif family_class == fam.NegativeBinomial: from scipy.stats.distributions import nbinom endog = nbinom.rvs(mu, 0.5) elif family_class == fam.InverseGaussian: from scipy.stats.distributions import invgauss endog = invgauss.rvs(mu, scale=20) else: raise ValueError return endog @pytest.mark.smoke def test_summary(): np.random.seed(4323) n = 100 exog = np.random.normal(size=(n, 2)) exog[:, 0] = 1 endog = np.random.normal(size=n) for method in ["irls", "cg"]: fa = sm.families.Gaussian() model = sm.GLM(endog, exog, family=fa) rslt = model.fit(method=method) s = rslt.summary() def check_score_hessian(results): # compare models core and hessian with numerical derivatives params = results.params # avoid checking score at MLE, score close to zero sc = results.model.score(params * 0.98, scale=1) # cs currently (0.9) does not work for all families llfunc = lambda x: results.model.loglike(x, scale=1) # noqa sc2 = approx_fprime(params * 0.98, llfunc) assert_allclose(sc, sc2, rtol=1e-4, atol=1e-4) hess = results.model.hessian(params, scale=1) hess2 = approx_hess(params, llfunc) assert_allclose(hess, hess2, rtol=1e-4) scfunc = lambda x: results.model.score(x, scale=1) # noqa hess3 = approx_fprime(params, scfunc) assert_allclose(hess, hess3, rtol=1e-4) def test_gradient_irls(): # Compare the results when using gradient optimization and IRLS. 
# TODO: Find working examples for inverse_squared link np.random.seed(87342) fam = sm.families lnk = sm.families.links families = [(fam.Binomial, [lnk.logit, lnk.probit, lnk.cloglog, lnk.log, lnk.cauchy]), (fam.Poisson, [lnk.log, lnk.identity, lnk.sqrt]), (fam.Gamma, [lnk.log, lnk.identity, lnk.inverse_power]), (fam.Gaussian, [lnk.identity, lnk.log, lnk.inverse_power]), (fam.InverseGaussian, [lnk.log, lnk.identity, lnk.inverse_power, lnk.inverse_squared]), (fam.NegativeBinomial, [lnk.log, lnk.inverse_power, lnk.inverse_squared, lnk.identity])] n = 100 p = 3 exog = np.random.normal(size=(n, p)) exog[:, 0] = 1 skip_one = False for family_class, family_links in families: for link in family_links: for binom_version in 0,1: if family_class != fam.Binomial and binom_version == 1: continue if (family_class, link) == (fam.Poisson, lnk.identity): lin_pred = 20 + exog.sum(1) elif (family_class, link) == (fam.Binomial, lnk.log): lin_pred = -1 + exog.sum(1) / 8 elif (family_class, link) == (fam.Poisson, lnk.sqrt): lin_pred = 2 + exog.sum(1) elif (family_class, link) == (fam.InverseGaussian, lnk.log): #skip_zero = True lin_pred = -1 + exog.sum(1) elif (family_class, link) == (fam.InverseGaussian, lnk.identity): lin_pred = 20 + 5*exog.sum(1) lin_pred = np.clip(lin_pred, 1e-4, np.inf) elif (family_class, link) == (fam.InverseGaussian, lnk.inverse_squared): lin_pred = 0.5 + exog.sum(1) / 5 continue # skip due to non-convergence elif (family_class, link) == (fam.InverseGaussian, lnk.inverse_power): lin_pred = 1 + exog.sum(1) / 5 elif (family_class, link) == (fam.NegativeBinomial, lnk.identity): lin_pred = 20 + 5*exog.sum(1) lin_pred = np.clip(lin_pred, 1e-4, np.inf) elif (family_class, link) == (fam.NegativeBinomial, lnk.inverse_squared): lin_pred = 0.1 + np.random.uniform(size=exog.shape[0]) continue # skip due to non-convergence elif (family_class, link) == (fam.NegativeBinomial, lnk.inverse_power): lin_pred = 1 + exog.sum(1) / 5 elif (family_class, link) == (fam.Gaussian, 
lnk.inverse_power): # adding skip because of convergence failure skip_one = True # the following fails with identity link, because endog < 0 # elif family_class == fam.Gamma: # lin_pred = 0.5 * exog.sum(1) + np.random.uniform(size=exog.shape[0]) else: lin_pred = np.random.uniform(size=exog.shape[0]) endog = gen_endog(lin_pred, family_class, link, binom_version) with warnings.catch_warnings(): warnings.simplefilter("ignore") mod_irls = sm.GLM(endog, exog, family=family_class(link=link())) rslt_irls = mod_irls.fit(method="IRLS") if not (family_class, link) in [(fam.Poisson, lnk.sqrt), (fam.Gamma, lnk.inverse_power), (fam.InverseGaussian, lnk.identity) ]: check_score_hessian(rslt_irls) # Try with and without starting values. for max_start_irls, start_params in (0, rslt_irls.params), (3, None): # TODO: skip convergence failures for now if max_start_irls > 0 and skip_one: continue with warnings.catch_warnings(): warnings.simplefilter("ignore") mod_gradient = sm.GLM(endog, exog, family=family_class(link=link())) rslt_gradient = mod_gradient.fit(max_start_irls=max_start_irls, start_params=start_params, method="newton", maxiter=300) assert_allclose(rslt_gradient.params, rslt_irls.params, rtol=1e-6, atol=5e-5) assert_allclose(rslt_gradient.llf, rslt_irls.llf, rtol=1e-6, atol=1e-6) assert_allclose(rslt_gradient.scale, rslt_irls.scale, rtol=1e-6, atol=1e-6) # Get the standard errors using expected information. 
gradient_bse = rslt_gradient.bse ehess = mod_gradient.hessian(rslt_gradient.params, observed=False) gradient_bse = np.sqrt(-np.diag(np.linalg.inv(ehess))) assert_allclose(gradient_bse, rslt_irls.bse, rtol=1e-6, atol=5e-5) # rslt_irls.bse corresponds to observed=True assert_allclose(rslt_gradient.bse, rslt_irls.bse, rtol=0.2, atol=5e-5) rslt_gradient_eim = mod_gradient.fit(max_start_irls=0, cov_type='eim', start_params=rslt_gradient.params, method="newton", maxiter=300) assert_allclose(rslt_gradient_eim.bse, rslt_irls.bse, rtol=5e-5, atol=0) def test_gradient_irls_eim(): # Compare the results when using eime gradient optimization and IRLS. # TODO: Find working examples for inverse_squared link np.random.seed(87342) fam = sm.families lnk = sm.families.links families = [(fam.Binomial, [lnk.logit, lnk.probit, lnk.cloglog, lnk.log, lnk.cauchy]), (fam.Poisson, [lnk.log, lnk.identity, lnk.sqrt]), (fam.Gamma, [lnk.log, lnk.identity, lnk.inverse_power]), (fam.Gaussian, [lnk.identity, lnk.log, lnk.inverse_power]), (fam.InverseGaussian, [lnk.log, lnk.identity, lnk.inverse_power, lnk.inverse_squared]), (fam.NegativeBinomial, [lnk.log, lnk.inverse_power, lnk.inverse_squared, lnk.identity])] n = 100 p = 3 exog = np.random.normal(size=(n, p)) exog[:, 0] = 1 skip_one = False for family_class, family_links in families: for link in family_links: for binom_version in 0, 1: if family_class != fam.Binomial and binom_version == 1: continue if (family_class, link) == (fam.Poisson, lnk.identity): lin_pred = 20 + exog.sum(1) elif (family_class, link) == (fam.Binomial, lnk.log): lin_pred = -1 + exog.sum(1) / 8 elif (family_class, link) == (fam.Poisson, lnk.sqrt): lin_pred = 2 + exog.sum(1) elif (family_class, link) == (fam.InverseGaussian, lnk.log): # skip_zero = True lin_pred = -1 + exog.sum(1) elif (family_class, link) == (fam.InverseGaussian, lnk.identity): lin_pred = 20 + 5*exog.sum(1) lin_pred = np.clip(lin_pred, 1e-4, np.inf) elif (family_class, link) == (fam.InverseGaussian, 
lnk.inverse_squared): lin_pred = 0.5 + exog.sum(1) / 5 continue # skip due to non-convergence elif (family_class, link) == (fam.InverseGaussian, lnk.inverse_power): lin_pred = 1 + exog.sum(1) / 5 elif (family_class, link) == (fam.NegativeBinomial, lnk.identity): lin_pred = 20 + 5*exog.sum(1) lin_pred = np.clip(lin_pred, 1e-4, np.inf) elif (family_class, link) == (fam.NegativeBinomial, lnk.inverse_squared): lin_pred = 0.1 + np.random.uniform(size=exog.shape[0]) continue # skip due to non-convergence elif (family_class, link) == (fam.NegativeBinomial, lnk.inverse_power): lin_pred = 1 + exog.sum(1) / 5 elif (family_class, link) == (fam.Gaussian, lnk.inverse_power): # adding skip because of convergence failure skip_one = True else: lin_pred = np.random.uniform(size=exog.shape[0]) endog = gen_endog(lin_pred, family_class, link, binom_version) with warnings.catch_warnings(): warnings.simplefilter("ignore") mod_irls = sm.GLM(endog, exog, family=family_class(link=link())) rslt_irls = mod_irls.fit(method="IRLS") # Try with and without starting values. for max_start_irls, start_params in ((0, rslt_irls.params), (3, None)): # TODO: skip convergence failures for now if max_start_irls > 0 and skip_one: continue with warnings.catch_warnings(): warnings.simplefilter("ignore") mod_gradient = sm.GLM(endog, exog, family=family_class(link=link())) rslt_gradient = mod_gradient.fit( max_start_irls=max_start_irls, start_params=start_params, method="newton", optim_hessian='eim' ) assert_allclose(rslt_gradient.params, rslt_irls.params, rtol=1e-6, atol=5e-5) assert_allclose(rslt_gradient.llf, rslt_irls.llf, rtol=1e-6, atol=1e-6) assert_allclose(rslt_gradient.scale, rslt_irls.scale, rtol=1e-6, atol=1e-6) # Get the standard errors using expected information. 
ehess = mod_gradient.hessian(rslt_gradient.params, observed=False) gradient_bse = np.sqrt(-np.diag(np.linalg.inv(ehess))) assert_allclose(gradient_bse, rslt_irls.bse, rtol=1e-6, atol=5e-5) def test_glm_irls_method(): nobs, k_vars = 50, 4 np.random.seed(987126) x = np.random.randn(nobs, k_vars - 1) exog = add_constant(x, has_constant='add') y = exog.sum(1) + np.random.randn(nobs) mod = GLM(y, exog) res1 = mod.fit() res2 = mod.fit(wls_method='pinv', attach_wls=True) res3 = mod.fit(wls_method='qr', attach_wls=True) # fit_gradient does not attach mle_settings res_g1 = mod.fit(start_params=res1.params, method='bfgs') for r in [res1, res2, res3]: assert_equal(r.mle_settings['optimizer'], 'IRLS') assert_equal(r.method, 'IRLS') assert_equal(res1.mle_settings['wls_method'], 'lstsq') assert_equal(res2.mle_settings['wls_method'], 'pinv') assert_equal(res3.mle_settings['wls_method'], 'qr') assert_(hasattr(res2.results_wls.model, 'pinv_wexog')) assert_(hasattr(res3.results_wls.model, 'exog_Q')) # fit_gradient currently does not attach mle_settings assert_equal(res_g1.method, 'bfgs') class CheckWtdDuplicationMixin: decimal_params = DECIMAL_4 @classmethod def setup_class(cls): cls.data = cpunish.load() cls.data.endog = np.asarray(cls.data.endog) cls.data.exog = np.asarray(cls.data.exog) cls.endog = cls.data.endog cls.exog = cls.data.exog np.random.seed(1234) cls.weight = np.random.randint(5, 100, len(cls.endog)) cls.endog_big = np.repeat(cls.endog, cls.weight) cls.exog_big = np.repeat(cls.exog, cls.weight, axis=0) def test_params(self): assert_allclose(self.res1.params, self.res2.params, atol=1e-6, rtol=1e-6) decimal_bse = DECIMAL_4 def test_standard_errors(self): assert_allclose(self.res1.bse, self.res2.bse, rtol=1e-5, atol=1e-6) decimal_resids = DECIMAL_4 # TODO: This does not work... Arrays are of different shape. # Perhaps we use self.res1.model.family.resid_XXX()? 
""" def test_residuals(self): resids1 = np.column_stack((self.res1.resid_pearson, self.res1.resid_deviance, self.res1.resid_working, self.res1.resid_anscombe, self.res1.resid_response)) resids2 = np.column_stack((self.res1.resid_pearson, self.res2.resid_deviance, self.res2.resid_working, self.res2.resid_anscombe, self.res2.resid_response)) assert_allclose(resids1, resids2, self.decimal_resids) """ def test_aic(self): # R includes the estimation of the scale as a lost dof # Does not with Gamma though assert_allclose(self.res1.aic, self.res2.aic, atol=1e-6, rtol=1e-6) def test_deviance(self): assert_allclose(self.res1.deviance, self.res2.deviance, atol=1e-6, rtol=1e-6) def test_scale(self): assert_allclose(self.res1.scale, self.res2.scale, atol=1e-6, rtol=1e-6) def test_loglike(self): # Stata uses the below llf for these families # We differ with R for them assert_allclose(self.res1.llf, self.res2.llf, 1e-6) decimal_null_deviance = DECIMAL_4 def test_null_deviance(self): with warnings.catch_warnings(): warnings.simplefilter("ignore", DomainWarning) assert_allclose(self.res1.null_deviance, self.res2.null_deviance, atol=1e-6, rtol=1e-6) decimal_bic = DECIMAL_4 def test_bic(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") assert_allclose(self.res1.bic, self.res2.bic, atol=1e-6, rtol=1e-6) decimal_fittedvalues = DECIMAL_4 def test_fittedvalues(self): res2_fitted = self.res2.predict(self.res1.model.exog) assert_allclose(self.res1.fittedvalues, res2_fitted, atol=1e-5, rtol=1e-5) decimal_tpvalues = DECIMAL_4 def test_tpvalues(self): # test comparing tvalues and pvalues with normal implementation # make sure they use normal distribution (inherited in results class) assert_allclose(self.res1.tvalues, self.res2.tvalues, atol=1e-6, rtol=2e-4) assert_allclose(self.res1.pvalues, self.res2.pvalues, atol=1e-6, rtol=1e-6) assert_allclose(self.res1.conf_int(), self.res2.conf_int(), atol=1e-6, rtol=1e-6) class TestWtdGlmPoisson(CheckWtdDuplicationMixin): 
@classmethod def setup_class(cls): ''' Tests Poisson family with canonical log link. ''' super(TestWtdGlmPoisson, cls).setup_class() cls.endog = np.asarray(cls.endog) cls.exog = np.asarray(cls.exog) cls.res1 = GLM(cls.endog, cls.exog, freq_weights=cls.weight, family=sm.families.Poisson()).fit() cls.res2 = GLM(cls.endog_big, cls.exog_big, family=sm.families.Poisson()).fit() class TestWtdGlmPoissonNewton(CheckWtdDuplicationMixin): @classmethod def setup_class(cls): ''' Tests Poisson family with canonical log link. ''' super(TestWtdGlmPoissonNewton, cls).setup_class() start_params = np.array([1.82794424e-04, -4.76785037e-02, -9.48249717e-02, -2.92293226e-04, 2.63728909e+00, -2.05934384e+01]) fit_kwds = dict(method='newton') cls.res1 = GLM(cls.endog, cls.exog, freq_weights=cls.weight, family=sm.families.Poisson()).fit(**fit_kwds) fit_kwds = dict(method='newton', start_params=start_params) cls.res2 = GLM(cls.endog_big, cls.exog_big, family=sm.families.Poisson()).fit(**fit_kwds) class TestWtdGlmPoissonHC0(CheckWtdDuplicationMixin): @classmethod def setup_class(cls): ''' Tests Poisson family with canonical log link. ''' super(TestWtdGlmPoissonHC0, cls).setup_class() start_params = np.array([1.82794424e-04, -4.76785037e-02, -9.48249717e-02, -2.92293226e-04, 2.63728909e+00, -2.05934384e+01]) fit_kwds = dict(cov_type='HC0') cls.res1 = GLM(cls.endog, cls.exog, freq_weights=cls.weight, family=sm.families.Poisson()).fit(**fit_kwds) fit_kwds = dict(cov_type='HC0', start_params=start_params) cls.res2 = GLM(cls.endog_big, cls.exog_big, family=sm.families.Poisson()).fit(**fit_kwds) class TestWtdGlmPoissonClu(CheckWtdDuplicationMixin): @classmethod def setup_class(cls): ''' Tests Poisson family with canonical log link. 
''' super(TestWtdGlmPoissonClu, cls).setup_class() start_params = np.array([1.82794424e-04, -4.76785037e-02, -9.48249717e-02, -2.92293226e-04, 2.63728909e+00, -2.05934384e+01]) gid = np.arange(1, len(cls.endog) + 1) // 2 fit_kwds = dict(cov_type='cluster', cov_kwds={'groups': gid, 'use_correction':False}) import warnings with warnings.catch_warnings(): warnings.simplefilter("ignore") cls.res1 = GLM(cls.endog, cls.exog, freq_weights=cls.weight, family=sm.families.Poisson()).fit(**fit_kwds) gidr = np.repeat(gid, cls.weight) fit_kwds = dict(cov_type='cluster', cov_kwds={'groups': gidr, 'use_correction':False}) cls.res2 = GLM(cls.endog_big, cls.exog_big, family=sm.families.Poisson()).fit(start_params=start_params, **fit_kwds) class TestWtdGlmBinomial(CheckWtdDuplicationMixin): @classmethod def setup_class(cls): ''' Tests Binomial family with canonical logit link. ''' super(TestWtdGlmBinomial, cls).setup_class() cls.endog = cls.endog / 100 cls.endog_big = cls.endog_big / 100 cls.res1 = GLM(cls.endog, cls.exog, freq_weights=cls.weight, family=sm.families.Binomial()).fit() cls.res2 = GLM(cls.endog_big, cls.exog_big, family=sm.families.Binomial()).fit() class TestWtdGlmNegativeBinomial(CheckWtdDuplicationMixin): @classmethod def setup_class(cls): ''' Tests Negative Binomial family with canonical link g(p) = log(p/(p + 1/alpha)) ''' super(TestWtdGlmNegativeBinomial, cls).setup_class() alpha = 1. with warnings.catch_warnings(): warnings.simplefilter("ignore", category=DomainWarning) family_link = sm.families.NegativeBinomial( link=sm.families.links.nbinom(alpha=alpha), alpha=alpha) cls.res1 = GLM(cls.endog, cls.exog, freq_weights=cls.weight, family=family_link).fit() cls.res2 = GLM(cls.endog_big, cls.exog_big, family=family_link).fit() class TestWtdGlmGamma(CheckWtdDuplicationMixin): @classmethod def setup_class(cls): ''' Tests Gamma family with log link. 
''' super(TestWtdGlmGamma, cls).setup_class() family_link = sm.families.Gamma(sm.families.links.log()) cls.res1 = GLM(cls.endog, cls.exog, freq_weights=cls.weight, family=family_link).fit() cls.res2 = GLM(cls.endog_big, cls.exog_big, family=family_link).fit() class TestWtdGlmGaussian(CheckWtdDuplicationMixin): @classmethod def setup_class(cls): ''' Tests Gaussian family with log link. ''' super(TestWtdGlmGaussian, cls).setup_class() family_link = sm.families.Gaussian(sm.families.links.log()) cls.res1 = GLM(cls.endog, cls.exog, freq_weights=cls.weight, family=family_link).fit() cls.res2 = GLM(cls.endog_big, cls.exog_big, family=family_link).fit() class TestWtdGlmInverseGaussian(CheckWtdDuplicationMixin): @classmethod def setup_class(cls): ''' Tests InverseGaussian family with log link. ''' super(TestWtdGlmInverseGaussian, cls).setup_class() family_link = sm.families.InverseGaussian(sm.families.links.log()) cls.res1 = GLM(cls.endog, cls.exog, freq_weights=cls.weight, family=family_link).fit() cls.res2 = GLM(cls.endog_big, cls.exog_big, family=family_link).fit() class TestWtdGlmGammaNewton(CheckWtdDuplicationMixin): @classmethod def setup_class(cls): ''' Tests Gamma family with log link. ''' super(TestWtdGlmGammaNewton, cls).setup_class() family_link = sm.families.Gamma(sm.families.links.log()) cls.res1 = GLM(cls.endog, cls.exog, freq_weights=cls.weight, family=family_link ).fit(method='newton') cls.res2 = GLM(cls.endog_big, cls.exog_big, family=family_link ).fit(method='newton') def test_init_kwargs(self): family_link = sm.families.Gamma(sm.families.links.log()) with pytest.warns(ValueWarning, match="unknown kwargs"): GLM(self.endog, self.exog, family=family_link, weights=self.weight, # incorrect keyword ) class TestWtdGlmGammaScale_X2(CheckWtdDuplicationMixin): @classmethod def setup_class(cls): ''' Tests Gamma family with log link. 
''' super(TestWtdGlmGammaScale_X2, cls).setup_class() family_link = sm.families.Gamma(sm.families.links.log()) cls.res1 = GLM(cls.endog, cls.exog, freq_weights=cls.weight, family=family_link, ).fit(scale='X2') cls.res2 = GLM(cls.endog_big, cls.exog_big, family=family_link, ).fit(scale='X2') class TestWtdGlmGammaScale_dev(CheckWtdDuplicationMixin): @classmethod def setup_class(cls): ''' Tests Gamma family with log link. ''' super(TestWtdGlmGammaScale_dev, cls).setup_class() family_link = sm.families.Gamma(sm.families.links.log()) cls.res1 = GLM(cls.endog, cls.exog, freq_weights=cls.weight, family=family_link, ).fit(scale='dev') cls.res2 = GLM(cls.endog_big, cls.exog_big, family=family_link, ).fit(scale='dev') def test_missing(self): endog = self.data.endog.copy() exog = self.data.exog.copy() exog[0, 0] = np.nan endog[[2, 4, 6, 8]] = np.nan freq_weights = self.weight mod_misisng = GLM(endog, exog, family=self.res1.model.family, freq_weights=freq_weights, missing='drop') assert_equal(mod_misisng.freq_weights.shape[0], mod_misisng.endog.shape[0]) assert_equal(mod_misisng.freq_weights.shape[0], mod_misisng.exog.shape[0]) keep_idx = np.array([1, 3, 5, 7, 9, 10, 11, 12, 13, 14, 15, 16]) assert_equal(mod_misisng.freq_weights, self.weight[keep_idx]) class TestWtdTweedieLog(CheckWtdDuplicationMixin): @classmethod def setup_class(cls): ''' Tests Tweedie family with log link and var_power=1. ''' super(TestWtdTweedieLog, cls).setup_class() family_link = sm.families.Tweedie(link=sm.families.links.log(), var_power=1) cls.res1 = GLM(cls.endog, cls.exog, freq_weights=cls.weight, family=family_link).fit() cls.res2 = GLM(cls.endog_big, cls.exog_big, family=family_link).fit() class TestWtdTweediePower2(CheckWtdDuplicationMixin): @classmethod def setup_class(cls): ''' Tests Tweedie family with Power(1) link and var_power=2. 
''' cls.data = cpunish.load_pandas() cls.endog = cls.data.endog cls.exog = cls.data.exog[['INCOME', 'SOUTH']] np.random.seed(1234) cls.weight = np.random.randint(5, 100, len(cls.endog)) cls.endog_big = np.repeat(cls.endog.values, cls.weight) cls.exog_big = np.repeat(cls.exog.values, cls.weight, axis=0) link = sm.families.links.Power() family_link = sm.families.Tweedie(link=link, var_power=2) cls.res1 = GLM(cls.endog, cls.exog, freq_weights=cls.weight, family=family_link).fit() cls.res2 = GLM(cls.endog_big, cls.exog_big, family=family_link).fit() class TestWtdTweediePower15(CheckWtdDuplicationMixin): @classmethod def setup_class(cls): ''' Tests Tweedie family with Power(0.5) link and var_power=1.5. ''' super(TestWtdTweediePower15, cls).setup_class() family_link = sm.families.Tweedie(link=sm.families.links.Power(0.5), var_power=1.5) cls.res1 = GLM(cls.endog, cls.exog, freq_weights=cls.weight, family=family_link).fit() cls.res2 = GLM(cls.endog_big, cls.exog_big, family=family_link).fit() def test_wtd_patsy_missing(): import pandas as pd data = cpunish.load() data.endog = np.asarray(data.endog) data.exog = np.asarray(data.exog) data.exog[0, 0] = np.nan data.endog[[2, 4, 6, 8]] = np.nan data.pandas = pd.DataFrame(data.exog, columns=data.exog_name) data.pandas['EXECUTIONS'] = data.endog weights = np.arange(1, len(data.endog)+1) formula = """EXECUTIONS ~ INCOME + PERPOVERTY + PERBLACK + VC100k96 + SOUTH + DEGREE""" mod_misisng = GLM.from_formula(formula, data=data.pandas, freq_weights=weights) assert_equal(mod_misisng.freq_weights.shape[0], mod_misisng.endog.shape[0]) assert_equal(mod_misisng.freq_weights.shape[0], mod_misisng.exog.shape[0]) assert_equal(mod_misisng.freq_weights.shape[0], 12) keep_weights = np.array([2, 4, 6, 8, 10, 11, 12, 13, 14, 15, 16, 17]) assert_equal(mod_misisng.freq_weights, keep_weights) class CheckTweedie: def test_resid(self): idx1 = len(self.res1.resid_response) - 1 idx2 = len(self.res2.resid_response) - 1 
assert_allclose(np.concatenate((self.res1.resid_response[:17], [self.res1.resid_response[idx1]])), np.concatenate((self.res2.resid_response[:17], [self.res2.resid_response[idx2]])), rtol=1e-5, atol=1e-5) assert_allclose(np.concatenate((self.res1.resid_pearson[:17], [self.res1.resid_pearson[idx1]])), np.concatenate((self.res2.resid_pearson[:17], [self.res2.resid_pearson[idx2]])), rtol=1e-5, atol=1e-5) assert_allclose(np.concatenate((self.res1.resid_deviance[:17], [self.res1.resid_deviance[idx1]])), np.concatenate((self.res2.resid_deviance[:17], [self.res2.resid_deviance[idx2]])), rtol=1e-5, atol=1e-5) assert_allclose(np.concatenate((self.res1.resid_working[:17], [self.res1.resid_working[idx1]])), np.concatenate((self.res2.resid_working[:17], [self.res2.resid_working[idx2]])), rtol=1e-5, atol=1e-5) def test_bse(self): assert_allclose(self.res1.bse, self.res2.bse, atol=1e-6, rtol=1e6) def test_params(self): assert_allclose(self.res1.params, self.res2.params, atol=1e-5, rtol=1e-5) def test_deviance(self): assert_allclose(self.res1.deviance, self.res2.deviance, atol=1e-6, rtol=1e-6) def test_df(self): assert_equal(self.res1.df_model, self.res2.df_model) assert_equal(self.res1.df_resid, self.res2.df_resid) def test_fittedvalues(self): idx1 = len(self.res1.fittedvalues) - 1 idx2 = len(self.res2.resid_response) - 1 assert_allclose(np.concatenate((self.res1.fittedvalues[:17], [self.res1.fittedvalues[idx1]])), np.concatenate((self.res2.fittedvalues[:17], [self.res2.fittedvalues[idx2]])), atol=1e-4, rtol=1e-4) def test_summary(self): self.res1.summary() self.res1.summary2() class TestTweediePower15(CheckTweedie): @classmethod def setup_class(cls): from .results.results_glm import CpunishTweediePower15 cls.data = cpunish.load_pandas() cls.exog = cls.data.exog[['INCOME', 'SOUTH']] cls.endog = cls.data.endog family_link = sm.families.Tweedie(link=sm.families.links.Power(1), var_power=1.5) cls.res1 = sm.GLM(endog=cls.data.endog, exog=cls.data.exog[['INCOME', 'SOUTH']], 
family=family_link).fit() cls.res2 = CpunishTweediePower15() class TestTweediePower2(CheckTweedie): @classmethod def setup_class(cls): from .results.results_glm import CpunishTweediePower2 cls.data = cpunish.load_pandas() cls.exog = cls.data.exog[['INCOME', 'SOUTH']] cls.endog = cls.data.endog family_link = sm.families.Tweedie(link=sm.families.links.Power(1), var_power=2.) cls.res1 = sm.GLM(endog=cls.data.endog, exog=cls.data.exog[['INCOME', 'SOUTH']], family=family_link).fit() cls.res2 = CpunishTweediePower2() class TestTweedieLog1(CheckTweedie): @classmethod def setup_class(cls): from .results.results_glm import CpunishTweedieLog1 cls.data = cpunish.load_pandas() cls.exog = cls.data.exog[['INCOME', 'SOUTH']] cls.endog = cls.data.endog family_link = sm.families.Tweedie(link=sm.families.links.log(), var_power=1.) cls.res1 = sm.GLM(endog=cls.data.endog, exog=cls.data.exog[['INCOME', 'SOUTH']], family=family_link).fit() cls.res2 = CpunishTweedieLog1() class TestTweedieLog15Fair(CheckTweedie): @classmethod def setup_class(cls): from statsmodels.datasets.fair import load_pandas from .results.results_glm import FairTweedieLog15 data = load_pandas() family_link = sm.families.Tweedie(link=sm.families.links.log(), var_power=1.5) cls.res1 = sm.GLM(endog=data.endog, exog=data.exog[['rate_marriage', 'age', 'yrs_married']], family=family_link).fit() cls.res2 = FairTweedieLog15() class CheckTweedieSpecial: def test_mu(self): assert_allclose(self.res1.mu, self.res2.mu, rtol=1e-5, atol=1e-5) def test_resid(self): assert_allclose(self.res1.resid_response, self.res2.resid_response, rtol=1e-5, atol=1e-5) assert_allclose(self.res1.resid_pearson, self.res2.resid_pearson, rtol=1e-5, atol=1e-5) assert_allclose(self.res1.resid_deviance, self.res2.resid_deviance, rtol=1e-5, atol=1e-5) assert_allclose(self.res1.resid_working, self.res2.resid_working, rtol=1e-5, atol=1e-5) assert_allclose(self.res1.resid_anscombe_unscaled, self.res2.resid_anscombe_unscaled, rtol=1e-5, atol=1e-5) class 
TestTweedieSpecialLog0(CheckTweedieSpecial): @classmethod def setup_class(cls): cls.data = cpunish.load_pandas() cls.exog = cls.data.exog[['INCOME', 'SOUTH']] cls.endog = cls.data.endog family1 = sm.families.Gaussian(link=sm.families.links.log()) cls.res1 = sm.GLM(endog=cls.data.endog, exog=cls.data.exog[['INCOME', 'SOUTH']], family=family1).fit() family2 = sm.families.Tweedie(link=sm.families.links.log(), var_power=0) cls.res2 = sm.GLM(endog=cls.data.endog, exog=cls.data.exog[['INCOME', 'SOUTH']], family=family2).fit() class TestTweedieSpecialLog1(CheckTweedieSpecial): @classmethod def setup_class(cls): cls.data = cpunish.load_pandas() cls.exog = cls.data.exog[['INCOME', 'SOUTH']] cls.endog = cls.data.endog family1 = sm.families.Poisson(link=sm.families.links.log()) cls.res1 = sm.GLM(endog=cls.data.endog, exog=cls.data.exog[['INCOME', 'SOUTH']], family=family1).fit() family2 = sm.families.Tweedie(link=sm.families.links.log(), var_power=1) cls.res2 = sm.GLM(endog=cls.data.endog, exog=cls.data.exog[['INCOME', 'SOUTH']], family=family2).fit() class TestTweedieSpecialLog2(CheckTweedieSpecial): @classmethod def setup_class(cls): cls.data = cpunish.load_pandas() cls.exog = cls.data.exog[['INCOME', 'SOUTH']] cls.endog = cls.data.endog family1 = sm.families.Gamma(link=sm.families.links.log()) cls.res1 = sm.GLM(endog=cls.data.endog, exog=cls.data.exog[['INCOME', 'SOUTH']], family=family1).fit() family2 = sm.families.Tweedie(link=sm.families.links.log(), var_power=2) cls.res2 = sm.GLM(endog=cls.data.endog, exog=cls.data.exog[['INCOME', 'SOUTH']], family=family2).fit() class TestTweedieSpecialLog3(CheckTweedieSpecial): @classmethod def setup_class(cls): cls.data = cpunish.load_pandas() cls.exog = cls.data.exog[['INCOME', 'SOUTH']] cls.endog = cls.data.endog family1 = sm.families.InverseGaussian(link=sm.families.links.log()) cls.res1 = sm.GLM(endog=cls.data.endog, exog=cls.data.exog[['INCOME', 'SOUTH']], family=family1).fit() family2 = 
sm.families.Tweedie(link=sm.families.links.log(), var_power=3)
        cls.res2 = sm.GLM(endog=cls.data.endog,
                          exog=cls.data.exog[['INCOME', 'SOUTH']],
                          family=family2).fit()


def gen_tweedie(p):
    # Simulate a Tweedie-like sample for variance power ``p``.
    #
    # Returns ``(y, x)`` where ``x`` is a (500, 4) standard-normal design
    # matrix and ``y`` is drawn from the compound Poisson/gamma
    # representation with mean ``mu = exp(x @ [1, -1, 0, 0.5])``.
    # NOTE(review): the compound Poisson/gamma construction corresponds to
    # the Tweedie family for 1 < p < 2; callers below use p = 1.5.

    np.random.seed(3242)
    n = 500
    x = np.random.normal(size=(n, 4))
    lpr = np.dot(x, np.r_[1, -1, 0, 0.5])
    mu = np.exp(lpr)
    lam = 10 * mu**(2 - p) / (2 - p)   # Poisson rate per observation
    alp = (2 - p) / (p - 1)            # gamma shape (shared by all obs)
    bet = 10 * mu**(1 - p) / (p - 1)   # gamma rate per observation

    # Generate Tweedie values using compound Poisson distribution:
    # y[i] is the sum of N[i] ~ Poisson(lam[i]) gamma variates with
    # shape alp and scale 1/bet[i].
    y = np.empty(n)
    N = np.random.poisson(lam)
    for i in range(n):
        y[i] = np.random.gamma(alp, 1 / bet[i], N[i]).sum()

    return y, x


@pytest.mark.filterwarnings("ignore:GLM ridge optimization")
def test_tweedie_EQL():
    # All tests below are regression tests, but the results
    # are very close to the population values.

    p = 1.5
    y, x = gen_tweedie(p)

    # Un-regularized fit using gradients
    fam = sm.families.Tweedie(var_power=p, eql=True)
    model1 = sm.GLM(y, x, family=fam)
    result1 = model1.fit(method="newton")
    assert_allclose(result1.params,
                    np.array([1.00350497, -0.99656954, 0.00802702,
                              0.50713209]),
                    rtol=1e-5, atol=1e-5)

    # Un-regularized fit using IRLS; must agree with the gradient fit.
    model1x = sm.GLM(y, x, family=fam)
    result1x = model1x.fit(method="irls")
    assert_allclose(result1.params, result1x.params)
    assert_allclose(result1.bse, result1x.bse, rtol=1e-2)

    # Lasso fit using coordinate-wise descent
    # TODO: The search gets trapped in an infinite oscillation, so use
    # a slack convergence tolerance.
model2 = sm.GLM(y, x, family=fam) result2 = model2.fit_regularized(L1_wt=1, alpha=0.07, maxiter=200, cnvrg_tol=0.01) rtol, atol = 1e-2, 1e-4 assert_allclose(result2.params, np.array([0.976831, -0.952854, 0., 0.470171]), rtol=rtol, atol=atol) # Series of ridge fits using gradients ev = (np.array([1.001778, -0.99388, 0.00797, 0.506183]), np.array([0.98586638, -0.96953481, 0.00749983, 0.4975267]), np.array([0.206429, -0.164547, 0.000235, 0.102489])) for j, alpha in enumerate([0.05, 0.5, 0.7]): model3 = sm.GLM(y, x, family=fam) result3 = model3.fit_regularized(L1_wt=0, alpha=alpha) assert_allclose(result3.params, ev[j], rtol=rtol, atol=atol) result4 = model3.fit_regularized(L1_wt=0, alpha=alpha * np.ones(x.shape[1])) assert_allclose(result4.params, result3.params, rtol=rtol, atol=atol) alpha = alpha * np.ones(x.shape[1]) alpha[0] = 0 result5 = model3.fit_regularized(L1_wt=0, alpha=alpha) assert not np.allclose(result5.params, result4.params) def test_tweedie_elastic_net(): # Check that the coefficients vanish one-by-one # when using the elastic net. p = 1.5 # Tweedie variance exponent y, x = gen_tweedie(p) # Un-regularized fit using gradients fam = sm.families.Tweedie(var_power=p, eql=True) model1 = sm.GLM(y, x, family=fam) nnz = [] for alpha in np.linspace(0, 10, 20): result1 = model1.fit_regularized(L1_wt=0.5, alpha=alpha) nnz.append((np.abs(result1.params) > 0).sum()) nnz = np.unique(nnz) assert len(nnz) == 5 def test_tweedie_EQL_poisson_limit(): # Test the limiting Poisson case of the Nelder/Pregibon/Tweedie # EQL. 
np.random.seed(3242)
    n = 500
    x = np.random.normal(size=(n, 3))
    x[:, 0] = 1                    # first column is the intercept
    lpr = 4 + x[:, 1:].sum(1)      # linear predictor, unit slopes
    mn = np.exp(lpr)
    y = np.random.poisson(mn)

    # With var_power=1 the EQL fit should reproduce the Poisson GLM
    # for every supported scale option.
    for scale in 1.0, 'x2', 'dev':

        # Un-regularized fit using gradients not IRLS
        fam = sm.families.Tweedie(var_power=1, eql=True)
        model1 = sm.GLM(y, x, family=fam)
        result1 = model1.fit(method="newton", scale=scale)

        # Poisson GLM
        model2 = sm.GLM(y, x, family=sm.families.Poisson())
        result2 = model2.fit(method="newton", scale=scale)

        assert_allclose(result1.params, result2.params, atol=1e-6, rtol=1e-6)
        # positional args after the arrays are rtol, atol
        assert_allclose(result1.bse, result2.bse, 1e-6, 1e-6)


def test_tweedie_EQL_upper_limit():
    # Test the limiting case of the Nelder/Pregibon/Tweedie
    # EQL with var = mean^2.  These are tests against population
    # values so accuracy is not high.

    np.random.seed(3242)
    n = 500

    x = np.random.normal(size=(n, 3))
    x[:, 0] = 1
    lpr = 4 + x[:, 1:].sum(1)
    mn = np.exp(lpr)
    y = np.random.poisson(mn)

    for scale in 'x2', 'dev', 1.0:

        # Un-regularized fit using gradients not IRLS
        fam = sm.families.Tweedie(var_power=2, eql=True)
        model1 = sm.GLM(y, x, family=fam)
        result1 = model1.fit(method="newton", scale=scale)
        # Population coefficients are [4, 1, 1]; tolerances are loose
        # by design (comparison is against population values).
        assert_allclose(result1.params, np.r_[4, 1, 1], atol=1e-3, rtol=1e-1)


def testTweediePowerEstimate():
    # Test the Pearson estimate of the Tweedie variance and scale
    # parameters.
    #
    # Ideally, this would match the following R code, but I cannot make it
    # work...
# # setwd('c:/workspace') # data <- read.csv('cpunish.csv', sep=",") # # library(tweedie) # # y <- c(1.00113835e+05, 6.89668315e+03, 6.15726842e+03, # 1.41718806e+03, 5.11776456e+02, 2.55369154e+02, # 1.07147443e+01, 3.56874698e+00, 4.06797842e-02, # 7.06996731e-05, 2.10165106e-07, 4.34276938e-08, # 1.56354040e-09, 0.00000000e+00, 0.00000000e+00, # 0.00000000e+00, 0.00000000e+00) # # data$NewY <- y # # out <- tweedie.profile( NewY ~ INCOME + SOUTH - 1, # p.vec=c(1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, # 1.9), link.power=0, # data=data,do.plot = TRUE) data = cpunish.load_pandas() y = [1.00113835e+05, 6.89668315e+03, 6.15726842e+03, 1.41718806e+03, 5.11776456e+02, 2.55369154e+02, 1.07147443e+01, 3.56874698e+00, 4.06797842e-02, 7.06996731e-05, 2.10165106e-07, 4.34276938e-08, 1.56354040e-09, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00] model1 = sm.GLM(y, data.exog[['INCOME', 'SOUTH']], family=sm.families.Tweedie(link=sm.families.links.log(), var_power=1.5)) res1 = model1.fit() model2 = sm.GLM((y - res1.mu) ** 2, np.column_stack((np.ones(len(res1.mu)), np.log(res1.mu))), family=sm.families.Gamma(sm.families.links.log())) res2 = model2.fit() # Sample may be too small for this... 
# assert_allclose(res1.scale, np.exp(res2.params[0]), rtol=0.25) p = model1.estimate_tweedie_power(res1.mu) assert_allclose(p, res2.params[1], rtol=0.25) def test_glm_lasso_6431(): # Based on issue #6431 # Fails with newton-cg as optimizer np.random.seed(123) from statsmodels.regression.linear_model import OLS n = 50 x = np.ones((n, 2)) x[:, 1] = np.arange(0, n) y = 1000 + x[:, 1] + np.random.normal(0, 1, n) params = np.r_[999.82244338, 1.0077889] for method in "bfgs", None: for fun in [OLS, GLM]: # Changing L1_wtValue from 0 to 1e-9 changes # the algorithm from scipy gradient optimization # to statsmodels coordinate descent for L1_wtValue in [0, 1e-9]: model = fun(y, x) if fun == OLS: fit = model.fit_regularized(alpha=0, L1_wt=L1_wtValue) else: fit = model._fit_ridge(alpha=0, start_params=None, method=method) assert_allclose(params, fit.params, atol=1e-6, rtol=1e-6) class TestRegularized: def test_regularized(self): import os from .results import glmnet_r_results for dtype in "binomial", "poisson": cur_dir = os.path.dirname(os.path.abspath(__file__)) data = np.loadtxt(os.path.join(cur_dir, "results", "enet_%s.csv" % dtype), delimiter=",") endog = data[:, 0] exog = data[:, 1:] fam = {"binomial" : sm.families.Binomial, "poisson" : sm.families.Poisson}[dtype] for j in range(9): vn = "rslt_%s_%d" % (dtype, j) r_result = getattr(glmnet_r_results, vn) L1_wt = r_result[0] alpha = r_result[1] params = r_result[2:] model = GLM(endog, exog, family=fam()) sm_result = model.fit_regularized(L1_wt=L1_wt, alpha=alpha) # Agreement is OK, see below for further check assert_allclose(params, sm_result.params, atol=1e-2, rtol=0.3) # The penalized log-likelihood that we are maximizing. def plf(params): llf = model.loglike(params) / len(endog) llf = llf - alpha * ((1 - L1_wt)*np.sum(params**2) / 2 + L1_wt*np.sum(np.abs(params))) return llf # Confirm that we are doing better than glmnet. 
llf_r = plf(params) llf_sm = plf(sm_result.params) assert_equal(np.sign(llf_sm - llf_r), 1) class TestConvergence: @classmethod def setup_class(cls): ''' Test Binomial family with canonical logit link using star98 dataset. ''' from statsmodels.datasets.star98 import load data = load() data.exog = add_constant(data.exog, prepend=False) cls.model = GLM(data.endog, data.exog, family=sm.families.Binomial()) def _when_converged(self, atol=1e-8, rtol=0, tol_criterion='deviance'): for i, dev in enumerate(self.res.fit_history[tol_criterion]): orig = self.res.fit_history[tol_criterion][i] new = self.res.fit_history[tol_criterion][i + 1] if np.allclose(orig, new, atol=atol, rtol=rtol): return i raise ValueError('CONVERGENCE CHECK: It seems this doens\'t converge!') def test_convergence_atol_only(self): atol = 1e-8 rtol = 0 self.res = self.model.fit(atol=atol, rtol=rtol) expected_iterations = self._when_converged(atol=atol, rtol=rtol) actual_iterations = self.res.fit_history['iteration'] # Note the first value is the list is np.inf. The second value # is the initial guess based off of start_params or the # estimate thereof. The third value (index = 2) is the actual "first # iteration" assert_equal(expected_iterations, actual_iterations) assert_equal(len(self.res.fit_history['deviance']) - 2, actual_iterations) def test_convergence_rtol_only(self): atol = 0 rtol = 1e-8 self.res = self.model.fit(atol=atol, rtol=rtol) expected_iterations = self._when_converged(atol=atol, rtol=rtol) actual_iterations = self.res.fit_history['iteration'] # Note the first value is the list is np.inf. The second value # is the initial guess based off of start_params or the # estimate thereof. 
The third value (index = 2) is the actual "first # iteration" assert_equal(expected_iterations, actual_iterations) assert_equal(len(self.res.fit_history['deviance']) - 2, actual_iterations) def test_convergence_atol_rtol(self): atol = 1e-8 rtol = 1e-8 self.res = self.model.fit(atol=atol, rtol=rtol) expected_iterations = self._when_converged(atol=atol, rtol=rtol) actual_iterations = self.res.fit_history['iteration'] # Note the first value is the list is np.inf. The second value # is the initial guess based off of start_params or the # estimate thereof. The third value (index = 2) is the actual "first # iteration" assert_equal(expected_iterations, actual_iterations) assert_equal(len(self.res.fit_history['deviance']) - 2, actual_iterations) def test_convergence_atol_only_params(self): atol = 1e-8 rtol = 0 self.res = self.model.fit(atol=atol, rtol=rtol, tol_criterion='params') expected_iterations = self._when_converged(atol=atol, rtol=rtol, tol_criterion='params') actual_iterations = self.res.fit_history['iteration'] # Note the first value is the list is np.inf. The second value # is the initial guess based off of start_params or the # estimate thereof. The third value (index = 2) is the actual "first # iteration" assert_equal(expected_iterations, actual_iterations) assert_equal(len(self.res.fit_history['deviance']) - 2, actual_iterations) def test_convergence_rtol_only_params(self): atol = 0 rtol = 1e-8 self.res = self.model.fit(atol=atol, rtol=rtol, tol_criterion='params') expected_iterations = self._when_converged(atol=atol, rtol=rtol, tol_criterion='params') actual_iterations = self.res.fit_history['iteration'] # Note the first value is the list is np.inf. The second value # is the initial guess based off of start_params or the # estimate thereof. 
The third value (index = 2) is the actual "first # iteration" assert_equal(expected_iterations, actual_iterations) assert_equal(len(self.res.fit_history['deviance']) - 2, actual_iterations) def test_convergence_atol_rtol_params(self): atol = 1e-8 rtol = 1e-8 self.res = self.model.fit(atol=atol, rtol=rtol, tol_criterion='params') expected_iterations = self._when_converged(atol=atol, rtol=rtol, tol_criterion='params') actual_iterations = self.res.fit_history['iteration'] # Note the first value is the list is np.inf. The second value # is the initial guess based off of start_params or the # estimate thereof. The third value (index = 2) is the actual "first # iteration" assert_equal(expected_iterations, actual_iterations) assert_equal(len(self.res.fit_history['deviance']) - 2, actual_iterations) def test_poisson_deviance(): # see #3355 missing term in deviance if resid_response.sum() != 0 np.random.seed(123987) nobs, k_vars = 50, 3-1 x = sm.add_constant(np.random.randn(nobs, k_vars)) mu_true = np.exp(x.sum(1)) y = np.random.poisson(mu_true, size=nobs) mod = sm.GLM(y, x[:, :], family=sm.genmod.families.Poisson()) res = mod.fit() d_i = res.resid_deviance d = res.deviance lr = (mod.family.loglike(y, y+1e-20) - mod.family.loglike(y, res.fittedvalues)) * 2 assert_allclose(d, (d_i**2).sum(), rtol=1e-12) assert_allclose(d, lr, rtol=1e-12) # case without constant, resid_response.sum() != 0 mod_nc = sm.GLM(y, x[:, 1:], family=sm.genmod.families.Poisson()) res_nc = mod_nc.fit() d_i = res_nc.resid_deviance d = res_nc.deviance lr = (mod.family.loglike(y, y+1e-20) - mod.family.loglike(y, res_nc.fittedvalues)) * 2 assert_allclose(d, (d_i**2).sum(), rtol=1e-12) assert_allclose(d, lr, rtol=1e-12) def test_non_invertible_hessian_fails_summary(): # Test when the hessian fails the summary is still available. 
data = cpunish.load_pandas()
    data.endog[:] = 1
    with warnings.catch_warnings():
        # we filter DomainWarning, the convergence problems
        # and warnings in summary
        warnings.simplefilter("ignore")
        mod = sm.GLM(data.endog, data.exog, family=sm.families.Gamma())
        # maxiter=1 / max_start_irls=0 deliberately produce a badly
        # converged fit (see the test name: non-invertible hessian);
        # summary() must still not raise.
        res = mod.fit(maxiter=1, method='bfgs', max_start_irls=0)
        res.summary()


def test_int_scale():
    # GH-6627, make sure it works with int scale
    data = longley.load()
    mod = GLM(data.endog, data.exog, family=sm.families.Gaussian())
    res = mod.fit(scale=1)
    assert isinstance(res.params, pd.Series)
    # even with an integer scale argument the stored scale is float
    assert res.scale.dtype == np.float64


@pytest.mark.parametrize("dtype", [np.int8, np.int16, np.int32, np.int64])
def test_int_exog(dtype):
    # GH-6627, make use of floats internally
    count1, n1, count2, n2 = 60, 51477.5, 30, 54308.7
    y = [count1, count2]
    x = np.asarray([[1, 1], [1, 0]]).astype(dtype)
    exposure = np.asarray([n1, n2])
    mod = GLM(y, x, exposure=exposure, family=sm.families.Poisson())
    res = mod.fit(method='bfgs', max_start_irls=0)
    assert isinstance(res.params, np.ndarray)


def test_glm_bic(iris):
    X = np.c_[np.ones(100), iris[50:, :4]]
    y = np.array(iris)[50:, 4].astype(np.int32)
    y -= 1
    # Toggle the module-level BIC definition and check both variants.
    SET_USE_BIC_LLF(True)
    model = GLM(y, X, family=sm.families.Binomial()).fit()
    # 34.9244 is what glm() of R yields
    assert_almost_equal(model.bic, 34.9244, decimal=3)
    assert_almost_equal(model.bic_llf, 34.9244, decimal=3)
    SET_USE_BIC_LLF(False)
    assert_almost_equal(model.bic, model.bic_deviance, decimal=3)
    # restore the default behavior for subsequent tests
    SET_USE_BIC_LLF(None)


def test_glm_bic_warning(iris):
    X = np.c_[np.ones(100), iris[50:, :4]]
    y = np.array(iris)[50:, 4].astype(np.int32)
    y -= 1
    model = GLM(y, X, family=sm.families.Binomial()).fit()
    # accessing bic under the default setting emits a FutureWarning
    with pytest.warns(FutureWarning, match="The bic"):
        assert isinstance(model.bic, float)


def test_output_exposure_null(reset_randomstate):
    # GH 6953
    x0 = [np.sin(i / 20) + 2 for i in range(1000)]
    rs = np.random.RandomState(0)
    # Variable exposures for each observation
    exposure = rs.randint(100, 200, size=1000)
    y = [np.sum(rs.poisson(x, size=e)) for x, e in
zip(x0, exposure)] x = add_constant(x0) model = GLM( endog=y, exog=x, exposure=exposure, family=sm.families.Poisson() ).fit() null_model = GLM( endog=y, exog=x[:, 0], exposure=exposure, family=sm.families.Poisson() ).fit() null_model_without_exposure = GLM( endog=y, exog=x[:, 0], family=sm.families.Poisson() ).fit() assert_allclose(model.llnull, null_model.llf) # Check that they are different assert np.abs(null_model_without_exposure.llf - model.llnull) > 1 def test_qaic(): # Example from documentation of R package MuMIn import patsy ldose = np.concatenate((np.arange(6), np.arange(6))) sex = ["M"]*6 + ["F"]*6 numdead = [10, 4, 9, 12, 18, 20, 0, 2, 6, 10, 12, 16] df = pd.DataFrame({"ldose": ldose, "sex": sex, "numdead": numdead}) df["numalive"] = 20 - df["numdead"] df["SF"] = df["numdead"] y = df[["numalive", "numdead"]].values x = patsy.dmatrix("sex*ldose", data=df, return_type='dataframe') m = GLM(y, x, family=sm.families.Binomial()) r = m.fit() scale = 2.412699 qaic = r.info_criteria(crit="qaic", scale=scale) # R gives 31.13266 because it uses a df that is 1 greater, # presumably because they count the scale parameter in df. # This won't matter when comparing models by differencing # QAICs. # Binomial doesn't have a scale parameter, so adding +1 is not correct. 
assert_allclose(qaic, 29.13266, rtol=1e-5, atol=1e-5)
    # With dk_params=1 (counting the scale parameter as a df) we
    # reproduce R's value.
    qaic1 = r.info_criteria(crit="qaic", scale=scale, dk_params=1)
    assert_allclose(qaic1, 31.13266, rtol=1e-5, atol=1e-5)


def test_tweedie_score():
    # Check the analytic Tweedie EQL score and hessian against
    # complex-step numerical derivatives of the log-likelihood.
    np.random.seed(3242)
    n = 500
    x = np.random.normal(size=(n, 4))
    lpr = np.dot(x, np.r_[1, -1, 0, 0.5])
    mu = np.exp(lpr)

    # Compound Poisson/gamma draw with variance power p0.
    p0 = 1.5
    lam = 10 * mu**(2 - p0) / (2 - p0)
    alp = (2 - p0) / (p0 - 1)
    bet = 10 * mu**(1 - p0) / (p0 - 1)
    y = np.empty(n)
    N = np.random.poisson(lam)
    for i in range(n):
        y[i] = np.random.gamma(alp, 1 / bet[i], N[i]).sum()

    for p in [1, 1.5, 2]:
        fam = sm.families.Tweedie(var_power=p, eql=True)
        model = GLM(y, x, family=fam)
        result = model.fit()

        # Evaluate the derivatives away from the MLE so they are nonzero.
        pa = result.params + 0.2*np.random.normal(size=result.params.size)

        ngrad = approx_fprime_cs(pa, lambda x: model.loglike(x, scale=1))
        agrad = model.score(pa, scale=1)
        assert_allclose(ngrad, agrad, atol=1e-8, rtol=1e-8)

        nhess = approx_hess_cs(pa, lambda x: model.loglike(x, scale=1))
        ahess = model.hessian(pa, scale=1)
        assert_allclose(nhess, ahess, atol=5e-8, rtol=5e-8)
{ "content_hash": "33ec9d847340de46cc00427d6faa09ea", "timestamp": "", "source": "github", "line_count": 2620, "max_line_length": 112, "avg_line_length": 38.48664122137404, "alnum_prop": 0.5722021123617791, "repo_name": "bashtage/statsmodels", "id": "6750bbb386050e41365c6ec3415d2537a86fa438", "size": "100835", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "statsmodels/genmod/tests/test_glm.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "AGS Script", "bytes": "457842" }, { "name": "Assembly", "bytes": "10035" }, { "name": "Batchfile", "bytes": "625" }, { "name": "C", "bytes": "381" }, { "name": "Cython", "bytes": "225838" }, { "name": "Fortran", "bytes": "16671" }, { "name": "HTML", "bytes": "148470" }, { "name": "MATLAB", "bytes": "100525" }, { "name": "Python", "bytes": "14433387" }, { "name": "R", "bytes": "106569" }, { "name": "Shell", "bytes": "25329" }, { "name": "Stata", "bytes": "50129" } ], "symlink_target": "" }
<?php namespace Ekyna\Bundle\ProductBundle\Repository; use Ekyna\Component\Resource\Doctrine\ORM\TranslatableResourceRepository; /** * Class OptionRepository * @package Ekyna\Bundle\ProductBundle\Repository * @author Etienne Dauvergne <contact@ekyna.com> */ class OptionRepository extends TranslatableResourceRepository { }
{ "content_hash": "6eb7e91fd3722d5462a8c01cb7e4bde9", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 73, "avg_line_length": 23.714285714285715, "alnum_prop": 0.8102409638554217, "repo_name": "ekyna/ProductBundle", "id": "6d643b7fc3b324f0d60a8c30fcba5edcfb48dc07", "size": "332", "binary": false, "copies": "1", "ref": "refs/heads/0.7", "path": "Repository/OptionRepository.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "150" }, { "name": "Gherkin", "bytes": "12108" }, { "name": "JavaScript", "bytes": "104324" }, { "name": "Less", "bytes": "54477" }, { "name": "PHP", "bytes": "1578420" }, { "name": "Twig", "bytes": "249980" }, { "name": "TypeScript", "bytes": "740" } ], "symlink_target": "" }
<?php namespace luya\cms\models; use Yii; use luya\admin\models\User; use yii\helpers\Json; use luya\cms\admin\Module; use yii\base\InvalidParamException; /** * Eventer-Logger for CMS Activitys * * @property integer $id * @property integer $user_id * @property integer $is_insertion * @property integer $is_update * @property integer $is_deletion * @property integer $timestamp * @property string $message * @property string $data_json * @property string $table_name * @property integer $row_id * * @author Basil Suter <basil@nadar.io> * @since 1.0.0 */ class Log extends \yii\db\ActiveRecord { /** * @inheritdoc */ public function init() { parent::init(); $this->on(self::EVENT_BEFORE_INSERT, [$this, 'onBeforeInsert']); } public function onBeforeInsert() { $this->timestamp = time(); $this->user_id = (Yii::$app instanceof \luya\web\Application) ? Yii::$app->adminuser->getId() : 0; $this->data_json = json_encode($this->data_json); } public function getMessageArray() { try { return Json::decode($this->message); } catch (InvalidParamException $err) { return []; } } public function getRowDescriber() { if (!empty($this->row_id)) { switch ($this->table_name) { case "nav": return Nav::findOne($this->row_id)->activeLanguageItem->title; case "nav_item": return NavItem::findOne($this->row_id)->title; case "cms_nav_item_page_block_item": $block = NavItemPageBlockItem::findOne($this->row_id); if (!$block || $block->block == null) { $arr = $this->getMessageArray(); if (!empty($arr) && isset($arr['blockName'])) { return $arr['blockName'] . " ({$arr['pageTitle']})"; } else { return; } } return $block->block->getNameForLog() . " (" .$block->droppedPageTitle. 
")"; } } } /** * @inheritdoc */ public static function tableName() { return 'cms_log'; } /** * @inheritdoc */ public function attributeLabels() { return [ 'id' => 'ID', 'user_id' => 'User ID', 'is_insertion' => 'Is Insertion', 'is_update' => 'Is Update', 'is_deletion' => 'Is Deletion', 'timestamp' => 'Timestamp', 'message' => 'Message', 'data_json' => 'Data Json', 'table_name' => 'Table Name', 'row_id' => 'Row ID', ]; } /** * @inheritdoc */ public function rules() { return [ [['is_insertion', 'is_deletion', 'is_update', 'message', 'data_json', 'row_id', 'table_name'], 'safe'], ]; } /** * @inheritdoc */ public function fields() { return [ 'is_insertion', 'is_update', 'is_deletion', 'timestamp', 'action', 'user', ]; } public function getAction() { if ($this->is_insertion) { switch ($this->table_name) { case "nav_item": return Module::t('log_action_insert_cms_nav_item', ['info' => $this->rowDescriber]); case "nav": return Module::t('log_action_insert_cms_nav', ['info' => $this->rowDescriber]); case "cms_nav_item_page_block_item": return Module::t('log_action_insert_cms_nav_item_page_block_item', ['info' => $this->rowDescriber]); default: return Module::t('log_action_insert_unkown', ['info' => $this->rowDescriber]); } } if ($this->is_update) { switch ($this->table_name) { case "nav_item": return Module::t('log_action_update_cms_nav_item', ['info' => $this->rowDescriber]); case "nav": return Module::t('log_action_update_cms_nav', ['info' => $this->rowDescriber]); case "cms_nav_item_page_block_item": return Module::t('log_action_update_cms_nav_item_page_block_item', ['info' => $this->rowDescriber]); default: return Module::t('log_action_update_unkown', ['info' => $this->rowDescriber]); } } if ($this->is_deletion) { switch ($this->table_name) { case "nav_item": return Module::t('log_action_delete_cms_nav_item', ['info' => $this->rowDescriber]); case "nav": return Module::t('log_action_delete_cms_nav', ['info' => $this->rowDescriber]); case "cms_nav_item_page_block_item": 
return Module::t('log_action_delete_cms_nav_item_page_block_item', ['info' => $this->rowDescriber]); default: return Module::t('log_action_delete_unkown'); } } } /** * * @return \yii\db\ActiveQuery */ public function getUser() { return $this->hasOne(User::className(), ['id' => 'user_id']); } /** * * @param integer $type Types of message: * + 1 = insertion * + 2 = update * + 3 = deletion * @param array $message * @param string $tableName * @param integer $rowId * @param array $additionalData * @return boolean */ public static function add($type, array $message, $tableName, $rowId = 0, array $additionalData = []) { $model = new self(); $model->setAttributes([ 'is_insertion' => ($type == 1) ? true : false, 'is_update' => ($type == 2) ? true : false, 'is_deletion' => ($type == 3) ? true : false, 'table_name' => $tableName, 'row_id' => $rowId, 'message' => Json::encode($message), 'data_json' => $additionalData, ]); return $model->insert(false); } }
{ "content_hash": "6fa7393117a48527bddbd53ca9ad9d3e", "timestamp": "", "source": "github", "line_count": 207, "max_line_length": 120, "avg_line_length": 30.67632850241546, "alnum_prop": 0.4804724409448819, "repo_name": "nandes2062/luya", "id": "8ed5cc5936ba38641694d24b637d2ca75c04ed62", "size": "6350", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modules/cms/src/models/Log.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "382244" }, { "name": "HTML", "bytes": "45192" }, { "name": "JavaScript", "bytes": "258589" }, { "name": "PHP", "bytes": "2645927" }, { "name": "Shell", "bytes": "3345" } ], "symlink_target": "" }
'use strict'; module.exports = { db: 'mongodb://localhost/ship-test', port: 3001, app: { title: 'Ship - Test Environment' }, facebook: { clientID: process.env.FACEBOOK_ID || 'APP_ID', clientSecret: process.env.FACEBOOK_SECRET || 'APP_SECRET', callbackURL: '/auth/facebook/callback' }, twitter: { clientID: process.env.TWITTER_KEY || 'CONSUMER_KEY', clientSecret: process.env.TWITTER_SECRET || 'CONSUMER_SECRET', callbackURL: '/auth/twitter/callback' }, google: { clientID: process.env.GOOGLE_ID || 'APP_ID', clientSecret: process.env.GOOGLE_SECRET || 'APP_SECRET', callbackURL: '/auth/google/callback' }, linkedin: { clientID: process.env.LINKEDIN_ID || 'APP_ID', clientSecret: process.env.LINKEDIN_SECRET || 'APP_SECRET', callbackURL: '/auth/linkedin/callback' }, github: { clientID: process.env.GITHUB_ID || 'APP_ID', clientSecret: process.env.GITHUB_SECRET || 'APP_SECRET', callbackURL: '/auth/github/callback' }, mailer: { from: process.env.MAILER_FROM || 'MAILER_FROM', options: { service: process.env.MAILER_SERVICE_PROVIDER || 'MAILER_SERVICE_PROVIDER', auth: { user: process.env.MAILER_EMAIL_ID || 'MAILER_EMAIL_ID', pass: process.env.MAILER_PASSWORD || 'MAILER_PASSWORD' } } } };
{ "content_hash": "61ba4a50af303eb114b0e24e8369aaca", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 77, "avg_line_length": 28.65909090909091, "alnum_prop": 0.675654242664552, "repo_name": "exzeroex/shipping", "id": "7a24e9a231fdb76844a449842b11fe444ea66973", "size": "1261", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "config/env/test.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "500" }, { "name": "HTML", "bytes": "25044" }, { "name": "JavaScript", "bytes": "87677" }, { "name": "Perl", "bytes": "48" }, { "name": "Shell", "bytes": "414" } ], "symlink_target": "" }
import java.util.Iterator; import java.util.TreeSet; public class Sort13 { public static void main(String[] args) { // create ascending iterator Iterator<String> iterator = ascending("1", "13", "17", "2"); // displaying the Tree set data System.out.println("Tree set data in ascending order: "); while (iterator.hasNext()){ System.out.println(iterator.next() + " "); } } public static Iterator<String> ascending(String... args){ // creating a TreeSet TreeSet<String> treeadd = new TreeSet<String>(); // adding strings to set for(String each : args){ treeadd.add(each); } return treeadd.iterator(); } }
{ "content_hash": "3989d6ac5c9a1eae0d17c2778e0e2c20", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 64, "avg_line_length": 24.107142857142858, "alnum_prop": 0.64, "repo_name": "aas-integration/integration-test", "id": "d81597cf8de6d97a49dd7cc8d4ee2e444764be33", "size": "675", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "corpus/Sort13/src/Sort13.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "12214" }, { "name": "Python", "bytes": "47569" }, { "name": "Shell", "bytes": "5051" } ], "symlink_target": "" }
require File.dirname(__FILE__) + '/../spec_helper' describe SitemapHelper do #Delete this example and add some real ones or delete this file it "should include the SitemapHelper" do included_modules = self.metaclass.send :included_modules included_modules.should include(SitemapHelper) end end
{ "content_hash": "c9de3ff9b9ef9d90adb71e63ced7b008", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 65, "avg_line_length": 28.636363636363637, "alnum_prop": 0.7396825396825397, "repo_name": "jhnsntmthy/tgs_store", "id": "3b8b738df5442102828bfe0a85d23dd1439949db", "size": "315", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "vendor/extensions/sitemaps/spec/helpers/sitemap_helper_spec.rb", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "JavaScript", "bytes": "353857" }, { "name": "PHP", "bytes": "1052" }, { "name": "Ruby", "bytes": "437078" } ], "symlink_target": "" }
<?php use Celestial\Api\ApiProvider; use Celestial\Api\ApiResponse; class ServicesTestsHelper { public static function mockApi($methods = null, $callback = null) { if ($methods instanceof Closure) { $callback = $methods; $methods = null; } if (is_null($methods)) { $methods = ['request']; } $mock = Mockery::mock(ApiProvider::class.'['.implode(',', $methods).']', [ 'https://example.org', 'api-token' ]); if ($callback instanceof Closure) { $callback($mock); } return $mock; } public static function toApiResponse(array $data, int $code = 200) { $response = new ApiResponse($code); return $response->setResponseData($data); } }
{ "content_hash": "ea62a7ec5a370cad68174871cb027236", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 82, "avg_line_length": 22.444444444444443, "alnum_prop": 0.5420792079207921, "repo_name": "goolead/celestial-sdk", "id": "93be72de436ef8ffeddc201c297455508867a0d1", "size": "808", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/ServicesTestsHelper.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "145164" } ], "symlink_target": "" }
from re import compile, escape from ..scraper import _BasicScraper, _ParserScraper from ..helpers import indirectStarter, bounceStarter from ..util import tagre from .common import ComicControlScraper, WordPressScraper, WordPressNavi, WordPressWebcomic class Namesake(ComicControlScraper): url = 'http://namesakecomic.com/' stripUrl = url + 'comic/%s' firstStripUrl = stripUrl % 'the-journey-begins' class NatalieDee(_BasicScraper): url = 'http://www.nataliedee.com/' rurl = escape(url) stripUrl = url + '%s' firstStripUrl = stripUrl % '022806' imageSearch = compile(tagre("img", "src", r'(%s\d+/[^"]+)' % rurl, before="overflow")) prevSearch = compile(tagre("a", "href", r'([^"]+)') + "&lt;&lt; Yesterday") help = 'Index format: mmddyy' def namer(self, image_url, page_url): unused, date, filename = image_url.rsplit('/', 2) return '%s-%s' % (date, filename) class Nedroid(WordPressScraper): url = 'http://nedroid.com/' prevSearch = '//a[@rel="prev"]' class NeoCTC(_ParserScraper): url = 'http://www.hirezfox.com/neoctc/' stripUrl = url + 'd/%s.html' firstStripUrl = stripUrl % '20071205' imageSearch = '//img[contains(@src, "neoctc/comics")]' prevSearch = '//a[./img[@alt="Previous Day"]]' multipleImagesPerStrip = True class NeoEarth(_BasicScraper): url = 'http://www.neo-earth.com/NE/' stripUrl = url + 'index.php?date=%s' firstStripUrl = stripUrl % '2007-03-23' imageSearch = compile(r'<img src="(strips/.+?)"') prevSearch = compile(r'<a href="(.+?)">Previous</a>') help = 'Index format: yyyy-mm-dd' class NerfNow(WordPressScraper): url = 'https://www.nerfnow.com/' prevSearch = '//li[@id="nav_previous"]/a' class Newshounds(_ParserScraper): stripUrl = 'http://www.newshounds.com/%s.html' url = stripUrl % 'nh2/20140929' firstStripUrl = stripUrl % 'nh1/19971101' imageSearch = '//img[@class="ksc"]' prevSearch = '//a[./img[@alt="Previous comic"]]' endOfLife = True def getPrevUrl(self, url, data): # Add navigation link between comic and graphic novel if url == self.stripUrl % 'nh2/20070201': 
return self.stripUrl % 'nh1/20061208' return super(Newshounds, self).getPrevUrl(url, data) class NewWorld(WordPressScraper): url = ('https://web.archive.org/web/20190718012133/' 'http://www.tfsnewworld.com/') stripUrl = url + '%s/' firstStripUrl = stripUrl % '2007/08/30/63' prevSearch = '//a[@rel="prev"]' endOfLife = True help = 'Index format: yyyy/mm/dd/stripn' class NeverSatisfied(ComicControlScraper): url = 'https://www.neversatisfiedcomic.com/' stripUrl = url + 'comic/%s' firstStripUrl = stripUrl % 'never-satisfied' class NichtLustig(_BasicScraper): url = 'https://joscha.com/' starter = bounceStarter stripUrl = url + 'nichtlustig/%s/' firstStripUrl = stripUrl % '000501' lang = 'de' imageSearch = compile(tagre("img", "src", r'(https://joscha.com/data/media/cartoons/[0-9a-f-_]+.png)')) prevSearch = compile(tagre("a", "href", r'(https://joscha.com/nichtlustig/\d+/)', after="next")) nextSearch = compile(tagre("a", "href", r'(https://joscha.com/nichtlustig/\d+/)', after="prev")) help = 'Index format: yymmdd' def namer(self, image_url, page_url): unused, filename, unused2 = page_url.rsplit('/', 2) return '%s' % (filename) class Nicky510(WordPressNavi): url = ('https://web.archive.org/web/20160510215718/' 'http://www.nickyitis.com/') endOfLife = True class Nightshift(WordPressWebcomic): url = 'https://poecatcomix.com/nightshift-static/' stripUrl = 'https://poecatcomix.com/nightshift/%s/' firstStripUrl = stripUrl % 'ns-cover' imageSearch = '//div[contains(@class, "webcomic-media")]//img' adult = True def starter(self): # Build list of chapters for naming indexPage = self.getPage(self.url) self.chapters = indexPage.xpath('//a[./img[contains(@class, "attachment-large")]]/@href') latestPage = self.chapters[0] self.chapters = self.chapters[1:] self.currentChapter = len(self.chapters) return latestPage def namer(self, imageUrl, pageUrl): page = pageUrl.rstrip('/').rsplit('/', 1)[-1] page = page.replace('blood-brothers', 'bloodbrothers').replace('bb-2', 
'bb2').replace('ns7-', 'page-') filename = 'ns%d-%s.%s' % (self.currentChapter, page, imageUrl.rsplit('.', 1)[-1]) if pageUrl in self.chapters: self.currentChapter = self.currentChapter - 1 return filename class Nimona(_ParserScraper): url = ('https://web.archive.org/web/20141008095502/' 'http://gingerhaze.com/nimona/') stripUrl = url + 'comic/%s' firstStripUrl = stripUrl % "page-1" imageSearch = '//div[d:class("field-name-field-comic-page")]//img' prevSearch = '//a[img[contains(@src, "/comicdrop_prev_label")]]' endOfLife = True class NineToNine(_ParserScraper): url = 'https://www.tigerknight.com/99' stripUrl = url + '/%s' firstStripUrl = stripUrl % '2014-01-01' imageSearch = '//img[d:class("comic-image")]' prevSearch = '//a[./span[contains(text(), "Previous")]]' multipleImagesPerStrip = True class NobodyScores(_BasicScraper): url = 'http://nobodyscores.loosenutstudio.com/' rurl = escape(url) stripUrl = url + 'index.php?id=%s' firstStripUrl = stripUrl % '4' imageSearch = compile(tagre("img", "src", r'(%scomix/[^"]+)' % rurl)) multipleImagesPerStrip = True prevSearch = compile(r'<a href="(%sindex.php.+?)">the one before </a>' % rurl) help = 'Index format: nnn' class NoNeedForBushido(_ParserScraper): url = 'http://nn4b.com/' stripUrl = url + 'comic/%s' imageSearch = '//div[@id="comic-image"]//img' prevSearch = '//a[@rel="prev"]' help = 'Index format: nnn' class NonPlayerCharacter(_ParserScraper): url = 'https://www.lfg.co/' stripUrl = url + 'npc/tale/%s/' firstStripUrl = stripUrl % '1-1' imageSearch = '//div[@id="comic-img"]//img' prevSearch = '//a[@class="comic-nav-prev"]' latestSearch = '//div[@id="feature-npc-footer"]/a[contains(@href, "npc/tale/")]' starter = indirectStarter def namer(self, imageUrl, pageUrl): return pageUrl.rstrip('/').rsplit('/', 1)[-1] class NotAVillain(WordPressWebcomic): url = 'http://navcomic.com/' stripUrl = url + 'not-a-villain/%s/' firstStripUrl = stripUrl % 'v1-001' def namer(self, imageUrl, pageUrl): filename = imageUrl.rsplit('/', 
1)[-1] # Fix filenames missing "Page" if filename[2].isdigit(): filename = filename[0] + '-Page' + filename[2:] # Fix filenames of early comics filename = filename.replace('Page-', '1-Page') if filename.startswith('0-Page'): filename = '1' + filename[1:] return filename class NotInventedHere(_ParserScraper): url = 'http://notinventedhe.re/' stripUrl = url + 'on/%s' firstStripUrl = stripUrl % '2009-9-21' imageSearch = '//div[@id="comic-content"]//img' prevSearch = '//a[@id="nav-previous"]' help = 'Index format: yyyy-m-d' class Nukees(_BasicScraper): url = 'http://www.nukees.com/' stripUrl = url + 'd/%s' firstStripUrl = stripUrl % '19970121' imageSearch = compile(r'"comic".+?"(/comics/.+?)"') prevSearch = compile(r'"(/d/.+?)".+?previous') help = 'Index format: yyyymmdd.html'
{ "content_hash": "6c32111cdc30a43b0863fa5a17179acb", "timestamp": "", "source": "github", "line_count": 219, "max_line_length": 110, "avg_line_length": 34.56164383561644, "alnum_prop": 0.6230677764565993, "repo_name": "webcomics/dosage", "id": "8387bccf2ede1485404a84a7eff4dca631594991", "size": "7793", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "dosagelib/plugins/n.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "33" }, { "name": "Python", "bytes": "622985" }, { "name": "Shell", "bytes": "1363" } ], "symlink_target": "" }
use_inline_resources provides :zabbix_graph if defined? provides def whyrun_supported? true end TYPE = { normal: 0, stacked: 1, pie: 2, exploded: 3, }.freeze action :create do converge_by("Creating data for #{new_resource}") do graph_items = new_resource.graph_items.map do |gi| { key: gi[:key], color: gi[:color], yaxisside: gi[:yaxisside], } end graph = { height: new_resource.height, width: new_resource.width, gitems: graph_items, } if new_resource.graph_type raise "Graph type should be one of: #{TYPE.keys.join(', ')}" unless TYPE.keys.include? new_resource.graph_type graph[:graphtype] = TYPE[new_resource.graph_type] end add_data(node, node['fqdn'], graphs: { new_resource.name => graph, }) end end
{ "content_hash": "c47c993c8404fd63465c86ce27444ec7", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 116, "avg_line_length": 20.261904761904763, "alnum_prop": 0.5981198589894242, "repo_name": "chromko/test_lwrp_docker", "id": "15a1729a6adcdee519e026616a085b83fc52719f", "size": "2054", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "providers/graph.rb", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "3679" }, { "name": "Ruby", "bytes": "119060" } ], "symlink_target": "" }
package gosnmp import ( "fmt" "log" "net" "sync" "time" ) // // Sending Traps ie GoSNMP acting as an Agent // // SendTrap sends a SNMP Trap (v2c/v3 only) // // pdus[0] can a pdu of Type TimeTicks (with the desired uint32 epoch // time). Otherwise a TimeTicks pdu will be prepended, with time set to // now. This mirrors the behaviour of the Net-SNMP command-line tools. // // SendTrap doesn't wait for a return packet from the NMS (Network // Management Station). // // See also Listen() and examples for creating an NMS. func (x *GoSNMP) SendTrap(trap SnmpTrap) (result *SnmpPacket, err error) { var pdutype PDUType if len(trap.Variables) == 0 { return nil, fmt.Errorf("SendTrap requires at least 1 PDU") } if trap.Variables[0].Type == TimeTicks { // check is uint32 if _, ok := trap.Variables[0].Value.(uint32); !ok { return nil, fmt.Errorf("SendTrap TimeTick must be uint32") } } switch x.Version { case Version2c, Version3: pdutype = SNMPv2Trap if trap.Variables[0].Type != TimeTicks { now := uint32(time.Now().Unix()) timetickPDU := SnmpPDU{"1.3.6.1.2.1.1.3.0", TimeTicks, now, x.Logger} // prepend timetickPDU trap.Variables = append([]SnmpPDU{timetickPDU}, trap.Variables...) 
} case Version1: pdutype = Trap if len(trap.Enterprise) == 0 { return nil, fmt.Errorf("SendTrap for SNMPV1 requires an Enterprise OID") } if len(trap.AgentAddress) == 0 { return nil, fmt.Errorf("SendTrap for SNMPV1 requires an Agent Address") } default: err = fmt.Errorf("SendTrap doesn't support %s", x.Version) return nil, err } packetOut := x.mkSnmpPacket(pdutype, trap.Variables, 0, 0) if x.Version == Version1 { packetOut.Enterprise = trap.Enterprise packetOut.AgentAddress = trap.AgentAddress packetOut.GenericTrap = trap.GenericTrap packetOut.SpecificTrap = trap.SpecificTrap packetOut.Timestamp = trap.Timestamp } // all sends wait for the return packet, except for SNMPv2Trap // -> wait is false return x.send(packetOut, false) } // // Receiving Traps ie GoSNMP acting as an NMS (Network Management // Station). // // GoSNMP.unmarshal() currently only handles SNMPv2Trap (ie v2c, v3) // // A TrapListener defines parameters for running a SNMP Trap receiver. // nil values will be replaced by default values. type TrapListener struct { sync.Mutex OnNewTrap func(s *SnmpPacket, u *net.UDPAddr) Params *GoSNMP // These unexported fields are for letting test cases // know we are ready. conn *net.UDPConn finish chan bool done chan bool listening chan bool } // NewTrapListener returns an initialized TrapListener. func NewTrapListener() *TrapListener { tl := &TrapListener{} tl.finish = make(chan bool) tl.done = make(chan bool) // Buffered because one doesn't have to block on it. tl.listening = make(chan bool, 1) return tl } // Listening returns a sentinel channel on which one can block // until the listener is ready to receive requests. 
func (t *TrapListener) Listening() <-chan bool { t.Lock() defer t.Unlock() return t.listening } // Close terminates the listening on TrapListener socket func (t *TrapListener) Close() { t.conn.Close() t.finish <- true <-t.done } // Listen listens on the UDP address addr and calls the OnNewTrap // function specified in *TrapListener for every trap received. func (t *TrapListener) Listen(addr string) (err error) { if t.Params == nil { t.Params = Default } t.Params.validateParameters() if t.OnNewTrap == nil { t.OnNewTrap = debugTrapHandler } udpAddr, err := net.ResolveUDPAddr("udp", addr) if err != nil { return err } conn, err := net.ListenUDP("udp", udpAddr) if err != nil { return err } t.conn = conn defer conn.Close() // Mark that we are listening now. t.listening <- true for { select { case <-t.finish: t.done <- true return default: var buf [4096]byte rlen, remote, err := conn.ReadFromUDP(buf[:]) if err != nil { t.Params.logPrintf("TrapListener: error in read %s\n", err) } msg := buf[:rlen] traps := t.Params.UnmarshalTrap(msg) if traps != nil { t.OnNewTrap(traps, remote) } } } } // Default trap handler func debugTrapHandler(s *SnmpPacket, u *net.UDPAddr) { log.Printf("got trapdata from %+v: %+v\n", u, s) } // UnmarshalTrap unpacks the SNMP Trap. func (x *GoSNMP) UnmarshalTrap(trap []byte) (result *SnmpPacket) { result = new(SnmpPacket) if x.SecurityParameters != nil { result.SecurityParameters = x.SecurityParameters.Copy() } cursor, err := x.unmarshalHeader(trap, result) if err != nil { x.logPrintf("UnmarshalTrap: %s\n", err) return nil } if result.Version == Version3 { if result.SecurityModel == UserSecurityModel { err = x.testAuthentication(trap, result) if err != nil { x.logPrintf("UnmarshalTrap v3 auth: %s\n", err) return nil } } trap, cursor, err = x.decryptPacket(trap, cursor, result) } err = x.unmarshalPayload(trap, cursor, result) if err != nil { x.logPrintf("UnmarshalTrap: %s\n", err) return nil } return result }
{ "content_hash": "0d661abf9abd3bfcfeb1cb4d0fa0a311", "timestamp": "", "source": "github", "line_count": 209, "max_line_length": 75, "avg_line_length": 24.177033492822968, "alnum_prop": 0.6873144666534732, "repo_name": "qiniu/logkit", "id": "67923789ec22594a3140219fdc15a4a788ef1add", "size": "5223", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "vendor/github.com/soniah/gosnmp/trap.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "1875" }, { "name": "CSS", "bytes": "4581" }, { "name": "Dockerfile", "bytes": "683" }, { "name": "Go", "bytes": "2606689" }, { "name": "HTML", "bytes": "4256" }, { "name": "JavaScript", "bytes": "157768" }, { "name": "Makefile", "bytes": "264" }, { "name": "Python", "bytes": "10544" }, { "name": "Shell", "bytes": "1418" } ], "symlink_target": "" }
using System.Reflection; using System.Runtime.InteropServices; [assembly: AssemblyTitle("CoApp.VSE.VisualStudio.Tools")] [assembly: Guid("bce9242e-ccbd-482c-bfff-a70fa348aa20")]
{ "content_hash": "a973a175fdef3da52f7a018738e26e7f", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 57, "avg_line_length": 35.8, "alnum_prop": 0.8044692737430168, "repo_name": "henjuv/coapp-vse", "id": "660d8ac68d0dc4fe8ca2df08aa07074ceb6908e8", "size": "181", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "VsExtension/Properties/AssemblyInfo.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "281117" }, { "name": "JavaScript", "bytes": "1593" } ], "symlink_target": "" }
class AbstractCompany < ActiveRecord::Base self.abstract_class = true end class Company < AbstractCompany self.sequence_name = :companies_nonstd_seq validates_presence_of :name has_one :account, foreign_key: "firm_id" has_one :dummy_account, foreign_key: "firm_id", class_name: "Account" has_many :contracts has_many :developers, through: :contracts has_many :special_contracts, -> { includes(:special_developer).where.not("developers.id": nil) } has_many :special_developers, through: :special_contracts has_many :comments, foreign_key: "company" alias_attribute :new_name, :name attribute :metadata, :json scope :of_first_firm, lambda { joins(account: :firm).where("companies.id": 1) } def arbitrary_method "I am Jack's profound disappointment" end private def private_method "I am Jack's innermost fears and aspirations" end class SpecialCo < Company end end module Namespaced class Company < ::Company end class Firm < ::Company has_many :clients, class_name: "Namespaced::Client" end class Client < ::Company end end class Firm < Company to_param :name has_many :clients, -> { order "id" }, dependent: :destroy, before_remove: :log_before_remove, after_remove: :log_after_remove has_many :unsorted_clients, class_name: "Client" has_many :unsorted_clients_with_symbol, class_name: :Client has_many :clients_sorted_desc, -> { order "id DESC" }, class_name: "Client" has_many :clients_of_firm, -> { order "id" }, foreign_key: "client_of", class_name: "Client", inverse_of: :firm has_many :clients_ordered_by_name, -> { order "name" }, class_name: "Client" has_many :unvalidated_clients_of_firm, foreign_key: "client_of", class_name: "Client", validate: false has_many :dependent_clients_of_firm, -> { order "id" }, foreign_key: "client_of", class_name: "Client", dependent: :destroy has_many :exclusively_dependent_clients_of_firm, -> { order "id" }, foreign_key: "client_of", class_name: "Client", dependent: :delete_all has_many :limited_clients, -> { limit 1 }, class_name: "Client" has_many 
:clients_with_interpolated_conditions, ->(firm) { where "rating > #{firm.rating}" }, class_name: "Client" has_many :clients_like_ms, -> { where("name = 'Microsoft'").order("id") }, class_name: "Client" has_many :clients_like_ms_with_hash_conditions, -> { where(name: "Microsoft").order("id") }, class_name: "Client" has_many :plain_clients, class_name: "Client" has_many :clients_using_primary_key, class_name: "Client", primary_key: "name", foreign_key: "firm_name" has_many :clients_using_primary_key_with_delete_all, class_name: "Client", primary_key: "name", foreign_key: "firm_name", dependent: :delete_all has_many :clients_grouped_by_firm_id, -> { group("firm_id").select("firm_id") }, class_name: "Client" has_many :clients_grouped_by_name, -> { group("name").select("name") }, class_name: "Client" has_one :account, foreign_key: "firm_id", dependent: :destroy, validate: true has_one :unvalidated_account, foreign_key: "firm_id", class_name: "Account", validate: false has_one :account_with_select, -> { select("id, firm_id") }, foreign_key: "firm_id", class_name: "Account" has_one :readonly_account, -> { readonly }, foreign_key: "firm_id", class_name: "Account" # added order by id as in fixtures there are two accounts for Rails Core # Oracle tests were failing because of that as the second fixture was selected has_one :account_using_primary_key, -> { order("id") }, primary_key: "firm_id", class_name: "Account" has_one :account_using_foreign_and_primary_keys, foreign_key: "firm_name", primary_key: "name", class_name: "Account" has_one :account_with_inexistent_foreign_key, class_name: "Account", foreign_key: "inexistent" has_one :deletable_account, foreign_key: "firm_id", class_name: "Account", dependent: :delete has_one :account_limit_500_with_hash_conditions, -> { where credit_limit: 500 }, foreign_key: "firm_id", class_name: "Account" has_one :unautosaved_account, foreign_key: "firm_id", class_name: "Account", autosave: false has_many :accounts has_many 
:unautosaved_accounts, foreign_key: "firm_id", class_name: "Account", autosave: false has_many :association_with_references, -> { references(:foo) }, class_name: "Client" has_many :developers_with_select, -> { select("id, name, first_name") }, class_name: "Developer" has_one :lead_developer, class_name: "Developer" has_many :projects def log @log ||= [] end private def log_before_remove(record) log << "before_remove#{record.id}" end def log_after_remove(record) log << "after_remove#{record.id}" end end class DependentFirm < Company has_one :account, -> { order(:id) }, foreign_key: "firm_id", dependent: :nullify has_many :companies, foreign_key: "client_of", dependent: :nullify has_one :company, foreign_key: "client_of", dependent: :nullify end class RestrictedWithExceptionFirm < Company has_one :account, -> { order("id") }, foreign_key: "firm_id", dependent: :restrict_with_exception has_many :companies, -> { order("id") }, foreign_key: "client_of", dependent: :restrict_with_exception end class RestrictedWithErrorFirm < Company has_one :account, -> { order("id") }, foreign_key: "firm_id", dependent: :restrict_with_error has_many :companies, -> { order("id") }, foreign_key: "client_of", dependent: :restrict_with_error end class Agency < Firm has_many :projects, foreign_key: :firm_id accepts_nested_attributes_for :projects end class Client < Company belongs_to :firm, foreign_key: "client_of" belongs_to :firm_with_basic_id, class_name: "Firm", foreign_key: "firm_id" belongs_to :firm_with_select, -> { select("id") }, class_name: "Firm", foreign_key: "firm_id" belongs_to :firm_with_other_name, class_name: "Firm", foreign_key: "client_of" belongs_to :firm_with_condition, -> { where "1 = ?", 1 }, class_name: "Firm", foreign_key: "client_of" belongs_to :firm_with_primary_key, class_name: "Firm", primary_key: "name", foreign_key: "firm_name" belongs_to :firm_with_primary_key_symbols, class_name: "Firm", primary_key: :name, foreign_key: :firm_name belongs_to :readonly_firm, 
-> { readonly }, class_name: "Firm", foreign_key: "firm_id" belongs_to :bob_firm, -> { where name: "Bob" }, class_name: "Firm", foreign_key: "client_of" has_many :accounts, through: :firm, source: :accounts belongs_to :account validate do firm end class RaisedOnSave < RuntimeError; end attr_accessor :raise_on_save before_save do raise RaisedOnSave if raise_on_save end attr_accessor :throw_on_save before_save do throw :abort if throw_on_save end attr_accessor :rollback_on_save after_save do raise ActiveRecord::Rollback if rollback_on_save end attr_accessor :rollback_on_create_called after_rollback(on: :create) do |client| client.rollback_on_create_called = true end class RaisedOnDestroy < RuntimeError; end attr_accessor :raise_on_destroy before_destroy do raise RaisedOnDestroy if raise_on_destroy end # Record destruction so we can test whether firm.clients.clear has # is calling client.destroy, deleting from the database, or setting # foreign keys to NULL. def self.destroyed_client_ids @destroyed_client_ids ||= Hash.new { |h, k| h[k] = [] } end before_destroy do |client| if client.firm Client.destroyed_client_ids[client.firm.id] << client.id end true end before_destroy :overwrite_to_raise # Used to test that read and question methods are not generated for these attributes def rating? 
query_attribute :rating end def overwrite_to_raise end end class ExclusivelyDependentFirm < Company has_one :account, foreign_key: "firm_id", dependent: :delete has_many :dependent_sanitized_conditional_clients_of_firm, -> { order("id").where("name = 'BigShot Inc.'") }, foreign_key: "client_of", class_name: "Client", dependent: :delete_all has_many :dependent_hash_conditional_clients_of_firm, -> { order("id").where(name: "BigShot Inc.") }, foreign_key: "client_of", class_name: "Client", dependent: :delete_all has_many :dependent_conditional_clients_of_firm, -> { order("id").where("name = ?", "BigShot Inc.") }, foreign_key: "client_of", class_name: "Client", dependent: :delete_all end class LargeClient < Client attribute :extra_size, :integer after_initialize :set_extra_size def set_extra_size self[:extra_size] = 50 end end class SpecialClient < Client end class VerySpecialClient < SpecialClient end class NewlyContractedCompany < Company has_many :new_contracts, foreign_key: "company_id" before_save do self.new_contracts << NewContract.new end end require "models/account"
{ "content_hash": "0bf8a58f8dae516ca74890c7d0c9bd07", "timestamp": "", "source": "github", "line_count": 234, "max_line_length": 182, "avg_line_length": 37.675213675213676, "alnum_prop": 0.6928312159709619, "repo_name": "yahonda/rails", "id": "4d0d7db5570285d747792f768a80549137eece3f", "size": "8847", "binary": false, "copies": "5", "ref": "refs/heads/main", "path": "activerecord/test/models/company.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "56339" }, { "name": "CoffeeScript", "bytes": "24687" }, { "name": "Dockerfile", "bytes": "2639" }, { "name": "HTML", "bytes": "93448" }, { "name": "JavaScript", "bytes": "558590" }, { "name": "Ruby", "bytes": "15188676" }, { "name": "Shell", "bytes": "5093" }, { "name": "Yacc", "bytes": "1003" } ], "symlink_target": "" }
<?php // Copyright (c) 2003-2012, CKSource - Frederico Knabben. All rights reserved. // For licensing, see LICENSE.html or http://ckfinder.com/license // Defines the object for the Croatian language. $GLOBALS['CKFLang'] = array ( 'ErrorUnknown' => 'Nije moguće završiti zahtjev. (Greška %1)', 'Errors' => array ( '10' => 'Nepoznata naredba.', '11' => 'Nije navedena vrsta u zahtjevu.', '12' => 'Zatražena vrsta nije važeća.', '102' => 'Neispravno naziv datoteke ili direktoija.', '103' => 'Nije moguće izvršiti zahtjev zbog ograničenja pristupa.', '104' => 'Nije moguće izvršiti zahtjev zbog ograničenja postavka sustava.', '105' => 'Nedozvoljena vrsta datoteke.', '109' => 'Nedozvoljen zahtjev.', '110' => 'Nepoznata greška.', '111' => 'It was not possible to complete the request due to resulting file size.', '115' => 'Datoteka ili direktorij s istim nazivom već postoji.', '116' => 'Direktorij nije pronađen. Osvježite stranicu i pokušajte ponovo.', '117' => 'Datoteka nije pronađena. Osvježite listu datoteka i pokušajte ponovo.', '118' => 'Putanje izvora i odredišta su jednake.', '201' => 'Datoteka s istim nazivom već postoji. Poslana datoteka je promjenjena u "%1".', '202' => 'Neispravna datoteka.', '203' => 'Neispravna datoteka. Veličina datoteke je prevelika.', '204' => 'Poslana datoteka je neispravna.', '205' => 'Ne postoji privremeni direktorij za slanje na server.', '206' => 'Slanje je poništeno zbog sigurnosnih postavki. Naziv datoteke sadrži HTML podatke.', '207' => 'Poslana datoteka je promjenjena u "%1".', '300' => 'Premještanje datoteke(a) nije uspjelo.', '301' => 'Kopiranje datoteke(a) nije uspjelo.', '500' => 'Pretraživanje datoteka nije dozvoljeno iz sigurnosnih razloga. Molimo kontaktirajte administratora sustava kako bi provjerili postavke CKFinder konfiguracijske datoteke.', '501' => 'The thumbnails support is disabled.', ) );
{ "content_hash": "101cc42a1a6a93db249369382b3d15b4", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 183, "avg_line_length": 54.388888888888886, "alnum_prop": 0.686414708886619, "repo_name": "fuhongliang/2015weitonghui", "id": "2e0ebe69e73ca7930946a4d458df4b585bc1999b", "size": "1985", "binary": false, "copies": "20", "ref": "refs/heads/master", "path": "Other/cms/editor/ckfinder/core/connector/php/lang/hr.php", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "30814" }, { "name": "ActionScript", "bytes": "28251" }, { "name": "ApacheConf", "bytes": "912" }, { "name": "CSS", "bytes": "11665404" }, { "name": "HTML", "bytes": "8852271" }, { "name": "Java", "bytes": "11028" }, { "name": "JavaScript", "bytes": "17601942" }, { "name": "PHP", "bytes": "4103223" }, { "name": "Ruby", "bytes": "841" }, { "name": "Smarty", "bytes": "3562" } ], "symlink_target": "" }
using namespace PlayFabComboSdk; using namespace rapidjson; enum PlayFabApiTestActiveState { PENDING, // Not started ACTIVE, // Currently testing READY, // An answer is sent by the http thread, but the main thread hasn't finalized the test yet COMPLETE, // Test is finalized and recorded ABORTED // todo }; enum PlayFabApiTestFinishState { PASSED, FAILED, SKIPPED, TIMEDOUT }; struct PfTestContext { PfTestContext(AZStd::string name, void(*func)(PfTestContext& context)) : testName(name), activeState(PENDING), finishState(TIMEDOUT), testResultMsg(), testFunc(func), startTime(0), endTime(0) { }; const AZStd::string testName; PlayFabApiTestActiveState activeState; PlayFabApiTestFinishState finishState; AZStd::string testResultMsg; void(*testFunc)(PfTestContext& context); time_t startTime; time_t endTime; AZStd::string GenerateTestSummary(time_t now) { time_t tempEndTime = (activeState == COMPLETE) ? endTime : now; time_t tempStartTime = (startTime != 0) ? startTime : now; AZStd::string temp; temp = std::to_string(tempEndTime - tempStartTime).c_str(); while (temp.length() < 12) temp = " " + temp; temp += " ms, "; switch (finishState) { case PASSED: temp += "pass: "; break; case FAILED: temp += "FAILED: "; break; case SKIPPED: temp += "SKIPPED: "; break; case TIMEDOUT: temp += "TIMED OUT: "; break; } temp += testName; if (testResultMsg.length() > 0) { temp += " - "; temp += testResultMsg; } return temp; } }; class PlayFabApiTests { public: static void InitializeTestSuite() { bool setupSuccessful = ClassSetup(); // Reset testContexts if this has already been run (The results are kept for later viewing) for (auto it = testContexts.begin(); it != testContexts.end(); ++it) delete *it; testContexts.clear(); if (setupSuccessful) { testContexts.insert(testContexts.end(), new PfTestContext("InvalidLogin", InvalidLogin)); testContexts.insert(testContexts.end(), new PfTestContext("InvalidRegistration", InvalidRegistration)); testContexts.insert(testContexts.end(), new 
PfTestContext("LoginOrRegister", LoginOrRegister)); testContexts.insert(testContexts.end(), new PfTestContext("LoginWithAdvertisingId", LoginWithAdvertisingId)); testContexts.insert(testContexts.end(), new PfTestContext("UserDataApi", UserDataApi)); testContexts.insert(testContexts.end(), new PfTestContext("UserStatisticsApi", PlayerStatisticsApi)); testContexts.insert(testContexts.end(), new PfTestContext("UserCharacter", UserCharacter)); testContexts.insert(testContexts.end(), new PfTestContext("LeaderBoard", LeaderBoard)); testContexts.insert(testContexts.end(), new PfTestContext("AccountInfo", AccountInfo)); testContexts.insert(testContexts.end(), new PfTestContext("CloudScript", CloudScript)); testContexts.insert(testContexts.end(), new PfTestContext("WriteEvent", WriteEvent)); } } static bool TickTestSuite() { int numPending; PlayFabCombo_ClientRequestBus::BroadcastResult(numPending, &PlayFabCombo_ClientRequests::GetPendingCalls); if (numPending > 0) return false; int unfinishedTests = 0; PfTestContext* nextTest = nullptr; for (auto it = testContexts.begin(); it != testContexts.end(); ++it) { auto eachState = (*it)->activeState; if (eachState != COMPLETE && eachState != ABORTED) unfinishedTests++; if (eachState == ACTIVE || eachState == READY) // Find the active test, and prioritize it nextTest = *it; else if (eachState == PENDING && nextTest == nullptr) // Or find a test to start nextTest = *it; } if (nextTest != nullptr && nextTest->activeState == PENDING) StartTest(*nextTest); else if (nextTest != nullptr) TickTest(*nextTest); bool result = unfinishedTests == 0; // Return whether tests are complete return result; } static AZStd::string GenerateTestSummary() { _outputSummary.clear(); time_t now = clock(); int numPassed = 0; int numFailed = 0; for (auto it = testContexts.begin(); it != testContexts.end(); ++it) { if (_outputSummary.length() != 0) _outputSummary += "\n"; _outputSummary += (*it)->GenerateTestSummary(now); if ((*it)->finishState == PASSED) 
numPassed++; else if ((*it)->finishState == FAILED) numFailed++; } AZStd::string testCountLine = "\nTotal tests: "; testCountLine += AZStd::string(std::to_string(testContexts.size()).c_str()); testCountLine += ", Passed: "; testCountLine += AZStd::string(std::to_string(numPassed).c_str()); testCountLine += ", Failed: "; testCountLine += AZStd::string(std::to_string(numFailed).c_str()); _outputSummary += testCountLine.c_str(); return _outputSummary; } private: static AZStd::string _outputSummary; // Basically a temp variable so I don't reallocate this constantly // A bunch of constants loaded from testTitleData.json static std::string TEST_TITLE_DATA_LOC; static AZStd::string buildIdentifier; static AZStd::string userEmail; const static AZStd::string TEST_DATA_KEY; const static AZStd::string TEST_STAT_NAME; static AZStd::string playFabId; static int testMessageInt; static time_t testMessageTime; static std::list<PfTestContext*> testContexts; static bool ClassSetup() { // README: // modify the TEST_TITLE_DATA_LOC to a location of a testTitleData.json file // The format of this file is described in the sdk readme // - OR - // Comment the "return false;" below, and // Fill in all the variables under: POPULATE THIS SECTION WITH REAL INFORMATION // Prefer to load path from environment variable, if present char* envPath = nullptr; size_t envPathStrLen; errno_t err = _dupenv_s(&envPath, &envPathStrLen, "PF_TEST_TITLE_DATA_JSON"); if (err == 0 && envPath != nullptr) TEST_TITLE_DATA_LOC = envPath; if (envPath != nullptr) free(envPath); std::ifstream titleInput; if (TEST_TITLE_DATA_LOC.length() > 0) titleInput.open(TEST_TITLE_DATA_LOC, std::ios::binary | std::ios::in); if (titleInput) { int begin = titleInput.tellg(); titleInput.seekg(0, std::ios::end); int end = titleInput.tellg(); char* titleData = new char[end - begin]; titleInput.seekg(0, std::ios::beg); titleInput.read(titleData, end - begin); titleData[end - begin] = '\0'; Document testInputs; 
testInputs.Parse<0>(titleData); SetTitleInfo(testInputs); titleInput.close(); } else { return false; // TODO: Put the info for your title here (Fallback in case it can't read from the file) // POPULATE THIS SECTION WITH REAL INFORMATION PlayFabCombo_SettingsRequestBus::Broadcast(&PlayFabCombo_SettingsRequests::SetTitleId, ""); // The titleId for your title, found in the "Settings" section of PlayFab Game Manager PlayFabCombo_SettingsRequestBus::Broadcast(&PlayFabCombo_SettingsRequests::SetDevSecretKey, ""); // The titleId for your title, found in the "Settings" section of PlayFab Game Manager userEmail = ""; // This is an email for any registered user (just so we can deliberately fail to log into it) } PlayFabCombo_SettingsRequestBus::BroadcastResult(buildIdentifier, &PlayFabCombo_SettingsRequests::GetBuildIdentifier); // Verify all the inputs won't cause crashes in the tests return static_cast<bool>(titleInput) // && !playFabSettings->titleId.empty() // && !playFabSettings->developerSecretKey.empty() && !buildIdentifier.empty() && !userEmail.empty(); } /// <summary> /// PlayFab Title cannot be created from SDK tests, so you must provide your titleId to run unit tests. 
/// (Also, we don't want lots of excess unused titles) /// </summary> static void SetTitleInfo(Document &testInputs) { // Parse all the inputs auto end = testInputs.MemberEnd(); auto each = testInputs.FindMember("titleId"); if (each != end) PlayFabCombo_SettingsRequestBus::Broadcast(&PlayFabCombo_SettingsRequests::SetTitleId, each->value.GetString()); each = testInputs.FindMember("developerSecretKey"); if (each != end) PlayFabCombo_SettingsRequestBus::Broadcast(&PlayFabCombo_SettingsRequests::SetDevSecretKey, each->value.GetString()); each = testInputs.FindMember("userEmail"); if (each != end) userEmail = each->value.GetString(); } // Start a test, and block until the threaded response arrives static void StartTest(PfTestContext& testContext) { testContext.activeState = ACTIVE; testContext.startTime = clock(); testContext.testFunc(testContext); // Async tests can't resolve this tick, so just return } static void TickTest(PfTestContext& testContext) { time_t now = clock(); if (testContext.activeState != READY // Not finished && (now - testContext.startTime) < 15000) // Not timed out return; testContext.endTime = now; testContext.activeState = COMPLETE; } // This should be called in the api-responses, which are threaded. This will allow TickTest to finalize the test static void EndTest(PfTestContext& testContext, PlayFabApiTestFinishState finishState, AZStd::string resultMsg) { testContext.testResultMsg = resultMsg; testContext.finishState = finishState; testContext.activeState = READY; } static void OnSharedError(const PlayFabError& error, void* customData) { PfTestContext* testContext = reinterpret_cast<PfTestContext*>(customData); EndTest(*testContext, FAILED, "Unexpected error: " + error.ErrorMessage); } /// <summary> /// CLIENT API /// Try to deliberately log in with an inappropriate password, /// and verify that the error displays as expected. 
/// </summary> static void InvalidLogin(PfTestContext& testContext) { ClientModels::LoginWithEmailAddressRequest request; request.Email = userEmail; request.Password = "INVALID"; EBUS_EVENT(PlayFabCombo_ClientRequestBus, LoginWithEmailAddress, request, InvalidLoginSuccess, InvalidLoginFail, &testContext); } static void InvalidLoginSuccess(const ClientModels::LoginResult& result, void* customData) { PfTestContext* testContext = reinterpret_cast<PfTestContext*>(customData); EndTest(*testContext, FAILED, "Expected login to fail"); } static void InvalidLoginFail(const PlayFabError& error, void* customData) { PfTestContext* testContext = reinterpret_cast<PfTestContext*>(customData); if (error.ErrorMessage.find("password") != -1) EndTest(*testContext, PASSED, ""); else EndTest(*testContext, FAILED, "Password error message not found: " + error.ErrorMessage); } /// <summary> /// CLIENT API /// Try to deliberately register a user with an invalid email and password /// Verify that errorDetails are populated correctly. 
/// </summary> static void InvalidRegistration(PfTestContext& testContext) { ClientModels::RegisterPlayFabUserRequest request; request.Username = "X"; request.Email = "x"; request.Password = "x"; EBUS_EVENT(PlayFabCombo_ClientRequestBus, RegisterPlayFabUser, request, InvalidRegistrationSuccess, InvalidRegistrationFail, &testContext); } static void InvalidRegistrationSuccess(const ClientModels::RegisterPlayFabUserResult& result, void* customData) { PfTestContext* testContext = reinterpret_cast<PfTestContext*>(customData); EndTest(*testContext, FAILED, "Expected registration to fail"); } static void InvalidRegistrationFail(const PlayFabError& error, void* customData) { bool foundEmailMsg, foundPasswordMsg; AZStd::string expectedEmailMsg = "Email address is not valid."; AZStd::string expectedPasswordMsg = "Password must be between"; AZStd::string errorConcat; for (auto it = error.ErrorDetails.begin(); it != error.ErrorDetails.end(); ++it) errorConcat += it->second; foundEmailMsg = (errorConcat.find(expectedEmailMsg) != -1); foundPasswordMsg = (errorConcat.find(expectedPasswordMsg) != -1); PfTestContext* testContext = reinterpret_cast<PfTestContext*>(customData); if (foundEmailMsg && foundPasswordMsg) EndTest(*testContext, PASSED, ""); else EndTest(*testContext, FAILED, "All error details: " + errorConcat); } /// <summary> /// CLIENT API /// Test a sequence of calls that modifies saved data, /// and verifies that the next sequential API call contains updated data. /// Verify that the data is correctly modified on the next call. 
/// Parameter types tested: string, Dictionary<string, string>, DateTime /// </summary> static void LoginOrRegister(PfTestContext& testContext) { ClientModels::LoginWithCustomIDRequest request; request.CustomId = buildIdentifier; request.CreateAccount = true; EBUS_EVENT(PlayFabCombo_ClientRequestBus, LoginWithCustomID, request, OnLoginOrRegister, OnSharedError, &testContext); } static void OnLoginOrRegister(const ClientModels::LoginResult& result, void* customData) { playFabId = result.PlayFabId; PfTestContext* testContext = reinterpret_cast<PfTestContext*>(customData); EndTest(*testContext, PASSED, ""); } /// <summary> /// CLIENT API /// Test that the login call sequence sends the AdvertisingId when set /// </summary> static void LoginWithAdvertisingId(PfTestContext& testContext) { // playFabSettings->advertisingIdType = playFabSettings->AD_TYPE_ANDROID_ID; // playFabSettings->advertisingIdValue = "PlayFabTestId"; ClientModels::LoginWithCustomIDRequest request; request.CustomId = buildIdentifier; request.CreateAccount = true; EBUS_EVENT(PlayFabCombo_ClientRequestBus, LoginWithCustomID, request, OnLoginWithAdvertisingId, OnSharedError, &testContext); } static void OnLoginWithAdvertisingId(const ClientModels::LoginResult& result, void* customData) { // TODO: Need to wait for the NEXT api call to complete, and then test PlayFabSettings::advertisingIdType PfTestContext* testContext = reinterpret_cast<PfTestContext*>(customData); EndTest(*testContext, PASSED, ""); } /// <summary> /// CLIENT API /// Test a sequence of calls that modifies saved data, /// and verifies that the next sequential API call contains updated data. /// Verify that the data is correctly modified on the next call. 
/// Parameter types tested: string, Dictionary<string, string>, DateTime /// </summary> static void UserDataApi(PfTestContext& testContext) { bool isLoggedIn = false; PlayFabCombo_ClientRequestBus::BroadcastResult(isLoggedIn, &PlayFabCombo_ClientRequests::IsClientLoggedIn); if (!isLoggedIn) { EndTest(testContext, SKIPPED, "Earlier tests failed to log in"); return; } ClientModels::GetUserDataRequest request; EBUS_EVENT(PlayFabCombo_ClientRequestBus, GetUserData, request, OnUserDataApiGet1, OnSharedError, &testContext); } static void OnUserDataApiGet1(const ClientModels::GetUserDataResult& result, void* customData) { auto it = result.Data.find(TEST_DATA_KEY); testMessageInt = (it == result.Data.end()) ? 1 : atoi(it->second.Value.c_str()); // testMessageTime = it->second.LastUpdated; // Don't need the first time testMessageInt = (testMessageInt + 1) % 100; ClientModels::UpdateUserDataRequest updateRequest; // itoa is not avaialable in android char buffer[16]; AZStd::string temp; sprintf(buffer, "%d", testMessageInt); temp.append(buffer); updateRequest.Data[TEST_DATA_KEY] = temp; EBUS_EVENT(PlayFabCombo_ClientRequestBus, UpdateUserData, updateRequest, OnUserDataApiUpdate, OnSharedError, customData); } static void OnUserDataApiUpdate(const ClientModels::UpdateUserDataResult& result, void* customData) { ClientModels::GetUserDataRequest request; EBUS_EVENT(PlayFabCombo_ClientRequestBus, GetUserData, request, OnUserDataApiGet2, OnSharedError, customData); } static void OnUserDataApiGet2(const ClientModels::GetUserDataResult& result, void* customData) { auto it = result.Data.find(TEST_DATA_KEY); int actualDataValue = (it == result.Data.end()) ? -1 : atoi(it->second.Value.c_str()); testMessageTime = (it == result.Data.end()) ? 
0 : it->second.LastUpdated; time_t now = time(nullptr); now = mktime(gmtime(&now)); time_t minTime = now - (60 * 5); time_t maxTime = now + (60 * 5); PfTestContext* testContext = reinterpret_cast<PfTestContext*>(customData); if (it == result.Data.end()) EndTest(*testContext, FAILED, "Expected user data not found."); else if (testMessageInt != actualDataValue) EndTest(*testContext, FAILED, "User data not updated as expected."); else if (!(minTime <= testMessageTime && testMessageTime <= maxTime)) EndTest(*testContext, FAILED, "DateTime not parsed correctly."); else EndTest(*testContext, PASSED, ""); } /// <summary> /// CLIENT API /// Test a sequence of calls that modifies saved data, /// and verifies that the next sequential API call contains updated data. /// Verify that the data is saved correctly, and that specific types are tested /// Parameter types tested: Dictionary<string, int> /// </summary> static void PlayerStatisticsApi(PfTestContext& testContext) { bool isLoggedIn = false; PlayFabCombo_ClientRequestBus::BroadcastResult(isLoggedIn, &PlayFabCombo_ClientRequests::IsClientLoggedIn); if (!isLoggedIn) { EndTest(testContext, SKIPPED, "Earlier tests failed to log in"); return; } ClientModels::GetPlayerStatisticsRequest getRequest; EBUS_EVENT(PlayFabCombo_ClientRequestBus, GetPlayerStatistics, getRequest, OnPlayerStatisticsApiGet1, OnSharedError, &testContext); } static void OnPlayerStatisticsApiGet1(const ClientModels::GetPlayerStatisticsResult& result, void* customData) { for (auto it = result.Statistics.begin(); it != result.Statistics.end(); ++it) if (it->StatisticName == TEST_STAT_NAME) testMessageInt = (it == result.Statistics.end()) ? 
1 : it->Value; // testMessageTime = it->second.LastUpdated; // Don't need the first time testMessageInt = (testMessageInt + 1) % 100; ClientModels::UpdatePlayerStatisticsRequest updateRequest; ClientModels::StatisticUpdate newStat; newStat.StatisticName = TEST_STAT_NAME; newStat.Value = testMessageInt; updateRequest.Statistics.push_back(newStat); EBUS_EVENT(PlayFabCombo_ClientRequestBus, UpdatePlayerStatistics, updateRequest, OnPlayerStatisticsApiUpdate, OnSharedError, customData); } static void OnPlayerStatisticsApiUpdate(const ClientModels::UpdatePlayerStatisticsResult& result, void* customData) { ClientModels::GetPlayerStatisticsRequest getRequest; EBUS_EVENT(PlayFabCombo_ClientRequestBus, GetPlayerStatistics, getRequest, OnPlayerStatisticsApiGet2, OnSharedError, customData); } static void OnPlayerStatisticsApiGet2(const ClientModels::GetPlayerStatisticsResult& result, void* customData) { int actualStatValue = -1; for (auto it = result.Statistics.begin(); it != result.Statistics.end(); ++it) if (it->StatisticName == TEST_STAT_NAME) actualStatValue = (it == result.Statistics.end()) ? 
1 : it->Value; PfTestContext* testContext = reinterpret_cast<PfTestContext*>(customData); if (actualStatValue == -1) EndTest(*testContext, FAILED, "Expected Player statistic not found."); else if (testMessageInt != actualStatValue) EndTest(*testContext, FAILED, "Player statistic not updated as expected."); else EndTest(*testContext, PASSED, ""); } /// <summary> /// CLIENT API /// Get or create the given test character for the given user /// Parameter types tested: Contained-Classes, string /// </summary> static void UserCharacter(PfTestContext& testContext) { ClientModels::ListUsersCharactersRequest request; EBUS_EVENT(PlayFabCombo_ClientRequestBus, GetAllUsersCharacters, request, OnUserCharacter, OnSharedError, &testContext); } static void OnUserCharacter(const ClientModels::ListUsersCharactersResult& result, void* customData) { PfTestContext* testContext = reinterpret_cast<PfTestContext*>(customData); EndTest(*testContext, PASSED, ""); } /// <summary> /// CLIENT AND SERVER API /// Test that leaderboard results can be requested /// Parameter types tested: List of contained-classes /// </summary> static void LeaderBoard(PfTestContext& testContext) { testMessageInt = 0; ClientModels::GetLeaderboardRequest clientRequest; clientRequest.MaxResultsCount = 3; clientRequest.StatisticName = TEST_STAT_NAME; EBUS_EVENT(PlayFabCombo_ClientRequestBus, GetLeaderboard, clientRequest, OnClientLeaderBoard, OnSharedError, &testContext); ServerModels::GetLeaderboardRequest serverRequest; serverRequest.MaxResultsCount = 3; serverRequest.StatisticName = TEST_STAT_NAME; EBUS_EVENT(PlayFabCombo_ServerRequestBus, GetLeaderboard, serverRequest, OnServerLeaderBoard, OnSharedError, &testContext); } static void OnClientLeaderBoard(const ClientModels::GetLeaderboardResult& result, void* customData) { PfTestContext* testContext = reinterpret_cast<PfTestContext*>(customData); if (result.Leaderboard.size() > 0) EndTest(*testContext, PASSED, ""); else EndTest(*testContext, FAILED, "Leaderboard 
entry not found."); } static void OnServerLeaderBoard(const ServerModels::GetLeaderboardResult& result, void* customData) { PfTestContext* testContext = reinterpret_cast<PfTestContext*>(customData); if (result.Leaderboard.size() > 0) EndTest(*testContext, PASSED, ""); else EndTest(*testContext, FAILED, "Leaderboard entry not found."); } /// <summary> /// CLIENT API /// Test that AccountInfo can be requested /// Parameter types tested: List of enum-as-strings converted to list of enums /// </summary> static void AccountInfo(PfTestContext& testContext) { ClientModels::GetAccountInfoRequest request; EBUS_EVENT(PlayFabCombo_ClientRequestBus, GetAccountInfo, request, OnAccountInfo, OnSharedError, &testContext); } static void OnAccountInfo(const ClientModels::GetAccountInfoResult& result, void* customData) { PfTestContext* testContext = reinterpret_cast<PfTestContext*>(customData); // Enums-by-name can't really be tested in C++, the way they can in other languages if (result.AccountInfo == nullptr || result.AccountInfo->TitleInfo == nullptr || result.AccountInfo->TitleInfo->Origination.isNull()) EndTest(*testContext, FAILED, "The Origination data is not present to test"); else // Received data-format as expected EndTest(*testContext, PASSED, ""); } /// <summary> /// CLIENT API /// Test that CloudScript can be properly set up and invoked /// </summary> static void CloudScript(PfTestContext& testContext) { ClientModels::ExecuteCloudScriptRequest request; request.FunctionName = "helloWorld"; EBUS_EVENT(PlayFabCombo_ClientRequestBus, ExecuteCloudScript, request, OnHelloWorldCloudScript, OnSharedError, &testContext); } static void OnHelloWorldCloudScript(const ClientModels::ExecuteCloudScriptResult& result, void* customData) { auto strResult = (AZStd::string)result.FunctionResult; bool success = (strResult.find("Hello " + playFabId + "!") != -1); PfTestContext* testContext = reinterpret_cast<PfTestContext*>(customData); if (!success) EndTest(*testContext, FAILED, strResult); 
else EndTest(*testContext, PASSED, ""); } /// <summary> /// CLIENT API /// Test that the client can publish custom PlayStream events /// </summary> static void WriteEvent(PfTestContext& testContext) { ClientModels::WriteClientPlayerEventRequest request; request.EventName = "ForumPostEvent"; request.Timestamp = time(nullptr); request.Body["Subject"] = "My First Post"; request.Body["Body"] = "My awesome post."; EBUS_EVENT(PlayFabCombo_ClientRequestBus, WritePlayerEvent, request, OnWritePlayerEvent, OnSharedError, &testContext); } static void OnWritePlayerEvent(const ClientModels::WriteEventResponse& result, void* customData) { PfTestContext* testContext = reinterpret_cast<PfTestContext*>(customData); EndTest(*testContext, PASSED, ""); } }; // C++ Static vars std::string PlayFabApiTests::TEST_TITLE_DATA_LOC = "testTitleData.json"; AZStd::string PlayFabApiTests::_outputSummary; AZStd::string PlayFabApiTests::buildIdentifier; AZStd::string PlayFabApiTests::userEmail; const AZStd::string PlayFabApiTests::TEST_DATA_KEY = "testCounter"; const AZStd::string PlayFabApiTests::TEST_STAT_NAME = "str"; std::list<PfTestContext*> PlayFabApiTests::testContexts; AZStd::string PlayFabApiTests::playFabId; int PlayFabApiTests::testMessageInt; time_t PlayFabApiTests::testMessageTime; class CFlowNode_PlayFabComboApiTests : public CFlowBaseNode<eNCT_Instanced> { public: CFlowNode_PlayFabComboApiTests(SActivationInfo* pActInfo) { } IFlowNodePtr Clone(SActivationInfo *pActInfo) override { return new CFlowNode_PlayFabComboApiTests(pActInfo); } void GetMemoryUsage(ICrySizer* s) const override { s->Add(*this); } void GetConfiguration(SFlowNodeConfig& config) override { static const SInputPortConfig in_config[] = { InputPortConfig<SFlowSystemVoid>("Activate", _HELP("Run the PlayFabApiTests")), { 0 } }; static const SOutputPortConfig out_config[] = { // Could probably put real api types here OutputPortConfig<AZStd::string>("Summary", _HELP("A summary of the tests (once complete)")), { 0 } }; 
config.sDescription = _HELP("PlayFab Combo gem test node"); config.pInputPorts = in_config; config.pOutputPorts = out_config; config.SetCategory(EFLN_APPROVED); } void ProcessEvent(EFlowEvent event, SActivationInfo* pActInfo) override { switch (event) { case eFE_Update: if (PlayFabApiTests::TickTestSuite()) { pActInfo->pGraph->SetRegularlyUpdated(pActInfo->myID, false); auto outputSummary = PlayFabApiTests::GenerateTestSummary(); AZ_TracePrintf("PlayFab", outputSummary.c_str()); ActivateOutput(pActInfo, 0, string(outputSummary.c_str())); } break; case eFE_Activate: pActInfo->pGraph->SetRegularlyUpdated(pActInfo->myID, true); PlayFabApiTests::InitializeTestSuite(); break; //case eFE_FinalActivate: } auto lastDebugMessage = PlayFabApiTests::GenerateTestSummary(); } }; REGISTER_FLOW_NODE("PlayFab:PlayFabComboApiTests", CFlowNode_PlayFabComboApiTests);
{ "content_hash": "0d66634b7219bf5ff27f30155d0d13c1", "timestamp": "", "source": "github", "line_count": 683, "max_line_length": 195, "avg_line_length": 42.440702781844806, "alnum_prop": 0.6594680373960741, "repo_name": "PlayFab/LumberyardSDK", "id": "f2d5b8ce15a137c7df9ff6ca9346f2543a347681", "size": "29394", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "TestGemCombo/Code/Source/PlayFabComboApiTestNode.cpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "954" }, { "name": "C", "bytes": "924" }, { "name": "C++", "bytes": "9637276" }, { "name": "Python", "bytes": "2384" } ], "symlink_target": "" }
declare THIS_EXEC="$(basename "${BASH_SOURCE[0]}")"
declare DOTFILES_HOME="$HOME/.dotfiles"
declare DOTFILES_REPO="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
declare IMPORT="$DOTFILES_REPO/bash/source"
declare DOTFILES_LINK="$DOTFILES_REPO/link"
declare DOTFILES_COPY="$DOTFILES_REPO/copy"
declare LINK_HOME="$DOTFILES_LINK/home"
declare LINK_CONFIG="$DOTFILES_LINK/config"
declare BACKUP_DIR="$DOTFILES_REPO/backup"
# NOTE(review): relies on word splitting of find's output; assumes linked
# filenames contain no whitespace — TODO confirm against repo contents.
declare -a BACKUP_FILES=($(find "$LINK_HOME" -type f -exec basename '{}' \;))

# Filesystem directories to create
# Main projects directory
declare WS="$HOME/workspace"
# Practice projects directory
declare PRAC="$WS/practice"
# Directory for installed third-party applications
declare APPS="$HOME/applications"
# Third party archives directory
declare ARCHIVES="$APPS/archives"

# Program flags
# Run with extra logging
declare VERBOSE=false
# Force all actions that would otherwise involve answering a prompt
declare FORCE_INSTALL=false
# }}}

# Imports {{{
source "${IMPORT}/colors.bash"
source "${IMPORT}/functions_log.bash"
source "${IMPORT}/functions_os.bash"
# }}}

# Setup and Cleanup Functions {{{

# Move old dotfiles in $HOME to ~/.dotfiles/backup/ for safe keeping.
backup_old_dotfiles() {
  echoe "Backing up old dotfiles..."
  local oldFile

  mkdir -p "$BACKUP_DIR"
  for oldFile in "${BACKUP_FILES[@]}"; do
    if [ -f "$HOME/$oldFile" ]; then
      mv "$HOME/$oldFile" "$BACKUP_DIR/$oldFile"
    fi
  done
}

# Take care of backing up existing ~/.dotfiles directory
backup_existing_installation() {
  # Safe name for backup directory
  local oldDotfiles="$(mktemp -u "${DOTFILES_HOME}.bak.XXXXXXXXXX")"

  if [ -d "$DOTFILES_HOME" ]; then
    log_info "Backing up existing dotfiles installation to $oldDotfiles"
    mv "$DOTFILES_HOME" "$oldDotfiles"
  fi
}

# Figure out what to do if an existing dotfiles installation is found.
# Prompts the user and exits unless they agree to remove the old install.
check_existing_installation() {
  log_info "Checking for existing dotfiles installation"
  if [ -d "$DOTFILES_HOME" ]; then
    warn "Existing dotfiles installation found at ${DOTFILES_HOME}!"
    echoe "An existing dotfiles installation was found at ${DOTFILES_HOME}."
    echoe "It must be removed before this installation can progress."

    local response=""
    while [[ ! "$response" =~ [YyNn] ]]; do
      echoe "Would you like to remove it and continue with the installation?" \
        "[y/n]"
      read -sn1 response
    done
    [[ "$response" =~ [Nn] ]] && echoe "Exiting." && exit 1
  else
    log_info "No dotfiles installation found."
  fi
}

# Performs initial setup.
setup() {
  log_info "Setting up..."
  # --force skips the interactive "remove existing install?" prompt.
  if ! $FORCE_INSTALL; then
    check_existing_installation
  fi
  backup_old_dotfiles
  backup_existing_installation
}

# Cleanup after the program finishes.
cleanup() {
  log_info "Cleaning up..."
  # If the backup directory is empty, remove it.
  # FIX: the old test wrapped `ls -A ... &>/dev/null` in a command
  # substitution, which always expanded to the empty string, so the check was
  # always false and the directory was never removed. Test for emptiness
  # explicitly instead, and use rmdir so a non-empty backup can never be lost.
  if [ -d "$BACKUP_DIR" ] && [ -z "$(ls -A "$BACKUP_DIR")" ]; then
    rmdir "$BACKUP_DIR"
  fi
}
trap cleanup EXIT
# }}}

# Help {{{
# Print usage information and exit successfully.
_help() {
  cat <<EOF
${THIS_EXEC}

Install tjtrabue's dotfiles on the current system. For the most part, this
script just creates a bunch of symlinks, so it is highly non-destructive. As
opposed to overwriting the user's existing dotfiles, this script backs up all
of the existing files before creating any symlinks. Nothing should be lost in
the process. Check the 'backup' directory created by this script if you wish
to restore your old dotfiles.

USAGE:
  ./${THIS_EXEC} [OPTIONS]

OPTIONS:
  -h | --help
    Print the help message (this message) and exit.

  -v | --verbose
    Run with extra logging.

  -f | --force
    Force dotfiles to install, assuming "yes" for all prompts. This option
    should be used with caution, as it may overwrite some of your files, even
    though this script tries hard not to do that.
EOF
  exit 0
}
# }}}

# Test Functions {{{
# Debugging helper: dump the primary path variables.
print_vars() {
  echoe "DOTFILES_REPO: ${DOTFILES_REPO}"
  echoe "DOTFILES_HOME: ${DOTFILES_HOME}"
}
# }}}

# Primary Functions {{{

# Link the ~/.config directory
init_config() {
  log_info "Initializing ~/.config directory."
  local homeConfig="$HOME/.config"
  # Any existing ~/.config is replaced wholesale by a symlink into the repo.
  [ -d "$homeConfig" ] && rm -rf "$homeConfig"
  ln -sf "$LINK_CONFIG" "$homeConfig"
  succ "Done."
}

# Create symlinks
link_dotfiles() {
  log_info "Linking dotfiles"

  # Link the repository to ~/.dotfiles
  ln -s "$DOTFILES_REPO" "$DOTFILES_HOME"

  # Link $HOME files
  find "$LINK_HOME" -type f -exec ln -sf '{}' "$HOME/" \;

  succ "Linking complete."
}

# Copy one-time transfer files
copy_dotfiles() {
  log_info "Copying dotfiles"
  find "$DOTFILES_COPY" -maxdepth 1 -mindepth 1 -type f -exec cp -f '{}' "$HOME/" \;
  succ "Copying complete"
}

# Inject OS-specific variable definitions into ~/.vars at the marker line.
add_extra_os_vars() {
  log_info "Injecting additional OS variables into $HOME/.vars"
  local os="$(getosinfo | head -1 | sed 's/Distribution:\s*//')"
  local extraVarsDir="$DOTFILES_REPO/copy/var_files"
  local extraVarsLinuxDir="$extraVarsDir/linux"
  local markerString="#<additional-vars-insert>"
  local extraVarsFile

  case "$os" in
    "Arch Linux")
      extraVarsFile="$extraVarsLinuxDir/arch_vars.bash"
      ;;
    *)
      warn "No extra vars to add for OS: $os"
      ;;
  esac

  if [ -f "$extraVarsFile" ]; then
    # Splice the OS-specific file in after the marker line.
    sed -i -e "/$markerString/r $extraVarsFile" "$HOME/.vars"
  fi

  # Get rid of marker string in ~/.vars
  sed -i "/$markerString/d" "$HOME/.vars"
  succ "Done injecting additional variables"
}

# Create the standard workspace/application directories if missing.
ensure_dirs_present() {
  log_info "Creating important directories"
  local dirs=(
    "$WS"
    "$PRAC"
    "$APPS"
    "$ARCHIVES"
  )
  local dir

  for dir in "${dirs[@]}"; do
    mkdir -p "$dir" &>/dev/null
  done
}

# Main that calls all subroutines
main() {
  setup
  copy_dotfiles
  link_dotfiles
  ensure_dirs_present
  init_config
  add_extra_os_vars
}
# }}}

# Parse CLI Options {{{
# FIX: report errors under this script's real name instead of 'init_arch'.
args=$(getopt -o hvf --long help,verbose,force -n "$THIS_EXEC" -- "$@")
eval set -- "$args"

# extract options and their arguments into variables.
while true; do
  case "$1" in
    -v | --verbose)
      VERBOSE=true
      shift
      ;;

    -h | --help)
      _help
      shift
      break
      ;;

    -f | --force)
      FORCE_INSTALL=true
      shift
      ;;

    --)
      shift
      break
      ;;

    *)
      err "Unknown option $1 to ${THIS_EXEC}"
      exit 2
      ;;
  esac
done
# }}}

# Main execution
main

# Modeline for this file (KEEP IT COMMENTED!)
# vim:foldenable:foldmethod=marker
{ "content_hash": "00826f01dd76c47b47c6df6c6a6f7c28", "timestamp": "", "source": "github", "line_count": 263, "max_line_length": 84, "avg_line_length": 24.09125475285171, "alnum_prop": 0.6669823232323232, "repo_name": "tjtrabue/zsh-dotfiles", "id": "7825e5aa85f53ec4df987d4c1f901b78dca5f1f5", "size": "6398", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "install.sh", "mode": "33261", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "806" }, { "name": "Python", "bytes": "991" }, { "name": "Shell", "bytes": "97317" }, { "name": "VimL", "bytes": "6276" } ], "symlink_target": "" }
package org.onosproject.cli.net.vnet;

import static org.onlab.osgi.DefaultServiceDirectory.getService;

import org.apache.karaf.shell.console.completer.ArgumentCompleter.ArgumentList;
import org.onosproject.cli.AbstractChoicesCompleter;
import org.onosproject.incubator.net.virtual.NetworkId;
import org.onosproject.incubator.net.virtual.VirtualNetworkService;
import org.onosproject.incubator.net.virtual.VirtualPort;
import org.onosproject.net.DeviceId;
import org.onosproject.utils.Comparators;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Virtual port completer.
 *
 * Assumes the first argument which can be parsed to a number is the network
 * id and the argument right before the one being completed is the device id.
 */
public class VirtualPortCompleter extends AbstractChoicesCompleter {

    @Override
    protected List<String> choices() {
        ArgumentList args = getArgumentList();

        // Scan the argument list for the first all-digit token; that token is
        // taken to be the network id.
        String[] argsArray = args.getArguments();
        for (String arg : argsArray) {
            if (arg.matches("[0-9]+")) {
                // parseLong avoids the needless boxing of Long.valueOf.
                long networkId = Long.parseLong(arg);
                String deviceId = argsArray[argsArray.length - 1];
                return getSortedVirtualPorts(networkId, deviceId).stream()
                        .map(virtualPort -> virtualPort.number().toString())
                        .collect(Collectors.toList());
            }
        }

        // No numeric argument found; surface a hint instead of real choices.
        return Collections.singletonList("Missing network id");
    }

    /**
     * Returns the list of virtual ports sorted using the port number.
     *
     * @param networkId network id
     * @param deviceId  device id
     * @return sorted virtual port list
     */
    private List<VirtualPort> getSortedVirtualPorts(long networkId, String deviceId) {
        VirtualNetworkService service = getService(VirtualNetworkService.class);
        // Copy into a mutable list before sorting; the service may hand back
        // an unmodifiable collection.
        List<VirtualPort> virtualPorts = new ArrayList<>(
                service.getVirtualPorts(NetworkId.networkId(networkId),
                                        DeviceId.deviceId(deviceId)));
        virtualPorts.sort(Comparators.VIRTUAL_PORT_COMPARATOR);
        return virtualPorts;
    }
}
{ "content_hash": "4193ef10da56c122f55bd69e2ba02ce1", "timestamp": "", "source": "github", "line_count": 61, "max_line_length": 86, "avg_line_length": 36.83606557377049, "alnum_prop": 0.6987093902981754, "repo_name": "osinstom/onos", "id": "19a31a34cbe6b0208c5b2506f7d36bbb625eb4c0", "size": "2864", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "cli/src/main/java/org/onosproject/cli/net/vnet/VirtualPortCompleter.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "233181" }, { "name": "HTML", "bytes": "119665" }, { "name": "Java", "bytes": "38529527" }, { "name": "JavaScript", "bytes": "3930281" }, { "name": "Makefile", "bytes": "1058" }, { "name": "P4", "bytes": "78664" }, { "name": "Python", "bytes": "227209" }, { "name": "Shell", "bytes": "4841" } ], "symlink_target": "" }
"""
 ** OBS - this is not a normal command module! **
 ** You cannot import anything in this module as a command! **

This is part of the Evennia unittest framework, for testing the
stability and integrity of the codebase during updates. This module
test the default command set. It is instantiated by the
src/objects/tests.py module, which in turn is run by as part of the
main test suite started with
 > python game/manage.py test.

"""
import re

from django.conf import settings
from django.utils.unittest import TestCase
from src.players.player import Player
from src.utils import create, utils, ansi

# Strips prettytable box-drawing decorations (+----+, |, ---) from command
# output so tests can compare plain text.
_RE = re.compile(r"^\+|-+\+|\+-+|--*|\|", re.MULTILINE)


# ------------------------------------------------------------
# Command testing
# ------------------------------------------------------------


class TestPlayerClass(Player):
    """Player typeclass that records messages instead of sending them."""

    def msg(self, message, **kwargs):
        "test message"
        # Accumulate every message on the ndb handler for later inspection.
        if not self.ndb.stored_msg:
            self.ndb.stored_msg = []
        self.ndb.stored_msg.append(message)

    def _get_superuser(self):
        "test with superuser flag"
        return self.ndb.is_superuser
    is_superuser = property(_get_superuser)


class CommandTest(TestCase):
    """
    Tests a command
    """
    # we must set a different CID in every test to avoid unique-name
    # collisions creating the objects
    CID = 0

    def setUp(self):
        "sets up testing environment"
        self.room1 = create.create_object("src.objects.objects.Room", key="Room%i" % self.CID)
        self.room1.db.desc = "room_desc"
        self.room2 = create.create_object("src.objects.objects.Room", key="Room%ib" % self.CID)
        self.obj1 = create.create_object("src.objects.objects.Object", key="Obj%i" % self.CID,
                                         location=self.room1, home=self.room1)
        self.obj2 = create.create_object("src.objects.objects.Object", key="Obj%ib" % self.CID,
                                         location=self.room1, home=self.room1)
        self.char1 = create.create_object("src.objects.objects.Character", key="Char%i" % self.CID,
                                          location=self.room1, home=self.room1)
        self.char2 = create.create_object("src.objects.objects.Character", key="Char%ib" % self.CID,
                                          location=self.room1, home=self.room1)
        self.script = create.create_script("src.scripts.scripts.Script", key="Script%i" % self.CID)
        self.player = create.create_player("TestPlayer%i" % self.CID, "test@test.com",
                                           "testpassword", typeclass=TestPlayerClass)
        self.player2 = create.create_player("TestPlayer%ib" % self.CID, "test@test.com",
                                            "testpassword", typeclass=TestPlayerClass)
        self.player.permissions = "Immortals"
        self.char1.player = self.player
        self.char1.sessid = 1

    def call(self, cmdobj, args, msg=None, cmdset=None, noansi=True):
        """
        Test a command by assigning all the needed
        properties to cmdobj and running
            cmdobj.at_pre_cmd()
            cmdobj.parse()
            cmdobj.func()
            cmdobj.at_post_cmd()
        The msgreturn value is compared to eventual
        output sent to caller.msg in the game
        """
        cmdobj.caller = self.char1
        cmdobj.cmdstring = cmdobj.key
        cmdobj.args = args
        cmdobj.cmdset = cmdset
        cmdobj.raw_string = cmdobj.key + " " + args
        cmdobj.obj = self.char1
        cmdobj.sessid = 1
        # test
        self.char1.player.ndb.stored_msg = []
        cmdobj.at_pre_cmd()
        cmdobj.parse()
        cmdobj.func()
        cmdobj.at_post_cmd()
        # clean out prettytable sugar
        returned_msg = "|".join(_RE.sub("", mess) for mess in self.char1.player.ndb.stored_msg)
        #returned_msg = "|".join(self.char1.player.ndb.stored_msg)
        returned_msg = ansi.parse_ansi(returned_msg, strip_ansi=noansi).strip()
        # msg=None means "don't check the output at all".
        if msg is not None:
            # Fail when an empty expectation got output anyway, or when the
            # output does not start with the expected text. (Parentheses make
            # the original and/or precedence explicit.)
            if (msg == "" and returned_msg) or not returned_msg.startswith(msg.strip()):
                sep1 = "\n" + "=" * 30 + "Wanted message" + "=" * 34 + "\n"
                sep2 = "\n" + "=" * 30 + "Returned message" + "=" * 32 + "\n"
                sep3 = "\n" + "=" * 78
                retval = sep1 + msg.strip() + sep2 + returned_msg + sep3
                raise AssertionError(retval)


# ------------------------------------------------------------
# Individual module Tests
# ------------------------------------------------------------

from src.commands.default import general


class TestGeneral(CommandTest):
    CID = 1

    def test_cmds(self):
        self.call(general.CmdLook(), "here", "Room1\n room_desc")
        self.call(general.CmdHome(), "", "You are already home")
        self.call(general.CmdInventory(), "", "You are not carrying anything.")
        self.call(general.CmdPose(), "looks around", "Char1 looks around")
        self.call(general.CmdHome(), "", "You are already home")
        self.call(general.CmdNick(), "testalias = testaliasedstring1", "Nick set:")
        self.call(general.CmdNick(), "/player testalias = testaliasedstring2", "Nick set:")
        self.call(general.CmdNick(), "/object testalias = testaliasedstring3", "Nick set:")
        self.assertEqual(u"testaliasedstring1", self.char1.nicks.get("testalias"))
        self.assertEqual(u"testaliasedstring2", self.char1.nicks.get("testalias", nick_type="player"))
        self.assertEqual(u"testaliasedstring3", self.char1.nicks.get("testalias", nick_type="object"))
        self.call(general.CmdGet(), "Obj1", "You pick up Obj1.")
        self.call(general.CmdDrop(), "Obj1", "You drop Obj1.")
        self.call(general.CmdSay(), "Testing", "You say, \"Testing\"")
        self.call(general.CmdAccess(), "", "Permission Hierarchy (climbing):")


from src.commands.default import help
from src.commands.default.cmdset_character import CharacterCmdSet


class TestHelp(CommandTest):
    CID = 2

    def test_cmds(self):
        self.call(help.CmdHelp(), "", "Command help entries", cmdset=CharacterCmdSet())
        self.call(help.CmdSetHelp(), "testhelp, General = This is a test",
                  "Topic 'testhelp' was successfully created.")
        self.call(help.CmdHelp(), "testhelp", "Help topic for testhelp", cmdset=CharacterCmdSet())


from src.commands.default import system


class TestSystem(CommandTest):
    CID = 3

    def test_cmds(self):
        # we are not testing CmdReload, CmdReset and CmdShutdown, CmdService
        # or CmdTime since the server is not running during these tests.
        self.call(system.CmdPy(), "1+2", ">>> 1+2|<<< 3")
        self.call(system.CmdScripts(), "", "id ")
        self.call(system.CmdObjects(), "", "Object subtype totals")
        self.call(system.CmdAbout(), "", None)
        self.call(system.CmdServerLoad(), "", "Server CPU and Memory load:")


from src.commands.default import admin


class TestAdmin(CommandTest):
    CID = 4

    def test_cmds(self):
        # not testing CmdBoot, CmdDelPlayer, CmdNewPassword
        self.call(admin.CmdEmit(), "Char4b = Test", "Emitted to Char4b.")
        self.call(admin.CmdPerm(), "Obj4 = Builders", "Permission 'Builders' given to Obj4.")
        self.call(admin.CmdWall(), "Test", "Announcing to all connected players ...")
        self.call(admin.CmdPerm(), "Char4b = Builders", "Permission 'Builders' given to Char4b.")
        self.call(admin.CmdBan(), "Char4", "NameBan char4 was added.")


from src.commands.default import player


class TestPlayer(CommandTest):
    CID = 5

    def test_cmds(self):
        self.call(player.CmdOOCLook(), "", "Account TestPlayer5 (you are OutofCharacter)")
        self.call(player.CmdIC(), "Char5", "Char5 is now acted from another")
        self.call(player.CmdOOC(), "", "You are already")
        self.call(player.CmdPassword(), "testpassword = testpassword", "Password changed.")
        self.call(player.CmdEncoding(), "", "Default encoding:")
        self.call(player.CmdWho(), "", "Players:")
        self.call(player.CmdQuit(), "", "Quitting. Hope to see you soon again.")
        self.call(player.CmdSessions(), "", "Your current session(s):")
        self.call(player.CmdColorTest(), "ansi", "ANSI colors:")
        self.call(player.CmdCharCreate(), "Test1=Test char",
                  "Created new character Test1. Use @ic Test1 to enter the game")
        self.call(player.CmdQuell(), "",
                  "Quelling Player permissions (Immortals). Use @unquell to get them back.")


from src.commands.default import building


class TestBuilding(CommandTest):
    CID = 6

    def test_cmds(self):
        self.call(building.CmdCreate(), "/drop TestObj1", "You create a new Object: TestObj1.")
        self.call(building.CmdSetObjAlias(), "TestObj1 = TestObj1b",
                  "Aliases for 'TestObj1' are now set to testobj1b.")
        self.call(building.CmdCopy(), "TestObj1 = TestObj2;TestObj2b, TestObj3;TestObj3b",
                  "Copied TestObj1 to 'TestObj3' (aliases: ['TestObj3b']")
        self.call(building.CmdSetAttribute(), "Obj6/test1=\"value1\"",
                  "Created attribute Obj6/test1 = \"value1\"")
        self.call(building.CmdSetAttribute(), "Obj6b/test2=\"value2\"",
                  "Created attribute Obj6b/test2 = \"value2\"")
        self.call(building.CmdMvAttr(), "Obj6b/test2 = Obj6/test3",
                  "Moving Obj6b/test2 (with value value2) ...\nMoved Obj6b.test2")
        self.call(building.CmdCpAttr(), "Obj6/test1 = Obj6b/test3",
                  "Copying Obj6/test1 (with value value1) ...\nCopied Obj6.test1")
        self.call(building.CmdName(), "Obj6b=Obj6c", "Object's name changed to 'Obj6c'.")
        self.call(building.CmdDesc(), "Obj6c=TestDesc", "The description was set on Obj6c.")
        self.call(building.CmdWipe(), "Obj6c/test2/test3", "Wiped attributes test2,test3 on Obj6c.")
        self.call(building.CmdDestroy(), "TestObj1", "TestObj1 was destroyed.")
        self.call(building.CmdDig(), "TestRoom1=testroom;tr,back;b", "Created room TestRoom1")
        self.call(building.CmdTunnel(), "n = TestRoom2;test2", "Created room TestRoom2")
        self.call(building.CmdOpen(), "TestExit1=Room6b",
                  "Created new Exit 'TestExit1' from Room6 to Room6b")
        self.call(building.CmdLink(), "TestExit1 = TestRoom1",
                  "Link created TestExit1 > TestRoom1 (one way).")
        self.call(building.CmdUnLink(), "TestExit1",
                  "Former exit TestExit1 no longer links anywhere.")
        self.call(building.CmdSetHome(), "Obj6 = Room6b",
                  "Obj6's home location was changed from Room6")
        self.call(building.CmdListCmdSets(), "", "<DefaultCharacter (Union, prio 0, perm)>:")
        self.call(building.CmdTypeclass(), "Obj6 = src.objects.objects.Character",
                  "Obj6's changed typeclass from src.objects.objects.Object to")
        self.call(building.CmdLock(), "Obj6 = test:perm(Immortals)",
                  "Added lock 'test:perm(Immortals)' to Obj6.")
        self.call(building.CmdExamine(), "Obj6", "Name/key: Obj6")
        self.call(building.CmdFind(), "TestRoom1", "One Match")
        self.call(building.CmdScript(), "Obj6 = src.scripts.scripts.Script",
                  "Script src.scripts.scripts.Script successfully added")
        self.call(building.CmdTeleport(), "TestRoom1",
                  "TestRoom1\nExits: back|Teleported to TestRoom1.")


from src.commands.default import comms


class TestComms(CommandTest):
    CID = 7

    def test_cmds(self):
        # not testing the irc/imc2/rss commands here since testing happens offline
        self.call(comms.CmdChannelCreate(), "testchan;test=Test Channel",
                  "Created channel testchan and connected to it.")
        self.call(comms.CmdAddCom(), "tc = testchan",
                  "You are already connected to channel testchan. You can now")
        self.call(comms.CmdDelCom(), "tc", "Your alias 'tc' for channel testchan was cleared.")
        self.call(comms.CmdChannels(), "",
                  "Available channels (use comlist,addcom and delcom to manage")
        self.call(comms.CmdAllCom(), "",
                  "Available channels (use comlist,addcom and delcom to manage")
        self.call(comms.CmdCset(), "testchan=send:all()",
                  "Lock(s) applied. Current locks on testchan:")
        self.call(comms.CmdCdesc(), "testchan = Test Channel",
                  "Description of channel 'testchan' set to 'Test Channel'.")
        self.call(comms.CmdCemit(), "testchan = Test Message",
                  "Sent to channel testchan: [testchan] Test Message")
        self.call(comms.CmdCWho(), "testchan", "Channel subscriptions\ntestchan:\n TestPlayer7")
        self.call(comms.CmdPage(), "TestPlayer7b = Test", "You paged TestPlayer7b with: 'Test'.")
        # noone else connected to boot
        self.call(comms.CmdCBoot(), "", "Usage: @cboot[/quiet] <channel> = <player> [:reason]")
        self.call(comms.CmdCdestroy(), "testchan",
                  "Channel 'testchan' (Test Channel) was destroyed.")


from src.commands.default import batchprocess


class TestBatchProcess(CommandTest):
    CID = 8

    def test_cmds(self):
        # cannot test batchcode here, it must run inside the server process
        self.call(batchprocess.CmdBatchCommands(), "examples.batch_cmds",
                  "Running Batchcommand processor Automatic mode for examples.batch_cmds")
        #self.call(batchprocess.CmdBatchCode(), "examples.batch_code", "")
{ "content_hash": "c242eee03a883a36a7e5f23ce29b8e76", "timestamp": "", "source": "github", "line_count": 221, "max_line_length": 147, "avg_line_length": 58.07239819004525, "alnum_prop": 0.6505376344086021, "repo_name": "TaliesinSkye/evennia", "id": "ccad26bf56d499c91ea687b54830a060cab4981c", "size": "12858", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/commands/default/tests.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "59698" }, { "name": "D", "bytes": "9343933" }, { "name": "Emacs Lisp", "bytes": "2734" }, { "name": "JavaScript", "bytes": "91190" }, { "name": "Python", "bytes": "2840755" }, { "name": "Shell", "bytes": "4577" } ], "symlink_target": "" }
SOAP Interop Round2 base 018 (soap/direct): echoHexBinary --SKIPIF-- <?php require_once('skipif.inc'); ?> --FILE-- <?php $client = new SoapClient(NULL,array("location"=>"test://","uri"=>"http://soapinterop.org/","trace"=>1,"exceptions"=>0)); $client->__soapCall("echoHexBinary", array(new SoapParam(new SoapVar('soapx4',XSD_HEXBINARY),"inputHexBinary")), array("soapaction"=>"http://soapinterop.org/","uri"=>"http://soapinterop.org/")); echo $client->__getlastrequest(); $HTTP_RAW_POST_DATA = $client->__getlastrequest(); include("round2_base.inc"); echo "ok\n"; ?> --EXPECT-- <?xml version="1.0" encoding="UTF-8"?> <SOAP-ENV:Envelope xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/" xmlns:ns1="http://soapinterop.org/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:SOAP-ENC="http://schemas.xmlsoap.org/soap/encoding/" SOAP-ENV:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/"><SOAP-ENV:Body><ns1:echoHexBinary><inputHexBinary xsi:type="xsd:hexBinary">736F61707834</inputHexBinary></ns1:echoHexBinary></SOAP-ENV:Body></SOAP-ENV:Envelope> <?xml version="1.0" encoding="UTF-8"?> <SOAP-ENV:Envelope xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/" xmlns:ns1="http://soapinterop.org/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:SOAP-ENC="http://schemas.xmlsoap.org/soap/encoding/" SOAP-ENV:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/"><SOAP-ENV:Body><ns1:echoHexBinaryResponse><outputHexBinary xsi:type="xsd:hexBinary">736F61707834</outputHexBinary></ns1:echoHexBinaryResponse></SOAP-ENV:Body></SOAP-ENV:Envelope> ok
{ "content_hash": "909df9d733be9e1d6ee5d9a0c2c71d5e", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 517, "avg_line_length": 93.11111111111111, "alnum_prop": 0.7356801909307876, "repo_name": "lunaczp/learning", "id": "a531069388c6340de61ea9720ae6e8f9d3ce78b6", "size": "1685", "binary": false, "copies": "8", "ref": "refs/heads/master", "path": "language/c/testPhpSrc/php-5.6.17/ext/soap/tests/interop/Round2/Base/r2_base_018s.phpt", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "4526" }, { "name": "Assembly", "bytes": "14500403" }, { "name": "Awk", "bytes": "21252" }, { "name": "Batchfile", "bytes": "2526" }, { "name": "C", "bytes": "381839655" }, { "name": "C++", "bytes": "10162228" }, { "name": "CMake", "bytes": "68196" }, { "name": "CSS", "bytes": "3943" }, { "name": "D", "bytes": "1022" }, { "name": "DTrace", "bytes": "4528" }, { "name": "Fortran", "bytes": "1834" }, { "name": "GAP", "bytes": "4344" }, { "name": "GDB", "bytes": "31864" }, { "name": "Gnuplot", "bytes": "148" }, { "name": "Go", "bytes": "732" }, { "name": "HTML", "bytes": "86756" }, { "name": "Java", "bytes": "8286" }, { "name": "JavaScript", "bytes": "238365" }, { "name": "Lex", "bytes": "121233" }, { "name": "Limbo", "bytes": "1609" }, { "name": "Lua", "bytes": "96" }, { "name": "M4", "bytes": "483288" }, { "name": "Makefile", "bytes": "1915601" }, { "name": "Nix", "bytes": "180099" }, { "name": "Objective-C", "bytes": "1742504" }, { "name": "OpenEdge ABL", "bytes": "4238" }, { "name": "PHP", "bytes": "27984629" }, { "name": "Pascal", "bytes": "74868" }, { "name": "Perl", "bytes": "317465" }, { "name": "Perl 6", "bytes": "6916" }, { "name": "Python", "bytes": "21547" }, { "name": "R", "bytes": "1112" }, { "name": "Roff", "bytes": "435717" }, { "name": "Scilab", "bytes": "22980" }, { "name": "Shell", "bytes": "468206" }, { "name": "UnrealScript", "bytes": "20840" }, { "name": "Vue", "bytes": "563" }, { "name": "XSLT", "bytes": "7946" }, { 
"name": "Yacc", "bytes": "172805" }, { "name": "sed", "bytes": "2073" } ], "symlink_target": "" }
/* Theme Name: Laputan Machines Theme URI: https://github.com/FlatlanderWoman/laputanMachines/releases Author: Michael Bassili Author URI: http://bassi.li/ Description: A WordPress theme for a video game blog. Everything has been designed to fit the minimalist aesthetic resulting in a clean, easy to read blog page. Version: 1.0 License: GNU General Public License v2 or later License URI: http://www.gnu.org/licenses/gpl-2.0.html Tags: video game, blog, blue, red, gear, feed, tag Text Domain: laputanmachines This theme, like WordPress, is licensed under the GPL. Use it to make something cool, have fun, and share what you've learned with others. */ /*Font Importing*/ @import url('https://fonts.googleapis.com/css?family=Patua+One|Roboto:300,400,500,700,900|Open+Sans:300,400'); /*Body Reset*/ *, *:before, *:after { /*Maintain Box Borders*/ -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; box-sizing: border-box; /*Normalize (fallback)*/ margin: 0; padding: 0; line-height: 1.5em; } ::-webkit-input-placeholder, ::-moz-placeholder { text-align: center; } .search-field:focus { /*Remove Blue Borders*/ outline: 0; } body { /*Background*/ background-color: #ededed; } /*Cursor Selection*/ ::selection { /* WebKit/Blink Browsers */ background: rgba(0,0,0,0.2); } ::-moz-selection { /* Gecko Browsers */ background: rgba(0,0,0,0.2); } /*Scroll Bar*/ ::-webkit-scrollbar { /*Colour*/ width: 1em; background-color: #ededed; } ::-webkit-scrollbar-thumb { /*Colour*/ border-radius: 1px; background-color: rgba(0, 0, 0, 0.15); } ::-webkit-scrollbar-track { /*Colour*/ background-color: #ededed; } a:hover { cursor: pointer; } /*Font Optimization*/ *, html, body { /*Antialiasing*/ -moz-osx-font-smoothing: grayscale; -webkit-font-smoothing: antialiased; -moz-font-smoothing: antialiased; -o-font-smoothing: antialiased; font-smoothing: antialiased; /*Handle Large Text-Blocks*/ text-rendering: optimizeLegibility !important; } a { /*Typography*/ text-decoration: 
none; color: #247ba0; } a:hover { /*Typography*/ text-decoration: underline; } /*General Layout*/ .container { /*Display*/ clear: both; } @media screen and (max-width: 1000px) { /*Mobile Properties*/ .container { /*Sizing*/ max-width: 95vw; /*Centering*/ margin: 0 auto; } } @media screen and (min-width: 1000px) { /*Desktop Properties*/ .container { /*Sizing*/ max-width: 1000px; /*Centering*/ margin: 0 auto; } } .container-content { /*Spacing*/ padding: 5px; } /*Site Header*/ .site-header { /*Rendering*/ background-color: #247ba0; border-bottom: 6px solid #D72638; /*Display*/ clear: both; overflow: auto; /*Spacing*/ margin-bottom: 12px; } .category-title { background: linear-gradient( rgba(215, 38, 56, .7), rgba(215, 38, 56, .7) ), url(http://localhost/basilisk/wp-content/uploads/2017/01/small_steps.png); width: 100%; height: 100%; position: relative; margin-top: 5px; padding: 20px; /*Typography*/ color: #ffffff; font-family: 'Patua One', sans-serif; font-weight: 500; font-size: 1.75em; text-decoration: underline; text-align: center; } /*Header Organization*/ .header-block { /*Display*/ display: flex; justify-content: space-between; align-items: center; } /*Site Logo*/ .site-logo { /*Display*/ display: block; /*float: left;*/ /*Sizing*/ width: 30px; /*Spacing*/ margin: 10px; } @media screen and (max-width: 1000px) { /*Mobile Properties*/ .site-logo { /*Sizing*/ width: 25px; /*Spacing*/ margin: 8px; } } /*Featured Post*/ .first-post { /*Display*/ /*background-color: #ff1654; */ background: linear-gradient( rgba(215, 38, 56, .7), rgba(215, 38, 56, .7) ), url('http://localhost/basilisk/wp-content/uploads/2017/01/small_steps.png'); width: 100%; height: 100%; position: relative; margin-top: 15px; } .size-featured-thumbnail { /*Sizing*/ width: 50%; max-width: 50%; /*Display*/ display: block; } .featured-container { /*Typography*/ text-align: right; /*Display*/ z-index: 10; position: absolute; top: 0; right: 0; padding: 20px; max-width: 50%; } .featured-title a, 
.general-title a { /*Typography*/ color: #ffffff; font-family: 'Patua One', sans-serif; font-weight: 500; font-size: 1.25em; text-decoration: none; } .featured-title a:hover, .general-title a:hover { /*Typography*/ /*color: #1c0a30;*/ text-decoration: underline; } .featured-internal-container { text-align: right; position: absolute; bottom: 0; right: 0; padding: 20px; max-width: 50%; } .featured-internal-container p { /*Typography*/ color: #ffffff; font-family: 'Open Sans', sans-serif; font-size: 1em; /*Spacing*/ padding-top: 20px; } .featured-internal-container .post-info { /*Typography*/ opacity: .9; font-size: .9em; } @media screen and (min-width: 1000px) { /*Desktop Properties*/ .size-featured-thumbnail-mobile, .featured-container-mobile { /*Hide on Desktop*/ visibility: hidden; display: none; } } @media screen and (max-width: 1000px) { /*Mobile Properties*/ .size-featured-thumbnail, .featured-container, .featured-internal-container { /*Hide on Mobile*/ visibility: hidden; display: none; } .size-featured-thumbnail-mobile { /*Sizing*/ width: 100%; max-width: 100%; /*Display*/ display: block; } .featured-title-mobile { /*Spacing*/ padding: 10px; text-align: center; } .featured-title-mobile a { /*Typography*/ color: #ffffff; font-family: 'Patua One', sans-serif; font-weight: 500; font-size: 1.25em; text-decoration: none; } .featured-title-mobile a:hover { /*Typography*/ /*color: #1c0a30;*/ text-decoration: underline; } .featured-container-mobile p { /*Typography*/ color: #ffffff; font-family: 'Open Sans', sans-serif; font-size: .95em; text-indent: 2em; /*Spacing*/ padding: 10px; padding-top: 0px; } .featured-container-mobile .post-info-mobile { /*Typography*/ opacity: .9; font-size: .8em; text-align: center; text-indent: 0em !important; } } /*Site Posts*/ .site-post { /*Spacing*/ /*margin-bottom: 5px;*/ } /*Navigation Links*/ .header-block p { } .header-block p a:link, .header-block p a:visited { /*Display*/ display: block; /*Typography*/ text-decoration: none; 
font-family: 'Roboto', sans-serif; color: #f3ffbd; font-weight: 400; letter-spacing: 1px; opacity: 1; /*Spacing*/ /*padding: 10px 18px;*/ } .header-block p.current_page_item a:link, .header-block p.current_page_item a:visited { /*Current Page Properties*/ color: #ff570a; } .header-block p a:hover, .header-block p a:active { /*Mouse Over Page Properties*/ /*Transition*/ -webkit-transition: all .1s ease-in-out; -moz-transition: all .1s ease-in-out; transition: all .1s ease-in-out; /*Typography*/ color: #ff1654; } @media screen and (max-width: 1000px) { /*Mobile Properties*/ .header-block p a:link, .header-block p a:visited { /*Typography*/ font-size: 1em; /*Spacing*/ padding-left: 10px; padding-right: 10px; } } @media screen and (min-width: 1000px) { /*Desktop Properties*/ .header-block p a:link, .header-block p a:visited { /*Typography*/ font-size: 1em; /*Spacing*/ padding-left: 5px; padding-right: 5px; } } /*Post Thumbnails*/ img { /*Sizing*/ width: 100%; max-width: 1000px; height: auto; } /*Landing Meta Links*/ .index-info { /*Display*/ display: flex; justify-content: space-between; /*Spacing*/ margin-top: 20px; } @media screen and (max-width: 600px) { .index-info { display: block; } .right-info, .left-info { width: 100% !important; max-width: 100vw !important; } } .left-info { width: 100%; /*Sizing*/ width: 50%; max-width: calc(50% - 15px); } .right-info { /*Sizing*/ width: 50%; max-width: calc(50% - 15px); } .right-info h3, .left-info h3 { /*Background*/ background: linear-gradient( rgba(215, 38, 56, .7), rgba(215, 38, 56, .7) ), url(http://localhost/basilisk/wp-content/uploads/2017/01/small_steps.png); /*Typography*/ color: #ffffff; font-family: 'Patua One', sans-serif; font-weight: 500; font-size: 1em; letter-spacing: 1.25px; text-decoration: none; text-align: center; /*Spacing*/ padding: 10px; } .textwidget { /*Typography*/ color: #333; font-family: 'Open Sans', sans-serif; font-size: .9em; line-height: 1.75em; text-indent: 2em; /*Spacing*/ padding: 10px; } 
.secondaryColumn .textwidget { color: #555; padding-right: 25px; padding-left: 25px; } /*General Posts*/ .general-container { /*Display*/ } .general-post { /*Sizing*/ min-height: 100%; width: calc(100%/3 - 2.7px); /*Spacing*/ padding: 10px; /*Display*/ display: inline-block; overflow: hidden; position: relative; /*Background*/ background-color: #000; } .general-post:nth-child(1), .general-post:nth-child(2), .general-post:nth-child(3) { /*Spacing*/ margin-top: 30px; } .general-post .size-small-thumbnail { /*Display*/ position: absolute; z-index: 0; opacity: 0.75; /*Sizing*/ /*height: 100%;*/ width: 100%; /*Spacing*/ margin:-10px; } .image-holder { /*Display*/ position: initial; z-index: -10; } .image-holder:after { /*Display*/ content:''; top: 0; left: 0; z-index: 10; display: block; position: absolute; /*Sizing*/ width: 100%; height: 100%; /*Background*/ background: rgba(139, 0, 0, .15); } .general-post .general-title { /*Display*/ position: relative; z-index: 10; /*Typography*/ text-align: center; /*Spacing*/ padding-top: 50px; padding-bottom: 50px; } @media screen and (max-width: 1000px) { /*Mobile Properties*/ .general-post { /*Display*/ display: block; width: 100%; /*Spacing*/ margin-top: 20px; } } @media screen and (min-width: 1000px) { /*Desktop Properties*/ .general-post { /*Spacing*/ margin-top: 20px; } } /*Pageination*/ .posts-links { /*Centering*/ text-align: center; /*Sizing*/ width: 100%; /*Spacing*/ margin-top: 15px; } .posts-links a { /*Typography*/ color: #fff; font-family: 'Patua One', sans-serif; letter-spacing: 1px; font-weight: 400; font-size: 1em; text-align: center; text-decoration: none; /*Display*/ display: inline-block; /*Spacing*/ padding: 10px; /*Background*/ background: linear-gradient( rgba(215, 38, 56, .7), rgba(215, 38, 56, .7) ), url(http://localhost/basilisk/wp-content/uploads/2017/01/small_steps.png); } .posts-links a:hover { text-decoration: underline; } /*Single*/ .single-post { } .size-banner-image { /*Display*/ display: block; 
/*Background*/ background: linear-gradient( rgba(215, 38, 56, .7), rgba(215, 38, 56, .7) ), url(http://localhost/basilisk/wp-content/uploads/2017/01/small_steps.png); /*Sizing*/ height: 25%; max-height: 25%; /*Spacing*/ margin: 0 auto; margin-top: 15px; } .single-post > p { /*Sizing*/ max-width: 1000px; /*Spacing*/ margin: 0 auto; padding: 20px; padding-left: 6em; padding-right: 6em; /*Typography*/ text-indent: 2em; color: #222; font-family: 'Open Sans', sans-serif; font-size: .925em; } .single-post .wp-caption { /*Sizing*/ max-width: 1000px; /*Spacing*/ margin: 0 auto; padding: 10px; /*padding-left: 12em;*/ /*padding-right: 12em;*/ /*Typography*/ /*text-indent: 2em;*/ color: black; font-family: 'Open Sans', sans-serif; font-size: 1em; } .single-post .wp-caption img { /*Display*/ display: block; } .single-post .wp-caption p { /*Background*/ background: linear-gradient( rgba(215, 38, 56, .7), rgba(215, 38, 56, .7) ), url(http://localhost/basilisk/wp-content/uploads/2017/01/small_steps.png); /*Typography*/ color: #ffffff; font-family: 'Patua One', sans-serif; font-weight: 500; font-size: .9em; letter-spacing: 1.25px; text-decoration: none; text-align: center; /*Spacing*/ padding: 5px; } .single-post > p > img { text-indent: -2em; display: block; } .single-post-info { /*Typography*/ font-size: .9em !important; text-indent: 0px !important; text-align: center; } iframe { /*Sizing*/ max-width: 100%; } .controls { /*Hide*/ visibility: hidden; display: none; } .single-post h4 { /*Typography*/ font-family: 'Roboto', sans-serif; font-weight: 400; color: #999; font-size: .9em; text-align: center; /*Spacing*/ margin: 12px; margin-top: 0px; } .single-title { /*Typography*/ text-align: center; /*Background*/ background: linear-gradient( rgba(215, 38, 56, .7), rgba(215, 38, 56, .7) ), url(http://localhost/basilisk/wp-content/uploads/2017/01/small_steps.png); } .single-title:hover { /*Typography*/ text-decoration: underline; } .single-title a { /*Typography*/ color: #fff; 
font-family: 'Patua One', sans-serif; letter-spacing: 1px; font-weight: 400; /*font-size: 2.25em;*/ text-align: center; text-decoration: none; /*Spacing*/ padding-bottom: 20px; padding-top: 20px; } @media screen and (max-width: 1000px) { .single-post > p { /*Spacing*/ padding-left: 3em !important; padding-right: 3em !important; } } /*Search*/ .search-form { /*Spacing*/ /*padding: 20px;*/ margin-top: 20px; } #wc-comment-header, #wc_show_hide_loggedin_username, .wc-comment-bar, .search-title { /*Typography*/ color: #999; font-family: 'Patua One', sans-serif; letter-spacing: 1px; font-weight: 400; font-size: 2.25em; text-align: center; /*Display*/ display: flex; justify-content: center; } .search-form .search-field { /*Display*/ border: 0; /*Sizing*/ width: 100%; /*Spacing*/ padding: 5px; /*Typography*/ font-family: 'Open Sans', sans-serif; font-size: 1em; text-align: center; } .search-submit { /*Spacing*/ padding: 5px; margin-top: 0px; /*Sizing*/ width: 100%; /*Background*/ /*background: rgb(215, 38, 56);*/ background: linear-gradient( rgba(215, 38, 56, .7), rgba(215, 38, 56, .7) ), url(http://localhost/basilisk/wp-content/uploads/2017/01/small_steps.png); border: 0; color: white; /*Typography*/ font-family: 'Patua One', sans-serif; letter-spacing: 1px; font-weight: 400; font-size: 1.25em; text-align: center; } .search-submit:hover { /*Typography*/ text-decoration: underline; cursor: pointer; } ::-webkit-search-cancel-button { /*Hide*/ display: none; visibility: hidden; } /*Footer*/ .site-footer { /*Spacing*/ margin-top: 12px; /*Display*/ clear: both; overflow: auto; bottom: 0; } .site-footer p { /*Typography*/ font-family: 'Roboto', sans-serif; font-weight: 400; color: #999; text-align: center; /*Spacing*/ margin: 12px; padding-top: 10px; } .site-footer a { /*Typography*/ color: inherit; text-decoration: none; } .site-footer a:hover { /*Typography*/ text-decoration: underline; } @media screen and (max-width: 1000px) { /*Mobile Properties*/ .site-footer p { 
/*Typography*/ font-size: 1.05em; } } @media screen and (min-width: 1000px) { /*Desktop Properties*/ .site-footer p { /*Typography*/ font-size: .9em; } } /*Column Widget*/ .parent { clear: both; } /*Post*/ .mainColumn { /*Positioning*/ width: 75%; float: left; } /*Sidebar*/ .secondaryColumn { /*Positioning*/ width: 25%; float: right; } .sidebarTitle { /*Typography*/ font-family: 'Roboto', sans-serif; font-weight: 400; color: #999; font-size: .9em; text-align: center; /*Spacing*/ margin: 12px; margin-top: 20px; } .secondaryColumn li { /*Spacing*/ margin: 0 auto; padding: 20px; padding-top: 0px; padding-bottom: 5px; /*Typography*/ /*text-indent: 2em;*/ color: #222; font-family: 'Open Sans', sans-serif; font-size: .9em; list-style: none; text-align: center } @media screen and (max-width: 1000px) { /*Hide Sidebar*/ .secondaryColumn { /*Positioning*/ display: none; visibility: hidden; } /*Center Post*/ .mainColumn { /*Positioning*/ width: 100%; float: none; } }
{ "content_hash": "7f86d5c9e4ef152b43c312db07ffd351", "timestamp": "", "source": "github", "line_count": 896, "max_line_length": 160, "avg_line_length": 18.202008928571427, "alnum_prop": 0.6333926053099516, "repo_name": "FlatlanderWoman/laputanMachines", "id": "b477027f46b836c19dd778a1bd5b5e88d6f837bc", "size": "16309", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "style.css", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "16309" }, { "name": "PHP", "bytes": "9643" } ], "symlink_target": "" }
#import <Foundation/Foundation.h> #import "ManifestTime.h" #import "LinearTime.h" #import "PlaybackPolicy.h" typedef enum { PlaylistEntryType_Media, // URI points to single-segment Smooth/HLS/DASH/Progressive media PlaylistEntryType_Static, // URI points to static page PlaylistEntryType_VAST, // URI points to VAST manifest - client is responsible for resolving to media URI PlaylistEntryType_SeekToStart // URI points to single-segment Smooth/HLS/DASH/Progressive media for live or null if on-demand. } PlaylistEntryType; @interface PlaylistEntry : NSObject { @private PlaylistEntryType type; int32_t entryId; int32_t originalId; LinearTime *linearTime; NSURL *clipURI; ManifestTime *renderTime; BOOL isAdvertisement; BOOL deleteAfterPlaying; PlaybackPolicy *playbackPolicy; } @property(nonatomic, assign) PlaylistEntryType type; @property(nonatomic, assign) int32_t entryId; @property(nonatomic, assign) int32_t originalId; @property(nonatomic, retain) LinearTime *linearTime; @property(nonatomic, retain) NSURL *clipURI; @property(nonatomic, retain) ManifestTime *renderTime; @property(nonatomic, assign) BOOL isAdvertisement; @property(nonatomic, assign) BOOL deleteAfterPlaying; @property(nonatomic, retain) PlaybackPolicy *playbackPolicy; @end
{ "content_hash": "5a5db8e61a71ed086233102013300b44", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 132, "avg_line_length": 34.3, "alnum_prop": 0.7383381924198251, "repo_name": "CloudMetal/azure-media-player-framework", "id": "744a211a8aa62a6b9c9cc43e8bdd759e340ec356", "size": "2108", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/iOS/inc/PlaylistEntry.h", "mode": "33261", "license": "apache-2.0", "language": [], "symlink_target": "" }
// ========================================================================== // SeqAn - The Library for Sequence Analysis // ========================================================================== // Copyright (c) 2006-2016, Knut Reinert, FU Berlin // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // * Neither the name of Knut Reinert or the FU Berlin nor the names of // its contributors may be used to endorse or promote products derived // from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL KNUT REINERT OR THE FU BERLIN BE LIABLE // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT // LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY // OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH // DAMAGE. 
// // ========================================================================== // Author: Manuel Holtgrewe <manuel.holtgrewe@fu-berlin.de> // ========================================================================== // Global alignment interface for the banded Needleman-Wunsch and Gotoh // algorithms. // // We define the interface functions pretty explicitely (versus just TAlign, // TFragments etc.) so the candidates the compiler gives when resolution to // the globalFunction() fails is actually meaningful. // ========================================================================== #ifndef SEQAN_INCLUDE_SEQAN_ALIGN_GLOBAL_ALIGNMENT_BANDED_H_ #define SEQAN_INCLUDE_SEQAN_ALIGN_GLOBAL_ALIGNMENT_BANDED_H_ namespace seqan { // ============================================================================ // Forwards // ============================================================================ template <typename TScoreValue, typename TSpec> class Score; template <typename TSpec> class Graph; template <typename TStringSet, typename TCargo, typename TGraphSpec> struct Alignment; template <typename TSize, typename TFragmentSpec> class Fragment; // ============================================================================ // Tags, Classes, Enums // ============================================================================ // ============================================================================ // Metafunctions // ============================================================================ // ============================================================================ // Functions // ============================================================================ // ---------------------------------------------------------------------------- // Function globalAlignment() [banded, Align] // ---------------------------------------------------------------------------- template <typename TSequence, typename TAlignSpec, typename TScoreValue, typename TScoreSpec, bool 
TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec, typename TAlgoTag> TScoreValue globalAlignment(Align<TSequence, TAlignSpec> & align, Score<TScoreValue, TScoreSpec> const & scoringScheme, AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & /*alignConfig*/, int lowerDiag, int upperDiag, TAlgoTag const & /*algoTag*/) { typedef Align<TSequence, TAlignSpec> TAlign; typedef typename Size<TAlign>::Type TSize; typedef typename Position<TAlign>::Type TPosition; typedef TraceSegment_<TPosition, TSize> TTraceSegment; typedef AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> TAlignConfig; typedef typename SubstituteAlignConfig_<TAlignConfig>::Type TFreeEndGaps; typedef AlignConfig2<DPGlobal, DPBandConfig<BandOn>, TFreeEndGaps> TAlignConfig2; typedef typename SubstituteAlgoTag_<TAlgoTag>::Type TGapModel; String<TTraceSegment> trace; DPScoutState_<Default> dpScoutState; TScoreValue res = _setUpAndRunAlignment(trace, dpScoutState, source(row(align, 0)), source(row(align, 1)), scoringScheme, TAlignConfig2(lowerDiag, upperDiag), TGapModel()); _adaptTraceSegmentsTo(row(align, 0), row(align, 1), trace); return res; } // Interface without AlignConfig<>. template <typename TSequence, typename TAlignSpec, typename TScoreValue, typename TScoreSpec, typename TAlgoTag> TScoreValue globalAlignment(Align<TSequence, TAlignSpec> & align, Score<TScoreValue, TScoreSpec> const & scoringScheme, int lowerDiag, int upperDiag, TAlgoTag const & algoTag) { AlignConfig<> alignConfig; return globalAlignment(align, scoringScheme, alignConfig, lowerDiag, upperDiag, algoTag); } // Interface without algorithm tag. 
template <typename TSequence, typename TAlignSpec, typename TScoreValue, typename TScoreSpec, bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec> TScoreValue globalAlignment(Align<TSequence, TAlignSpec> & align, Score<TScoreValue, TScoreSpec> const & scoringScheme, AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & alignConfig, int lowerDiag, int upperDiag) { if (scoreGapOpen(scoringScheme) == scoreGapExtend(scoringScheme)) return globalAlignment(align, scoringScheme, alignConfig, lowerDiag, upperDiag, LinearGaps()); else return globalAlignment(align, scoringScheme, alignConfig, lowerDiag, upperDiag, AffineGaps()); } // Interface without AlignConfig<> and algorithm tag. template <typename TSequence, typename TAlignSpec, typename TScoreValue, typename TScoreSpec> TScoreValue globalAlignment(Align<TSequence, TAlignSpec> & align, Score<TScoreValue, TScoreSpec> const & scoringScheme, int lowerDiag, int upperDiag) { AlignConfig<> alignConfig; return globalAlignment(align, scoringScheme, alignConfig, lowerDiag, upperDiag); } // ---------------------------------------------------------------------------- // Function globalAlignment() [banded, Gaps] // ---------------------------------------------------------------------------- template <typename TSequenceH, typename TGapsSpecH, typename TSequenceV, typename TGapsSpecV, typename TScoreValue, typename TScoreSpec, bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec, typename TAlgoTag> TScoreValue globalAlignment(Gaps<TSequenceH, TGapsSpecH> & gapsH, Gaps<TSequenceV, TGapsSpecV> & gapsV, Score<TScoreValue, TScoreSpec> const & scoringScheme, AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & /*alignConfig*/, int lowerDiag, int upperDiag, TAlgoTag const & /*algoTag*/) { typedef typename Size<TSequenceH>::Type TSize; typedef typename Position<TSequenceH>::Type TPosition; typedef TraceSegment_<TPosition, TSize> TTraceSegment; typedef AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> TAlignConfig; 
typedef typename SubstituteAlignConfig_<TAlignConfig>::Type TFreeEndGaps; typedef AlignConfig2<DPGlobal, DPBandConfig<BandOn>, TFreeEndGaps> TAlignConfig2; typedef typename SubstituteAlgoTag_<TAlgoTag>::Type TGapModel; String<TTraceSegment> trace; DPScoutState_<Default> dpScoutState; TScoreValue res = _setUpAndRunAlignment(trace, dpScoutState, source(gapsH), source(gapsV), scoringScheme, TAlignConfig2(lowerDiag, upperDiag), TGapModel()); _adaptTraceSegmentsTo(gapsH, gapsV, trace); return res; } // Interface without AlignConfig<>. template <typename TSequenceH, typename TGapsSpecH, typename TSequenceV, typename TGapsSpecV, typename TScoreValue, typename TScoreSpec, typename TAlgoTag> TScoreValue globalAlignment(Gaps<TSequenceH, TGapsSpecH> & gapsH, Gaps<TSequenceV, TGapsSpecV> & gapsV, Score<TScoreValue, TScoreSpec> const & scoringScheme, int lowerDiag, int upperDiag, TAlgoTag const & algoTag) { AlignConfig<> alignConfig; return globalAlignment(gapsH, gapsV, scoringScheme, alignConfig, lowerDiag, upperDiag, algoTag); } // Interface without algorithm tag. template <typename TSequenceH, typename TGapsSpecH, typename TSequenceV, typename TGapsSpecV, typename TScoreValue, typename TScoreSpec, bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec> TScoreValue globalAlignment(Gaps<TSequenceH, TGapsSpecH> & gapsH, Gaps<TSequenceV, TGapsSpecV> & gapsV, Score<TScoreValue, TScoreSpec> const & scoringScheme, AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & alignConfig, int lowerDiag, int upperDiag) { if (scoreGapOpen(scoringScheme) == scoreGapExtend(scoringScheme)) return globalAlignment(gapsH, gapsV, scoringScheme, alignConfig, lowerDiag, upperDiag, NeedlemanWunsch()); else return globalAlignment(gapsH, gapsV, scoringScheme, alignConfig, lowerDiag, upperDiag, Gotoh()); } // Interface without AlignConfig<> and algorithm tag. 
template <typename TSequenceH, typename TGapsSpecH, typename TSequenceV, typename TGapsSpecV, typename TScoreValue, typename TScoreSpec> TScoreValue globalAlignment(Gaps<TSequenceH, TGapsSpecH> & gapsH, Gaps<TSequenceV, TGapsSpecV> & gapsV, Score<TScoreValue, TScoreSpec> const & scoringScheme, int lowerDiag, int upperDiag) { AlignConfig<> alignConfig; return globalAlignment(gapsH, gapsV, scoringScheme, alignConfig, lowerDiag, upperDiag); } // ---------------------------------------------------------------------------- // Function globalAlignment() [banded, Graph<Alignment<> >] // ---------------------------------------------------------------------------- // Full interface. template <typename TStringSet, typename TCargo, typename TGraphSpec, typename TScoreValue, typename TScoreSpec, bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec, typename TAlgoTag> TScoreValue globalAlignment(Graph<Alignment<TStringSet, TCargo, TGraphSpec> > & alignmentGraph, Score<TScoreValue, TScoreSpec> const & scoringScheme, AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & /*alignConfig*/, int lowerDiag, int upperDiag, TAlgoTag const & /*algoTag*/) { typedef Graph<Alignment<TStringSet, TCargo, TGraphSpec> > TGraph; typedef typename Position<TGraph>::Type TPosition; typedef typename Size<TGraph>::Type TSize; typedef TraceSegment_<TPosition, TSize> TTraceSegment; typedef AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> TAlignConfig; typedef typename SubstituteAlignConfig_<TAlignConfig>::Type TFreeEndGaps; typedef AlignConfig2<DPGlobal, DPBandConfig<BandOn>, TFreeEndGaps> TAlignConfig2; typedef typename SubstituteAlgoTag_<TAlgoTag>::Type TGapModel; String<TTraceSegment> trace; DPScoutState_<Default> dpScoutState; TScoreValue res = _setUpAndRunAlignment(trace, dpScoutState, value(stringSet(alignmentGraph), 0), value(stringSet(alignmentGraph), 1), scoringScheme, TAlignConfig2(lowerDiag, upperDiag), TGapModel()); _adaptTraceSegmentsTo(alignmentGraph, 
positionToId(stringSet(alignmentGraph), 0), positionToId(stringSet(alignmentGraph), 1), trace); return res; } // Interface without AlignConfig<>. template <typename TStringSet, typename TCargo, typename TGraphSpec, typename TScoreValue, typename TScoreSpec, typename TAlgoTag> TScoreValue globalAlignment(Graph<Alignment<TStringSet, TCargo, TGraphSpec> > & alignmentGraph, Score<TScoreValue, TScoreSpec> const & scoringScheme, int lowerDiag, int upperDiag, TAlgoTag const & algoTag) { AlignConfig<> alignConfig; return globalAlignment(alignmentGraph, scoringScheme, alignConfig, lowerDiag, upperDiag, algoTag); } // Interface without algorithm tag. template <typename TStringSet, typename TCargo, typename TGraphSpec, typename TScoreValue, typename TScoreSpec, bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec> TScoreValue globalAlignment(Graph<Alignment<TStringSet, TCargo, TGraphSpec> > & alignmentGraph, Score<TScoreValue, TScoreSpec> const & scoringScheme, AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & alignConfig, int lowerDiag, int upperDiag) { if (scoreGapOpen(scoringScheme) == scoreGapExtend(scoringScheme)) return globalAlignment(alignmentGraph, scoringScheme, alignConfig, lowerDiag, upperDiag, NeedlemanWunsch()); else return globalAlignment(alignmentGraph, scoringScheme, alignConfig, lowerDiag, upperDiag, Gotoh()); } // Interface without AlignConfig<> and algorithm tag. 
template <typename TStringSet, typename TCargo, typename TGraphSpec, typename TScoreValue, typename TScoreSpec> TScoreValue globalAlignment(Graph<Alignment<TStringSet, TCargo, TGraphSpec> > & alignmentGraph, Score<TScoreValue, TScoreSpec> const & scoringScheme, int lowerDiag, int upperDiag) { AlignConfig<> alignConfig; return globalAlignment(alignmentGraph, scoringScheme, alignConfig, lowerDiag, upperDiag); } // ---------------------------------------------------------------------------- // Function globalAlignment() [banded, String<Fragment<> >] // ---------------------------------------------------------------------------- // Full interface. template <typename TSize, typename TFragmentSpec, typename TStringSpec, typename TSequence, typename TStringSetSpec, typename TScoreValue, typename TScoreSpec, bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec, typename TAlgoTag> TScoreValue globalAlignment(String<Fragment<TSize, TFragmentSpec>, TStringSpec> & fragmentString, StringSet<TSequence, TStringSetSpec> const & strings, Score<TScoreValue, TScoreSpec> const & scoringScheme, AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & /*alignConfig*/, int lowerDiag, int upperDiag, TAlgoTag const & /*algoTag*/) { typedef String<Fragment<TSize, TFragmentSpec>, TStringSpec> TFragments; typedef typename Position<TFragments>::Type TPosition; typedef TraceSegment_<TPosition, TSize> TTraceSegment; typedef AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> TAlignConfig; typedef typename SubstituteAlignConfig_<TAlignConfig>::Type TFreeEndGaps; typedef AlignConfig2<DPGlobal, DPBandConfig<BandOn>, TFreeEndGaps> TAlignConfig2; typedef typename SubstituteAlgoTag_<TAlgoTag>::Type TGapModel; String<TTraceSegment> trace; DPScoutState_<Default> dpScoutState; TScoreValue res = _setUpAndRunAlignment(trace, dpScoutState, value(strings, 0), value(strings, 1), scoringScheme, TAlignConfig2(lowerDiag, upperDiag), TGapModel()); _adaptTraceSegmentsTo(fragmentString, positionToId(strings, 0), 
positionToId(strings, 1), trace); return res; } // Interface without AlignConfig<>. template <typename TSize, typename TFragmentSpec, typename TStringSpec, typename TSequence, typename TStringSetSpec, typename TScoreValue, typename TScoreSpec, typename TAlgoTag> TScoreValue globalAlignment(String<Fragment<TSize, TFragmentSpec>, TStringSpec> & fragmentString, StringSet<TSequence, TStringSetSpec> const & strings, Score<TScoreValue, TScoreSpec> const & scoringScheme, int lowerDiag, int upperDiag, TAlgoTag const & algoTag) { AlignConfig<> alignConfig; return globalAlignment(fragmentString, strings, scoringScheme, alignConfig, lowerDiag, upperDiag, algoTag); } // Interface without algorithm tag. template <typename TSize, typename TFragmentSpec, typename TStringSpec, typename TSequence, typename TStringSetSpec, typename TScoreValue, typename TScoreSpec, bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec> TScoreValue globalAlignment(String<Fragment<TSize, TFragmentSpec>, TStringSpec> & fragmentString, StringSet<TSequence, TStringSetSpec> const & strings, Score<TScoreValue, TScoreSpec> const & scoringScheme, AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & alignConfig, int lowerDiag, int upperDiag) { if (scoreGapOpen(scoringScheme) == scoreGapExtend(scoringScheme)) return globalAlignment(fragmentString, strings, scoringScheme, alignConfig, lowerDiag, upperDiag, NeedlemanWunsch()); else return globalAlignment(fragmentString, strings, scoringScheme, alignConfig, lowerDiag, upperDiag, Gotoh()); } // Interface without AlignConfig<> and algorithm tag. 
template <typename TSize, typename TFragmentSpec, typename TStringSpec, typename TSequence, typename TStringSetSpec, typename TScoreValue, typename TScoreSpec> TScoreValue globalAlignment(String<Fragment<TSize, TFragmentSpec>, TStringSpec> & fragmentString, StringSet<TSequence, TStringSetSpec> const & strings, Score<TScoreValue, TScoreSpec> const & scoringScheme, int lowerDiag, int upperDiag) { AlignConfig<> alignConfig; return globalAlignment(fragmentString, strings, scoringScheme, alignConfig, lowerDiag, upperDiag); } // ---------------------------------------------------------------------------- // Function globalAlignmentScore() [banded, 2 Strings] // ---------------------------------------------------------------------------- template <typename TSequenceH, typename TSequenceV, typename TScoreValue, typename TScoreSpec, bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec, typename TAlgoTag> TScoreValue globalAlignmentScore(TSequenceH const & seqH, TSequenceV const & seqV, Score<TScoreValue, TScoreSpec> const & scoringScheme, AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & /*alignConfig*/, int lowerDiag, int upperDiag, TAlgoTag const & /*algoTag*/) { typedef AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> TAlignConfig; typedef typename SubstituteAlignConfig_<TAlignConfig>::Type TFreeEndGaps; typedef AlignConfig2<DPGlobal, DPBandConfig<BandOn>, TFreeEndGaps, TracebackOff> TAlignConfig2; typedef typename SubstituteAlgoTag_<TAlgoTag>::Type TGapModel; DPScoutState_<Default> dpScoutState; String<TraceSegment_<unsigned, unsigned> > traceSegments; // Dummy segments. return _setUpAndRunAlignment(traceSegments, dpScoutState, seqH, seqV, scoringScheme, TAlignConfig2(lowerDiag, upperDiag), TGapModel()); } // Interface without AlignConfig<>. 
template <typename TSequenceH, typename TSequenceV, typename TScoreValue, typename TScoreSpec, typename TAlgoTag> TScoreValue globalAlignmentScore(TSequenceH const & seqH, TSequenceV const & seqV, Score<TScoreValue, TScoreSpec> const & scoringScheme, int lowerDiag, int upperDiag, TAlgoTag const & algoTag) { AlignConfig<> alignConfig; return globalAlignmentScore(seqH, seqV, scoringScheme, alignConfig, lowerDiag, upperDiag, algoTag); } // Interface without algorithm tag. template <typename TSequenceH, typename TSequenceV, typename TScoreValue, typename TScoreSpec, bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec> TScoreValue globalAlignmentScore(TSequenceH const & seqH, TSequenceV const & seqV, Score<TScoreValue, TScoreSpec> const & scoringScheme, AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & alignConfig, int lowerDiag, int upperDiag) { if (scoreGapOpen(scoringScheme) == scoreGapExtend(scoringScheme)) return globalAlignmentScore(seqH, seqV, scoringScheme, alignConfig, lowerDiag, upperDiag, NeedlemanWunsch()); else return globalAlignmentScore(seqH, seqV, scoringScheme, alignConfig, lowerDiag, upperDiag, Gotoh()); } // Interface without AlignConfig<> and algorithm tag. 
// Convenience overload without AlignConfig<> and algorithm tag: default
// configuration, algorithm chosen from the scoring scheme by the callee.
template <typename TSequenceH, typename TSequenceV,
          typename TScoreValue, typename TScoreSpec>
TScoreValue globalAlignmentScore(TSequenceH const & seqH,
                                 TSequenceV const & seqV,
                                 Score<TScoreValue, TScoreSpec> const & scoringScheme,
                                 int lowerDiag,
                                 int upperDiag)
{
    AlignConfig<> alignConfig;
    return globalAlignmentScore(seqH, seqV, scoringScheme, alignConfig, lowerDiag, upperDiag);
}

// ----------------------------------------------------------------------------
// Function globalAlignmentScore()                        [banded, StringSet]
// ----------------------------------------------------------------------------

// Score-only banded global alignment of exactly two sequences passed as a
// StringSet (asserted below).  Traceback is disabled; only the score is
// computed.
template <typename TString, typename TSpec,
          typename TScoreValue, typename TScoreSpec,
          bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec,
          typename TAlgoTag>
TScoreValue globalAlignmentScore(StringSet<TString, TSpec> const & strings,
                                 Score<TScoreValue, TScoreSpec> const & scoringScheme,
                                 AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & /*alignConfig*/,
                                 int lowerDiag,
                                 int upperDiag,
                                 TAlgoTag const & /*algoTag*/)
{
    typedef AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> TAlignConfig;
    typedef typename SubstituteAlignConfig_<TAlignConfig>::Type TFreeEndGaps;
    typedef AlignConfig2<DPGlobal, DPBandConfig<BandOn>, TFreeEndGaps, TracebackOff> TAlignConfig2;
    typedef typename SubstituteAlgoTag_<TAlgoTag>::Type TGapModel;

    SEQAN_ASSERT_EQ(length(strings), 2u);  // This interface supports pairwise alignment only.

    DPScoutState_<Default> dpScoutState;
    String<TraceSegment_<unsigned, unsigned> > traceSegments;  // Dummy segments (traceback is off).
    return _setUpAndRunAlignment(traceSegments, dpScoutState, strings[0], strings[1], scoringScheme,
                                 TAlignConfig2(lowerDiag, upperDiag), TGapModel());
}

// Interface without AlignConfig<>.
template <typename TString, typename TSpec, typename TScoreValue, typename TScoreSpec, typename TAlgoTag> TScoreValue globalAlignmentScore(StringSet<TString, TSpec> const & strings, Score<TScoreValue, TScoreSpec> const & scoringScheme, int lowerDiag, int upperDiag, TAlgoTag const & algoTag) { SEQAN_ASSERT_EQ(length(strings), 2u); AlignConfig<> alignConfig; return globalAlignmentScore(strings[0], strings[1], scoringScheme, alignConfig, lowerDiag, upperDiag, algoTag); } // Interface without algorithm tag. template <typename TString, typename TSpec, typename TScoreValue, typename TScoreSpec, bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec> TScoreValue globalAlignmentScore(StringSet<TString, TSpec> const & strings, Score<TScoreValue, TScoreSpec> const & scoringScheme, AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & alignConfig, int lowerDiag, int upperDiag) { SEQAN_ASSERT_EQ(length(strings), 2u); if (scoreGapOpen(scoringScheme) == scoreGapExtend(scoringScheme)) return globalAlignmentScore(strings[0], strings[1], scoringScheme, alignConfig, lowerDiag, upperDiag, NeedlemanWunsch()); else return globalAlignmentScore(strings[0], strings[1], scoringScheme, alignConfig, lowerDiag, upperDiag, Gotoh()); } // Interface without AlignConfig<> and algorithm tag. template <typename TString, typename TSpec, typename TScoreValue, typename TScoreSpec> TScoreValue globalAlignmentScore(StringSet<TString, TSpec> const & strings, Score<TScoreValue, TScoreSpec> const & scoringScheme, int lowerDiag, int upperDiag) { SEQAN_ASSERT_EQ(length(strings), 2u); AlignConfig<> alignConfig; return globalAlignmentScore(strings[0], strings[1], scoringScheme, alignConfig, lowerDiag, upperDiag); } // ============================================================================ // Many-vs-Many align interfaces. 
// ============================================================================ // ---------------------------------------------------------------------------- // Function globalAlignmentScore() [banded, SIMD version, 2x StringSet] // ---------------------------------------------------------------------------- template <typename TString, typename TSpec, typename TScoreValue, typename TScoreSpec, bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec, typename TAlgoTag> String<TScoreValue> globalAlignmentScore(StringSet<TString, TSpec> const & stringsH, StringSet<TString, TSpec> const & stringsV, Score<TScoreValue, TScoreSpec> const & scoringScheme, AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & /*alignConfig*/, int lowerDiag, int upperDiag, TAlgoTag const & /*algoTag*/) { typedef AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> TAlignConfig; typedef typename SubstituteAlignConfig_<TAlignConfig>::Type TFreeEndGaps; typedef AlignConfig2<DPGlobal, DPBandConfig<BandOn>, TFreeEndGaps, TracebackOff> TAlignConfig2; typedef typename SubstituteAlgoTag_<TAlgoTag>::Type TGapModel; SEQAN_ASSERT_EQ(length(stringsH), length(stringsV)); return _alignWrapper(stringsH, stringsV, scoringScheme, TAlignConfig2(lowerDiag, upperDiag), TGapModel()); } // Interface without AlignConfig<>. template <typename TString, typename TSpec, typename TScoreValue, typename TScoreSpec, typename TAlgoTag> String<TScoreValue> globalAlignmentScore(StringSet<TString, TSpec> const & stringsH, StringSet<TString, TSpec> const & stringsV, Score<TScoreValue, TScoreSpec> const & scoringScheme, int lowerDiag, int upperDiag, TAlgoTag const & algoTag) { AlignConfig<> alignConfig; return globalAlignmentScore(stringsH, stringsV, scoringScheme, alignConfig, lowerDiag, upperDiag, algoTag); } // Interface without algorithm tag. 
// Dispatch on gap model: equal gap-open/gap-extend costs select
// Needleman-Wunsch (linear gap costs), otherwise Gotoh (affine gap costs).
template <typename TString, typename TSpec,
          typename TScoreValue, typename TScoreSpec,
          bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec>
String<TScoreValue> globalAlignmentScore(StringSet<TString, TSpec> const & stringsH,
                                         StringSet<TString, TSpec> const & stringsV,
                                         Score<TScoreValue, TScoreSpec> const & scoringScheme,
                                         AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & alignConfig,
                                         int lowerDiag,
                                         int upperDiag)
{
    if (scoreGapOpen(scoringScheme) == scoreGapExtend(scoringScheme))
        return globalAlignmentScore(stringsH, stringsV, scoringScheme, alignConfig, lowerDiag, upperDiag, NeedlemanWunsch());
    else
        return globalAlignmentScore(stringsH, stringsV, scoringScheme, alignConfig, lowerDiag, upperDiag, Gotoh());
}

// Interface without AlignConfig<> and algorithm tag.
template <typename TString, typename TSpec,
          typename TScoreValue, typename TScoreSpec>
String<TScoreValue> globalAlignmentScore(StringSet<TString, TSpec> const & stringsH,
                                         StringSet<TString, TSpec> const & stringsV,
                                         Score<TScoreValue, TScoreSpec> const & scoringScheme,
                                         int lowerDiag,
                                         int upperDiag)
{
    AlignConfig<> alignConfig;
    return globalAlignmentScore(stringsH, stringsV, scoringScheme, alignConfig, lowerDiag, upperDiag);
}

// ----------------------------------------------------------------------------
// Function globalAlignmentScore()  [banded, SIMD version, String vs StringSet]
// ----------------------------------------------------------------------------

// Score-only banded alignment of one horizontal sequence against every
// sequence in stringsV; returns one score per vertical sequence.
template <typename TStringH,
          typename TStringV, typename TSpec,
          typename TScoreValue, typename TScoreSpec,
          bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec,
          typename TAlgoTag>
String<TScoreValue> globalAlignmentScore(TStringH const & stringH,
                                         StringSet<TStringV, TSpec> const & stringsV,
                                         Score<TScoreValue, TScoreSpec> const & scoringScheme,
                                         AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & /*alignConfig*/,
                                         int lowerDiag,
                                         int upperDiag,
                                         TAlgoTag const & /*algoTag*/)
{
    typedef AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> TAlignConfig;
    typedef typename SubstituteAlignConfig_<TAlignConfig>::Type TFreeEndGaps;
    typedef AlignConfig2<DPGlobal, DPBandConfig<BandOn>, TFreeEndGaps, TracebackOff> TAlignConfig2;
    typedef typename SubstituteAlgoTag_<TAlgoTag>::Type TGapModel;

    return _alignWrapper(stringH, stringsV, scoringScheme, TAlignConfig2(lowerDiag, upperDiag), TGapModel());
}

// Interface without AlignConfig<>.
template <typename TString, typename TSpec,
          typename TScoreValue, typename TScoreSpec,
          typename TAlgoTag>
String<TScoreValue> globalAlignmentScore(TString const & stringH,
                                         StringSet<TString, TSpec> const & stringsV,
                                         Score<TScoreValue, TScoreSpec> const & scoringScheme,
                                         int lowerDiag,
                                         int upperDiag,
                                         TAlgoTag const & algoTag)
{
    AlignConfig<> alignConfig;
    return globalAlignmentScore(stringH, stringsV, scoringScheme, alignConfig, lowerDiag, upperDiag, algoTag);
}

// Interface without algorithm tag.
// Dispatch on gap model (see above): linear -> NeedlemanWunsch, affine -> Gotoh.
template <typename TString, typename TSpec,
          typename TScoreValue, typename TScoreSpec,
          bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec>
String<TScoreValue> globalAlignmentScore(TString const & stringH,
                                         StringSet<TString, TSpec> const & stringsV,
                                         Score<TScoreValue, TScoreSpec> const & scoringScheme,
                                         AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & alignConfig,
                                         int lowerDiag,
                                         int upperDiag)
{
    if (scoreGapOpen(scoringScheme) == scoreGapExtend(scoringScheme))
        return globalAlignmentScore(stringH, stringsV, scoringScheme, alignConfig, lowerDiag, upperDiag, NeedlemanWunsch());
    else
        return globalAlignmentScore(stringH, stringsV, scoringScheme, alignConfig, lowerDiag, upperDiag, Gotoh());
}

// Interface without AlignConfig<> and algorithm tag.
// Convenience overload with default configuration and auto-selected algorithm.
template <typename TString, typename TSpec,
          typename TScoreValue, typename TScoreSpec>
String<TScoreValue> globalAlignmentScore(TString const & stringH,
                                         StringSet<TString, TSpec> const & stringsV,
                                         Score<TScoreValue, TScoreSpec> const & scoringScheme,
                                         int lowerDiag,
                                         int upperDiag)
{
    AlignConfig<> alignConfig;
    return globalAlignmentScore(stringH, stringsV, scoringScheme, alignConfig, lowerDiag, upperDiag);
}

// ----------------------------------------------------------------------------
// Function globalAlignment()           [banded, SIMD version, GapsH, GapsV]
// ----------------------------------------------------------------------------

// Banded global alignment of many pairs given as two gap-sequence sets; the
// gaps objects are filled with the computed alignments (traceback is on,
// unlike the score-only interfaces above: note TAlignConfig2 omits
// TracebackOff).
template <typename TGapSequenceH, typename TSetSpecH,
          typename TGapSequenceV, typename TSetSpecV,
          typename TScoreValue, typename TScoreSpec,
          bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec,
          typename TAlgoTag>
inline auto
globalAlignment(StringSet<TGapSequenceH, TSetSpecH> & gapSeqSetH,
                StringSet<TGapSequenceV, TSetSpecV> & gapSeqSetV,
                Score<TScoreValue, TScoreSpec> const & scoringScheme,
                AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & /*alignConfig*/,
                int const lowerDiag,
                int const upperDiag,
                TAlgoTag const & /*algoTag*/)
{
    typedef AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> TAlignConfig;
    typedef typename SubstituteAlignConfig_<TAlignConfig>::Type TFreeEndGaps;
    typedef AlignConfig2<DPGlobal, DPBandConfig<BandOn>, TFreeEndGaps> TAlignConfig2;
    typedef typename SubstituteAlgoTag_<TAlgoTag>::Type TGapModel;

    return _alignWrapper(gapSeqSetH, gapSeqSetV, scoringScheme, TAlignConfig2(lowerDiag, upperDiag), TGapModel());
}

// ----------------------------------------------------------------------------
// Function globalAlignment()          [banded, SIMD version, StringSet<Align>]
// ----------------------------------------------------------------------------

// Banded global alignment over a set of Align objects: collects row 0 and
// row 1 of each Align into dependent gap-sequence sets and forwards to the
// GapsH/GapsV overload above.  Each Align must therefore hold two rows.
template <typename TSequence, typename TAlignSpec,
          typename TScoreValue, typename TScoreSpec,
          bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec,
          typename TAlgoTag>
String<TScoreValue> globalAlignment(StringSet<Align<TSequence, TAlignSpec> > & alignSet,
                                    Score<TScoreValue, TScoreSpec> const & scoringScheme,
                                    AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & alignConfig,
                                    int const lowerDiag,
                                    int const upperDiag,
                                    TAlgoTag const & algoTag)
{
    typedef Align<TSequence, TAlignSpec> TAlign;
    typedef typename Row<TAlign>::Type TGapSequence;

    // Dependent sets reference the rows inside alignSet; no sequence copies.
    StringSet<TGapSequence, Dependent<> > gapSetH;
    StringSet<TGapSequence, Dependent<> > gapSetV;
    reserve(gapSetH, length(alignSet));
    reserve(gapSetV, length(alignSet));

    for (auto & align : alignSet)
    {
        appendValue(gapSetH, row(align, 0));
        appendValue(gapSetV, row(align, 1));
    }

    return globalAlignment(gapSetH, gapSetV, scoringScheme, alignConfig, lowerDiag, upperDiag, algoTag);
}

// Interface without AlignConfig<>.
template <typename TSequence, typename TAlignSpec,
          typename TScoreValue, typename TScoreSpec,
          typename TAlgoTag>
String<TScoreValue> globalAlignment(StringSet<Align<TSequence, TAlignSpec> > & align,
                                    Score<TScoreValue, TScoreSpec> const & scoringScheme,
                                    int lowerDiag,
                                    int upperDiag,
                                    TAlgoTag const & algoTag)
{
    AlignConfig<> alignConfig;
    return globalAlignment(align, scoringScheme, alignConfig, lowerDiag, upperDiag, algoTag);
}

// Interface without algorithm tag.
// Dispatch on gap model: linear gap costs -> NeedlemanWunsch, affine -> Gotoh.
template <typename TSequence, typename TAlignSpec,
          typename TScoreValue, typename TScoreSpec,
          bool TOP, bool LEFT, bool RIGHT, bool BOTTOM, typename TACSpec>
String<TScoreValue> globalAlignment(StringSet<Align<TSequence, TAlignSpec> > & align,
                                    Score<TScoreValue, TScoreSpec> const & scoringScheme,
                                    AlignConfig<TOP, LEFT, RIGHT, BOTTOM, TACSpec> const & alignConfig,
                                    int lowerDiag,
                                    int upperDiag)
{
    if (scoreGapOpen(scoringScheme) == scoreGapExtend(scoringScheme))
        return globalAlignment(align, scoringScheme, alignConfig, lowerDiag, upperDiag, NeedlemanWunsch());
    else
        return globalAlignment(align, scoringScheme, alignConfig, lowerDiag, upperDiag, Gotoh());
}

// Interface without AlignConfig<> and algorithm tag.
template <typename TSequence, typename TAlignSpec, typename TScoreValue, typename TScoreSpec> String<TScoreValue> globalAlignment(StringSet<Align<TSequence, TAlignSpec> > & align, Score<TScoreValue, TScoreSpec> const & scoringScheme, int lowerDiag, int upperDiag) { AlignConfig<> alignConfig; return globalAlignment(align, scoringScheme, alignConfig, lowerDiag, upperDiag); } } // namespace seqan #endif // #ifndef SEQAN_INCLUDE_SEQAN_ALIGN_GLOBAL_ALIGNMENT_BANDED_H_
{ "content_hash": "058c08b15520bb7a9198c7f5770e5936", "timestamp": "", "source": "github", "line_count": 786, "max_line_length": 129, "avg_line_length": 51.23155216284987, "alnum_prop": 0.5964537598092778, "repo_name": "variar/contest-template", "id": "eab33c6382754bf940d895d6f2d138a731eb03aa", "size": "40268", "binary": false, "copies": "14", "ref": "refs/heads/master", "path": "external/seqan/include/seqan/align/global_alignment_banded.h", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "10311" }, { "name": "CMake", "bytes": "205949" }, { "name": "Makefile", "bytes": "18" }, { "name": "Shell", "bytes": "1028" } ], "symlink_target": "" }
/** * Example that shows the use of the the progress callback. * The progress bar is made using the node-progress (https://github.com/visionmedia/node-progress) module. * You need to install the dev dependencies to make it work. */ var Curl = require( '../lib/Curl' ), path = require( 'path' ), fs = require( 'fs' ), ProgressBar = require( 'progress' ); var curl = new Curl(), url = process.argv[2] || 'http://ipv4.download.thinkbroadband.com/5MB.zip', outputFile = path.resolve( __dirname, 'result.out' ), lastdlnow = 0, bar; if ( fs.existsSync( outputFile ) ) fs.unlinkSync( outputFile ); curl.setOpt( 'URL', url ); curl.setOpt( Curl.option.NOPROGRESS, false ); //Since we are downloading a large file, disable internal storage // used for automatic http data/headers parsing. //Because of that, the end event will receive a nothing for both data/header arguments. curl.enable( Curl.feature.NO_STORAGE ); // The option XFERINFOFUNCTION was introduced in curl version 7.32.0, // versions older than that should use PROGRESSFUNCTION. // if you don't want to mess with version numbers, // there is the following helper method to set the progress cb. curl.setProgressCallback(function( dltotal, dlnow, ultotal, ulnow ) { if ( dltotal == 0 ) return 0; if ( !bar ) { bar = new ProgressBar('Downloading [:bar] :percent :etas', { complete : '=', incomplete: ' ', width : 20, total : dltotal }); } else { bar.tick( dlnow - lastdlnow ); lastdlnow = dlnow; } return 0; }); // This is the same than the data event, however, // keep in mind that here the return value is considered. curl.onData = function( chunk ) { fs.appendFileSync( outputFile, chunk ); return chunk.length; }; curl.on( 'end', curl.close.bind( curl ) ); curl.on( 'error', curl.close.bind( curl ) ); curl.perform();
{ "content_hash": "ea5b1eaccc081a404f762b4965777659", "timestamp": "", "source": "github", "line_count": 70, "max_line_length": 106, "avg_line_length": 27.82857142857143, "alnum_prop": 0.6457905544147844, "repo_name": "yesyayen/Website-Uptime-Monitor", "id": "a10acab88d3acbb74d3e30bb940739738639f08f", "size": "1948", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "node_modules/node-libcurl/examples/progress-callback.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "3162" }, { "name": "HTML", "bytes": "25181" }, { "name": "JavaScript", "bytes": "20707" } ], "symlink_target": "" }
/// Xcode-generated test case skeleton for the SuperCard target.
@interface SuperCardTests : XCTestCase
@end

@implementation SuperCardTests

#pragma mark - Lifecycle

- (void)setUp {
    [super setUp];
    // Per-test setup goes here; this runs before each test method.
}

- (void)tearDown {
    // Per-test cleanup goes here; this runs after each test method.
    [super tearDown];
}

#pragma mark - Tests

/// Placeholder functional test.
- (void)testExample {
    XCTAssertTrue(YES, @"Pass");
}

/// Placeholder performance test; the measured block is timed by XCTest.
- (void)testPerformanceExample {
    [self measureBlock:^{
        // Code whose execution time should be measured goes here.
    }];
}

@end
{ "content_hash": "f60effb2fa9059693a864a101bba0eae", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 107, "avg_line_length": 23.17241379310345, "alnum_prop": 0.6785714285714286, "repo_name": "mcxiaoke/learning-ios", "id": "dc109e53fcdc087f3262509357157b69e2d302fc", "size": "866", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cs193p/SuperCard/SuperCardTests/SuperCardTests.m", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "656" }, { "name": "Objective-C", "bytes": "394182" }, { "name": "Ruby", "bytes": "1479" }, { "name": "Swift", "bytes": "847789" } ], "symlink_target": "" }