repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
jarv/cmdchallenge-site | lambda_src/runcmd/requests/packages/urllib3/contrib/appengine.py | 10847 | """
This module provides a pool manager that uses Google App Engine's
`URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
Example usage::
from urllib3 import PoolManager
from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
if is_appengine_sandbox():
# AppEngineManager uses AppEngine's URLFetch API behind the scenes
http = AppEngineManager()
else:
# PoolManager uses a socket-level API behind the scenes
http = PoolManager()
r = http.request('GET', 'https://google.com/')
There are `limitations <https://cloud.google.com/appengine/docs/python/\
urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be
the best choice for your application. There are three options for using
urllib3 on Google App Engine:
1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
cost-effective in many circumstances as long as your usage is within the
limitations.
2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
Sockets also have `limitations and restrictions
<https://cloud.google.com/appengine/docs/python/sockets/\
#limitations-and-restrictions>`_ and have a lower free quota than URLFetch.
To use sockets, be sure to specify the following in your ``app.yaml``::
env_variables:
GAE_USE_SOCKETS_HTTPLIB : 'true'
3. If you are using `App Engine Flexible
<https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
:class:`PoolManager` without any configuration or special environment variables.
"""
from __future__ import absolute_import
import logging
import os
import warnings
from ..packages.six.moves.urllib.parse import urljoin
from ..exceptions import (
HTTPError,
HTTPWarning,
MaxRetryError,
ProtocolError,
TimeoutError,
SSLError
)
from ..packages.six import BytesIO
from ..request import RequestMethods
from ..response import HTTPResponse
from ..util.timeout import Timeout
from ..util.retry import Retry
# URLFetch only exists inside the App Engine runtime; outside it we record
# None so AppEngineManager.__init__ can fail with a clear platform error.
try:
    from google.appengine.api import urlfetch
except ImportError:
    urlfetch = None

log = logging.getLogger(__name__)
class AppEnginePlatformWarning(HTTPWarning):
    """Warning emitted when urllib3 falls back to the URLFetch API."""
    pass
class AppEnginePlatformError(HTTPError):
    """Raised when a request cannot be served by URLFetch at all:
    the service is unavailable, the request/response exceeds URLFetch
    size limits, or the HTTP method is unsupported.
    """
    pass
class AppEngineManager(RequestMethods):
    """
    Connection manager for Google App Engine sandbox applications.

    This manager uses the URLFetch service directly instead of using the
    emulated httplib, and is subject to URLFetch limitations as described in
    the App Engine documentation `here
    <https://cloud.google.com/appengine/docs/python/urlfetch>`_.

    Notably it will raise an :class:`AppEnginePlatformError` if:
        * URLFetch is not available.
        * If you attempt to use this on App Engine Flexible, as full socket
          support is available.
        * If a request size is more than 10 megabytes.
        * If a response size is more than 32 megabytes.
        * If you use an unsupported request method such as OPTIONS.

    Beyond those cases, it will raise normal urllib3 errors.
    """

    def __init__(self, headers=None, retries=None, validate_certificate=True,
                 urlfetch_retries=True):
        # Fail fast when the URLFetch API is simply not importable here.
        if not urlfetch:
            raise AppEnginePlatformError(
                "URLFetch is not available in this environment.")
        # On Managed VMs full socket support exists, so URLFetch is the
        # wrong tool; refuse to construct.
        # NOTE(review): the concatenated message below is missing a space
        # between "AppEngineManager" and "on" — confirm before fixing, as
        # tests may match the exact text.
        if is_prod_appengine_mvms():
            raise AppEnginePlatformError(
                "Use normal urllib3.PoolManager instead of AppEngineManager"
                "on Managed VMs, as using URLFetch is not necessary in "
                "this environment.")
        # Always tell the user they are on the URLFetch code path.
        warnings.warn(
            "urllib3 is using URLFetch on Google App Engine sandbox instead "
            "of sockets. To use sockets directly instead of URLFetch see "
            "https://urllib3.readthedocs.io/en/latest/contrib.html.",
            AppEnginePlatformWarning)
        RequestMethods.__init__(self, headers)
        self.validate_certificate = validate_certificate
        # When True, redirect/retry handling is delegated to URLFetch itself;
        # when False, urllib3's Retry machinery drives redirects.
        self.urlfetch_retries = urlfetch_retries
        self.retries = retries or Retry.DEFAULT

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Return False to re-raise any potential exceptions
        return False

    def urlopen(self, method, url, body=None, headers=None,
                retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT,
                **response_kw):
        """Issue one request via urlfetch.fetch(), translating URLFetch
        exceptions into urllib3 exceptions and handling redirects/retries
        per the resolved Retry configuration.
        """
        retries = self._get_retries(retries, redirect)

        try:
            # Let URLFetch follow redirects itself only if the caller allows
            # redirects and the Retry config has redirect budget left.
            follow_redirects = (
                redirect and
                retries.redirect != 0 and
                retries.total)
            response = urlfetch.fetch(
                url,
                payload=body,
                method=method,
                headers=headers or {},
                allow_truncated=False,
                follow_redirects=self.urlfetch_retries and follow_redirects,
                deadline=self._get_absolute_timeout(timeout),
                validate_certificate=self.validate_certificate,
            )
        except urlfetch.DeadlineExceededError as e:
            raise TimeoutError(self, e)
        except urlfetch.InvalidURLError as e:
            # URLFetch reports oversized payloads as "invalid URL"; surface
            # the platform limitation explicitly.
            if 'too large' in str(e):
                raise AppEnginePlatformError(
                    "URLFetch request too large, URLFetch only "
                    "supports requests up to 10mb in size.", e)
            raise ProtocolError(e)
        except urlfetch.DownloadError as e:
            if 'Too many redirects' in str(e):
                raise MaxRetryError(self, url, reason=e)
            raise ProtocolError(e)
        except urlfetch.ResponseTooLargeError as e:
            raise AppEnginePlatformError(
                "URLFetch response too large, URLFetch only supports"
                "responses up to 32mb in size.", e)
        except urlfetch.SSLCertificateError as e:
            raise SSLError(e)
        except urlfetch.InvalidMethodError as e:
            raise AppEnginePlatformError(
                "URLFetch does not support method: %s" % method, e)

        http_response = self._urlfetch_response_to_http_response(
            response, retries=retries, **response_kw)

        # Handle redirect?
        redirect_location = redirect and http_response.get_redirect_location()
        if redirect_location:
            # Check for redirect response
            if (self.urlfetch_retries and retries.raise_on_redirect):
                # URLFetch already exhausted its own redirect following.
                raise MaxRetryError(self, url, "too many redirects")
            else:
                # 303 See Other always downgrades the follow-up to GET.
                if http_response.status == 303:
                    method = 'GET'
                try:
                    retries = retries.increment(method, url, response=http_response, _pool=self)
                except MaxRetryError:
                    if retries.raise_on_redirect:
                        raise MaxRetryError(self, url, "too many redirects")
                    return http_response
                retries.sleep_for_retry(http_response)
                log.debug("Redirecting %s -> %s", url, redirect_location)
                # Redirect target may be relative; resolve against the
                # original URL.
                redirect_url = urljoin(url, redirect_location)
                return self.urlopen(
                    method, redirect_url, body, headers,
                    retries=retries, redirect=redirect,
                    timeout=timeout, **response_kw)

        # Check if we should retry the HTTP response.
        has_retry_after = bool(http_response.getheader('Retry-After'))
        if retries.is_retry(method, http_response.status, has_retry_after):
            retries = retries.increment(
                method, url, response=http_response, _pool=self)
            log.debug("Retry: %s", url)
            retries.sleep(http_response)
            return self.urlopen(
                method, url,
                body=body, headers=headers,
                retries=retries, redirect=redirect,
                timeout=timeout, **response_kw)

        return http_response

    def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
        """Adapt a urlfetch response object into a urllib3 HTTPResponse,
        scrubbing headers that no longer describe the (already-decoded,
        fully-buffered) body.
        """
        if is_prod_appengine():
            # Production GAE handles deflate encoding automatically, but does
            # not remove the encoding header.
            content_encoding = urlfetch_resp.headers.get('content-encoding')
            if content_encoding == 'deflate':
                del urlfetch_resp.headers['content-encoding']

        transfer_encoding = urlfetch_resp.headers.get('transfer-encoding')
        # We have a full response's content,
        # so let's make sure we don't report ourselves as chunked data.
        if transfer_encoding == 'chunked':
            encodings = transfer_encoding.split(",")
            encodings.remove('chunked')
            urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings)

        return HTTPResponse(
            # In order for decoding to work, we must present the content as
            # a file-like object.
            body=BytesIO(urlfetch_resp.content),
            headers=urlfetch_resp.headers,
            status=urlfetch_resp.status_code,
            **response_kw
        )

    def _get_absolute_timeout(self, timeout):
        """Collapse a urllib3 Timeout into the single deadline URLFetch
        supports, warning when granular connect/read values are discarded.
        """
        if timeout is Timeout.DEFAULT_TIMEOUT:
            return None  # Defer to URLFetch's default.
        if isinstance(timeout, Timeout):
            if timeout._read is not None or timeout._connect is not None:
                warnings.warn(
                    "URLFetch does not support granular timeout settings, "
                    "reverting to total or default URLFetch timeout.",
                    AppEnginePlatformWarning)
            return timeout.total
        return timeout

    def _get_retries(self, retries, redirect):
        """Coerce the retries argument into a Retry instance and warn about
        knobs URLFetch cannot honour.
        """
        if not isinstance(retries, Retry):
            retries = Retry.from_int(
                retries, redirect=redirect, default=self.retries)
        if retries.connect or retries.read or retries.redirect:
            warnings.warn(
                "URLFetch only supports total retries and does not "
                "recognize connect, read, or redirect retry parameters.",
                AppEnginePlatformWarning)
        return retries
def is_appengine():
    """Return True in any App Engine environment: the local development
    server, the production sandbox, or a Managed VM.
    """
    return (
        is_local_appengine()
        or is_prod_appengine()
        or is_prod_appengine_mvms()
    )
def is_appengine_sandbox():
    """Return True only for the sandboxed runtimes, i.e. App Engine
    environments where sockets are restricted and URLFetch must be used.
    """
    if not is_appengine():
        return False
    return not is_prod_appengine_mvms()
def is_local_appengine():
    """Return True when running under the App Engine development server.

    Bug fix: the original indexed ``os.environ['SERVER_SOFTWARE']``
    directly, raising ``KeyError`` whenever ``APPENGINE_RUNTIME`` is set
    but ``SERVER_SOFTWARE`` is not; ``.get`` with a default makes the
    check total and simply returns False in that case (matches the
    upstream urllib3 fix).
    """
    return ('APPENGINE_RUNTIME' in os.environ and
            'Development/' in os.environ.get('SERVER_SOFTWARE', ''))
def is_prod_appengine():
    """Return True in the production App Engine sandbox (not a Managed VM).

    Bug fix: use ``os.environ.get('SERVER_SOFTWARE', '')`` instead of a
    direct index, which raised ``KeyError`` when ``APPENGINE_RUNTIME`` was
    set without ``SERVER_SOFTWARE`` (matches the upstream urllib3 fix).
    """
    return ('APPENGINE_RUNTIME' in os.environ and
            'Google App Engine/' in os.environ.get('SERVER_SOFTWARE', '') and
            not is_prod_appengine_mvms())
def is_prod_appengine_mvms():
    """Return True when running on an App Engine Managed VM
    (the environment exports ``GAE_VM=true``).
    """
    gae_vm = os.environ.get('GAE_VM', False)
    return gae_vm == 'true'
| mit |
leandronunes/abelo | vendor/rails/activerecord/test/cases/reload_models_test.rb | 730 | require "cases/helper"
require 'models/owner'
require 'models/pet'
class ReloadModelsTest < ActiveRecord::TestCase
def test_has_one_with_reload
pet = Pet.find_by_name('parrot')
pet.owner = Owner.find_by_name('ashley')
# Reload the class Owner, simulating auto-reloading of model classes in a
# development environment. Note that meanwhile the class Pet is not
# reloaded, simulating a class that is present in a plugin.
Object.class_eval { remove_const :Owner }
Kernel.load(File.expand_path(File.join(File.dirname(__FILE__), "../models/owner.rb")))
pet = Pet.find_by_name('parrot')
pet.owner = Owner.find_by_name('ashley')
assert_equal pet.owner, Owner.find_by_name('ashley')
end
end | gpl-3.0 |
HereSinceres/TypeScript | tests/baselines/reference/subtypingWithConstructSignatures4.js | 5584 | //// [subtypingWithConstructSignatures4.ts]
// checking subtype relations for function types as it relates to contextual signature instantiation
class Base { foo: string; }
class Derived extends Base { bar: string; }
class Derived2 extends Derived { baz: string; }
class OtherDerived extends Base { bing: string; }
declare function foo1(a: new <T>(x: T) => T[]);
declare function foo1(a: any): any;
declare function foo2(a2: new <T>(x: T) => string[]);
declare function foo2(a: any): any;
declare function foo3(a3: new <T>(x: T) => void);
declare function foo3(a: any): any;
declare function foo4(a4: new <T, U>(x: T, y: U) => string);
declare function foo4(a: any): any;
declare function foo5(a5: new <T, U>(x: new (arg: T) => U) => T);
declare function foo5(a: any): any;
declare function foo6(a6: new <T extends Base>(x: new (arg: T) => Derived) => T);
declare function foo6(a: any): any;
declare function foo11(a11: new <T>(x: { foo: T }, y: { foo: T; bar: T }) => Base);
declare function foo11(a: any): any;
declare function foo15(a15: new <T>(x: { a: T; b: T }) => T[]);
declare function foo15(a: any): any;
declare function foo16(a16: new <T extends Base>(x: { a: T; b: T }) => T[]);
declare function foo16(a: any): any;
declare function foo17(a17: {
new <T extends Derived>(x: new (a: T) => T): T[];
new <T extends Base>(x: new (a: T) => T): T[];
});
declare function foo17(a: any): any;
declare function foo18(a18: {
new (x: {
new <T extends Derived>(a: T): T;
new <T extends Base>(a: T): T;
}): any[];
new (x: {
new <T extends Derived2>(a: T): T;
new <T extends Base>(a: T): T;
}): any[];
});
declare function foo18(a: any): any;
var r1arg: new <T>(x: T) => T[];
var r1arg2: new <T>(x: T) => T[];
var r1 = foo1(r1arg);
var r1a = [r1arg, r1arg2];
var r1b = [r1arg2, r1arg];
var r2arg: new <T>(x: T) => string[];
var r2arg2: new <T>(x: T) => string[];
var r2 = foo2(r2arg);
var r2a = [r2arg, r2arg2];
var r2b = [r2arg2, r2arg];
var r3arg: new <T>(x: T) => T;
var r3arg2: new <T>(x: T) => void;
var r3 = foo3(r3arg);
var r3a = [r3arg, r3arg2];
var r3b = [r3arg2, r3arg];
var r4arg: new <T, U>(x: T, y: U) => string;
var r4arg2: new <T, U>(x: T, y: U) => string;
var r4 = foo4(r4arg);
var r4a = [r4arg, r4arg2];
var r4b = [r4arg2, r4arg];
var r5arg: new <T, U>(x: new (arg: T) => U) => T;
var r5arg2: new <T, U>(x: new (arg: T) => U) => T;
var r5 = foo5(r5arg);
var r5a = [r5arg, r5arg2];
var r5b = [r5arg2, r5arg];
var r6arg: new <T extends Base, U extends Derived>(x: new (arg: T) => U) => T;
var r6arg2: new <T extends Base>(x: new (arg: T) => Derived) => T;
var r6 = foo6(r6arg);
var r6a = [r6arg, r6arg2];
var r6b = [r6arg2, r6arg];
var r11arg: new <T, U>(x: { foo: T }, y: { foo: U; bar: U }) => Base;
var r11arg2: new <T>(x: { foo: T }, y: { foo: T; bar: T }) => Base;
var r11 = foo11(r11arg);
var r11a = [r11arg, r11arg2];
var r11b = [r11arg2, r11arg];
var r15arg: new <U, V>(x: { a: U; b: V; }) => U[];
var r15arg2: new <T>(x: { a: T; b: T }) => T[];
var r15 = foo15(r15arg);
var r15a = [r15arg, r15arg2];
var r15b = [r15arg2, r15arg];
var r16arg: new <T extends Base>(x: { a: T; b: T }) => T[];
var r16arg2: new <T extends Base>(x: { a: T; b: T }) => T[];
var r16 = foo16(r16arg);
var r16a = [r16arg, r16arg2];
var r16b = [r16arg2, r16arg];
var r17arg: new <T>(x: new (a: T) => T) => T[];
var r17 = foo17(r17arg);
var r18arg: new (x: new <T>(a: T) => T) => any[];
var r18 = foo18(r18arg);
//// [subtypingWithConstructSignatures4.js]
// checking subtype relations for function types as it relates to contextual signature instantiation
var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var Base = (function () {
function Base() {
}
return Base;
})();
var Derived = (function (_super) {
__extends(Derived, _super);
function Derived() {
_super.apply(this, arguments);
}
return Derived;
})(Base);
var Derived2 = (function (_super) {
__extends(Derived2, _super);
function Derived2() {
_super.apply(this, arguments);
}
return Derived2;
})(Derived);
var OtherDerived = (function (_super) {
__extends(OtherDerived, _super);
function OtherDerived() {
_super.apply(this, arguments);
}
return OtherDerived;
})(Base);
var r1arg;
var r1arg2;
var r1 = foo1(r1arg);
var r1a = [r1arg, r1arg2];
var r1b = [r1arg2, r1arg];
var r2arg;
var r2arg2;
var r2 = foo2(r2arg);
var r2a = [r2arg, r2arg2];
var r2b = [r2arg2, r2arg];
var r3arg;
var r3arg2;
var r3 = foo3(r3arg);
var r3a = [r3arg, r3arg2];
var r3b = [r3arg2, r3arg];
var r4arg;
var r4arg2;
var r4 = foo4(r4arg);
var r4a = [r4arg, r4arg2];
var r4b = [r4arg2, r4arg];
var r5arg;
var r5arg2;
var r5 = foo5(r5arg);
var r5a = [r5arg, r5arg2];
var r5b = [r5arg2, r5arg];
var r6arg;
var r6arg2;
var r6 = foo6(r6arg);
var r6a = [r6arg, r6arg2];
var r6b = [r6arg2, r6arg];
var r11arg;
var r11arg2;
var r11 = foo11(r11arg);
var r11a = [r11arg, r11arg2];
var r11b = [r11arg2, r11arg];
var r15arg;
var r15arg2;
var r15 = foo15(r15arg);
var r15a = [r15arg, r15arg2];
var r15b = [r15arg2, r15arg];
var r16arg;
var r16arg2;
var r16 = foo16(r16arg);
var r16a = [r16arg, r16arg2];
var r16b = [r16arg2, r16arg];
var r17arg;
var r17 = foo17(r17arg);
var r18arg;
var r18 = foo18(r18arg);
| apache-2.0 |
CentOps-TechMahindra/CentOps | PSDashboard/WebContent/jqGrid_JS/src/i18n/grid.locale-ja.js | 8102 | /**
* jqGrid Japanese Translation
* OKADA Yoshitada okada.dev@sth.jp
* http://trirand.com/blog/
* Dual licensed under the MIT and GPL licenses:
* http://www.opensource.org/licenses/mit-license.php
* http://www.gnu.org/licenses/gpl.html
**/
/*global jQuery, define */
(function( factory ) {
"use strict";
if ( typeof define === "function" && define.amd ) {
// AMD. Register as an anonymous module.
define([
"jquery",
"../grid.base"
], factory );
} else {
// Browser globals
factory( jQuery );
}
}(function( $ ) {
$.jgrid = $.jgrid || {};
if(!$.jgrid.hasOwnProperty("regional")) {
$.jgrid.regional = [];
}
$.jgrid.regional["ja"] = {
defaults : {
recordtext: "{2} \u4EF6\u4E2D {0} - {1} \u3092\u8868\u793A ",
emptyrecords: "\u8868\u793A\u3059\u308B\u30EC\u30B3\u30FC\u30C9\u304C\u3042\u308A\u307E\u305B\u3093",
loadtext: "\u8aad\u307f\u8fbc\u307f\u4e2d...",
pgtext : "{1} \u30DA\u30FC\u30B8\u4E2D {0} \u30DA\u30FC\u30B8\u76EE ",
savetext: "Saving...",
pgfirst : "First Page",
pglast : "Last Page",
pgnext : "Next Page",
pgprev : "Previous Page",
pgrecs : "Records per Page",
showhide: "Toggle Expand Collapse Grid",
// mobile
pagerCaption : "Grid::Page Settings",
pageText : "Page:",
recordPage : "Records per Page",
nomorerecs : "No more records...",
scrollPullup: "Pull up to load more...",
scrollPulldown : "Pull down to refresh...",
scrollRefresh : "Release to refresh..."
},
search : {
caption: "\u691c\u7d22...",
Find: "\u691c\u7d22",
Reset: "\u30ea\u30bb\u30c3\u30c8",
odata: [{ oper:'eq', text:"\u6B21\u306B\u7B49\u3057\u3044"}, { oper:'ne', text:"\u6B21\u306B\u7B49\u3057\u304F\u306A\u3044"},
{ oper:'lt', text:"\u6B21\u3088\u308A\u5C0F\u3055\u3044"}, { oper:'le', text:"\u6B21\u306B\u7B49\u3057\u3044\u304B\u5C0F\u3055\u3044"},
{ oper:'gt', text:"\u6B21\u3088\u308A\u5927\u304D\u3044"}, { oper:'ge', text:"\u6B21\u306B\u7B49\u3057\u3044\u304B\u5927\u304D\u3044"},
{ oper:'bw', text:"\u6B21\u3067\u59CB\u307E\u308B"}, { oper:'bn', text:"\u6B21\u3067\u59CB\u307E\u3089\u306A\u3044"},
{ oper:'in', text:"\u6B21\u306B\u542B\u307E\u308C\u308B"}, { oper:'ni', text:"\u6B21\u306B\u542B\u307E\u308C\u306A\u3044"},
{ oper:'ew', text:"\u6B21\u3067\u7D42\u308F\u308B"}, { oper:'en', text:"\u6B21\u3067\u7D42\u308F\u3089\u306A\u3044"},
{ oper:'cn', text:"\u6B21\u3092\u542B\u3080"}, { oper:'nc', text:"\u6B21\u3092\u542B\u307E\u306A\u3044"},
{ oper:'nu', text:'is null'},{ oper:'nn', text:'is not null'}],
groupOps: [{
op: "AND",
text: "\u3059\u3079\u3066\u306E"
},
{
op: "OR",
text: "\u3044\u305A\u308C\u304B\u306E"
}],
operandTitle : "Click to select search operation.",
resetTitle : "Reset Search Value"
},
edit : {
addCaption: "\u30ec\u30b3\u30fc\u30c9\u8ffd\u52a0",
editCaption: "\u30ec\u30b3\u30fc\u30c9\u7de8\u96c6",
bSubmit: "\u9001\u4fe1",
bCancel: "\u30ad\u30e3\u30f3\u30bb\u30eb",
bClose: "\u9589\u3058\u308b",
saveData: "\u30C7\u30FC\u30BF\u304C\u5909\u66F4\u3055\u308C\u3066\u3044\u307E\u3059\u3002\u4FDD\u5B58\u3057\u307E\u3059\u304B\uFF1F",
bYes: "\u306F\u3044",
bNo: "\u3044\u3044\u3048",
bExit: "\u30AD\u30E3\u30F3\u30BB\u30EB",
msg: {
required:"\u3053\u306e\u9805\u76ee\u306f\u5fc5\u9808\u3067\u3059\u3002",
number:"\u6b63\u3057\u3044\u6570\u5024\u3092\u5165\u529b\u3057\u3066\u4e0b\u3055\u3044\u3002",
minValue:"\u6b21\u306e\u5024\u4ee5\u4e0a\u3067\u5165\u529b\u3057\u3066\u4e0b\u3055\u3044\u3002",
maxValue:"\u6b21\u306e\u5024\u4ee5\u4e0b\u3067\u5165\u529b\u3057\u3066\u4e0b\u3055\u3044\u3002",
email: "e-mail\u304c\u6b63\u3057\u304f\u3042\u308a\u307e\u305b\u3093\u3002",
integer: "\u6b63\u3057\u3044\u6574\u6570\u5024\u3092\u5165\u529b\u3057\u3066\u4e0b\u3055\u3044\u3002",
date: "\u6b63\u3057\u3044\u5024\u3092\u5165\u529b\u3057\u3066\u4e0b\u3055\u3044\u3002",
url: "\u306F\u6709\u52B9\u306AURL\u3067\u306F\u3042\u308A\u307E\u305B\u3093\u3002\20\u30D7\u30EC\u30D5\u30A3\u30C3\u30AF\u30B9\u304C\u5FC5\u8981\u3067\u3059\u3002 ('http://' \u307E\u305F\u306F 'https://')",
nodefined: " \u304C\u5B9A\u7FA9\u3055\u308C\u3066\u3044\u307E\u305B\u3093",
novalue: " \u623B\u308A\u5024\u304C\u5FC5\u8981\u3067\u3059",
customarray: "\u30AB\u30B9\u30BF\u30E0\u95A2\u6570\u306F\u914D\u5217\u3092\u8FD4\u3059\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059",
customfcheck: "\u30AB\u30B9\u30BF\u30E0\u691C\u8A3C\u306B\u306F\u30AB\u30B9\u30BF\u30E0\u95A2\u6570\u304C\u5FC5\u8981\u3067\u3059"
}
},
view : {
caption: "\u30EC\u30B3\u30FC\u30C9\u3092\u8868\u793A",
bClose: "\u9589\u3058\u308B"
},
del : {
caption: "\u524a\u9664",
msg: "\u9078\u629e\u3057\u305f\u30ec\u30b3\u30fc\u30c9\u3092\u524a\u9664\u3057\u307e\u3059\u304b\uff1f",
bSubmit: "\u524a\u9664",
bCancel: "\u30ad\u30e3\u30f3\u30bb\u30eb"
},
nav : {
edittext: " ",
edittitle: "\u9078\u629e\u3057\u305f\u884c\u3092\u7de8\u96c6",
addtext:" ",
addtitle: "\u884c\u3092\u65b0\u898f\u8ffd\u52a0",
deltext: " ",
deltitle: "\u9078\u629e\u3057\u305f\u884c\u3092\u524a\u9664",
searchtext: " ",
searchtitle: "\u30ec\u30b3\u30fc\u30c9\u691c\u7d22",
refreshtext: "",
refreshtitle: "\u30b0\u30ea\u30c3\u30c9\u3092\u30ea\u30ed\u30fc\u30c9",
alertcap: "\u8b66\u544a",
alerttext: "\u884c\u3092\u9078\u629e\u3057\u3066\u4e0b\u3055\u3044\u3002",
viewtext: "",
viewtitle: "\u9078\u629E\u3057\u305F\u884C\u3092\u8868\u793A",
savetext: "",
savetitle: "Save row",
canceltext: "",
canceltitle : "Cancel row editing",
selectcaption : "Actions..."
},
col : {
caption: "\u5217\u3092\u8868\u793a\uff0f\u96a0\u3059",
bSubmit: "\u9001\u4fe1",
bCancel: "\u30ad\u30e3\u30f3\u30bb\u30eb"
},
errors : {
errcap : "\u30a8\u30e9\u30fc",
nourl : "URL\u304c\u8a2d\u5b9a\u3055\u308c\u3066\u3044\u307e\u305b\u3093\u3002",
norecords: "\u51e6\u7406\u5bfe\u8c61\u306e\u30ec\u30b3\u30fc\u30c9\u304c\u3042\u308a\u307e\u305b\u3093\u3002",
model : "colNames\u306e\u9577\u3055\u304ccolModel\u3068\u4e00\u81f4\u3057\u307e\u305b\u3093\u3002"
},
formatter : {
integer: {
thousandsSeparator: ",",
defaultValue: '0'
},
number: {
decimalSeparator: ".",
thousandsSeparator: ",",
decimalPlaces: 2,
defaultValue: '0.00'
},
currency: {
decimalSeparator: ".",
thousandsSeparator: ",",
decimalPlaces: 0,
prefix: "",
suffix: "",
defaultValue: '0'
},
date : {
dayNames: [
"\u65e5", "\u6708", "\u706b", "\u6c34", "\u6728", "\u91d1", "\u571f",
"\u65e5", "\u6708", "\u706b", "\u6c34", "\u6728", "\u91d1", "\u571f"
],
monthNames: [
"1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12",
"1\u6708", "2\u6708", "3\u6708", "4\u6708", "5\u6708", "6\u6708", "7\u6708", "8\u6708", "9\u6708", "10\u6708", "11\u6708", "12\u6708"
],
AmPm : ["am","pm","AM","PM"],
S: function (j) { return "\u756a\u76ee"; },
srcformat: 'Y-m-d',
newformat: 'd/m/Y',
parseRe : /[#%\\\/:_;.,\t\s-]/,
masks : {
ISO8601Long:"Y-m-d H:i:s",
ISO8601Short:"Y-m-d",
ShortDate: "n/j/Y",
LongDate: "l, F d, Y",
FullDateTime: "l, F d, Y g:i:s A",
MonthDay: "F d",
ShortTime: "g:i A",
LongTime: "g:i:s A",
SortableDateTime: "Y-m-d\\TH:i:s",
UniversalSortableDateTime: "Y-m-d H:i:sO",
YearMonth: "F, Y"
},
reformatAfterEdit : false,
userLocalTime : false
},
baseLinkUrl: '',
showAction: '',
target: '',
checkbox : {disabled:true},
idName : 'id'
}
};
}));
| mit |
0xadada/homebrew-cask | Casks/wasted.rb | 370 | cask 'wasted' do
version '3.0'
sha256 '9802d9d9674145881f82ccd127ee49d64af4fd09f27a1199fe354a5fe57881ba'
url 'http://wasted.werk01.de/Wasted.zip'
appcast 'http://wasted.werk01.de/appcast.xml',
checkpoint: '23a115ef319622d89f1b56f4b1ffea1ba10fd69e3b753e48344a4566687f04c6'
name 'WASTED'
homepage 'https://wasted.werk01.de/'
app 'Wasted.app'
end
| bsd-2-clause |
GdZ/scriptfile | software/googleAppEngine/lib/django_1_2/tests/regressiontests/bug639/tests.py | 1327 | """
Tests for file field behavior, and specifically #639, in which Model.save()
gets called *again* for each FileField. This test will fail if calling a
ModelForm's save() method causes Model.save() to be called more than once.
"""
import os
import shutil
import unittest
from django.core.files.uploadedfile import SimpleUploadedFile
from regressiontests.bug639.models import Photo, PhotoForm, temp_storage_dir
class Bug639Test(unittest.TestCase):
    """Regression test for #639: saving a ModelForm with a FileField must
    call Model.save() exactly once, not once per FileField.
    """

    def testBug639(self):
        """
        Simulate a file upload and check how many times Model.save() gets
        called.
        """
        # Grab an image for testing.
        filename = os.path.join(os.path.dirname(__file__), "test.jpg")
        # Bug fix: the original used a bare open(...).read(), leaking the
        # file handle until garbage collection; a context manager closes it
        # deterministically.
        with open(filename, "rb") as fp:
            img = fp.read()

        # Fake a POST QueryDict and FILES MultiValueDict.
        data = {'title': 'Testing'}
        files = {"image": SimpleUploadedFile('test.jpg', img, 'image/jpeg')}
        form = PhotoForm(data=data, files=files)
        p = form.save()

        # Check the savecount stored on the object (see the model).
        self.assertEqual(p._savecount, 1)

    def tearDown(self):
        """
        Make sure to delete the "uploaded" file to avoid clogging /tmp.
        """
        p = Photo.objects.get()
        p.image.delete(save=False)
        shutil.rmtree(temp_storage_dir)
| mit |
wout/cdnjs | ajax/libs/inferno-component/1.0.0-beta27/inferno-component.js | 17407 | /*!
* inferno-component v1.0.0-beta27
* (c) 2016 Dominic Gannaway
* Released under the MIT License.
*/
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global.Inferno = global.Inferno || {}, global.Inferno.Component = factory());
}(this, (function () { 'use strict';
// Sentinel returned from a render to mean "keep the previous output".
var NO_OP = '$NO_OP';
// Generic message shown in production builds (details are stripped).
var ERROR_MSG = 'a runtime error occured! Use Inferno in development environment to find the error.';
// this is MUCH faster than .constructor === Array and instanceof Array
// in Node 7 and the later versions of V8, slower in older versions though
var isArray = Array.isArray;
// A "stateful" (class) component is anything whose prototype has a render method.
function isStatefulComponent(o) {
    return !isUndefined(o.prototype) && !isUndefined(o.prototype.render);
}
function isStringOrNumber(obj) {
    return isString(obj) || isNumber(obj);
}
function isNullOrUndef(obj) {
    return isUndefined(obj) || isNull(obj);
}
// "Invalid" render output: null, undefined, true or false — all render nothing.
function isInvalid(obj) {
    return isNull(obj) || obj === false || isTrue(obj) || isUndefined(obj);
}
function isFunction(obj) {
    return typeof obj === 'function';
}
function isString(obj) {
    return typeof obj === 'string';
}
function isNumber(obj) {
    return typeof obj === 'number';
}
function isNull(obj) {
    return obj === null;
}
function isTrue(obj) {
    return obj === true;
}
function isUndefined(obj) {
    return obj === undefined;
}
// Throw a branded error; falls back to the generic production message.
function throwError(message) {
    if (!message) {
        message = ERROR_MSG;
    }
    throw new Error(("Inferno Error: " + message));
}
// Shared immutable-by-convention empty object used as a props placeholder.
var EMPTY_OBJ = {};
// Clone a vNode (or array of vNodes), optionally merging in new props and
// extra children passed as rest arguments. Cloning is required whenever a
// vNode that is already mounted (dom set) would otherwise be reused.
function cloneVNode(vNodeToClone, props) {
    // ES5-compiled rest parameter: everything after (vNodeToClone, props).
    var _children = [], len = arguments.length - 2;
    while ( len-- > 0 ) _children[ len ] = arguments[ len + 2 ];

    var children = _children;
    // Fold rest-argument children into props.children, concatenating or
    // appending depending on which side is an array.
    if (_children.length > 0 && !isNull(_children[0])) {
        if (!props) {
            props = {};
        }
        if (_children.length === 1) {
            children = _children[0];
        }
        if (isUndefined(props.children)) {
            props.children = children;
        }
        else {
            if (isArray(children)) {
                if (isArray(props.children)) {
                    props.children = props.children.concat(children);
                }
                else {
                    props.children = [props.children].concat(children);
                }
            }
            else {
                if (isArray(props.children)) {
                    props.children.push(children);
                }
                else {
                    props.children = [props.children];
                    props.children.push(children);
                }
            }
        }
    }
    children = null;
    var flags = vNodeToClone.flags;
    var events = vNodeToClone.events || (props && props.events) || null;
    var newVNode;
    if (isArray(vNodeToClone)) {
        // Arrays clone element-wise.
        newVNode = vNodeToClone.map(function (vNode) { return cloneVNode(vNode); });
    }
    else if (isNullOrUndef(props) && isNullOrUndef(children)) {
        // Nothing to merge: a shallow copy suffices.
        newVNode = Object.assign({}, vNodeToClone);
    }
    else {
        var key = !isNullOrUndef(vNodeToClone.key) ? vNodeToClone.key : props.key;
        var ref = vNodeToClone.ref || props.ref;
        if (flags & 28 /* Component */) {
            // Components keep children inside props, so children arg is null.
            newVNode = createVNode(flags, vNodeToClone.type, Object.assign({}, vNodeToClone.props, props), null, events, key, ref, true);
        }
        else if (flags & 3970 /* Element */) {
            children = (props && props.children) || vNodeToClone.children;
            newVNode = createVNode(flags, vNodeToClone.type, Object.assign({}, vNodeToClone.props, props), children, events, key, ref, !children);
        }
    }
    if (flags & 28 /* Component */) {
        var newProps = newVNode.props;
        if (newProps) {
            var newChildren = newProps.children;
            // we need to also clone component children that are in props
            // as the children may also have been hoisted
            if (newChildren) {
                if (isArray(newChildren)) {
                    for (var i = 0; i < newChildren.length; i++) {
                        var child = newChildren[i];
                        if (!isInvalid(child) && isVNode(child)) {
                            newProps.children[i] = cloneVNode(child);
                        }
                    }
                }
                else if (isVNode(newChildren)) {
                    newProps.children = cloneVNode(newChildren);
                }
            }
        }
        newVNode.children = null;
    }
    // The clone has never been mounted.
    newVNode.dom = null;
    return newVNode;
}
// Recursive worker for normalizeVNodes: flattens nested child arrays into
// `result` starting at index i, wrapping raw strings/numbers as text vNodes
// and cloning any vNode that is already mounted (dom set) so it is never
// mounted twice. Invalid entries (null/undefined/booleans) are dropped.
function _normalizeVNodes(nodes, result, i) {
    for (; i < nodes.length; i++) {
        var n = nodes[i];
        if (!isInvalid(n)) {
            if (Array.isArray(n)) {
                _normalizeVNodes(n, result, 0);
            }
            else {
                if (isStringOrNumber(n)) {
                    n = createTextVNode(n);
                }
                else if (isVNode(n) && n.dom) {
                    n = cloneVNode(n);
                }
                result.push(n);
            }
        }
    }
}
// Normalize a children array: strings/numbers become text vNodes, nested
// arrays are flattened, mounted vNodes are cloned. Copy-on-write: a new
// array is only allocated (newNodes) once a change is actually needed.
function normalizeVNodes(nodes) {
    var newNodes;
    // we assign $ which basically means we've flagged this array for future note
    // if it comes back again, we need to clone it, as people are using it
    // in an immutable way
    // tslint:disable
    if (nodes['$']) {
        nodes = nodes.slice();
    }
    else {
        nodes['$'] = true;
    }
    // tslint:enable
    for (var i = 0; i < nodes.length; i++) {
        var n = nodes[i];
        if (isInvalid(n)) {
            if (!newNodes) {
                newNodes = nodes.slice(0, i);
            }
            newNodes.push(n);
        }
        else if (Array.isArray(n)) {
            // Nested array: hand the rest of the walk to the flattener.
            var result = (newNodes || nodes).slice(0, i);
            _normalizeVNodes(nodes, result, i);
            return result;
        }
        else if (isStringOrNumber(n)) {
            if (!newNodes) {
                newNodes = nodes.slice(0, i);
            }
            newNodes.push(createTextVNode(n));
        }
        else if (isVNode(n) && n.dom) {
            if (!newNodes) {
                newNodes = nodes.slice(0, i);
            }
            newNodes.push(cloneVNode(n));
        }
        else if (newNodes) {
            // Once we have diverged, every subsequent vNode is cloned too.
            newNodes.push(cloneVNode(n));
        }
    }
    return newNodes || nodes;
}
// Normalize a single children value: arrays get the full normalisation pass,
// an already-mounted vNode (dom set) is cloned so it is never mounted twice,
// and everything else passes through untouched.
function normalizeChildren(children) {
    if (isArray(children)) {
        return normalizeVNodes(children);
    }
    var alreadyMounted = isVNode(children) && children.dom;
    return alreadyMounted ? cloneVNode(children) : children;
}
// Hoist special fields supplied via props (children/ref/events/key) onto the
// vNode itself. Components keep their children inside props, so the children
// promotion only applies to non-component vNodes with no explicit children.
function normalizeProps(vNode, props, children) {
    var isComponent = (vNode.flags & 28 /* Component */) !== 0;
    var shouldAdoptChildren =
        !isComponent && isNullOrUndef(children) && !isNullOrUndef(props.children);
    if (shouldAdoptChildren) {
        vNode.children = props.children;
    }
    if (props.ref) {
        vNode.ref = props.ref;
    }
    if (props.events) {
        vNode.events = props.events;
    }
    if (!isNullOrUndef(props.key)) {
        vNode.key = props.key;
    }
}
// Full normalisation pass over a freshly created vNode: fix up mis-flagged
// string-typed "components", hoist props fields, and normalise both the
// direct children and any children living inside props.
function normalize(vNode) {
    var props = vNode.props;
    var children = vNode.children;
    // convert a wrongly created type back to element
    if (isString(vNode.type) && (vNode.flags & 28 /* Component */)) {
        vNode.flags = 3970 /* Element */;
    }
    if (props) {
        normalizeProps(vNode, props, children);
    }
    if (!isInvalid(children)) {
        vNode.children = normalizeChildren(children);
    }
    if (props && !isInvalid(props.children)) {
        props.children = normalizeChildren(props.children);
    }
}
// Core vNode factory. `noNormalise` lets callers that guarantee canonical
// children/props (e.g. cloneVNode) skip the normalisation pass.
function createVNode(flags, type, props, children, events, key, ref, noNormalise) {
    if (flags & 16 /* ComponentUnknown */) {
        // Resolve the concrete component kind now that the type is known.
        flags = isStatefulComponent(type) ? 4 /* ComponentClass */ : 8 /* ComponentFunction */;
    }
    var vNode = {
        children: isUndefined(children) ? null : children,
        dom: null,
        events: events || null,
        flags: flags || 0,
        key: key === undefined ? null : key,
        props: props || null,
        ref: ref || null,
        type: type
    };
    if (!noNormalise) {
        normalize(vNode);
    }
    return vNode;
}
// when a components root VNode is also a component, we can run into issues
// this will recursively look for vNode.parentNode if the VNode is a component
function updateParentComponentVNodes(vNode, dom) {
if (vNode.flags & 28 /* Component */) {
var parentVNode = vNode.parentVNode;
if (parentVNode) {
parentVNode.dom = dom;
updateParentComponentVNodes(parentVNode, dom);
}
}
}
// Factory helpers for the two special-purpose vNode kinds, plus the cheap
// duck-type check used throughout normalisation.

function createVoidVNode() {
    // 4096 is the VNodeFlags.Void bit: renders as a placeholder.
    var voidFlags = 4096 /* Void */;
    return createVNode(voidFlags);
}

function createTextVNode(text) {
    // 1 is the VNodeFlags.Text bit; the string travels in `children`.
    var textFlags = 1 /* Text */;
    return createVNode(textFlags, null, null, text);
}

function isVNode(o) {
    // Every real vNode carries a non-zero numeric `flags` field.
    return Boolean(o.flags);
}
// Collects callbacks during a mount/patch pass and fires them all once the
// pass completes.
var Lifecycle = function Lifecycle() {
    // Callbacks queued to run after the current operation finishes.
    this.listeners = [];
    // Set to false elsewhere when expensive per-node unmount work is needed.
    this.fastUnmount = true;
};
Lifecycle.prototype.addListener = function addListener (callback) {
    this.listeners.push(callback);
};
Lifecycle.prototype.trigger = function trigger () {
    var queued = this.listeners;
    for (var index = 0; index < queued.length; index++) {
        queued[index]();
    }
};
// Message used when setState/forceUpdate is called on an unmounted component;
// development builds get the descriptive text, production the generic one.
var noOp = ERROR_MSG;
if (process.env.NODE_ENV !== 'production') {
    noOp = 'Inferno Error: Can only update a mounted or mounting component. This usually means you called setState() or forceUpdate() on an unmounted component. This is a no-op.';
}
// Per-component queues of setState callbacks awaiting the microtask flush.
var componentCallbackQueue = new Map();
// Defer a state application for `component` to the microtask queue, so that
// multiple setState calls in the same tick coalesce into one re-render; the
// optional callback runs after the flush.
function addToQueue(component, force, callback) {
    // TODO this function needs to be revised and improved on
    var queue = componentCallbackQueue.get(component);
    if (!queue) {
        // First deferred update for this component in this tick: create the
        // queue and schedule exactly one flush.
        queue = [];
        componentCallbackQueue.set(component, queue);
        Promise.resolve().then(function () {
            applyState(component, force, function () {
                for (var i = 0; i < queue.length; i++) {
                    queue[i]();
                }
            });
            componentCallbackQueue.delete(component);
            component._processingSetState = false;
        });
    }
    if (callback) {
        queue.push(callback);
    }
}
// Entry point for setState(): accumulates the patch and either applies it
// synchronously or defers it to the microtask queue.
function queueStateChanges(component, newState, callback) {
    // Functional setState: derive the patch from the current state.
    if (isFunction(newState)) {
        newState = newState(component.state);
    }
    // Fold the patch into _pendingState; applyState consumes it as a whole.
    for (var stateKey in newState) {
        component._pendingState[stateKey] = newState[stateKey];
    }
    if (!component._pendingSetState) {
        // Re-entrant updates or updates carrying a callback go through the
        // async queue; plain updates apply synchronously.
        if (component._processingSetState || callback) {
            addToQueue(component, false, callback);
        }
        else {
            component._pendingSetState = true;
            component._processingSetState = true;
            applyState(component, false, callback);
            component._processingSetState = false;
        }
    }
    else {
        // An applyState pass is already pending: merge the patch straight
        // into state so the in-flight pass observes it.
        component.state = Object.assign({}, component.state, component._pendingState);
        component._pendingState = {};
    }
}
// Flushes pending state: re-renders the component, patches the DOM and
// fires post-update lifecycle hooks.
function applyState(component, force, callback) {
    // Skip while updates are deferred (unless forced) or rendering is
    // blocked (e.g. inside componentWillReceiveProps).
    if ((!component._deferSetState || force) && !component._blockRender) {
        component._pendingSetState = false;
        var pendingState = component._pendingState;
        var prevState = component.state;
        var nextState = Object.assign({}, prevState, pendingState);
        var props = component.props;
        var context = component.context;
        component._pendingState = {};
        var nextInput = component._updateComponent(prevState, nextState, props, props, context, force);
        var didUpdate = true;
        // Invalid render output (null/undefined/boolean) becomes a void
        // placeholder VNode so there is always something to patch against.
        if (isInvalid(nextInput)) {
            nextInput = createVoidVNode();
        }
        else if (isArray(nextInput)) {
            if (process.env.NODE_ENV !== 'production') {
                throwError('a valid Inferno VNode (or null) must be returned from a component render. You may have returned an array or an invalid object.');
            }
            throwError();
        }
        else if (nextInput === NO_OP) {
            // shouldComponentUpdate vetoed the render: keep the previous tree.
            nextInput = component._lastInput;
            didUpdate = false;
        }
        var lastInput = component._lastInput;
        var parentDom = lastInput.dom.parentNode;
        component._lastInput = nextInput;
        if (didUpdate) {
            // Reuse the lifecycle object across updates, clearing listeners
            // left over from the previous patch.
            var subLifecycle = component._lifecycle;
            if (!subLifecycle) {
                subLifecycle = new Lifecycle();
            }
            else {
                subLifecycle.listeners = [];
            }
            component._lifecycle = subLifecycle;
            // Merge inherited context with this component's child context.
            var childContext = component.getChildContext();
            if (!isNullOrUndef(childContext)) {
                childContext = Object.assign({}, context, component._childContext, childContext);
            }
            else {
                childContext = Object.assign({}, context, component._childContext);
            }
            component._patch(lastInput, nextInput, parentDom, subLifecycle, childContext, component._isSVG, false);
            subLifecycle.trigger();
            component.componentDidUpdate(props, prevState);
        }
        // Keep this component's VNode, the devtools map, and any ancestor
        // component VNodes pointing at the (possibly new) root DOM node.
        var vNode = component._vNode;
        var dom = vNode.dom = nextInput.dom;
        var componentToDOMNodeMap = component._componentToDOMNodeMap;
        componentToDOMNodeMap && componentToDOMNodeMap.set(component, nextInput.dom);
        updateParentComponentVNodes(vNode, dom);
        if (!isNullOrUndef(callback)) {
            callback();
        }
    }
}
// Base class for stateful Inferno components: setState queueing, lifecycle
// stubs for subclasses to override, and the internal update pipeline.
var Component$1 = function Component$1(props, context) {
    // Public state/refs containers.
    this.state = {};
    this.refs = {};
    // Internal flags driving the setState queue (see queueStateChanges /
    // applyState).
    this._processingSetState = false;
    this._blockRender = false;
    this._ignoreSetState = false;
    this._blockSetState = false;
    this._deferSetState = false;
    this._pendingSetState = false;
    this._pendingState = {};
    // Last rendered tree and the VNode representing this component.
    this._lastInput = null;
    this._vNode = null;
    this._unmounted = true;
    // Hooks wired in by the renderer / devtools at mount time.
    this._devToolsStatus = null;
    this._devToolsId = null;
    this._lifecycle = null;
    this._childContext = null;
    this._patch = null;
    this._isSVG = false;
    this._componentToDOMNodeMap = null;
    /** @type {object} */
    this.props = props || EMPTY_OBJ;
    /** @type {object} */
    this.context = context || {};
    // Normalise a missing componentDidMount to null so the mounter can
    // cheaply test for its presence.
    if (!this.componentDidMount) {
        this.componentDidMount = null;
    }
};
// Default render does nothing; subclasses override it.
Component$1.prototype.render = function render (nextProps, nextState, nextContext) {
};
// Force a re-render, bypassing shouldComponentUpdate.
Component$1.prototype.forceUpdate = function forceUpdate (callback) {
    if (this._unmounted) {
        throw Error(noOp);
    }
    applyState(this, true, callback);
};
Component$1.prototype.setState = function setState (newState, callback) {
    if (this._unmounted) {
        throw Error(noOp);
    }
    if (!this._blockSetState) {
        // _ignoreSetState silently drops updates (used internally);
        // otherwise the change is queued.
        if (!this._ignoreSetState) {
            queueStateChanges(this, newState, callback);
        }
    }
    else {
        // _blockSetState is set while componentWillUpdate runs.
        if (process.env.NODE_ENV !== 'production') {
            throwError('cannot update state via setState() in componentWillUpdate().');
        }
        throwError();
    }
};
// Lifecycle hooks below are intentionally empty stubs for subclasses.
Component$1.prototype.componentWillMount = function componentWillMount () {
};
Component$1.prototype.componentDidMount = function componentDidMount () {
};
Component$1.prototype.componentWillUnmount = function componentWillUnmount () {
};
Component$1.prototype.componentDidUpdate = function componentDidUpdate (prevProps, prevState, prevContext) {
};
Component$1.prototype.shouldComponentUpdate = function shouldComponentUpdate (nextProps, nextState, context) {
    return true;
};
Component$1.prototype.componentWillReceiveProps = function componentWillReceiveProps (nextProps, context) {
};
Component$1.prototype.componentWillUpdate = function componentWillUpdate (nextProps, nextState, nextContext) {
};
Component$1.prototype.getChildContext = function getChildContext () {
};
// Internal update pipeline: runs the willReceiveProps/shouldUpdate/
// willUpdate/render sequence and returns the rendered VNode, or NO_OP when
// the update was vetoed.
Component$1.prototype._updateComponent = function _updateComponent (prevState, nextState, prevProps, nextProps, context, force) {
    if (this._unmounted === true) {
        if (process.env.NODE_ENV !== 'production') {
            throwError(noOp);
        }
        throwError();
    }
    // Preserve children when the new props omit them.
    if (!isNullOrUndef(nextProps) && isNullOrUndef(nextProps.children)) {
        nextProps.children = prevProps.children;
    }
    if ((prevProps !== nextProps || nextProps === EMPTY_OBJ) || prevState !== nextState || force) {
        if (prevProps !== nextProps || nextProps === EMPTY_OBJ) {
            // Rendering is blocked while componentWillReceiveProps runs;
            // setState calls made inside it are folded into nextState below.
            this._blockRender = true;
            this.componentWillReceiveProps(nextProps, context);
            this._blockRender = false;
            if (this._pendingSetState) {
                nextState = Object.assign({}, nextState, this._pendingState);
                this._pendingSetState = false;
                this._pendingState = {};
            }
        }
        var shouldUpdate = this.shouldComponentUpdate(nextProps, nextState, context);
        if (shouldUpdate !== false || force) {
            this._blockSetState = true;
            this.componentWillUpdate(nextProps, nextState, context);
            this._blockSetState = false;
            this.props = nextProps;
            var state = this.state = nextState;
            this.context = context;
            // Devtools hooks, when installed.
            this._beforeRender && this._beforeRender();
            var render = this.render(nextProps, state, context);
            this._afterRender && this._afterRender();
            return render;
        }
    }
    return NO_OP;
};
return Component$1;
})));
| mit |
UNM-GEOG-485-585/class-materials | sample-files/OpenLayers/js/v3.14.2/closure-library/closure/goog/ui/ratings.js | 14218 | // Copyright 2006 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview A base ratings widget that allows the user to select a rating,
* like "star video" in Google Video. This fires a "change" event when the user
* selects a rating.
*
* Keyboard:
* ESC = Clear (if supported)
* Home = 1 star
* End = Full rating
* Left arrow = Decrease rating
* Right arrow = Increase rating
* 0 = Clear (if supported)
* 1 - 9 = nth star
*
* @see ../demos/ratings.html
*/
goog.provide('goog.ui.Ratings');
goog.provide('goog.ui.Ratings.EventType');
goog.require('goog.a11y.aria');
goog.require('goog.a11y.aria.Role');
goog.require('goog.a11y.aria.State');
goog.require('goog.asserts');
goog.require('goog.dom.TagName');
goog.require('goog.dom.classlist');
goog.require('goog.events.EventType');
goog.require('goog.ui.Component');
/**
* A UI Control used for rating things, i.e. videos on Google Video.
* @param {Array<string>=} opt_ratings Ratings. Default: [1,2,3,4,5].
* @param {goog.dom.DomHelper=} opt_domHelper Optional DOM helper.
* @constructor
* @extends {goog.ui.Component}
*/
goog.ui.Ratings = function(opt_ratings, opt_domHelper) {
  goog.ui.Component.call(this, opt_domHelper);
  /**
   * Ordered ratings that can be picked, Default: [1,2,3,4,5]
   * @type {Array<string>}
   * @private
   */
  this.ratings_ = opt_ratings || ['1', '2', '3', '4', '5'];
  /**
   * Array containing references to the star elements
   * @type {Array<Element>}
   * @private
   */
  this.stars_ = [];
  // Awkward name because the obvious name is taken by subclasses already.
  /**
   * Whether the control is enabled.
   * @type {boolean}
   * @private
   */
  this.isEnabled_ = true;
  /**
   * The last index to be highlighted; -1 when the mouse is not over a star.
   * @type {number}
   * @private
   */
  this.highlightedIndex_ = -1;
  /**
   * The currently selected index; -1 when nothing is selected.
   * @type {number}
   * @private
   */
  this.selectedIndex_ = -1;
  /**
   * An attached form field to set the value to
   * @type {HTMLInputElement|HTMLSelectElement|null}
   * @private
   */
  this.attachedFormField_ = null;
};
goog.inherits(goog.ui.Ratings, goog.ui.Component);
goog.tagUnsealableClass(goog.ui.Ratings);
/**
 * Default CSS class to be applied to the root element of components rendered
 * by this renderer.
 * @type {string}
 */
goog.ui.Ratings.CSS_CLASS = goog.getCssName('goog-ratings');
/**
 * Enums for Ratings event type.
 * @enum {string}
 */
goog.ui.Ratings.EventType = {
  // Selected rating changed (via click or keyboard).
  CHANGE: 'change',
  // Highlighted (hovered) rating changed.
  HIGHLIGHT_CHANGE: 'highlightchange',
  // Mouse entered a star.
  HIGHLIGHT: 'highlight',
  // Highlight reverted to the selected rating.
  UNHIGHLIGHT: 'unhighlight'
};
/**
 * Decorate a HTML structure already in the document. Expects the structure:
 * <pre>
 * - div
 *   - select
 *     - option 1 #text = 1 star
 *     - option 2 #text = 2 stars
 *     - option 3 #text = 3 stars
 *     - option N (where N is max number of ratings)
 * </pre>
 *
 * The div can contain other elements for graceful degradation, but they will
 * be hidden when the decoration occurs.
 *
 * @param {Element} el Div element to decorate.
 * @override
 */
goog.ui.Ratings.prototype.decorateInternal = function(el) {
  var select = el.getElementsByTagName(goog.dom.TagName.SELECT)[0];
  if (!select) {
    throw Error('Can not decorate ' + el + ', with Ratings. Must ' +
                'contain select box');
  }
  // The option texts become the rating values; the select's current
  // selection seeds the widget's selection.
  this.ratings_.length = 0;
  for (var i = 0, n = select.options.length; i < n; i++) {
    var option = select.options[i];
    this.ratings_.push(option.text);
  }
  this.setSelectedIndex(select.selectedIndex);
  // Hide (but keep) the select so it still carries the value on form submit.
  select.style.display = 'none';
  this.attachedFormField_ = select;
  this.createDom();
  el.insertBefore(this.getElement(), select);
};
/**
 * Render the rating widget inside the provided element. This will override the
 * current content of the element.
 * @override
 */
goog.ui.Ratings.prototype.enterDocument = function() {
  var el = this.getElement();
  goog.asserts.assert(el, 'The DOM element for ratings cannot be null.');
  goog.ui.Ratings.base(this, 'enterDocument');
  // Focusable so the keyboard handler receives key events.
  el.tabIndex = 0;
  goog.dom.classlist.add(el, this.getCssClass());
  goog.a11y.aria.setRole(el, goog.a11y.aria.Role.SLIDER);
  goog.a11y.aria.setState(el, goog.a11y.aria.State.VALUEMIN, 0);
  var max = this.ratings_.length - 1;
  goog.a11y.aria.setState(el, goog.a11y.aria.State.VALUEMAX, max);
  var handler = this.getHandler();
  handler.listen(el, 'keydown', this.onKeyDown_);
  // Create the elements for the stars. Each span carries a custom 'index'
  // expando which the mouse handlers use to identify the hovered star.
  for (var i = 0; i < this.ratings_.length; i++) {
    var star = this.getDomHelper().createDom(goog.dom.TagName.SPAN, {
      'title': this.ratings_[i],
      'class': this.getClassName_(i, false),
      'index': i});
    this.stars_.push(star);
    el.appendChild(star);
  }
  handler.listen(el, goog.events.EventType.CLICK, this.onClick_);
  handler.listen(el, goog.events.EventType.MOUSEOUT, this.onMouseOut_);
  handler.listen(el, goog.events.EventType.MOUSEOVER, this.onMouseOver_);
  this.highlightIndex_(this.selectedIndex_);
};
/**
 * Should be called when the widget is removed from the document but may be
 * reused. This removes all the listeners the widget has attached and destroys
 * the DOM nodes it uses.
 * @override
 */
goog.ui.Ratings.prototype.exitDocument = function() {
  goog.ui.Ratings.superClass_.exitDocument.call(this);
  // Remove the star elements; they are recreated on the next enterDocument.
  for (var i = 0; i < this.stars_.length; i++) {
    this.getDomHelper().removeNode(this.stars_[i]);
  }
  this.stars_.length = 0;
};
/** @override */
goog.ui.Ratings.prototype.disposeInternal = function() {
  goog.ui.Ratings.superClass_.disposeInternal.call(this);
  this.ratings_.length = 0;
};
/**
 * Returns the base CSS class used by subcomponents of this component.
 * @return {string} Component-specific CSS class.
 */
goog.ui.Ratings.prototype.getCssClass = function() {
  return goog.ui.Ratings.CSS_CLASS;
};
/**
 * Sets the selected index. If the provided index is greater than the number of
 * ratings then the max is set.  0 is the first item, -1 is no selection.
 * @param {number} index The index of the rating to select.
 */
goog.ui.Ratings.prototype.setSelectedIndex = function(index) {
  // Clamp to [-1, ratings_.length - 1].
  index = Math.max(-1, Math.min(index, this.ratings_.length - 1));
  if (index != this.selectedIndex_) {
    this.selectedIndex_ = index;
    this.highlightIndex_(this.selectedIndex_);
    // NOTE(review): the attached form field and the ARIA valuenow state are
    // only updated when a form field is attached — presumably decorated
    // widgets always have one; confirm before relying on ARIA updates for
    // render()-created widgets. Also, when clearing (index == -1),
    // ratings_[-1] is undefined — verify intended valuenow behavior.
    if (this.attachedFormField_) {
      if (this.attachedFormField_.tagName == goog.dom.TagName.SELECT) {
        this.attachedFormField_.selectedIndex = index;
      } else {
        this.attachedFormField_.value =
            /** @type {string} */ (this.getValue());
      }
      var ratingsElement = this.getElement();
      goog.asserts.assert(ratingsElement,
          'The DOM ratings element cannot be null.');
      goog.a11y.aria.setState(ratingsElement,
          goog.a11y.aria.State.VALUENOW,
          this.ratings_[index]);
    }
    this.dispatchEvent(goog.ui.Ratings.EventType.CHANGE);
  }
};
/**
 * @return {number} The index of the currently selected rating, or -1 when
 *     nothing is selected.
 */
goog.ui.Ratings.prototype.getSelectedIndex = function() {
  return this.selectedIndex_;
};
/**
 * Returns the rating value of the currently selected rating
 * @return {?string} The value of the currently selected rating (or null).
 */
goog.ui.Ratings.prototype.getValue = function() {
  // -1 is the "no selection" sentinel.
  return this.selectedIndex_ == -1 ? null : this.ratings_[this.selectedIndex_];
};
/**
 * Returns the index of the currently highlighted rating, -1 if the mouse isn't
 * currently over the widget
 * @return {number} The index of the currently highlighted rating.
 */
goog.ui.Ratings.prototype.getHighlightedIndex = function() {
  return this.highlightedIndex_;
};
/**
 * Returns the value of the currently highlighted rating, null if the mouse
 * isn't currently over the widget
 * @return {?string} The value of the currently highlighted rating, or null.
 */
goog.ui.Ratings.prototype.getHighlightedValue = function() {
  return this.highlightedIndex_ == -1 ? null :
      this.ratings_[this.highlightedIndex_];
};
/**
 * Sets the array of ratings that the component uses as its scale.
 * @param {Array<string>} ratings Array of value to use as ratings.
 */
goog.ui.Ratings.prototype.setRatings = function(ratings) {
  this.ratings_ = ratings;
  // TODO(user): If rendered update stars
};
/**
 * Gets the array of ratings that the component uses as its scale.
 * @return {Array<string>} Array of ratings.
 */
goog.ui.Ratings.prototype.getRatings = function() {
  return this.ratings_;
};
/**
 * Attaches an input or select element to the ratings widget. The value or
 * index of the field will be updated along with the ratings widget.
 * @param {HTMLSelectElement|HTMLInputElement} field The field to attach to.
 */
goog.ui.Ratings.prototype.setAttachedFormField = function(field) {
  this.attachedFormField_ = field;
};
/**
 * Returns the attached input or select element to the ratings widget.
 * @return {HTMLSelectElement|HTMLInputElement|null} The attached form field.
 */
goog.ui.Ratings.prototype.getAttachedFormField = function() {
  return this.attachedFormField_;
};
/**
 * Enables or disables the ratings control.
 * @param {boolean} enable Whether to enable or disable the control.
 */
goog.ui.Ratings.prototype.setEnabled = function(enable) {
  this.isEnabled_ = enable;
  if (!enable) {
    // Undo any highlighting done during mouseover when disabling the control
    // and highlight the last selected rating.
    this.resetHighlights_();
  }
};
/**
 * @return {boolean} Whether the ratings control is enabled.
 */
goog.ui.Ratings.prototype.isEnabled = function() {
  return this.isEnabled_;
};
/**
 * Handle the mouse moving over a star: highlight up to the hovered star.
 * @param {goog.events.BrowserEvent} e The browser event.
 * @private
 */
goog.ui.Ratings.prototype.onMouseOver_ = function(e) {
  if (!this.isEnabled()) {
    return;
  }
  // Star spans carry an 'index' expando (set in enterDocument); other
  // targets (e.g. the container) are ignored.
  if (goog.isDef(e.target.index)) {
    var n = e.target.index;
    if (this.highlightedIndex_ != n) {
      this.highlightIndex_(n);
      this.highlightedIndex_ = n;
      this.dispatchEvent(goog.ui.Ratings.EventType.HIGHLIGHT_CHANGE);
      this.dispatchEvent(goog.ui.Ratings.EventType.HIGHLIGHT);
    }
  }
};
/**
 * Handle the mouse leaving a star: revert the hover highlight.
 * @param {goog.events.BrowserEvent} e The browser event.
 * @private
 */
goog.ui.Ratings.prototype.onMouseOut_ = function(e) {
  // Only remove the highlight if the mouse is not moving to another star
  if (e.relatedTarget && !goog.isDef(e.relatedTarget.index)) {
    this.resetHighlights_();
  }
};
/**
 * Handle a click on a star: select the clicked rating.
 * @param {goog.events.BrowserEvent} e The browser event.
 * @private
 */
goog.ui.Ratings.prototype.onClick_ = function(e) {
  if (!this.isEnabled()) {
    return;
  }
  if (goog.isDef(e.target.index)) {
    this.setSelectedIndex(e.target.index);
  }
};
/**
 * Handle the key down event. 0 = unselected in this case, 1 = the first rating
 * @param {goog.events.BrowserEvent} e The browser event.
 * @private
 */
goog.ui.Ratings.prototype.onKeyDown_ = function(e) {
  if (!this.isEnabled()) {
    return;
  }
  switch (e.keyCode) {
    case 27: // esc
      this.setSelectedIndex(-1);
      break;
    case 36: // home
      this.setSelectedIndex(0);
      break;
    case 35: // end
      // Deliberately past the last index; setSelectedIndex clamps it.
      this.setSelectedIndex(this.ratings_.length);
      break;
    case 37: // left arrow
      this.setSelectedIndex(this.getSelectedIndex() - 1);
      break;
    case 39: // right arrow
      this.setSelectedIndex(this.getSelectedIndex() + 1);
      break;
    default:
      // Detected a numeric key stroke, such as 0 - 9.  0 clears, 1 is first
      // star, 9 is 9th star or last if there are less than 9 stars.
      var num = parseInt(String.fromCharCode(e.keyCode), 10);
      if (!isNaN(num)) {
        this.setSelectedIndex(num - 1);
      }
  }
};
/**
 * Resets the highlights to the selected rating to undo highlights due to hover
 * effects.
 * @private
 */
goog.ui.Ratings.prototype.resetHighlights_ = function() {
  this.highlightIndex_(this.selectedIndex_);
  this.highlightedIndex_ = -1;
  this.dispatchEvent(goog.ui.Ratings.EventType.HIGHLIGHT_CHANGE);
  this.dispatchEvent(goog.ui.Ratings.EventType.UNHIGHLIGHT);
};
/**
 * Highlights the ratings up to a specific index; stars past the index get
 * their "off" styling. Passing -1 turns every star off.
 * @param {number} n Index to highlight.
 * @private
 */
goog.ui.Ratings.prototype.highlightIndex_ = function(n) {
  for (var i = 0, star; star = this.stars_[i]; i++) {
    goog.dom.classlist.set(star, this.getClassName_(i, i <= n));
  }
};
/**
 * Builds the space-separated class list for star {@code i}.  Every star gets
 * goog-ratings-star plus a position class (firststar/midstar/laststar) with
 * an -on/-off state suffix, and an enabled/disabled modifier.
 * @param {number} i Index to get class name for.
 * @param {boolean} on Whether it should be on.
 * @return {string} The class name.
 * @private
 */
goog.ui.Ratings.prototype.getClassName_ = function(i, on) {
  var baseClass = this.getCssClass();
  // Position-dependent class: first, last or middle star.
  var positionClass;
  if (i === 0) {
    positionClass = goog.getCssName(baseClass, 'firststar');
  } else if (i == this.ratings_.length - 1) {
    positionClass = goog.getCssName(baseClass, 'laststar');
  } else {
    positionClass = goog.getCssName(baseClass, 'midstar');
  }
  var stateClass = on ?
      goog.getCssName(positionClass, 'on') :
      goog.getCssName(positionClass, 'off');
  var enabledClass = this.isEnabled_ ?
      goog.getCssName(baseClass, 'enabled') :
      goog.getCssName(baseClass, 'disabled');
  return goog.getCssName(baseClass, 'star') + ' ' + stateClass +
      ' ' + enabledClass;
};
| mit |
diestrin/angular | aio/content/examples/router/src/app/login-routing.module.ts | 572 | // #docregion
import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { AuthGuard } from './auth-guard.service';
import { AuthService } from './auth.service';
import { LoginComponent } from './login.component';
// Routes owned by the login feature; registered as child routes below.
const loginRoutes: Routes = [
  { path: 'login', component: LoginComponent }
];
// Wires the login routes into the router and provides the auth services
// used by this feature's guards.
@NgModule({
  imports: [
    RouterModule.forChild(loginRoutes)
  ],
  exports: [
    RouterModule
  ],
  providers: [
    AuthGuard,
    AuthService
  ]
})
export class LoginRoutingModule {}
| mit |
gorgozilla/estivale2014 | components/com_content/views/featured/view.html.php | 5959 | <?php
/**
* @package Joomla.Site
* @subpackage com_content
*
* @copyright Copyright (C) 2005 - 2013 Open Source Matters, Inc. All rights reserved.
* @license GNU General Public License version 2 or later; see LICENSE.txt
*/
defined('_JEXEC') or die;
/**
* Frontpage View class
*
* @package Joomla.Site
* @subpackage com_content
* @since 1.5
*/
class ContentViewFeatured extends JViewLegacy
{
	// View state/data populated by display(). The lead/intro/link arrays
	// hold references into $this->items, split per the menu parameters.
	protected $state = null;
	protected $item = null;
	protected $items = null;
	protected $pagination = null;
	protected $lead_items = array();
	protected $intro_items = array();
	protected $link_items = array();
	protected $columns = 1;
	/**
	 * Display the view
	 *
	 * @param   string  $tpl  The name of the layout template to parse.
	 *
	 * @return  mixed  False on error, null otherwise.
	 */
	public function display($tpl = null)
	{
		$user = JFactory::getUser();
		$state = $this->get('State');
		$items = $this->get('Items');
		$pagination = $this->get('Pagination');
		// Check for errors.
		if (count($errors = $this->get('Errors')))
		{
			JError::raiseWarning(500, implode("\n", $errors));
			return false;
		}
		$params = &$state->params;
		// PREPARE THE DATA
		// Get the metrics for the structural page layout.
		$numLeading = $params->def('num_leading_articles', 1);
		$numIntro = $params->def('num_intro_articles', 4);
		// Compute the article slugs and prepare introtext (runs content plugins).
		foreach ($items as &$item)
		{
			// Slugs are "id:alias" when an alias exists, otherwise the bare id.
			$item->slug = $item->alias ? ($item->id . ':' . $item->alias) : $item->id;
			$item->catslug = ($item->category_alias) ? ($item->catid . ':' . $item->category_alias) : $item->catid;
			$item->parent_slug = ($item->parent_alias) ? ($item->parent_id . ':' . $item->parent_alias) : $item->parent_id;
			// No link for ROOT category
			if ($item->parent_alias == 'root')
			{
				$item->parent_slug = null;
			}
			$item->event = new stdClass;
			$dispatcher = JEventDispatcher::getInstance();
			// Old plugins: Ensure that text property is available
			if (!isset($item->text))
			{
				$item->text = $item->introtext;
			}
			JPluginHelper::importPlugin('content');
			$dispatcher->trigger('onContentPrepare', array ('com_content.featured', &$item, &$this->params, 0));
			// Old plugins: Use processed text as introtext
			$item->introtext = $item->text;
			// Collect plugin output for the three display positions; the
			// layout echoes these event properties verbatim.
			$results = $dispatcher->trigger('onContentAfterTitle', array('com_content.featured', &$item, &$item->params, 0));
			$item->event->afterDisplayTitle = trim(implode("\n", $results));
			$results = $dispatcher->trigger('onContentBeforeDisplay', array('com_content.featured', &$item, &$item->params, 0));
			$item->event->beforeDisplayContent = trim(implode("\n", $results));
			$results = $dispatcher->trigger('onContentAfterDisplay', array('com_content.featured', &$item, &$item->params, 0));
			$item->event->afterDisplayContent = trim(implode("\n", $results));
		}
		// Preprocess the breakdown of leading, intro and linked articles.
		// This makes it much easier for the designer to just interogate the arrays.
		$max = count($items);
		// The first group is the leading articles.
		$limit = $numLeading;
		for ($i = 0; $i < $limit && $i < $max; $i++)
		{
			$this->lead_items[$i] = &$items[$i];
		}
		// The second group is the intro articles.
		$limit = $numLeading + $numIntro;
		// Order articles across, then down (or single column mode)
		for ($i = $numLeading; $i < $limit && $i < $max; $i++)
		{
			$this->intro_items[$i] = &$items[$i];
		}
		$this->columns = max(1, $params->def('num_columns', 1));
		$order = $params->def('multi_column_order', 1);
		if ($order == 0 && $this->columns > 1)
		{
			// call order down helper
			$this->intro_items = ContentHelperQuery::orderDownColumns($this->intro_items, $this->columns);
		}
		// The remainder are the links.
		for ($i = $numLeading + $numIntro; $i < $max; $i++)
		{
			$this->link_items[$i] = &$items[$i];
		}
		//Escape strings for HTML output
		$this->pageclass_sfx = htmlspecialchars($params->get('pageclass_sfx'));
		$this->params = &$params;
		$this->items = &$items;
		$this->pagination = &$pagination;
		$this->user = &$user;
		$this->_prepareDocument();
		parent::display($tpl);
	}
	/**
	 * Prepares the document: page title, meta data and feed links.
	 */
	protected function _prepareDocument()
	{
		$app = JFactory::getApplication();
		$menus = $app->getMenu();
		$title = null;
		// Because the application sets a default page title,
		// we need to get it from the menu item itself
		$menu = $menus->getActive();
		if ($menu)
		{
			$this->params->def('page_heading', $this->params->get('page_title', $menu->title));
		}
		else
		{
			$this->params->def('page_heading', JText::_('JGLOBAL_ARTICLES'));
		}
		$title = $this->params->get('page_title', '');
		if (empty($title))
		{
			$title = $app->getCfg('sitename');
		}
		// sitename_pagetitles: 1 = "sitename - title", 2 = "title - sitename".
		elseif ($app->getCfg('sitename_pagetitles', 0) == 1)
		{
			$title = JText::sprintf('JPAGETITLE', $app->getCfg('sitename'), $title);
		}
		elseif ($app->getCfg('sitename_pagetitles', 0) == 2)
		{
			$title = JText::sprintf('JPAGETITLE', $title, $app->getCfg('sitename'));
		}
		$this->document->setTitle($title);
		if ($this->params->get('menu-meta_description'))
		{
			$this->document->setDescription($this->params->get('menu-meta_description'));
		}
		if ($this->params->get('menu-meta_keywords'))
		{
			$this->document->setMetadata('keywords', $this->params->get('menu-meta_keywords'));
		}
		if ($this->params->get('robots'))
		{
			$this->document->setMetadata('robots', $this->params->get('robots'));
		}
		// Add feed links
		if ($this->params->get('show_feed_link', 1))
		{
			$link = '&format=feed&limitstart=';
			$attribs = array('type' => 'application/rss+xml', 'title' => 'RSS 2.0');
			$this->document->addHeadLink(JRoute::_($link . '&type=rss'), 'alternate', 'rel', $attribs);
			$attribs = array('type' => 'application/atom+xml', 'title' => 'Atom 1.0');
			$this->document->addHeadLink(JRoute::_($link . '&type=atom'), 'alternate', 'rel', $attribs);
		}
	}
}
| gpl-2.0 |
seogi1004/cdnjs | ajax/libs/startbootstrap-agency/5.0.0/js/agency.js | 1536 | (function($) {
"use strict"; // Start of use strict
// Smooth scrolling using jQuery easing
$('a.js-scroll-trigger[href*="#"]:not([href="#"])').click(function() {
if (location.pathname.replace(/^\//, '') == this.pathname.replace(/^\//, '') && location.hostname == this.hostname) {
var target = $(this.hash);
target = target.length ? target : $('[name=' + this.hash.slice(1) + ']');
if (target.length) {
$('html, body').animate({
scrollTop: (target.offset().top - 54)
}, 1000, "easeInOutExpo");
return false;
}
}
});
// Closes responsive menu when a scroll trigger link is clicked
$('.js-scroll-trigger').click(function() {
$('.navbar-collapse').collapse('hide');
});
// Activate scrollspy to add active class to navbar items on scroll
$('body').scrollspy({
target: '#mainNav',
offset: 56
});
// Collapse Navbar
var navbarCollapse = function() {
if ($("#mainNav").offset().top > 100) {
$("#mainNav").addClass("navbar-shrink");
} else {
$("#mainNav").removeClass("navbar-shrink");
}
};
// Collapse now if page is not at top
navbarCollapse();
// Collapse the navbar when page is scrolled
$(window).scroll(navbarCollapse);
// Hide navbar when modals trigger
$('.portfolio-modal').on('show.bs.modal', function(e) {
$('.navbar').addClass('d-none');
})
$('.portfolio-modal').on('hidden.bs.modal', function(e) {
$('.navbar').removeClass('d-none');
})
})(jQuery); // End of use strict
| mit |
otto-torino/gino | core/resources/libraries/phpfastcache/phpfastcache/_extensions/predis-1.0/src/Command/KeyExists.php | 628 | <?php
/*
* This file is part of the Predis package.
*
* (c) Daniele Alessandri <suppakilla@gmail.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Predis\Command;
/**
 * Command implementation for the Redis EXISTS key check.
 *
 * @link http://redis.io/commands/exists
 * @author Daniele Alessandri <suppakilla@gmail.com>
 */
class KeyExists extends Command
{
    /**
     * {@inheritdoc}
     */
    public function getId()
    {
        return 'EXISTS';
    }
    /**
     * {@inheritdoc}
     *
     * Redis replies with an integer count; coerce it to a boolean
     * (non-zero means the key exists).
     */
    public function parseResponse($data)
    {
        return (bool) $data;
    }
}
| mit |
kdwink/intellij-community | python/testData/codeInsight/smartEnter/googleDocStringColonAndIndentAfterSection.py | 43 | def func():
"""
A<caret>rgs
""" | apache-2.0 |
michaelgallacher/intellij-community | platform/core-api/src/com/intellij/util/PathsList.java | 5819 | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util;
import com.intellij.ide.highlighter.ArchiveFileType;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypeRegistry;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.StandardFileSystems;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.containers.FilteringIterator;
import com.intellij.util.containers.HashSet;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.util.*;
import static com.intellij.util.containers.ContainerUtil.*;
public class PathsList {
private final List<String> myPath = new ArrayList<String>();
private final List<String> myPathTail = new ArrayList<String>();
private final Set<String> myPathSet = new HashSet<String>();
private static final Function<String, VirtualFile> PATH_TO_LOCAL_VFILE = new NullableFunction<String, VirtualFile>() {
@Override
public VirtualFile fun(String path) {
return StandardFileSystems.local().findFileByPath(path.replace(File.separatorChar, '/'));
}
};
private static final Function<VirtualFile, String> LOCAL_PATH = new Function<VirtualFile, String>() {
@Override
public String fun(VirtualFile file) {
return PathUtil.getLocalPath(file);
}
};
private static final Function<String, VirtualFile> PATH_TO_DIR = new NullableFunction<String, VirtualFile>() {
@Override
public VirtualFile fun(String s) {
final FileType fileType = FileTypeRegistry.getInstance().getFileTypeByFileName(s);
final VirtualFile localFile = PATH_TO_LOCAL_VFILE.fun(s);
if (localFile == null) return null;
if (ArchiveFileType.INSTANCE.equals(fileType) && !localFile.isDirectory()) {
return StandardFileSystems.getJarRootForLocalFile(localFile);
}
return localFile;
}
};
public void add(@NonNls String path) {
addAllLast(chooseFirstTimeItems(path), myPath);
}
public void remove(@NotNull String path) {
myPath.remove(path);
myPathTail.remove(path);
myPathSet.remove(path);
}
public void add(VirtualFile file) {
add(LOCAL_PATH.fun(file));
}
public void addFirst(@NonNls String path) {
final Iterator<String> elements = chooseFirstTimeItems(path);
int index = 0;
while (elements.hasNext()) {
final String element = elements.next();
myPath.add(index, element);
myPathSet.add(element);
index++;
}
}
public void addTail(String path) {
addAllLast(chooseFirstTimeItems(path), myPathTail);
}
// Lazily yields the entries of a path-separator-delimited string that are
// non-blank and have not been added before (i.e. are absent from
// myPathSet). Returns an empty iterator for a null path. Because filtering
// happens as the iterator is consumed, callers that interleave additions
// with iteration (add/addFirst/addTail) see duplicates filtered against
// the set's state at consumption time.
private Iterator<String> chooseFirstTimeItems(String path) {
  if (path == null) {
    return emptyIterator();
  }
  final StringTokenizer tokenizer = new StringTokenizer(path, File.pathSeparator);
  // in JDK 1.5 StringTokenizer implements Enumeration<Object> rather then Enumeration<String>, need to convert
  final Enumeration<String> en = new Enumeration<String>() {
    @Override
    public boolean hasMoreElements() {
      return tokenizer.hasMoreElements();
    }
    @Override
    public String nextElement() {
      return (String)tokenizer.nextElement();
    }
  };
  return FilteringIterator.create(iterate(en), new Condition<String>() {
    @Override
    public boolean value(String element) {
      // Trim only for the emptiness/duplicate test; the untrimmed token is
      // what gets yielded by the iterator.
      element = element.trim();
      return !element.isEmpty() && !myPathSet.contains(element);
    }
  });
}
// Drains the iterator into the given target list, recording every item in
// myPathSet so subsequent calls can filter out duplicates.
private void addAllLast(Iterator<String> elements, List<String> toArray) {
  while (elements.hasNext()) {
    final String item = elements.next();
    toArray.add(item);
    myPathSet.add(item);
  }
}
/**
 * Joins all entries (head section followed by tail section) with the
 * platform path separator, e.g. for use as a classpath string.
 */
@NotNull
public String getPathsString() {
  final List<String> allPaths = getPathList();
  return StringUtil.join(allPaths, File.pathSeparator);
}
/**
 * Returns a fresh mutable list containing the head-section entries
 * followed by the tail-section entries. Pre-sized to the exact combined
 * size to avoid intermediate array growth while copying.
 */
public List<String> getPathList() {
  final List<String> result = new ArrayList<String>(myPath.size() + myPathTail.size());
  result.addAll(myPath);
  result.addAll(myPathTail);
  return result;
}
/**
 * @return {@link VirtualFile}s on local file system (returns jars as files).
 * Entries whose path does not resolve to an existing file are silently
 * dropped ({@code skipNulls}).
 */
public List<VirtualFile> getVirtualFiles() {
  return skipNulls(map(getPathList(), PATH_TO_LOCAL_VFILE));
}
/**
 * @return The same as {@link #getVirtualFiles()} but returns jars as {@link JarFileSystem} roots.
 * Unresolvable entries are silently dropped ({@code skipNulls}).
 */
public List<VirtualFile> getRootDirs() {
  return skipNulls(map(getPathList(), PATH_TO_DIR));
}
/** Adds every entry of the given list via {@link #add(String)}. */
public void addAll(List<String> allClasspath) {
  for (final String entry : allClasspath) {
    add(entry);
  }
}
/** Adds every file of the given array via {@link #add(File)}. */
public void addAllFiles(File[] classpathList) {
  for (final File file : classpathList) {
    add(file);
  }
}
/** Adds every file of the given list via {@link #add(File)}. */
public void addAllFiles(List<File> classpathList) {
  for (final File entry : classpathList) {
    add(entry);
  }
}
/**
 * Adds the canonical absolute path of the given file, converted to the
 * platform's separator character.
 */
public void add(File file) {
  final String canonical = FileUtil.toCanonicalPath(file.getAbsolutePath());
  add(canonical.replace('/', File.separatorChar));
}
/** Adds every virtual file of the given collection via {@link #add(VirtualFile)}. */
public void addVirtualFiles(Collection<VirtualFile> files) {
  for (final VirtualFile vf : files) {
    add(vf);
  }
}
/** Array variant of {@link #addVirtualFiles(Collection)}. */
public void addVirtualFiles(VirtualFile[] files) {
  addVirtualFiles(Arrays.asList(files));
}
}
| apache-2.0 |
AascisB/thumbnailator | src/test/java/net/coobird/thumbnailator/name/RenameTest.java | 10609 | package net.coobird.thumbnailator.name;
import net.coobird.thumbnailator.ThumbnailParameter;
import net.coobird.thumbnailator.builders.ThumbnailParameterBuilder;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Tests for the built-in {@link Rename} strategies. Each strategy is
 * exercised against three file-name shapes (no extension, a single
 * extension, multiple dots) and with both a {@code null} parameter and a
 * minimal valid {@link ThumbnailParameter} — the parameter must not
 * influence any of the built-in renames.
 */
public class RenameTest
{
	/**
	 * Applies {@code rename} to {@code input} with {@code param} and checks
	 * that the result equals {@code expected}.
	 */
	private static void assertRenamed(Rename rename, String input, ThumbnailParameter param, String expected)
	{
		assertEquals(expected, rename.apply(input, param));
	}

	/** @return a minimal, valid {@link ThumbnailParameter} (scale 1.0). */
	private static ThumbnailParameter scalingParam()
	{
		return new ThumbnailParameterBuilder().scale(1.0).build();
	}

	@Test
	public void renameNoChange_NameGiven_ParamNull()
	{
		assertRenamed(Rename.NO_CHANGE, "filename", null, "filename");
	}

	@Test
	public void renameNoChange_NameGiven_ParamGiven()
	{
		assertRenamed(Rename.NO_CHANGE, "filename", scalingParam(), "filename");
	}

	@Test
	public void renamePrefixDotThumbnail_NameGiven_ParamNull()
	{
		assertRenamed(Rename.PREFIX_DOT_THUMBNAIL, "filename", null, "thumbnail.filename");
	}

	@Test
	public void renamePrefixDotThumbnail_NameGiven_ParamGiven()
	{
		assertRenamed(Rename.PREFIX_DOT_THUMBNAIL, "filename", scalingParam(), "thumbnail.filename");
	}

	@Test
	public void renamePrefixHyphenThumbnail_NameGiven_ParamNull()
	{
		assertRenamed(Rename.PREFIX_HYPHEN_THUMBNAIL, "filename", null, "thumbnail-filename");
	}

	@Test
	public void renamePrefixHyphenThumbnail_NameGiven_ParamGiven()
	{
		assertRenamed(Rename.PREFIX_HYPHEN_THUMBNAIL, "filename", scalingParam(), "thumbnail-filename");
	}

	@Test
	public void renameSuffixDotThumbnail_NameGiven_ParamNull()
	{
		assertRenamed(Rename.SUFFIX_DOT_THUMBNAIL, "filename", null, "filename.thumbnail");
	}

	@Test
	public void renameSuffixDotThumbnail_NameGiven_ParamGiven()
	{
		assertRenamed(Rename.SUFFIX_DOT_THUMBNAIL, "filename", scalingParam(), "filename.thumbnail");
	}

	@Test
	public void renameSuffixHyphenThumbnail_NameGiven_ParamNull()
	{
		assertRenamed(Rename.SUFFIX_HYPHEN_THUMBNAIL, "filename", null, "filename-thumbnail");
	}

	@Test
	public void renameSuffixHyphenThumbnail_NameGiven_ParamGiven()
	{
		assertRenamed(Rename.SUFFIX_HYPHEN_THUMBNAIL, "filename", scalingParam(), "filename-thumbnail");
	}

	@Test
	public void renameNoChange_NameGiven_ParamNull_WithExtension()
	{
		assertRenamed(Rename.NO_CHANGE, "filename.jpg", null, "filename.jpg");
	}

	@Test
	public void renameNoChange_NameGiven_ParamGiven_WithExtension()
	{
		assertRenamed(Rename.NO_CHANGE, "filename.jpg", scalingParam(), "filename.jpg");
	}

	@Test
	public void renamePrefixDotThumbnail_NameGiven_ParamNull_WithExtension()
	{
		assertRenamed(Rename.PREFIX_DOT_THUMBNAIL, "filename.jpg", null, "thumbnail.filename.jpg");
	}

	@Test
	public void renamePrefixDotThumbnail_NameGiven_ParamGiven_WithExtension()
	{
		assertRenamed(Rename.PREFIX_DOT_THUMBNAIL, "filename.jpg", scalingParam(), "thumbnail.filename.jpg");
	}

	@Test
	public void renamePrefixHyphenThumbnail_NameGiven_ParamNull_WithExtension()
	{
		assertRenamed(Rename.PREFIX_HYPHEN_THUMBNAIL, "filename.jpg", null, "thumbnail-filename.jpg");
	}

	@Test
	public void renamePrefixHyphenThumbnail_NameGiven_ParamGiven_WithExtension()
	{
		assertRenamed(Rename.PREFIX_HYPHEN_THUMBNAIL, "filename.jpg", scalingParam(), "thumbnail-filename.jpg");
	}

	@Test
	public void renameSuffixDotThumbnail_NameGiven_ParamNull_WithExtension()
	{
		assertRenamed(Rename.SUFFIX_DOT_THUMBNAIL, "filename.jpg", null, "filename.thumbnail.jpg");
	}

	@Test
	public void renameSuffixDotThumbnail_NameGiven_ParamGiven_WithExtension()
	{
		assertRenamed(Rename.SUFFIX_DOT_THUMBNAIL, "filename.jpg", scalingParam(), "filename.thumbnail.jpg");
	}

	@Test
	public void renameSuffixHyphenThumbnail_NameGiven_ParamNull_WithExtension()
	{
		assertRenamed(Rename.SUFFIX_HYPHEN_THUMBNAIL, "filename.jpg", null, "filename-thumbnail.jpg");
	}

	@Test
	public void renameSuffixHyphenThumbnail_NameGiven_ParamGiven_WithExtension()
	{
		assertRenamed(Rename.SUFFIX_HYPHEN_THUMBNAIL, "filename.jpg", scalingParam(), "filename-thumbnail.jpg");
	}

	@Test
	public void renameNoChange_NameGiven_ParamNull_WithMultipleDots()
	{
		assertRenamed(Rename.NO_CHANGE, "filename.middle.jpg", null, "filename.middle.jpg");
	}

	@Test
	public void renameNoChange_NameGiven_ParamGiven_WithMultipleDots()
	{
		assertRenamed(Rename.NO_CHANGE, "filename.middle.jpg", scalingParam(), "filename.middle.jpg");
	}

	@Test
	public void renamePrefixDotThumbnail_NameGiven_ParamNull_WithMultipleDots()
	{
		assertRenamed(Rename.PREFIX_DOT_THUMBNAIL, "filename.middle.jpg", null, "thumbnail.filename.middle.jpg");
	}

	@Test
	public void renamePrefixDotThumbnail_NameGiven_ParamGiven_WithMultipleDots()
	{
		assertRenamed(Rename.PREFIX_DOT_THUMBNAIL, "filename.middle.jpg", scalingParam(), "thumbnail.filename.middle.jpg");
	}

	@Test
	public void renamePrefixHyphenThumbnail_NameGiven_ParamNull_WithMultipleDots()
	{
		assertRenamed(Rename.PREFIX_HYPHEN_THUMBNAIL, "filename.middle.jpg", null, "thumbnail-filename.middle.jpg");
	}

	@Test
	public void renamePrefixHyphenThumbnail_NameGiven_ParamGiven_WithMultipleDots()
	{
		assertRenamed(Rename.PREFIX_HYPHEN_THUMBNAIL, "filename.middle.jpg", scalingParam(), "thumbnail-filename.middle.jpg");
	}

	@Test
	public void renameSuffixDotThumbnail_NameGiven_ParamNull_WithMultipleDots()
	{
		assertRenamed(Rename.SUFFIX_DOT_THUMBNAIL, "filename.middle.jpg", null, "filename.middle.thumbnail.jpg");
	}

	@Test
	public void renameSuffixDotThumbnail_NameGiven_ParamGiven_WithMultipleDots()
	{
		assertRenamed(Rename.SUFFIX_DOT_THUMBNAIL, "filename.middle.jpg", scalingParam(), "filename.middle.thumbnail.jpg");
	}

	@Test
	public void renameSuffixHyphenThumbnail_NameGiven_ParamNull_WithMultipleDots()
	{
		assertRenamed(Rename.SUFFIX_HYPHEN_THUMBNAIL, "filename.middle.jpg", null, "filename.middle-thumbnail.jpg");
	}

	@Test
	public void renameSuffixHyphenThumbnail_NameGiven_ParamGiven_WithMultipleDots()
	{
		assertRenamed(Rename.SUFFIX_HYPHEN_THUMBNAIL, "filename.middle.jpg", scalingParam(), "filename.middle-thumbnail.jpg");
	}
}
| mit |
MustWin/kubernetes | contrib/mesos/pkg/offers/offers_test.go | 10293 | /*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package offers
import (
"errors"
"sync/atomic"
"testing"
"time"
mesos "github.com/mesos/mesos-go/mesosproto"
util "github.com/mesos/mesos-go/mesosutil"
"k8s.io/kubernetes/contrib/mesos/pkg/proc"
)
// TestExpiredOffer checks the contract of an offer constructed directly in
// the expired state: its accessors still answer, it can never be acquired,
// and its decline-delay lies within (0, ttl].
func TestExpiredOffer(t *testing.T) {
	t.Parallel()
	const ttl = 2 * time.Second
	offer := Expired("test", "testhost", ttl)
	if offer.Id() != "test" {
		t.Error("expiredOffer does not return its Id")
	}
	if offer.Host() != "testhost" {
		t.Error("expiredOffer does not return its hostname")
	}
	if !offer.HasExpired() {
		t.Error("expiredOffer is not expired")
	}
	if offer.Details() != nil {
		t.Error("expiredOffer does not return nil Details")
	}
	if offer.Acquire() {
		t.Error("expiredOffer must not be able to be acquired")
	}
	if delay := offer.GetDelay(); delay <= 0 || delay > ttl {
		t.Error("expiredOffer does not return a valid deadline")
	}
} // TestExpiredOffer
// TestTimedOffer verifies the acquire/release mutual exclusion of a live
// offer and that expiry (once the ttl deadline passes) is independent of
// the acquisition state.
func TestTimedOffer(t *testing.T) {
	t.Parallel()
	ttl := 2 * time.Second
	now := time.Now()
	// liveOffer fields: details (nil is fine here), deadline, acquired flag.
	o := &liveOffer{nil, now.Add(ttl), 0}
	if o.HasExpired() {
		t.Errorf("offer ttl was %v and should not have expired yet", ttl)
	}
	if !o.Acquire() {
		t.Fatal("1st acquisition of offer failed")
	}
	o.Release()
	if !o.Acquire() {
		t.Fatal("2nd acquisition of offer failed")
	}
	// Acquire without an intervening Release must fail.
	if o.Acquire() {
		t.Fatal("3rd acquisition of offer passed but prior claim was not released")
	}
	o.Release()
	if !o.Acquire() {
		t.Fatal("4th acquisition of offer failed")
	}
	o.Release()
	time.Sleep(ttl)
	if !o.HasExpired() {
		t.Fatal("offer not expired after ttl passed")
	}
	// An expired offer can still be acquired...
	if !o.Acquire() {
		t.Fatal("5th acquisition of offer failed; should not be tied to expiration")
	}
	// ...but the mutual-exclusion rule continues to apply.
	if o.Acquire() {
		t.Fatal("6th acquisition of offer succeeded; should already be acquired")
	}
} // TestTimedOffer
// TestOfferStorage drives the offer registry through its full life cycle:
// add/get, delete-with-linger, decline callbacks (counted atomically),
// TTL expiry, LingerTTL removal, compatibility filtering, and the three
// invalidation flavors (by id, all, per-slave). The test is timing
// sensitive: ttl is a quarter second and several checks sleep multiples
// of it.
func TestOfferStorage(t *testing.T) {
	ttl := time.Second / 4
	var declinedNum int32
	getDeclinedNum := func() int32 { return atomic.LoadInt32(&declinedNum) }
	config := RegistryConfig{
		// Counts every decline so the assertions below can observe it.
		DeclineOffer: func(offerId string) <-chan error {
			atomic.AddInt32(&declinedNum, 1)
			return proc.ErrorChan(nil)
		},
		// Everything is compatible except the "incompatiblehost" host name.
		Compat: func(o *mesos.Offer) bool {
			return o.Hostname == nil || *o.Hostname != "incompatiblehost"
		},
		TTL:       ttl,
		LingerTTL: 2 * ttl,
	}
	storage := CreateRegistry(config)
	done := make(chan struct{})
	storage.Init(done)
	// Add offer
	id := util.NewOfferID("foo")
	o := &mesos.Offer{Id: id}
	storage.Add([]*mesos.Offer{o})
	// Added offer should be in the storage
	if obj, ok := storage.Get(id.GetValue()); obj == nil || !ok {
		t.Error("offer not added")
	}
	if obj, _ := storage.Get(id.GetValue()); obj.Details() != o {
		t.Error("added offer differs from returned offer")
	}
	// Not-added offer is not in storage
	if obj, ok := storage.Get("bar"); obj != nil || ok {
		t.Error("offer bar should not exist in storage")
	}
	// Deleted offer lingers in storage, is acquired and declined
	offer, _ := storage.Get(id.GetValue())
	declinedNumBefore := getDeclinedNum()
	storage.Delete(id.GetValue(), "deleted for test")
	if obj, _ := storage.Get(id.GetValue()); obj == nil {
		t.Error("deleted offer is not lingering")
	}
	if obj, _ := storage.Get(id.GetValue()); !obj.HasExpired() {
		t.Error("deleted offer is no expired")
	}
	if ok := offer.Acquire(); ok {
		t.Error("deleted offer can be acquired")
	}
	if getDeclinedNum() <= declinedNumBefore {
		t.Error("deleted offer was not declined")
	}
	// Acquired offer is only declined after 2*ttl
	id = util.NewOfferID("foo2")
	o = &mesos.Offer{Id: id}
	storage.Add([]*mesos.Offer{o})
	offer, _ = storage.Get(id.GetValue())
	declinedNumBefore = getDeclinedNum()
	offer.Acquire()
	storage.Delete(id.GetValue(), "deleted for test")
	if getDeclinedNum() > declinedNumBefore {
		t.Error("acquired offer is declined")
	}
	offer.Release()
	time.Sleep(3 * ttl)
	if getDeclinedNum() <= declinedNumBefore {
		t.Error("released offer is not declined after 2*ttl")
	}
	// Added offer should be expired after ttl, but lingering
	id = util.NewOfferID("foo3")
	o = &mesos.Offer{Id: id}
	storage.Add([]*mesos.Offer{o})
	time.Sleep(2 * ttl)
	obj, ok := storage.Get(id.GetValue())
	if obj == nil || !ok {
		t.Error("offer not lingering after ttl")
	}
	if !obj.HasExpired() {
		t.Error("offer is not expired after ttl")
	}
	// Should be deleted when waiting longer than LingerTTL
	time.Sleep(2 * ttl)
	if obj, ok := storage.Get(id.GetValue()); obj != nil || ok {
		t.Error("offer not deleted after LingerTTL")
	}
	// Incompatible offer is declined
	id = util.NewOfferID("foo4")
	incompatibleHostname := "incompatiblehost"
	o = &mesos.Offer{Id: id, Hostname: &incompatibleHostname}
	declinedNumBefore = getDeclinedNum()
	storage.Add([]*mesos.Offer{o})
	if obj, ok := storage.Get(id.GetValue()); obj != nil || ok {
		t.Error("incompatible offer not rejected")
	}
	if getDeclinedNum() <= declinedNumBefore {
		t.Error("incompatible offer is not declined")
	}
	// Invalidated offer are not declined, but expired
	id = util.NewOfferID("foo5")
	o = &mesos.Offer{Id: id}
	storage.Add([]*mesos.Offer{o})
	offer, _ = storage.Get(id.GetValue())
	declinedNumBefore = getDeclinedNum()
	storage.Invalidate(id.GetValue())
	if obj, _ := storage.Get(id.GetValue()); !obj.HasExpired() {
		t.Error("invalidated offer is not expired")
	}
	if getDeclinedNum() > declinedNumBefore {
		t.Error("invalidated offer is declined")
	}
	if ok := offer.Acquire(); ok {
		t.Error("invalidated offer can be acquired")
	}
	// Invalidate "" will invalidate all offers
	id = util.NewOfferID("foo6")
	o = &mesos.Offer{Id: id}
	storage.Add([]*mesos.Offer{o})
	id2 := util.NewOfferID("foo7")
	o2 := &mesos.Offer{Id: id2}
	storage.Add([]*mesos.Offer{o2})
	storage.Invalidate("")
	if obj, _ := storage.Get(id.GetValue()); !obj.HasExpired() {
		t.Error("invalidated offer is not expired")
	}
	if obj2, _ := storage.Get(id2.GetValue()); !obj2.HasExpired() {
		t.Error("invalidated offer is not expired")
	}
	// InvalidateForSlave invalides all offers for that slave, but only those
	id = util.NewOfferID("foo8")
	slaveId := util.NewSlaveID("test-slave")
	o = &mesos.Offer{Id: id, SlaveId: slaveId}
	storage.Add([]*mesos.Offer{o})
	id2 = util.NewOfferID("foo9")
	o2 = &mesos.Offer{Id: id2}
	storage.Add([]*mesos.Offer{o2})
	storage.InvalidateForSlave(slaveId.GetValue())
	if obj, _ := storage.Get(id.GetValue()); !obj.HasExpired() {
		t.Error("invalidated offer for test-slave is not expired")
	}
	if obj2, _ := storage.Get(id2.GetValue()); obj2.HasExpired() {
		t.Error("invalidated offer another slave is expired")
	}
	// Closing done stops the registry's background processing.
	close(done)
} // TestOfferStorage
// TestListen verifies that a listener registered with a filter is notified
// (its channel is closed) only for offers that match its filter, while a
// non-matching listener sees no notification within the ttl window.
func TestListen(t *testing.T) {
	ttl := time.Second / 4
	config := RegistryConfig{
		DeclineOffer: func(offerId string) <-chan error {
			return proc.ErrorChan(nil)
		},
		Compat: func(o *mesos.Offer) bool {
			return true
		},
		TTL:           ttl,
		ListenerDelay: ttl / 2,
	}
	storage := CreateRegistry(config)
	done := make(chan struct{})
	storage.Init(done)
	// Create two listeners with a hostname filter
	hostname1 := "hostname1"
	hostname2 := "hostname2"
	listener1 := storage.Listen("listener1", func(offer *mesos.Offer) bool {
		return offer.GetHostname() == hostname1
	})
	listener2 := storage.Listen("listener2", func(offer *mesos.Offer) bool {
		return offer.GetHostname() == hostname2
	})
	// Add hostname1 offer
	id := util.NewOfferID("foo")
	o := &mesos.Offer{Id: id, Hostname: &hostname1}
	storage.Add([]*mesos.Offer{o})
	// listener1 is notified by closing channel
	// (blocking select: a receive succeeds on a closed channel with more==false)
	select {
	case _, more := <-listener1:
		if more {
			t.Error("listener1 is not closed")
		}
	}
	// listener2 is not notified within ttl
	select {
	case <-listener2:
		t.Error("listener2 is notified")
	case <-time.After(ttl):
	}
	close(done)
} // TestListen
// TestWalk exercises Registry.Walk: the walker's boolean/error results are
// propagated, the walk visits stored offers, and a walker may acquire an
// offer (a second walk then finds it already acquired). The assertions
// depend on the exact sequence of walks, so the counter checks are strict.
func TestWalk(t *testing.T) {
	t.Parallel()
	config := RegistryConfig{
		DeclineOffer: func(offerId string) <-chan error {
			return proc.ErrorChan(nil)
		},
		TTL:           0 * time.Second,
		LingerTTL:     0 * time.Second,
		ListenerDelay: 0 * time.Second,
	}
	storage := CreateRegistry(config)
	acceptedOfferId := ""
	walked := 0
	// walker1 acquires the first acquirable offer it sees and stops the walk.
	walker1 := func(p Perishable) (bool, error) {
		walked++
		if p.Acquire() {
			acceptedOfferId = p.Details().Id.GetValue()
			return true, nil
		}
		return false, nil
	}
	// sanity check
	err := storage.Walk(walker1)
	if err != nil {
		t.Fatalf("received impossible error %v", err)
	}
	if walked != 0 {
		t.Fatal("walked empty storage")
	}
	if acceptedOfferId != "" {
		t.Fatal("somehow found an offer when registry was empty")
	}
	impl, ok := storage.(*offerStorage)
	if !ok {
		t.Fatal("unexpected offer storage impl")
	}
	// single offer, inserted directly into the underlying cache
	ttl := 2 * time.Second
	now := time.Now()
	o := &liveOffer{&mesos.Offer{Id: util.NewOfferID("foo")}, now.Add(ttl), 0}
	impl.offers.Add(o)
	err = storage.Walk(walker1)
	if err != nil {
		t.Fatalf("received impossible error %v", err)
	}
	if walked != 1 {
		t.Fatalf("walk count %d", walked)
	}
	if acceptedOfferId != "foo" {
		t.Fatalf("found offer %v", acceptedOfferId)
	}
	acceptedOfferId = ""
	// second walk: the offer is still acquired, so walker1 cannot claim it
	err = storage.Walk(walker1)
	if err != nil {
		t.Fatalf("received impossible error %v", err)
	}
	if walked != 2 {
		t.Fatalf("walk count %d", walked)
	}
	if acceptedOfferId != "" {
		t.Fatalf("found offer %v", acceptedOfferId)
	}
	// walker2 stops immediately without touching the offer
	walker2 := func(p Perishable) (bool, error) {
		walked++
		return true, nil
	}
	err = storage.Walk(walker2)
	if err != nil {
		t.Fatalf("received impossible error %v", err)
	}
	if walked != 3 {
		t.Fatalf("walk count %d", walked)
	}
	if acceptedOfferId != "" {
		t.Fatalf("found offer %v", acceptedOfferId)
	}
	// walker3 returns an error, which Walk must propagate
	walker3 := func(p Perishable) (bool, error) {
		walked++
		return true, errors.New("baz")
	}
	err = storage.Walk(walker3)
	if err == nil {
		t.Fatal("expected error")
	}
	if walked != 4 {
		t.Fatalf("walk count %d", walked)
	}
}
| apache-2.0 |
mpilman/presto | presto-orc/src/main/java/com/facebook/presto/orc/metadata/StripeInformation.java | 2021 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.orc.metadata;
import static com.google.common.base.MoreObjects.toStringHelper;
/**
 * Immutable description of one ORC stripe: the number of rows it holds,
 * its byte offset within the file, and the lengths of its three sections
 * (index streams, data streams, stripe footer).
 */
public class StripeInformation
{
    private final int numberOfRows;
    private final long offset;
    private final long indexLength;
    private final long dataLength;
    private final long footerLength;

    public StripeInformation(int numberOfRows, long offset, long indexLength, long dataLength, long footerLength)
    {
        this.numberOfRows = numberOfRows;
        this.offset = offset;
        this.indexLength = indexLength;
        this.dataLength = dataLength;
        this.footerLength = footerLength;
    }

    /** @return total size of the stripe in bytes: index + data + footer */
    public long getTotalLength()
    {
        return indexLength + (dataLength + footerLength);
    }

    /** @return number of rows stored in this stripe */
    public int getNumberOfRows()
    {
        return numberOfRows;
    }

    /** @return byte offset of the stripe within the file */
    public long getOffset()
    {
        return offset;
    }

    /** @return length in bytes of the index streams section */
    public long getIndexLength()
    {
        return indexLength;
    }

    /** @return length in bytes of the data streams section */
    public long getDataLength()
    {
        return dataLength;
    }

    /** @return length in bytes of the stripe footer */
    public long getFooterLength()
    {
        return footerLength;
    }

    @Override
    public String toString()
    {
        return toStringHelper(this)
                .add("numberOfRows", numberOfRows)
                .add("offset", offset)
                .add("indexLength", indexLength)
                .add("dataLength", dataLength)
                .add("footerLength", footerLength)
                .toString();
    }
}
| apache-2.0 |
IllusionRom-deprecated/android_platform_external_chromium_org | chrome/renderer/extensions/context_menus_custom_bindings.cc | 1022 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/renderer/extensions/context_menus_custom_bindings.h"
#include "base/bind.h"
#include "chrome/common/extensions/extension_messages.h"
#include "content/public/renderer/render_thread.h"
#include "grit/renderer_resources.h"
#include "v8/include/v8.h"
namespace {
// Synchronously asks the browser process for a unique ID (via the
// ExtensionHostMsg_GenerateUniqueID sync IPC, which fills in
// |context_menu_id|) and hands it back to the calling script. If the send
// leaves the out-param untouched, the initial -1 is returned.
void GetNextContextMenuId(const v8::FunctionCallbackInfo<v8::Value>& args) {
  int context_menu_id = -1;
  content::RenderThread::Get()->Send(
      new ExtensionHostMsg_GenerateUniqueID(&context_menu_id));
  args.GetReturnValue().Set(static_cast<int32_t>(context_menu_id));
}
} // namespace
namespace extensions {
// Registers the GetNextContextMenuId native under that name so the
// contextMenus custom-bindings JavaScript can call it.
ContextMenusCustomBindings::ContextMenusCustomBindings(
    Dispatcher* dispatcher, ChromeV8Context* context)
    : ChromeV8Extension(dispatcher, context) {
  RouteFunction("GetNextContextMenuId", base::Bind(&GetNextContextMenuId));
}
} // extensions
| bsd-3-clause |
varunajmera0/webseries | assets/sdk_google/vendor/google/apiclient-services/src/Google/Service/Replicapoolupdater/RollingUpdate.php | 3939 | <?php
/*
* Copyright 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
/**
 * Model for the Replica Pool Updater "rollingUpdate" API resource.
 *
 * Generated accessor boilerplate: each public property mirrors a JSON
 * field of the resource. The $errorType/$errorDataType and
 * $policyType/$policyDataType pairs follow the Google API client
 * convention that tells Google_Model which class to use when
 * (de)serializing the nested "error" and "policy" objects.
 */
class Google_Service_Replicapoolupdater_RollingUpdate extends Google_Model
{
  public $actionType;
  public $creationTimestamp;
  public $description;
  // Type metadata for the nested "error" object (see class docblock).
  protected $errorType = 'Google_Service_Replicapoolupdater_RollingUpdateError';
  protected $errorDataType = '';
  public $id;
  public $instanceGroup;
  public $instanceGroupManager;
  public $instanceTemplate;
  public $kind;
  public $oldInstanceTemplate;
  // Type metadata for the nested "policy" object (see class docblock).
  protected $policyType = 'Google_Service_Replicapoolupdater_RollingUpdatePolicy';
  protected $policyDataType = '';
  public $progress;
  public $selfLink;
  public $status;
  public $statusMessage;
  public $user;
  public function setActionType($actionType)
  {
    $this->actionType = $actionType;
  }
  public function getActionType()
  {
    return $this->actionType;
  }
  public function setCreationTimestamp($creationTimestamp)
  {
    $this->creationTimestamp = $creationTimestamp;
  }
  public function getCreationTimestamp()
  {
    return $this->creationTimestamp;
  }
  public function setDescription($description)
  {
    $this->description = $description;
  }
  public function getDescription()
  {
    return $this->description;
  }
  /**
   * @param Google_Service_Replicapoolupdater_RollingUpdateError $error
   */
  // NOTE(review): $this->error is not declared above; Google_Model
  // presumably supports it as a dynamic/magic property — verify.
  public function setError(Google_Service_Replicapoolupdater_RollingUpdateError $error)
  {
    $this->error = $error;
  }
  /**
   * @return Google_Service_Replicapoolupdater_RollingUpdateError
   */
  public function getError()
  {
    return $this->error;
  }
  public function setId($id)
  {
    $this->id = $id;
  }
  public function getId()
  {
    return $this->id;
  }
  public function setInstanceGroup($instanceGroup)
  {
    $this->instanceGroup = $instanceGroup;
  }
  public function getInstanceGroup()
  {
    return $this->instanceGroup;
  }
  public function setInstanceGroupManager($instanceGroupManager)
  {
    $this->instanceGroupManager = $instanceGroupManager;
  }
  public function getInstanceGroupManager()
  {
    return $this->instanceGroupManager;
  }
  public function setInstanceTemplate($instanceTemplate)
  {
    $this->instanceTemplate = $instanceTemplate;
  }
  public function getInstanceTemplate()
  {
    return $this->instanceTemplate;
  }
  public function setKind($kind)
  {
    $this->kind = $kind;
  }
  public function getKind()
  {
    return $this->kind;
  }
  public function setOldInstanceTemplate($oldInstanceTemplate)
  {
    $this->oldInstanceTemplate = $oldInstanceTemplate;
  }
  public function getOldInstanceTemplate()
  {
    return $this->oldInstanceTemplate;
  }
  /**
   * @param Google_Service_Replicapoolupdater_RollingUpdatePolicy $policy
   */
  // NOTE(review): $this->policy is likewise undeclared above — see setError.
  public function setPolicy(Google_Service_Replicapoolupdater_RollingUpdatePolicy $policy)
  {
    $this->policy = $policy;
  }
  /**
   * @return Google_Service_Replicapoolupdater_RollingUpdatePolicy
   */
  public function getPolicy()
  {
    return $this->policy;
  }
  public function setProgress($progress)
  {
    $this->progress = $progress;
  }
  public function getProgress()
  {
    return $this->progress;
  }
  public function setSelfLink($selfLink)
  {
    $this->selfLink = $selfLink;
  }
  public function getSelfLink()
  {
    return $this->selfLink;
  }
  public function setStatus($status)
  {
    $this->status = $status;
  }
  public function getStatus()
  {
    return $this->status;
  }
  public function setStatusMessage($statusMessage)
  {
    $this->statusMessage = $statusMessage;
  }
  public function getStatusMessage()
  {
    return $this->statusMessage;
  }
  public function setUser($user)
  {
    $this->user = $user;
  }
  public function getUser()
  {
    return $this->user;
  }
}
| mit |
frohoff/jdk8u-jdk | test/javax/management/MBeanInfo/NotificationInfoTest.java | 11219 | /*
* Copyright (c) 2004, 2008, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 5012634
* @summary Test that JMX classes use fully-qualified class names
* in MBeanNotificationInfo
* @author Eamonn McManus
* @run clean NotificationInfoTest
* @run build NotificationInfoTest
* @run main NotificationInfoTest
*/
import java.io.*;
import java.lang.management.*;
import java.lang.reflect.*;
import java.net.*;
import java.security.CodeSource;
import java.util.*;
import java.util.jar.*;
import javax.management.*;
import javax.management.relation.*;
import javax.management.remote.*;
import javax.management.remote.rmi.*;
/*
* This test finds all classes in the same code-base as the JMX
* classes that look like Standard MBeans, and checks that if they are
* NotificationBroadcasters they declare existent notification types.
* A class looks like a Standard MBean if both Thing and ThingMBean
* classes exist. So for example javax.management.timer.Timer looks
* like a Standard MBean because javax.management.timer.TimerMBean
* exists. Timer is instanceof NotificationBroadcaster, so we expect
* that ((NotificationBroadcaster) timer).getNotificationInfo() will
* return an array of MBeanNotificationInfo where each entry has a
* getName() that names an existent Java class that is a Notification.
*
* An MBean is "suspicious" if it is a NotificationBroadcaster but its
* MBeanNotificationInfo[] is empty. This is legal, but surprising.
*
* In order to call getNotificationInfo(), we need an instance of the
* class. We attempt to make one by calling a public no-arg
* constructor. But the "construct" method below can be extended to
* construct specific MBean classes for which the no-arg constructor
* doesn't exist.
*
* The test is obviously not exhaustive, but does catch the cases that
* failed in 5012634.
*/
public class NotificationInfoTest {
    // class or object names where the test failed
    private static final Set<String> failed = new TreeSet<String>();

    // class or object names where there were no MBeanNotificationInfo entries
    private static final Set<String> suspicious = new TreeSet<String>();

    public static void main(String[] args) throws Exception {
        System.out.println("Checking that all known MBeans that are " +
                           "NotificationBroadcasters have sane " +
                           "MBeanInfo.getNotifications()");

        System.out.println("Checking platform MBeans...");
        checkPlatformMBeans();

        CodeSource cs =
            javax.management.MBeanServer.class.getProtectionDomain()
            .getCodeSource();
        URL codeBase;
        if (cs == null) {
            // Bootstrap classes carry no CodeSource; fall back to the
            // well-known JDK locations under java.home.
            String javaHome = System.getProperty("java.home");
            String[] candidates = {"/lib/rt.jar", "/classes/"};
            codeBase = null;
            for (String candidate : candidates) {
                File file = new File(javaHome + candidate);
                if (file.exists()) {
                    codeBase = file.toURI().toURL();
                    break;
                }
            }
            if (codeBase == null) {
                throw new Exception(
                    "Could not determine codeBase for java.home=" + javaHome);
            }
        } else
            codeBase = cs.getLocation();

        System.out.println();
        System.out.println("Looking for standard MBeans...");
        String[] classes = findStandardMBeans(codeBase);
        System.out.println("Testing standard MBeans...");
        for (int i = 0; i < classes.length; i++) {
            String name = classes[i];
            Class<?> c;
            try {
                c = Class.forName(name);
            } catch (Throwable e) {
                System.out.println(name + ": cannot load (not public?): " + e);
                continue;
            }
            if (!NotificationBroadcaster.class.isAssignableFrom(c)) {
                System.out.println(name + ": not a NotificationBroadcaster");
                continue;
            }
            if (Modifier.isAbstract(c.getModifiers())) {
                System.out.println(name + ": abstract class");
                continue;
            }
            // We need an instance to call getNotificationInfo(); only
            // classes with a public no-arg constructor can be tested here.
            NotificationBroadcaster mbean;
            Constructor<?> constr;
            try {
                constr = c.getConstructor();
            } catch (Exception e) {
                System.out.println(name + ": no public no-arg constructor: "
                                   + e);
                continue;
            }
            try {
                mbean = (NotificationBroadcaster) constr.newInstance();
            } catch (Exception e) {
                System.out.println(name + ": no-arg constructor failed: " + e);
                continue;
            }
            check(mbean);
        }

        System.out.println();
        System.out.println("Testing some explicit cases...");
        check(new RelationService(false));
        /*
          We can't do this:
          check(new RequiredModelMBean());
          because the Model MBean spec more or less forces us to use the
          names GENERIC and ATTRIBUTE_CHANGE for its standard notifs.
        */
        checkRMIConnectorServer();

        System.out.println();
        if (!suspicious.isEmpty())
            System.out.println("SUSPICIOUS CLASSES: " + suspicious);
        if (failed.isEmpty())
            System.out.println("TEST PASSED");
        else {
            System.out.println("TEST FAILED: " + failed);
            System.exit(1);
        }
    }

    // Check a live broadcaster instance: delegates to the name/info overload.
    private static void check(NotificationBroadcaster mbean)
            throws Exception {
        System.out.print(mbean.getClass().getName() + ": ");

        check(mbean.getClass().getName(), mbean.getNotificationInfo());
    }

    // Check every MBean currently registered in the platform MBeanServer.
    private static void checkPlatformMBeans() throws Exception {
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();

        Set<ObjectName> mbeanNames = mbs.queryNames(null, null);

        for (ObjectName name : mbeanNames) {
            if (!mbs.isInstanceOf(name,
                                  NotificationBroadcaster.class.getName())) {
                System.out.println(name + ": not a NotificationBroadcaster");
            } else {
                MBeanInfo mbi = mbs.getMBeanInfo(name);
                check(name.toString(), mbi.getNotifications());
            }
        }
    }

    // RMIConnectorServer has no no-arg constructor, so construct it explicitly.
    private static void checkRMIConnectorServer() throws Exception {
        JMXServiceURL url = new JMXServiceURL("service:jmx:rmi://");
        RMIConnectorServer connector = new RMIConnectorServer(url, null);
        check(connector);
    }

    // Core assertion: every declared notification type must name a loadable
    // class that is Notification or a subclass of it.
    private static void check(String what, MBeanNotificationInfo[] mbnis) {
        System.out.print(what + ": checking notification info: ");

        if (mbnis.length == 0) {
            System.out.println("NONE (suspicious)");
            suspicious.add(what);
            return;
        }

        // Each MBeanNotificationInfo.getName() should be an existent
        // Java class that is Notification or a subclass of it
        for (int j = 0; j < mbnis.length; j++) {
            String notifClassName = mbnis[j].getName();
            Class notifClass;
            try {
                notifClass = Class.forName(notifClassName);
            } catch (Exception e) {
                System.out.print("FAILED(" + notifClassName + ": " + e +
                                 ") ");
                failed.add(what);
                continue;
            }
            if (!Notification.class.isAssignableFrom(notifClass)) {
                System.out.print("FAILED(" + notifClassName +
                                 ": not a Notification) ");
                failed.add(what);
                continue;
            }
            System.out.print("OK(" + notifClassName + ") ");
        }
        System.out.println();
    }

    // A "standard MBean" is a class Thing for which ThingMBean also exists
    // in the same code base.
    private static String[] findStandardMBeans(URL codeBase)
            throws Exception {
        Set<String> names;
        if (codeBase.getProtocol().equalsIgnoreCase("file")
            && codeBase.toString().endsWith("/"))
            names = findStandardMBeansFromDir(codeBase);
        else
            names = findStandardMBeansFromJar(codeBase);

        Set<String> standardMBeanNames = new TreeSet<String>();
        for (String name : names) {
            if (name.endsWith("MBean")) {
                String prefix = name.substring(0, name.length() - 5);
                if (names.contains(prefix))
                    standardMBeanNames.add(prefix);
            }
        }
        return standardMBeanNames.toArray(new String[0]);
    }

    // Scan a jar for .class entries, returning their dotted class names.
    private static Set<String> findStandardMBeansFromJar(URL codeBase)
            throws Exception {
        InputStream is = codeBase.openStream();
        JarInputStream jis = new JarInputStream(is);
        try {
            Set<String> names = new TreeSet<String>();
            JarEntry entry;
            while ((entry = jis.getNextJarEntry()) != null) {
                String name = entry.getName();
                if (!name.endsWith(".class"))
                    continue;
                name = name.substring(0, name.length() - 6);
                name = name.replace('/', '.');
                names.add(name);
            }
            return names;
        } finally {
            // Fix: the stream was previously leaked on every call; closing
            // the JarInputStream also closes the underlying URL stream.
            jis.close();
        }
    }

    // Scan an exploded-classes directory for .class files.
    private static Set<String> findStandardMBeansFromDir(URL codeBase)
            throws Exception {
        File dir = new File(new URI(codeBase.toString()));
        Set<String> names = new TreeSet<String>();
        scanDir(dir, "", names);
        return names;
    }

    // Recursively collect dotted class names under dir, prefixed by the
    // package path accumulated so far.
    private static void scanDir(File dir, String prefix, Set<String> names)
            throws Exception {
        File[] files = dir.listFiles();
        if (files == null)
            return;
        for (int i = 0; i < files.length; i++) {
            File f = files[i];
            String name = f.getName();
            String p = (prefix.equals("")) ? name : prefix + "." + name;
            if (f.isDirectory())
                scanDir(f, p, names);
            else if (name.endsWith(".class")) {
                p = p.substring(0, p.length() - 6);
                names.add(p);
            }
        }
    }
}
| gpl-2.0 |
jasmas/homebrew-cask | Casks/uxprotect.rb | 789 | cask 'uxprotect' do
  version '1.1.1'
  sha256 'e6a0472e35d04cf719e8841c813ff0e155550ab47d2e05c3a426a6815f05fbc2'

  # Download URL is version-interpolated; appcast drives livecheck updates.
  url "https://digitasecurity.com/download/uxprotect/UXProtect#{version}.zip"
  appcast 'https://digitasecurity.com/product/uxprotect/appcast.xml'
  name 'UXProtect'
  homepage 'https://digitasecurity.com/product/uxprotect/'

  auto_updates true
  depends_on macos: '>= :sierra'

  app 'UXProtect.app'

  # Residual files removed by `brew uninstall --zap`.
  zap trash: [
               '~/Library/Application Scripts/com.digitasecurity.UXProtect',
               '~/Library/Caches/KSCrash/UXProtect',
               '~/Library/Caches/com.digitasecurity.UXProtect',
               '~/Library/Containers/com.digitasecurity.UXProtect',
               '~/Library/Preferences/com.digitasecurity.UXProtect.plist',
             ]
end
| bsd-2-clause |
usami-k/homebrew-cask | Casks/bowtie.rb | 308 | cask 'bowtie' do
  version '1.5'
  sha256 'd8406b066851c0730ca052036bedd5ded82019403de1fd58b579da34cfa4a948'

  url "http://bowtieapp.com/bowtie-#{version}.zip"
  appcast 'http://updates.13bold.com/appcasts/bowtie'
  name 'Bowtie'
  homepage 'http://bowtieapp.com/'

  # The zip nests the .app inside a versioned folder.
  app "Bowtie #{version}/Bowtie.app"
end
| bsd-2-clause |
lucafavatella/intellij-community | platform/platform-api/src/com/intellij/openapi/editor/actionSystem/EditorTextInsertHandler.java | 1011 | /*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.editor.actionSystem;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.editor.Editor;
import com.intellij.util.Producer;
import java.awt.datatransfer.Transferable;
/**
* @author pegov
*/
public interface EditorTextInsertHandler {
  /**
   * Carries out a text-insertion operation in the given editor.
   *
   * @param editor      the target editor
   * @param dataContext action-system context in which the operation runs
   * @param producer    supplies the {@link Transferable} payload to insert;
   *                    presumably clipboard contents — confirm with callers
   */
  void execute(final Editor editor, final DataContext dataContext, final Producer<Transferable> producer);
}
| apache-2.0 |
smartdog23/zf2-na-pratica | vendor/zendframework/zendframework/library/Zend/Memory/Value.php | 3565 | <?php
/**
* Zend Framework (http://framework.zend.com/)
*
* @link http://github.com/zendframework/zf2 for the canonical source repository
* @copyright Copyright (c) 2005-2013 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
namespace Zend\Memory;
use ArrayAccess;
use Countable;
/**
* String value object
*
* It's an OO string wrapper.
* Used to intercept string updates.
*/
class Value implements ArrayAccess, Countable
{
    /**
     * Value
     *
     * @var string
     */
    private $value;

    /**
     * Container
     *
     * @var Container\Movable
     */
    private $container;

    /**
     * Boolean flag which signals to trace value modifications
     *
     * @var bool
     */
    private $trace;

    /**
     * Object constructor
     *
     * @param string $value
     * @param \Zend\Memory\Container\Movable $container
     */
    public function __construct($value, Container\Movable $container)
    {
        $this->container = $container;

        $this->value = (string) $value;

        /**
         * Object is marked as just modified by memory manager
         * So we don't need to trace followed object modifications and
         * object is processed (and marked as traced) when another
         * memory object is modified.
         *
         * It reduces overall number of calls necessary to modification trace
         */
        $this->trace = false;
    }

    /**
     * Countable
     *
     * Note: this is the byte length of the wrapped string (strlen), not a
     * multibyte character count.
     *
     * @return int
     */
    public function count()
    {
        return strlen($this->value);
    }

    /**
     * ArrayAccess interface method
     * returns true if string offset exists
     *
     * @param integer $offset
     * @return bool
     */
    public function offsetExists($offset)
    {
        return $offset >= 0  &&  $offset < strlen($this->value);
    }

    /**
     * ArrayAccess interface method
     * Get character at $offset position
     *
     * @param integer $offset
     * @return string
     */
    public function offsetGet($offset)
    {
        return $this->value[$offset];
    }

    /**
     * ArrayAccess interface method
     * Set character at $offset position
     *
     * @param integer $offset
     * @param string $char
     */
    public function offsetSet($offset, $char)
    {
        $this->value[$offset] = $char;

        if ($this->trace) {
            // First write since tracing was requested: notify the memory
            // manager exactly once, then stop tracing again.
            $this->trace = false;
            $this->container->processUpdate();
        }
    }

    /**
     * ArrayAccess interface method
     * Unset character at $offset position
     *
     * @param integer $offset
     */
    public function offsetUnset($offset)
    {
        unset($this->value[$offset]);

        if ($this->trace) {
            // Same one-shot notification as in offsetSet().
            $this->trace = false;
            $this->container->processUpdate();
        }
    }

    /**
     * To string conversion
     *
     * @return string
     */
    public function __toString()
    {
        return $this->value;
    }

    /**
     * Get string value reference
     *
     * _Must_ be used for value access before PHP v 5.2
     * or _may_ be used for performance considerations
     *
     * Writes through this reference bypass modification tracing.
     *
     * @internal
     * @return string
     */
    public function &getRef()
    {
        return $this->value;
    }

    /**
     * Start modifications trace
     *
     * _Must_ be used for value access before PHP v 5.2
     * or _may_ be used for performance considerations
     *
     * @internal
     */
    public function startTrace()
    {
        $this->trace = true;
    }
}
| bsd-3-clause |
shimingsg/corefx | src/Common/src/CoreLib/System/CharEnumerator.cs | 1927 | // Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
**
** Purpose: Enumerates the characters on a string. skips range
** checks.
**
**
============================================================*/
using System.Collections;
using System.Collections.Generic;
namespace System
{
    public sealed class CharEnumerator : IEnumerator, IEnumerator<char>, IDisposable, ICloneable
    {
        private string? _str;           // null once Dispose has run
        private int _index;             // -1 before first MoveNext; Length when exhausted
        private char _currentElement;   // cached char at _index

        internal CharEnumerator(string str)
        {
            _str = str;
            _index = -1;
        }

        /// <summary>Returns a shallow copy positioned at the same index.</summary>
        public object Clone()
        {
            return MemberwiseClone();
        }

        /// <summary>
        /// Advances to the next character; caches it so Current needs no
        /// range check. Pins the index at Length once the string is exhausted.
        /// </summary>
        public bool MoveNext()
        {
            if (_index < (_str!.Length - 1))
            {
                _index++;
                _currentElement = _str[_index];
                return true;
            }
            else
                _index = _str.Length;
            return false;
        }

        /// <summary>Releases the string reference; further MoveNext throws on _str!.</summary>
        public void Dispose()
        {
            if (_str != null)
                _index = _str.Length;
            _str = null;
        }

        object? IEnumerator.Current => Current;

        /// <summary>
        /// Current character; throws if enumeration has not started or has ended.
        /// </summary>
        public char Current
        {
            get
            {
                if (_index == -1)
                    throw new InvalidOperationException(SR.InvalidOperation_EnumNotStarted);
                if (_index >= _str!.Length)
                    throw new InvalidOperationException(SR.InvalidOperation_EnumEnded);
                return _currentElement;
            }
        }

        /// <summary>Rewinds to the position before the first character.</summary>
        public void Reset()
        {
            _currentElement = (char)0;
            _index = -1;
        }
    }
}
| mit |
themhz/Paid-Downloads-for-virtuemart | packages/payeddownloads/media/system/js/plugins/jqplot.canvasAxisTickRenderer.js | 9604 | /**
* jqPlot
* Pure JavaScript plotting plugin using jQuery
*
* Version: 1.0.4
* Revision: 1121
*
* Copyright (c) 2009-2012 Chris Leonello
* jqPlot is currently available for use in all personal or commercial projects
* under both the MIT (http://www.opensource.org/licenses/mit-license.php) and GPL
* version 2.0 (http://www.gnu.org/licenses/gpl-2.0.html) licenses. This means that you can
* choose the license that best suits your project and use it accordingly.
*
* Although not required, the author would appreciate an email letting him
* know of any substantial use of jqPlot. You can reach the author at:
* chris at jqplot dot com or see http://www.jqplot.com/info.php .
*
* If you are feeling kind and generous, consider supporting the project by
* making a donation at: http://www.jqplot.com/donate.php .
*
* sprintf functions contained in jqplot.sprintf.js by Ash Searle:
*
* version 2007.04.27
* author Ash Searle
* http://hexmen.com/blog/2007/03/printf-sprintf/
* http://hexmen.com/js/sprintf.js
* The author (Ash Searle) has placed this code in the public domain:
* "This code is unrestricted: you are free to use it however you like."
*
*/
(function($) {
    /**
     * Class: $.jqplot.CanvasAxisTickRenderer
     * Renderer to draw axis ticks with a canvas element to support advanced
     * features such as rotated text. This renderer uses a separate rendering engine
     * to draw the text on the canvas. Two modes of rendering the text are available.
     * If the browser has native font support for canvas fonts (currently Mozila 3.5
     * and Safari 4), you can enable text rendering with the canvas fillText method.
     * You do so by setting the "enableFontSupport" option to true.
     *
     * Browsers lacking native font support will have the text drawn on the canvas
     * using the Hershey font metrics. Even if the "enableFontSupport" option is true
     * non-supporting browsers will still render with the Hershey font.
     */
    $.jqplot.CanvasAxisTickRenderer = function(options) {
        // Group: Properties

        // prop: mark
        // tick mark on the axis. One of 'inside', 'outside', 'cross', '' or null.
        this.mark = 'outside';
        // prop: showMark
        // wether or not to show the mark on the axis.
        this.showMark = true;
        // prop: showGridline
        // wether or not to draw the gridline on the grid at this tick.
        this.showGridline = true;
        // prop: isMinorTick
        // if this is a minor tick.
        this.isMinorTick = false;
        // prop: angle
        // angle of text, measured clockwise from x axis.
        this.angle = 0;
        // prop: markSize
        // Length of the tick marks in pixels. For 'cross' style, length
        // will be stoked above and below axis, so total length will be twice this.
        this.markSize = 4;
        // prop: show
        // wether or not to show the tick (mark and label).
        this.show = true;
        // prop: showLabel
        // wether or not to show the label.
        this.showLabel = true;
        // prop: labelPosition
        // 'auto', 'start', 'middle' or 'end'.
        // Whether tick label should be positioned so the start, middle, or end
        // of the tick mark.
        this.labelPosition = 'auto';
        this.label = '';
        this.value = null;
        this._styles = {};
        // prop: formatter
        // A class of a formatter for the tick text.
        // The default $.jqplot.DefaultTickFormatter uses sprintf.
        this.formatter = $.jqplot.DefaultTickFormatter;
        // prop: formatString
        // string passed to the formatter.
        this.formatString = '';
        // prop: prefix
        // String to prepend to the tick label.
        // Prefix is prepended to the formatted tick label.
        this.prefix = '';
        // prop: fontFamily
        // css spec for the font-family css attribute.
        this.fontFamily = '"Trebuchet MS", Arial, Helvetica, sans-serif';
        // prop: fontSize
        // CSS spec for font size.
        this.fontSize = '10pt';
        // prop: fontWeight
        // CSS spec for fontWeight
        this.fontWeight = 'normal';
        // prop: fontStretch
        // Multiplier to condense or expand font width.
        // Applies only to browsers which don't support canvas native font rendering.
        this.fontStretch = 1.0;
        // prop: textColor
        // css spec for the color attribute.
        this.textColor = '#666666';
        // prop: enableFontSupport
        // true to turn on native canvas font support in Mozilla 3.5+ and Safari 4+.
        // If true, tick label will be drawn with canvas tag native support for fonts.
        // If false, tick label will be drawn with Hershey font metrics.
        this.enableFontSupport = true;
        // prop: pt2px
        // Point to pixel scaling factor, used for computing height of bounding box
        // around a label. The labels text renderer has a default setting of 1.4, which
        // should be suitable for most fonts. Leave as null to use default. If tops of
        // letters appear clipped, increase this. If bounding box seems too big, decrease.
        // This is an issue only with the native font renderering capabilities of Mozilla
        // 3.5 and Safari 4 since they do not provide a method to determine the font height.
        this.pt2px = null;

        this._elem;
        this._ctx;
        this._plotWidth;
        this._plotHeight;
        this._plotDimensions = {height:null, width:null};

        $.extend(true, this, options);

        // Push the font/angle/color settings down into the text renderer.
        var ropts = {fontSize:this.fontSize, fontWeight:this.fontWeight, fontStretch:this.fontStretch, fillStyle:this.textColor, angle:this.getAngleRad(), fontFamily:this.fontFamily};
        if (this.pt2px) {
            ropts.pt2px = this.pt2px;
        }

        if (this.enableFontSupport) {
            // Prefer the browser's native canvas text API when available;
            // otherwise fall back to the Hershey-font renderer.
            if ($.jqplot.support_canvas_text()) {
                this._textRenderer = new $.jqplot.CanvasFontRenderer(ropts);
            }

            else {
                this._textRenderer = new $.jqplot.CanvasTextRenderer(ropts);
            }
        }
        else {
            this._textRenderer = new $.jqplot.CanvasTextRenderer(ropts);
        }
    };

    // (Re)apply options and re-initialize the underlying text renderer.
    $.jqplot.CanvasAxisTickRenderer.prototype.init = function(options) {
        $.extend(true, this, options);
        this._textRenderer.init({fontSize:this.fontSize, fontWeight:this.fontWeight, fontStretch:this.fontStretch, fillStyle:this.textColor, angle:this.getAngleRad(), fontFamily:this.fontFamily});
    };

    // return width along the x axis
    // will check first to see if an element exists.
    // if not, will return the computed text box width
    // (bounding box of the possibly rotated label).
    $.jqplot.CanvasAxisTickRenderer.prototype.getWidth = function(ctx) {
        if (this._elem) {
            return this._elem.outerWidth(true);
        }
        else {
            var tr = this._textRenderer;
            var l = tr.getWidth(ctx);
            var h = tr.getHeight(ctx);
            var w = Math.abs(Math.sin(tr.angle)*h) + Math.abs(Math.cos(tr.angle)*l);
            return w;
        }
    };

    // return height along the y axis.
    $.jqplot.CanvasAxisTickRenderer.prototype.getHeight = function(ctx) {
        if (this._elem) {
            return this._elem.outerHeight(true);
        }
        else {
            var tr = this._textRenderer;
            var l = tr.getWidth(ctx);
            var h = tr.getHeight(ctx);
            var w = Math.abs(Math.cos(tr.angle)*h) + Math.abs(Math.sin(tr.angle)*l);
            return w;
        }
    };

    // Convert the configured angle (degrees, clockwise) to radians.
    $.jqplot.CanvasAxisTickRenderer.prototype.getAngleRad = function() {
        var a = this.angle * Math.PI/180;
        return a;
    };

    // Assign this tick's data value; axisName is accepted but unused here.
    $.jqplot.CanvasAxisTickRenderer.prototype.setTick = function(value, axisName, isMinor) {
        this.value = value;
        if (isMinor) {
            this.isMinorTick = true;
        }
        return this;
    };

    // Render the label onto a freshly acquired canvas and return it as a
    // jQuery element (not yet attached to the DOM).
    $.jqplot.CanvasAxisTickRenderer.prototype.draw = function(ctx, plot) {
        if (!this.label) {
            this.label = this.prefix + this.formatter(this.formatString, this.value);
        }

        // Memory Leaks patch
        if (this._elem) {
            if ($.jqplot.use_excanvas && window.G_vmlCanvasManager.uninitElement !== undefined) {
                window.G_vmlCanvasManager.uninitElement(this._elem.get(0));
            }

            this._elem.emptyForce();
            this._elem = null;
        }

        // create a canvas here, but can't draw on it untill it is appended
        // to dom for IE compatability.
        var elem = plot.canvasManager.getCanvas();

        this._textRenderer.setText(this.label, ctx);
        var w = this.getWidth(ctx);
        var h = this.getHeight(ctx);
        // canvases seem to need to have width and heigh attributes directly set.
        elem.width = w;
        elem.height = h;
        elem.style.width = w;
        elem.style.height = h;
        elem.style.textAlign = 'left';
        elem.style.position = 'absolute';

        elem = plot.canvasManager.initCanvas(elem);

        this._elem = $(elem);
        this._elem.css(this._styles);
        this._elem.addClass('jqplot-'+this.axis+'-tick');

        elem = null;
        return this._elem;
    };

    // Paint the label text into the (now DOM-attached) canvas element.
    $.jqplot.CanvasAxisTickRenderer.prototype.pack = function() {
        this._textRenderer.draw(this._elem.get(0).getContext("2d"), this.label);
    };
})(jQuery); | gpl-3.0 |
JosephCastro/selenium | py/test/selenium/webdriver/common/driver_element_finding_tests.py | 7171 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from selenium.webdriver.common.by import By
from selenium.common.exceptions import InvalidSelectorException
class DriverElementFindingTests(unittest.TestCase):
    """Element-location tests for the WebDriver API.

    Covers single- and multi-element lookups for every locator strategy
    (id, name, link text, partial link text, xpath, class name, tag name,
    css selector) and verifies that invalid locator arguments raise
    ``InvalidSelectorException``.

    ``self.driver`` and ``self.webserver`` are injected by the test
    harness that instantiates this case.
    """

    def testShouldFindElementById(self):
        self._loadSimplePage()
        e = self.driver.find_element_by_id("oneline")
        self.assertEqual("A single line of text", e.text)

    def testShouldFindElementByLinkText(self):
        self._loadSimplePage()
        e = self.driver.find_element_by_link_text("link with leading space")
        self.assertEqual("link with leading space", e.text)

    def testShouldFindElementByName(self):
        self._loadPage("nestedElements")
        e = self.driver.find_element_by_name("div1")
        self.assertEqual("hello world hello world", e.text)

    def testShouldFindElementByXPath(self):
        self._loadSimplePage()
        e = self.driver.find_element_by_xpath("/html/body/p[1]")
        self.assertEqual("A single line of text", e.text)

    def testShouldFindElementByClassName(self):
        self._loadPage("nestedElements")
        e = self.driver.find_element_by_class_name("one")
        self.assertEqual("Span with class of one", e.text)

    def testShouldFindElementByPartialLinkText(self):
        self._loadSimplePage()
        e = self.driver.find_element_by_partial_link_text("leading space")
        self.assertEqual("link with leading space", e.text)

    def testShouldFindElementByTagName(self):
        self._loadSimplePage()
        e = self.driver.find_element_by_tag_name("H1")
        self.assertEqual("Heading", e.text)

    def testShouldFindElementsById(self):
        self._loadPage("nestedElements")
        elements = self.driver.find_elements_by_id("test_id")
        self.assertEqual(2, len(elements))

    def testShouldFindElementsByLinkText(self):
        self._loadPage("nestedElements")
        elements = self.driver.find_elements_by_link_text("hello world")
        self.assertEqual(12, len(elements))

    def testShouldFindElementsByName(self):
        self._loadPage("nestedElements")
        elements = self.driver.find_elements_by_name("form1")
        self.assertEqual(4, len(elements))

    def testShouldFindElementsByXPath(self):
        self._loadPage("nestedElements")
        elements = self.driver.find_elements_by_xpath("//a")
        self.assertEqual(12, len(elements))

    def testShouldFindElementsByClassName(self):
        self._loadPage("nestedElements")
        elements = self.driver.find_elements_by_class_name("one")
        self.assertEqual(3, len(elements))

    def testShouldFindElementsByPartialLinkText(self):
        self._loadPage("nestedElements")
        elements = self.driver.find_elements_by_partial_link_text("world")
        self.assertEqual(12, len(elements))

    def testShouldFindElementsByTagName(self):
        self._loadPage("nestedElements")
        elements = self.driver.find_elements_by_tag_name("a")
        self.assertEqual(12, len(elements))

    def testShouldBeAbleToFindAnElementByCssSelector(self):
        self._loadPage("xhtmlTest")
        element = self.driver.find_element_by_css_selector("div.content")
        self.assertEqual("div", element.tag_name.lower())
        self.assertEqual("content", element.get_attribute("class"))

    def testShouldBeAbleToFindMultipleElementsByCssSelector(self):
        self._loadPage("frameset")
        elements = self.driver.find_elements_by_css_selector("frame")
        self.assertEqual(7, len(elements))
        elements = self.driver.find_elements_by_css_selector("frame#sixth")
        self.assertEqual(1, len(elements))
        self.assertEqual("frame", elements[0].tag_name.lower())
        self.assertEqual("sixth", elements[0].get_attribute("id"))

    # Invalid-locator tests: unittest's assertRaises context manager
    # replaces the old try / self.fail / except boilerplate.

    def testShouldThrowAnErrorIfUserPassesInInteger(self):
        self._loadSimplePage()
        with self.assertRaises(InvalidSelectorException):
            self.driver.find_element(By.ID, 333333)

    def testShouldThrowAnErrorIfUserPassesInTuple(self):
        self._loadSimplePage()
        with self.assertRaises(InvalidSelectorException):
            self.driver.find_element((By.ID, 333333))

    def testShouldThrowAnErrorIfUserPassesInNone(self):
        self._loadSimplePage()
        with self.assertRaises(InvalidSelectorException):
            self.driver.find_element(By.ID, None)

    def testShouldThrowAnErrorIfUserPassesInInvalidBy(self):
        self._loadSimplePage()
        with self.assertRaises(InvalidSelectorException):
            self.driver.find_element("css", "body")

    def testShouldThrowAnErrorIfUserPassesInIntegerWhenFindElements(self):
        self._loadSimplePage()
        with self.assertRaises(InvalidSelectorException):
            self.driver.find_elements(By.ID, 333333)

    def testShouldThrowAnErrorIfUserPassesInTupleWhenFindElements(self):
        self._loadSimplePage()
        with self.assertRaises(InvalidSelectorException):
            self.driver.find_elements((By.ID, 333333))

    def testShouldThrowAnErrorIfUserPassesInNoneWhenFindElements(self):
        self._loadSimplePage()
        with self.assertRaises(InvalidSelectorException):
            self.driver.find_elements(By.ID, None)

    def testShouldThrowAnErrorIfUserPassesInInvalidByWhenFindElements(self):
        self._loadSimplePage()
        with self.assertRaises(InvalidSelectorException):
            self.driver.find_elements("css", "body")

    def _pageURL(self, name):
        # Resolve a page name to a URL served by the harness web server.
        return self.webserver.where_is(name + '.html')

    def _loadSimplePage(self):
        self._loadPage("simpleTest")

    def _loadPage(self, name):
        self.driver.get(self._pageURL(name))
| apache-2.0 |
WuJiahu/phabricator | src/applications/phrequent/storage/PhrequentDAO.php | 137 | <?php
/**
 * Base Lisk DAO for Phrequent storage objects; pins all subclasses to the
 * "phrequent" application database namespace.
 */
abstract class PhrequentDAO extends PhabricatorLiskDAO {

  public function getApplicationName() {
    return 'phrequent';
  }

}
| apache-2.0 |
neveldo/cdnjs | ajax/libs/select2/3.4.8/select2_locale_ko.js | 809 | /**
* Select2 Korean translation.
*
* @author Swen Mun <longfinfunnel@gmail.com>
*/
(function ($) {
    "use strict";

    // Override Select2's default (English) messages with Korean strings.
    // n is the number of characters still to add/remove for the length limits.
    $.extend($.fn.select2.defaults, {
        formatNoMatches: function () { return "결과 없음"; },
        formatInputTooShort: function (input, min) { var n = min - input.length; return "너무 짧습니다. "+n+"글자 더 입력해주세요."; },
        formatInputTooLong: function (input, max) { var n = input.length - max; return "너무 깁니다. "+n+"글자 지워주세요."; },
        formatSelectionTooBig: function (limit) { return "최대 "+limit+"개까지만 선택하실 수 있습니다."; },
        formatLoadMore: function (pageNumber) { return "불러오는 중…"; },
        formatSearching: function () { return "검색 중…"; }
    });
})(jQuery);
| mit |
portworx/docker | cli/command/image/push.go | 1626 | package image
import (
"golang.org/x/net/context"
"github.com/docker/docker/cli"
"github.com/docker/docker/cli/command"
"github.com/docker/docker/pkg/jsonmessage"
"github.com/docker/docker/reference"
"github.com/docker/docker/registry"
"github.com/spf13/cobra"
)
// NewPushCommand creates a new `docker push` command.
// It accepts exactly one argument (the image reference) and delegates
// execution to runPush.
func NewPushCommand(dockerCli *command.DockerCli) *cobra.Command {
	cmd := &cobra.Command{
		Use:   "push [OPTIONS] NAME[:TAG]",
		Short: "Push an image or a repository to a registry",
		Args:  cli.ExactArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			return runPush(dockerCli, args[0])
		},
	}

	flags := cmd.Flags()

	// Register the content-trust flags; trust defaults to enabled here.
	command.AddTrustedFlags(flags, true)

	return cmd
}
// runPush parses and resolves the image reference "remote", resolves the
// registry auth config for its index, and pushes the image: via the
// content-trust path when trust is enabled, otherwise as a plain push whose
// JSON progress stream is rendered to the CLI's output.
func runPush(dockerCli *command.DockerCli, remote string) error {
	ref, err := reference.ParseNamed(remote)
	if err != nil {
		return err
	}

	// Resolve the Repository name from fqn to RepositoryInfo
	repoInfo, err := registry.ParseRepositoryInfo(ref)
	if err != nil {
		return err
	}

	ctx := context.Background()

	// Resolve the Auth config relevant for this server
	authConfig := command.ResolveAuthConfig(ctx, dockerCli, repoInfo.Index)
	// Callback used to (re)acquire credentials when the registry demands
	// additional privileges for "push".
	requestPrivilege := command.RegistryAuthenticationPrivilegedFunc(dockerCli, repoInfo.Index, "push")

	if command.IsTrusted() {
		return trustedPush(ctx, dockerCli, repoInfo, ref, authConfig, requestPrivilege)
	}

	responseBody, err := imagePushPrivileged(ctx, dockerCli, authConfig, ref.String(), requestPrivilege)
	if err != nil {
		return err
	}

	defer responseBody.Close()

	return jsonmessage.DisplayJSONMessagesToStream(responseBody, dockerCli.Out(), nil)
}
wcjohnson/babylon-lightscript | test/expressions/esprima/expression-binary-logical/migrated_0005/actual.js | 11 | x || y ^ z
| mit |
dsebastien/DefinitelyTyped | types/is-charging/is-charging-tests.ts | 91 | import isCharging = require('is-charging');

// Typings smoke test: dtslint checks the inferred type of the call below
// against the $ExpectType annotation.
// $ExpectType Promise<boolean>
isCharging();
glycerine/etcd | embed/util.go | 825 | // Copyright 2016 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package embed
import (
"path"
"github.com/coreos/etcd/wal"
)
// isMemberInitialized reports whether this member already has a WAL
// directory on disk, i.e. the node was bootstrapped previously. When
// cfg.WalDir is unset, the default location <data-dir>/member/wal is checked.
//
// NOTE(review): path.Join is used on a filesystem path here; filepath.Join
// would be more correct on Windows — confirm before changing (would also
// require swapping the "path" import for "path/filepath").
func isMemberInitialized(cfg *Config) bool {
	waldir := cfg.WalDir
	if waldir == "" {
		waldir = path.Join(cfg.Dir, "member", "wal")
	}

	return wal.Exist(waldir)
}
| apache-2.0 |
auth0/mdocs | public/components/socket.io-client/lib/transports/htmlfile.js | 3926 | /**
* socket.io
* Copyright(c) 2011 LearnBoost <dev@learnboost.com>
* MIT Licensed
*/
(function (exports, io) {
/**
* Expose constructor.
*/
exports.htmlfile = HTMLFile;
/**
* The HTMLFile transport creates a `forever iframe` based transport
* for Internet Explorer. Regular forever iframe implementations will
* continuously trigger the browsers buzy indicators. If the forever iframe
* is created inside a `htmlfile` these indicators will not be trigged.
*
* @constructor
* @extends {io.Transport.XHR}
* @api public
*/
function HTMLFile (socket) {
    // All connection state/setup is inherited from the XHR transport.
    io.Transport.XHR.apply(this, arguments);
};
/**
* Inherits from XHR transport.
*/
io.util.inherit(HTMLFile, io.Transport.XHR);
/**
* Transport name
*
* @api public
*/
HTMLFile.prototype.name = 'htmlfile';
/**
* Creates a new ActiveX `htmlfile` with a forever loading iframe
* that can be used to listen to messages. Inside the generated
* `htmlfile` a reference will be made to the HTMLFile transport.
*
* @api private
*/
HTMLFile.prototype.get = function () {
    // Host the forever-iframe inside an ActiveX 'htmlfile' document so IE
    // does not show busy indicators while the connection stays open.
    this.doc = new ActiveXObject('htmlfile');
    this.doc.open();
    this.doc.write('<html></html>');
    this.doc.close();
    // Expose this transport inside the htmlfile; server-written scripts
    // call back through `s` (see HTMLFile.prototype._).
    this.doc.parentWindow.s = this;

    var iframeC = this.doc.createElement('div');
    iframeC.className = 'socketio';

    this.doc.body.appendChild(iframeC);
    this.iframe = this.doc.createElement('iframe');

    iframeC.appendChild(this.iframe);

    var self = this
      // Cache-busting timestamp appended to the configured query string.
      , query = io.util.query(this.socket.options.query, 't='+ +new Date);

    this.iframe.src = this.prepareUrl() + query;

    io.util.on(window, 'unload', function () {
        self.destroy();
    });
};
/**
* The Socket.IO server will write script tags inside the forever
* iframe, this function will be used as callback for the incoming
* information.
*
* @param {String} data The message
* @param {document} doc Reference to the context
* @api private
*/
HTMLFile.prototype._ = function (data, doc) {
  this.onData(data);
  // Remove the just-executed <script> tag so the forever iframe's DOM does
  // not grow unboundedly; ignore failures (the document may already be gone).
  try {
    var script = doc.getElementsByTagName('script')[0];
    script.parentNode.removeChild(script);
  } catch (e) { }
};
/**
* Destroy the established connection, iframe and `htmlfile`.
* And calls the `CollectGarbage` function of Internet Explorer
* to release the memory.
*
* @api private
*/
HTMLFile.prototype.destroy = function () {
  if (this.iframe){
    // Stop any in-flight streaming before detaching the iframe.
    try {
      this.iframe.src = 'about:blank';
    } catch(e){}
    this.doc = null;
    this.iframe.parentNode.removeChild(this.iframe);
    this.iframe = null;
    // IE-specific: force JScript garbage collection so the htmlfile
    // document's memory is reclaimed immediately.
    CollectGarbage();
  }
};
/**
* Disconnects the established connection.
*
* @returns {Transport} Chaining.
* @api public
*/
HTMLFile.prototype.close = function () {
  // Release the iframe/htmlfile resources first, then let the XHR
  // transport close the underlying connection.
  this.destroy();
  return io.Transport.XHR.prototype.close.call(this);
};
/**
* Checks if the browser supports this transport. The browser
* must have an `ActiveXObject` implementation.
*
* @return {Boolean}
* @api public
*/
HTMLFile.check = function (socket) {
  // The transport requires IE's ActiveX `htmlfile` object AND a working
  // XHR transport underneath; instantiating probes actual availability
  // (ActiveX may be present but disabled by security settings).
  if (typeof window != "undefined" && 'ActiveXObject' in window){
    try {
      var a = new ActiveXObject('htmlfile');
      return a && io.Transport.XHR.check(socket);
    } catch(e){}
  }
  return false;
};
/**
* Check if cross domain requests are supported.
*
* @returns {Boolean}
* @api public
*/
HTMLFile.xdomainCheck = function () {
  // Cross-domain streaming is not supported by this transport.
  // we can probably do handling for sub-domains, we should
  // test that it's cross domain but a subdomain here
  return false;
};
/**
* Add the transport to your public io.transports array.
*
* @api private
*/
io.transports.push('htmlfile');
})(
'undefined' != typeof io ? io.Transport : module.exports
, 'undefined' != typeof io ? io : module.parent.exports
);
| mit |
oaklen/Shelf | node_modules/express-unless/index.js | 1685 | var URL = require('url');
module.exports = function (options) {
  // `this` is the middleware that unless() was attached to.
  var parent = this;
  // A bare function argument is shorthand for {custom: fn}.
  var opts = typeof options === 'function' ? {custom: options} : options;
  opts.useOriginalUrl = (typeof opts.useOriginalUrl === 'undefined') ? true : opts.useOriginalUrl;
  // Returned middleware: skip `parent` when any exclusion rule matches.
  return function (req, res, next) {
    var url = URL.parse((opts.useOriginalUrl ? req.originalUrl : req.url) || req.url || '', true);
    var skip = false;
    // 1. Custom predicate: skip when it returns truthy for this request.
    if (opts.custom) {
      skip = skip || opts.custom(req);
    }
    // 2. Path rules (string, RegExp, or {url, methods} descriptors).
    // When opts.path is absent, `paths` is undefined and the block is skipped.
    var paths = !opts.path || Array.isArray(opts.path) ?
      opts.path : [opts.path];
    if (paths) {
      skip = skip || paths.some(function (p) {
        return isUrlMatch(p, url.pathname) && isMethodMatch(p.methods, req.method);
      });
    }
    // 3. Extension rules: compare the pathname's suffix against each ext.
    var exts = (!opts.ext || Array.isArray(opts.ext)) ?
      opts.ext : [opts.ext];
    if (exts) {
      skip = skip || exts.some(function (ext) {
        return url.pathname.substr(ext.length * -1) === ext;
      });
    }
    // 4. HTTP-method rules.
    var methods = (!opts.method || Array.isArray(opts.method)) ?
      opts.method : [opts.method];
    if (methods) {
      skip = skip || !!~methods.indexOf(req.method);
    }
    if (skip) {
      return next();
    }
    // No exclusion matched: run the wrapped middleware.
    parent(req, res, next);
  };
};
function isUrlMatch(p, url) {
var ret = (typeof p === 'string' && p === url) || (p instanceof RegExp && !!p.exec(url));
if (p instanceof RegExp) {
p.lastIndex = 0;
}
if (p && p.url) {
ret = isUrlMatch(p.url, url)
}
return ret;
}
function isMethodMatch(methods, m) {
if (!methods) {
return true;
}
methods = Array.isArray(methods) ? methods : [methods];
return !!~methods.indexOf(m);
}
| mit |
shuhei/babel | packages/babylon/test/fixtures/esprima/expression-binary/migrated_0011/input.js | 12 | x << y << z
| mit |
TaurusTiger/binnavi | src/main/java/com/google/security/zynamics/binnavi/ZyGraph/Menus/Actions/CActionExpandNode.java | 1405 | /*
Copyright 2015 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.binnavi.ZyGraph.Menus.Actions;
import java.awt.event.ActionEvent;
import javax.swing.AbstractAction;
import com.google.security.zynamics.binnavi.disassembly.CGroupNode;
/**
 * Action class used for expanding group nodes.
 */
public final class CActionExpandNode extends AbstractAction {
  /**
   * Used for serialization.
   */
  private static final long serialVersionUID = 4035524523936320056L;

  /**
   * Group node to be expanded.
   */
  private final CGroupNode m_node;

  /**
   * Creates a new action object.
   *
   * @param node Group node to be expanded.
   */
  public CActionExpandNode(final CGroupNode node) {
    super("Expand Group");
    m_node = node;
  }

  @Override
  public void actionPerformed(final ActionEvent event) {
    // Expanding a group is modeled as un-collapsing it; the event itself
    // carries no extra information and is ignored.
    m_node.setCollapsed(false);
  }
}
| apache-2.0 |
TaurusTiger/binnavi | src/main/java/com/google/security/zynamics/binnavi/debug/connection/packets/commands/CancelTargetSelectionCommand.java | 1229 | /*
Copyright 2015 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.binnavi.debug.connection.packets.commands;
import com.google.security.zynamics.binnavi.debug.connection.DebugCommandType;
/**
 * Command class for canceling target selection. This debug message should be sent when the user
 * canceled the selection of a target file or a target process.
 */
public final class CancelTargetSelectionCommand extends DebugCommand {
  /**
   * Creates a new cancel target selection command.
   *
   * @param packetId Packet ID of the command.
   */
  public CancelTargetSelectionCommand(final int packetId) {
    // No payload arguments: the command type and packet ID are all the
    // debugger needs to process the cancellation.
    super(DebugCommandType.CMD_CANCEL_TARGET_SELECTION, packetId);
  }
}
| apache-2.0 |
iuliat/nova | nova/virt/disk/vfs/localfs.py | 6103 | # Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import tempfile
from oslo_log import log as logging
from oslo_utils import excutils
from nova import exception
from nova.i18n import _
from nova import utils
from nova.virt.disk.mount import api as mount_api
from nova.virt.disk.vfs import api as vfs
LOG = logging.getLogger(__name__)
class VFSLocalFS(vfs.VFS):
    """
    This class implements a VFS module that is mapped to a virtual
    root directory present on the host filesystem. This implementation
    uses the nova.virt.disk.mount.Mount API to make virtual disk
    images visible in the host filesystem. If the disk format is
    raw, it will use the loopback mount impl, otherwise it will
    use the qemu-nbd impl.
    """
    # NOTE: the class docstring above and the description of
    # _canonical_path below were previously swapped: the class docstring
    # documented _canonical_path, and the class description sat as a dead
    # bare-string statement after _canonical_path. They are now in their
    # proper places; no executable behavior has changed.

    def _canonical_path(self, path):
        """os.path.join() with safety check for injected file paths.

        Join the supplied path components and make sure that the
        resulting path we are injecting into is within the
        mounted guest fs. Trying to be clever and specifying a
        path with '..' in it will hit this safeguard.

        :param path: guest-relative path to resolve
        :raises exception.Invalid: if the resolved path escapes imgdir
        """
        # readlink -nm canonicalizes the path without requiring it to exist.
        canonpath, _err = utils.execute(
            'readlink', '-nm',
            os.path.join(self.imgdir, path.lstrip("/")),
            run_as_root=True)
        if not canonpath.startswith(os.path.realpath(self.imgdir) + '/'):
            raise exception.Invalid(_('File path %s not valid') % path)
        return canonpath

    def __init__(self, image, partition=None, imgdir=None):
        """Create a new local VFS instance

        :param image: instance of nova.virt.image.model.Image
        :param partition: the partition number of access
        :param imgdir: the directory to mount the image at
        """
        super(VFSLocalFS, self).__init__(image, partition)
        self.imgdir = imgdir
        self.mount = None

    def setup(self, mount=True):
        """Mount the image under a fresh temporary directory.

        :param mount: if False, only prepare the mount object without
                      actually mounting the image
        :raises exception.NovaException: if mounting fails
        """
        self.imgdir = tempfile.mkdtemp(prefix="openstack-vfs-localfs")
        try:
            mnt = mount_api.Mount.instance_for_format(self.image,
                                                      self.imgdir,
                                                      self.partition)
            if mount:
                if not mnt.do_mount():
                    raise exception.NovaException(mnt.error)
            self.mount = mnt
        except Exception as e:
            # Clean up the temporary directory, then re-raise the original
            # exception for the caller.
            with excutils.save_and_reraise_exception():
                LOG.debug("Failed to mount image: %(ex)s", {'ex': e})
                self.teardown()

    def teardown(self):
        """Unmount the image and remove the temporary mount directory.

        Failures are logged but never raised: teardown is best-effort.
        """
        try:
            if self.mount:
                self.mount.do_teardown()
        except Exception as e:
            LOG.debug("Failed to unmount %(imgdir)s: %(ex)s",
                      {'imgdir': self.imgdir, 'ex': e})
        try:
            if self.imgdir:
                os.rmdir(self.imgdir)
        except Exception as e:
            LOG.debug("Failed to remove %(imgdir)s: %(ex)s",
                      {'imgdir': self.imgdir, 'ex': e})
        self.imgdir = None
        self.mount = None

    def make_path(self, path):
        """Create a directory (and parents) inside the mounted guest fs."""
        LOG.debug("Make directory path=%s", path)
        canonpath = self._canonical_path(path)
        utils.execute('mkdir', '-p', canonpath, run_as_root=True)

    def append_file(self, path, content):
        """Append content to a file inside the mounted guest fs."""
        LOG.debug("Append file path=%s", path)
        canonpath = self._canonical_path(path)
        args = ["-a", canonpath]
        kwargs = dict(process_input=content, run_as_root=True)
        utils.execute('tee', *args, **kwargs)

    def replace_file(self, path, content):
        """Overwrite a file inside the mounted guest fs with content."""
        LOG.debug("Replace file path=%s", path)
        canonpath = self._canonical_path(path)
        args = [canonpath]
        kwargs = dict(process_input=content, run_as_root=True)
        utils.execute('tee', *args, **kwargs)

    def read_file(self, path):
        """Return the contents of a file inside the mounted guest fs."""
        LOG.debug("Read file path=%s", path)
        canonpath = self._canonical_path(path)
        return utils.read_file_as_root(canonpath)

    def has_file(self, path):
        """Return a truthy value if the path exists in the guest fs."""
        LOG.debug("Has file path=%s", path)
        canonpath = self._canonical_path(path)
        # readlink -e exits non-zero when the target does not exist, in
        # which case trycmd returns empty output (falsy).
        exists, _err = utils.trycmd('readlink', '-e',
                                    canonpath,
                                    run_as_root=True)
        return exists

    def set_permissions(self, path, mode):
        """chmod a path inside the mounted guest fs to the octal mode."""
        LOG.debug("Set permissions path=%(path)s mode=%(mode)o",
                  {'path': path, 'mode': mode})
        canonpath = self._canonical_path(path)
        utils.execute('chmod', "%o" % mode, canonpath, run_as_root=True)

    def set_ownership(self, path, user, group):
        """chown/chgrp a path inside the mounted guest fs.

        With both user and group, runs ``chown user:group``; with only a
        user, ``chown user``; with only a group, ``chgrp group``.
        """
        LOG.debug("Set permissions path=%(path)s "
                  "user=%(user)s group=%(group)s",
                  {'path': path, 'user': user, 'group': group})
        canonpath = self._canonical_path(path)
        owner = None
        cmd = "chown"
        if group is not None and user is not None:
            owner = user + ":" + group
        elif user is not None:
            owner = user
        elif group is not None:
            owner = group
            cmd = "chgrp"
        if owner is not None:
            utils.execute(cmd, owner, canonpath, run_as_root=True)

    def get_image_fs(self):
        """Return the filesystem type of the mounted image, or ''."""
        if self.mount.device or self.mount.get_dev():
            # blkid exit code 2 means "no filesystem found"; treat that as
            # success with empty output rather than an error.
            out, err = utils.execute('blkid', '-o',
                                     'value', '-s',
                                     'TYPE', self.mount.device,
                                     run_as_root=True,
                                     check_exit_code=[0, 2])
            return out.strip()
        return ""
| apache-2.0 |
BPI-SINOVOIP/BPI-Mainline-kernel | linux-5.4/tools/perf/scripts/python/stackcollapse.py | 4408 | # stackcollapse.py - format perf samples with one line per distinct call stack
# SPDX-License-Identifier: GPL-2.0
#
# This script's output has two space-separated fields. The first is a semicolon
# separated stack including the program name (from the "comm" field) and the
# function names from the call stack. The second is a count:
#
# swapper;start_kernel;rest_init;cpu_idle;default_idle;native_safe_halt 2
#
# The file is sorted according to the first field.
#
# Input may be created and processed using:
#
# perf record -a -g -F 99 sleep 60
# perf script report stackcollapse > out.stacks-folded
#
# (perf script record stackcollapse works too).
#
# Written by Paolo Bonzini <pbonzini@redhat.com>
# Based on Brendan Gregg's stackcollapse-perf.pl script.
from __future__ import print_function
import os
import sys
from collections import defaultdict
from optparse import OptionParser, make_option
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from EventClass import *
# command line parsing
option_list = [
    # formatting options for the bottom entry of the stack
    make_option("--include-tid", dest="include_tid",
                action="store_true", default=False,
                help="include thread id in stack"),
    make_option("--include-pid", dest="include_pid",
                action="store_true", default=False,
                help="include process id in stack"),
    make_option("--no-comm", dest="include_comm",
                action="store_false", default=True,
                help="do not separate stacks according to comm"),
    make_option("--tidy-java", dest="tidy_java",
                action="store_true", default=False,
                help="beautify Java signatures"),
    make_option("--kernel", dest="annotate_kernel",
                action="store_true", default=False,
                help="annotate kernel functions with _[k]")
]

parser = OptionParser(option_list=option_list)
(opts, args) = parser.parse_args()

if len(args) != 0:
    parser.error("unexpected command line argument")
# tid/pid are rendered as suffixes of the comm entry, so either one
# requires comm to be included.
if opts.include_tid and not opts.include_comm:
    parser.error("requesting tid but not comm is invalid")
if opts.include_pid and not opts.include_comm:
    parser.error("requesting pid but not comm is invalid")

# event handlers

# Map of folded-stack string -> sample count.
lines = defaultdict(lambda: 0)
def process_event(param_dict):
    """Accumulate one perf sample into the global `lines` histogram.

    The sample's callchain (or its single symbol, when no callchain is
    present) is folded into a semicolon-separated stack string,
    optionally suffixed with the comm/pid/tid entry, and that string's
    count in `lines` is incremented.
    """
    def tidy_function_name(sym, dso):
        # Normalize one frame name; ';' is the stack separator, so it
        # must never appear inside a frame.
        if sym is None:
            sym = '[unknown]'
        sym = sym.replace(';', ':')
        if opts.tidy_java:
            # the original stackcollapse-perf.pl script gives the
            # example of converting this:
            #   Lorg/mozilla/javascript/MemberBox;.<init>(Ljava/lang/reflect/Method;)V
            # to this:
            #   org/mozilla/javascript/MemberBox:.init
            sym = sym.replace('<', '')
            sym = sym.replace('>', '')
            if sym[0] == 'L' and sym.find('/'):
                sym = sym[1:]
            try:
                sym = sym[:sym.index('(')]
            except ValueError:
                pass
        if opts.annotate_kernel and dso == '[kernel.kallsyms]':
            return sym + '_[k]'
        else:
            return sym

    stack = list()
    if 'callchain' in param_dict:
        for entry in param_dict['callchain']:
            # Entries may lack sym/dso info; default them to None.
            entry.setdefault('sym', dict())
            entry['sym'].setdefault('name', None)
            entry.setdefault('dso', None)
            stack.append(tidy_function_name(entry['sym']['name'],
                                            entry['dso']))
    else:
        # No callchain: fall back to the single sampled symbol.
        param_dict.setdefault('symbol', None)
        param_dict.setdefault('dso', None)
        stack.append(tidy_function_name(param_dict['symbol'],
                                        param_dict['dso']))

    if opts.include_comm:
        comm = param_dict["comm"].replace(' ', '_')
        sep = "-"
        if opts.include_pid:
            comm = comm + sep + str(param_dict['sample']['pid'])
            sep = "/"
        if opts.include_tid:
            comm = comm + sep + str(param_dict['sample']['tid'])
        stack.append(comm)

    # Frames were collected leaf-first; the folded format is root-first.
    stack_string = ';'.join(reversed(stack))
    lines[stack_string] = lines[stack_string] + 1
def trace_end():
    """Print one "stack count" line per distinct folded stack.

    Output is sorted by the stack string so runs are comparable.
    """
    # NOTE: the original bound sorted(lines) to a variable named `list`,
    # shadowing the builtin; iterate the sorted keys directly instead.
    for stack in sorted(lines):
        print("%s %d" % (stack, lines[stack]))
| gpl-2.0 |
rookweb/Projet_Owo | assets/dist/plugins/ckeditor/plugins/placeholder/lang/ko.js | 435 | /*
Copyright (c) 2003-2016, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
// Machine-generated (minified) Korean locale strings for the CKEditor
// "placeholder" plugin; do not edit by hand — regenerate from the sources.
CKEDITOR.plugins.setLang("placeholder","ko",{title:"플레이스홀더 속성",toolbar:"플레이스홀더",name:"플레이스홀더 이름",invalidName:"플레이스홀더는 빈칸이거나 다음 문자열을 포함할 수 없습니다: [, ], \x3c, \x3e",pathName:"플레이스홀더"});
tommy3/Urho3D | Source/ThirdParty/toluapp/src/bin/lua/clean.lua | 1336 | -- mark up comments and strings
-- Sentinel bytes (\001-\007) cannot occur in ordinary Lua source text, so
-- they can temporarily stand in for quote and comment markers while the
-- cleaner scans the code.
STR1 = "\001" -- stands for '
STR2 = "\002" -- stands for "
STR3 = "\003" -- stands for [[
STR4 = "\004" -- stands for ]]
REM = "\005" -- stands for --
ANY = "([\001-\005])"
ESC1 = "\006" -- stands for \'
ESC2 = "\007" -- stands for \"
MASK = { -- the substitution order is important
	-- escaped quotes must be masked before plain quotes, and the long
	-- bracket / comment tokens after them
	{ESC1, "\\'"},
	{ESC2, '\\"'},
	{STR1, "'"},
	{STR2, '"'},
	{STR3, "%[%["},
	{STR4, "%]%]"},
	{REM , "%-%-"},
}
-- Replace every quote/comment token in `s` by its sentinel byte,
-- applying the MASK substitutions in declared order.
function mask (s)
	local count = getn(MASK)
	for idx = 1, count do
		s = gsub(s, MASK[idx][2], MASK[idx][1])
	end
	return s
end
-- Inverse of mask(): restore the original tokens for every sentinel
-- byte, in the same declared order.
function unmask (s)
	local count = getn(MASK)
	for idx = 1, count do
		s = gsub(s, MASK[idx][1], MASK[idx][2])
	end
	return s
end
-- Strip comments and redundant whitespace from a chunk of Lua source.
-- Returns nil when the chunk does not compile; returns the chunk
-- untouched when the -C flag is set.
function clean (s)
	-- check for compilation error
	local code = "return function ()\n" .. s .. "\n end"
	if not dostring(code) then
		return nil
	end

	if flags['C'] then
		return s
	end

	local S = "" -- saved string

	s = mask(s)

	-- remove blanks and comments
	while 1 do
		local b,e,d = strfind(s,ANY)
		if b then
			S = S..strsub(s,1,b-1)
			s = strsub(s,b+1)
			if d==STR1 or d==STR2 then
				-- quoted string: copy verbatim through the closing quote
				e = strfind(s,d)
				S = S ..d..strsub(s,1,e)
				s = strsub(s,e+1)
			elseif d==STR3 then
				-- long [[ ]] string: copy verbatim through the closing ]]
				e = strfind(s,STR4)
				S = S..d..strsub(s,1,e)
				s = strsub(s,e+1)
			elseif d==REM then
				-- comment: drop everything up to (keeping) the newline
				s = gsub(s,"[^\n]*(\n?)","%1",1)
			end
		else
			-- no more sentinels: keep the remainder and stop
			S = S..s
			break
		end
	end

	-- eliminate unecessary spaces
	S = gsub(S,"[ \t]+"," ")
	S = gsub(S,"[ \t]*\n[ \t]*","\n")
	S = gsub(S,"\n+","\n")

	S = unmask(S)
	return S
end
| mit |
arrivu/jigsaw-lms | spec/messages/new_files_added.email.erb_spec.rb | 1009 | #
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
require File.expand_path(File.dirname(__FILE__) + '/messages_helper')
# Smoke test: the new_files_added email template must render without
# raising when given an attachment and a file count.
describe 'new_files_added.email' do
  it "should render" do
    attachment_model
    @object = @attachment
    generate_message(:new_files_added, :email, @object, :data => {:count => 5})
  end
end
| agpl-3.0 |
Ghands/go | src/encoding/gob/codec_test.go | 35016 | // Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gob
import (
"bytes"
"errors"
"flag"
"math"
"math/rand"
"reflect"
"strings"
"testing"
"time"
)
// doFuzzTests gates the large, very slow fuzzing tests behind -gob.fuzz.
var doFuzzTests = flag.Bool("gob.fuzz", false, "run the fuzz tests, which are large and very slow")
// Guarantee encoding format by comparing some encodings to hand-written values
type EncodeT struct {
	x uint64 // value to encode
	b []byte // its expected wire image
}

// encodeT pins the variable-length unsigned-integer encoding: values that
// fit in one byte are written directly; larger values are written as a
// negated byte count followed by the value's big-endian bytes.
var encodeT = []EncodeT{
	{0x00, []byte{0x00}},
	{0x0F, []byte{0x0F}},
	{0xFF, []byte{0xFF, 0xFF}},
	{0xFFFF, []byte{0xFE, 0xFF, 0xFF}},
	{0xFFFFFF, []byte{0xFD, 0xFF, 0xFF, 0xFF}},
	{0xFFFFFFFF, []byte{0xFC, 0xFF, 0xFF, 0xFF, 0xFF}},
	{0xFFFFFFFFFF, []byte{0xFB, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}},
	{0xFFFFFFFFFFFF, []byte{0xFA, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}},
	{0xFFFFFFFFFFFFFF, []byte{0xF9, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}},
	{0xFFFFFFFFFFFFFFFF, []byte{0xF8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}},
	{0x1111, []byte{0xFE, 0x11, 0x11}},
	{0x1111111111111111, []byte{0xF8, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11}},
	{0x8888888888888888, []byte{0xF8, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88}},
	{1 << 63, []byte{0xF8, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}},
}
// testError is meant to be used as a deferred function to turn a panic(gobError) into a
// plain test.Error call.
func testError(t *testing.T) {
	if e := recover(); e != nil {
		t.Error(e.(gobError).err) // Will re-panic if not one of our errors, such as a runtime error.
	}
	// (A redundant bare `return` previously ended this void function.)
}
// newDecBuffer wraps the raw bytes in a decBuffer ready for decoding.
func newDecBuffer(data []byte) *decBuffer {
	return &decBuffer{data: data}
}
// Test basic encode/decode routines for unsigned integers
func TestUintCodec(t *testing.T) {
	defer testError(t)
	b := new(encBuffer)
	encState := newEncoderState(b)
	// First pin the wire format against the hand-written table.
	for _, tt := range encodeT {
		b.Reset()
		encState.encodeUint(tt.x)
		if !bytes.Equal(tt.b, b.Bytes()) {
			t.Errorf("encodeUint: %#x encode: expected % x got % x", tt.x, tt.b, b.Bytes())
		}
	}
	// Then round-trip a geometric spread of values until the top bit is set.
	for u := uint64(0); ; u = (u + 1) * 7 {
		b.Reset()
		encState.encodeUint(u)
		decState := newDecodeState(newDecBuffer(b.Bytes()))
		v := decState.decodeUint()
		if u != v {
			t.Errorf("Encode/Decode: sent %#x received %#x", u, v)
		}
		if u&(1<<63) != 0 {
			break
		}
	}
}
// verifyInt round-trips a single signed value through encodeInt/decodeInt
// and reports a test error if it comes back changed.
func verifyInt(i int64, t *testing.T) {
	defer testError(t)
	var b = new(encBuffer)
	encState := newEncoderState(b)
	encState.encodeInt(i)
	decState := newDecodeState(newDecBuffer(b.Bytes()))
	decState.buf = make([]byte, 8)
	j := decState.decodeInt()
	if i != j {
		t.Errorf("Encode/Decode: sent %#x received %#x", uint64(i), uint64(j))
	}
}
// Test basic encode/decode routines for signed integers
func TestIntCodec(t *testing.T) {
	// Geometric spread of magnitudes until the top bit is set.
	for u := uint64(0); ; u = (u + 1) * 7 {
		// Do positive and negative values
		i := int64(u)
		verifyInt(i, t)
		verifyInt(-i, t)
		verifyInt(^i, t)
		if u&(1<<63) != 0 {
			break
		}
	}
	verifyInt(-1<<63, t) // a tricky case
}
// In each expected image, the leading 0x07 is the field-number delta
// (state starts at field -1, so delta 7 marks field 6, which the
// instruction tests below use).

// The result of encoding a true boolean with field number 7
var boolResult = []byte{0x07, 0x01}

// The result of encoding a number 17 with field number 7
var signedResult = []byte{0x07, 2 * 17}
var unsignedResult = []byte{0x07, 17}
var floatResult = []byte{0x07, 0xFE, 0x31, 0x40}

// The result of encoding a number 17+19i with field number 7
var complexResult = []byte{0x07, 0xFE, 0x31, 0x40, 0xFE, 0x33, 0x40}

// The result of encoding "hello" with field number 7
var bytesResult = []byte{0x07, 0x05, 'h', 'e', 'l', 'l', 'o'}
// newDecodeState returns a decoderState reading from buf, with a scratch
// buffer large enough to hold one uint64.
func newDecodeState(buf *decBuffer) *decoderState {
	state := &decoderState{
		b:   buf,
		buf: make([]byte, uint64Size),
	}
	return state
}
// newEncoderState resets b and returns an encoderState writing into it,
// with fieldnum primed to -1 (no field encoded yet).
func newEncoderState(b *encBuffer) *encoderState {
	b.Reset()
	return &encoderState{enc: nil, b: b, fieldnum: -1}
}
// Test instruction execution for encoding.
// Do not run the machine yet; instead do individual instructions crafted by hand.
func TestScalarEncInstructions(t *testing.T) {
	var b = new(encBuffer)

	// Each case hand-builds one encInstr for field 6, runs it against a
	// concrete value, and compares the bytes with the expected wire image.

	// bool
	{
		var data bool = true
		instr := &encInstr{encBool, 6, nil, 0}
		state := newEncoderState(b)
		instr.op(instr, state, reflect.ValueOf(data))
		if !bytes.Equal(boolResult, b.Bytes()) {
			t.Errorf("bool enc instructions: expected % x got % x", boolResult, b.Bytes())
		}
	}

	// int
	{
		b.Reset()
		var data int = 17
		instr := &encInstr{encInt, 6, nil, 0}
		state := newEncoderState(b)
		instr.op(instr, state, reflect.ValueOf(data))
		if !bytes.Equal(signedResult, b.Bytes()) {
			t.Errorf("int enc instructions: expected % x got % x", signedResult, b.Bytes())
		}
	}

	// uint
	{
		b.Reset()
		var data uint = 17
		instr := &encInstr{encUint, 6, nil, 0}
		state := newEncoderState(b)
		instr.op(instr, state, reflect.ValueOf(data))
		if !bytes.Equal(unsignedResult, b.Bytes()) {
			t.Errorf("uint enc instructions: expected % x got % x", unsignedResult, b.Bytes())
		}
	}

	// int8
	{
		b.Reset()
		var data int8 = 17
		instr := &encInstr{encInt, 6, nil, 0}
		state := newEncoderState(b)
		instr.op(instr, state, reflect.ValueOf(data))
		if !bytes.Equal(signedResult, b.Bytes()) {
			t.Errorf("int8 enc instructions: expected % x got % x", signedResult, b.Bytes())
		}
	}

	// uint8
	{
		b.Reset()
		var data uint8 = 17
		instr := &encInstr{encUint, 6, nil, 0}
		state := newEncoderState(b)
		instr.op(instr, state, reflect.ValueOf(data))
		if !bytes.Equal(unsignedResult, b.Bytes()) {
			t.Errorf("uint8 enc instructions: expected % x got % x", unsignedResult, b.Bytes())
		}
	}

	// int16
	{
		b.Reset()
		var data int16 = 17
		instr := &encInstr{encInt, 6, nil, 0}
		state := newEncoderState(b)
		instr.op(instr, state, reflect.ValueOf(data))
		if !bytes.Equal(signedResult, b.Bytes()) {
			t.Errorf("int16 enc instructions: expected % x got % x", signedResult, b.Bytes())
		}
	}

	// uint16
	{
		b.Reset()
		var data uint16 = 17
		instr := &encInstr{encUint, 6, nil, 0}
		state := newEncoderState(b)
		instr.op(instr, state, reflect.ValueOf(data))
		if !bytes.Equal(unsignedResult, b.Bytes()) {
			t.Errorf("uint16 enc instructions: expected % x got % x", unsignedResult, b.Bytes())
		}
	}

	// int32
	{
		b.Reset()
		var data int32 = 17
		instr := &encInstr{encInt, 6, nil, 0}
		state := newEncoderState(b)
		instr.op(instr, state, reflect.ValueOf(data))
		if !bytes.Equal(signedResult, b.Bytes()) {
			t.Errorf("int32 enc instructions: expected % x got % x", signedResult, b.Bytes())
		}
	}

	// uint32
	{
		b.Reset()
		var data uint32 = 17
		instr := &encInstr{encUint, 6, nil, 0}
		state := newEncoderState(b)
		instr.op(instr, state, reflect.ValueOf(data))
		if !bytes.Equal(unsignedResult, b.Bytes()) {
			t.Errorf("uint32 enc instructions: expected % x got % x", unsignedResult, b.Bytes())
		}
	}

	// int64
	{
		b.Reset()
		var data int64 = 17
		instr := &encInstr{encInt, 6, nil, 0}
		state := newEncoderState(b)
		instr.op(instr, state, reflect.ValueOf(data))
		if !bytes.Equal(signedResult, b.Bytes()) {
			t.Errorf("int64 enc instructions: expected % x got % x", signedResult, b.Bytes())
		}
	}

	// uint64
	{
		b.Reset()
		var data uint64 = 17
		instr := &encInstr{encUint, 6, nil, 0}
		state := newEncoderState(b)
		instr.op(instr, state, reflect.ValueOf(data))
		if !bytes.Equal(unsignedResult, b.Bytes()) {
			t.Errorf("uint64 enc instructions: expected % x got % x", unsignedResult, b.Bytes())
		}
	}

	// float32
	{
		b.Reset()
		var data float32 = 17
		instr := &encInstr{encFloat, 6, nil, 0}
		state := newEncoderState(b)
		instr.op(instr, state, reflect.ValueOf(data))
		if !bytes.Equal(floatResult, b.Bytes()) {
			t.Errorf("float32 enc instructions: expected % x got % x", floatResult, b.Bytes())
		}
	}

	// float64
	{
		b.Reset()
		var data float64 = 17
		instr := &encInstr{encFloat, 6, nil, 0}
		state := newEncoderState(b)
		instr.op(instr, state, reflect.ValueOf(data))
		if !bytes.Equal(floatResult, b.Bytes()) {
			t.Errorf("float64 enc instructions: expected % x got % x", floatResult, b.Bytes())
		}
	}

	// bytes == []uint8
	{
		b.Reset()
		data := []byte("hello")
		instr := &encInstr{encUint8Array, 6, nil, 0}
		state := newEncoderState(b)
		instr.op(instr, state, reflect.ValueOf(data))
		if !bytes.Equal(bytesResult, b.Bytes()) {
			t.Errorf("bytes enc instructions: expected % x got % x", bytesResult, b.Bytes())
		}
	}

	// string
	{
		b.Reset()
		var data string = "hello"
		instr := &encInstr{encString, 6, nil, 0}
		state := newEncoderState(b)
		instr.op(instr, state, reflect.ValueOf(data))
		if !bytes.Equal(bytesResult, b.Bytes()) {
			t.Errorf("string enc instructions: expected % x got % x", bytesResult, b.Bytes())
		}
	}
}
// execDec reads the field-number delta, checks that it lands on field 6,
// then runs the decode instruction against value's element.
// The typ argument is unused; failures are identified through t instead.
func execDec(typ string, instr *decInstr, state *decoderState, t *testing.T, value reflect.Value) {
	defer testError(t)
	v := int(state.decodeUint())
	if v+state.fieldnum != 6 {
		t.Fatalf("decoding field number %d, got %d", 6, v+state.fieldnum)
	}
	instr.op(instr, state, value.Elem())
	state.fieldnum = 6
}
// newDecodeStateFromData builds a decoderState over the raw data,
// positioned before the first field (fieldnum == -1).
func newDecodeStateFromData(data []byte) *decoderState {
	state := newDecodeState(newDecBuffer(data))
	state.fieldnum = -1
	return state
}
// Test instruction execution for decoding.
// Do not run the machine yet; instead do individual instructions crafted by hand.
func TestScalarDecInstructions(t *testing.T) {
	ovfl := errors.New("overflow")

	// Each case hand-builds one decInstr for field 6, feeds it the expected
	// wire image from above, and checks the decoded Go value.

	// bool
	{
		var data bool
		instr := &decInstr{decBool, 6, nil, ovfl}
		state := newDecodeStateFromData(boolResult)
		execDec("bool", instr, state, t, reflect.ValueOf(&data))
		if data != true {
			t.Errorf("bool a = %v not true", data)
		}
	}
	// int
	{
		var data int
		instr := &decInstr{decOpTable[reflect.Int], 6, nil, ovfl}
		state := newDecodeStateFromData(signedResult)
		execDec("int", instr, state, t, reflect.ValueOf(&data))
		if data != 17 {
			t.Errorf("int a = %v not 17", data)
		}
	}

	// uint
	{
		var data uint
		instr := &decInstr{decOpTable[reflect.Uint], 6, nil, ovfl}
		state := newDecodeStateFromData(unsignedResult)
		execDec("uint", instr, state, t, reflect.ValueOf(&data))
		if data != 17 {
			t.Errorf("uint a = %v not 17", data)
		}
	}

	// int8
	{
		var data int8
		instr := &decInstr{decInt8, 6, nil, ovfl}
		state := newDecodeStateFromData(signedResult)
		execDec("int8", instr, state, t, reflect.ValueOf(&data))
		if data != 17 {
			t.Errorf("int8 a = %v not 17", data)
		}
	}

	// uint8
	{
		var data uint8
		instr := &decInstr{decUint8, 6, nil, ovfl}
		state := newDecodeStateFromData(unsignedResult)
		execDec("uint8", instr, state, t, reflect.ValueOf(&data))
		if data != 17 {
			t.Errorf("uint8 a = %v not 17", data)
		}
	}

	// int16
	{
		var data int16
		instr := &decInstr{decInt16, 6, nil, ovfl}
		state := newDecodeStateFromData(signedResult)
		execDec("int16", instr, state, t, reflect.ValueOf(&data))
		if data != 17 {
			t.Errorf("int16 a = %v not 17", data)
		}
	}

	// uint16
	{
		var data uint16
		instr := &decInstr{decUint16, 6, nil, ovfl}
		state := newDecodeStateFromData(unsignedResult)
		execDec("uint16", instr, state, t, reflect.ValueOf(&data))
		if data != 17 {
			t.Errorf("uint16 a = %v not 17", data)
		}
	}

	// int32
	{
		var data int32
		instr := &decInstr{decInt32, 6, nil, ovfl}
		state := newDecodeStateFromData(signedResult)
		execDec("int32", instr, state, t, reflect.ValueOf(&data))
		if data != 17 {
			t.Errorf("int32 a = %v not 17", data)
		}
	}

	// uint32
	{
		var data uint32
		instr := &decInstr{decUint32, 6, nil, ovfl}
		state := newDecodeStateFromData(unsignedResult)
		execDec("uint32", instr, state, t, reflect.ValueOf(&data))
		if data != 17 {
			t.Errorf("uint32 a = %v not 17", data)
		}
	}

	// uintptr
	{
		var data uintptr
		instr := &decInstr{decOpTable[reflect.Uintptr], 6, nil, ovfl}
		state := newDecodeStateFromData(unsignedResult)
		execDec("uintptr", instr, state, t, reflect.ValueOf(&data))
		if data != 17 {
			t.Errorf("uintptr a = %v not 17", data)
		}
	}

	// int64
	{
		var data int64
		instr := &decInstr{decInt64, 6, nil, ovfl}
		state := newDecodeStateFromData(signedResult)
		execDec("int64", instr, state, t, reflect.ValueOf(&data))
		if data != 17 {
			t.Errorf("int64 a = %v not 17", data)
		}
	}

	// uint64
	{
		var data uint64
		instr := &decInstr{decUint64, 6, nil, ovfl}
		state := newDecodeStateFromData(unsignedResult)
		execDec("uint64", instr, state, t, reflect.ValueOf(&data))
		if data != 17 {
			t.Errorf("uint64 a = %v not 17", data)
		}
	}

	// float32
	{
		var data float32
		instr := &decInstr{decFloat32, 6, nil, ovfl}
		state := newDecodeStateFromData(floatResult)
		execDec("float32", instr, state, t, reflect.ValueOf(&data))
		if data != 17 {
			t.Errorf("float32 a = %v not 17", data)
		}
	}

	// float64
	{
		var data float64
		instr := &decInstr{decFloat64, 6, nil, ovfl}
		state := newDecodeStateFromData(floatResult)
		execDec("float64", instr, state, t, reflect.ValueOf(&data))
		if data != 17 {
			t.Errorf("float64 a = %v not 17", data)
		}
	}

	// complex64
	{
		var data complex64
		instr := &decInstr{decOpTable[reflect.Complex64], 6, nil, ovfl}
		state := newDecodeStateFromData(complexResult)
		execDec("complex", instr, state, t, reflect.ValueOf(&data))
		if data != 17+19i {
			t.Errorf("complex a = %v not 17+19i", data)
		}
	}

	// complex128
	{
		var data complex128
		instr := &decInstr{decOpTable[reflect.Complex128], 6, nil, ovfl}
		state := newDecodeStateFromData(complexResult)
		execDec("complex", instr, state, t, reflect.ValueOf(&data))
		if data != 17+19i {
			t.Errorf("complex a = %v not 17+19i", data)
		}
	}

	// bytes == []uint8
	{
		var data []byte
		instr := &decInstr{decUint8Slice, 6, nil, ovfl}
		state := newDecodeStateFromData(bytesResult)
		execDec("bytes", instr, state, t, reflect.ValueOf(&data))
		if string(data) != "hello" {
			t.Errorf(`bytes a = %q not "hello"`, string(data))
		}
	}

	// string
	// (the "bytes" label is reused here; failures are still distinguished
	// by the t.Errorf message below)
	{
		var data string
		instr := &decInstr{decString, 6, nil, ovfl}
		state := newDecodeStateFromData(bytesResult)
		execDec("bytes", instr, state, t, reflect.ValueOf(&data))
		if data != "hello" {
			t.Errorf(`bytes a = %q not "hello"`, data)
		}
	}
}
// TestEndToEnd round-trips a struct containing maps, pointers to arrays
// and slices, a complex number, strings, bytes, and a nested struct
// through a full Encoder/Decoder pair and compares with DeepEqual.
func TestEndToEnd(t *testing.T) {
	type T2 struct {
		T string
	}
	s1 := "string1"
	s2 := "string2"
	type T1 struct {
		A, B, C  int
		M        map[string]*float64
		EmptyMap map[string]int // to check that we receive a non-nil map.
		N        *[3]float64
		Strs     *[2]string
		Int64s   *[]int64
		RI       complex64
		S        string
		Y        []byte
		T        *T2
	}
	pi := 3.14159
	e := 2.71828
	t1 := &T1{
		A:        17,
		B:        18,
		C:        -5,
		M:        map[string]*float64{"pi": &pi, "e": &e},
		EmptyMap: make(map[string]int),
		N:        &[3]float64{1.5, 2.5, 3.5},
		Strs:     &[2]string{s1, s2},
		Int64s:   &[]int64{77, 89, 123412342134},
		RI:       17 - 23i,
		S:        "Now is the time",
		Y:        []byte("hello, sailor"),
		T:        &T2{"this is T2"},
	}
	b := new(bytes.Buffer)
	err := NewEncoder(b).Encode(t1)
	if err != nil {
		t.Error("encode:", err)
	}
	var _t1 T1
	err = NewDecoder(b).Decode(&_t1)
	if err != nil {
		t.Fatal("decode:", err)
	}
	if !reflect.DeepEqual(t1, &_t1) {
		t.Errorf("encode expected %v got %v", *t1, _t1)
	}
	// Be absolutely sure the received map is non-nil.
	if t1.EmptyMap == nil {
		t.Errorf("nil map sent")
	}
	if _t1.EmptyMap == nil {
		t.Errorf("nil map received")
	}
}
// TestOverflow encodes numeric values that are out of range for narrower
// destination field types and checks that Decode reports the expected
// out-of-range error for each field. Encode errors are deliberately ignored;
// the test only inspects the error returned by Decode.
func TestOverflow(t *testing.T) {
	type inputT struct {
		Maxi int64
		Mini int64
		Maxu uint64
		Maxf float64
		Minf float64
		Maxc complex128
		Minc complex128
	}
	var it inputT
	var err error
	b := new(bytes.Buffer)
	enc := NewEncoder(b)
	dec := NewDecoder(b)
	// int8
	b.Reset()
	it = inputT{
		Maxi: math.MaxInt8 + 1,
	}
	type outi8 struct {
		Maxi int8
		Mini int8
	}
	var o1 outi8
	enc.Encode(it)
	err = dec.Decode(&o1)
	if err == nil || err.Error() != `value for "Maxi" out of range` {
		t.Error("wrong overflow error for int8:", err)
	}
	it = inputT{
		Mini: math.MinInt8 - 1,
	}
	b.Reset()
	enc.Encode(it)
	err = dec.Decode(&o1)
	if err == nil || err.Error() != `value for "Mini" out of range` {
		t.Error("wrong underflow error for int8:", err)
	}
	// int16
	b.Reset()
	it = inputT{
		Maxi: math.MaxInt16 + 1,
	}
	type outi16 struct {
		Maxi int16
		Mini int16
	}
	var o2 outi16
	enc.Encode(it)
	err = dec.Decode(&o2)
	if err == nil || err.Error() != `value for "Maxi" out of range` {
		t.Error("wrong overflow error for int16:", err)
	}
	it = inputT{
		Mini: math.MinInt16 - 1,
	}
	b.Reset()
	enc.Encode(it)
	err = dec.Decode(&o2)
	if err == nil || err.Error() != `value for "Mini" out of range` {
		t.Error("wrong underflow error for int16:", err)
	}
	// int32
	b.Reset()
	it = inputT{
		Maxi: math.MaxInt32 + 1,
	}
	type outi32 struct {
		Maxi int32
		Mini int32
	}
	var o3 outi32
	enc.Encode(it)
	err = dec.Decode(&o3)
	if err == nil || err.Error() != `value for "Maxi" out of range` {
		t.Error("wrong overflow error for int32:", err)
	}
	it = inputT{
		Mini: math.MinInt32 - 1,
	}
	b.Reset()
	enc.Encode(it)
	err = dec.Decode(&o3)
	if err == nil || err.Error() != `value for "Mini" out of range` {
		t.Error("wrong underflow error for int32:", err)
	}
	// uint8
	b.Reset()
	it = inputT{
		Maxu: math.MaxUint8 + 1,
	}
	type outu8 struct {
		Maxu uint8
	}
	var o4 outu8
	enc.Encode(it)
	err = dec.Decode(&o4)
	if err == nil || err.Error() != `value for "Maxu" out of range` {
		t.Error("wrong overflow error for uint8:", err)
	}
	// uint16
	b.Reset()
	it = inputT{
		Maxu: math.MaxUint16 + 1,
	}
	type outu16 struct {
		Maxu uint16
	}
	var o5 outu16
	enc.Encode(it)
	err = dec.Decode(&o5)
	if err == nil || err.Error() != `value for "Maxu" out of range` {
		t.Error("wrong overflow error for uint16:", err)
	}
	// uint32
	b.Reset()
	it = inputT{
		Maxu: math.MaxUint32 + 1,
	}
	type outu32 struct {
		Maxu uint32
	}
	var o6 outu32
	enc.Encode(it)
	err = dec.Decode(&o6)
	if err == nil || err.Error() != `value for "Maxu" out of range` {
		t.Error("wrong overflow error for uint32:", err)
	}
	// float32
	b.Reset()
	it = inputT{
		Maxf: math.MaxFloat32 * 2,
	}
	type outf32 struct {
		Maxf float32
		Minf float32
	}
	var o7 outf32
	enc.Encode(it)
	err = dec.Decode(&o7)
	if err == nil || err.Error() != `value for "Maxf" out of range` {
		t.Error("wrong overflow error for float32:", err)
	}
	// complex64
	b.Reset()
	it = inputT{
		Maxc: complex(math.MaxFloat32*2, math.MaxFloat32*2),
	}
	type outc64 struct {
		Maxc complex64
		Minc complex64
	}
	var o8 outc64
	enc.Encode(it)
	err = dec.Decode(&o8)
	if err == nil || err.Error() != `value for "Maxc" out of range` {
		t.Error("wrong overflow error for complex64:", err)
	}
}
// TestNesting round-trips a two-level self-referential struct and verifies
// that both levels survive the encode/decode cycle.
func TestNesting(t *testing.T) {
	type RT struct {
		A    string
		Next *RT
	}
	src := &RT{
		A:    "level1",
		Next: &RT{A: "level2"},
	}
	buf := new(bytes.Buffer)
	NewEncoder(buf).Encode(src)
	var got RT
	if err := NewDecoder(buf).Decode(&got); err != nil {
		t.Fatal("decoder error:", err)
	}
	if got.A != src.A {
		t.Errorf("nesting: encode expected %v got %v", *src, got)
	}
	if got.Next == nil {
		t.Errorf("nesting: recursion failed")
		return
	}
	if got.Next.A != src.Next.A {
		t.Errorf("nesting: encode expected %v got %v", *src.Next, *got.Next)
	}
}
// These three structures have the same data with different indirections:
// T0 holds plain values, T1 indirects increasingly from B to D, and T2
// indirects increasingly from D to A (the mirror image of T1).
type T0 struct {
	A int
	B int
	C int
	D int
}
type T1 struct {
	A int
	B *int
	C **int
	D ***int
}
type T2 struct {
	A ***int
	B **int
	C *int
	D int
}
// TestAutoIndirection checks that the codec automatically follows (and,
// on decode, allocates) pointer chains: values transfer between T0, T1 and
// T2 regardless of how many levels of indirection each side uses.
func TestAutoIndirection(t *testing.T) {
	// First transfer t1 into t0
	var t1 T1
	t1.A = 17
	t1.B = new(int)
	*t1.B = 177
	t1.C = new(*int)
	*t1.C = new(int)
	**t1.C = 1777
	t1.D = new(**int)
	*t1.D = new(*int)
	**t1.D = new(int)
	***t1.D = 17777
	b := new(bytes.Buffer)
	enc := NewEncoder(b)
	enc.Encode(t1)
	dec := NewDecoder(b)
	var t0 T0
	dec.Decode(&t0)
	if t0.A != 17 || t0.B != 177 || t0.C != 1777 || t0.D != 17777 {
		t.Errorf("t1->t0: expected {17 177 1777 17777}; got %v", t0)
	}
	// Now transfer t2 into t0
	var t2 T2
	t2.D = 17777
	t2.C = new(int)
	*t2.C = 1777
	t2.B = new(*int)
	*t2.B = new(int)
	**t2.B = 177
	t2.A = new(**int)
	*t2.A = new(*int)
	**t2.A = new(int)
	***t2.A = 17
	b.Reset()
	enc.Encode(t2)
	t0 = T0{}
	dec.Decode(&t0)
	if t0.A != 17 || t0.B != 177 || t0.C != 1777 || t0.D != 17777 {
		t.Errorf("t2->t0 expected {17 177 1777 17777}; got %v", t0)
	}
	// Now transfer t0 into t1; the decoder must allocate the pointers.
	t0 = T0{17, 177, 1777, 17777}
	b.Reset()
	enc.Encode(t0)
	t1 = T1{}
	dec.Decode(&t1)
	if t1.A != 17 || *t1.B != 177 || **t1.C != 1777 || ***t1.D != 17777 {
		t.Errorf("t0->t1 expected {17 177 1777 17777}; got {%d %d %d %d}", t1.A, *t1.B, **t1.C, ***t1.D)
	}
	// Now transfer t0 into t2
	b.Reset()
	enc.Encode(t0)
	t2 = T2{}
	dec.Decode(&t2)
	if ***t2.A != 17 || **t2.B != 177 || *t2.C != 1777 || t2.D != 17777 {
		t.Errorf("t0->t2 expected {17 177 1777 17777}; got {%d %d %d %d}", ***t2.A, **t2.B, *t2.C, t2.D)
	}
	// Now do t2 again but without pre-allocated pointers:
	// zero the leaves and decode into the existing pointer chains.
	b.Reset()
	enc.Encode(t0)
	***t2.A = 0
	**t2.B = 0
	*t2.C = 0
	t2.D = 0
	dec.Decode(&t2)
	if ***t2.A != 17 || **t2.B != 177 || *t2.C != 1777 || t2.D != 17777 {
		t.Errorf("t0->t2 expected {17 177 1777 17777}; got {%d %d %d %d}", ***t2.A, **t2.B, *t2.C, t2.D)
	}
}
// RT0 and RT1 carry the same fields A, B and C but declare them in a
// different order; RT1 also has an extra field that the wire data never sets.
type RT0 struct {
	A int
	B string
	C float64
}
type RT1 struct {
	C      float64
	B      string
	A      int
	NotSet string // never present in the encoded stream.
}
// TestReorderedFields verifies that fields are matched by name, not by
// position: data written as RT0 decodes correctly into RT1, whose fields
// are declared in a different order.
func TestReorderedFields(t *testing.T) {
	rt0 := RT0{
		A: 17,
		B: "hello",
		C: 3.14159,
	}
	buf := new(bytes.Buffer)
	NewEncoder(buf).Encode(rt0)
	// Wire type is RT0, local type is RT1.
	var rt1 RT1
	if err := NewDecoder(buf).Decode(&rt1); err != nil {
		t.Fatal("decode error:", err)
	}
	if rt0.A != rt1.A || rt0.B != rt1.B || rt0.C != rt1.C {
		t.Errorf("rt1->rt0: expected %v; got %v", rt0, rt1)
	}
}
// Like an RT0 but with fields we'll ignore on the decode side.
// The Ignore_* fields cover every category the decoder must be able to
// skip: slice, array, bool, string, bytes, struct pointer and map.
type IT0 struct {
	A        int64
	B        string
	Ignore_d []int
	Ignore_e [3]float64
	Ignore_f bool
	Ignore_g string
	Ignore_h []byte
	Ignore_i *RT1
	Ignore_m map[string]int
	C        float64
}
// TestIgnoredFields encodes an IT0 — which has many fields absent from RT1 —
// and checks that decoding into RT1 skips the unknown fields while still
// filling in the ones both types share.
func TestIgnoredFields(t *testing.T) {
	var it0 IT0
	it0.A = 17
	it0.B = "hello"
	it0.C = 3.14159
	it0.Ignore_d = []int{1, 2, 3}
	it0.Ignore_e[0] = 1.0
	it0.Ignore_e[1] = 2.0
	it0.Ignore_e[2] = 3.0
	it0.Ignore_f = true
	it0.Ignore_g = "pay no attention"
	it0.Ignore_h = []byte("to the curtain")
	it0.Ignore_i = &RT1{3.1, "hi", 7, "hello"}
	it0.Ignore_m = map[string]int{"one": 1, "two": 2}
	b := new(bytes.Buffer)
	NewEncoder(b).Encode(it0)
	dec := NewDecoder(b)
	var rt1 RT1
	// Wire type is IT0, local type is RT1.
	err := dec.Decode(&rt1)
	if err != nil {
		t.Error("error: ", err)
	}
	if int(it0.A) != rt1.A || it0.B != rt1.B || it0.C != rt1.C {
		t.Errorf("rt0->rt1: expected %v; got %v", it0, rt1)
	}
}
// TestBadRecursiveType verifies that encoding a type that can only ever be
// a cycle of pointers (no data at the bottom) fails with an error that
// mentions recursion.
func TestBadRecursiveType(t *testing.T) {
	type Rec ***Rec
	var rec Rec
	b := new(bytes.Buffer)
	err := NewEncoder(b).Encode(&rec)
	if err == nil {
		t.Error("expected error; got none")
	} else if !strings.Contains(err.Error(), "recursive") {
		// strings.Contains is the idiomatic form of strings.Index(...) >= 0.
		t.Error("expected recursive type error; got", err)
	}
	// Can't test decode easily because we can't encode one, so we can't pass one to a Decoder.
}
// Indirect and Direct hold the same array/slice/map data; Indirect wraps
// each field in several levels of pointer indirection.
type Indirect struct {
	A ***[3]int
	S ***[]int
	M ****map[string]int
}
type Direct struct {
	A [3]int
	S []int
	M map[string]int
}
// TestIndirectSliceMapArray checks that arrays, slices and maps transfer
// correctly through multiple levels of pointer indirection in both
// directions (Indirect -> Direct and Direct -> Indirect).
func TestIndirectSliceMapArray(t *testing.T) {
	// Marshal indirect, unmarshal to direct.
	i := new(Indirect)
	i.A = new(**[3]int)
	*i.A = new(*[3]int)
	**i.A = new([3]int)
	***i.A = [3]int{1, 2, 3}
	i.S = new(**[]int)
	*i.S = new(*[]int)
	**i.S = new([]int)
	***i.S = []int{4, 5, 6}
	i.M = new(***map[string]int)
	*i.M = new(**map[string]int)
	**i.M = new(*map[string]int)
	***i.M = new(map[string]int)
	****i.M = map[string]int{"one": 1, "two": 2, "three": 3}
	b := new(bytes.Buffer)
	NewEncoder(b).Encode(i)
	dec := NewDecoder(b)
	var d Direct
	err := dec.Decode(&d)
	if err != nil {
		t.Error("error: ", err)
	}
	if len(d.A) != 3 || d.A[0] != 1 || d.A[1] != 2 || d.A[2] != 3 {
		t.Errorf("indirect to direct: d.A is %v not %v", d.A, ***i.A)
	}
	if len(d.S) != 3 || d.S[0] != 4 || d.S[1] != 5 || d.S[2] != 6 {
		t.Errorf("indirect to direct: d.S is %v not %v", d.S, ***i.S)
	}
	if len(d.M) != 3 || d.M["one"] != 1 || d.M["two"] != 2 || d.M["three"] != 3 {
		// Fix: the expected value is the map itself (****i.M); ***i.M is a
		// *map and would print as a pointer.
		t.Errorf("indirect to direct: d.M is %v not %v", d.M, ****i.M)
	}
	// Marshal direct, unmarshal to indirect.
	d.A = [3]int{11, 22, 33}
	d.S = []int{44, 55, 66}
	d.M = map[string]int{"four": 4, "five": 5, "six": 6}
	i = new(Indirect)
	b.Reset()
	NewEncoder(b).Encode(d)
	dec = NewDecoder(b)
	err = dec.Decode(&i)
	if err != nil {
		t.Fatal("error: ", err)
	}
	if len(***i.A) != 3 || (***i.A)[0] != 11 || (***i.A)[1] != 22 || (***i.A)[2] != 33 {
		t.Errorf("direct to indirect: ***i.A is %v not %v", ***i.A, d.A)
	}
	if len(***i.S) != 3 || (***i.S)[0] != 44 || (***i.S)[1] != 55 || (***i.S)[2] != 66 {
		// Fix: the expected value is d.S; the original passed ***i.S as both
		// the got and expected argument, making the message useless.
		t.Errorf("direct to indirect: ***i.S is %v not %v", ***i.S, d.S)
	}
	if len(****i.M) != 3 || (****i.M)["four"] != 4 || (****i.M)["five"] != 5 || (****i.M)["six"] != 6 {
		t.Errorf("direct to indirect: ****i.M is %v not %v", ****i.M, d.M)
	}
}
// Squarer is an interface with several implementations, used by the
// interface-encoding tests below.
type Squarer interface {
	Square() int
}

type Int int

// Square returns the square of the integer.
func (n Int) Square() int {
	v := int(n)
	return v * v
}

type Float float64

// Square returns the square of the float, truncated to int.
func (x Float) Square() int {
	sq := x * x
	return int(sq)
}

type Vector []int

// Square returns the sum of the squares of the elements.
func (vec Vector) Square() int {
	total := 0
	for _, elem := range vec {
		total += elem * elem
	}
	return total
}

type Point struct {
	X, Y int
}

// Square returns the squared distance of the point from the origin.
func (pt Point) Square() int {
	return pt.X*pt.X + pt.Y*pt.Y
}
// A struct with interfaces in it.
type InterfaceItem struct {
	I             int
	Sq1, Sq2, Sq3 Squarer
	F             float64
	Sq            []Squarer
}

// The same struct without interfaces; used to check that interface-valued
// fields can be ignored on decode.
type NoInterfaceItem struct {
	I int
	F float64
}
// TestInterface round-trips a struct holding interface values of three
// registered concrete types (plus a nil interface inside a slice) and
// checks each element decodes to an equivalent implementation.
func TestInterface(t *testing.T) {
	iVal := Int(3)
	fVal := Float(5)
	// Sending a Vector will require that the receiver define a type in the middle of
	// receiving the value for item2.
	vVal := Vector{1, 2, 3}
	b := new(bytes.Buffer)
	item1 := &InterfaceItem{1, iVal, fVal, vVal, 11.5, []Squarer{iVal, fVal, nil, vVal}}
	// Register the types.
	Register(Int(0))
	Register(Float(0))
	Register(Vector{})
	err := NewEncoder(b).Encode(item1)
	if err != nil {
		t.Error("expected no encode error; got", err)
	}
	item2 := InterfaceItem{}
	err = NewDecoder(b).Decode(&item2)
	if err != nil {
		t.Fatal("decode:", err)
	}
	if item2.I != item1.I {
		t.Error("normal int did not decode correctly")
	}
	if item2.Sq1 == nil || item2.Sq1.Square() != iVal.Square() {
		t.Error("Int did not decode correctly")
	}
	if item2.Sq2 == nil || item2.Sq2.Square() != fVal.Square() {
		t.Error("Float did not decode correctly")
	}
	if item2.Sq3 == nil || item2.Sq3.Square() != vVal.Square() {
		t.Error("Vector did not decode correctly")
	}
	if item2.F != item1.F {
		t.Error("normal float did not decode correctly")
	}
	// Now check that we received a slice of Squarers correctly, including a nil element
	if len(item1.Sq) != len(item2.Sq) {
		t.Fatalf("[]Squarer length wrong: got %d; expected %d", len(item2.Sq), len(item1.Sq))
	}
	for i, v1 := range item1.Sq {
		v2 := item2.Sq[i]
		if v1 == nil || v2 == nil {
			if v1 != nil || v2 != nil {
				t.Errorf("item %d inconsistent nils", i)
			}
		} else if v1.Square() != v2.Square() {
			t.Errorf("item %d inconsistent values: %v %v", i, v1, v2)
		}
	}
}
// A struct with all basic types, stored in interfaces.
type BasicInterfaceItem struct {
	Int, Int8, Int16, Int32, Int64      interface{}
	Uint, Uint8, Uint16, Uint32, Uint64 interface{}
	Float32, Float64                    interface{}
	Complex64, Complex128               interface{}
	Bool                                interface{}
	String                              interface{}
	Bytes                               interface{}
}
// TestInterfaceBasic round-trips every basic type stored in an interface{}
// field and checks both deep equality and (for a couple of samples) that
// the dynamic type is preserved.
func TestInterfaceBasic(t *testing.T) {
	b := new(bytes.Buffer)
	item1 := &BasicInterfaceItem{
		int(1), int8(1), int16(1), int32(1), int64(1),
		uint(1), uint8(1), uint16(1), uint32(1), uint64(1),
		float32(1), 1.0,
		complex64(1i), complex128(1i),
		true,
		"hello",
		[]byte("sailor"),
	}
	err := NewEncoder(b).Encode(item1)
	if err != nil {
		t.Error("expected no encode error; got", err)
	}
	item2 := &BasicInterfaceItem{}
	err = NewDecoder(b).Decode(&item2)
	if err != nil {
		t.Fatal("decode:", err)
	}
	if !reflect.DeepEqual(item1, item2) {
		t.Errorf("encode expected %v got %v", item1, item2)
	}
	// Hand check a couple for correct types.
	if v, ok := item2.Bool.(bool); !ok || !v {
		t.Error("boolean should be true")
	}
	if v, ok := item2.String.(string); !ok || v != item1.String.(string) {
		t.Errorf("string should be %v is %v", item1.String, v)
	}
}
// String is a named string type used to test derived types in interfaces.
type String string

type PtrInterfaceItem struct {
	Str1 interface{} // basic
	Str2 interface{} // derived
}
// We'll send pointers; should receive values.
// Also check that we can register T but send *T.
func TestInterfacePointer(t *testing.T) {
	b := new(bytes.Buffer)
	str1 := "howdy"
	str2 := String("kiddo")
	item1 := &PtrInterfaceItem{
		&str1,
		&str2,
	}
	// Register the type.
	Register(str2)
	err := NewEncoder(b).Encode(item1)
	if err != nil {
		t.Error("expected no encode error; got", err)
	}
	item2 := &PtrInterfaceItem{}
	err = NewDecoder(b).Decode(&item2)
	if err != nil {
		t.Fatal("decode:", err)
	}
	// Hand test for correct types and values: pointers were sent but plain
	// values must come back.
	if v, ok := item2.Str1.(string); !ok || v != str1 {
		t.Errorf("basic string failed: %q should be %q", v, str1)
	}
	if v, ok := item2.Str2.(String); !ok || v != str2 {
		t.Errorf("derived type String failed: %q should be %q", v, str2)
	}
}
// TestIgnoreInterface encodes an InterfaceItem but decodes into
// NoInterfaceItem, checking that interface-valued fields can be skipped
// while the plain fields still transfer.
func TestIgnoreInterface(t *testing.T) {
	iVal := Int(3)
	fVal := Float(5)
	// Sending a Point will require that the receiver define a type in the middle of
	// receiving the value for item2.
	pVal := Point{2, 3}
	b := new(bytes.Buffer)
	item1 := &InterfaceItem{1, iVal, fVal, pVal, 11.5, nil}
	// Register the types.
	Register(Int(0))
	Register(Float(0))
	Register(Point{})
	err := NewEncoder(b).Encode(item1)
	if err != nil {
		t.Error("expected no encode error; got", err)
	}
	item2 := NoInterfaceItem{}
	err = NewDecoder(b).Decode(&item2)
	if err != nil {
		t.Fatal("decode:", err)
	}
	if item2.I != item1.I {
		t.Error("normal int did not decode correctly")
	}
	// Fix: the original compared item2.F to itself, which can never fail;
	// the decoded value must be compared to the encoded one.
	if item2.F != item1.F {
		t.Error("normal float did not decode correctly")
	}
}
// U has an unexported field c that the codec must skip on both ends.
type U struct {
	A int
	B string
	c float64 // unexported: never encoded, never touched by decode.
	D uint
}
// TestUnexportedFields checks that unexported fields are neither encoded
// nor overwritten on decode: the exported fields round-trip while the
// receiver's pre-set unexported field keeps its value.
func TestUnexportedFields(t *testing.T) {
	var u0 U
	u0.A = 17
	u0.B = "hello"
	u0.c = 3.14159
	u0.D = 23
	b := new(bytes.Buffer)
	NewEncoder(b).Encode(u0)
	dec := NewDecoder(b)
	var u1 U
	u1.c = 1234.
	err := dec.Decode(&u1)
	if err != nil {
		t.Fatal("decode error:", err)
	}
	// Fix: the original compared u0.A to itself, which can never fail;
	// the check must compare the sent and received values.
	if u0.A != u1.A || u0.B != u1.B || u0.D != u1.D {
		t.Errorf("u1->u0: expected %v; got %v", u0, u1)
	}
	if u1.c != 1234. {
		t.Error("u1.c modified")
	}
}
// singletons is a list of assorted stand-alone values encoded one after
// another by TestDebugSingleton.
var singletons = []interface{}{
	true,
	7,
	3.2,
	"hello",
	[3]int{11, 22, 33},
	[]float32{0.5, 0.25, 0.125},
	map[string]int{"one": 1, "two": 2},
}
// TestDebugSingleton encodes the singleton values and dumps the stream via
// debugFunc. It is a no-op when debugFunc is nil — presumably debugFunc is
// only set by a debug build; TODO confirm where it is assigned.
func TestDebugSingleton(t *testing.T) {
	if debugFunc == nil {
		return
	}
	b := new(bytes.Buffer)
	// Accumulate a number of values and print them out all at once.
	for _, x := range singletons {
		err := NewEncoder(b).Encode(x)
		if err != nil {
			t.Fatal("encode:", err)
		}
	}
	debugFunc(b)
}
// A type that won't be defined in the gob until we send it in an interface value.
type OnTheFly struct {
	A int
}

// DT exercises a mix of basic fields, interface fields (including a nil
// one), a map, an array and a slice in a single struct.
type DT struct {
	//	X OnTheFly
	A     int
	B     string
	C     float64
	I     interface{}
	J     interface{}
	I_nil interface{}
	M     map[string]int
	T     [3]int
	S     []string
}
// newDT returns a fully-populated DT fixture used by the debug and fuzz tests.
func newDT() DT {
	return DT{
		A:     17,
		B:     "hello",
		C:     3.14159,
		I:     271828,
		J:     OnTheFly{3},
		I_nil: nil,
		M:     map[string]int{"one": 1, "two": 2},
		T:     [3]int{11, 22, 33},
		S:     []string{"hi", "joe"},
	}
}
// TestDebugStruct encodes a DT, decodes a copy of the stream, and runs
// debugFunc over the untouched copy. It is a no-op when debugFunc is nil.
func TestDebugStruct(t *testing.T) {
	if debugFunc == nil {
		return
	}
	Register(OnTheFly{})
	dt := newDT()
	b := new(bytes.Buffer)
	err := NewEncoder(b).Encode(dt)
	if err != nil {
		t.Fatal("encode:", err)
	}
	// Copy the bytes before Decode consumes b.
	debugBuffer := bytes.NewBuffer(b.Bytes())
	dt2 := &DT{}
	err = NewDecoder(b).Decode(&dt2)
	if err != nil {
		t.Error("decode:", err)
	}
	debugFunc(debugBuffer)
}
// encFuzzDec encodes in, randomly corrupts roughly 30% of the encoded
// bytes, and then attempts to decode the stream, returning whatever error
// (if any) results. Note that buf.Bytes() aliases the buffer's storage, so
// mutating b corrupts exactly the bytes the decoder will read.
func encFuzzDec(rng *rand.Rand, in interface{}) error {
	buf := new(bytes.Buffer)
	enc := NewEncoder(buf)
	if err := enc.Encode(&in); err != nil {
		return err
	}
	b := buf.Bytes()
	for i, bi := range b {
		if rng.Intn(10) < 3 {
			b[i] = bi + uint8(rng.Intn(256))
		}
	}
	dec := NewDecoder(buf)
	var e interface{}
	if err := dec.Decode(&e); err != nil {
		return err
	}
	return nil
}
// This does some "fuzz testing" by attempting to decode a sequence of random bytes.
// Disabled by default; enabled via the -gob.fuzz flag because failures are
// only interesting interactively.
func TestFuzz(t *testing.T) {
	if !*doFuzzTests {
		t.Logf("disabled; run with -gob.fuzz to enable")
		return
	}
	// all possible inputs
	input := []interface{}{
		new(int),
		new(float32),
		new(float64),
		new(complex128),
		&ByteStruct{255},
		&ArrayStruct{},
		&StringStruct{"hello"},
		&GobTest1{0, &StringStruct{"hello"}},
	}
	testFuzz(t, time.Now().UnixNano(), 100, input...)
}
// TestFuzzRegressions replays specific seeds that previously triggered
// pathological decoder behavior. Disabled unless -gob.fuzz is set.
func TestFuzzRegressions(t *testing.T) {
	if !*doFuzzTests {
		t.Logf("disabled; run with -gob.fuzz to enable")
		return
	}
	// An instance triggering a type name of length ~102 GB.
	testFuzz(t, 1328492090837718000, 100, new(float32))
	// An instance triggering a type name of 1.6 GB.
	// Note: can take several minutes to run.
	testFuzz(t, 1330522872628565000, 100, new(int))
}
// testFuzz runs encFuzzDec n times for each input value, reseeding the
// random source with the same seed per value so runs are reproducible.
func testFuzz(t *testing.T, seed int64, n int, input ...interface{}) {
	for _, sample := range input {
		t.Logf("seed=%d n=%d e=%T", seed, n, sample)
		rng := rand.New(rand.NewSource(seed))
		for round := 0; round < n; round++ {
			encFuzzDec(rng, sample)
		}
	}
}
// TestFuzzOneByte tries to decode corrupted input sequences
// and checks that no panic occurs: every position in a valid stream is
// XOR-flipped with a range of byte values and the decode must fail
// gracefully rather than crash.
func TestFuzzOneByte(t *testing.T) {
	buf := new(bytes.Buffer)
	Register(OnTheFly{})
	dt := newDT()
	if err := NewEncoder(buf).Encode(dt); err != nil {
		t.Fatal(err)
	}
	s := buf.String()
	indices := make([]int, 0, len(s))
	for i := 0; i < len(s); i++ {
		switch i {
		case 14, 167, 231, 265: // a slice length, corruptions are not handled yet.
			continue
		}
		indices = append(indices, i)
	}
	if testing.Short() {
		indices = []int{1, 111, 178} // known fixed panics
	}
	for _, i := range indices {
		for j := 0; j < 256; j += 3 {
			b := []byte(s)
			b[i] ^= byte(j)
			var e DT
			func() {
				// Catch the panic so we can report which corruption caused it,
				// then re-panic to preserve the stack trace.
				defer func() {
					if p := recover(); p != nil {
						t.Errorf("crash for b[%d] ^= 0x%x", i, j)
						panic(p)
					}
				}()
				err := NewDecoder(bytes.NewReader(b)).Decode(&e)
				_ = err
			}()
		}
	}
}
// Don't crash, just give error with invalid type id.
// Issue 9649.
func TestErrorInvalidTypeId(t *testing.T) {
	data := []byte{0x01, 0x00, 0x01, 0x00}
	d := NewDecoder(bytes.NewReader(data))
	// When running d.Decode(&foo) the first time the decoder stops
	// after []byte{0x01, 0x00} and reports an errBadType. Running
	// d.Decode(&foo) again on exactly the same input sequence should
	// give another errBadType, but instead caused a panic because
	// decoderMap wasn't cleaned up properly after the first error.
	for i := 0; i < 2; i++ {
		var foo struct{}
		err := d.Decode(&foo)
		if err != errBadType {
			// Fix: t.Fatal does not interpret printf verbs; Fatalf is needed
			// for the %s placeholders to be expanded.
			t.Fatalf("decode: expected %s, got %s", errBadType, err)
		}
	}
}
| bsd-3-clause |
nitro2010/moodle | lib/classes/event/competency_viewed.php | 3028 | <?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* Competency viewed event.
*
* @package core_competency
* @copyright 2016 Issam Taboubi <issam.taboubi@umontreal.ca>
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
namespace core\event;
use core\event\base;
use core_competency\competency;
defined('MOODLE_INTERNAL') || die();
/**
* Competency viewed event class.
*
*
* @package core_competency
* @since Moodle 3.1
* @copyright 2016 Issam Taboubi <issam.taboubi@umontreal.ca>
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
class competency_viewed extends base {

    /**
     * Convenience method to instantiate the event.
     *
     * @param competency $competency The competency.
     * @return self
     * @throws \coding_exception If the competency has not been saved yet (no ID).
     */
    public static function create_from_competency(competency $competency) {
        // A persisted competency is required: the event stores its ID as objectid.
        if (!$competency->get_id()) {
            throw new \coding_exception('The competency ID must be set.');
        }
        $event = static::create(array(
            'contextid' => $competency->get_context()->id,
            'objectid' => $competency->get_id()
        ));
        // Snapshot the record so observers can read it without another DB query.
        $event->add_record_snapshot(competency::TABLE, $competency->to_record());
        return $event;
    }

    /**
     * Returns description of what happened.
     *
     * @return string
     */
    public function get_description() {
        return "The user with id '$this->userid' viewed the competency with id '$this->objectid'";
    }

    /**
     * Return localised event name.
     *
     * @return string
     */
    public static function get_name() {
        return get_string('eventcompetencyviewed', 'core_competency');
    }

    /**
     * Get URL related to the action
     *
     * @return \moodle_url
     */
    public function get_url() {
        return \core_competency\url::competency($this->objectid, $this->contextid);
    }

    /**
     * Init method.
     *
     * @return void
     */
    protected function init() {
        // Read-only event at "other" education level, linked to the competency table.
        $this->data['crud'] = 'r';
        $this->data['edulevel'] = self::LEVEL_OTHER;
        $this->data['objecttable'] = competency::TABLE;
    }

    /**
     * Get_objectid_mapping method.
     *
     * @return string the name of the restore mapping the objectid links to
     */
    public static function get_objectid_mapping() {
        // Competency IDs are not remapped on backup/restore.
        return base::NOT_MAPPED;
    }
}
| gpl-3.0 |
kenshin233/elasticsearch | core/src/main/java/org/elasticsearch/index/query/MatchQueryParser.java | 9371 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.QueryParsers;
import org.elasticsearch.index.search.MatchQuery;
import java.io.IOException;
/**
*
*/
/**
 * Parses the {@code match} family of queries ({@code match},
 * {@code match_phrase}, {@code match_phrase_prefix} and their aliases)
 * into Lucene queries.
 */
public class MatchQueryParser implements QueryParser {

    public static final String NAME = "match";

    @Inject
    public MatchQueryParser() {
    }

    @Override
    public String[] names() {
        return new String[]{
                NAME, "match_phrase", "matchPhrase", "match_phrase_prefix", "matchPhrasePrefix", "matchFuzzy", "match_fuzzy", "fuzzy_match"
        };
    }

    @Override
    public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
        XContentParser parser = parseContext.parser();

        // The name the query was registered under chooses the default match type.
        MatchQuery.Type type = MatchQuery.Type.BOOLEAN;
        if ("match_phrase".equals(parser.currentName()) || "matchPhrase".equals(parser.currentName()) ||
                "text_phrase".equals(parser.currentName()) || "textPhrase".equals(parser.currentName())) {
            type = MatchQuery.Type.PHRASE;
        } else if ("match_phrase_prefix".equals(parser.currentName()) || "matchPhrasePrefix".equals(parser.currentName()) ||
                "text_phrase_prefix".equals(parser.currentName()) || "textPhrasePrefix".equals(parser.currentName())) {
            type = MatchQuery.Type.PHRASE_PREFIX;
        }

        XContentParser.Token token = parser.nextToken();
        if (token != XContentParser.Token.FIELD_NAME) {
            throw new QueryParsingException(parseContext, "[match] query malformed, no field");
        }
        String fieldName = parser.currentName();

        Object value = null;
        float boost = 1.0f;
        MatchQuery matchQuery = new MatchQuery(parseContext);
        String minimumShouldMatch = null;
        String queryName = null;

        token = parser.nextToken();
        if (token == XContentParser.Token.START_OBJECT) {
            // Long form: { "field": { "query": ..., <options> } }
            String currentFieldName = null;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if ("query".equals(currentFieldName)) {
                        value = parser.objectText();
                    } else if ("type".equals(currentFieldName)) {
                        String tStr = parser.text();
                        if ("boolean".equals(tStr)) {
                            type = MatchQuery.Type.BOOLEAN;
                        } else if ("phrase".equals(tStr)) {
                            type = MatchQuery.Type.PHRASE;
                        } else if ("phrase_prefix".equals(tStr) || "phrasePrefix".equals(tStr)) {
                            // Fix: the camelCase variant must be compared against the
                            // "type" value (tStr); the original compared it against
                            // currentFieldName (which is "type" here), so
                            // "phrasePrefix" could never match.
                            type = MatchQuery.Type.PHRASE_PREFIX;
                        } else {
                            throw new QueryParsingException(parseContext, "[match] query does not support type " + tStr);
                        }
                    } else if ("analyzer".equals(currentFieldName)) {
                        String analyzer = parser.text();
                        if (parseContext.analysisService().analyzer(analyzer) == null) {
                            throw new QueryParsingException(parseContext, "[match] analyzer [" + parser.text() + "] not found");
                        }
                        matchQuery.setAnalyzer(analyzer);
                    } else if ("boost".equals(currentFieldName)) {
                        boost = parser.floatValue();
                    } else if ("slop".equals(currentFieldName) || "phrase_slop".equals(currentFieldName) || "phraseSlop".equals(currentFieldName)) {
                        matchQuery.setPhraseSlop(parser.intValue());
                    } else if (parseContext.parseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
                        matchQuery.setFuzziness(Fuzziness.parse(parser));
                    } else if ("prefix_length".equals(currentFieldName) || "prefixLength".equals(currentFieldName)) {
                        matchQuery.setFuzzyPrefixLength(parser.intValue());
                    } else if ("max_expansions".equals(currentFieldName) || "maxExpansions".equals(currentFieldName)) {
                        matchQuery.setMaxExpansions(parser.intValue());
                    } else if ("operator".equals(currentFieldName)) {
                        String op = parser.text();
                        if ("or".equalsIgnoreCase(op)) {
                            matchQuery.setOccur(BooleanClause.Occur.SHOULD);
                        } else if ("and".equalsIgnoreCase(op)) {
                            matchQuery.setOccur(BooleanClause.Occur.MUST);
                        } else {
                            throw new QueryParsingException(parseContext, "text query requires operator to be either 'and' or 'or', not ["
                                    + op + "]");
                        }
                    } else if ("minimum_should_match".equals(currentFieldName) || "minimumShouldMatch".equals(currentFieldName)) {
                        minimumShouldMatch = parser.textOrNull();
                    } else if ("fuzzy_rewrite".equals(currentFieldName) || "fuzzyRewrite".equals(currentFieldName)) {
                        matchQuery.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), parser.textOrNull(), null));
                    } else if ("fuzzy_transpositions".equals(currentFieldName)) {
                        matchQuery.setTranspositions(parser.booleanValue());
                    } else if ("lenient".equals(currentFieldName)) {
                        matchQuery.setLenient(parser.booleanValue());
                    } else if ("cutoff_frequency".equals(currentFieldName)) {
                        matchQuery.setCommonTermsCutoff(parser.floatValue());
                    } else if ("zero_terms_query".equals(currentFieldName)) {
                        String zeroTermsDocs = parser.text();
                        if ("none".equalsIgnoreCase(zeroTermsDocs)) {
                            matchQuery.setZeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE);
                        } else if ("all".equalsIgnoreCase(zeroTermsDocs)) {
                            matchQuery.setZeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL);
                        } else {
                            throw new QueryParsingException(parseContext, "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]");
                        }
                    } else if ("_name".equals(currentFieldName)) {
                        queryName = parser.text();
                    } else {
                        throw new QueryParsingException(parseContext, "[match] query does not support [" + currentFieldName + "]");
                    }
                }
            }
            parser.nextToken();
        } else {
            // Short form: { "field": "query text" } — no options allowed.
            value = parser.objectText();
            // move to the next token
            token = parser.nextToken();
            if (token != XContentParser.Token.END_OBJECT) {
                throw new QueryParsingException(parseContext,
                        "[match] query parsed in simplified form, with direct field name, but included more options than just the field name, possibly use its 'options' form, with 'query' element?");
            }
        }

        if (value == null) {
            throw new QueryParsingException(parseContext, "No text specified for text query");
        }

        Query query = matchQuery.parse(type, fieldName, value);
        if (query == null) {
            return null;
        }

        // minimum_should_match applies to boolean queries directly and to
        // common-terms queries via their low-frequency clause.
        if (query instanceof BooleanQuery) {
            Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
        } else if (query instanceof ExtendedCommonTermsQuery) {
            ((ExtendedCommonTermsQuery)query).setLowFreqMinimumNumberShouldMatch(minimumShouldMatch);
        }
        query.setBoost(boost);
        if (queryName != null) {
            parseContext.addNamedQuery(queryName, query);
        }
        return query;
    }
}
| apache-2.0 |
vhamine/azure-sdk-for-net | src/HDInsight/Microsoft.Hadoop.Avro/AvroSerializer.Static.cs | 11037 | // Copyright (c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.
namespace Microsoft.Hadoop.Avro
{
using System;
using System.Diagnostics;
using System.Globalization;
using System.Runtime.Serialization;
using Microsoft.Hadoop.Avro.Schema;
using Microsoft.Hadoop.Avro.Serializers;
/// <summary>
/// A factory class to create <see cref="Microsoft.Hadoop.Avro.IAvroSerializer{T}"/>.
/// </summary>
public static class AvroSerializer
{
        // Caches of generated serializers so repeated Create calls with the same
        // parameters reuse the runtime-generated code instead of regenerating it.
        // TypedSerializers is keyed for the strongly-typed path; UntypedSerializers
        // for the schema-string (generic) path.
        private static readonly Cache<Tuple<string, Type, AvroSerializerSettings>, GeneratedSerializer> TypedSerializers
            = new Cache<Tuple<string, Type, AvroSerializerSettings>, GeneratedSerializer>();

        private static readonly Cache<Tuple<string, string>, GeneratedSerializer> UntypedSerializers
            = new Cache<Tuple<string, string>, GeneratedSerializer>();
        /// <summary>
        /// Creates a serializer that allows serializing types attributed with <see cref="T:System.Runtime.Serialization.DataContractAttribute" />.
        /// </summary>
        /// <typeparam name="T">The type of objects to serialize.</typeparam>
        /// <returns>
        /// A serializer.
        /// </returns>
        /// <remarks>
        /// This function can cause in-memory runtime code generation if the type <typeparamref name="T"/> has not been used before.
        /// Otherwise, a cached version of the serializer is given to the user.
        /// </remarks>
        public static IAvroSerializer<T> Create<T>()
        {
            // Delegate to the settings-based overload with default settings.
            return Create<T>(new AvroSerializerSettings());
        }
        /// <summary>
        /// Creates a serializer that allows serializing types attributed with <see cref="T:System.Runtime.Serialization.DataContractAttribute" />.
        /// </summary>
        /// <typeparam name="T">The type of objects to serialize.</typeparam>
        /// <param name="settings">The serialization settings.</param>
        /// <returns> A serializer. </returns>
        /// <exception cref="System.ArgumentNullException">Thrown if <paramref name="settings"/> is null.</exception>
        /// <remarks>
        /// This function can cause in-memory runtime code generation if the type <typeparamref name="T"/> has not used seen before.
        /// Otherwise, a cached version of the serializer is given to the user.
        /// </remarks>
        public static IAvroSerializer<T> Create<T>(AvroSerializerSettings settings)
        {
            if (settings == null)
            {
                throw new ArgumentNullException("settings");
            }
            // Empty writer schema means "derive the schema from T".
            return CreateForCore<T>(string.Empty, settings);
        }
        /// <summary>
        /// Creates a deserializer for the data that was written with the specified <paramref name="writerSchema">schema</paramref>.
        /// </summary>
        /// <typeparam name="T">The type of objects to deserialize.</typeparam>
        /// <param name="writerSchema">The writer schema.</param>
        /// <param name="settings">The settings.</param>
        /// <returns>
        /// A serializer.
        /// </returns>
        /// <exception cref="System.ArgumentNullException">Thrown when <paramref name="writerSchema"/> or <paramref name="settings" /> is null or empty.</exception>
        /// <remarks>
        /// This function can cause in-memory runtime code generation if the type <typeparamref name="T"/> has not been used before.
        /// Otherwise, a cached version of the serializer is given to the user.
        /// </remarks>
        public static IAvroSerializer<T> CreateDeserializerOnly<T>(string writerSchema, AvroSerializerSettings settings)
        {
            // NOTE(review): a null *or empty* writerSchema triggers ArgumentNullException here.
            if (string.IsNullOrEmpty(writerSchema))
            {
                throw new ArgumentNullException("writerSchema");
            }

            if (settings == null)
            {
                throw new ArgumentNullException("settings");
            }
            return CreateForCore<T>(writerSchema, settings);
        }
/// <summary>
/// Creates a generic serializer for the specified schema.
/// A resulted serializer can serialize data in AvroRecord hierarchy. For more details, please see <b>Remarks</b> section of
/// <see cref="Microsoft.Hadoop.Avro.IAvroSerializer{T}"/> interface.
/// </summary>
/// <param name="schema">The schema.</param>
/// <returns>A serializer.</returns>
/// <exception cref="System.ArgumentNullException">Thrown when <paramref name="schema"/> is null.</exception>
public static IAvroSerializer<object> CreateGeneric(string schema)
{
if (string.IsNullOrEmpty(schema))
{
throw new ArgumentNullException("schema");
}
return CreateForCore<object>(string.Empty, schema);
}
/// <summary>
/// Creates a generic deserializer for the data that was written with the specified <paramref name="writerSchema">schema</paramref>.
/// Should be used, when reading the data written using an older version of the schema.
/// </summary>
/// <param name="writerSchema">The writer schema.</param>
/// <param name="readerSchema">The reader schema.</param>
/// <returns> A deserializer.</returns>
/// <exception cref="System.ArgumentNullException"> Thrown if <paramref name="writerSchema"/> or <paramref name="readerSchema"/> is null.</exception>
public static IAvroSerializer<object> CreateGenericDeserializerOnly(string writerSchema, string readerSchema)
{
if (string.IsNullOrEmpty(writerSchema))
{
throw new ArgumentNullException("writerSchema");
}
if (string.IsNullOrEmpty(readerSchema))
{
throw new ArgumentNullException("readerSchema");
}
return CreateForCore<object>(writerSchema, readerSchema);
}
private static AvroSerializer<T> CreateForCore<T>(string writerSchema, AvroSerializerSettings settings)
{
if (settings == null)
{
throw new ArgumentNullException("settings");
}
var key = Tuple.Create(writerSchema, typeof(T), settings);
var serializer = TypedSerializers.Get(key);
if (serializer != null && settings.UseCache)
{
return new AvroSerializer<T>(serializer);
}
var reader = new ReflectionSchemaBuilder(settings).BuildSchema(typeof(T));
var generator = new SerializerGenerator();
var builderGenerator = new SerializerAssigningVisitor(settings);
if (string.IsNullOrEmpty(writerSchema))
{
builderGenerator.Visit(reader);
serializer = new GeneratedSerializer
{
WriterSchema = reader,
ReaderSchema = reader,
Serialize = settings.GenerateSerializer
? generator.GenerateSerializer<T>(reader)
: null,
Deserialize = settings.GenerateDeserializer
? generator.GenerateDeserializer<T>(reader)
: null
};
}
else
{
var writer = new JsonSchemaBuilder().BuildSchema(writerSchema);
var matchedSchema = new EvolutionSchemaBuilder().Build(writer, reader);
if (matchedSchema == null)
{
throw new SerializationException(string.Format(CultureInfo.InvariantCulture, "Writer schema does not match reader schema."));
}
builderGenerator.Visit(matchedSchema);
serializer = new GeneratedSerializer
{
WriterSchema = writer,
ReaderSchema = reader,
Deserialize = generator.GenerateDeserializer<T>(matchedSchema)
};
}
if (settings.UseCache)
{
TypedSerializers.Add(key, serializer);
}
return new AvroSerializer<T>(serializer);
}
private static AvroSerializer<T> CreateForCore<T>(string writerSchema, string readerSchema)
{
var key = Tuple.Create(writerSchema, readerSchema);
var serializer = UntypedSerializers.Get(key);
if (serializer != null)
{
return new AvroSerializer<T>(serializer);
}
var reader = new JsonSchemaBuilder().BuildSchema(readerSchema);
var builderGenerator = new SerializerAssigningVisitor(new AvroSerializerSettings());
if (string.IsNullOrEmpty(writerSchema))
{
builderGenerator.Visit(reader);
Action<IEncoder, T> s = (e, obj) => reader.Serializer.Serialize(e, obj);
Func<IDecoder, T> d = decode =>
{
return (T)reader.Serializer.Deserialize(decode);
};
serializer = new GeneratedSerializer
{
WriterSchema = reader,
ReaderSchema = reader,
Serialize = s,
Deserialize = d
};
}
else
{
var writer = new JsonSchemaBuilder().BuildSchema(writerSchema);
var matchedSchema = new EvolutionSchemaBuilder().Build(writer, reader);
if (matchedSchema == null)
{
throw new SerializationException(string.Format(CultureInfo.InvariantCulture, "Writer schema does not match reader schema."));
}
builderGenerator.Visit(matchedSchema);
Func<IDecoder, T> d = decode => (T)matchedSchema.Serializer.Deserialize(decode);
serializer = new GeneratedSerializer
{
WriterSchema = writer,
ReaderSchema = reader,
Deserialize = d
};
}
UntypedSerializers.Add(key, serializer);
return new AvroSerializer<T>(serializer);
}
internal static int CacheEntriesCount
{
get { return TypedSerializers.Count; }
}
}
}
| apache-2.0 |
# Spec fixture: a minimal cask that deliberately omits a `license` stanza,
# so tests can exercise the missing-license code path.
test_cask 'missing-license' do
  version '1.2.3'
  url 'http://localhost/something.dmg'
end
| bsd-2-clause |
sashberd/cdnjs | ajax/libs/zingchart/0.0.2/modules/zingchart-html5-varea-min.js | 1085 | /*
All of the code within the ZingChart software is developed and copyrighted by PINT, Inc., and may not be copied,
replicated, or used in any other software or application without prior permission from PINT. All usage must coincide with the
ZingChart End User License Agreement which can be requested by email at support@zingchart.com.
Build 0.141230
*/
eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('0.h.m("d");0.o=0.n.e({$i:4(a){f b=3;b.b(a);b.q="d";b.1.p="r";b.l=2 0.6(b);b.1[0.c[j]]=9;b.1[0.c[k]]=9;b.1["t-A"]=g},C:4(b,a){D(b){8"x":5 2 0.s(3);7;8"y":5 2 0.u(3);7}}});0.6=0.v.e({B:4(b){f a=2 0.w(3);a.z=g;5 a}});',40,40,'ZC|AK|new|this|function|return|A43|break|case|false|||_|varea|BK|var|true|T5||23|56|AV|push|LI|A74|layout|AA|yx|SG|enable|SH|K6|O1|||PF|scroll|AA0|KV|switch'.split('|'),0,{}))
| mit |
aakb/replicator | sites/all/modules/contrib/jquery_ui/jquery.ui/tests/unit/resizable/resizable_tickets.js | 90 | /*
* resizable_tickets.js
*/
// Module registration for the resizable "tickets" QUnit suite.
(function(jq) {
	module("resizable: tickets");
})(jQuery);
| gpl-2.0 |
jiangzhuo/kbengine | kbe/src/lib/python/Lib/ipaddress.py | 72856 | # Copyright 2007 Google Inc.
# Licensed to PSF under a Contributor Agreement.
"""A fast, lightweight IPv4/IPv6 manipulation library in Python.
This library is used to create/poke/manipulate IPv4 and IPv6 addresses
and networks.
"""
__version__ = '1.0'


import functools

# Address widths, in bits, for the two supported IP versions.
IPV4LENGTH = 32
IPV6LENGTH = 128
class AddressValueError(ValueError):
    """A Value Error related to the address (malformed address input)."""
class NetmaskValueError(ValueError):
    """A Value Error related to the netmask (malformed netmask/prefix input)."""
def ip_address(address):
    """Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP address.  Either IPv4 or
          IPv6 addresses may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Address or IPv6Address object.

    Raises:
        ValueError: if the *address* passed isn't either a v4 or a v6
          address

    """
    # Try each family in turn; the first constructor that accepts the input
    # wins.  IPv4 is attempted first, so small integers parse as IPv4.
    for klass in (IPv4Address, IPv6Address):
        try:
            return klass(address)
        except (AddressValueError, NetmaskValueError):
            pass

    raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %
                     address)
def ip_network(address, strict=True):
    """Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP network.  Either IPv4 or
          IPv6 networks may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.
        strict: A boolean; if True, reject networks with host bits set.

    Returns:
        An IPv4Network or IPv6Network object.

    Raises:
        ValueError: if the string passed isn't either a v4 or a v6
          address. Or if the network has host bits set.

    """
    # Attempt the v4 parser first, then v6; whichever accepts the input
    # determines the returned type.
    for klass in (IPv4Network, IPv6Network):
        try:
            return klass(address, strict)
        except (AddressValueError, NetmaskValueError):
            pass

    raise ValueError('%r does not appear to be an IPv4 or IPv6 network' %
                     address)
def ip_interface(address):
    """Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP address.  Either IPv4 or
          IPv6 addresses may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Interface or IPv6Interface object.

    Raises:
        ValueError: if the string passed isn't either a v4 or a v6
          address.

    Notes:
        The IPv?Interface classes describe an Address on a particular
        Network, so they're basically a combination of both the Address
        and Network classes.

    """
    # Same dispatch strategy as ip_address()/ip_network(): let each
    # family's constructor decide whether it can parse the input.
    for klass in (IPv4Interface, IPv6Interface):
        try:
            return klass(address)
        except (AddressValueError, NetmaskValueError):
            pass

    raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' %
                     address)
def v4_int_to_packed(address):
    """Represent an address as 4 packed bytes in network (big-endian) order.

    Args:
        address: An integer representation of an IPv4 IP address.

    Returns:
        The integer address packed as 4 bytes in network (big-endian) order.

    Raises:
        ValueError: If the integer is negative or too large to be an
          IPv4 IP address.

    """
    try:
        return address.to_bytes(4, 'big')
    except OverflowError:
        # int.to_bytes raises OverflowError for negative values and values
        # that do not fit in 4 bytes; report both as ValueError.  The bare
        # `except:` previously used here also masked unrelated errors such
        # as AttributeError on non-int input.
        raise ValueError("Address negative or too large for IPv4")
def v6_int_to_packed(address):
    """Represent an address as 16 packed bytes in network (big-endian) order.

    Args:
        address: An integer representation of an IPv6 IP address.

    Returns:
        The integer address packed as 16 bytes in network (big-endian) order.

    Raises:
        ValueError: If the integer is negative or too large to be an
          IPv6 IP address.

    """
    try:
        return address.to_bytes(16, 'big')
    except OverflowError:
        # Narrowed from a bare `except:` — see v4_int_to_packed.
        raise ValueError("Address negative or too large for IPv6")
def _split_optional_netmask(address):
"""Helper to split the netmask and raise AddressValueError if needed"""
addr = str(address).split('/')
if len(addr) > 2:
raise AddressValueError("Only one '/' permitted in %r" % address)
return addr
def _find_address_range(addresses):
"""Find a sequence of IPv#Address.
Args:
addresses: a list of IPv#Address objects.
Returns:
A tuple containing the first and last IP addresses in the sequence.
"""
first = last = addresses[0]
for ip in addresses[1:]:
if ip._ip == last._ip + 1:
last = ip
else:
break
return (first, last)
def _count_righthand_zero_bits(number, bits):
"""Count the number of zero bits on the right hand side.
Args:
number: an integer.
bits: maximum number of bits to count.
Returns:
The number of zero bits on the right hand side of the number.
"""
if number == 0:
return bits
for i in range(bits):
if (number >> i) & 1:
return i
# All bits of interest were zero, even if there are more in the number
return bits
def summarize_address_range(first, last):
    """Summarize a network range given the first and last IP addresses.

    Example:
        >>> list(summarize_address_range(IPv4Address('192.0.2.0'),
        ...                              IPv4Address('192.0.2.130')))
        ...                                #doctest: +NORMALIZE_WHITESPACE
        [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'),
         IPv4Network('192.0.2.130/32')]

    Args:
        first: the first IPv4Address or IPv6Address in the range.
        last: the last IPv4Address or IPv6Address in the range.

    Returns:
        An iterator of the summarized IPv(4|6) network objects.

    Raise:
        TypeError:
            If the first and last objects are not IP addresses.
            If the first and last objects are not the same version.
        ValueError:
            If the last object is not greater than the first.
            If the version of the first address is not 4 or 6.

    """
    if (not (isinstance(first, _BaseAddress) and
             isinstance(last, _BaseAddress))):
        raise TypeError('first and last must be IP addresses, not networks')
    if first.version != last.version:
        raise TypeError("%s and %s are not of the same version" % (
                        first, last))
    if first > last:
        raise ValueError('last IP address must be greater than first')

    if first.version == 4:
        ip = IPv4Network
    elif first.version == 6:
        ip = IPv6Network
    else:
        raise ValueError('unknown IP version')

    ip_bits = first._max_prefixlen
    first_int = first._ip
    last_int = last._ip
    while first_int <= last_int:
        # The block emitted at each step is the largest CIDR block that
        # (a) is aligned at first_int and (b) does not extend past last_int.
        nbits = min(_count_righthand_zero_bits(first_int, ip_bits),
                    (last_int - first_int + 1).bit_length() - 1)
        net = ip('%s/%d' % (first, ip_bits - nbits))
        yield net
        first_int += 1 << nbits
        if first_int - 1 == ip._ALL_ONES:
            # Reached the top of the address space; stop before wrapping.
            break
        first = first.__class__(first_int)
def _collapse_addresses_recursive(addresses):
    """Loops through the addresses, collapsing concurrent netblocks.

    Example:

        ip1 = IPv4Network('192.0.2.0/26')
        ip2 = IPv4Network('192.0.2.64/26')
        ip3 = IPv4Network('192.0.2.128/26')
        ip4 = IPv4Network('192.0.2.192/26')

        _collapse_addresses_recursive([ip1, ip2, ip3, ip4]) ->
          [IPv4Network('192.0.2.0/24')]

        This shouldn't be called directly; it is called via
          collapse_addresses([]).

    Args:
        addresses: A list of IPv4Network's or IPv6Network's

    Returns:
        A list of IPv4Network's or IPv6Network's depending on what we were
        passed.

    """
    # Repeated passes: each pass merges adjacent/contained networks; we are
    # done when a full pass makes no change.  Assumes the input is sorted
    # (collapse_addresses() sorts before calling).
    while True:
        last_addr = None
        ret_array = []
        optimized = False

        for cur_addr in addresses:
            if not ret_array:
                last_addr = cur_addr
                ret_array.append(cur_addr)
            elif (cur_addr.network_address >= last_addr.network_address and
                cur_addr.broadcast_address <= last_addr.broadcast_address):
                # cur_addr is wholly contained in last_addr: drop it.
                optimized = True
            elif cur_addr == list(last_addr.supernet().subnets())[1]:
                # cur_addr is last_addr's sibling: replace both with their
                # common supernet.
                ret_array[-1] = last_addr = last_addr.supernet()
                optimized = True
            else:
                last_addr = cur_addr
                ret_array.append(cur_addr)

        addresses = ret_array
        if not optimized:
            return addresses
def collapse_addresses(addresses):
    """Collapse a list of IP objects.

    Example:
        collapse_addresses([IPv4Network('192.0.2.0/25'),
                            IPv4Network('192.0.2.128/25')]) ->
                           [IPv4Network('192.0.2.0/24')]

    Args:
        addresses: An iterator of IPv4Network or IPv6Network objects.

    Returns:
        An iterator of the collapsed IPv(4|6)Network objects.

    Raises:
        TypeError: If passed a list of mixed version objects.

    """
    i = 0
    addrs = []
    ips = []
    nets = []

    # split IP addresses and networks
    for ip in addresses:
        if isinstance(ip, _BaseAddress):
            # A plain address.  All entries must share one IP version.
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                                ip, ips[-1]))
            ips.append(ip)
        elif ip._prefixlen == ip._max_prefixlen:
            # A /32 (or /128) network is treated as a single address.
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                                ip, ips[-1]))
            try:
                ips.append(ip.ip)
            except AttributeError:
                # No .ip attribute (a Network rather than an Interface).
                ips.append(ip.network_address)
        else:
            # A genuine (multi-address) network.
            if nets and nets[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                                ip, nets[-1]))
            nets.append(ip)

    # sort and dedup
    ips = sorted(set(ips))
    nets = sorted(set(nets))

    # Convert each maximal run of consecutive addresses into CIDR blocks.
    while i < len(ips):
        (first, last) = _find_address_range(ips[i:])
        i = ips.index(last) + 1
        addrs.extend(summarize_address_range(first, last))

    return iter(_collapse_addresses_recursive(sorted(
        addrs + nets, key=_BaseNetwork._get_networks_key)))
def get_mixed_type_key(obj):
    """Return a key suitable for sorting between networks and addresses.

    Address and Network objects are not sortable by default; they're
    fundamentally different, so the expression

        IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24')

    doesn't make any sense.  There are some times however, where you may wish
    to have ipaddress sort these for you anyway. If you need to do this, you
    can use this function as the key= argument to sorted().

    Args:
        obj: either a Network or Address object.
    Returns:
        appropriate key.

    """
    # Dispatch on the object's base class; anything else is unsortable.
    for base, key_method in ((_BaseNetwork, '_get_networks_key'),
                             (_BaseAddress, '_get_address_key')):
        if isinstance(obj, base):
            return getattr(obj, key_method)()
    return NotImplemented
class _TotalOrderingMixin:
# Helper that derives the other comparison operations from
# __lt__ and __eq__
# We avoid functools.total_ordering because it doesn't handle
# NotImplemented correctly yet (http://bugs.python.org/issue10042)
def __eq__(self, other):
raise NotImplementedError
def __ne__(self, other):
equal = self.__eq__(other)
if equal is NotImplemented:
return NotImplemented
return not equal
def __lt__(self, other):
raise NotImplementedError
def __le__(self, other):
less = self.__lt__(other)
if less is NotImplemented or not less:
return self.__eq__(other)
return less
def __gt__(self, other):
less = self.__lt__(other)
if less is NotImplemented:
return NotImplemented
equal = self.__eq__(other)
if equal is NotImplemented:
return NotImplemented
return not (less or equal)
def __ge__(self, other):
less = self.__lt__(other)
if less is NotImplemented:
return NotImplemented
return not less
class _IPAddressBase(_TotalOrderingMixin):

    """The mother class.

    Shared machinery for address and network objects of both IP versions.
    Relies on attributes supplied by concrete subclasses: _version,
    _ALL_ONES, _max_prefixlen and _ip_int_from_string (not defined here).
    """

    @property
    def exploded(self):
        """Return the longhand version of the IP address as a string."""
        return self._explode_shorthand_ip_string()

    @property
    def compressed(self):
        """Return the shorthand version of the IP address as a string."""
        return str(self)

    @property
    def version(self):
        # Abstract; concrete v4/v6 subclasses provide the version number.
        msg = '%200s has no version specified' % (type(self),)
        raise NotImplementedError(msg)

    def _check_int_address(self, address):
        # Reject integers outside the valid range for this IP version.
        if address < 0:
            msg = "%d (< 0) is not permitted as an IPv%d address"
            raise AddressValueError(msg % (address, self._version))
        if address > self._ALL_ONES:
            msg = "%d (>= 2**%d) is not permitted as an IPv%d address"
            raise AddressValueError(msg % (address, self._max_prefixlen,
                                           self._version))

    def _check_packed_address(self, address, expected_len):
        # Validate the byte length of a packed (binary) address.
        address_len = len(address)
        if address_len != expected_len:
            msg = "%r (len %d != %d) is not permitted as an IPv%d address"
            raise AddressValueError(msg % (address, address_len,
                                           expected_len, self._version))

    def _ip_int_from_prefix(self, prefixlen):
        """Turn the prefix length into a bitwise netmask

        Args:
            prefixlen: An integer, the prefix length.

        Returns:
            An integer.

        """
        return self._ALL_ONES ^ (self._ALL_ONES >> prefixlen)

    def _prefix_from_ip_int(self, ip_int):
        """Return prefix length from the bitwise netmask.

        Args:
            ip_int: An integer, the netmask in expanded bitwise format

        Returns:
            An integer, the prefix length.

        Raises:
            ValueError: If the input intermingles zeroes & ones
        """
        trailing_zeroes = _count_righthand_zero_bits(ip_int,
                                                     self._max_prefixlen)
        prefixlen = self._max_prefixlen - trailing_zeroes
        # A valid netmask is a contiguous run of ones followed by zeroes;
        # verify that all bits above the trailing zeroes are ones.
        leading_ones = ip_int >> trailing_zeroes
        all_ones = (1 << prefixlen) - 1
        if leading_ones != all_ones:
            byteslen = self._max_prefixlen // 8
            details = ip_int.to_bytes(byteslen, 'big')
            msg = 'Netmask pattern %r mixes zeroes & ones'
            raise ValueError(msg % details)
        return prefixlen

    def _report_invalid_netmask(self, netmask_str):
        msg = '%r is not a valid netmask' % netmask_str
        # 'from None' suppresses the original parsing exception so callers
        # see only the NetmaskValueError.
        raise NetmaskValueError(msg) from None

    def _prefix_from_prefix_string(self, prefixlen_str):
        """Return prefix length from a numeric string

        Args:
            prefixlen_str: The string to be converted

        Returns:
            An integer, the prefix length.

        Raises:
            NetmaskValueError: If the input is not a valid netmask
        """
        # int allows a leading +/- as well as surrounding whitespace,
        # so we ensure that isn't the case
        if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str):
            self._report_invalid_netmask(prefixlen_str)
        try:
            prefixlen = int(prefixlen_str)
        except ValueError:
            self._report_invalid_netmask(prefixlen_str)
        if not (0 <= prefixlen <= self._max_prefixlen):
            self._report_invalid_netmask(prefixlen_str)
        return prefixlen

    def _prefix_from_ip_string(self, ip_str):
        """Turn a netmask/hostmask string into a prefix length

        Args:
            ip_str: The netmask/hostmask to be converted

        Returns:
            An integer, the prefix length.

        Raises:
            NetmaskValueError: If the input is not a valid netmask/hostmask
        """
        # Parse the netmask/hostmask like an IP address.
        try:
            ip_int = self._ip_int_from_string(ip_str)
        except AddressValueError:
            self._report_invalid_netmask(ip_str)

        # Try matching a netmask (this would be /1*0*/ as a bitwise regexp).
        # Note that the two ambiguous cases (all-ones and all-zeroes) are
        # treated as netmasks.
        try:
            return self._prefix_from_ip_int(ip_int)
        except ValueError:
            pass

        # Invert the bits, and try matching a /0+1+/ hostmask instead.
        ip_int ^= self._ALL_ONES
        try:
            return self._prefix_from_ip_int(ip_int)
        except ValueError:
            self._report_invalid_netmask(ip_str)
class _BaseAddress(_IPAddressBase):

    """A generic IP object.

    This IP class contains the version independent methods which are
    used by single IP addresses.
    """

    def __init__(self, address):
        # A lone address must not carry a "/prefix" part.  Packed (bytes)
        # input is exempt because a b'/' byte is legitimate address data.
        if (not isinstance(address, bytes)
            and '/' in str(address)):
            raise AddressValueError("Unexpected '/' in %r" % address)

    def __int__(self):
        return self._ip

    def __eq__(self, other):
        try:
            return (self._ip == other._ip
                    and self._version == other._version)
        except AttributeError:
            # Not an address-like object; let the other operand decide.
            return NotImplemented

    def __lt__(self, other):
        # NOTE(review): other._version is read before the isinstance check,
        # so comparing with a non-address object raises AttributeError here
        # rather than the TypeError below — confirm this is intended.
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same version' % (
                            self, other))
        if not isinstance(other, _BaseAddress):
            raise TypeError('%s and %s are not of the same type' % (
                            self, other))
        if self._ip != other._ip:
            return self._ip < other._ip
        return False

    # Shorthand for Integer addition and subtraction. This is not
    # meant to ever support addition/subtraction of addresses.
    def __add__(self, other):
        if not isinstance(other, int):
            return NotImplemented
        return self.__class__(int(self) + other)

    def __sub__(self, other):
        if not isinstance(other, int):
            return NotImplemented
        return self.__class__(int(self) - other)

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, str(self))

    def __str__(self):
        return str(self._string_from_ip_int(self._ip))

    def __hash__(self):
        # Hash is derived from the hex string of the integer address value.
        return hash(hex(int(self._ip)))

    def _get_address_key(self):
        return (self._version, self)
class _BaseNetwork(_IPAddressBase):
"""A generic IP network object.
This IP class contains the version independent methods which are
used by networks.
"""
def __init__(self, address):
self._cache = {}
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, str(self))
def __str__(self):
return '%s/%d' % (self.network_address, self.prefixlen)
def hosts(self):
"""Generate Iterator over usable hosts in a network.
This is like __iter__ except it doesn't return the network
or broadcast addresses.
"""
network = int(self.network_address)
broadcast = int(self.broadcast_address)
for x in range(network + 1, broadcast):
yield self._address_class(x)
def __iter__(self):
network = int(self.network_address)
broadcast = int(self.broadcast_address)
for x in range(network, broadcast + 1):
yield self._address_class(x)
def __getitem__(self, n):
network = int(self.network_address)
broadcast = int(self.broadcast_address)
if n >= 0:
if network + n > broadcast:
raise IndexError
return self._address_class(network + n)
else:
n += 1
if broadcast + n < network:
raise IndexError
return self._address_class(broadcast + n)
def __lt__(self, other):
if self._version != other._version:
raise TypeError('%s and %s are not of the same version' % (
self, other))
if not isinstance(other, _BaseNetwork):
raise TypeError('%s and %s are not of the same type' % (
self, other))
if self.network_address != other.network_address:
return self.network_address < other.network_address
if self.netmask != other.netmask:
return self.netmask < other.netmask
return False
def __eq__(self, other):
try:
return (self._version == other._version and
self.network_address == other.network_address and
int(self.netmask) == int(other.netmask))
except AttributeError:
return NotImplemented
def __hash__(self):
return hash(int(self.network_address) ^ int(self.netmask))
def __contains__(self, other):
# always false if one is v4 and the other is v6.
if self._version != other._version:
return False
# dealing with another network.
if isinstance(other, _BaseNetwork):
return False
# dealing with another address
else:
# address
return (int(self.network_address) <= int(other._ip) <=
int(self.broadcast_address))
def overlaps(self, other):
"""Tell if self is partly contained in other."""
return self.network_address in other or (
self.broadcast_address in other or (
other.network_address in self or (
other.broadcast_address in self)))
@property
def broadcast_address(self):
x = self._cache.get('broadcast_address')
if x is None:
x = self._address_class(int(self.network_address) |
int(self.hostmask))
self._cache['broadcast_address'] = x
return x
@property
def hostmask(self):
x = self._cache.get('hostmask')
if x is None:
x = self._address_class(int(self.netmask) ^ self._ALL_ONES)
self._cache['hostmask'] = x
return x
@property
def with_prefixlen(self):
return '%s/%d' % (self.network_address, self._prefixlen)
@property
def with_netmask(self):
return '%s/%s' % (self.network_address, self.netmask)
@property
def with_hostmask(self):
return '%s/%s' % (self.network_address, self.hostmask)
@property
def num_addresses(self):
"""Number of hosts in the current subnet."""
return int(self.broadcast_address) - int(self.network_address) + 1
@property
def _address_class(self):
# Returning bare address objects (rather than interfaces) allows for
# more consistent behaviour across the network address, broadcast
# address and individual host addresses.
msg = '%200s has no associated address class' % (type(self),)
raise NotImplementedError(msg)
@property
def prefixlen(self):
return self._prefixlen
def address_exclude(self, other):
"""Remove an address from a larger block.
For example:
addr1 = ip_network('192.0.2.0/28')
addr2 = ip_network('192.0.2.1/32')
addr1.address_exclude(addr2) =
[IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]
or IPv6:
addr1 = ip_network('2001:db8::1/32')
addr2 = ip_network('2001:db8::1/128')
addr1.address_exclude(addr2) =
[ip_network('2001:db8::1/128'),
ip_network('2001:db8::2/127'),
ip_network('2001:db8::4/126'),
ip_network('2001:db8::8/125'),
...
ip_network('2001:db8:8000::/33')]
Args:
other: An IPv4Network or IPv6Network object of the same type.
Returns:
An iterator of the IPv(4|6)Network objects which is self
minus other.
Raises:
TypeError: If self and other are of differing address
versions, or if other is not a network object.
ValueError: If other is not completely contained by self.
"""
if not self._version == other._version:
raise TypeError("%s and %s are not of the same version" % (
self, other))
if not isinstance(other, _BaseNetwork):
raise TypeError("%s is not a network object" % other)
if not (other.network_address >= self.network_address and
other.broadcast_address <= self.broadcast_address):
raise ValueError('%s not contained in %s' % (other, self))
if other == self:
raise StopIteration
# Make sure we're comparing the network of other.
other = other.__class__('%s/%s' % (other.network_address,
other.prefixlen))
s1, s2 = self.subnets()
while s1 != other and s2 != other:
if (other.network_address >= s1.network_address and
other.broadcast_address <= s1.broadcast_address):
yield s2
s1, s2 = s1.subnets()
elif (other.network_address >= s2.network_address and
other.broadcast_address <= s2.broadcast_address):
yield s1
s1, s2 = s2.subnets()
else:
# If we got here, there's a bug somewhere.
raise AssertionError('Error performing exclusion: '
's1: %s s2: %s other: %s' %
(s1, s2, other))
if s1 == other:
yield s2
elif s2 == other:
yield s1
else:
# If we got here, there's a bug somewhere.
raise AssertionError('Error performing exclusion: '
's1: %s s2: %s other: %s' %
(s1, s2, other))
def compare_networks(self, other):
"""Compare two IP objects.
This is only concerned about the comparison of the integer
representation of the network addresses. This means that the
host bits aren't considered at all in this method. If you want
to compare host bits, you can easily enough do a
'HostA._ip < HostB._ip'
Args:
other: An IP object.
Returns:
If the IP versions of self and other are the same, returns:
-1 if self < other:
eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
IPv6Network('2001:db8::1000/124') <
IPv6Network('2001:db8::2000/124')
0 if self == other
eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
IPv6Network('2001:db8::1000/124') ==
IPv6Network('2001:db8::1000/124')
1 if self > other
eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')
IPv6Network('2001:db8::2000/124') >
IPv6Network('2001:db8::1000/124')
Raises:
TypeError if the IP versions are different.
"""
# does this need to raise a ValueError?
if self._version != other._version:
raise TypeError('%s and %s are not of the same type' % (
self, other))
# self._version == other._version below here:
if self.network_address < other.network_address:
return -1
if self.network_address > other.network_address:
return 1
# self.network_address == other.network_address below here:
if self.netmask < other.netmask:
return -1
if self.netmask > other.netmask:
return 1
return 0
def _get_networks_key(self):
"""Network-only key function.
Returns an object that identifies this address' network and
netmask. This function is a suitable "key" argument for sorted()
and list.sort().
"""
return (self._version, self.network_address, self.netmask)
def subnets(self, prefixlen_diff=1, new_prefix=None):
"""The subnets which join to make the current subnet.
In the case that self contains only one IP
(self._prefixlen == 32 for IPv4 or self._prefixlen == 128
for IPv6), yield an iterator with just ourself.
Args:
prefixlen_diff: An integer, the amount the prefix length
should be increased by. This should not be set if
new_prefix is also set.
new_prefix: The desired new prefix length. This must be a
larger number (smaller prefix) than the existing prefix.
This should not be set if prefixlen_diff is also set.
Returns:
An iterator of IPv(4|6) objects.
Raises:
ValueError: The prefixlen_diff is too small or too large.
OR
prefixlen_diff and new_prefix are both set or new_prefix
is a smaller number than the current prefix (smaller
number means a larger network)
"""
if self._prefixlen == self._max_prefixlen:
yield self
return
if new_prefix is not None:
if new_prefix < self._prefixlen:
raise ValueError('new prefix must be longer')
if prefixlen_diff != 1:
raise ValueError('cannot set prefixlen_diff and new_prefix')
prefixlen_diff = new_prefix - self._prefixlen
if prefixlen_diff < 0:
raise ValueError('prefix length diff must be > 0')
new_prefixlen = self._prefixlen + prefixlen_diff
if new_prefixlen > self._max_prefixlen:
raise ValueError(
'prefix length diff %d is invalid for netblock %s' % (
new_prefixlen, self))
first = self.__class__('%s/%s' %
(self.network_address,
self._prefixlen + prefixlen_diff))
yield first
current = first
while True:
broadcast = current.broadcast_address
if broadcast == self.broadcast_address:
return
new_addr = self._address_class(int(broadcast) + 1)
current = self.__class__('%s/%s' % (new_addr,
new_prefixlen))
yield current
def supernet(self, prefixlen_diff=1, new_prefix=None):
"""The supernet containing the current network.
Args:
prefixlen_diff: An integer, the amount the prefix length of
the network should be decreased by. For example, given a
/24 network and a prefixlen_diff of 3, a supernet with a
/21 netmask is returned.
Returns:
An IPv4 network object.
Raises:
ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have
a negative prefix length.
OR
If prefixlen_diff and new_prefix are both set or new_prefix is a
larger number than the current prefix (larger number means a
smaller network)
"""
if self._prefixlen == 0:
return self
if new_prefix is not None:
if new_prefix > self._prefixlen:
raise ValueError('new prefix must be shorter')
if prefixlen_diff != 1:
raise ValueError('cannot set prefixlen_diff and new_prefix')
prefixlen_diff = self._prefixlen - new_prefix
if self.prefixlen - prefixlen_diff < 0:
raise ValueError(
'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
(self.prefixlen, prefixlen_diff))
# TODO (pmoody): optimize this.
t = self.__class__('%s/%d' % (self.network_address,
self.prefixlen - prefixlen_diff),
strict=False)
return t.__class__('%s/%d' % (t.network_address, t.prefixlen))
@property
def is_multicast(self):
"""Test if the address is reserved for multicast use.
Returns:
A boolean, True if the address is a multicast address.
See RFC 2373 2.7 for details.
"""
return (self.network_address.is_multicast and
self.broadcast_address.is_multicast)
@property
def is_reserved(self):
"""Test if the address is otherwise IETF reserved.
Returns:
A boolean, True if the address is within one of the
reserved IPv6 Network ranges.
"""
return (self.network_address.is_reserved and
self.broadcast_address.is_reserved)
@property
def is_link_local(self):
"""Test if the address is reserved for link-local.
Returns:
A boolean, True if the address is reserved per RFC 4291.
"""
return (self.network_address.is_link_local and
self.broadcast_address.is_link_local)
@property
def is_private(self):
"""Test if this address is allocated for private networks.
Returns:
A boolean, True if the address is reserved per
iana-ipv4-special-registry or iana-ipv6-special-registry.
"""
return (self.network_address.is_private and
self.broadcast_address.is_private)
    @property
    def is_global(self):
        """Test if this address is allocated for public networks.

        Returns:
            A boolean, True if the address is not reserved per
            iana-ipv4-special-registry or iana-ipv6-special-registry.
        """
        # Defined simply as the complement of is_private here; IPv4Network
        # overrides this with an extra 100.64.0.0/10 exclusion.
        return not self.is_private
@property
def is_unspecified(self):
"""Test if the address is unspecified.
Returns:
A boolean, True if this is the unspecified address as defined in
RFC 2373 2.5.2.
"""
return (self.network_address.is_unspecified and
self.broadcast_address.is_unspecified)
@property
def is_loopback(self):
"""Test if the address is a loopback address.
Returns:
A boolean, True if the address is a loopback address as defined in
RFC 2373 2.5.3.
"""
return (self.network_address.is_loopback and
self.broadcast_address.is_loopback)
class _BaseV4:
    """Base IPv4 object.

    The following methods are used by IPv4 objects in both single IP
    addresses and networks.
    """

    # Equivalent to 255.255.255.255 or 32 bits of 1's.
    _ALL_ONES = (2**IPV4LENGTH) - 1
    _DECIMAL_DIGITS = frozenset('0123456789')
    # the valid octets for host and netmasks. only useful for IPv4.
    _valid_mask_octets = frozenset((255, 254, 252, 248, 240, 224, 192, 128, 0))

    def __init__(self, address):
        self._version = 4
        self._max_prefixlen = IPV4LENGTH

    def _explode_shorthand_ip_string(self):
        # IPv4 has no shorthand notation; the exploded form is just str(self).
        return str(self)

    def _ip_int_from_string(self, ip_str):
        """Turn the given IP string into an integer for comparison.

        Args:
            ip_str: A string, the IP ip_str.

        Returns:
            The IP ip_str as an integer.

        Raises:
            AddressValueError: if ip_str isn't a valid IPv4 Address.
        """
        if not ip_str:
            raise AddressValueError('Address cannot be empty')
        octets = ip_str.split('.')
        if len(octets) != 4:
            raise AddressValueError("Expected 4 octets in %r" % ip_str)
        try:
            # Each parsed octet becomes one byte of the big-endian value.
            return int.from_bytes(map(self._parse_octet, octets), 'big')
        except ValueError as exc:
            raise AddressValueError("%s in %r" % (exc, ip_str)) from None

    def _parse_octet(self, octet_str):
        """Convert a decimal octet into an integer.

        Args:
            octet_str: A string, the number to parse.

        Returns:
            The octet as an integer.

        Raises:
            ValueError: if the octet isn't strictly a decimal from [0..255].
        """
        if not octet_str:
            raise ValueError("Empty octet not permitted")
        # Whitelist the characters, since int() allows a lot of bizarre stuff.
        if not self._DECIMAL_DIGITS.issuperset(octet_str):
            msg = "Only decimal digits permitted in %r"
            raise ValueError(msg % octet_str)
        # We do the length check second, since the invalid character error
        # is likely to be more informative for the user
        if len(octet_str) > 3:
            msg = "At most 3 characters permitted in %r"
            raise ValueError(msg % octet_str)
        # Convert to integer (we know digits are legal)
        octet_int = int(octet_str, 10)
        # Any octets that look like they *might* be written in octal,
        # and which don't look exactly the same in both octal and
        # decimal are rejected as ambiguous
        if octet_int > 7 and octet_str[0] == '0':
            msg = "Ambiguous (octal/decimal) value in %r not permitted"
            raise ValueError(msg % octet_str)
        if octet_int > 255:
            raise ValueError("Octet %d (> 255) not permitted" % octet_int)
        return octet_int

    def _string_from_ip_int(self, ip_int):
        """Turns a 32-bit integer into dotted decimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            The IP address as a string in dotted decimal notation.
        """
        return '.'.join(map(str, ip_int.to_bytes(4, 'big')))

    def _is_valid_netmask(self, netmask):
        """Verify that the netmask is valid.

        Args:
            netmask: A string, either a prefix or dotted decimal
              netmask.

        Returns:
            A boolean, True if the prefix represents a valid IPv4
            netmask.
        """
        mask = netmask.split('.')
        if len(mask) == 4:
            try:
                # Parse every octet once so the monotonicity check below
                # compares integers.  (The previous implementation compared
                # the raw strings, which only worked by accident of the
                # valid octets' spellings and mishandled zero-padded octets
                # such as '0255'.)
                octets = [int(x) for x in mask]
            except ValueError:
                # Found something that isn't an integer
                return False
            if any(octet not in self._valid_mask_octets for octet in octets):
                return False
            # A netmask's octets must be non-increasing from left to right,
            # e.g. 255.255.0.0 is valid but 255.0.255.0 is not.
            for idx, octet in enumerate(octets):
                if idx > 0 and octet > octets[idx - 1]:
                    return False
            return True
        try:
            netmask = int(netmask)
        except ValueError:
            return False
        return 0 <= netmask <= self._max_prefixlen

    def _is_hostmask(self, ip_str):
        """Test if the IP string is a hostmask (rather than a netmask).

        Args:
            ip_str: A string, the potential hostmask.

        Returns:
            A boolean, True if the IP string is a hostmask.
        """
        bits = ip_str.split('.')
        try:
            parts = [x for x in map(int, bits) if x in self._valid_mask_octets]
        except ValueError:
            return False
        if len(parts) != len(bits):
            return False
        # Hostmasks ascend left to right (e.g. 0.0.0.255); netmasks descend.
        if parts[0] < parts[-1]:
            return True
        return False

    @property
    def max_prefixlen(self):
        return self._max_prefixlen

    @property
    def version(self):
        return self._version
class IPv4Address(_BaseV4, _BaseAddress):
    """A single IPv4 address."""

    def __init__(self, address):
        """Create an IPv4Address.

        Args:
            address: A string or integer representing the IP.
                Additionally, an integer can be passed, so
                IPv4Address('192.0.2.1') == IPv4Address(3221225985)
                or, more generally
                IPv4Address(int(IPv4Address('192.0.2.1'))) ==
                IPv4Address('192.0.2.1')

        Raises:
            AddressValueError: If ipaddress isn't a valid IPv4 address.
        """
        _BaseAddress.__init__(self, address)
        _BaseV4.__init__(self, address)

        # Fast path: a plain integer is stored directly after range checking.
        if isinstance(address, int):
            self._check_int_address(address)
            self._ip = address
            return

        # A bytes object is interpreted as a 4-byte packed address.
        if isinstance(address, bytes):
            self._check_packed_address(address, 4)
            self._ip = int.from_bytes(address, 'big')
            return

        # Anything else is stringified and parsed as dotted-quad text.
        self._ip = self._ip_int_from_string(str(address))

    @property
    def packed(self):
        """The binary representation of this address."""
        return v4_int_to_packed(self._ip)

    @property
    def is_reserved(self):
        """True if the address lies in the reserved 240.0.0.0/4 block."""
        return self in IPv4Network('240.0.0.0/4')

    @property
    @functools.lru_cache()
    def is_private(self):
        """True if the address is reserved per iana-ipv4-special-registry."""
        private_networks = (
            IPv4Network('0.0.0.0/8'),
            IPv4Network('10.0.0.0/8'),
            IPv4Network('127.0.0.0/8'),
            IPv4Network('169.254.0.0/16'),
            IPv4Network('172.16.0.0/12'),
            IPv4Network('192.0.0.0/29'),
            IPv4Network('192.0.0.170/31'),
            IPv4Network('192.0.2.0/24'),
            IPv4Network('192.168.0.0/16'),
            IPv4Network('198.18.0.0/15'),
            IPv4Network('198.51.100.0/24'),
            IPv4Network('203.0.113.0/24'),
            IPv4Network('240.0.0.0/4'),
            IPv4Network('255.255.255.255/32'),
        )
        return any(self in net for net in private_networks)

    @property
    def is_multicast(self):
        """True if the address is multicast (224.0.0.0/4; see RFC 3171)."""
        return self in IPv4Network('224.0.0.0/4')

    @property
    def is_unspecified(self):
        """True if this is the unspecified address 0.0.0.0 (RFC 5735 3)."""
        return self == IPv4Address('0.0.0.0')

    @property
    def is_loopback(self):
        """True if this is a loopback address (127.0.0.0/8; RFC 3330)."""
        return self in IPv4Network('127.0.0.0/8')

    @property
    def is_link_local(self):
        """True if the address is link-local (169.254.0.0/16; RFC 3927)."""
        return self in IPv4Network('169.254.0.0/16')
class IPv4Interface(IPv4Address):
    """An IPv4 address together with the network it is attached to."""

    def __init__(self, address):
        # Integers and packed bytes carry no mask information, so the
        # interface defaults to a /32 host network.
        if isinstance(address, (bytes, int)):
            IPv4Address.__init__(self, address)
            self.network = IPv4Network(self._ip)
            self._prefixlen = self._max_prefixlen
            return

        # Split off any '/mask' part: the address portion is parsed here,
        # and the full string is parsed (non-strictly) as the network.
        ip_part = _split_optional_netmask(address)[0]
        IPv4Address.__init__(self, ip_part)
        self.network = IPv4Network(address, strict=False)
        self._prefixlen = self.network._prefixlen
        self.netmask = self.network.netmask
        self.hostmask = self.network.hostmask

    def __str__(self):
        host = self._string_from_ip_int(self._ip)
        return '%s/%d' % (host, self.network.prefixlen)

    def __eq__(self, other):
        same_address = IPv4Address.__eq__(self, other)
        # Pass through False / NotImplemented verdicts unchanged.
        if not same_address or same_address is NotImplemented:
            return same_address
        try:
            return self.network == other.network
        except AttributeError:
            # A bare address (no associated network) never equals an
            # interface; __hash__ mixes the network in for the same reason.
            return False

    def __lt__(self, other):
        by_address = IPv4Address.__lt__(self, other)
        if by_address is NotImplemented:
            return NotImplemented
        try:
            return self.network < other.network
        except AttributeError:
            # Addresses and interfaces do sort together: an unassociated
            # address orders before any interface.
            return False

    def __hash__(self):
        return self._ip ^ self._prefixlen ^ int(self.network.network_address)

    @property
    def ip(self):
        """The bare IPv4Address, without network information."""
        return IPv4Address(self._ip)

    @property
    def with_prefixlen(self):
        """String form 'address/prefixlen'."""
        return '%s/%s' % (self._string_from_ip_int(self._ip), self._prefixlen)

    @property
    def with_netmask(self):
        """String form 'address/netmask'."""
        return '%s/%s' % (self._string_from_ip_int(self._ip), self.netmask)

    @property
    def with_hostmask(self):
        """String form 'address/hostmask'."""
        return '%s/%s' % (self._string_from_ip_int(self._ip), self.hostmask)
class IPv4Network(_BaseV4, _BaseNetwork):
    """This class represents and manipulates 32-bit IPv4 network + addresses..

    Attributes: [examples for IPv4Network('192.0.2.0/27')]
        .network_address: IPv4Address('192.0.2.0')
        .hostmask: IPv4Address('0.0.0.31')
        .broadcast_address: IPv4Address('192.0.2.31')
        .netmask: IPv4Address('255.255.255.224')
        .prefixlen: 27
    """
    # Class to use when creating address objects
    _address_class = IPv4Address
    def __init__(self, address, strict=True):
        """Instantiate a new IPv4 network object.

        Args:
            address: A string or integer representing the IP [& network].
              '192.0.2.0/24'
              '192.0.2.0/255.255.255.0'
              '192.0.0.2/0.0.0.255'
              are all functionally the same in IPv4. Similarly,
              '192.0.2.1'
              '192.0.2.1/255.255.255.255'
              '192.0.2.1/32'
              are also functionally equivalent. That is to say, failing to
              provide a subnetmask will create an object with a mask of /32.
              If the mask (portion after the / in the argument) is given in
              dotted quad form, it is treated as a netmask if it starts with a
              non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
              starts with a zero field (e.g. 0.255.255.255 == /8), with the
              single exception of an all-zero mask which is treated as a
              netmask == /0. If no mask is given, a default of /32 is used.
              Additionally, an integer can be passed, so
              IPv4Network('192.0.2.1') == IPv4Network(3221225985)
              or, more generally
              IPv4Interface(int(IPv4Interface('192.0.2.1'))) ==
              IPv4Interface('192.0.2.1')

        Raises:
            AddressValueError: If ipaddress isn't a valid IPv4 address.
            NetmaskValueError: If the netmask isn't valid for
              an IPv4 address.
            ValueError: If strict is True and a network address is not
              supplied.
        """
        _BaseV4.__init__(self, address)
        _BaseNetwork.__init__(self, address)
        # Constructing from a packed address: treated as a single host (/32).
        if isinstance(address, bytes):
            self.network_address = IPv4Address(address)
            self._prefixlen = self._max_prefixlen
            self.netmask = IPv4Address(self._ALL_ONES)
            #fixme: address/network test here
            return
        # Efficient constructor from integer: also a single host (/32).
        if isinstance(address, int):
            self.network_address = IPv4Address(address)
            self._prefixlen = self._max_prefixlen
            self.netmask = IPv4Address(self._ALL_ONES)
            #fixme: address/network test here.
            return
        # Assume input argument to be string or any object representation
        # which converts into a formatted IP prefix string.
        addr = _split_optional_netmask(address)
        self.network_address = IPv4Address(self._ip_int_from_string(addr[0]))
        if len(addr) == 2:
            try:
                # Check for a netmask in prefix length form
                self._prefixlen = self._prefix_from_prefix_string(addr[1])
            except NetmaskValueError:
                # Check for a netmask or hostmask in dotted-quad form.
                # This may raise NetmaskValueError.
                self._prefixlen = self._prefix_from_ip_string(addr[1])
        else:
            # No mask supplied: default to a /32 host network.
            self._prefixlen = self._max_prefixlen
        self.netmask = IPv4Address(self._ip_int_from_prefix(self._prefixlen))
        # With strict=True, any host bits below the mask are an error;
        # either way the stored network address is the masked value.
        if strict:
            if (IPv4Address(int(self.network_address) & int(self.netmask)) !=
                    self.network_address):
                raise ValueError('%s has host bits set' % self)
        self.network_address = IPv4Address(int(self.network_address) &
                                           int(self.netmask))
        # A /31 network has exactly two addresses and no distinct
        # network/broadcast pair, so hosts() yields both (plain iteration).
        if self._prefixlen == (self._max_prefixlen - 1):
            self.hosts = self.__iter__
    @property
    @functools.lru_cache()
    def is_global(self):
        """Test if this address is allocated for public networks.

        Returns:
            A boolean, True if the address is not reserved per
            iana-ipv4-special-registry.
        """
        # 100.64.0.0/10 is special-cased: networks wholly inside it are not
        # global, yet they are also absent from the is_private ranges.
        return (not (self.network_address in IPv4Network('100.64.0.0/10') and
                    self.broadcast_address in IPv4Network('100.64.0.0/10')) and
                not self.is_private)
class _BaseV6:
    """Base IPv6 object.

    The following methods are used by IPv6 objects in both single IP
    addresses and networks.
    """
    # 128 bits of 1's.
    _ALL_ONES = (2**IPV6LENGTH) - 1
    _HEXTET_COUNT = 8
    _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef')
    def __init__(self, address):
        self._version = 6
        self._max_prefixlen = IPV6LENGTH
    def _ip_int_from_string(self, ip_str):
        """Turn an IPv6 ip_str into an integer.

        Args:
            ip_str: A string, the IPv6 ip_str.

        Returns:
            An int, the IPv6 address

        Raises:
            AddressValueError: if ip_str isn't a valid IPv6 Address.
        """
        if not ip_str:
            raise AddressValueError('Address cannot be empty')
        parts = ip_str.split(':')
        # An IPv6 address needs at least 2 colons (3 parts).
        _min_parts = 3
        if len(parts) < _min_parts:
            msg = "At least %d parts expected in %r" % (_min_parts, ip_str)
            raise AddressValueError(msg)
        # If the address has an IPv4-style suffix, convert it to hexadecimal.
        if '.' in parts[-1]:
            try:
                ipv4_int = IPv4Address(parts.pop())._ip
            except AddressValueError as exc:
                raise AddressValueError("%s in %r" % (exc, ip_str)) from None
            # The 32-bit IPv4 value becomes the final two 16-bit hextets.
            parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))
            parts.append('%x' % (ipv4_int & 0xFFFF))
        # An IPv6 address can't have more than 8 colons (9 parts).
        # The extra colon comes from using the "::" notation for a single
        # leading or trailing zero part.
        _max_parts = self._HEXTET_COUNT + 1
        if len(parts) > _max_parts:
            msg = "At most %d colons permitted in %r" % (_max_parts-1, ip_str)
            raise AddressValueError(msg)
        # Disregarding the endpoints, find '::' with nothing in between.
        # This indicates that a run of zeroes has been skipped.
        skip_index = None
        for i in range(1, len(parts) - 1):
            if not parts[i]:
                if skip_index is not None:
                    # Can't have more than one '::'
                    msg = "At most one '::' permitted in %r" % ip_str
                    raise AddressValueError(msg)
                skip_index = i
        # parts_hi is the number of parts to copy from above/before the '::'
        # parts_lo is the number of parts to copy from below/after the '::'
        if skip_index is not None:
            # If we found a '::', then check if it also covers the endpoints.
            parts_hi = skip_index
            parts_lo = len(parts) - skip_index - 1
            if not parts[0]:
                parts_hi -= 1
                if parts_hi:
                    msg = "Leading ':' only permitted as part of '::' in %r"
                    raise AddressValueError(msg % ip_str)  # ^: requires ^::
            if not parts[-1]:
                parts_lo -= 1
                if parts_lo:
                    msg = "Trailing ':' only permitted as part of '::' in %r"
                    raise AddressValueError(msg % ip_str)  # :$ requires ::$
            parts_skipped = self._HEXTET_COUNT - (parts_hi + parts_lo)
            if parts_skipped < 1:
                msg = "Expected at most %d other parts with '::' in %r"
                raise AddressValueError(msg % (self._HEXTET_COUNT-1, ip_str))
        else:
            # Otherwise, allocate the entire address to parts_hi.  The
            # endpoints could still be empty, but _parse_hextet() will check
            # for that.
            if len(parts) != self._HEXTET_COUNT:
                msg = "Exactly %d parts expected without '::' in %r"
                raise AddressValueError(msg % (self._HEXTET_COUNT, ip_str))
            if not parts[0]:
                msg = "Leading ':' only permitted as part of '::' in %r"
                raise AddressValueError(msg % ip_str)  # ^: requires ^::
            if not parts[-1]:
                msg = "Trailing ':' only permitted as part of '::' in %r"
                raise AddressValueError(msg % ip_str)  # :$ requires ::$
            parts_hi = len(parts)
            parts_lo = 0
            parts_skipped = 0
        try:
            # Now, parse the hextets into a 128-bit integer: high parts,
            # then the zero run implied by '::', then the low parts.
            ip_int = 0
            for i in range(parts_hi):
                ip_int <<= 16
                ip_int |= self._parse_hextet(parts[i])
            ip_int <<= 16 * parts_skipped
            for i in range(-parts_lo, 0):
                ip_int <<= 16
                ip_int |= self._parse_hextet(parts[i])
            return ip_int
        except ValueError as exc:
            raise AddressValueError("%s in %r" % (exc, ip_str)) from None
    def _parse_hextet(self, hextet_str):
        """Convert an IPv6 hextet string into an integer.

        Args:
            hextet_str: A string, the number to parse.

        Returns:
            The hextet as an integer.

        Raises:
            ValueError: if the input isn't strictly a hex number from
              [0..FFFF].
        """
        # Whitelist the characters, since int() allows a lot of bizarre stuff.
        if not self._HEX_DIGITS.issuperset(hextet_str):
            raise ValueError("Only hex digits permitted in %r" % hextet_str)
        # We do the length check second, since the invalid character error
        # is likely to be more informative for the user
        if len(hextet_str) > 4:
            msg = "At most 4 characters permitted in %r"
            raise ValueError(msg % hextet_str)
        # Length check means we can skip checking the integer value
        return int(hextet_str, 16)
    def _compress_hextets(self, hextets):
        """Compresses a list of hextets.

        Compresses a list of strings, replacing the longest continuous
        sequence of "0" in the list with "" and adding empty strings at
        the beginning or at the end of the string such that subsequently
        calling ":".join(hextets) will produce the compressed version of
        the IPv6 address.

        Args:
            hextets: A list of strings, the hextets to compress.

        Returns:
            A list of strings.
        """
        best_doublecolon_start = -1
        best_doublecolon_len = 0
        doublecolon_start = -1
        doublecolon_len = 0
        # Single pass to find the longest run of '0' hextets.
        for index, hextet in enumerate(hextets):
            if hextet == '0':
                doublecolon_len += 1
                if doublecolon_start == -1:
                    # Start of a sequence of zeros.
                    doublecolon_start = index
                if doublecolon_len > best_doublecolon_len:
                    # This is the longest sequence of zeros so far.
                    best_doublecolon_len = doublecolon_len
                    best_doublecolon_start = doublecolon_start
            else:
                doublecolon_len = 0
                doublecolon_start = -1
        # Only runs of two or more zeros benefit from '::' compression.
        if best_doublecolon_len > 1:
            best_doublecolon_end = (best_doublecolon_start +
                                    best_doublecolon_len)
            # For zeros at the end of the address.
            if best_doublecolon_end == len(hextets):
                hextets += ['']
            hextets[best_doublecolon_start:best_doublecolon_end] = ['']
            # For zeros at the beginning of the address.
            if best_doublecolon_start == 0:
                hextets = [''] + hextets
        return hextets
    def _string_from_ip_int(self, ip_int=None):
        """Turns a 128-bit integer into hexadecimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            A string, the hexadecimal representation of the address.

        Raises:
            ValueError: The address is bigger than 128 bits of all ones.
        """
        if ip_int is None:
            ip_int = int(self._ip)
        if ip_int > self._ALL_ONES:
            raise ValueError('IPv6 address is too large')
        hex_str = '%032x' % ip_int
        # Split into eight 4-hex-digit groups, dropping leading zeros in each.
        hextets = ['%x' % int(hex_str[x:x+4], 16) for x in range(0, 32, 4)]
        hextets = self._compress_hextets(hextets)
        return ':'.join(hextets)
    def _explode_shorthand_ip_string(self):
        """Expand a shortened IPv6 address.

        Returns:
            A string, the expanded IPv6 address.
        """
        # Pick the string form appropriate to the concrete subclass.
        if isinstance(self, IPv6Network):
            ip_str = str(self.network_address)
        elif isinstance(self, IPv6Interface):
            ip_str = str(self.ip)
        else:
            ip_str = str(self)
        ip_int = self._ip_int_from_string(ip_str)
        hex_str = '%032x' % ip_int
        parts = [hex_str[x:x+4] for x in range(0, 32, 4)]
        # Networks and interfaces keep their '/prefix' suffix.
        if isinstance(self, (_BaseNetwork, IPv6Interface)):
            return '%s/%d' % (':'.join(parts), self._prefixlen)
        return ':'.join(parts)
    @property
    def max_prefixlen(self):
        return self._max_prefixlen
    @property
    def version(self):
        return self._version
class IPv6Address(_BaseV6, _BaseAddress):
    """Represent and manipulate single IPv6 Addresses."""
    def __init__(self, address):
        """Instantiate a new IPv6 address object.

        Args:
            address: A string or integer representing the IP
              Additionally, an integer can be passed, so
              IPv6Address('2001:db8::') ==
                IPv6Address(42540766411282592856903984951653826560)
              or, more generally
              IPv6Address(int(IPv6Address('2001:db8::'))) ==
                IPv6Address('2001:db8::')

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.
        """
        _BaseAddress.__init__(self, address)
        _BaseV6.__init__(self, address)
        # Efficient constructor from integer.
        if isinstance(address, int):
            self._check_int_address(address)
            self._ip = address
            return
        # Constructing from a packed address (16 big-endian bytes).
        if isinstance(address, bytes):
            self._check_packed_address(address, 16)
            self._ip = int.from_bytes(address, 'big')
            return
        # Assume input argument to be string or any object representation
        # which converts into a formatted IP string.
        addr_str = str(address)
        self._ip = self._ip_int_from_string(addr_str)
    @property
    def packed(self):
        """The binary representation of this address."""
        return v6_int_to_packed(self._ip)
    @property
    def is_multicast(self):
        """Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is a multicast address.
            See RFC 2373 2.7 for details.
        """
        multicast_network = IPv6Network('ff00::/8')
        return self in multicast_network
    @property
    def is_reserved(self):
        """Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within one of the
            reserved IPv6 Network ranges.
        """
        reserved_networks = [IPv6Network('::/8'), IPv6Network('100::/8'),
                             IPv6Network('200::/7'), IPv6Network('400::/6'),
                             IPv6Network('800::/5'), IPv6Network('1000::/4'),
                             IPv6Network('4000::/3'), IPv6Network('6000::/3'),
                             IPv6Network('8000::/3'), IPv6Network('A000::/3'),
                             IPv6Network('C000::/3'), IPv6Network('E000::/4'),
                             IPv6Network('F000::/5'), IPv6Network('F800::/6'),
                             IPv6Network('FE00::/9')]
        return any(self in x for x in reserved_networks)
    @property
    def is_link_local(self):
        """Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is reserved per RFC 4291.
        """
        linklocal_network = IPv6Network('fe80::/10')
        return self in linklocal_network
    @property
    def is_site_local(self):
        """Test if the address is reserved for site-local.

        Note that the site-local address space has been deprecated by RFC 3879.
        Use is_private to test if this address is in the space of unique local
        addresses as defined by RFC 4193.

        Returns:
            A boolean, True if the address is reserved per RFC 3513 2.5.6.
        """
        sitelocal_network = IPv6Network('fec0::/10')
        return self in sitelocal_network
    @property
    @functools.lru_cache()
    def is_private(self):
        """Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv6-special-registry.
        """
        return (self in IPv6Network('::1/128') or
                self in IPv6Network('::/128') or
                self in IPv6Network('::ffff:0:0/96') or
                self in IPv6Network('100::/64') or
                self in IPv6Network('2001::/23') or
                self in IPv6Network('2001:2::/48') or
                self in IPv6Network('2001:db8::/32') or
                self in IPv6Network('2001:10::/28') or
                self in IPv6Network('fc00::/7') or
                self in IPv6Network('fe80::/10'))
    @property
    def is_global(self):
        """Test if this address is allocated for public networks.

        Returns:
            A boolean, true if the address is not reserved per
            iana-ipv6-special-registry.
        """
        return not self.is_private
    @property
    def is_unspecified(self):
        """Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 2373 2.5.2.
        """
        # The unspecified address '::' is all zero bits.
        return self._ip == 0
    @property
    def is_loopback(self):
        """Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback address as defined in
            RFC 2373 2.5.3.
        """
        # The loopback address '::1' has the integer value 1.
        return self._ip == 1
    @property
    def ipv4_mapped(self):
        """Return the IPv4 mapped address.

        Returns:
            If the IPv6 address is a v4 mapped address, return the
            IPv4 mapped address. Return None otherwise.
        """
        # A mapped address is ::ffff:a.b.c.d — everything above the low
        # 32 bits must equal 0xFFFF.
        if (self._ip >> 32) != 0xFFFF:
            return None
        return IPv4Address(self._ip & 0xFFFFFFFF)
    @property
    def teredo(self):
        """Tuple of embedded teredo IPs.

        Returns:
            Tuple of the (server, client) IPs or None if the address
            doesn't appear to be a teredo address (doesn't start with
            2001::/32)
        """
        if (self._ip >> 96) != 0x20010000:
            return None
        # Server address is bits 64..95; the client address is stored as
        # the bitwise complement of the low 32 bits.
        return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF),
                IPv4Address(~self._ip & 0xFFFFFFFF))
    @property
    def sixtofour(self):
        """Return the IPv4 6to4 embedded address.

        Returns:
            The IPv4 6to4-embedded address if present or None if the
            address doesn't appear to contain a 6to4 embedded address.
        """
        # 6to4 addresses start with the 2002::/16 prefix; the embedded IPv4
        # address occupies bits 80..111.
        if (self._ip >> 112) != 0x2002:
            return None
        return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)
class IPv6Interface(IPv6Address):
    """An IPv6 address together with the network it is attached to."""

    def __init__(self, address):
        # Integers and packed bytes carry no prefix information, so the
        # interface is placed on a /128 host network.
        if isinstance(address, (bytes, int)):
            IPv6Address.__init__(self, address)
            self.network = IPv6Network(self._ip)
            self._prefixlen = self._max_prefixlen
            return

        # Split off the optional '/prefix'; parse the address part here and
        # the whole string (non-strictly) as the containing network.
        ip_part = _split_optional_netmask(address)[0]
        IPv6Address.__init__(self, ip_part)
        self.network = IPv6Network(address, strict=False)
        self._prefixlen = self.network._prefixlen
        self.netmask = self.network.netmask
        self.hostmask = self.network.hostmask

    def __str__(self):
        host = self._string_from_ip_int(self._ip)
        return '%s/%d' % (host, self.network.prefixlen)

    def __eq__(self, other):
        same_address = IPv6Address.__eq__(self, other)
        # Pass through False / NotImplemented verdicts unchanged.
        if not same_address or same_address is NotImplemented:
            return same_address
        try:
            return self.network == other.network
        except AttributeError:
            # A bare address (no associated network) never equals an
            # interface; __hash__ mixes the network in for the same reason.
            return False

    def __lt__(self, other):
        by_address = IPv6Address.__lt__(self, other)
        if by_address is NotImplemented:
            return NotImplemented
        try:
            return self.network < other.network
        except AttributeError:
            # Addresses and interfaces do sort together: an unassociated
            # address orders before any interface.
            return False

    def __hash__(self):
        return self._ip ^ self._prefixlen ^ int(self.network.network_address)

    @property
    def ip(self):
        """The bare IPv6Address, without network information."""
        return IPv6Address(self._ip)

    @property
    def with_prefixlen(self):
        """String form 'address/prefixlen'."""
        return '%s/%s' % (self._string_from_ip_int(self._ip), self._prefixlen)

    @property
    def with_netmask(self):
        """String form 'address/netmask'."""
        return '%s/%s' % (self._string_from_ip_int(self._ip), self.netmask)

    @property
    def with_hostmask(self):
        """String form 'address/hostmask'."""
        return '%s/%s' % (self._string_from_ip_int(self._ip), self.hostmask)

    @property
    def is_unspecified(self):
        """True only for the unspecified address on the unspecified network."""
        return self._ip == 0 and self.network.is_unspecified

    @property
    def is_loopback(self):
        """True only for ::1 on the loopback network."""
        return self._ip == 1 and self.network.is_loopback
class IPv6Network(_BaseV6, _BaseNetwork):
    """A 128-bit IPv6 network.

    Attributes: [examples for IPv6Network('2001:db8::1000/124')]
        .network_address: IPv6Address('2001:db8::1000')
        .hostmask: IPv6Address('::f')
        .broadcast_address: IPv6Address('2001:db8::100f')
        .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0')
        .prefixlen: 124
    """

    # Class to use when creating address objects
    _address_class = IPv6Address

    def __init__(self, address, strict=True):
        """Instantiate a new IPv6 Network object.

        Args:
            address: The IPv6 network, as a string ('2001:db8::/128';
                '2001:db8::' -- a missing mask means /128), as an integer,
                or as a 16-byte packed value.  So
                IPv6Network('2001:db8::') ==
                IPv6Network(42540766411282592856903984951653826560)
                and, more generally,
                IPv6Network(int(IPv6Network('2001:db8::'))) ==
                IPv6Network('2001:db8::').
            strict: A boolean.  If true, reject values with host bits set
                (e.g. '2001:db8::1/124' instead of '2001:db8::1000/124').

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.
            NetmaskValueError: If the netmask isn't valid for
              an IPv6 address.
            ValueError: If strict was True and a network address was not
              supplied.
        """
        _BaseV6.__init__(self, address)
        _BaseNetwork.__init__(self, address)

        # An integer or packed-bytes input names a single address, which is
        # treated as a /128 network.
        if isinstance(address, int):
            self.network_address = IPv6Address(address)
            self._prefixlen = self._max_prefixlen
            self.netmask = IPv6Address(self._ALL_ONES)
            return
        if isinstance(address, bytes):
            self.network_address = IPv6Address(address)
            self._prefixlen = self._max_prefixlen
            self.netmask = IPv6Address(self._ALL_ONES)
            return

        # Anything else is stringified and split on the optional '/'.
        addr = _split_optional_netmask(address)
        self.network_address = IPv6Address(self._ip_int_from_string(addr[0]))
        if len(addr) == 2:
            # This may raise NetmaskValueError
            self._prefixlen = self._prefix_from_prefix_string(addr[1])
        else:
            self._prefixlen = self._max_prefixlen
        self.netmask = IPv6Address(self._ip_int_from_prefix(self._prefixlen))

        # Mask off the host bits; with strict=True their presence is an
        # error rather than being silently discarded.
        masked = int(self.network_address) & int(self.netmask)
        if strict and IPv6Address(masked) != self.network_address:
            raise ValueError('%s has host bits set' % self)
        self.network_address = IPv6Address(masked)

        # A /127 holds exactly two addresses with no Subnet-Router anycast
        # distinction, so hosts() falls back to plain iteration.
        if self._prefixlen == (self._max_prefixlen - 1):
            self.hosts = self.__iter__

    def hosts(self):
        """Generate the usable hosts in this network.

        Unlike __iter__, the Subnet-Router anycast address (the network
        address itself) is skipped.
        """
        first = int(self.network_address) + 1
        last = int(self.broadcast_address)
        for host_int in range(first, last + 1):
            yield self._address_class(host_int)

    @property
    def is_site_local(self):
        """True if the whole network is site-local (RFC 3513 2.5.6).

        Note that the site-local address space has been deprecated by
        RFC 3879.  Use is_private to test for unique local addresses as
        defined by RFC 4193.
        """
        endpoints = (self.network_address, self.broadcast_address)
        return all(addr.is_site_local for addr in endpoints)
| lgpl-3.0 |
web240/web240.github.io | demo/backbonejs/test/model.js | 45372 | (function() {
var ProxyModel = Backbone.Model.extend();
var Klass = Backbone.Collection.extend({
url: function() { return '/collection'; }
});
var doc, collection;
QUnit.module('Backbone.Model', {
beforeEach: function(assert) {
doc = new ProxyModel({
id: '1-the-tempest',
title: 'The Tempest',
author: 'Bill Shakespeare',
length: 123
});
collection = new Klass();
collection.add(doc);
}
});
QUnit.test('initialize', function(assert) {
assert.expect(3);
var Model = Backbone.Model.extend({
initialize: function() {
this.one = 1;
assert.equal(this.collection, collection);
}
});
var model = new Model({}, {collection: collection});
assert.equal(model.one, 1);
assert.equal(model.collection, collection);
});
QUnit.test('Object.prototype properties are overridden by attributes', function(assert) {
assert.expect(1);
var model = new Backbone.Model({hasOwnProperty: true});
assert.equal(model.get('hasOwnProperty'), true);
});
QUnit.test('initialize with attributes and options', function(assert) {
assert.expect(1);
var Model = Backbone.Model.extend({
initialize: function(attributes, options) {
this.one = options.one;
}
});
var model = new Model({}, {one: 1});
assert.equal(model.one, 1);
});
QUnit.test('initialize with parsed attributes', function(assert) {
assert.expect(1);
var Model = Backbone.Model.extend({
parse: function(attrs) {
attrs.value += 1;
return attrs;
}
});
var model = new Model({value: 1}, {parse: true});
assert.equal(model.get('value'), 2);
});
QUnit.test('parse can return null', function(assert) {
assert.expect(1);
var Model = Backbone.Model.extend({
parse: function(attrs) {
attrs.value += 1;
return null;
}
});
var model = new Model({value: 1}, {parse: true});
assert.equal(JSON.stringify(model.toJSON()), '{}');
});
QUnit.test('url', function(assert) {
assert.expect(3);
doc.urlRoot = null;
assert.equal(doc.url(), '/collection/1-the-tempest');
doc.collection.url = '/collection/';
assert.equal(doc.url(), '/collection/1-the-tempest');
doc.collection = null;
assert.raises(function() { doc.url(); });
doc.collection = collection;
});
QUnit.test('url when using urlRoot, and uri encoding', function(assert) {
assert.expect(2);
var Model = Backbone.Model.extend({
urlRoot: '/collection'
});
var model = new Model();
assert.equal(model.url(), '/collection');
model.set({id: '+1+'});
assert.equal(model.url(), '/collection/%2B1%2B');
});
QUnit.test('url when using urlRoot as a function to determine urlRoot at runtime', function(assert) {
assert.expect(2);
var Model = Backbone.Model.extend({
urlRoot: function() {
return '/nested/' + this.get('parentId') + '/collection';
}
});
var model = new Model({parentId: 1});
assert.equal(model.url(), '/nested/1/collection');
model.set({id: 2});
assert.equal(model.url(), '/nested/1/collection/2');
});
QUnit.test('underscore methods', function(assert) {
assert.expect(5);
var model = new Backbone.Model({foo: 'a', bar: 'b', baz: 'c'});
var model2 = model.clone();
assert.deepEqual(model.keys(), ['foo', 'bar', 'baz']);
assert.deepEqual(model.values(), ['a', 'b', 'c']);
assert.deepEqual(model.invert(), {a: 'foo', b: 'bar', c: 'baz'});
assert.deepEqual(model.pick('foo', 'baz'), {foo: 'a', baz: 'c'});
assert.deepEqual(model.omit('foo', 'bar'), {baz: 'c'});
});
QUnit.test('chain', function(assert) {
var model = new Backbone.Model({a: 0, b: 1, c: 2});
assert.deepEqual(model.chain().pick('a', 'b', 'c').values().compact().value(), [1, 2]);
});
QUnit.test('clone', function(assert) {
assert.expect(10);
var a = new Backbone.Model({foo: 1, bar: 2, baz: 3});
var b = a.clone();
assert.equal(a.get('foo'), 1);
assert.equal(a.get('bar'), 2);
assert.equal(a.get('baz'), 3);
assert.equal(b.get('foo'), a.get('foo'), 'Foo should be the same on the clone.');
assert.equal(b.get('bar'), a.get('bar'), 'Bar should be the same on the clone.');
assert.equal(b.get('baz'), a.get('baz'), 'Baz should be the same on the clone.');
a.set({foo: 100});
assert.equal(a.get('foo'), 100);
assert.equal(b.get('foo'), 1, 'Changing a parent attribute does not change the clone.');
var foo = new Backbone.Model({p: 1});
var bar = new Backbone.Model({p: 2});
bar.set(foo.clone().attributes, {unset: true});
assert.equal(foo.get('p'), 1);
assert.equal(bar.get('p'), undefined);
});
QUnit.test('isNew', function(assert) {
assert.expect(6);
var a = new Backbone.Model({foo: 1, bar: 2, baz: 3});
assert.ok(a.isNew(), 'it should be new');
a = new Backbone.Model({foo: 1, bar: 2, baz: 3, id: -5});
assert.ok(!a.isNew(), 'any defined ID is legal, negative or positive');
a = new Backbone.Model({foo: 1, bar: 2, baz: 3, id: 0});
assert.ok(!a.isNew(), 'any defined ID is legal, including zero');
assert.ok(new Backbone.Model().isNew(), 'is true when there is no id');
assert.ok(!new Backbone.Model({id: 2}).isNew(), 'is false for a positive integer');
assert.ok(!new Backbone.Model({id: -5}).isNew(), 'is false for a negative integer');
});
QUnit.test('get', function(assert) {
assert.expect(2);
assert.equal(doc.get('title'), 'The Tempest');
assert.equal(doc.get('author'), 'Bill Shakespeare');
});
QUnit.test('escape', function(assert) {
assert.expect(5);
assert.equal(doc.escape('title'), 'The Tempest');
doc.set({audience: 'Bill & Bob'});
assert.equal(doc.escape('audience'), 'Bill & Bob');
doc.set({audience: 'Tim > Joan'});
assert.equal(doc.escape('audience'), 'Tim > Joan');
doc.set({audience: 10101});
assert.equal(doc.escape('audience'), '10101');
doc.unset('audience');
assert.equal(doc.escape('audience'), '');
});
QUnit.test('has', function(assert) {
assert.expect(10);
var model = new Backbone.Model();
assert.strictEqual(model.has('name'), false);
model.set({
'0': 0,
'1': 1,
'true': true,
'false': false,
'empty': '',
'name': 'name',
'null': null,
'undefined': undefined
});
assert.strictEqual(model.has('0'), true);
assert.strictEqual(model.has('1'), true);
assert.strictEqual(model.has('true'), true);
assert.strictEqual(model.has('false'), true);
assert.strictEqual(model.has('empty'), true);
assert.strictEqual(model.has('name'), true);
model.unset('name');
assert.strictEqual(model.has('name'), false);
assert.strictEqual(model.has('null'), false);
assert.strictEqual(model.has('undefined'), false);
});
QUnit.test('matches', function(assert) {
assert.expect(4);
var model = new Backbone.Model();
assert.strictEqual(model.matches({name: 'Jonas', cool: true}), false);
model.set({name: 'Jonas', cool: true});
assert.strictEqual(model.matches({name: 'Jonas'}), true);
assert.strictEqual(model.matches({name: 'Jonas', cool: true}), true);
assert.strictEqual(model.matches({name: 'Jonas', cool: false}), false);
});
QUnit.test('matches with predicate', function(assert) {
var model = new Backbone.Model({a: 0});
assert.strictEqual(model.matches(function(attr) {
return attr.a > 1 && attr.b != null;
}), false);
model.set({a: 3, b: true});
assert.strictEqual(model.matches(function(attr) {
return attr.a > 1 && attr.b != null;
}), true);
});
QUnit.test('set and unset', function(assert) {
assert.expect(8);
var a = new Backbone.Model({id: 'id', foo: 1, bar: 2, baz: 3});
var changeCount = 0;
a.on('change:foo', function() { changeCount += 1; });
a.set({foo: 2});
assert.equal(a.get('foo'), 2, 'Foo should have changed.');
assert.equal(changeCount, 1, 'Change count should have incremented.');
// set with value that is not new shouldn't fire change event
a.set({foo: 2});
assert.equal(a.get('foo'), 2, 'Foo should NOT have changed, still 2');
assert.equal(changeCount, 1, 'Change count should NOT have incremented.');
a.validate = function(attrs) {
assert.equal(attrs.foo, void 0, 'validate:true passed while unsetting');
};
a.unset('foo', {validate: true});
assert.equal(a.get('foo'), void 0, 'Foo should have changed');
delete a.validate;
assert.equal(changeCount, 2, 'Change count should have incremented for unset.');
a.unset('id');
assert.equal(a.id, undefined, 'Unsetting the id should remove the id property.');
});
QUnit.test('#2030 - set with failed validate, followed by another set triggers change', function(assert) {
var attr = 0, main = 0, error = 0;
var Model = Backbone.Model.extend({
validate: function(attrs) {
if (attrs.x > 1) {
error++;
return 'this is an error';
}
}
});
var model = new Model({x: 0});
model.on('change:x', function() { attr++; });
model.on('change', function() { main++; });
model.set({x: 2}, {validate: true});
model.set({x: 1}, {validate: true});
assert.deepEqual([attr, main, error], [1, 1, 1]);
});
QUnit.test('set triggers changes in the correct order', function(assert) {
var value = null;
var model = new Backbone.Model;
model.on('last', function(){ value = 'last'; });
model.on('first', function(){ value = 'first'; });
model.trigger('first');
model.trigger('last');
assert.equal(value, 'last');
});
QUnit.test('set falsy values in the correct order', function(assert) {
assert.expect(2);
var model = new Backbone.Model({result: 'result'});
model.on('change', function() {
assert.equal(model.changed.result, void 0);
assert.equal(model.previous('result'), false);
});
model.set({result: void 0}, {silent: true});
model.set({result: null}, {silent: true});
model.set({result: false}, {silent: true});
model.set({result: void 0});
});
QUnit.test('nested set triggers with the correct options', function(assert) {
var model = new Backbone.Model();
var o1 = {};
var o2 = {};
var o3 = {};
model.on('change', function(__, options) {
switch (model.get('a')) {
case 1:
assert.equal(options, o1);
return model.set('a', 2, o2);
case 2:
assert.equal(options, o2);
return model.set('a', 3, o3);
case 3:
assert.equal(options, o3);
}
});
model.set('a', 1, o1);
});
QUnit.test('multiple unsets', function(assert) {
assert.expect(1);
var i = 0;
var counter = function(){ i++; };
var model = new Backbone.Model({a: 1});
model.on('change:a', counter);
model.set({a: 2});
model.unset('a');
model.unset('a');
assert.equal(i, 2, 'Unset does not fire an event for missing attributes.');
});
QUnit.test('unset and changedAttributes', function(assert) {
assert.expect(1);
var model = new Backbone.Model({a: 1});
model.on('change', function() {
assert.ok('a' in model.changedAttributes(), 'changedAttributes should contain unset properties');
});
model.unset('a');
});
QUnit.test('using a non-default id attribute.', function(assert) {
assert.expect(5);
var MongoModel = Backbone.Model.extend({idAttribute: '_id'});
var model = new MongoModel({id: 'eye-dee', _id: 25, title: 'Model'});
assert.equal(model.get('id'), 'eye-dee');
assert.equal(model.id, 25);
assert.equal(model.isNew(), false);
model.unset('_id');
assert.equal(model.id, undefined);
assert.equal(model.isNew(), true);
});
QUnit.test('setting an alternative cid prefix', function(assert) {
assert.expect(4);
var Model = Backbone.Model.extend({
cidPrefix: 'm'
});
var model = new Model();
assert.equal(model.cid.charAt(0), 'm');
model = new Backbone.Model();
assert.equal(model.cid.charAt(0), 'c');
var Collection = Backbone.Collection.extend({
model: Model
});
var col = new Collection([{id: 'c5'}, {id: 'c6'}, {id: 'c7'}]);
assert.equal(col.get('c6').cid.charAt(0), 'm');
col.set([{id: 'c6', value: 'test'}], {
merge: true,
add: true,
remove: false
});
assert.ok(col.get('c6').has('value'));
});
QUnit.test('set an empty string', function(assert) {
assert.expect(1);
var model = new Backbone.Model({name: 'Model'});
model.set({name: ''});
assert.equal(model.get('name'), '');
});
QUnit.test('setting an object', function(assert) {
assert.expect(1);
var model = new Backbone.Model({
custom: {foo: 1}
});
model.on('change', function() {
assert.ok(1);
});
model.set({
custom: {foo: 1} // no change should be fired
});
model.set({
custom: {foo: 2} // change event should be fired
});
});
QUnit.test('clear', function(assert) {
assert.expect(3);
var changed;
var model = new Backbone.Model({id: 1, name: 'Model'});
model.on('change:name', function(){ changed = true; });
model.on('change', function() {
var changedAttrs = model.changedAttributes();
assert.ok('name' in changedAttrs);
});
model.clear();
assert.equal(changed, true);
assert.equal(model.get('name'), undefined);
});
QUnit.test('defaults', function(assert) {
assert.expect(9);
var Defaulted = Backbone.Model.extend({
defaults: {
one: 1,
two: 2
}
});
var model = new Defaulted({two: undefined});
assert.equal(model.get('one'), 1);
assert.equal(model.get('two'), 2);
model = new Defaulted({two: 3});
assert.equal(model.get('one'), 1);
assert.equal(model.get('two'), 3);
Defaulted = Backbone.Model.extend({
defaults: function() {
return {
one: 3,
two: 4
};
}
});
model = new Defaulted({two: undefined});
assert.equal(model.get('one'), 3);
assert.equal(model.get('two'), 4);
Defaulted = Backbone.Model.extend({
defaults: {hasOwnProperty: true}
});
model = new Defaulted();
assert.equal(model.get('hasOwnProperty'), true);
model = new Defaulted({hasOwnProperty: undefined});
assert.equal(model.get('hasOwnProperty'), true);
model = new Defaulted({hasOwnProperty: false});
assert.equal(model.get('hasOwnProperty'), false);
});
QUnit.test('change, hasChanged, changedAttributes, previous, previousAttributes', function(assert) {
assert.expect(9);
var model = new Backbone.Model({name: 'Tim', age: 10});
assert.deepEqual(model.changedAttributes(), false);
model.on('change', function() {
assert.ok(model.hasChanged('name'), 'name changed');
assert.ok(!model.hasChanged('age'), 'age did not');
assert.ok(_.isEqual(model.changedAttributes(), {name: 'Rob'}), 'changedAttributes returns the changed attrs');
assert.equal(model.previous('name'), 'Tim');
assert.ok(_.isEqual(model.previousAttributes(), {name: 'Tim', age: 10}), 'previousAttributes is correct');
});
assert.equal(model.hasChanged(), false);
assert.equal(model.hasChanged(undefined), false);
model.set({name: 'Rob'});
assert.equal(model.get('name'), 'Rob');
});
QUnit.test('changedAttributes', function(assert) {
assert.expect(3);
var model = new Backbone.Model({a: 'a', b: 'b'});
assert.deepEqual(model.changedAttributes(), false);
assert.equal(model.changedAttributes({a: 'a'}), false);
assert.equal(model.changedAttributes({a: 'b'}).a, 'b');
});
QUnit.test('change with options', function(assert) {
assert.expect(2);
var value;
var model = new Backbone.Model({name: 'Rob'});
model.on('change', function(m, options) {
value = options.prefix + m.get('name');
});
model.set({name: 'Bob'}, {prefix: 'Mr. '});
assert.equal(value, 'Mr. Bob');
model.set({name: 'Sue'}, {prefix: 'Ms. '});
assert.equal(value, 'Ms. Sue');
});
QUnit.test('change after initialize', function(assert) {
assert.expect(1);
var changed = 0;
var attrs = {id: 1, label: 'c'};
var obj = new Backbone.Model(attrs);
obj.on('change', function() { changed += 1; });
obj.set(attrs);
assert.equal(changed, 0);
});
QUnit.test('save within change event', function(assert) {
assert.expect(1);
var env = this;
var model = new Backbone.Model({firstName: 'Taylor', lastName: 'Swift'});
model.url = '/test';
model.on('change', function() {
model.save();
assert.ok(_.isEqual(env.syncArgs.model, model));
});
model.set({lastName: 'Hicks'});
});
QUnit.test('validate after save', function(assert) {
assert.expect(2);
var lastError, model = new Backbone.Model();
model.validate = function(attrs) {
if (attrs.admin) return "Can't change admin status.";
};
model.sync = function(method, m, options) {
options.success.call(this, {admin: true});
};
model.on('invalid', function(m, error) {
lastError = error;
});
model.save(null);
assert.equal(lastError, "Can't change admin status.");
assert.equal(model.validationError, "Can't change admin status.");
});
QUnit.test('save', function(assert) {
assert.expect(2);
doc.save({title: 'Henry V'});
assert.equal(this.syncArgs.method, 'update');
assert.ok(_.isEqual(this.syncArgs.model, doc));
});
QUnit.test('save, fetch, destroy triggers error event when an error occurs', function(assert) {
assert.expect(3);
var model = new Backbone.Model();
model.on('error', function() {
assert.ok(true);
});
model.sync = function(method, m, options) {
options.error();
};
model.save({data: 2, id: 1});
model.fetch();
model.destroy();
});
QUnit.test('#3283 - save, fetch, destroy calls success with context', function(assert) {
assert.expect(3);
var model = new Backbone.Model();
var obj = {};
var options = {
context: obj,
success: function() {
assert.equal(this, obj);
}
};
model.sync = function(method, m, opts) {
opts.success.call(opts.context);
};
model.save({data: 2, id: 1}, options);
model.fetch(options);
model.destroy(options);
});
QUnit.test('#3283 - save, fetch, destroy calls error with context', function(assert) {
assert.expect(3);
var model = new Backbone.Model();
var obj = {};
var options = {
context: obj,
error: function() {
assert.equal(this, obj);
}
};
model.sync = function(method, m, opts) {
opts.error.call(opts.context);
};
model.save({data: 2, id: 1}, options);
model.fetch(options);
model.destroy(options);
});
QUnit.test('#3470 - save and fetch with parse false', function(assert) {
assert.expect(2);
var i = 0;
var model = new Backbone.Model();
model.parse = function() {
assert.ok(false);
};
model.sync = function(method, m, options) {
options.success({i: ++i});
};
model.fetch({parse: false});
assert.equal(model.get('i'), i);
model.save(null, {parse: false});
assert.equal(model.get('i'), i);
});
QUnit.test('save with PATCH', function(assert) {
doc.clear().set({id: 1, a: 1, b: 2, c: 3, d: 4});
doc.save();
assert.equal(this.syncArgs.method, 'update');
assert.equal(this.syncArgs.options.attrs, undefined);
doc.save({b: 2, d: 4}, {patch: true});
assert.equal(this.syncArgs.method, 'patch');
assert.equal(_.size(this.syncArgs.options.attrs), 2);
assert.equal(this.syncArgs.options.attrs.d, 4);
assert.equal(this.syncArgs.options.attrs.a, undefined);
assert.equal(this.ajaxSettings.data, '{"b":2,"d":4}');
});
QUnit.test('save with PATCH and different attrs', function(assert) {
doc.clear().save({b: 2, d: 4}, {patch: true, attrs: {B: 1, D: 3}});
assert.equal(this.syncArgs.options.attrs.D, 3);
assert.equal(this.syncArgs.options.attrs.d, undefined);
assert.equal(this.ajaxSettings.data, '{"B":1,"D":3}');
assert.deepEqual(doc.attributes, {b: 2, d: 4});
});
QUnit.test('save in positional style', function(assert) {
assert.expect(1);
var model = new Backbone.Model();
model.sync = function(method, m, options) {
options.success();
};
model.save('title', 'Twelfth Night');
assert.equal(model.get('title'), 'Twelfth Night');
});
QUnit.test('save with non-object success response', function(assert) {
assert.expect(2);
var model = new Backbone.Model();
model.sync = function(method, m, options) {
options.success('', options);
options.success(null, options);
};
model.save({testing: 'empty'}, {
success: function(m) {
assert.deepEqual(m.attributes, {testing: 'empty'});
}
});
});
QUnit.test('save with wait and supplied id', function(assert) {
var Model = Backbone.Model.extend({
urlRoot: '/collection'
});
var model = new Model();
model.save({id: 42}, {wait: true});
assert.equal(this.ajaxSettings.url, '/collection/42');
});
QUnit.test('save will pass extra options to success callback', function(assert) {
assert.expect(1);
var SpecialSyncModel = Backbone.Model.extend({
sync: function(method, m, options) {
_.extend(options, {specialSync: true});
return Backbone.Model.prototype.sync.call(this, method, m, options);
},
urlRoot: '/test'
});
var model = new SpecialSyncModel();
var onSuccess = function(m, response, options) {
assert.ok(options.specialSync, 'Options were passed correctly to callback');
};
model.save(null, {success: onSuccess});
this.ajaxSettings.success();
});
QUnit.test('fetch', function(assert) {
assert.expect(2);
doc.fetch();
assert.equal(this.syncArgs.method, 'read');
assert.ok(_.isEqual(this.syncArgs.model, doc));
});
QUnit.test('fetch will pass extra options to success callback', function(assert) {
assert.expect(1);
var SpecialSyncModel = Backbone.Model.extend({
sync: function(method, m, options) {
_.extend(options, {specialSync: true});
return Backbone.Model.prototype.sync.call(this, method, m, options);
},
urlRoot: '/test'
});
var model = new SpecialSyncModel();
var onSuccess = function(m, response, options) {
assert.ok(options.specialSync, 'Options were passed correctly to callback');
};
model.fetch({success: onSuccess});
this.ajaxSettings.success();
});
QUnit.test('destroy', function(assert) {
assert.expect(3);
doc.destroy();
assert.equal(this.syncArgs.method, 'delete');
assert.ok(_.isEqual(this.syncArgs.model, doc));
var newModel = new Backbone.Model;
assert.equal(newModel.destroy(), false);
});
QUnit.test('destroy will pass extra options to success callback', function(assert) {
assert.expect(1);
var SpecialSyncModel = Backbone.Model.extend({
sync: function(method, m, options) {
_.extend(options, {specialSync: true});
return Backbone.Model.prototype.sync.call(this, method, m, options);
},
urlRoot: '/test'
});
var model = new SpecialSyncModel({id: 'id'});
var onSuccess = function(m, response, options) {
assert.ok(options.specialSync, 'Options were passed correctly to callback');
};
model.destroy({success: onSuccess});
this.ajaxSettings.success();
});
QUnit.test('non-persisted destroy', function(assert) {
assert.expect(1);
var a = new Backbone.Model({foo: 1, bar: 2, baz: 3});
a.sync = function() { throw 'should not be called'; };
a.destroy();
assert.ok(true, 'non-persisted model should not call sync');
});
QUnit.test('validate', function(assert) {
var lastError;
var model = new Backbone.Model();
model.validate = function(attrs) {
if (attrs.admin !== this.get('admin')) return "Can't change admin status.";
};
model.on('invalid', function(m, error) {
lastError = error;
});
var result = model.set({a: 100});
assert.equal(result, model);
assert.equal(model.get('a'), 100);
assert.equal(lastError, undefined);
result = model.set({admin: true});
assert.equal(model.get('admin'), true);
result = model.set({a: 200, admin: false}, {validate: true});
assert.equal(lastError, "Can't change admin status.");
assert.equal(result, false);
assert.equal(model.get('a'), 100);
});
QUnit.test('validate on unset and clear', function(assert) {
assert.expect(6);
var error;
var model = new Backbone.Model({name: 'One'});
model.validate = function(attrs) {
if (!attrs.name) {
error = true;
return 'No thanks.';
}
};
model.set({name: 'Two'});
assert.equal(model.get('name'), 'Two');
assert.equal(error, undefined);
model.unset('name', {validate: true});
assert.equal(error, true);
assert.equal(model.get('name'), 'Two');
model.clear({validate: true});
assert.equal(model.get('name'), 'Two');
delete model.validate;
model.clear();
assert.equal(model.get('name'), undefined);
});
QUnit.test('validate with error callback', function(assert) {
assert.expect(8);
var lastError, boundError;
var model = new Backbone.Model();
model.validate = function(attrs) {
if (attrs.admin) return "Can't change admin status.";
};
model.on('invalid', function(m, error) {
boundError = true;
});
var result = model.set({a: 100}, {validate: true});
assert.equal(result, model);
assert.equal(model.get('a'), 100);
assert.equal(model.validationError, null);
assert.equal(boundError, undefined);
result = model.set({a: 200, admin: true}, {validate: true});
assert.equal(result, false);
assert.equal(model.get('a'), 100);
assert.equal(model.validationError, "Can't change admin status.");
assert.equal(boundError, true);
});
QUnit.test('defaults always extend attrs (#459)', function(assert) {
assert.expect(2);
var Defaulted = Backbone.Model.extend({
defaults: {one: 1},
initialize: function(attrs, opts) {
assert.equal(this.attributes.one, 1);
}
});
var providedattrs = new Defaulted({});
var emptyattrs = new Defaulted();
});
QUnit.test('Inherit class properties', function(assert) {
assert.expect(6);
var Parent = Backbone.Model.extend({
instancePropSame: function() {},
instancePropDiff: function() {}
}, {
classProp: function() {}
});
var Child = Parent.extend({
instancePropDiff: function() {}
});
var adult = new Parent;
var kid = new Child;
assert.equal(Child.classProp, Parent.classProp);
assert.notEqual(Child.classProp, undefined);
assert.equal(kid.instancePropSame, adult.instancePropSame);
assert.notEqual(kid.instancePropSame, undefined);
assert.notEqual(Child.prototype.instancePropDiff, Parent.prototype.instancePropDiff);
assert.notEqual(Child.prototype.instancePropDiff, undefined);
});
QUnit.test("Nested change events don't clobber previous attributes", function(assert) {
assert.expect(4);
new Backbone.Model()
.on('change:state', function(m, newState) {
assert.equal(m.previous('state'), undefined);
assert.equal(newState, 'hello');
// Fire a nested change event.
m.set({other: 'whatever'});
})
.on('change:state', function(m, newState) {
assert.equal(m.previous('state'), undefined);
assert.equal(newState, 'hello');
})
.set({state: 'hello'});
});
QUnit.test('hasChanged/set should use same comparison', function(assert) {
assert.expect(2);
var changed = 0, model = new Backbone.Model({a: null});
model.on('change', function() {
assert.ok(this.hasChanged('a'));
})
.on('change:a', function() {
changed++;
})
.set({a: undefined});
assert.equal(changed, 1);
});
QUnit.test('#582, #425, change:attribute callbacks should fire after all changes have occurred', function(assert) {
assert.expect(9);
var model = new Backbone.Model;
var assertion = function() {
assert.equal(model.get('a'), 'a');
assert.equal(model.get('b'), 'b');
assert.equal(model.get('c'), 'c');
};
model.on('change:a', assertion);
model.on('change:b', assertion);
model.on('change:c', assertion);
model.set({a: 'a', b: 'b', c: 'c'});
});
QUnit.test('#871, set with attributes property', function(assert) {
assert.expect(1);
var model = new Backbone.Model();
model.set({attributes: true});
assert.ok(model.has('attributes'));
});
QUnit.test('set value regardless of equality/change', function(assert) {
assert.expect(1);
var model = new Backbone.Model({x: []});
var a = [];
model.set({x: a});
assert.ok(model.get('x') === a);
});
QUnit.test('set same value does not trigger change', function(assert) {
assert.expect(0);
var model = new Backbone.Model({x: 1});
model.on('change change:x', function() { assert.ok(false); });
model.set({x: 1});
model.set({x: 1});
});
QUnit.test('unset does not fire a change for undefined attributes', function(assert) {
assert.expect(0);
var model = new Backbone.Model({x: undefined});
model.on('change:x', function(){ assert.ok(false); });
model.unset('x');
});
QUnit.test('set: undefined values', function(assert) {
assert.expect(1);
var model = new Backbone.Model({x: undefined});
assert.ok('x' in model.attributes);
});
QUnit.test('hasChanged works outside of change events, and true within', function(assert) {
assert.expect(6);
var model = new Backbone.Model({x: 1});
model.on('change:x', function() {
assert.ok(model.hasChanged('x'));
assert.equal(model.get('x'), 1);
});
model.set({x: 2}, {silent: true});
assert.ok(model.hasChanged());
assert.equal(model.hasChanged('x'), true);
model.set({x: 1});
assert.ok(model.hasChanged());
assert.equal(model.hasChanged('x'), true);
});
QUnit.test('hasChanged gets cleared on the following set', function(assert) {
assert.expect(4);
var model = new Backbone.Model;
model.set({x: 1});
assert.ok(model.hasChanged());
model.set({x: 1});
assert.ok(!model.hasChanged());
model.set({x: 2});
assert.ok(model.hasChanged());
model.set({});
assert.ok(!model.hasChanged());
});
QUnit.test('save with `wait` succeeds without `validate`', function(assert) {
assert.expect(1);
var model = new Backbone.Model();
model.url = '/test';
model.save({x: 1}, {wait: true});
assert.ok(this.syncArgs.model === model);
});
QUnit.test("save without `wait` doesn't set invalid attributes", function(assert) {
var model = new Backbone.Model();
model.validate = function() { return 1; };
model.save({a: 1});
assert.equal(model.get('a'), void 0);
});
QUnit.test("save doesn't validate twice", function(assert) {
var model = new Backbone.Model();
var times = 0;
model.sync = function() {};
model.validate = function() { ++times; };
model.save({});
assert.equal(times, 1);
});
QUnit.test('`hasChanged` for falsey keys', function(assert) {
assert.expect(2);
var model = new Backbone.Model();
model.set({x: true}, {silent: true});
assert.ok(!model.hasChanged(0));
assert.ok(!model.hasChanged(''));
});
QUnit.test('`previous` for falsey keys', function(assert) {
assert.expect(2);
var model = new Backbone.Model({'0': true, '': true});
model.set({'0': false, '': false}, {silent: true});
assert.equal(model.previous(0), true);
assert.equal(model.previous(''), true);
});
QUnit.test('`save` with `wait` sends correct attributes', function(assert) {
assert.expect(5);
var changed = 0;
var model = new Backbone.Model({x: 1, y: 2});
model.url = '/test';
model.on('change:x', function() { changed++; });
model.save({x: 3}, {wait: true});
assert.deepEqual(JSON.parse(this.ajaxSettings.data), {x: 3, y: 2});
assert.equal(model.get('x'), 1);
assert.equal(changed, 0);
this.syncArgs.options.success({});
assert.equal(model.get('x'), 3);
assert.equal(changed, 1);
});
QUnit.test("a failed `save` with `wait` doesn't leave attributes behind", function(assert) {
assert.expect(1);
var model = new Backbone.Model;
model.url = '/test';
model.save({x: 1}, {wait: true});
assert.equal(model.get('x'), void 0);
});
QUnit.test('#1030 - `save` with `wait` results in correct attributes if success is called during sync', function(assert) {
assert.expect(2);
var model = new Backbone.Model({x: 1, y: 2});
model.sync = function(method, m, options) {
options.success();
};
model.on('change:x', function() { assert.ok(true); });
model.save({x: 3}, {wait: true});
assert.equal(model.get('x'), 3);
});
QUnit.test('save with wait validates attributes', function(assert) {
var model = new Backbone.Model();
model.url = '/test';
model.validate = function() { assert.ok(true); };
model.save({x: 1}, {wait: true});
});
QUnit.test('save turns on parse flag', function(assert) {
var Model = Backbone.Model.extend({
sync: function(method, m, options) { assert.ok(options.parse); }
});
new Model().save();
});
QUnit.test("nested `set` during `'change:attr'`", function(assert) {
assert.expect(2);
var events = [];
var model = new Backbone.Model();
model.on('all', function(event) { events.push(event); });
model.on('change', function() {
model.set({z: true}, {silent: true});
});
model.on('change:x', function() {
model.set({y: true});
});
model.set({x: true});
assert.deepEqual(events, ['change:y', 'change:x', 'change']);
events = [];
model.set({z: true});
assert.deepEqual(events, []);
});
QUnit.test('nested `change` only fires once', function(assert) {
assert.expect(1);
var model = new Backbone.Model();
model.on('change', function() {
assert.ok(true);
model.set({x: true});
});
model.set({x: true});
});
QUnit.test("nested `set` during `'change'`", function(assert) {
assert.expect(6);
var count = 0;
var model = new Backbone.Model();
model.on('change', function() {
switch (count++) {
case 0:
assert.deepEqual(this.changedAttributes(), {x: true});
assert.equal(model.previous('x'), undefined);
model.set({y: true});
break;
case 1:
assert.deepEqual(this.changedAttributes(), {x: true, y: true});
assert.equal(model.previous('x'), undefined);
model.set({z: true});
break;
case 2:
assert.deepEqual(this.changedAttributes(), {x: true, y: true, z: true});
assert.equal(model.previous('y'), undefined);
break;
default:
assert.ok(false);
}
});
model.set({x: true});
});
QUnit.test('nested `change` with silent', function(assert) {
assert.expect(3);
var count = 0;
var model = new Backbone.Model();
model.on('change:y', function() { assert.ok(false); });
model.on('change', function() {
switch (count++) {
case 0:
assert.deepEqual(this.changedAttributes(), {x: true});
model.set({y: true}, {silent: true});
model.set({z: true});
break;
case 1:
assert.deepEqual(this.changedAttributes(), {x: true, y: true, z: true});
break;
case 2:
assert.deepEqual(this.changedAttributes(), {z: false});
break;
default:
assert.ok(false);
}
});
model.set({x: true});
model.set({z: false});
});
QUnit.test('nested `change:attr` with silent', function(assert) {
assert.expect(0);
var model = new Backbone.Model();
model.on('change:y', function(){ assert.ok(false); });
model.on('change', function() {
model.set({y: true}, {silent: true});
model.set({z: true});
});
model.set({x: true});
});
QUnit.test('multiple nested changes with silent', function(assert) {
assert.expect(1);
var model = new Backbone.Model();
model.on('change:x', function() {
model.set({y: 1}, {silent: true});
model.set({y: 2});
});
model.on('change:y', function(m, val) {
assert.equal(val, 2);
});
model.set({x: true});
});
QUnit.test('multiple nested changes with silent', function(assert) {
assert.expect(1);
var changes = [];
var model = new Backbone.Model();
model.on('change:b', function(m, val) { changes.push(val); });
model.on('change', function() {
model.set({b: 1});
});
model.set({b: 0});
assert.deepEqual(changes, [0, 1]);
});
QUnit.test('basic silent change semantics', function(assert) {
assert.expect(1);
var model = new Backbone.Model;
model.set({x: 1});
model.on('change', function(){ assert.ok(true); });
model.set({x: 2}, {silent: true});
model.set({x: 1});
});
QUnit.test('nested set multiple times', function(assert) {
assert.expect(1);
var model = new Backbone.Model();
model.on('change:b', function() {
assert.ok(true);
});
model.on('change:a', function() {
model.set({b: true});
model.set({b: true});
});
model.set({a: true});
});
// clear() must treat the caller's options object as read-only: its
// internal `unset` flag must not leak into the passed-in hash.
QUnit.test('#1122 - clear does not alter options.', function(assert) {
  assert.expect(1);
  var model = new Backbone.Model();
  var options = {};
  model.clear(options);
  assert.ok(!options.unset);
});

// Same contract for unset(): the caller's options hash stays untouched.
QUnit.test('#1122 - unset does not alter options.', function(assert) {
  assert.expect(1);
  var model = new Backbone.Model();
  var options = {};
  model.unset('x', options);
  assert.ok(!options.unset);
});

// save/fetch/destroy must forward the options hash to the user-supplied
// success callback as its third argument (one assertion per call).
QUnit.test('#1355 - `options` is passed to success callbacks', function(assert) {
  assert.expect(3);
  var model = new Backbone.Model();
  var opts = {
    success: function( m, resp, options ) {
      assert.ok(options);
    }
  };
  // Stub sync so the success path runs synchronously without a server.
  model.sync = function(method, m, options) {
    options.success();
  };
  model.save({id: 1}, opts);
  model.fetch(opts);
  model.destroy(opts);
});
// fetch/save/destroy must each trigger a 'sync' event on success.
QUnit.test("#1412 - Trigger 'sync' event.", function(assert) {
  assert.expect(3);
  var model = new Backbone.Model({id: 1});
  model.sync = function(method, m, options) { options.success(); };
  model.on('sync', function(){ assert.ok(true); });
  model.fetch();
  model.save();
  model.destroy();
});

// Destroying a new (never-persisted) model skips sync entirely but must
// still fire 'destroy' and invoke the success callback.
QUnit.test('#1365 - Destroy: New models execute success callback.', function(assert) {
  var done = assert.async();
  assert.expect(2);
  new Backbone.Model()
    .on('sync', function() { assert.ok(false); })
    .on('destroy', function(){ assert.ok(true); })
    .destroy({success: function(){
      assert.ok(true);
      done();
    }});
});

// save() must return false without ever calling sync when validation fails.
QUnit.test('#1433 - Save: An invalid model cannot be persisted.', function(assert) {
  assert.expect(1);
  var model = new Backbone.Model;
  model.validate = function(){ return 'invalid'; };
  model.sync = function(){ assert.ok(false); };
  assert.strictEqual(model.save(), false);
});

// A no-argument save() still runs validation and fires 'invalid'.
QUnit.test("#1377 - Save without attrs triggers 'error'.", function(assert) {
  assert.expect(1);
  var Model = Backbone.Model.extend({
    url: '/test/',
    sync: function(method, m, options){ options.success(); },
    validate: function(){ return 'invalid'; }
  });
  var model = new Model({id: 1});
  model.on('invalid', function(){ assert.ok(true); });
  model.save();
});

// `new Model(undefined)` must reach initialize() with attrs === undefined,
// not coerced to an empty object (two constructions, one assertion each
// via initialize).
QUnit.test('#1545 - `undefined` can be passed to a model constructor without coersion', function(assert) {
  var Model = Backbone.Model.extend({
    defaults: {one: 1},
    initialize: function(attrs, opts) {
      assert.equal(attrs, undefined);
    }
  });
  var emptyattrs = new Model();
  var undefinedattrs = new Model(undefined);
});
// With {wait: true}, a save whose attributes already match the model
// must not fire change:x when the server responds (expect(0)).
QUnit.test('#1478 - Model `save` does not trigger change on unchanged attributes', function(assert) {
  var done = assert.async();
  assert.expect(0);
  var Model = Backbone.Model.extend({
    sync: function(method, m, options) {
      // Respond asynchronously so the wait-path bookkeeping runs.
      setTimeout(function(){
        options.success();
        done();
      }, 0);
    }
  });
  new Model({x: true})
    .on('change:x', function(){ assert.ok(false); })
    .save(null, {wait: true});
});

// Two silent interim changes followed by a set back to the original
// value still produce exactly one change:x.
QUnit.test('#1664 - Changing from one value, silently to another, back to original triggers a change.', function(assert) {
  assert.expect(1);
  var model = new Backbone.Model({x: 1});
  model.on('change:x', function() { assert.ok(true); });
  model.set({x: 2}, {silent: true});
  model.set({x: 3}, {silent: true});
  model.set({x: 1});
});

// Silent sets/unsets inside the `change` handler update attributes but
// the change:* events still report the values from the original set().
QUnit.test('#1664 - multiple silent changes nested inside a change event', function(assert) {
  assert.expect(2);
  var changes = [];
  var model = new Backbone.Model();
  model.on('change', function() {
    model.set({a: 'c'}, {silent: true});
    model.set({b: 2}, {silent: true});
    model.unset('c', {silent: true});
  });
  model.on('change:a change:b change:c', function(m, val) { changes.push(val); });
  model.set({a: 'a', b: 1, c: 'item'});
  assert.deepEqual(changes, ['a', 1, 'item']);
  assert.deepEqual(model.attributes, {a: 'c', b: 2});
});

// parse() must be able to inspect this.has(...) during construction.
QUnit.test('#1791 - `attributes` is available for `parse`', function(assert) {
  var Model = Backbone.Model.extend({
    parse: function() { this.has('a'); } // shouldn't throw an error
  });
  var model = new Model(null, {parse: true});
  assert.expect(0);
});
// A silent change made inside the last `change` handler resets the
// baseline, so re-setting the original value fires change:a again.
QUnit.test('silent changes in last `change` event back to original triggers change', function(assert) {
  assert.expect(2);
  var changes = [];
  var model = new Backbone.Model();
  model.on('change:a change:b change:c', function(m, val) { changes.push(val); });
  model.on('change', function() {
    model.set({a: 'c'}, {silent: true});
  });
  model.set({a: 'a'});
  assert.deepEqual(changes, ['a']);
  model.set({a: 'a'});
  assert.deepEqual(changes, ['a', 'a']);
});

// Deep equality (not identity) decides whether a set changed anything:
// a structurally-equal object literal is not a change.
QUnit.test('#1943 change calculations should use _.isEqual', function(assert) {
  var model = new Backbone.Model({a: {key: 'value'}});
  model.set('a', {key: 'value'}, {silent: true});
  assert.equal(model.changedAttributes(), false);
});

// Even when a change:property handler rewrites the attribute, exactly
// one final `change` event must still fire.
QUnit.test('#1964 - final `change` event is always fired, regardless of interim changes', function(assert) {
  assert.expect(1);
  var model = new Backbone.Model();
  model.on('change:property', function() {
    model.set('property', 'bar');
  });
  model.on('change', function() {
    assert.ok(true);
  });
  model.set('property', 'foo');
});

// isValid() reflects the current attributes; a failed {validate: true}
// set leaves attributes (and isValid) unchanged.
QUnit.test('isValid', function(assert) {
  var model = new Backbone.Model({valid: true});
  model.validate = function(attrs) {
    if (!attrs.valid) return 'invalid';
  };
  assert.equal(model.isValid(), true);
  assert.equal(model.set({valid: false}, {validate: true}), false);
  assert.equal(model.isValid(), true);
  model.set({valid: false});
  assert.equal(model.isValid(), false);
  assert.ok(!model.set('valid', false, {validate: true}));
});
// With no validate function (explicitly nulled), isValid() is true.
QUnit.test('#1179 - isValid returns true in the absence of validate.', function(assert) {
  assert.expect(1);
  var model = new Backbone.Model();
  model.validate = null;
  assert.ok(model.isValid());
});

// Construction with {validate: true} runs validate() and records the
// failure on model.validationError.
QUnit.test('#1961 - Creating a model with {validate:true} will call validate and use the error callback', function(assert) {
  var Model = Backbone.Model.extend({
    validate: function(attrs) {
      if (attrs.id === 1) return "This shouldn't happen";
    }
  });
  var model = new Model({id: 1}, {validate: true});
  assert.equal(model.validationError, "This shouldn't happen");
});

// During save(..., {wait: true}) the attrs being saved must already be
// visible to toJSON() even though they are not yet set on the model.
QUnit.test('toJSON receives attrs during save(..., {wait: true})', function(assert) {
  assert.expect(1);
  var Model = Backbone.Model.extend({
    url: '/test',
    toJSON: function() {
      assert.strictEqual(this.attributes.x, 1);
      return _.clone(this.attributes);
    }
  });
  var model = new Model;
  model.save({x: 1}, {wait: true});
});

// A silent set inside the `change` handler must not re-enter it:
// the handler runs exactly once.
QUnit.test('#2034 - nested set with silent only triggers one change', function(assert) {
  assert.expect(1);
  var model = new Backbone.Model();
  model.on('change', function() {
    model.set({b: true}, {silent: true});
    assert.ok(true);
  });
  model.set({a: true});
});

// A manually-assigned `id` property survives unrelated set() calls and
// is only overwritten when the id attribute itself is set.
QUnit.test('#3778 - id will only be updated if it is set', function(assert) {
  assert.expect(2);
  var model = new Backbone.Model({id: 1});
  model.id = 2;
  model.set({foo: 'bar'});
  assert.equal(model.id, 2);
  model.set({id: 3});
  assert.equal(model.id, 3);
});
})();
| mit |
soundcloud/gocd | server/webapp/WEB-INF/rails.new/vendor/bundle/jruby/1.9/gems/rspec-expectations-2.14.5/lib/rspec/expectations/expectation_target.rb | 2735 | module RSpec
module Expectations
# Wraps the target of an expectation.
# @example
# expect(something) # => ExpectationTarget wrapping something
#
# # used with `to`
# expect(actual).to eq(3)
#
# # with `not_to`
# expect(actual).not_to eq(3)
class ExpectationTarget
class << self
attr_accessor :deprecated_should_enabled
alias deprecated_should_enabled? deprecated_should_enabled
end
# @api private
def initialize(target)
@target = target
end
# Runs the given expectation, passing if `matcher` returns true.
# @example
# expect(value).to eq(5)
# expect { perform }.to raise_error
# @param [Matcher]
# matcher
# @param [String] message optional message to display when the expectation fails
# @return [Boolean] true if the expectation succeeds (else raises)
# @see RSpec::Matchers
def to(matcher=nil, message=nil, &block)
prevent_operator_matchers(:to, matcher)
RSpec::Expectations::PositiveExpectationHandler.handle_matcher(@target, matcher, message, &block)
end
# Runs the given expectation, passing if `matcher` returns false.
# @example
# expect(value).not_to eq(5)
# @param [Matcher]
# matcher
# @param [String] message optional message to display when the expectation fails
# @return [Boolean] false if the negative expectation succeeds (else raises)
# @see RSpec::Matchers
def not_to(matcher=nil, message=nil, &block)
prevent_operator_matchers(:not_to, matcher)
RSpec::Expectations::NegativeExpectationHandler.handle_matcher(@target, matcher, message, &block)
end
alias to_not not_to
def self.enable_deprecated_should
return if deprecated_should_enabled?
def should(*args)
RSpec.deprecate "`expect { }.should`", :replacement => "`expect { }.to`"
@target.should(*args)
end
def should_not(*args)
RSpec.deprecate "`expect { }.should_not`", :replacement => "`expect { }.not_to`"
@target.should_not(*args)
end
self.deprecated_should_enabled = true
end
def self.disable_deprecated_should
return unless deprecated_should_enabled?
remove_method :should
remove_method :should_not
self.deprecated_should_enabled = false
end
private
def prevent_operator_matchers(verb, matcher)
return if matcher
raise ArgumentError, "The expect syntax does not support operator matchers, " +
"so you must pass a matcher to `##{verb}`."
end
end
end
end
| apache-2.0 |
sapcc/fog | lib/fog/bluebox/requests/dns/get_zones.rb | 1352 | module Fog
module DNS
class Bluebox
class Real
require 'fog/bluebox/parsers/dns/get_zones'
# Get list of all DNS zones hosted on Bluebox (for this account)
#
# ==== Returns
# * response<~Excon::Response>:
# * 'records'<~Array>
# * 'record'
# * 'name'<~String> - name of the zone
# * 'serial'<~Integer> - Serial # for the zone
# * 'ttl'<~Integer> - TTL for the zone record in seconds
# * 'retry'<~Integer> - Retry interval for the zone record in seconds
# * 'expires'<~Integer> - Expiration interval for the zone record in seconds
# * 'record-count'<~Integer> - # of records in this zone
# * 'id'<~String> - Id for the zone record
# * 'refresh'<~Integer> - default refresh interval for this zone, in seconds
# * 'minimum'<~Integer> - minimum value for intervals for this zone, in seconds
def get_zones
request(
:expects => 200,
:method => 'GET',
:parser => Fog::Parsers::DNS::Bluebox::GetZones.new,
:path => '/api/domains.xml'
)
end
end
class Mock
def get_zones
Fog::Mock.not_implemented
end
end
end
end
end
| mit |
mohitsethi/fog | lib/fog/bluebox/requests/dns/create_zone.rb | 1972 | module Fog
module DNS
class Bluebox
class Real
require 'fog/bluebox/parsers/dns/create_zone'
# Create a new DNS zone
# ==== Parameters
# * 'name'<~String> - The name of the zone
# * 'ttl'<~Integer> - TimeToLive (ttl) for the domain, in seconds
# * 'retry'<~Integer> - Retry interval for the domain, in seconds
# * 'refresh'<~Integer> - Refresh interval for the zone
# * 'minimum'<~Integer> - Minimum refresh interval for the zone
# ==== Returns
# * response<~Excon::Response>:
# * body<~Hash>:
# * 'name'<~String> - The name of the zone
# * 'serial'<~Integer> - Serial number of the zone
# * 'ttl'<~Integer> - TimeToLive (ttl) for the domain, in seconds
# * 'retry'<~Integer> - Retry interval for the domain, in seconds
# * 'record-count'<~Integer> - Number of records in the zone
# * 'id'<~String> - Id for the zone
# * 'refresh'<~Integer> - Refresh interval for the zone
# * 'minimum'<~Integer> - Minimum refresh interval for the zone
def create_zone(options)
body = %Q{<?xml version="1.0" encoding="UTF-8"?><domain><name>#{options[:name]}</name><ttl>#{options[:ttl]}</ttl>}
body += %Q{<retry>#{options[:retry]}</retry>} if options[:retry]
body += %Q{<refresh>#{options[:retry]}</refresh>} if options[:refresh]
body += %Q{<minimum>#{options[:minimum]}</minimum>} if options[:minimum]
body += %Q{</domain>}
request(
:body => body,
:expects => 202,
:method => 'POST',
:parser => Fog::Parsers::DNS::Bluebox::CreateZone.new,
:path => "/api/domains.xml"
)
end
end
class Mock
def create_zone(options)
Fog::Mock.not_implemented
end
end
end
end
end
| mit |
dbrumley/recfi | llvm-3.3/tools/clang/test/SemaCXX/no-exceptions.cpp | 638 | // RUN: %clang_cc1 -fsyntax-only -verify %s
// Various tests for -fno-exceptions
typedef __SIZE_TYPE__ size_t;
namespace test0 {
  // rdar://problem/7878149
  class Foo {
  public:
    void* operator new(size_t x);
  private:
    void operator delete(void *x);
  };

  void test() {
    // Under -fexceptions, this does access control for the associated
    // 'operator delete'.
    // With exceptions disabled there is no cleanup path that would call
    // it, so no diagnostic is expected here (note: -verify, and this
    // line carries no expected-error directive).
    (void) new Foo();
  }
}
namespace test1 {
  // 'throw' and 'try' are rejected outright when exceptions are disabled;
  // the expected-error comments below are -verify directives and must
  // match the compiler's diagnostics exactly.
  void f() {
    throw; // expected-error {{cannot use 'throw' with exceptions disabled}}
  }

  void g() {
    try { // expected-error {{cannot use 'try' with exceptions disabled}}
      f();
    } catch (...) {
    }
  }
}
| mit |
tesidroni/mp | ExtLibs/ZedGraph/ZedGraph/ZedGraphControl.Designer.cs | 3099 | namespace ZedGraph
{
partial class ZedGraphControl
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
#region Component Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.components = new System.ComponentModel.Container();
this.vScrollBar1 = new System.Windows.Forms.VScrollBar();
this.hScrollBar1 = new System.Windows.Forms.HScrollBar();
this.pointToolTip = new System.Windows.Forms.ToolTip( this.components );
this.contextMenuStrip1 = new System.Windows.Forms.ContextMenuStrip( this.components );
this.SuspendLayout();
//
// vScrollBar1
//
this.vScrollBar1.Location = new System.Drawing.Point( 128, 0 );
this.vScrollBar1.Name = "vScrollBar1";
this.vScrollBar1.Size = new System.Drawing.Size( 17, 128 );
this.vScrollBar1.TabIndex = 0;
//this.vScrollBar1.MouseCaptureChanged += new System.EventHandler( this.ScrollBarMouseCaptureChanged );
this.vScrollBar1.Scroll += new System.Windows.Forms.ScrollEventHandler( this.vScrollBar1_Scroll );
//
// hScrollBar1
//
this.hScrollBar1.Location = new System.Drawing.Point( 0, 128 );
this.hScrollBar1.Name = "hScrollBar1";
this.hScrollBar1.Size = new System.Drawing.Size( 128, 17 );
this.hScrollBar1.TabIndex = 1;
//this.hScrollBar1.MouseCaptureChanged += new System.EventHandler( this.ScrollBarMouseCaptureChanged );
this.hScrollBar1.Scroll += new System.Windows.Forms.ScrollEventHandler( this.hScrollBar1_Scroll );
//
// pointToolTip
//
this.pointToolTip.AutoPopDelay = 5000;
this.pointToolTip.InitialDelay = 100;
this.pointToolTip.ReshowDelay = 0;
//
// contextMenuStrip1
//
this.contextMenuStrip1.Name = "contextMenuStrip1";
this.contextMenuStrip1.Size = new System.Drawing.Size( 61, 4 );
this.contextMenuStrip1.Opening += new System.ComponentModel.CancelEventHandler( this.contextMenuStrip1_Opening );
//
// ZedGraphControl
//
this.AutoScaleDimensions = new System.Drawing.SizeF( 6F, 13F );
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.ContextMenuStrip = this.contextMenuStrip1;
this.Controls.Add( this.hScrollBar1 );
this.Controls.Add( this.vScrollBar1 );
this.Name = "ZedGraphControl";
this.Resize += new System.EventHandler( this.ZedGraphControl_ReSize );
this.KeyUp += new System.Windows.Forms.KeyEventHandler( this.ZedGraphControl_KeyUp );
this.KeyDown += new System.Windows.Forms.KeyEventHandler( this.ZedGraphControl_KeyDown );
this.MouseWheel += new System.Windows.Forms.MouseEventHandler( this.ZedGraphControl_MouseWheel );
this.ResumeLayout( false );
}
#endregion
private System.Windows.Forms.VScrollBar vScrollBar1;
private System.Windows.Forms.HScrollBar hScrollBar1;
private System.Windows.Forms.ToolTip pointToolTip;
private System.Windows.Forms.ContextMenuStrip contextMenuStrip1;
}
}
| gpl-3.0 |
veger/ansible | test/units/modules/network/ironware/ironware_module.py | 2550 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
from ansible.module_utils import basic
from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase
# Directory containing test fixture files, resolved relative to this module.
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
# Cache of already-loaded fixtures, keyed by absolute path.
fixture_data = {}


def load_fixture(name):
    """Load fixture *name* from the fixtures directory, with caching.

    The file's content is returned parsed as JSON when it is valid JSON,
    otherwise as the raw text. Results are cached in ``fixture_data`` so
    repeated lookups do not re-read the file.
    """
    path = os.path.join(fixture_path, name)

    if path in fixture_data:
        return fixture_data[path]

    with open(path) as f:
        data = f.read()

    try:
        data = json.loads(data)
    except ValueError:
        # Bug fix: was a bare `except:` which swallowed every exception,
        # including KeyboardInterrupt/SystemExit. Only a JSON decode
        # failure (json.JSONDecodeError is a ValueError subclass) means
        # "keep the raw text".
        pass

    fixture_data[path] = data
    return data
class TestIronwareModule(ModuleTestCase):
    """Shared harness for Ironware module tests.

    Subclasses set ``self.module`` and override ``load_fixtures`` to stub
    out device interaction, then call ``execute_module`` per scenario.
    """

    def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False):
        """Run the module and check failure/changed state and emitted commands.

        Returns the result dict produced by the module's exit/fail call.
        """
        self.load_fixtures(commands)

        if failed:
            result = self.failed()
            self.assertTrue(result['failed'], result)
        else:
            result = self.changed(changed)
            self.assertEqual(result['changed'], changed, result)

        if commands is not None:
            expected = sorted(commands) if sort else commands
            actual = sorted(result['commands']) if sort else result['commands']
            self.assertEqual(expected, actual, result['commands'])

        return result

    def failed(self):
        """Run main() expecting a failure exit; return the failure payload."""
        with self.assertRaises(AnsibleFailJson) as ctx:
            self.module.main()

        result = ctx.exception.args[0]
        self.assertTrue(result['failed'], result)
        return result

    def changed(self, changed=False):
        """Run main() expecting a clean exit; return the exit payload."""
        with self.assertRaises(AnsibleExitJson) as ctx:
            self.module.main()

        result = ctx.exception.args[0]
        self.assertEqual(result['changed'], changed, result)
        return result

    def load_fixtures(self, commands=None):
        """Hook for subclasses to install fixture data; no-op by default."""
        pass
evgrud/TypeScript | tests/baselines/reference/project/maprootUrlsourcerootUrlModuleSimpleSpecifyOutputDirectory/amd/outdir/simple/test.js | 446 | define(["require", "exports", "m1"], function (require, exports, m1) {
"use strict";
exports.a1 = 10;
var c1 = (function () {
function c1() {
}
return c1;
}());
exports.c1 = c1;
exports.instance1 = new c1();
function f1() {
return exports.instance1;
}
exports.f1 = f1;
exports.a2 = m1.m1_c1;
});
//# sourceMappingURL=http://www.typescriptlang.org/test.js.map | apache-2.0 |
a-b/homebrew | Library/Formula/xhyve.rb | 689 | class Xhyve < Formula
desc "xhyve, lightweight OS X virtualization solution based on FreeBSD's bhyve"
homepage "https://github.com/mist64/xhyve"
url "https://github.com/mist64/xhyve/archive/v0.2.0.tar.gz"
sha256 "32c390529a73c8eb33dbc1aede7baab5100c314f726cac14627d2204ad9d3b3c"
head "https://github.com/mist64/xhyve.git"
bottle do
cellar :any
sha256 "4b7fe0a81da6d1a6777a42c41d3465d7777047a1ec9581fcfdef949e13d68010" => :yosemite
end
depends_on :macos => :yosemite
def install
args = []
args << "GIT_VERSION=#{version}" if build.stable?
system "make", *args
bin.install "build/xhyve"
end
test do
system "#{bin}/xhyve", "-v"
end
end
| bsd-2-clause |
cashmanbiz/prop2 | wp-content/plugins/nextgen-gallery/products/photocrati_nextgen/modules/ngglegacy/admin/upgrade.php | 8994 | <?php
/**
 * ngg_convert_tags() - Import the legacy NextGEN tags into the WordPress
 * taxonomy tables as 'ngg_tag' terms (only required for pre V1.00 versions).
 *
 * @return void
 */
function ngg_convert_tags() {
	global $wpdb, $wp_taxonomies;

	// get the obsolete tables
	$wpdb->nggtags = $wpdb->prefix . 'ngg_tags';
	$wpdb->nggpic2tags = $wpdb->prefix . 'ngg_pic2tags';

	$picturelist = $wpdb->get_col("SELECT pid FROM $wpdb->nggpictures");

	if ( is_array($picturelist) ) {
		foreach($picturelist as $id) {
			$tags = array();
			// collect the legacy tag names attached to this picture
			// ($id comes from the pid column of our own table)
			$tagarray = $wpdb->get_results("SELECT t.*, tt.* FROM $wpdb->nggpic2tags AS t INNER JOIN $wpdb->nggtags AS tt ON t.tagid = tt.id WHERE t.picid = '$id' ORDER BY tt.slug ASC ");
			if (!empty($tagarray)){
				foreach($tagarray as $element) {
					$tags[$element->id] = $element->name;
				}
				// hand the collected names to WP's taxonomy system
				wp_set_object_terms($id, $tags, 'ngg_tag');
			}
		}
	}
}
/**
 * ngg_convert_filestructure() - converter for the old thumbnail folder
 * structure: renames each gallery's 'tumbs' folder to 'thumbs' and the
 * contained files to the 'thumbs_' prefix, reporting any failures.
 *
 * @return void
 */
function ngg_convert_filestructure() {
	global $wpdb;

	$gallerylist = $wpdb->get_results("SELECT * FROM $wpdb->nggallery ORDER BY gid ASC", OBJECT_K);
	if ( is_array($gallerylist) ) {
		$errors = array();
		foreach($gallerylist as $gallery) {
			$gallerypath = ABSPATH.$gallery->path;
			// old mygallery check, convert the wrong folder/ file name now
			if (@is_dir($gallerypath . '/tumbs')) {
				if ( !@rename($gallerypath . '/tumbs' , $gallerypath .'/thumbs') )
					$errors[] = $gallery->path . '/thumbs';
				// read list of images
				$imageslist = nggAdmin::scandir($gallerypath . '/thumbs');
				if ( !empty($imageslist)) {
					foreach($imageslist as $image) {
						// strip the 4-character legacy prefix and re-prefix
						// the file with 'thumbs_'
						$purename = substr($image, 4);
						if ( !@rename($gallerypath . '/thumbs/' . $image, $gallerypath . '/thumbs/thumbs_' . $purename ))
							$errors[] = $gallery->path . '/thumbs/thumbs_' . $purename ;
					}
				}
			}
		}
		// show every rename failure so the user can fix permissions and rescan
		if (!empty($errors)) {
			echo "<div class='error_inline'><p>". __('Some folders/files could not renamed, please recheck the permission and rescan the folder in the manage gallery section.', 'nggallery') ."</p>";
			foreach($errors as $value) {
				echo __('Rename failed', 'nggallery') . ' : <strong>' . $value . "</strong><br />\n";
			}
			echo '</div>';
		}
	}
}
/**
 * Move the imagerotator out of the plugin folder into the uploads
 * directory (it is being removed from the repo with the next update).
 *
 * @return string URL to the imagerotator, or '' when it cannot be found
 */
function ngg_move_imagerotator() {
	$upload   = wp_upload_dir();
	$old_file = NGGALLERY_ABSPATH . 'imagerotator.swf';
	$new_file = $upload['basedir'] . '/imagerotator.swf';

	// Try to relocate the file from the plugin folder first.
	if ( file_exists( $old_file ) )
		@rename( $old_file, $new_file );

	// Relocation (now or previously) succeeded: report the uploads URL.
	if ( file_exists( $new_file ) )
		return $upload['baseurl'] . '/imagerotator.swf';

	// Worst case: the move failed and the file is still in the plugin folder.
	if ( file_exists( $old_file ) )
		return NGGALLERY_URLPATH . 'imagerotator.swf';

	// Nothing found anywhere.
	return '';
}
/**
 * ngg_import_date_time() - Read the timestamp from EXIF data and store it
 * in the imagedate column for every picture.
 *
 * @return void
 */
function ngg_import_date_time() {
	global $wpdb;

	$imagelist = $wpdb->get_results("SELECT t.*, tt.* FROM $wpdb->nggallery AS t INNER JOIN $wpdb->nggpictures AS tt ON t.gid = tt.galleryid ORDER BY tt.pid ASC");
	if ( is_array($imagelist) ) {
		foreach ($imagelist as $image) {
			// wrap the row so nggMeta can locate the file on disk
			$picture = new nggImage($image);
			$meta = new nggMeta($picture->pid, true);
			$date = $meta->get_date_time();
			// pid originates from our own table's primary key
			$wpdb->query("UPDATE $wpdb->nggpictures SET imagedate = '$date' WHERE pid = '$picture->pid'");
		}
	}
}
/**
 * Add a database column, unless it already exists.
 * Example : ngg_maybe_add_column( $wpdb->nggpictures, 'imagedate', "DATETIME NOT NULL DEFAULT '0000-00-00 00:00:00' AFTER alttext");
 *
 * @param string $table_name  Database table name.
 * @param string $column_name Database column name to create.
 * @param string $create_ddl  SQL fragment describing the column.
 * @return bool True when the column exists (already, or after creation).
 */
function ngg_maybe_add_column($table_name, $column_name, $create_ddl) {
	global $wpdb;

	// Already there? Nothing to do.
	$columns = $wpdb->get_col("SHOW COLUMNS FROM $table_name");
	if ( in_array($column_name, $columns) )
		return true;

	// Not found - try to create it.
	$wpdb->query("ALTER TABLE $table_name ADD $column_name " . $create_ddl);

	// ALTER gives no direct success signal, so look the column up again.
	$columns = $wpdb->get_col("SHOW COLUMNS FROM $table_name");
	if ( in_array($column_name, $columns) )
		return true;

	echo("Could not add column $column_name in table $table_name<br />\n");
	return false;
}
/**
 * nggallery_upgrade_page() - Shown when the database version doesn't match
 * the script's NGG_DBVERSION constant; offers a link to start the upgrade.
 *
 * @return void
 */
function nggallery_upgrade_page() {

	// Security fix: $_GET['page'] was concatenated raw and later echoed
	// into href attributes, allowing reflected XSS / query injection.
	// urlencode() confines it to a single safe query-arg value.
	$filepath = admin_url() . 'admin.php?page=' . urlencode( $_GET['page'] );

	if ( isset($_GET['upgrade']) && $_GET['upgrade'] == 'now') {
		nggallery_start_upgrade($filepath);
		return;
	}
	?>
	<div class="wrap">
		<h2><?php _e('Upgrade NextGEN Gallery', 'nggallery') ;?></h2>
		<p><?php _e('The script detect that you upgrade from a older version.', 'nggallery') ;?>
		<?php _e('Your database tables for NextGEN Gallery is out-of-date, and must be upgraded before you can continue.', 'nggallery'); ?>
		<?php _e('If you would like to downgrade later, please make first a complete backup of your database and the images.', 'nggallery') ;?></p>
		<p><?php _e('The upgrade process may take a while, so please be patient.', 'nggallery'); ?></p>
		<h3><a href="<?php echo $filepath;?>&upgrade=now"><?php _e('Start upgrade now', 'nggallery'); ?>...</a></h3>
	</div>
	<?php
}
/**
 * nggallery_start_upgrade() - Run the upgrade routine (ngg_upgrade) and
 * render the progress/continue page around it.
 *
 * @param string $filepath Admin URL to return to after the upgrade
 * @return void
 */
function nggallery_start_upgrade($filepath) {
	?>
	<div class="wrap">
		<h2><?php _e('Upgrade NextGEN Gallery', 'nggallery') ;?></h2>
		<p><?php ngg_upgrade();?></p>
		<p class="finished"><?php _e('Upgrade finished...', 'nggallery') ;?></p>
		<h3><a class="finished" href="<?php echo $filepath;?>"><?php _e('Continue', 'nggallery'); ?>...</a></h3>
	</div>
	<?php
}
/**
 * Rebuild slugs for albums, galleries and images via AJAX request.
 * Renders per-type progress counters and a jQuery driver that posts
 * batches of 50 to the 'ngg_rebuild_unique_slugs' admin-ajax action.
 *
 * @since 1.7.0
 * @access internal
 */
class ngg_rebuild_unique_slugs {

	function start_rebuild() {
		global $wpdb;

		$total = array();
		// get the total number of images, galleries and albums so the
		// client-side loop knows when each phase is complete
		$total['images'] = intval( $wpdb->get_var("SELECT COUNT(*) FROM $wpdb->nggpictures") );
		$total['gallery'] = intval( $wpdb->get_var("SELECT COUNT(*) FROM $wpdb->nggallery") );
		$total['album'] = intval( $wpdb->get_var("SELECT COUNT(*) FROM $wpdb->nggalbum") );

		// progress message templates; %s placeholders are filled with the
		// current/total counter spans below
		$messages = array(
			'images' => __( 'Rebuild image structure : %s / %s images', 'nggallery' ),
			'gallery' => __( 'Rebuild gallery structure : %s / %s galleries', 'nggallery' ),
			'album' => __( 'Rebuild album structure : %s / %s albums', 'nggallery' ),
		);
		?>
		<?php
		foreach ( array_keys( $messages ) as $key ) {
			$message = sprintf( $messages[ $key ] ,
				"<span class='ngg-count-current'>0</span>",
				"<span class='ngg-count-total'>" . $total[ $key ] . "</span>"
			);
			echo "<div class='$key updated'><p class='ngg'>$message</p></div>";
		}
		$ajax_url = add_query_arg( 'action', 'ngg_rebuild_unique_slugs', admin_url( 'admin-ajax.php' ) );
		?>
		<script type="text/javascript">
		jQuery(document).ready(function($) {
			// Phase state: _action walks images -> gallery -> album,
			// posting `count`-sized offsets until `total` is exceeded.
			var ajax_url = '<?php echo $ajax_url; ?>',
				_action = 'images',
				images = <?php echo $total['images']; ?>,
				gallery = <?php echo $total['gallery']; ?>,
				album = <?php echo $total['album']; ?>,
				total = 0,
				offset = 0,
				count = 50;
			var $display = $('.ngg-count-current');

			$('.finished, .gallery, .album').hide();
			total = images;

			function call_again() {
				if ( offset > total ) {
					offset = 0;
					// 1st run finished
					if (_action == 'images') {
						_action = 'gallery';
						total = gallery;
						$('.images, .gallery').toggle();
						$display.html(offset);
						call_again();
						return;
					}
					// 2nd run finished
					if (_action == 'gallery') {
						_action = 'album';
						total = album;
						$('.gallery, .album').toggle();
						$display.html(offset);
						call_again();
						return;
					}
					// 3rd run finished, exit now
					if (_action == 'album') {
						$('.ngg')
							.html('<?php _e( 'Done.', 'nggallery' ); ?>')
							.parent('div').hide();
						$('.finished').show();
						return;
					}
				}

				// process the next batch, then recurse from the callback
				$.post(ajax_url, {'_action': _action, 'offset': offset}, function(response) {
					$display.html(offset);
					offset += count;
					call_again();
				});
			}
			call_again();
		});
		</script>
		<?php
	}
}
circularvortex/testme | sites/all/modules/feedapi/parser_simplepie/test/feed_copyright/SPtests/rss/0.90/atom/1.0/rights.php | 474 | <?php
// SimplePie test case: an RSS 0.90 feed carrying an Atom 1.0 <rights>
// element must surface that text as the feed copyright.
class SimplePie_Feed_Copyright_Test_RSS_090_Atom_10_Rights extends SimplePie_Feed_Copyright_Test
{
	// Feed fixture fed to the parser.
	function data()
	{
		$this->data =
'<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns="http://my.netscape.com/rdf/simple/0.9/" xmlns:a="http://www.w3.org/2005/Atom">
	<channel>
		<a:rights>Example Copyright Information</a:rights>
	</channel>
</rdf:RDF>';
	}

	// Value the harness expects get_copyright() to return.
	function expected()
	{
		$this->expected = 'Example Copyright Information';
	}
}
?> | gpl-2.0 |
MigSchweps/EraNueva | wp-content/plugins/the-events-calendar/resources/deprecated/chosen-load.min.js | 64 | jQuery(document).ready(function(){jQuery(".chosen").chosen()});
| gpl-2.0 |
mzdb/pwiz-mzdb | libraries/boost_1_56_0/boost/asio/ip/basic_resolver_query.hpp | 9356 | //
// ip/basic_resolver_query.hpp
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~
//
// Copyright (c) 2003-2014 Christopher M. Kohlhoff (chris at kohlhoff dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
#ifndef BOOST_ASIO_IP_BASIC_RESOLVER_QUERY_HPP
#define BOOST_ASIO_IP_BASIC_RESOLVER_QUERY_HPP
#if defined(_MSC_VER) && (_MSC_VER >= 1200)
# pragma once
#endif // defined(_MSC_VER) && (_MSC_VER >= 1200)
#include <boost/asio/detail/config.hpp>
#include <string>
#include <boost/asio/detail/socket_ops.hpp>
#include <boost/asio/ip/resolver_query_base.hpp>
#include <boost/asio/detail/push_options.hpp>
namespace boost {
namespace asio {
namespace ip {
/// An query to be passed to a resolver.
/**
* The boost::asio::ip::basic_resolver_query class template describes a query
* that can be passed to a resolver.
*
* @par Thread Safety
* @e Distinct @e objects: Safe.@n
* @e Shared @e objects: Unsafe.
*/
template <typename InternetProtocol>
class basic_resolver_query
: public resolver_query_base
{
public:
/// The protocol type associated with the endpoint query.
typedef InternetProtocol protocol_type;
  /// Construct with specified service name for any protocol.
  /**
   * This constructor is typically used to perform name resolution for local
   * service binding.
   *
   * @param service A string identifying the requested service. This may be a
   * descriptive name or a numeric string corresponding to a port number.
   *
   * @param resolve_flags A set of flags that determine how name resolution
   * should be performed. The default flags are suitable for local service
   * binding.
   *
   * @note On POSIX systems, service names are typically defined in the file
   * <tt>/etc/services</tt>. On Windows, service names may be found in the file
   * <tt>c:\\windows\\system32\\drivers\\etc\\services</tt>. Operating systems
   * may use additional locations when resolving service names.
   */
  basic_resolver_query(const std::string& service,
      resolver_query_base::flags resolve_flags = passive | address_configured)
    : hints_(),
      host_name_(),
      service_name_(service)
  {
    // A default-constructed endpoint supplies the protocol's socket type
    // and protocol numbers for the resolver hints.
    typename InternetProtocol::endpoint endpoint;
    hints_.ai_flags = static_cast<int>(resolve_flags);
    // No specific family: accept results for any address family.
    hints_.ai_family = PF_UNSPEC;
    hints_.ai_socktype = endpoint.protocol().type();
    hints_.ai_protocol = endpoint.protocol().protocol();
    // Remaining hint fields are unused for a query and zeroed.
    hints_.ai_addrlen = 0;
    hints_.ai_canonname = 0;
    hints_.ai_addr = 0;
    hints_.ai_next = 0;
  }
  /// Construct with specified service name for a given protocol.
  /**
   * This constructor is typically used to perform name resolution for local
   * service binding with a specific protocol version.
   *
   * @param protocol A protocol object, normally representing either the IPv4 or
   * IPv6 version of an internet protocol.
   *
   * @param service A string identifying the requested service. This may be a
   * descriptive name or a numeric string corresponding to a port number.
   *
   * @param resolve_flags A set of flags that determine how name resolution
   * should be performed. The default flags are suitable for local service
   * binding.
   *
   * @note On POSIX systems, service names are typically defined in the file
   * <tt>/etc/services</tt>. On Windows, service names may be found in the file
   * <tt>c:\\windows\\system32\\drivers\\etc\\services</tt>. Operating systems
   * may use additional locations when resolving service names.
   */
  basic_resolver_query(const protocol_type& protocol,
      const std::string& service,
      resolver_query_base::flags resolve_flags = passive | address_configured)
    : hints_(),
      host_name_(),
      service_name_(service)
  {
    hints_.ai_flags = static_cast<int>(resolve_flags);
    // Unlike the any-protocol overload, the caller's protocol object pins
    // the family as well as the socket type and protocol numbers.
    hints_.ai_family = protocol.family();
    hints_.ai_socktype = protocol.type();
    hints_.ai_protocol = protocol.protocol();
    // Remaining hint fields are unused for a query and zeroed.
    hints_.ai_addrlen = 0;
    hints_.ai_canonname = 0;
    hints_.ai_addr = 0;
    hints_.ai_next = 0;
  }
/// Construct with specified host name and service name for any protocol.
/**
* This constructor is typically used to perform name resolution for
* communication with remote hosts.
*
* @param host A string identifying a location. May be a descriptive name or
* a numeric address string. If an empty string and the passive flag has been
* specified, the resolved endpoints are suitable for local service binding.
* If an empty string and passive is not specified, the resolved endpoints
* will use the loopback address.
*
* @param service A string identifying the requested service. This may be a
* descriptive name or a numeric string corresponding to a port number. May
* be an empty string, in which case all resolved endpoints will have a port
* number of 0.
*
* @param resolve_flags A set of flags that determine how name resolution
* should be performed. The default flags are suitable for communication with
* remote hosts.
*
* @note On POSIX systems, host names may be locally defined in the file
* <tt>/etc/hosts</tt>. On Windows, host names may be defined in the file
* <tt>c:\\windows\\system32\\drivers\\etc\\hosts</tt>. Remote host name
* resolution is performed using DNS. Operating systems may use additional
* locations when resolving host names (such as NETBIOS names on Windows).
*
* On POSIX systems, service names are typically defined in the file
* <tt>/etc/services</tt>. On Windows, service names may be found in the file
* <tt>c:\\windows\\system32\\drivers\\etc\\services</tt>. Operating systems
* may use additional locations when resolving service names.
*/
basic_resolver_query(const std::string& host, const std::string& service,
    resolver_query_base::flags resolve_flags = address_configured)
  : hints_(),
    host_name_(host),
    service_name_(service)
{
  // A default-constructed endpoint supplies the socket type and protocol
  // number appropriate for this InternetProtocol template parameter.
  typename InternetProtocol::endpoint endpoint;
  hints_.ai_flags = static_cast<int>(resolve_flags);
  // AF_UNSPEC via the OS-abstraction macro: accept any address family.
  hints_.ai_family = BOOST_ASIO_OS_DEF(AF_UNSPEC);
  hints_.ai_socktype = endpoint.protocol().type();
  hints_.ai_protocol = endpoint.protocol().protocol();
  // Unused hint members are zeroed so the structure is fully initialised.
  hints_.ai_addrlen = 0;
  hints_.ai_canonname = 0;
  hints_.ai_addr = 0;
  hints_.ai_next = 0;
}
/// Construct with specified host name and service name for a given protocol.
/**
* This constructor is typically used to perform name resolution for
* communication with remote hosts.
*
* @param protocol A protocol object, normally representing either the IPv4 or
* IPv6 version of an internet protocol.
*
* @param host A string identifying a location. May be a descriptive name or
* a numeric address string. If an empty string and the passive flag has been
* specified, the resolved endpoints are suitable for local service binding.
* If an empty string and passive is not specified, the resolved endpoints
* will use the loopback address.
*
* @param service A string identifying the requested service. This may be a
* descriptive name or a numeric string corresponding to a port number. May
* be an empty string, in which case all resolved endpoints will have a port
* number of 0.
*
* @param resolve_flags A set of flags that determine how name resolution
* should be performed. The default flags are suitable for communication with
* remote hosts.
*
* @note On POSIX systems, host names may be locally defined in the file
* <tt>/etc/hosts</tt>. On Windows, host names may be defined in the file
* <tt>c:\\windows\\system32\\drivers\\etc\\hosts</tt>. Remote host name
* resolution is performed using DNS. Operating systems may use additional
* locations when resolving host names (such as NETBIOS names on Windows).
*
* On POSIX systems, service names are typically defined in the file
* <tt>/etc/services</tt>. On Windows, service names may be found in the file
* <tt>c:\\windows\\system32\\drivers\\etc\\services</tt>. Operating systems
* may use additional locations when resolving service names.
*/
basic_resolver_query(const protocol_type& protocol,
    const std::string& host, const std::string& service,
    resolver_query_base::flags resolve_flags = address_configured)
  : hints_(),
    host_name_(host),
    service_name_(service)
{
  hints_.ai_flags = static_cast<int>(resolve_flags);
  // The caller's protocol object pins family, socket type and protocol
  // number, restricting resolution to one IP version.
  hints_.ai_family = protocol.family();
  hints_.ai_socktype = protocol.type();
  hints_.ai_protocol = protocol.protocol();
  // Unused hint members are zeroed so the structure is fully initialised.
  hints_.ai_addrlen = 0;
  hints_.ai_canonname = 0;
  hints_.ai_addr = 0;
  hints_.ai_next = 0;
}
/// Get the hints associated with the query.
const boost::asio::detail::addrinfo_type& hints() const
{
  return hints_;
}

/// Get the host name associated with the query.
std::string host_name() const
{
  // Returned by value; the query keeps its own copy.
  return host_name_;
}

/// Get the service name associated with the query.
std::string service_name() const
{
  return service_name_;
}

private:
  // Hints structure consulted during name resolution; every constructor
  // initialises all of its members.
  boost::asio::detail::addrinfo_type hints_;
  // Host to resolve; empty for the service-only constructors.
  std::string host_name_;
  // Service name or numeric port string.
  std::string service_name_;
};
} // namespace ip
} // namespace asio
} // namespace boost
#include <boost/asio/detail/pop_options.hpp>
#endif // BOOST_ASIO_IP_BASIC_RESOLVER_QUERY_HPP
| apache-2.0 |
pbl/skanken | vendor/bundle/ruby/2.0.0/gems/sass-3.4.13/vendor/listen/spec/listen/adapters/linux_spec.rb | 1248 | require 'spec_helper'
# Specs for the Linux (inotify) adapter. The adapter is exercised only when
# actually running on Linux; on every other platform the spec just asserts
# that the adapter reports itself as unusable.
describe Listen::Adapters::Linux do
  if linux?
    if Listen::Adapters::Linux.usable?
      it "is usable on Linux" do
        described_class.should be_usable
      end

      it_should_behave_like 'a filesystem adapter'
      it_should_behave_like 'an adapter that call properly listener#on_change'

      describe '#initialize' do
        context 'when the inotify limit for watched files is not enough' do
          # Simulate the kernel refusing a new watch by raising ENOSPC,
          # which is what inotify reports when its watch limit is reached.
          before { INotify::Notifier.any_instance.should_receive(:watch).and_raise(Errno::ENOSPC) }

          it 'fails gracefully' do
            # The adapter should abort with its dedicated limit message
            # instead of propagating the raw Errno::ENOSPC.
            described_class.any_instance.should_receive(:abort).with(described_class::INOTIFY_LIMIT_MESSAGE)
            described_class.new(File.dirname(__FILE__))
          end
        end
      end
    else
      it "isn't usable on Linux with #{RbConfig::CONFIG['RUBY_INSTALL_NAME']}" do
        described_class.should_not be_usable
      end
    end
  end

  if bsd?
    it "isn't usable on BSD" do
      described_class.should_not be_usable
    end
  end

  if mac?
    it "isn't usable on Mac OS X" do
      described_class.should_not be_usable
    end
  end

  if windows?
    it "isn't usable on Windows" do
      described_class.should_not be_usable
    end
  end
end
| mit |
shimingsg/corefx | src/System.CodeDom/src/System/CodeDom/CodeTypeParameter.cs | 1051 | // Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System.CodeDom
{
    /// <summary>
    /// Represents a type parameter of a generic type or method in a CodeDOM graph.
    /// </summary>
    public class CodeTypeParameter : CodeObject
    {
        private string _name;
        private CodeAttributeDeclarationCollection _customAttributes;
        private CodeTypeReferenceCollection _constraints;

        /// <summary>Initializes a new, unnamed type parameter.</summary>
        public CodeTypeParameter() { }

        /// <summary>Initializes a type parameter with the specified name.</summary>
        public CodeTypeParameter(string name)
        {
            _name = name;
        }

        /// <summary>
        /// Gets or sets the type parameter's name. Never returns null; an
        /// unset name reads as the empty string.
        /// </summary>
        public string Name
        {
            get { return _name ?? string.Empty; }
            set { _name = value; }
        }

        /// <summary>
        /// Gets the constraints on the type parameter. The collection is
        /// created lazily on first access.
        /// </summary>
        public CodeTypeReferenceCollection Constraints
        {
            get
            {
                if (_constraints == null)
                {
                    _constraints = new CodeTypeReferenceCollection();
                }
                return _constraints;
            }
        }

        /// <summary>
        /// Gets the custom attributes applied to the type parameter. The
        /// collection is created lazily on first access.
        /// </summary>
        public CodeAttributeDeclarationCollection CustomAttributes
        {
            get
            {
                if (_customAttributes == null)
                {
                    _customAttributes = new CodeAttributeDeclarationCollection();
                }
                return _customAttributes;
            }
        }

        /// <summary>
        /// Gets or sets whether the type parameter carries a constructor
        /// (new()) constraint.
        /// </summary>
        public bool HasConstructorConstraint { get; set; }
    }
}
| mit |
ed-rom/moodle | grade/import/direct/version.php | 1286 | <?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* Version details
*
* @package gradeimport_direct
* @copyright 2014 Adrian Greeve <adrian@moodle.com>
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
// Guard against direct access: this file is only valid when included by
// Moodle core, which defines MOODLE_INTERNAL and provides $plugin.
defined('MOODLE_INTERNAL') || die();

$plugin->version = 2015051100; // The current plugin version (Date: YYYYMMDDXX).
$plugin->requires = 2015050500; // Requires this Moodle version.
$plugin->component = 'gradeimport_direct'; // Full name of the plugin (used for diagnostics).
$plugin->dependencies = array('gradeimport_csv' => 2015050500); // Grade import csv is required for this plugin.
Sascha-Brandhoff/count_ce | assets/waypoints/lib/shortcuts/inview.js | 2804 | /*!
Waypoints Inview Shortcut - 4.0.0
Copyright © 2011-2015 Caleb Troughton
Licensed under the MIT license.
https://github.com/imakewebthings/waypoints/blob/master/licenses.txt
*/
(function() {
  'use strict'

  function noop() {}

  var Waypoint = window.Waypoint

  /* http://imakewebthings.com/waypoints/shortcuts/inview */
  function Inview(options) {
    this.options = Waypoint.Adapter.extend({}, Inview.defaults, options)
    this.axis = this.options.horizontal ? 'horizontal' : 'vertical'
    this.waypoints = []
    this.element = this.options.element
    this.createWaypoints()
  }

  /* Private */
  /* Creates the four boundary waypoints for the configured axis. Each
     config maps a scroll direction to the user callback it should fire,
     at the offset where the element crosses that boundary. */
  Inview.prototype.createWaypoints = function() {
    var configs = {
      vertical: [{
        down: 'enter',
        up: 'exited',
        offset: '100%'
      }, {
        down: 'entered',
        up: 'exit',
        offset: 'bottom-in-view'
      }, {
        down: 'exit',
        up: 'entered',
        offset: 0
      }, {
        down: 'exited',
        up: 'enter',
        offset: function() {
          return -this.adapter.outerHeight()
        }
      }],
      horizontal: [{
        right: 'enter',
        left: 'exited',
        offset: '100%'
      }, {
        right: 'entered',
        left: 'exit',
        offset: 'right-in-view'
      }, {
        right: 'exit',
        left: 'entered',
        offset: 0
      }, {
        right: 'exited',
        left: 'enter',
        offset: function() {
          return -this.adapter.outerWidth()
        }
      }]
    }

    configs[this.axis].forEach(this.createWaypoint, this)
  }

  /* Private */
  Inview.prototype.createWaypoint = function(config) {
    var inview = this
    this.waypoints.push(new Waypoint({
      context: this.options.context,
      element: this.options.element,
      enabled: this.options.enabled,
      /* Dispatch to the user callback named for this scroll direction,
         with the Inview instance as the receiver. */
      handler: function(direction) {
        inview.options[config[direction]].call(inview, direction)
      },
      offset: config.offset,
      horizontal: this.options.horizontal
    }))
  }

  /* Public */
  Inview.prototype.destroy = function() {
    this.waypoints.forEach(function(waypoint) {
      waypoint.destroy()
    })
    this.waypoints = []
  }

  Inview.prototype.disable = function() {
    this.waypoints.forEach(function(waypoint) {
      waypoint.disable()
    })
  }

  Inview.prototype.enable = function() {
    this.waypoints.forEach(function(waypoint) {
      waypoint.enable()
    })
  }

  Inview.defaults = {
    context: window,
    enabled: true,
    enter: noop,
    entered: noop,
    exit: noop,
    exited: noop
  }

  Waypoint.Inview = Inview
}())
; | lgpl-3.0 |
DeepakRajendranMsft/azure-powershell | src/ServiceManagement/Services/Commands.Utilities/Scheduler/Model/PSJobCollection.cs | 1329 | // ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
namespace Microsoft.WindowsAzure.Commands.Utilities.Scheduler.Model
{
    /// <summary>
    /// PowerShell-facing projection of an Azure Scheduler job collection.
    /// Setters are internal: instances are populated by this assembly only.
    /// </summary>
    public class PSJobCollection
    {
        /// <summary>Cloud service that hosts the job collection.</summary>
        public string CloudServiceName { get; internal set; }

        /// <summary>Region of the job collection.</summary>
        public string Location { get; internal set; }

        /// <summary>Name of the job collection.</summary>
        public string JobCollectionName { get; internal set; }

        /// <summary>Pricing plan of the job collection.</summary>
        public string Plan { get; internal set; }

        /// <summary>Current state of the job collection.</summary>
        public string State { get; internal set; }

        /// <summary>Maximum number of jobs allowed, as a string.</summary>
        public string MaxJobCount { get; internal set; }

        /// <summary>Maximum recurrence allowed, as a string.</summary>
        public string MaxRecurrence { get; internal set; }

        /// <summary>URI of the job collection resource.</summary>
        public string Uri { get; internal set; }
    }
}
| apache-2.0 |
austinsc/Orchard | src/Orchard/Mvc/ViewEngines/IViewEngineProvider.cs | 768 | using System.Collections.Generic;
using System.Web.Mvc;
namespace Orchard.Mvc.ViewEngines {
    /// <summary>Parameters for creating a theme-scoped view engine.</summary>
    public class CreateThemeViewEngineParams {
        /// <summary>Virtual path of the theme whose views are resolved.</summary>
        public string VirtualPath { get; set; }
    }

    /// <summary>Parameters for creating a module-scoped view engine.</summary>
    public class CreateModulesViewEngineParams {
        /// <summary>Virtual paths of the modules whose views are resolved.</summary>
        public IEnumerable<string> VirtualPaths { get; set; }
    }

    /// <summary>
    /// Factory for the view engines used by Orchard; registered as a
    /// singleton dependency.
    /// </summary>
    public interface IViewEngineProvider : ISingletonDependency {
        /// <summary>Creates a view engine scoped to a single theme.</summary>
        IViewEngine CreateThemeViewEngine(CreateThemeViewEngineParams parameters);

        /// <summary>Creates a view engine scoped to a set of modules.</summary>
        IViewEngine CreateModulesViewEngine(CreateModulesViewEngineParams parameters);

        /// <summary>
        /// Produce a view engine configured to resolve only fully qualified {viewName} parameters
        /// </summary>
        IViewEngine CreateBareViewEngine();
    }
}
| bsd-3-clause |
dulems/hue | desktop/core/ext-py/python-openid-2.2.5/openid/test/test_yadis_discover.py | 5869 | #!/usr/bin/env python
"""Tests for yadis.discover.
@todo: Now that yadis.discover uses urljr.fetchers, we should be able to do
tests with a mock fetcher instead of spawning threads with BaseHTTPServer.
"""
import unittest
import urlparse
import re
import types
from openid.yadis.discover import discover, DiscoveryFailure
from openid import fetchers
import discoverdata
status_header_re = re.compile(r'Status: (\d+) .*?$', re.MULTILINE)
four04_pat = """\
Content-Type: text/plain
No such file %s
"""
class QuitServer(Exception): pass
def mkResponse(data):
    """Build a fetchers.HTTPResponse from a canned sample string.

    ``data`` starts with a "Status: NNN ..." pseudo-header, followed by
    more header lines, a blank line, and then the response body.
    """
    status_match = status_header_re.match(data)
    header_block, body = data.split('\n\n', 1)

    headers = {}
    for header_line in header_block.split('\n'):
        name, value = header_line.split(':', 1)
        headers[name.strip().lower()] = value.strip()

    return fetchers.HTTPResponse(status=int(status_match.group(1)),
                                 headers=headers,
                                 body=body)
class TestFetcher(object):
    """Fetcher stand-in that serves canned discoverdata samples.

    Redirect responses are followed internally; a path without a sample
    yields a plain 404 response.
    """

    def __init__(self, base_url):
        self.base_url = base_url

    def fetch(self, url, headers, body):
        current_url = url
        while True:
            path = urlparse.urlparse(current_url)[2][1:]
            try:
                data = discoverdata.generateSample(path, self.base_url)
            except KeyError:
                # No sample registered for this path: synthesize a 404.
                return fetchers.HTTPResponse(status=404,
                                             final_url=current_url,
                                             headers={},
                                             body='')

            response = mkResponse(data)
            if response.status not in (301, 302, 303, 307):
                response.final_url = current_url
                return response
            # Redirect: chase the Location header and loop.
            current_url = response.headers['location']
class TestSecondGet(unittest.TestCase):
    """Regression test: a 404 on the X-XRDS-Location URL must surface as
    DiscoveryFailure rather than succeeding silently."""

    class MockFetcher(object):
        # First fetch: 200 OK with an X-XRDS-Location header pointing at a
        # URL that will 404.  Every subsequent fetch: plain 404.
        def __init__(self):
            self.count = 0

        def fetch(self, uri, headers=None, body=None):
            self.count += 1
            if self.count == 1:
                headers = {
                    'X-XRDS-Location'.lower(): 'http://unittest/404',
                }
                return fetchers.HTTPResponse(uri, 200, headers, '')
            else:
                return fetchers.HTTPResponse(uri, 404)

    def setUp(self):
        # Install the mock fetcher; the original is restored in tearDown.
        self.oldfetcher = fetchers.getDefaultFetcher()
        fetchers.setDefaultFetcher(self.MockFetcher())

    def tearDown(self):
        fetchers.setDefaultFetcher(self.oldfetcher)

    def test_404(self):
        uri = "http://something.unittest/"
        self.failUnlessRaises(DiscoveryFailure, discover, uri)
class _TestCase(unittest.TestCase):
    """One discovery test generated from a discoverdata sample.

    Each instance fetches ``input_name`` through a TestFetcher and
    compares the discovery result (or expected DiscoveryFailure) against
    the expectation generated from ``result_name``.
    """

    base_url = 'http://invalid.unittest/'

    def __init__(self, input_name, id_name, result_name, success):
        self.input_name = input_name
        self.id_name = id_name
        self.result_name = result_name
        self.success = success
        # Still not quite sure how to best construct these custom tests.
        # Between python2.3 and python2.4, a patch attached to pyunit.sf.net
        # bug #469444 got applied which breaks loadTestsFromModule on this
        # class if it has test_ or runTest methods. So, kludge to change
        # the method name.
        unittest.TestCase.__init__(self, methodName='runCustomTest')

    def setUp(self):
        # wrap_exceptions=False so fetcher bugs surface directly here
        # instead of being converted into fetch errors.
        fetchers.setDefaultFetcher(TestFetcher(self.base_url),
                                   wrap_exceptions=False)

        self.input_url, self.expected = discoverdata.generateResult(
            self.base_url,
            self.input_name,
            self.id_name,
            self.result_name,
            self.success)

    def tearDown(self):
        fetchers.setDefaultFetcher(None)

    def runCustomTest(self):
        if self.expected is DiscoveryFailure:
            self.failUnlessRaises(DiscoveryFailure,
                                  discover, self.input_url)
        else:
            result = discover(self.input_url)
            self.failUnlessEqual(self.input_url, result.request_uri)

            msg = 'Identity URL mismatch: actual = %r, expected = %r' % (
                result.normalized_uri, self.expected.normalized_uri)
            self.failUnlessEqual(
                self.expected.normalized_uri, result.normalized_uri, msg)

            msg = 'Content mismatch: actual = %r, expected = %r' % (
                result.response_text, self.expected.response_text)
            self.failUnlessEqual(
                self.expected.response_text, result.response_text, msg)

            # Beyond the headline fields above, require the result object
            # to expose exactly the same attributes as the expectation,
            # and compare every public, non-method attribute value.
            expected_keys = dir(self.expected)
            expected_keys.sort()
            actual_keys = dir(result)
            actual_keys.sort()

            self.failUnlessEqual(actual_keys, expected_keys)

            for k in dir(self.expected):
                if k.startswith('__') and k.endswith('__'):
                    continue
                exp_v = getattr(self.expected, k)
                if isinstance(exp_v, types.MethodType):
                    continue
                act_v = getattr(result, k)

                assert act_v == exp_v, (k, exp_v, act_v)

    def shortDescription(self):
        """Label the test with its input URL (or the raw sample name if
        setUp has not run or did not complete)."""
        try:
            n = self.input_url
        except AttributeError:
            # run before setUp, or if setUp did not complete successfully.
            n = self.input_name

        return "%s (%s)" % (
            n,
            self.__class__.__module__)
def pyUnitTests():
    """Build a suite with one _TestCase per entry in discoverdata.testlist."""
    suite = unittest.TestSuite()
    for success, input_name, id_name, result_name in discoverdata.testlist:
        suite.addTest(_TestCase(input_name, id_name, result_name, success))
    return suite
def test():
    """Run the discovery test suite and return the unittest TestResult.

    Bug fix: the original body called ``loadTests()``, a name that is not
    defined anywhere in this module — invoking test() always raised
    NameError.  The suite builder defined above is ``pyUnitTests()``.
    """
    runner = unittest.TextTestRunner()
    return runner.run(pyUnitTests())


if __name__ == '__main__':
    test()
| apache-2.0 |
haveatry/kubernetes | vendor/google.golang.org/api/cloudmonitoring/v2beta2/cloudmonitoring-gen.go | 86711 | // Package cloudmonitoring provides access to the Cloud Monitoring API.
//
// See https://cloud.google.com/monitoring/v2beta2/
//
// Usage example:
//
// import "google.golang.org/api/cloudmonitoring/v2beta2"
// ...
// cloudmonitoringService, err := cloudmonitoring.New(oauthHttpClient)
package cloudmonitoring
import (
"bytes"
"encoding/json"
"errors"
"fmt"
context "golang.org/x/net/context"
ctxhttp "golang.org/x/net/context/ctxhttp"
gensupport "google.golang.org/api/gensupport"
googleapi "google.golang.org/api/googleapi"
"io"
"net/http"
"net/url"
"strconv"
"strings"
)
// Always reference these packages, just in case the auto-generated code
// below doesn't.
var _ = bytes.NewBuffer
var _ = strconv.Itoa
var _ = fmt.Sprintf
var _ = json.NewDecoder
var _ = io.Copy
var _ = url.Parse
var _ = gensupport.MarshalJSON
var _ = googleapi.Version
var _ = errors.New
var _ = strings.Replace
var _ = context.Canceled
var _ = ctxhttp.Do
const apiId = "cloudmonitoring:v2beta2"
const apiName = "cloudmonitoring"
const apiVersion = "v2beta2"
const basePath = "https://www.googleapis.com/cloudmonitoring/v2beta2/projects/"
// OAuth2 scopes used by this API.
const (
// View and manage your data across Google Cloud Platform services
CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform"
// View and write monitoring data for all of your Google and third-party
// Cloud and API projects
MonitoringScope = "https://www.googleapis.com/auth/monitoring"
)
// New returns a Service for the Cloud Monitoring API that issues every
// request through client. client must be non-nil (typically an
// OAuth2-authorized *http.Client).
func New(client *http.Client) (*Service, error) {
	if client == nil {
		return nil, errors.New("client is nil")
	}
	s := &Service{client: client, BasePath: basePath}
	s.MetricDescriptors = NewMetricDescriptorsService(s)
	s.Timeseries = NewTimeseriesService(s)
	s.TimeseriesDescriptors = NewTimeseriesDescriptorsService(s)
	return s, nil
}

// Service is the root of the generated API client: it holds the HTTP
// client plus one sub-service per API resource.
type Service struct {
	client    *http.Client
	BasePath  string // API endpoint base URL
	UserAgent string // optional additional User-Agent fragment

	MetricDescriptors *MetricDescriptorsService

	Timeseries *TimeseriesService

	TimeseriesDescriptors *TimeseriesDescriptorsService
}
// userAgent returns the User-Agent header value: the googleapi default,
// extended with the caller-supplied fragment when one is set.
func (s *Service) userAgent() string {
	ua := googleapi.UserAgent
	if s.UserAgent != "" {
		ua += " " + s.UserAgent
	}
	return ua
}
// NewMetricDescriptorsService returns the sub-service for
// metricDescriptors operations.
func NewMetricDescriptorsService(s *Service) *MetricDescriptorsService {
	rs := &MetricDescriptorsService{s: s}
	return rs
}

// MetricDescriptorsService groups the metricDescriptors API methods.
type MetricDescriptorsService struct {
	s *Service
}

// NewTimeseriesService returns the sub-service for timeseries operations.
func NewTimeseriesService(s *Service) *TimeseriesService {
	rs := &TimeseriesService{s: s}
	return rs
}

// TimeseriesService groups the timeseries API methods.
type TimeseriesService struct {
	s *Service
}

// NewTimeseriesDescriptorsService returns the sub-service for
// timeseriesDescriptors operations.
func NewTimeseriesDescriptorsService(s *Service) *TimeseriesDescriptorsService {
	rs := &TimeseriesDescriptorsService{s: s}
	return rs
}

// TimeseriesDescriptorsService groups the timeseriesDescriptors API
// methods.
type TimeseriesDescriptorsService struct {
	s *Service
}
// DeleteMetricDescriptorResponse: The response of
// cloudmonitoring.metricDescriptors.delete.
type DeleteMetricDescriptorResponse struct {
// Kind: Identifies what kind of resource this is. Value: the fixed
// string "cloudmonitoring#deleteMetricDescriptorResponse".
Kind string `json:"kind,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "Kind") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Kind") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via the shared gensupport helper. The noMethod alias strips
// this method so the helper's json.Marshal call does not recurse.
func (s *DeleteMetricDescriptorResponse) MarshalJSON() ([]byte, error) {
	type noMethod DeleteMetricDescriptorResponse
	return gensupport.MarshalJSON(noMethod(*s), s.ForceSendFields, s.NullFields)
}
// ListMetricDescriptorsRequest: The request of
// cloudmonitoring.metricDescriptors.list.
type ListMetricDescriptorsRequest struct {
// Kind: Identifies what kind of resource this is. Value: the fixed
// string "cloudmonitoring#listMetricDescriptorsRequest".
Kind string `json:"kind,omitempty"`
// ForceSendFields is a list of field names (e.g. "Kind") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Kind") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via the shared gensupport helper. The noMethod alias strips
// this method so the helper's json.Marshal call does not recurse.
func (s *ListMetricDescriptorsRequest) MarshalJSON() ([]byte, error) {
	type noMethod ListMetricDescriptorsRequest
	return gensupport.MarshalJSON(noMethod(*s), s.ForceSendFields, s.NullFields)
}
// ListMetricDescriptorsResponse: The response of
// cloudmonitoring.metricDescriptors.list.
type ListMetricDescriptorsResponse struct {
// Kind: Identifies what kind of resource this is. Value: the fixed
// string "cloudmonitoring#listMetricDescriptorsResponse".
Kind string `json:"kind,omitempty"`
// Metrics: The returned metric descriptors.
Metrics []*MetricDescriptor `json:"metrics,omitempty"`
// NextPageToken: Pagination token. If present, indicates that
// additional results are available for retrieval. To access the results
// past the pagination limit, pass this value to the pageToken query
// parameter.
NextPageToken string `json:"nextPageToken,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "Kind") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Kind") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via the shared gensupport helper. The noMethod alias strips
// this method so the helper's json.Marshal call does not recurse.
func (s *ListMetricDescriptorsResponse) MarshalJSON() ([]byte, error) {
	type noMethod ListMetricDescriptorsResponse
	return gensupport.MarshalJSON(noMethod(*s), s.ForceSendFields, s.NullFields)
}
// ListTimeseriesDescriptorsRequest: The request of
// cloudmonitoring.timeseriesDescriptors.list
type ListTimeseriesDescriptorsRequest struct {
// Kind: Identifies what kind of resource this is. Value: the fixed
// string "cloudmonitoring#listTimeseriesDescriptorsRequest".
Kind string `json:"kind,omitempty"`
// ForceSendFields is a list of field names (e.g. "Kind") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Kind") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via the shared gensupport helper. The noMethod alias strips
// this method so the helper's json.Marshal call does not recurse.
func (s *ListTimeseriesDescriptorsRequest) MarshalJSON() ([]byte, error) {
	type noMethod ListTimeseriesDescriptorsRequest
	return gensupport.MarshalJSON(noMethod(*s), s.ForceSendFields, s.NullFields)
}
// ListTimeseriesDescriptorsResponse: The response of
// cloudmonitoring.timeseriesDescriptors.list
type ListTimeseriesDescriptorsResponse struct {
// Kind: Identifies what kind of resource this is. Value: the fixed
// string "cloudmonitoring#listTimeseriesDescriptorsResponse".
Kind string `json:"kind,omitempty"`
// NextPageToken: Pagination token. If present, indicates that
// additional results are available for retrieval. To access the results
// past the pagination limit, set this value to the pageToken query
// parameter.
NextPageToken string `json:"nextPageToken,omitempty"`
// Oldest: The oldest timestamp of the interval of this query, as an RFC
// 3339 string.
Oldest string `json:"oldest,omitempty"`
// Timeseries: The returned time series descriptors.
Timeseries []*TimeseriesDescriptor `json:"timeseries,omitempty"`
// Youngest: The youngest timestamp of the interval of this query, as an
// RFC 3339 string.
Youngest string `json:"youngest,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "Kind") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Kind") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via the shared gensupport helper. The noMethod alias strips
// this method so the helper's json.Marshal call does not recurse.
func (s *ListTimeseriesDescriptorsResponse) MarshalJSON() ([]byte, error) {
	type noMethod ListTimeseriesDescriptorsResponse
	return gensupport.MarshalJSON(noMethod(*s), s.ForceSendFields, s.NullFields)
}
// ListTimeseriesRequest: The request of cloudmonitoring.timeseries.list
type ListTimeseriesRequest struct {
// Kind: Identifies what kind of resource this is. Value: the fixed
// string "cloudmonitoring#listTimeseriesRequest".
Kind string `json:"kind,omitempty"`
// ForceSendFields is a list of field names (e.g. "Kind") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Kind") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via the shared gensupport helper. The noMethod alias strips
// this method so the helper's json.Marshal call does not recurse.
func (s *ListTimeseriesRequest) MarshalJSON() ([]byte, error) {
	type noMethod ListTimeseriesRequest
	return gensupport.MarshalJSON(noMethod(*s), s.ForceSendFields, s.NullFields)
}
// ListTimeseriesResponse: The response of
// cloudmonitoring.timeseries.list
type ListTimeseriesResponse struct {
// Kind: Identifies what kind of resource this is. Value: the fixed
// string "cloudmonitoring#listTimeseriesResponse".
Kind string `json:"kind,omitempty"`
// NextPageToken: Pagination token. If present, indicates that
// additional results are available for retrieval. To access the results
// past the pagination limit, set the pageToken query parameter to this
// value. All of the points of a time series will be returned before
// returning any point of the subsequent time series.
NextPageToken string `json:"nextPageToken,omitempty"`
// Oldest: The oldest timestamp of the interval of this query as an RFC
// 3339 string.
Oldest string `json:"oldest,omitempty"`
// Timeseries: The returned time series.
Timeseries []*Timeseries `json:"timeseries,omitempty"`
// Youngest: The youngest timestamp of the interval of this query as an
// RFC 3339 string.
Youngest string `json:"youngest,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "Kind") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Kind") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via the shared gensupport helper. The noMethod alias strips
// this method so the helper's json.Marshal call does not recurse.
func (s *ListTimeseriesResponse) MarshalJSON() ([]byte, error) {
	type noMethod ListTimeseriesResponse
	return gensupport.MarshalJSON(noMethod(*s), s.ForceSendFields, s.NullFields)
}
// MetricDescriptor: A metricDescriptor defines the name, label keys,
// and data type of a particular metric.
type MetricDescriptor struct {
	// Description: Description of this metric.
	Description string `json:"description,omitempty"`

	// Labels: Labels defined for this metric.
	Labels []*MetricDescriptorLabelDescriptor `json:"labels,omitempty"`

	// Name: The name of this metric.
	Name string `json:"name,omitempty"`

	// Project: The project ID to which the metric belongs.
	Project string `json:"project,omitempty"`

	// TypeDescriptor: Type description for this metric.
	TypeDescriptor *MetricDescriptorTypeDescriptor `json:"typeDescriptor,omitempty"`

	// ServerResponse contains the HTTP response code and headers from the
	// server. It is populated by the client after a call and is never
	// serialized (json:"-").
	googleapi.ServerResponse `json:"-"`

	// ForceSendFields is a list of field names (e.g. "Description") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Description") to include
	// in API requests with the JSON null value. By default, fields with
	// empty values are omitted from API requests. However, any field with
	// an empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields when encoding the struct for an API request.
func (s *MetricDescriptor) MarshalJSON() ([]byte, error) {
	// A method-less alias type prevents infinite recursion when
	// gensupport re-encodes the value.
	type plain MetricDescriptor
	return gensupport.MarshalJSON(plain(*s), s.ForceSendFields, s.NullFields)
}
// MetricDescriptorLabelDescriptor: A label in a metric is a description
// of this metric, including the key of this description (what the
// description is), and the value for this description.
type MetricDescriptorLabelDescriptor struct {
	// Description: Label description.
	Description string `json:"description,omitempty"`

	// Key: Label key.
	Key string `json:"key,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Description") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Description") to include
	// in API requests with the JSON null value. By default, fields with
	// empty values are omitted from API requests. However, any field with
	// an empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields when encoding the struct for an API request.
func (s *MetricDescriptorLabelDescriptor) MarshalJSON() ([]byte, error) {
	// A method-less alias type prevents infinite recursion when
	// gensupport re-encodes the value.
	type plain MetricDescriptorLabelDescriptor
	return gensupport.MarshalJSON(plain(*s), s.ForceSendFields, s.NullFields)
}
// MetricDescriptorTypeDescriptor: A type in a metric contains
// information about how the metric is collected and what its data
// points look like.
type MetricDescriptorTypeDescriptor struct {
	// MetricType: The method of collecting data for the metric. See Metric
	// types.
	MetricType string `json:"metricType,omitempty"`

	// ValueType: The data type of individual points in the metric's time
	// series. See Metric value types.
	ValueType string `json:"valueType,omitempty"`

	// ForceSendFields is a list of field names (e.g. "MetricType") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "MetricType") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields when encoding the struct for an API request.
func (s *MetricDescriptorTypeDescriptor) MarshalJSON() ([]byte, error) {
	// A method-less alias type prevents infinite recursion when
	// gensupport re-encodes the value.
	type plain MetricDescriptorTypeDescriptor
	return gensupport.MarshalJSON(plain(*s), s.ForceSendFields, s.NullFields)
}
// Point: Point is a single point in a time series. It consists of a
// start time, an end time, and a value.
type Point struct {
	// BoolValue: The value of this data point. Either "true" or "false".
	BoolValue *bool `json:"boolValue,omitempty"`

	// DistributionValue: The value of this data point as a distribution. A
	// distribution value can contain a list of buckets and/or an
	// underflowBucket and an overflowBucket. The values of these points can
	// be used to create a histogram.
	DistributionValue *PointDistribution `json:"distributionValue,omitempty"`

	// DoubleValue: The value of this data point as a double-precision
	// floating-point number.
	DoubleValue *float64 `json:"doubleValue,omitempty"`

	// End: The interval [start, end] is the time period to which the
	// point's value applies. For gauge metrics, whose values are
	// instantaneous measurements, this interval should be empty (start
	// should equal end). For cumulative metrics (of which deltas and rates
	// are special cases), the interval should be non-empty. Both start and
	// end are RFC 3339 strings.
	End string `json:"end,omitempty"`

	// Int64Value: The value of this data point as a 64-bit integer.
	// Encoded as a JSON string on the wire (the ",string" tag option).
	Int64Value *int64 `json:"int64Value,omitempty,string"`

	// Start: The interval [start, end] is the time period to which the
	// point's value applies. For gauge metrics, whose values are
	// instantaneous measurements, this interval should be empty (start
	// should equal end). For cumulative metrics (of which deltas and rates
	// are special cases), the interval should be non-empty. Both start and
	// end are RFC 3339 strings.
	Start string `json:"start,omitempty"`

	// StringValue: The value of this data point in string format.
	StringValue *string `json:"stringValue,omitempty"`

	// ForceSendFields is a list of field names (e.g. "BoolValue") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "BoolValue") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields when encoding the struct for an API request.
func (s *Point) MarshalJSON() ([]byte, error) {
	// A method-less alias type prevents infinite recursion when
	// gensupport re-encodes the value.
	type plain Point
	return gensupport.MarshalJSON(plain(*s), s.ForceSendFields, s.NullFields)
}
// PointDistribution: Distribution data point value type. When writing
// distribution points, try to be consistent with the boundaries of your
// buckets. If you must modify the bucket boundaries, then do so by
// merging, partitioning, or appending rather than skewing them.
type PointDistribution struct {
	// Buckets: The finite buckets.
	Buckets []*PointDistributionBucket `json:"buckets,omitempty"`

	// OverflowBucket: The overflow bucket.
	OverflowBucket *PointDistributionOverflowBucket `json:"overflowBucket,omitempty"`

	// UnderflowBucket: The underflow bucket.
	UnderflowBucket *PointDistributionUnderflowBucket `json:"underflowBucket,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Buckets") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Buckets") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields when encoding the struct for an API request.
func (s *PointDistribution) MarshalJSON() ([]byte, error) {
	// A method-less alias type prevents infinite recursion when
	// gensupport re-encodes the value.
	type plain PointDistribution
	return gensupport.MarshalJSON(plain(*s), s.ForceSendFields, s.NullFields)
}
// PointDistributionBucket: The histogram's bucket. Buckets that form
// the histogram of a distribution value. If the upper bound of a
// bucket, say U1, does not equal the lower bound of the next bucket,
// say L2, this means that there is no event in [U1, L2).
type PointDistributionBucket struct {
	// Count: The number of events whose values are in the interval defined
	// by this bucket. Encoded as a JSON string on the wire (the ",string"
	// tag option).
	Count int64 `json:"count,omitempty,string"`

	// LowerBound: The lower bound of the value interval of this bucket
	// (inclusive).
	LowerBound float64 `json:"lowerBound,omitempty"`

	// UpperBound: The upper bound of the value interval of this bucket
	// (exclusive).
	UpperBound float64 `json:"upperBound,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Count") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Count") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields when encoding the struct for an API request.
func (s *PointDistributionBucket) MarshalJSON() ([]byte, error) {
	// A method-less alias type prevents infinite recursion when
	// gensupport re-encodes the value.
	type plain PointDistributionBucket
	return gensupport.MarshalJSON(plain(*s), s.ForceSendFields, s.NullFields)
}
// PointDistributionOverflowBucket: The overflow bucket is a special
// bucket that does not have the upperBound field; it includes all of
// the events that are no less than its lower bound.
type PointDistributionOverflowBucket struct {
	// Count: The number of events whose values are in the interval defined
	// by this bucket. Encoded as a JSON string on the wire (the ",string"
	// tag option).
	Count int64 `json:"count,omitempty,string"`

	// LowerBound: The lower bound of the value interval of this bucket
	// (inclusive).
	LowerBound float64 `json:"lowerBound,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Count") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Count") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields when encoding the struct for an API request.
func (s *PointDistributionOverflowBucket) MarshalJSON() ([]byte, error) {
	// A method-less alias type prevents infinite recursion when
	// gensupport re-encodes the value.
	type plain PointDistributionOverflowBucket
	return gensupport.MarshalJSON(plain(*s), s.ForceSendFields, s.NullFields)
}
// PointDistributionUnderflowBucket: The underflow bucket is a special
// bucket that does not have the lowerBound field; it includes all of
// the events that are less than its upper bound.
type PointDistributionUnderflowBucket struct {
	// Count: The number of events whose values are in the interval defined
	// by this bucket. Encoded as a JSON string on the wire (the ",string"
	// tag option).
	Count int64 `json:"count,omitempty,string"`

	// UpperBound: The upper bound of the value interval of this bucket
	// (exclusive).
	UpperBound float64 `json:"upperBound,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Count") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Count") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields when encoding the struct for an API request.
func (s *PointDistributionUnderflowBucket) MarshalJSON() ([]byte, error) {
	// A method-less alias type prevents infinite recursion when
	// gensupport re-encodes the value.
	type plain PointDistributionUnderflowBucket
	return gensupport.MarshalJSON(plain(*s), s.ForceSendFields, s.NullFields)
}
// Timeseries: The monitoring data is organized as metrics and stored as
// data points that are recorded over time. Each data point represents
// information like the CPU utilization of your virtual machine. A
// historical record of these data points is called a time series.
type Timeseries struct {
	// Points: The data points of this time series. The points are listed in
	// order of their end timestamp, from younger to older.
	Points []*Point `json:"points,omitempty"`

	// TimeseriesDesc: The descriptor of this time series.
	TimeseriesDesc *TimeseriesDescriptor `json:"timeseriesDesc,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Points") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Points") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields when encoding the struct for an API request.
func (s *Timeseries) MarshalJSON() ([]byte, error) {
	// A method-less alias type prevents infinite recursion when
	// gensupport re-encodes the value.
	type plain Timeseries
	return gensupport.MarshalJSON(plain(*s), s.ForceSendFields, s.NullFields)
}
// TimeseriesDescriptor: TimeseriesDescriptor identifies a single time
// series.
type TimeseriesDescriptor struct {
	// Labels: The labels of this time series, as a map from label name to
	// label value.
	Labels map[string]string `json:"labels,omitempty"`

	// Metric: The name of the metric.
	Metric string `json:"metric,omitempty"`

	// Project: The Developers Console project number to which this time
	// series belongs.
	Project string `json:"project,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Labels") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Labels") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields when encoding the struct for an API request.
func (s *TimeseriesDescriptor) MarshalJSON() ([]byte, error) {
	// A method-less alias type prevents infinite recursion when
	// gensupport re-encodes the value.
	type plain TimeseriesDescriptor
	return gensupport.MarshalJSON(plain(*s), s.ForceSendFields, s.NullFields)
}
// TimeseriesDescriptorLabel: A single label of a time series, expressed
// as a key/value pair.
type TimeseriesDescriptorLabel struct {
	// Key: The label's name.
	Key string `json:"key,omitempty"`

	// Value: The label's value.
	Value string `json:"value,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Key") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Key") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields when encoding the struct for an API request.
func (s *TimeseriesDescriptorLabel) MarshalJSON() ([]byte, error) {
	// A method-less alias type prevents infinite recursion when
	// gensupport re-encodes the value.
	type plain TimeseriesDescriptorLabel
	return gensupport.MarshalJSON(plain(*s), s.ForceSendFields, s.NullFields)
}
// TimeseriesPoint: When writing time series, TimeseriesPoint should be
// used instead of Timeseries, to enforce single point for each time
// series in the timeseries.write request.
type TimeseriesPoint struct {
	// Point: The data point in this time series snapshot.
	Point *Point `json:"point,omitempty"`

	// TimeseriesDesc: The descriptor of this time series.
	TimeseriesDesc *TimeseriesDescriptor `json:"timeseriesDesc,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Point") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Point") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields when encoding the struct for an API request.
func (s *TimeseriesPoint) MarshalJSON() ([]byte, error) {
	// A method-less alias type prevents infinite recursion when
	// gensupport re-encodes the value.
	type plain TimeseriesPoint
	return gensupport.MarshalJSON(plain(*s), s.ForceSendFields, s.NullFields)
}
// WriteTimeseriesRequest: The request of
// cloudmonitoring.timeseries.write
type WriteTimeseriesRequest struct {
	// CommonLabels: Labels shared by every time series in this request, as
	// a map from label name to label value. Together with the per-series
	// labels in Timeseries they form the complete label set (see the
	// Timeseries field comment below).
	CommonLabels map[string]string `json:"commonLabels,omitempty"`

	// Timeseries: Provide time series specific labels and the data points
	// for each time series. The labels in timeseries and the common_labels
	// should form a complete list of labels that required by the metric.
	Timeseries []*TimeseriesPoint `json:"timeseries,omitempty"`

	// ForceSendFields is a list of field names (e.g. "CommonLabels") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "CommonLabels") to include
	// in API requests with the JSON null value. By default, fields with
	// empty values are omitted from API requests. However, any field with
	// an empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields when encoding the struct for an API request.
func (s *WriteTimeseriesRequest) MarshalJSON() ([]byte, error) {
	// A method-less alias type prevents infinite recursion when
	// gensupport re-encodes the value.
	type plain WriteTimeseriesRequest
	return gensupport.MarshalJSON(plain(*s), s.ForceSendFields, s.NullFields)
}
// WriteTimeseriesResponse: The response of
// cloudmonitoring.timeseries.write
type WriteTimeseriesResponse struct {
	// Kind: Identifies what kind of resource this is. Value: the fixed
	// string "cloudmonitoring#writeTimeseriesResponse".
	Kind string `json:"kind,omitempty"`

	// ServerResponse contains the HTTP response code and headers from the
	// server. It is populated by the client after a call and is never
	// serialized (json:"-").
	googleapi.ServerResponse `json:"-"`

	// ForceSendFields is a list of field names (e.g. "Kind") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Kind") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields when encoding the struct for an API request.
func (s *WriteTimeseriesResponse) MarshalJSON() ([]byte, error) {
	// A method-less alias type prevents infinite recursion when
	// gensupport re-encodes the value.
	type plain WriteTimeseriesResponse
	return gensupport.MarshalJSON(plain(*s), s.ForceSendFields, s.NullFields)
}
// method id "cloudmonitoring.metricDescriptors.create":

// MetricDescriptorsCreateCall holds the state of a pending
// cloudmonitoring.metricDescriptors.create request. It is built by
// MetricDescriptorsService.Create and executed by its Do method.
type MetricDescriptorsCreateCall struct {
	s                *Service
	project          string
	metricdescriptor *MetricDescriptor
	urlParams_       gensupport.URLParams // query parameters for the request URL
	ctx_             context.Context      // optional; aborts the HTTP request when canceled (see Context)
	header_          http.Header          // extra HTTP headers, lazily allocated by Header
}
// Create: Create a new metric.
func (r *MetricDescriptorsService) Create(project string, metricdescriptor *MetricDescriptor) *MetricDescriptorsCreateCall {
	return &MetricDescriptorsCreateCall{
		s:                r.s,
		urlParams_:       make(gensupport.URLParams),
		project:          project,
		metricdescriptor: metricdescriptor,
	}
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *MetricDescriptorsCreateCall) Fields(s ...googleapi.Field) *MetricDescriptorsCreateCall {
	combined := googleapi.CombineFields(s)
	c.urlParams_.Set("fields", combined)
	return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *MetricDescriptorsCreateCall) Context(ctx context.Context) *MetricDescriptorsCreateCall {
	c.ctx_ = ctx
	return c // returned for method chaining
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *MetricDescriptorsCreateCall) Header() http.Header {
	// Allocate the header map lazily, on first access.
	if c.header_ != nil {
		return c.header_
	}
	c.header_ = make(http.Header)
	return c.header_
}
// doRequest builds and sends the HTTP POST for
// cloudmonitoring.metricDescriptors.create and returns the raw
// response. alt selects the response wire format (Do always passes
// "json").
func (c *MetricDescriptorsCreateCall) doRequest(alt string) (*http.Response, error) {
	reqHeaders := make(http.Header)
	// Copy caller-supplied headers (see Header) so the call's own map is
	// not aliased by the request.
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	var body io.Reader = nil
	// Serialize the request body without the legacy "data" wrapper.
	body, err := googleapi.WithoutDataWrapper.JSONReader(c.metricdescriptor)
	if err != nil {
		return nil, err
	}
	reqHeaders.Set("Content-Type", "application/json")
	c.urlParams_.Set("alt", alt)
	urls := googleapi.ResolveRelative(c.s.BasePath, "{project}/metricDescriptors")
	urls += "?" + c.urlParams_.Encode()
	req, err := http.NewRequest("POST", urls, body)
	if err != nil {
		// Bug fix: the error was previously discarded (req, _ :=); a
		// malformed URL would have produced a nil request and a panic
		// downstream instead of a clean error.
		return nil, err
	}
	req.Header = reqHeaders
	// Substitute the {project} path template element.
	googleapi.Expand(req.URL, map[string]string{
		"project": c.project,
	})
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "cloudmonitoring.metricDescriptors.create" call.
// Exactly one of *MetricDescriptor or error will be non-nil. Any
// non-2xx status code is an error. Response headers are in either
// *MetricDescriptor.ServerResponse.Header or (if a response was
// returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *MetricDescriptorsCreateCall) Do(opts ...googleapi.CallOption) (*MetricDescriptor, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// A 304 Not Modified is reported as a *googleapi.Error even when err
	// is also non-nil, so it is handled before the generic error check.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	// Turn any non-2xx status into a *googleapi.Error.
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	// Pre-populate ServerResponse so callers can inspect status/headers.
	ret := &MetricDescriptor{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := &ret
	if err := json.NewDecoder(res.Body).Decode(target); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "Create a new metric.",
	//   "httpMethod": "POST",
	//   "id": "cloudmonitoring.metricDescriptors.create",
	//   "parameterOrder": [
	//     "project"
	//   ],
	//   "parameters": {
	//     "project": {
	//       "description": "The project id. The value can be the numeric project ID or string-based project name.",
	//       "location": "path",
	//       "required": true,
	//       "type": "string"
	//     }
	//   },
	//   "path": "{project}/metricDescriptors",
	//   "request": {
	//     "$ref": "MetricDescriptor"
	//   },
	//   "response": {
	//     "$ref": "MetricDescriptor"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform",
	//     "https://www.googleapis.com/auth/monitoring"
	//   ]
	// }

}
// method id "cloudmonitoring.metricDescriptors.delete":

// MetricDescriptorsDeleteCall holds the state of a pending
// cloudmonitoring.metricDescriptors.delete request. It is built by
// MetricDescriptorsService.Delete and executed by its Do method.
type MetricDescriptorsDeleteCall struct {
	s          *Service
	project    string
	metric     string
	urlParams_ gensupport.URLParams // query parameters for the request URL
	ctx_       context.Context      // optional; aborts the HTTP request when canceled (see Context)
	header_    http.Header          // extra HTTP headers, lazily allocated by Header
}
// Delete: Delete an existing metric.
func (r *MetricDescriptorsService) Delete(project string, metric string) *MetricDescriptorsDeleteCall {
	return &MetricDescriptorsDeleteCall{
		s:          r.s,
		urlParams_: make(gensupport.URLParams),
		project:    project,
		metric:     metric,
	}
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *MetricDescriptorsDeleteCall) Fields(s ...googleapi.Field) *MetricDescriptorsDeleteCall {
	combined := googleapi.CombineFields(s)
	c.urlParams_.Set("fields", combined)
	return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *MetricDescriptorsDeleteCall) Context(ctx context.Context) *MetricDescriptorsDeleteCall {
	c.ctx_ = ctx
	return c // returned for method chaining
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *MetricDescriptorsDeleteCall) Header() http.Header {
	// Allocate the header map lazily, on first access.
	if c.header_ != nil {
		return c.header_
	}
	c.header_ = make(http.Header)
	return c.header_
}
// doRequest builds and sends the HTTP DELETE for
// cloudmonitoring.metricDescriptors.delete and returns the raw
// response. alt selects the response wire format (Do always passes
// "json").
func (c *MetricDescriptorsDeleteCall) doRequest(alt string) (*http.Response, error) {
	reqHeaders := make(http.Header)
	// Copy caller-supplied headers (see Header) so the call's own map is
	// not aliased by the request.
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	var body io.Reader = nil
	c.urlParams_.Set("alt", alt)
	urls := googleapi.ResolveRelative(c.s.BasePath, "{project}/metricDescriptors/{metric}")
	urls += "?" + c.urlParams_.Encode()
	req, err := http.NewRequest("DELETE", urls, body)
	if err != nil {
		// Bug fix: the error was previously discarded (req, _ :=); a
		// malformed URL would have produced a nil request and a panic
		// downstream instead of a clean error.
		return nil, err
	}
	req.Header = reqHeaders
	// Substitute the {project} and {metric} path template elements.
	googleapi.Expand(req.URL, map[string]string{
		"project": c.project,
		"metric":  c.metric,
	})
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "cloudmonitoring.metricDescriptors.delete" call.
// Exactly one of *DeleteMetricDescriptorResponse or error will be
// non-nil. Any non-2xx status code is an error. Response headers are in
// either *DeleteMetricDescriptorResponse.ServerResponse.Header or (if a
// response was returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *MetricDescriptorsDeleteCall) Do(opts ...googleapi.CallOption) (*DeleteMetricDescriptorResponse, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// A 304 Not Modified is reported as a *googleapi.Error even when err
	// is also non-nil, so it is handled before the generic error check.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	// Turn any non-2xx status into a *googleapi.Error.
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	// Pre-populate ServerResponse so callers can inspect status/headers.
	ret := &DeleteMetricDescriptorResponse{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := &ret
	if err := json.NewDecoder(res.Body).Decode(target); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "Delete an existing metric.",
	//   "httpMethod": "DELETE",
	//   "id": "cloudmonitoring.metricDescriptors.delete",
	//   "parameterOrder": [
	//     "project",
	//     "metric"
	//   ],
	//   "parameters": {
	//     "metric": {
	//       "description": "Name of the metric.",
	//       "location": "path",
	//       "required": true,
	//       "type": "string"
	//     },
	//     "project": {
	//       "description": "The project ID to which the metric belongs.",
	//       "location": "path",
	//       "required": true,
	//       "type": "string"
	//     }
	//   },
	//   "path": "{project}/metricDescriptors/{metric}",
	//   "response": {
	//     "$ref": "DeleteMetricDescriptorResponse"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform",
	//     "https://www.googleapis.com/auth/monitoring"
	//   ]
	// }

}
// method id "cloudmonitoring.metricDescriptors.list":

// MetricDescriptorsListCall holds the state of a pending
// cloudmonitoring.metricDescriptors.list request. It is built by
// MetricDescriptorsService.List and executed by its Do method.
type MetricDescriptorsListCall struct {
	s                            *Service
	project                      string
	listmetricdescriptorsrequest *ListMetricDescriptorsRequest
	urlParams_                   gensupport.URLParams // query parameters for the request URL
	ifNoneMatch_                 string               // optional ETag for conditional requests (see IfNoneMatch)
	ctx_                         context.Context      // optional; aborts the HTTP request when canceled (see Context)
	header_                      http.Header          // extra HTTP headers, lazily allocated by Header
}
// List: List metric descriptors that match the query. If the query is
// not set, then all of the metric descriptors will be returned. Large
// responses will be paginated, use the nextPageToken returned in the
// response to request subsequent pages of results by setting the
// pageToken query parameter to the value of the nextPageToken.
func (r *MetricDescriptorsService) List(project string, listmetricdescriptorsrequest *ListMetricDescriptorsRequest) *MetricDescriptorsListCall {
	// Build the call in a single composite literal.
	return &MetricDescriptorsListCall{
		s:                            r.s,
		urlParams_:                   make(gensupport.URLParams),
		project:                      project,
		listmetricdescriptorsrequest: listmetricdescriptorsrequest,
	}
}
// Count sets the optional parameter "count": Maximum number of metric
// descriptors per page. Used for pagination. If not specified, count =
// 100.
func (c *MetricDescriptorsListCall) Count(count int64) *MetricDescriptorsListCall {
	// Encode the page size as its decimal string form.
	c.urlParams_.Set("count", fmt.Sprintf("%d", count))
	return c
}
// PageToken sets the optional parameter "pageToken": The pagination
// token, which is used to page through large result sets. Set this
// value to the value of the nextPageToken to retrieve the next page of
// results.
func (c *MetricDescriptorsListCall) PageToken(pageToken string) *MetricDescriptorsListCall {
	// Record the continuation token for the next request.
	const param = "pageToken"
	c.urlParams_.Set(param, pageToken)
	return c
}
// Query sets the optional parameter "query": The query used to search
// against existing metrics. Separate keywords with a space; the service
// joins all keywords with AND, meaning that all keywords must match for
// a metric to be returned. If this field is omitted, all metrics are
// returned. If an empty string is passed with this field, no metrics
// are returned.
func (c *MetricDescriptorsListCall) Query(query string) *MetricDescriptorsListCall {
	// Store the search expression; keywords are ANDed by the service.
	const param = "query"
	c.urlParams_.Set(param, query)
	return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *MetricDescriptorsListCall) Fields(s ...googleapi.Field) *MetricDescriptorsListCall {
	// Collapse the field selectors into the single "fields" parameter.
	combined := googleapi.CombineFields(s)
	c.urlParams_.Set("fields", combined)
	return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *MetricDescriptorsListCall) IfNoneMatch(entityTag string) *MetricDescriptorsListCall {
	// The stored tag is emitted as an If-None-Match header by doRequest.
	c.ifNoneMatch_ = entityTag
	return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *MetricDescriptorsListCall) Context(ctx context.Context) *MetricDescriptorsListCall {
	// Remember the context; it is passed to SendRequest by doRequest.
	c.ctx_ = ctx
	return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *MetricDescriptorsListCall) Header() http.Header {
	// Lazily allocate the header map on first access.
	if c.header_ != nil {
		return c.header_
	}
	c.header_ = make(http.Header)
	return c.header_
}
// doRequest builds and sends the HTTP GET for metricDescriptors.list,
// returning the raw response or any construction/transport error.
func (c *MetricDescriptorsListCall) doRequest(alt string) (*http.Response, error) {
	// Copy caller-supplied headers so the outgoing request carries them.
	reqHeaders := make(http.Header)
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	if c.ifNoneMatch_ != "" {
		reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
	}
	var body io.Reader = nil
	c.urlParams_.Set("alt", alt)
	urls := googleapi.ResolveRelative(c.s.BasePath, "{project}/metricDescriptors")
	urls += "?" + c.urlParams_.Encode()
	// Propagate request-construction errors instead of discarding them:
	// a malformed URL would otherwise surface as a nil-pointer panic below.
	req, err := http.NewRequest("GET", urls, body)
	if err != nil {
		return nil, err
	}
	req.Header = reqHeaders
	googleapi.Expand(req.URL, map[string]string{
		"project": c.project,
	})
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "cloudmonitoring.metricDescriptors.list" call.
// Exactly one of *ListMetricDescriptorsResponse or error will be
// non-nil. Any non-2xx status code is an error. Response headers are in
// either *ListMetricDescriptorsResponse.ServerResponse.Header or (if a
// response was returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *MetricDescriptorsListCall) Do(opts ...googleapi.CallOption) (*ListMetricDescriptorsResponse, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// A 304 (Not Modified) is surfaced as a *googleapi.Error BEFORE err is
	// consulted, so callers can detect it via googleapi.IsNotModified.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	// Any non-2xx status becomes an error here; the body is not decoded.
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	// Pre-populate the server response metadata so callers can inspect
	// headers and status code alongside the decoded payload.
	ret := &ListMetricDescriptorsResponse{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := &ret
	if err := json.NewDecoder(res.Body).Decode(target); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "List metric descriptors that match the query. If the query is not set, then all of the metric descriptors will be returned. Large responses will be paginated, use the nextPageToken returned in the response to request subsequent pages of results by setting the pageToken query parameter to the value of the nextPageToken.",
	//   "httpMethod": "GET",
	//   "id": "cloudmonitoring.metricDescriptors.list",
	//   "parameterOrder": [
	//     "project"
	//   ],
	//   "parameters": {
	//     "count": {
	//       "default": "100",
	//       "description": "Maximum number of metric descriptors per page. Used for pagination. If not specified, count = 100.",
	//       "format": "int32",
	//       "location": "query",
	//       "maximum": "1000",
	//       "minimum": "1",
	//       "type": "integer"
	//     },
	//     "pageToken": {
	//       "description": "The pagination token, which is used to page through large result sets. Set this value to the value of the nextPageToken to retrieve the next page of results.",
	//       "location": "query",
	//       "type": "string"
	//     },
	//     "project": {
	//       "description": "The project id. The value can be the numeric project ID or string-based project name.",
	//       "location": "path",
	//       "required": true,
	//       "type": "string"
	//     },
	//     "query": {
	//       "description": "The query used to search against existing metrics. Separate keywords with a space; the service joins all keywords with AND, meaning that all keywords must match for a metric to be returned. If this field is omitted, all metrics are returned. If an empty string is passed with this field, no metrics are returned.",
	//       "location": "query",
	//       "type": "string"
	//     }
	//   },
	//   "path": "{project}/metricDescriptors",
	//   "request": {
	//     "$ref": "ListMetricDescriptorsRequest"
	//   },
	//   "response": {
	//     "$ref": "ListMetricDescriptorsResponse"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform",
	//     "https://www.googleapis.com/auth/monitoring"
	//   ]
	// }
}
// Pages invokes f for each page of results.
// A non-nil error returned from f will halt the iteration.
// The provided context supersedes any context provided to the Context method.
func (c *MetricDescriptorsListCall) Pages(ctx context.Context, f func(*ListMetricDescriptorsResponse) error) error {
	c.ctx_ = ctx
	// The deferred call re-applies the pageToken that was set before
	// iteration began, so the call object can be reused afterwards.
	defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point
	for {
		x, err := c.Do()
		if err != nil {
			return err
		}
		if err := f(x); err != nil {
			return err
		}
		// An empty NextPageToken marks the final page.
		if x.NextPageToken == "" {
			return nil
		}
		c.PageToken(x.NextPageToken)
	}
}
// method id "cloudmonitoring.timeseries.list":
type TimeseriesListCall struct {
	s                     *Service               // parent service; supplies the HTTP client and base path
	project               string                 // required "{project}" path parameter
	metric                string                 // required "{metric}" path parameter
	listtimeseriesrequest *ListTimeseriesRequest // retained request message (not serialized by doRequest, which issues a GET)
	urlParams_            gensupport.URLParams   // accumulated query parameters (includes required "youngest")
	ifNoneMatch_          string                 // optional ETag sent as If-None-Match
	ctx_                  context.Context        // optional context governing the HTTP request
	header_               http.Header            // extra caller-supplied HTTP headers
}
// List: List the data points of the time series that match the metric
// and labels values and that have data points in the interval. Large
// responses are paginated; use the nextPageToken returned in the
// response to request subsequent pages of results by setting the
// pageToken query parameter to the value of the nextPageToken.
func (r *TimeseriesService) List(project string, metric string, youngest string, listtimeseriesrequest *ListTimeseriesRequest) *TimeseriesListCall {
	// Assemble the call, then record the required "youngest" query parameter.
	call := &TimeseriesListCall{
		s:                     r.s,
		urlParams_:            make(gensupport.URLParams),
		project:               project,
		metric:                metric,
		listtimeseriesrequest: listtimeseriesrequest,
	}
	call.urlParams_.Set("youngest", youngest)
	return call
}
// Aggregator sets the optional parameter "aggregator": The aggregation
// function that will reduce the data points in each window to a single
// point. This parameter is only valid for non-cumulative metrics with a
// value type of INT64 or DOUBLE.
//
// Possible values:
// "max"
// "mean"
// "min"
// "sum"
func (c *TimeseriesListCall) Aggregator(aggregator string) *TimeseriesListCall {
	// Record the reduction function ("max", "mean", "min" or "sum").
	const param = "aggregator"
	c.urlParams_.Set(param, aggregator)
	return c
}
// Count sets the optional parameter "count": Maximum number of data
// points per page, which is used for pagination of results.
func (c *TimeseriesListCall) Count(count int64) *TimeseriesListCall {
	// Encode the page size as its decimal string form.
	c.urlParams_.Set("count", fmt.Sprintf("%d", count))
	return c
}
// Labels sets the optional parameter "labels": A collection of labels
// for the matching time series, which are represented as:
// - key==value: key equals the value
// - key=~value: key regex matches the value
// - key!=value: key does not equal the value
// - key!~value: key regex does not match the value For example, to
// list all of the time series descriptors for the region us-central1,
// you could
// specify:
// label=cloud.googleapis.com%2Flocation=~us-central1.*
func (c *TimeseriesListCall) Labels(labels ...string) *TimeseriesListCall {
	// Store a defensive copy so later mutation of the caller's slice
	// cannot alter the recorded parameters.
	vals := make([]string, len(labels))
	copy(vals, labels)
	c.urlParams_.SetMulti("labels", vals)
	return c
}
// Oldest sets the optional parameter "oldest": Start of the time
// interval (exclusive), which is expressed as an RFC 3339 timestamp. If
// neither oldest nor timespan is specified, the default time interval
// will be (youngest - 4 hours, youngest]
func (c *TimeseriesListCall) Oldest(oldest string) *TimeseriesListCall {
	// Record the interval start (RFC 3339 timestamp, exclusive).
	const param = "oldest"
	c.urlParams_.Set(param, oldest)
	return c
}
// PageToken sets the optional parameter "pageToken": The pagination
// token, which is used to page through large result sets. Set this
// value to the value of the nextPageToken to retrieve the next page of
// results.
func (c *TimeseriesListCall) PageToken(pageToken string) *TimeseriesListCall {
	// Record the continuation token for the next request.
	const param = "pageToken"
	c.urlParams_.Set(param, pageToken)
	return c
}
// Timespan sets the optional parameter "timespan": Length of the time
// interval to query, which is an alternative way to declare the
// interval: (youngest - timespan, youngest]. The timespan and oldest
// parameters should not be used together. Units:
// - s: second
// - m: minute
// - h: hour
// - d: day
// - w: week Examples: 2s, 3m, 4w. Only one unit is allowed, for
// example: 2w3d is not allowed; you should use 17d instead.
//
// If neither oldest nor timespan is specified, the default time
// interval will be (youngest - 4 hours, youngest].
func (c *TimeseriesListCall) Timespan(timespan string) *TimeseriesListCall {
	// Record the interval length (e.g. "2s", "3m", "4w"); alternative to oldest.
	const param = "timespan"
	c.urlParams_.Set(param, timespan)
	return c
}
// Window sets the optional parameter "window": The sampling window. At
// most one data point will be returned for each window in the requested
// time interval. This parameter is only valid for non-cumulative metric
// types. Units:
// - m: minute
// - h: hour
// - d: day
// - w: week Examples: 3m, 4w. Only one unit is allowed, for example:
// 2w3d is not allowed; you should use 17d instead.
func (c *TimeseriesListCall) Window(window string) *TimeseriesListCall {
	// Record the sampling window (e.g. "3m", "4w").
	const param = "window"
	c.urlParams_.Set(param, window)
	return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *TimeseriesListCall) Fields(s ...googleapi.Field) *TimeseriesListCall {
	// Collapse the field selectors into the single "fields" parameter.
	combined := googleapi.CombineFields(s)
	c.urlParams_.Set("fields", combined)
	return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *TimeseriesListCall) IfNoneMatch(entityTag string) *TimeseriesListCall {
	// The stored tag is emitted as an If-None-Match header by doRequest.
	c.ifNoneMatch_ = entityTag
	return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *TimeseriesListCall) Context(ctx context.Context) *TimeseriesListCall {
	// Remember the context; it is passed to SendRequest by doRequest.
	c.ctx_ = ctx
	return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *TimeseriesListCall) Header() http.Header {
	// Lazily allocate the header map on first access.
	if c.header_ != nil {
		return c.header_
	}
	c.header_ = make(http.Header)
	return c.header_
}
// doRequest builds and sends the HTTP GET for timeseries.list,
// returning the raw response or any construction/transport error.
func (c *TimeseriesListCall) doRequest(alt string) (*http.Response, error) {
	// Copy caller-supplied headers so the outgoing request carries them.
	reqHeaders := make(http.Header)
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	if c.ifNoneMatch_ != "" {
		reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
	}
	var body io.Reader = nil
	c.urlParams_.Set("alt", alt)
	urls := googleapi.ResolveRelative(c.s.BasePath, "{project}/timeseries/{metric}")
	urls += "?" + c.urlParams_.Encode()
	// Propagate request-construction errors instead of discarding them:
	// a malformed URL would otherwise surface as a nil-pointer panic below.
	req, err := http.NewRequest("GET", urls, body)
	if err != nil {
		return nil, err
	}
	req.Header = reqHeaders
	googleapi.Expand(req.URL, map[string]string{
		"project": c.project,
		"metric":  c.metric,
	})
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "cloudmonitoring.timeseries.list" call.
// Exactly one of *ListTimeseriesResponse or error will be non-nil. Any
// non-2xx status code is an error. Response headers are in either
// *ListTimeseriesResponse.ServerResponse.Header or (if a response was
// returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *TimeseriesListCall) Do(opts ...googleapi.CallOption) (*ListTimeseriesResponse, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// A 304 (Not Modified) is surfaced as a *googleapi.Error BEFORE err is
	// consulted, so callers can detect it via googleapi.IsNotModified.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	// Any non-2xx status becomes an error here; the body is not decoded.
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	// Pre-populate the server response metadata so callers can inspect
	// headers and status code alongside the decoded payload.
	ret := &ListTimeseriesResponse{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := &ret
	if err := json.NewDecoder(res.Body).Decode(target); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "List the data points of the time series that match the metric and labels values and that have data points in the interval. Large responses are paginated; use the nextPageToken returned in the response to request subsequent pages of results by setting the pageToken query parameter to the value of the nextPageToken.",
	//   "httpMethod": "GET",
	//   "id": "cloudmonitoring.timeseries.list",
	//   "parameterOrder": [
	//     "project",
	//     "metric",
	//     "youngest"
	//   ],
	//   "parameters": {
	//     "aggregator": {
	//       "description": "The aggregation function that will reduce the data points in each window to a single point. This parameter is only valid for non-cumulative metrics with a value type of INT64 or DOUBLE.",
	//       "enum": [
	//         "max",
	//         "mean",
	//         "min",
	//         "sum"
	//       ],
	//       "enumDescriptions": [
	//         "",
	//         "",
	//         "",
	//         ""
	//       ],
	//       "location": "query",
	//       "type": "string"
	//     },
	//     "count": {
	//       "default": "6000",
	//       "description": "Maximum number of data points per page, which is used for pagination of results.",
	//       "format": "int32",
	//       "location": "query",
	//       "maximum": "12000",
	//       "minimum": "1",
	//       "type": "integer"
	//     },
	//     "labels": {
	//       "description": "A collection of labels for the matching time series, which are represented as: \n- key==value: key equals the value \n- key=~value: key regex matches the value \n- key!=value: key does not equal the value \n- key!~value: key regex does not match the value For example, to list all of the time series descriptors for the region us-central1, you could specify:\nlabel=cloud.googleapis.com%2Flocation=~us-central1.*",
	//       "location": "query",
	//       "pattern": "(.+?)(==|=~|!=|!~)(.+)",
	//       "repeated": true,
	//       "type": "string"
	//     },
	//     "metric": {
	//       "description": "Metric names are protocol-free URLs as listed in the Supported Metrics page. For example, compute.googleapis.com/instance/disk/read_ops_count.",
	//       "location": "path",
	//       "required": true,
	//       "type": "string"
	//     },
	//     "oldest": {
	//       "description": "Start of the time interval (exclusive), which is expressed as an RFC 3339 timestamp. If neither oldest nor timespan is specified, the default time interval will be (youngest - 4 hours, youngest]",
	//       "location": "query",
	//       "type": "string"
	//     },
	//     "pageToken": {
	//       "description": "The pagination token, which is used to page through large result sets. Set this value to the value of the nextPageToken to retrieve the next page of results.",
	//       "location": "query",
	//       "type": "string"
	//     },
	//     "project": {
	//       "description": "The project ID to which this time series belongs. The value can be the numeric project ID or string-based project name.",
	//       "location": "path",
	//       "required": true,
	//       "type": "string"
	//     },
	//     "timespan": {
	//       "description": "Length of the time interval to query, which is an alternative way to declare the interval: (youngest - timespan, youngest]. The timespan and oldest parameters should not be used together. Units: \n- s: second \n- m: minute \n- h: hour \n- d: day \n- w: week Examples: 2s, 3m, 4w. Only one unit is allowed, for example: 2w3d is not allowed; you should use 17d instead.\n\nIf neither oldest nor timespan is specified, the default time interval will be (youngest - 4 hours, youngest].",
	//       "location": "query",
	//       "pattern": "[0-9]+[smhdw]?",
	//       "type": "string"
	//     },
	//     "window": {
	//       "description": "The sampling window. At most one data point will be returned for each window in the requested time interval. This parameter is only valid for non-cumulative metric types. Units: \n- m: minute \n- h: hour \n- d: day \n- w: week Examples: 3m, 4w. Only one unit is allowed, for example: 2w3d is not allowed; you should use 17d instead.",
	//       "location": "query",
	//       "pattern": "[0-9]+[mhdw]?",
	//       "type": "string"
	//     },
	//     "youngest": {
	//       "description": "End of the time interval (inclusive), which is expressed as an RFC 3339 timestamp.",
	//       "location": "query",
	//       "required": true,
	//       "type": "string"
	//     }
	//   },
	//   "path": "{project}/timeseries/{metric}",
	//   "request": {
	//     "$ref": "ListTimeseriesRequest"
	//   },
	//   "response": {
	//     "$ref": "ListTimeseriesResponse"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform",
	//     "https://www.googleapis.com/auth/monitoring"
	//   ]
	// }
}
// Pages invokes f for each page of results.
// A non-nil error returned from f will halt the iteration.
// The provided context supersedes any context provided to the Context method.
func (c *TimeseriesListCall) Pages(ctx context.Context, f func(*ListTimeseriesResponse) error) error {
	c.ctx_ = ctx
	// The deferred call re-applies the pageToken that was set before
	// iteration began, so the call object can be reused afterwards.
	defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point
	for {
		x, err := c.Do()
		if err != nil {
			return err
		}
		if err := f(x); err != nil {
			return err
		}
		// An empty NextPageToken marks the final page.
		if x.NextPageToken == "" {
			return nil
		}
		c.PageToken(x.NextPageToken)
	}
}
// method id "cloudmonitoring.timeseries.write":
type TimeseriesWriteCall struct {
	s                      *Service                // parent service; supplies the HTTP client and base path
	project                string                  // required "{project}" path parameter
	writetimeseriesrequest *WriteTimeseriesRequest // request body, JSON-serialized by doRequest
	urlParams_             gensupport.URLParams    // accumulated query parameters
	ctx_                   context.Context         // optional context governing the HTTP request
	header_                http.Header             // extra caller-supplied HTTP headers
}
// Write: Put data points to one or more time series for one or more
// metrics. If a time series does not exist, a new time series will be
// created. It is not allowed to write a time series point that is older
// than the existing youngest point of that time series. Points that are
// older than the existing youngest point of that time series will be
// discarded silently. Therefore, users should make sure that points of
// a time series are written sequentially in the order of their end
// time.
func (r *TimeseriesService) Write(project string, writetimeseriesrequest *WriteTimeseriesRequest) *TimeseriesWriteCall {
	// Build the call in a single composite literal.
	return &TimeseriesWriteCall{
		s:                      r.s,
		urlParams_:             make(gensupport.URLParams),
		project:                project,
		writetimeseriesrequest: writetimeseriesrequest,
	}
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *TimeseriesWriteCall) Fields(s ...googleapi.Field) *TimeseriesWriteCall {
	// Collapse the field selectors into the single "fields" parameter.
	combined := googleapi.CombineFields(s)
	c.urlParams_.Set("fields", combined)
	return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *TimeseriesWriteCall) Context(ctx context.Context) *TimeseriesWriteCall {
	// Remember the context; it is passed to SendRequest by doRequest.
	c.ctx_ = ctx
	return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *TimeseriesWriteCall) Header() http.Header {
	// Lazily allocate the header map on first access.
	if c.header_ != nil {
		return c.header_
	}
	c.header_ = make(http.Header)
	return c.header_
}
// doRequest serializes the write request body, builds the HTTP POST for
// timeseries.write, and sends it, returning the raw response or any error.
func (c *TimeseriesWriteCall) doRequest(alt string) (*http.Response, error) {
	// Copy caller-supplied headers so the outgoing request carries them.
	reqHeaders := make(http.Header)
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	var body io.Reader = nil
	body, err := googleapi.WithoutDataWrapper.JSONReader(c.writetimeseriesrequest)
	if err != nil {
		return nil, err
	}
	reqHeaders.Set("Content-Type", "application/json")
	c.urlParams_.Set("alt", alt)
	urls := googleapi.ResolveRelative(c.s.BasePath, "{project}/timeseries:write")
	urls += "?" + c.urlParams_.Encode()
	// Propagate request-construction errors instead of discarding them:
	// a malformed URL would otherwise surface as a nil-pointer panic below.
	req, err := http.NewRequest("POST", urls, body)
	if err != nil {
		return nil, err
	}
	req.Header = reqHeaders
	googleapi.Expand(req.URL, map[string]string{
		"project": c.project,
	})
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "cloudmonitoring.timeseries.write" call.
// Exactly one of *WriteTimeseriesResponse or error will be non-nil. Any
// non-2xx status code is an error. Response headers are in either
// *WriteTimeseriesResponse.ServerResponse.Header or (if a response was
// returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *TimeseriesWriteCall) Do(opts ...googleapi.CallOption) (*WriteTimeseriesResponse, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// A 304 (Not Modified) is surfaced as a *googleapi.Error BEFORE err is
	// consulted, so callers can detect it via googleapi.IsNotModified.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	// Any non-2xx status becomes an error here; the body is not decoded.
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	// Pre-populate the server response metadata so callers can inspect
	// headers and status code alongside the decoded payload.
	ret := &WriteTimeseriesResponse{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := &ret
	if err := json.NewDecoder(res.Body).Decode(target); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "Put data points to one or more time series for one or more metrics. If a time series does not exist, a new time series will be created. It is not allowed to write a time series point that is older than the existing youngest point of that time series. Points that are older than the existing youngest point of that time series will be discarded silently. Therefore, users should make sure that points of a time series are written sequentially in the order of their end time.",
	//   "httpMethod": "POST",
	//   "id": "cloudmonitoring.timeseries.write",
	//   "parameterOrder": [
	//     "project"
	//   ],
	//   "parameters": {
	//     "project": {
	//       "description": "The project ID. The value can be the numeric project ID or string-based project name.",
	//       "location": "path",
	//       "required": true,
	//       "type": "string"
	//     }
	//   },
	//   "path": "{project}/timeseries:write",
	//   "request": {
	//     "$ref": "WriteTimeseriesRequest"
	//   },
	//   "response": {
	//     "$ref": "WriteTimeseriesResponse"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform",
	//     "https://www.googleapis.com/auth/monitoring"
	//   ]
	// }
}
// method id "cloudmonitoring.timeseriesDescriptors.list":
type TimeseriesDescriptorsListCall struct {
	s                                *Service                          // parent service; supplies the HTTP client and base path
	project                          string                            // required "{project}" path parameter
	metric                           string                            // required "{metric}" path parameter
	listtimeseriesdescriptorsrequest *ListTimeseriesDescriptorsRequest // retained request message (not serialized by doRequest, which issues a GET)
	urlParams_                       gensupport.URLParams              // accumulated query parameters (includes required "youngest")
	ifNoneMatch_                     string                            // optional ETag sent as If-None-Match
	ctx_                             context.Context                   // optional context governing the HTTP request
	header_                          http.Header                       // extra caller-supplied HTTP headers
}
// List: List the descriptors of the time series that match the metric
// and labels values and that have data points in the interval. Large
// responses are paginated; use the nextPageToken returned in the
// response to request subsequent pages of results by setting the
// pageToken query parameter to the value of the nextPageToken.
func (r *TimeseriesDescriptorsService) List(project string, metric string, youngest string, listtimeseriesdescriptorsrequest *ListTimeseriesDescriptorsRequest) *TimeseriesDescriptorsListCall {
	// Assemble the call, then record the required "youngest" query parameter.
	call := &TimeseriesDescriptorsListCall{
		s:                                r.s,
		urlParams_:                       make(gensupport.URLParams),
		project:                          project,
		metric:                           metric,
		listtimeseriesdescriptorsrequest: listtimeseriesdescriptorsrequest,
	}
	call.urlParams_.Set("youngest", youngest)
	return call
}
// Aggregator sets the optional parameter "aggregator": The aggregation
// function that will reduce the data points in each window to a single
// point. This parameter is only valid for non-cumulative metrics with a
// value type of INT64 or DOUBLE.
//
// Possible values:
// "max"
// "mean"
// "min"
// "sum"
func (c *TimeseriesDescriptorsListCall) Aggregator(aggregator string) *TimeseriesDescriptorsListCall {
	// Record the reduction function ("max", "mean", "min" or "sum").
	const param = "aggregator"
	c.urlParams_.Set(param, aggregator)
	return c
}
// Count sets the optional parameter "count": Maximum number of time
// series descriptors per page. Used for pagination. If not specified,
// count = 100.
func (c *TimeseriesDescriptorsListCall) Count(count int64) *TimeseriesDescriptorsListCall {
	// Encode the page size as its decimal string form.
	c.urlParams_.Set("count", fmt.Sprintf("%d", count))
	return c
}
// Labels sets the optional parameter "labels": A collection of labels
// for the matching time series, which are represented as:
// - key==value: key equals the value
// - key=~value: key regex matches the value
// - key!=value: key does not equal the value
// - key!~value: key regex does not match the value For example, to
// list all of the time series descriptors for the region us-central1,
// you could
// specify:
// label=cloud.googleapis.com%2Flocation=~us-central1.*
func (c *TimeseriesDescriptorsListCall) Labels(labels ...string) *TimeseriesDescriptorsListCall {
	// Store a defensive copy so later mutation of the caller's slice
	// cannot alter the recorded parameters.
	vals := make([]string, len(labels))
	copy(vals, labels)
	c.urlParams_.SetMulti("labels", vals)
	return c
}
// Oldest sets the optional parameter "oldest": Start of the time
// interval (exclusive), which is expressed as an RFC 3339 timestamp. If
// neither oldest nor timespan is specified, the default time interval
// will be (youngest - 4 hours, youngest]
func (c *TimeseriesDescriptorsListCall) Oldest(oldest string) *TimeseriesDescriptorsListCall {
	// Record the interval start (RFC 3339 timestamp, exclusive).
	const param = "oldest"
	c.urlParams_.Set(param, oldest)
	return c
}
// PageToken sets the optional parameter "pageToken": The pagination
// token, which is used to page through large result sets. Set this
// value to the value of the nextPageToken to retrieve the next page of
// results.
func (c *TimeseriesDescriptorsListCall) PageToken(pageToken string) *TimeseriesDescriptorsListCall {
	// Record the continuation token for the next request.
	const param = "pageToken"
	c.urlParams_.Set(param, pageToken)
	return c
}
// Timespan sets the optional parameter "timespan": Length of the time
// interval to query, which is an alternative way to declare the
// interval: (youngest - timespan, youngest]. The timespan and oldest
// parameters should not be used together. Units:
// - s: second
// - m: minute
// - h: hour
// - d: day
// - w: week Examples: 2s, 3m, 4w. Only one unit is allowed, for
// example: 2w3d is not allowed; you should use 17d instead.
//
// If neither oldest nor timespan is specified, the default time
// interval will be (youngest - 4 hours, youngest].
func (c *TimeseriesDescriptorsListCall) Timespan(timespan string) *TimeseriesDescriptorsListCall {
	// Record the interval length (e.g. "2s", "3m", "4w"); alternative to oldest.
	const param = "timespan"
	c.urlParams_.Set(param, timespan)
	return c
}
// Window sets the optional parameter "window": The sampling window. At
// most one data point will be returned for each window in the requested
// time interval. This parameter is only valid for non-cumulative metric
// types. Units:
// - m: minute
// - h: hour
// - d: day
// - w: week Examples: 3m, 4w. Only one unit is allowed, for example:
// 2w3d is not allowed; you should use 17d instead.
func (c *TimeseriesDescriptorsListCall) Window(window string) *TimeseriesDescriptorsListCall {
	// Record the sampling window (e.g. "3m", "4w").
	const param = "window"
	c.urlParams_.Set(param, window)
	return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *TimeseriesDescriptorsListCall) Fields(s ...googleapi.Field) *TimeseriesDescriptorsListCall {
	// Collapse the field selectors into the single "fields" parameter.
	combined := googleapi.CombineFields(s)
	c.urlParams_.Set("fields", combined)
	return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *TimeseriesDescriptorsListCall) IfNoneMatch(entityTag string) *TimeseriesDescriptorsListCall {
	// The stored tag is emitted as an If-None-Match header by doRequest.
	c.ifNoneMatch_ = entityTag
	return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *TimeseriesDescriptorsListCall) Context(ctx context.Context) *TimeseriesDescriptorsListCall {
	// Remember the context; it is passed to SendRequest by doRequest.
	c.ctx_ = ctx
	return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *TimeseriesDescriptorsListCall) Header() http.Header {
	// Lazily allocate the header map on first access.
	if c.header_ != nil {
		return c.header_
	}
	c.header_ = make(http.Header)
	return c.header_
}
// doRequest builds and sends the HTTP GET for timeseriesDescriptors.list,
// returning the raw response or any construction/transport error.
func (c *TimeseriesDescriptorsListCall) doRequest(alt string) (*http.Response, error) {
	// Copy caller-supplied headers so the outgoing request carries them.
	reqHeaders := make(http.Header)
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	if c.ifNoneMatch_ != "" {
		reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
	}
	var body io.Reader = nil
	c.urlParams_.Set("alt", alt)
	urls := googleapi.ResolveRelative(c.s.BasePath, "{project}/timeseriesDescriptors/{metric}")
	urls += "?" + c.urlParams_.Encode()
	// Propagate request-construction errors instead of discarding them:
	// a malformed URL would otherwise surface as a nil-pointer panic below.
	req, err := http.NewRequest("GET", urls, body)
	if err != nil {
		return nil, err
	}
	req.Header = reqHeaders
	googleapi.Expand(req.URL, map[string]string{
		"project": c.project,
		"metric":  c.metric,
	})
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "cloudmonitoring.timeseriesDescriptors.list" call
// (HTTP GET on "{project}/timeseriesDescriptors/{metric}").
// Exactly one of *ListTimeseriesDescriptorsResponse or error will be
// non-nil. Any non-2xx status code is an error. Response headers are in
// either *ListTimeseriesDescriptorsResponse.ServerResponse.Header or
// (if a response was returned at all) in
// error.(*googleapi.Error).Header. Use googleapi.IsNotModified to check
// whether the returned error was because http.StatusNotModified was
// returned.
func (c *TimeseriesDescriptorsListCall) Do(opts ...googleapi.CallOption) (*ListTimeseriesDescriptorsResponse, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// A 304 means the cached entity tag matched; surface it as a
	// googleapi.Error so callers can detect it via IsNotModified.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{Code: res.StatusCode, Header: res.Header}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	// Decode the JSON payload into the typed response, preserving the
	// raw headers and status code for the caller.
	ret := &ListTimeseriesDescriptorsResponse{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
		return nil, err
	}
	return ret, nil
	// Discovery-document summary for this method:
	//   id: cloudmonitoring.timeseriesDescriptors.list, httpMethod: GET
	//   path: {project}/timeseriesDescriptors/{metric}
	//   required: project (path), metric (path), youngest (query, RFC 3339)
	//   optional: aggregator (max|mean|min|sum), count (1..1000, default 100),
	//             labels (repeated, "(.+?)(==|=~|!=|!~)(.+)"), oldest,
	//             pageToken, timespan ("[0-9]+[smhdw]?"), window ("[0-9]+[mhdw]?")
	//   response: ListTimeseriesDescriptorsResponse
	//   scopes: cloud-platform, monitoring
}
// Pages invokes f for each page of results.
// A non-nil error returned from f will halt the iteration.
// The provided context supersedes any context provided to the Context method.
func (c *TimeseriesDescriptorsListCall) Pages(ctx context.Context, f func(*ListTimeseriesDescriptorsResponse) error) error {
	c.ctx_ = ctx
	// The deferred call captures the caller's original page token now and
	// restores it when iteration finishes, whatever the outcome.
	defer c.PageToken(c.urlParams_.Get("pageToken"))
	for {
		page, err := c.Do()
		if err != nil {
			return err
		}
		if err = f(page); err != nil {
			return err
		}
		token := page.NextPageToken
		if token == "" {
			return nil
		}
		c.PageToken(token)
	}
}
| apache-2.0 |
slack4u/homebrew-cask | Casks/plan.rb | 162 | cask 'plan' do
version :latest
sha256 :no_check
url 'https://getplan.co/mac-beta.zip'
name 'Plan'
homepage 'https://getplan.co/'
app 'Plan.app'
end
| bsd-2-clause |
AlessandroNepi/FAMDUniversity | client/www/Chart.js-master/src/scales/scale.linear.js | 5657 | 'use strict';
module.exports = function(Chart) {
var helpers = Chart.helpers;
var defaultConfig = {
position: 'left',
ticks: {
callback: Chart.Ticks.formatters.linear
}
};
var LinearScale = Chart.LinearScaleBase.extend({
determineDataLimits: function() {
var me = this;
var opts = me.options;
var chart = me.chart;
var data = chart.data;
var datasets = data.datasets;
var isHorizontal = me.isHorizontal();
var DEFAULT_MIN = 0;
var DEFAULT_MAX = 1;
function IDMatches(meta) {
return isHorizontal ? meta.xAxisID === me.id : meta.yAxisID === me.id;
}
// First Calculate the range
me.min = null;
me.max = null;
var hasStacks = opts.stacked;
if (hasStacks === undefined) {
helpers.each(datasets, function(dataset, datasetIndex) {
if (hasStacks) {
return;
}
var meta = chart.getDatasetMeta(datasetIndex);
if (chart.isDatasetVisible(datasetIndex) && IDMatches(meta) &&
meta.stack !== undefined) {
hasStacks = true;
}
});
}
if (opts.stacked || hasStacks) {
var valuesPerStack = {};
helpers.each(datasets, function(dataset, datasetIndex) {
var meta = chart.getDatasetMeta(datasetIndex);
var key = [
meta.type,
// we have a separate stack for stack=undefined datasets when the opts.stacked is undefined
((opts.stacked === undefined && meta.stack === undefined) ? datasetIndex : ''),
meta.stack
].join('.');
if (valuesPerStack[key] === undefined) {
valuesPerStack[key] = {
positiveValues: [],
negativeValues: []
};
}
// Store these per type
var positiveValues = valuesPerStack[key].positiveValues;
var negativeValues = valuesPerStack[key].negativeValues;
if (chart.isDatasetVisible(datasetIndex) && IDMatches(meta)) {
helpers.each(dataset.data, function(rawValue, index) {
var value = +me.getRightValue(rawValue);
if (isNaN(value) || meta.data[index].hidden) {
return;
}
positiveValues[index] = positiveValues[index] || 0;
negativeValues[index] = negativeValues[index] || 0;
if (opts.relativePoints) {
positiveValues[index] = 100;
} else if (value < 0) {
negativeValues[index] += value;
} else {
positiveValues[index] += value;
}
});
}
});
helpers.each(valuesPerStack, function(valuesForType) {
var values = valuesForType.positiveValues.concat(valuesForType.negativeValues);
var minVal = helpers.min(values);
var maxVal = helpers.max(values);
me.min = me.min === null ? minVal : Math.min(me.min, minVal);
me.max = me.max === null ? maxVal : Math.max(me.max, maxVal);
});
} else {
helpers.each(datasets, function(dataset, datasetIndex) {
var meta = chart.getDatasetMeta(datasetIndex);
if (chart.isDatasetVisible(datasetIndex) && IDMatches(meta)) {
helpers.each(dataset.data, function(rawValue, index) {
var value = +me.getRightValue(rawValue);
if (isNaN(value) || meta.data[index].hidden) {
return;
}
if (me.min === null) {
me.min = value;
} else if (value < me.min) {
me.min = value;
}
if (me.max === null) {
me.max = value;
} else if (value > me.max) {
me.max = value;
}
});
}
});
}
me.min = isFinite(me.min) ? me.min : DEFAULT_MIN;
me.max = isFinite(me.max) ? me.max : DEFAULT_MAX;
// Common base implementation to handle ticks.min, ticks.max, ticks.beginAtZero
this.handleTickRangeOptions();
},
getTickLimit: function() {
var maxTicks;
var me = this;
var tickOpts = me.options.ticks;
if (me.isHorizontal()) {
maxTicks = Math.min(tickOpts.maxTicksLimit ? tickOpts.maxTicksLimit : 11, Math.ceil(me.width / 50));
} else {
// The factor of 2 used to scale the font size has been experimentally determined.
var tickFontSize = helpers.getValueOrDefault(tickOpts.fontSize, Chart.defaults.global.defaultFontSize);
maxTicks = Math.min(tickOpts.maxTicksLimit ? tickOpts.maxTicksLimit : 11, Math.ceil(me.height / (2 * tickFontSize)));
}
return maxTicks;
},
// Called after the ticks are built. We need
handleDirectionalChanges: function() {
if (!this.isHorizontal()) {
// We are in a vertical orientation. The top value is the highest. So reverse the array
this.ticks.reverse();
}
},
getLabelForIndex: function(index, datasetIndex) {
return +this.getRightValue(this.chart.data.datasets[datasetIndex].data[index]);
},
// Utils
getPixelForValue: function(value) {
// This must be called after fit has been run so that
// this.left, this.top, this.right, and this.bottom have been defined
var me = this;
var start = me.start;
var rightValue = +me.getRightValue(value);
var pixel;
var range = me.end - start;
if (me.isHorizontal()) {
pixel = me.left + (me.width / range * (rightValue - start));
return Math.round(pixel);
}
pixel = me.bottom - (me.height / range * (rightValue - start));
return Math.round(pixel);
},
getValueForPixel: function(pixel) {
var me = this;
var isHorizontal = me.isHorizontal();
var innerDimension = isHorizontal ? me.width : me.height;
var offset = (isHorizontal ? pixel - me.left : me.bottom - pixel) / innerDimension;
return me.start + ((me.end - me.start) * offset);
},
getPixelForTick: function(index) {
return this.getPixelForValue(this.ticksAsNumbers[index]);
}
});
Chart.scaleService.registerScaleType('linear', LinearScale, defaultConfig);
};
| mit |
iains/darwin-gcc-5 | libstdc++-v3/include/ext/pb_ds/detail/ov_tree_map_/ov_tree_map_.hpp | 15366 | // -*- C++ -*-
// Copyright (C) 2005-2015 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the terms
// of the GNU General Public License as published by the Free Software
// Foundation; either version 3, or (at your option) any later
// version.
// This library is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// General Public License for more details.
// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.
// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
// <http://www.gnu.org/licenses/>.
// Copyright (C) 2004 Ami Tavory and Vladimir Dreizin, IBM-HRL.
// Permission to use, copy, modify, sell, and distribute this software
// is hereby granted without fee, provided that the above copyright
// notice appears in all copies, and that both that copyright notice
// and this permission notice appear in supporting documentation. None
// of the above authors, nor IBM Haifa Research Laboratories, make any
// representation about the suitability of this software for any
// purpose. It is provided "as is" without express or implied
// warranty.
/**
* @file ov_tree_map_/ov_tree_map_.hpp
* Contains an implementation class for ov_tree.
*/
#include <map>
#include <set>
#include <ext/pb_ds/exception.hpp>
#include <ext/pb_ds/tree_policy.hpp>
#include <ext/pb_ds/detail/eq_fn/eq_by_less.hpp>
#include <ext/pb_ds/detail/types_traits.hpp>
#include <ext/pb_ds/detail/type_utils.hpp>
#include <ext/pb_ds/detail/tree_trace_base.hpp>
#ifdef _GLIBCXX_DEBUG
#include <ext/pb_ds/detail/debug_map_base.hpp>
#endif
#include <utility>
#include <functional>
#include <algorithm>
#include <vector>
#include <assert.h>
#include <debug/debug.h>
namespace __gnu_pbds
{
namespace detail
{
#ifdef PB_DS_DATA_TRUE_INDICATOR
#define PB_DS_OV_TREE_NAME ov_tree_map
#define PB_DS_CONST_NODE_ITERATOR_NAME ov_tree_node_const_iterator_map
#endif
#ifdef PB_DS_DATA_FALSE_INDICATOR
#define PB_DS_OV_TREE_NAME ov_tree_set
#define PB_DS_CONST_NODE_ITERATOR_NAME ov_tree_node_const_iterator_set
#endif
#define PB_DS_CLASS_T_DEC \
template<typename Key, typename Mapped, typename Cmp_Fn, \
typename Node_And_It_Traits, typename _Alloc>
#define PB_DS_CLASS_C_DEC \
PB_DS_OV_TREE_NAME<Key, Mapped, Cmp_Fn, Node_And_It_Traits, _Alloc>
#define PB_DS_OV_TREE_TRAITS_BASE \
types_traits<Key, Mapped, _Alloc, false>
#ifdef _GLIBCXX_DEBUG
#define PB_DS_DEBUG_MAP_BASE_C_DEC \
debug_map_base<Key, eq_by_less<Key, Cmp_Fn>, \
typename _Alloc::template rebind<Key>::other::const_reference>
#endif
#ifdef PB_DS_TREE_TRACE
#define PB_DS_TREE_TRACE_BASE_C_DEC \
tree_trace_base<typename Node_And_It_Traits::node_const_iterator, \
typename Node_And_It_Traits::node_iterator, \
Cmp_Fn, false, _Alloc>
#endif
#ifndef PB_DS_CHECK_KEY_EXISTS
# error Missing definition
#endif
/**
 * @brief Ordered-vector tree associative-container.
 * @ingroup branch-detail
 *
 * All elements are kept sorted in one contiguous dynamic array
 * (m_a_values), so lookup is a binary search over the vector, while
 * every insertion or erasure allocates a new vector and copies the
 * elements across (see insert_new_val).
 */
template<typename Key, typename Mapped, typename Cmp_Fn,
	 typename Node_And_It_Traits, typename _Alloc>
class PB_DS_OV_TREE_NAME :
#ifdef _GLIBCXX_DEBUG
  protected PB_DS_DEBUG_MAP_BASE_C_DEC,
#endif
#ifdef PB_DS_TREE_TRACE
  public PB_DS_TREE_TRACE_BASE_C_DEC,
#endif
  public Cmp_Fn,
  public Node_And_It_Traits::node_update,
  public PB_DS_OV_TREE_TRAITS_BASE
{
private:
  typedef PB_DS_OV_TREE_TRAITS_BASE traits_base;
  typedef Node_And_It_Traits traits_type;

  typedef typename remove_const<typename traits_base::value_type>::type non_const_value_type;

  // Allocator for the contiguous value array; value_vector is the raw
  // pointer type used as the storage handle.
  typedef typename _Alloc::template rebind<non_const_value_type>::other value_allocator;
  typedef typename value_allocator::pointer value_vector;

#ifdef _GLIBCXX_DEBUG
  typedef PB_DS_DEBUG_MAP_BASE_C_DEC debug_base;
#endif

#ifdef PB_DS_TREE_TRACE
  typedef PB_DS_TREE_TRACE_BASE_C_DEC trace_base;
#endif

  typedef typename traits_base::pointer mapped_pointer_;
  typedef typename traits_base::const_pointer mapped_const_pointer_;

  // Per-node metadata used by node_update policies (e.g. subtree counts
  // for order statistics), stored in a parallel array m_a_metadata.
  typedef typename traits_type::metadata_type metadata_type;

  typedef typename _Alloc::template rebind<metadata_type>::other metadata_allocator;
  typedef typename metadata_allocator::pointer metadata_pointer;
  typedef typename metadata_allocator::const_reference metadata_const_reference;
  typedef typename metadata_allocator::reference metadata_reference;

  typedef typename traits_type::null_node_update_pointer
  null_node_update_pointer;

public:
  typedef ov_tree_tag container_category;
  typedef _Alloc allocator_type;
  typedef typename _Alloc::size_type size_type;
  typedef typename _Alloc::difference_type difference_type;
  typedef Cmp_Fn cmp_fn;
  typedef typename traits_base::key_type key_type;
  typedef typename traits_base::key_pointer key_pointer;
  typedef typename traits_base::key_const_pointer key_const_pointer;
  typedef typename traits_base::key_reference key_reference;
  typedef typename traits_base::key_const_reference key_const_reference;
  typedef typename traits_base::mapped_type mapped_type;
  typedef typename traits_base::mapped_pointer mapped_pointer;
  typedef typename traits_base::mapped_const_pointer mapped_const_pointer;
  typedef typename traits_base::mapped_reference mapped_reference;
  typedef typename traits_base::mapped_const_reference mapped_const_reference;
  typedef typename traits_base::value_type value_type;
  typedef typename traits_base::pointer pointer;
  typedef typename traits_base::const_pointer const_pointer;
  typedef typename traits_base::reference reference;
  typedef typename traits_base::const_reference const_reference;

  // Iterators are plain pointers into the sorted value array.
  typedef const_pointer point_const_iterator;

#ifdef PB_DS_DATA_TRUE_INDICATOR
  typedef pointer point_iterator;
#else
  typedef point_const_iterator point_iterator;
#endif

  typedef point_iterator iterator;
  typedef point_const_iterator const_iterator;

  /// Conditional destructor.
  /// Destroys the already-constructed prefix [m_a_vec, m_r_last_it) and
  /// deallocates the vector unless set_no_action() was called; used for
  /// exception safety while building a new value array.
  template<typename Size_Type>
  class cond_dtor
  {
  public:
    cond_dtor(value_vector a_vec, iterator& r_last_it,
	      Size_Type total_size)
    : m_a_vec(a_vec), m_r_last_it(r_last_it), m_max_size(total_size),
      m_no_action(false)
    { }

    ~cond_dtor()
    {
      if (m_no_action)
	return;
      iterator it = m_a_vec;
      while (it != m_r_last_it)
	{
	  it->~value_type();
	  ++it;
	}

      if (m_max_size > 0)
	value_allocator().deallocate(m_a_vec, m_max_size);
    }

    inline void
    set_no_action()
    { m_no_action = true; }

  protected:
    value_vector m_a_vec;
    iterator& m_r_last_it;
    const Size_Type m_max_size;
    bool m_no_action;
  };

  typedef typename traits_type::node_update node_update;
  typedef typename traits_type::node_iterator node_iterator;
  typedef typename traits_type::node_const_iterator node_const_iterator;

  PB_DS_OV_TREE_NAME();

  PB_DS_OV_TREE_NAME(const Cmp_Fn&);

  PB_DS_OV_TREE_NAME(const Cmp_Fn&, const node_update&);

  PB_DS_OV_TREE_NAME(const PB_DS_CLASS_C_DEC&);

  ~PB_DS_OV_TREE_NAME();

  void
  swap(PB_DS_CLASS_C_DEC&);

  template<typename It>
  void
  copy_from_range(It, It);

  inline size_type
  max_size() const;

  inline bool
  empty() const;

  inline size_type
  size() const;

  Cmp_Fn&
  get_cmp_fn();

  const Cmp_Fn&
  get_cmp_fn() const;

  // Map-semantics subscript: returns the mapped value for r_key,
  // inserting a default-constructed mapped_type if the key is absent.
  // For set semantics it degenerates to insert(r_key).
  inline mapped_reference
  operator[](key_const_reference r_key)
  {
#ifdef PB_DS_DATA_TRUE_INDICATOR
    PB_DS_ASSERT_VALID((*this))
    point_iterator it = lower_bound(r_key);
    if (it != end() && !Cmp_Fn::operator()(r_key, PB_DS_V2F(*it)))
      {
	PB_DS_CHECK_KEY_EXISTS(r_key)
	PB_DS_ASSERT_VALID((*this))
	return it->second;
      }
    return insert_new_val(it, std::make_pair(r_key, mapped_type()))->second;
#else
    insert(r_key);
    return traits_base::s_null_type;
#endif
  }

  // Inserts r_value if its key is not present; returns the position and
  // whether an insertion took place.
  inline std::pair<point_iterator, bool>
  insert(const_reference r_value)
  {
    PB_DS_ASSERT_VALID((*this))
    key_const_reference r_key = PB_DS_V2F(r_value);
    point_iterator it = lower_bound(r_key);
    if (it != end()&& !Cmp_Fn::operator()(r_key, PB_DS_V2F(*it)))
      {
	PB_DS_ASSERT_VALID((*this))
	PB_DS_CHECK_KEY_EXISTS(r_key)
	return std::make_pair(it, false);
      }
    return std::make_pair(insert_new_val(it, r_value), true);
  }

  // Binary search over the sorted vector: first position whose key is
  // not less than r_key.
  inline point_iterator
  lower_bound(key_const_reference r_key)
  {
    pointer it = m_a_values;
    pointer e_it = m_a_values + m_size;
    while (it != e_it)
      {
	pointer mid_it = it + ((e_it - it) >> 1);
	if (cmp_fn::operator()(PB_DS_V2F(*mid_it), r_key))
	  it = ++mid_it;
	else
	  e_it = mid_it;
      }
    return it;
  }

  inline point_const_iterator
  lower_bound(key_const_reference r_key) const
  { return const_cast<PB_DS_CLASS_C_DEC& >(*this).lower_bound(r_key); }

  inline point_iterator
  upper_bound(key_const_reference r_key)
  {
    iterator pot_it = lower_bound(r_key);
    if (pot_it != end() && !Cmp_Fn::operator()(r_key, PB_DS_V2F(*pot_it)))
      {
	PB_DS_CHECK_KEY_EXISTS(r_key)
	return ++pot_it;
      }

    PB_DS_CHECK_KEY_DOES_NOT_EXIST(r_key)
    return pot_it;
  }

  inline point_const_iterator
  upper_bound(key_const_reference r_key) const
  { return const_cast<PB_DS_CLASS_C_DEC&>(*this).upper_bound(r_key); }

  inline point_iterator
  find(key_const_reference r_key)
  {
    PB_DS_ASSERT_VALID((*this))
    iterator pot_it = lower_bound(r_key);
    if (pot_it != end() && !Cmp_Fn::operator()(r_key, PB_DS_V2F(*pot_it)))
      {
	PB_DS_CHECK_KEY_EXISTS(r_key)
	return pot_it;
      }

    PB_DS_CHECK_KEY_DOES_NOT_EXIST(r_key)
    return end();
  }

  inline point_const_iterator
  find(key_const_reference r_key) const
  { return (const_cast<PB_DS_CLASS_C_DEC&>(*this).find(r_key)); }

  bool
  erase(key_const_reference);

  template<typename Pred>
  inline size_type
  erase_if(Pred);

  inline iterator
  erase(iterator it)
  { return erase_imp<iterator>(it); }

  void
  clear();

  void
  join(PB_DS_CLASS_C_DEC&);

  void
  split(key_const_reference, PB_DS_CLASS_C_DEC&);

  inline iterator
  begin()
  { return m_a_values; }

  inline const_iterator
  begin() const
  { return m_a_values; }

  inline iterator
  end()
  { return m_end_it; }

  inline const_iterator
  end() const
  { return m_end_it; }

  /// Returns a const node_iterator corresponding to the node at the
  /// root of the tree.
  inline node_const_iterator
  node_begin() const;

  /// Returns a node_iterator corresponding to the node at the
  /// root of the tree.
  inline node_iterator
  node_begin();

  /// Returns a const node_iterator corresponding to a node just
  /// after a leaf of the tree.
  inline node_const_iterator
  node_end() const;

  /// Returns a node_iterator corresponding to a node just
  /// after a leaf of the tree.
  inline node_iterator
  node_end();

private:
  inline void
  update(node_iterator, null_node_update_pointer);

  template<typename Node_Update>
  void
  update(node_iterator, Node_Update*);

  void
  reallocate_metadata(null_node_update_pointer, size_type);

  template<typename Node_Update_>
  void
  reallocate_metadata(Node_Update_*, size_type);

  template<typename It>
  void
  copy_from_ordered_range(It, It);

  void
  value_swap(PB_DS_CLASS_C_DEC&);

  template<typename It>
  void
  copy_from_ordered_range(It, It, It, It);

  // Midpoint of [p_begin, p_end); the implicit "root" of the subrange
  // when the sorted vector is viewed as a balanced tree.
  template<typename Ptr>
  inline static Ptr
  mid_pointer(Ptr p_begin, Ptr p_end)
  {
    _GLIBCXX_DEBUG_ASSERT(p_end >= p_begin);
    return (p_begin + (p_end - p_begin) / 2);
  }

  // Inserts r_value at position it by building a brand-new vector of
  // size m_size + 1: copies the prefix, constructs the new value, copies
  // the suffix, then swaps the arrays. cond_dtor guards against leaks if
  // a copy constructor throws mid-way.
  inline iterator
  insert_new_val(iterator it, const_reference r_value)
  {
#ifdef PB_DS_REGRESSION
    typename _Alloc::group_adjustor adjust(m_size);
#endif

    PB_DS_CHECK_KEY_DOES_NOT_EXIST(PB_DS_V2F(r_value))

    value_vector a_values = s_value_alloc.allocate(m_size + 1);

    iterator source_it = begin();
    iterator source_end_it = end();
    iterator target_it = a_values;
    iterator ret_it;

    cond_dtor<size_type> cd(a_values, target_it, m_size + 1);
    while (source_it != it)
      {
	new (const_cast<void*>(static_cast<const void*>(target_it)))
	  value_type(*source_it++);
	++target_it;
      }

    new (const_cast<void*>(static_cast<const void*>(ret_it = target_it)))
      value_type(r_value);
    ++target_it;

    while (source_it != source_end_it)
      {
	new (const_cast<void*>(static_cast<const void*>(target_it)))
	  value_type(*source_it++);
	++target_it;
      }

    reallocate_metadata((node_update*)this, m_size + 1);
    cd.set_no_action();
    if (m_size != 0)
      {
	// Destroy and free the old vector via a second cond_dtor.
	cond_dtor<size_type> cd1(m_a_values, m_end_it, m_size);
      }

    ++m_size;
    m_a_values = a_values;
    m_end_it = m_a_values + m_size;
    _GLIBCXX_DEBUG_ONLY(debug_base::insert_new(PB_DS_V2F(r_value)));
    update(node_begin(), (node_update* )this);
    PB_DS_ASSERT_VALID((*this))
    return ret_it;
  }

#ifdef _GLIBCXX_DEBUG
  void
  assert_valid(const char*, int) const;

  void
  assert_iterators(const char*, int) const;
#endif

  template<typename It>
  It
  erase_imp(It);

  inline node_const_iterator
  PB_DS_node_begin_imp() const;

  inline node_const_iterator
  PB_DS_node_end_imp() const;

  inline node_iterator
  PB_DS_node_begin_imp();

  inline node_iterator
  PB_DS_node_end_imp();

private:
  static value_allocator s_value_alloc;
  static metadata_allocator s_metadata_alloc;

  // Sorted contiguous array of values, parallel metadata array, cached
  // one-past-the-end iterator, and the current element count.
  value_vector m_a_values;
  metadata_pointer m_a_metadata;
  iterator m_end_it;
  size_type m_size;
};
#include <ext/pb_ds/detail/ov_tree_map_/constructors_destructor_fn_imps.hpp>
#include <ext/pb_ds/detail/ov_tree_map_/iterators_fn_imps.hpp>
#include <ext/pb_ds/detail/ov_tree_map_/debug_fn_imps.hpp>
#include <ext/pb_ds/detail/ov_tree_map_/erase_fn_imps.hpp>
#include <ext/pb_ds/detail/ov_tree_map_/insert_fn_imps.hpp>
#include <ext/pb_ds/detail/ov_tree_map_/info_fn_imps.hpp>
#include <ext/pb_ds/detail/ov_tree_map_/split_join_fn_imps.hpp>
#include <ext/pb_ds/detail/bin_search_tree_/policy_access_fn_imps.hpp>
#undef PB_DS_CLASS_C_DEC
#undef PB_DS_CLASS_T_DEC
#undef PB_DS_OV_TREE_NAME
#undef PB_DS_OV_TREE_TRAITS_BASE
#undef PB_DS_DEBUG_MAP_BASE_C_DEC
#ifdef PB_DS_TREE_TRACE
#undef PB_DS_TREE_TRACE_BASE_C_DEC
#endif
#undef PB_DS_CONST_NODE_ITERATOR_NAME
} // namespace detail
} // namespace __gnu_pbds
| gpl-2.0 |
zero24/joomla-cms | libraries/joomla/github/package/repositories/keys.php | 3231 | <?php
/**
* @package Joomla.Platform
* @subpackage GitHub
*
* @copyright Copyright (C) 2005 - 2015 Open Source Matters, Inc. All rights reserved.
* @license GNU General Public License version 2 or later; see LICENSE
*/
defined('JPATH_PLATFORM') or die;
/**
 * GitHub API Repositories Keys class for the Joomla Platform.
 *
 * Wraps the repository deploy-key endpoints (list, get, create, edit, delete).
 *
 * @documentation http://developer.github.com/v3/repos/keys
 *
 * @since  11.3
 */
class JGithubPackageRepositoriesKeys extends JGithubPackage
{
	/**
	 * List keys in a repository.
	 *
	 * @param   string  $owner  The name of the owner of the GitHub repository.
	 * @param   string  $repo   The name of the GitHub repository.
	 *
	 * @since   12.4
	 *
	 * @return  object
	 */
	public function getList($owner, $repo)
	{
		// Build the request path.
		$path = '/repos/' . $owner . '/' . $repo . '/keys';

		return $this->processResponse(
			$this->client->get($this->fetchUrl($path))
		);
	}

	/**
	 * Get a key.
	 *
	 * @param   string   $owner  The name of the owner of the GitHub repository.
	 * @param   string   $repo   The name of the GitHub repository.
	 * @param   integer  $id     The id of the key.
	 *
	 * @since   12.4
	 *
	 * @return  object
	 */
	public function get($owner, $repo, $id)
	{
		// Build the request path.
		$path = '/repos/' . $owner . '/' . $repo . '/keys/' . (int) $id;

		return $this->processResponse(
			$this->client->get($this->fetchUrl($path))
		);
	}

	/**
	 * Create a key.
	 *
	 * @param   string  $owner  The name of the owner of the GitHub repository.
	 * @param   string  $repo   The name of the GitHub repository.
	 * @param   string  $title  The key title.
	 * @param   string  $key    The key.
	 *
	 * @since   12.4
	 *
	 * @return  object
	 */
	public function create($owner, $repo, $title, $key)
	{
		// Build the request path.
		$path = '/repos/' . $owner . '/' . $repo . '/keys';

		$data = array(
			'title' => $title,
			'key'   => $key
		);

		// A 201 Created status is expected for a successful creation.
		return $this->processResponse(
			$this->client->post($this->fetchUrl($path), json_encode($data)),
			201
		);
	}

	/**
	 * Edit a key.
	 *
	 * @param   string   $owner  The name of the owner of the GitHub repository.
	 * @param   string   $repo   The name of the GitHub repository.
	 * @param   integer  $id     The id of the key.
	 * @param   string   $title  The key title.
	 * @param   string   $key    The key.
	 *
	 * @since   12.4
	 *
	 * @return  object
	 */
	public function edit($owner, $repo, $id, $title, $key)
	{
		// Build the request path.
		$path = '/repos/' . $owner . '/' . $repo . '/keys/' . (int) $id;

		$data = array(
			'title' => $title,
			'key'   => $key
		);

		return $this->processResponse(
			$this->client->patch($this->fetchUrl($path), json_encode($data))
		);
	}

	/**
	 * Delete a key.
	 *
	 * @param   string   $owner  The name of the owner of the GitHub repository.
	 * @param   string   $repo   The name of the GitHub repository.
	 * @param   integer  $id     The id of the key.
	 *
	 * @since   12.4
	 *
	 * @return  boolean
	 */
	public function delete($owner, $repo, $id)
	{
		// Build the request path.
		$path = '/repos/' . $owner . '/' . $repo . '/keys/' . (int) $id;

		// A 204 No Content status is expected; processResponse throws otherwise.
		$this->processResponse(
			$this->client->delete($this->fetchUrl($path)),
			204
		);

		return true;
	}
}
| gpl-2.0 |
stardog-union/stardog-graviton | vendor/github.com/mitchellh/packer/vendor/github.com/mitchellh/go-vnc/client.go | 11375 | // Package vnc implements a VNC client.
//
// References:
// [PROTOCOL]: http://tools.ietf.org/html/rfc6143
package vnc
import (
"bytes"
"encoding/binary"
"fmt"
"io"
"net"
"unicode"
)
// ClientConn is an established client connection to a VNC server,
// created via the Client function. It carries the negotiated session
// state and provides methods for sending client-to-server messages.
type ClientConn struct {
	c      net.Conn      // underlying transport to the VNC server
	config *ClientConfig // configuration supplied when connecting

	// If the pixel format uses a color map, then this is the color
	// map that is used. This should not be modified directly, since
	// the data comes from the server.
	ColorMap [256]Color

	// Encodings supported by the client. This should not be modified
	// directly. Instead, SetEncodings should be used.
	Encs []Encoding

	// Width of the frame buffer in pixels, sent from the server.
	FrameBufferWidth uint16

	// Height of the frame buffer in pixels, sent from the server.
	FrameBufferHeight uint16

	// Name associated with the desktop, sent from the server.
	DesktopName string

	// The pixel format associated with the connection. This shouldn't
	// be modified. If you wish to set a new pixel format, use the
	// SetPixelFormat method.
	PixelFormat PixelFormat
}
// A ClientConfig structure is used to configure a ClientConn. After
// one has been passed to initialize a connection, it must not be modified.
type ClientConfig struct {
	// A slice of ClientAuth methods. Only the first instance that is
	// suitable by the server will be used to authenticate.
	Auth []ClientAuth

	// Exclusive determines whether the connection is shared with other
	// clients. If true, then all other clients connected will be
	// disconnected when a connection is established to the VNC server.
	Exclusive bool

	// The channel that all messages received from the server will be
	// sent on. If the channel blocks, then the goroutine reading data
	// from the VNC server may block indefinitely. It is up to the user
	// of the library to ensure that this channel is properly read.
	// If this is not set, then all messages will be discarded.
	ServerMessageCh chan<- ServerMessage

	// A slice of supported messages that can be read from the server.
	// This only needs to contain NEW server messages, and doesn't
	// need to explicitly contain the RFC-required messages.
	ServerMessages []ServerMessage
}
// Client performs the VNC handshake over the given transport and, on
// success, starts the message-reading goroutine and returns the
// established connection. On handshake failure the transport is closed.
func Client(c net.Conn, cfg *ClientConfig) (*ClientConn, error) {
	conn := &ClientConn{c: c, config: cfg}

	if err := conn.handshake(); err != nil {
		conn.Close()
		return nil, err
	}

	go conn.mainLoop()

	return conn, nil
}
// Close shuts down the underlying network connection to the VNC server.
func (c *ClientConn) Close() error {
	return c.c.Close()
}
// CutText tells the server that the client has new text in its cut buffer.
// The text string MUST only contain Latin-1 characters. This encoding
// is compatible with Go's native string format, but can only use up to
// unicode.MaxLatin1 values.
//
// See RFC 6143 Section 7.5.6
func (c *ClientConn) CutText(text string) error {
	// Encode (and validate) the text as Latin-1 first, so the length
	// field can use the true on-the-wire byte count. Note len(text)
	// would count UTF-8 bytes, which exceeds the Latin-1 length for any
	// character above 0x7F.
	var encoded bytes.Buffer
	for _, char := range text {
		if char > unicode.MaxLatin1 {
			// %c renders the rune itself; %s on a rune would print
			// "%!s(int32=...)".
			return fmt.Errorf("Character '%c' is not valid Latin-1", char)
		}
		encoded.WriteByte(byte(char))
	}

	var buf bytes.Buffer

	// Fixed-size header: message type 6, three bytes of padding, then
	// the text length in bytes.
	fixedData := []interface{}{
		uint8(6),
		uint8(0),
		uint8(0),
		uint8(0),
		uint32(encoded.Len()),
	}

	for _, val := range fixedData {
		if err := binary.Write(&buf, binary.BigEndian, val); err != nil {
			return err
		}
	}

	buf.Write(encoded.Bytes())

	// Write exactly the bytes accumulated; slicing by 8+len(text) would
	// over-read for non-ASCII Latin-1 text.
	if _, err := c.c.Write(buf.Bytes()); err != nil {
		return err
	}

	return nil
}
// FramebufferUpdateRequest asks the server for a framebuffer update of
// the given region. There may be an indefinite time between the request
// and the actual framebuffer update being received.
//
// See RFC 6143 Section 7.5.3
func (c *ClientConn) FramebufferUpdateRequest(incremental bool, x, y, width, height uint16) error {
	var inc uint8
	if incremental {
		inc = 1
	}

	// Message layout: type 3, incremental flag, then x/y/width/height
	// as big-endian uint16s — 10 bytes total.
	fields := []interface{}{
		uint8(3),
		inc,
		x, y, width, height,
	}

	var buf bytes.Buffer
	for _, field := range fields {
		if err := binary.Write(&buf, binary.BigEndian, field); err != nil {
			return err
		}
	}

	_, err := c.c.Write(buf.Bytes()[0:10])
	return err
}
// KeyEvent indicates a key press or release and sends it to the server.
// The key is indicated using the X Window System "keysym" value. To
// simulate a key press, you must send a key with both a down event and
// a non-down event.
//
// See RFC 6143 Section 7.5.4
func (c *ClientConn) KeyEvent(keysym uint32, down bool) error {
	var downFlag uint8
	if down {
		downFlag = 1
	}

	// Message layout: type 4, down flag, two padding bytes, keysym.
	fields := []interface{}{
		uint8(4),
		downFlag,
		uint8(0),
		uint8(0),
		keysym,
	}

	// Fixed-width fields only, so they can be streamed straight to the
	// connection.
	for _, field := range fields {
		if err := binary.Write(c.c, binary.BigEndian, field); err != nil {
			return err
		}
	}

	return nil
}
// PointerEvent indicates pointer movement or a pointer button press or
// release.
//
// The mask is a bitwise mask of various ButtonMask values. When a button
// is set, it is pressed, when it is unset, it is released.
//
// See RFC 6143 Section 7.5.5
func (c *ClientConn) PointerEvent(mask ButtonMask, x, y uint16) error {
	// Message layout: type 5, button mask, then x/y as big-endian
	// uint16s — 6 bytes total.
	fields := []interface{}{
		uint8(5),
		uint8(mask),
		x,
		y,
	}

	var buf bytes.Buffer
	for _, field := range fields {
		if err := binary.Write(&buf, binary.BigEndian, field); err != nil {
			return err
		}
	}

	_, err := c.c.Write(buf.Bytes()[0:6])
	return err
}
// SetEncodings sets the encoding types in which the pixel data can
// be sent from the server. After calling this method, the encs slice
// given should not be modified.
//
// See RFC 6143 Section 7.5.2
func (c *ClientConn) SetEncodings(encs []Encoding) error {
	// Message layout: type 2, one padding byte, encoding count, then
	// one int32 per encoding type.
	fields := make([]interface{}, 0, 3+len(encs))
	fields = append(fields, uint8(2), uint8(0), uint16(len(encs)))
	for _, enc := range encs {
		fields = append(fields, int32(enc.Type()))
	}

	var buf bytes.Buffer
	for _, field := range fields {
		if err := binary.Write(&buf, binary.BigEndian, field); err != nil {
			return err
		}
	}

	msgLength := 4 + (4 * len(encs))
	if _, err := c.c.Write(buf.Bytes()[0:msgLength]); err != nil {
		return err
	}

	// Remember what the client advertised so decoders can be looked up.
	c.Encs = encs

	return nil
}
// SetPixelFormat sets the format in which pixel values should be sent
// in FramebufferUpdate messages from the server.
//
// See RFC 6143 Section 7.5.1
func (c *ClientConn) SetPixelFormat(format *PixelFormat) error {
	// Serialize the pixel format first; nothing is written on failure.
	pfBytes, err := writePixelFormat(format)
	if err != nil {
		return err
	}
	// Message layout: type (1, value 0) | padding (3) | pixel format (16)
	var msg [20]byte
	msg[0] = 0
	copy(msg[4:], pfBytes)
	if _, err := c.c.Write(msg[:]); err != nil {
		return err
	}
	// Per the RFC, changing the pixel format resets the color map.
	c.ColorMap = [256]Color{}
	return nil
}
const pvLen = 12 // ProtocolVersion message length.

// parseProtocolVersion extracts the major and minor protocol version from
// a raw RFB ProtocolVersion message of the form "RFB xxx.yyy\n".
// It returns an error if the message is shorter than pvLen bytes or does
// not match the expected format.
//
// See RFC 6143 Section 7.1.1
func parseProtocolVersion(pv []byte) (uint, uint, error) {
	var major, minor uint
	if len(pv) < pvLen {
		return 0, 0, fmt.Errorf("ProtocolVersion message too short (%v < %v)", len(pv), pvLen)
	}
	l, err := fmt.Sscanf(string(pv), "RFB %d.%d\n", &major, &minor)
	// Check err before the item count so the underlying scan error (which is
	// more specific) is surfaced instead of being masked by the generic one.
	if err != nil {
		return 0, 0, err
	}
	if l != 2 {
		// Go convention: error strings are not capitalized sentences and
		// carry no trailing punctuation.
		return 0, 0, fmt.Errorf("error parsing ProtocolVersion")
	}
	return major, minor, nil
}
// handshake performs the client side of the RFB protocol handshake:
// version negotiation (7.1.1), the security handshake (7.1.2/7.1.3), and
// ClientInit/ServerInit (7.3.1/7.3.2). On success it populates
// c.FrameBufferWidth, c.FrameBufferHeight, c.PixelFormat and c.DesktopName
// from the ServerInit message. Only protocol version 3.8+ is accepted, and
// authentication is chosen as the first entry of c.config.Auth that the
// server also advertises (falling back to ClientAuthNone when c.config.Auth
// is nil).
func (c *ClientConn) handshake() error {
	var protocolVersion [pvLen]byte

	// 7.1.1, read the ProtocolVersion message sent by the server.
	if _, err := io.ReadFull(c.c, protocolVersion[:]); err != nil {
		return err
	}

	maxMajor, maxMinor, err := parseProtocolVersion(protocolVersion[:])
	if err != nil {
		return err
	}
	if maxMajor < 3 {
		return fmt.Errorf("unsupported major version, less than 3: %d", maxMajor)
	}
	if maxMinor < 8 {
		return fmt.Errorf("unsupported minor version, less than 8: %d", maxMinor)
	}

	// Respond with the version we will support
	if _, err = c.c.Write([]byte("RFB 003.008\n")); err != nil {
		return err
	}

	// 7.1.2 Security Handshake from server: the server sends the number of
	// security types it supports, then that many type bytes.
	var numSecurityTypes uint8
	if err = binary.Read(c.c, binary.BigEndian, &numSecurityTypes); err != nil {
		return err
	}
	// A count of zero means the server is refusing the connection and will
	// follow up with a reason string.
	if numSecurityTypes == 0 {
		return fmt.Errorf("no security types: %s", c.readErrorReason())
	}
	securityTypes := make([]uint8, numSecurityTypes)
	if err = binary.Read(c.c, binary.BigEndian, &securityTypes); err != nil {
		return err
	}

	clientSecurityTypes := c.config.Auth
	if clientSecurityTypes == nil {
		clientSecurityTypes = []ClientAuth{new(ClientAuthNone)}
	}

	// Pick the first client-configured auth scheme the server also offers;
	// client preference order wins over server order.
	var auth ClientAuth
FindAuth:
	for _, curAuth := range clientSecurityTypes {
		for _, securityType := range securityTypes {
			if curAuth.SecurityType() == securityType {
				// We use the first matching supported authentication
				auth = curAuth
				break FindAuth
			}
		}
	}
	if auth == nil {
		return fmt.Errorf("no suitable auth schemes found. server supported: %#v", securityTypes)
	}

	// Respond back with the security type we'll use
	if err = binary.Write(c.c, binary.BigEndian, auth.SecurityType()); err != nil {
		return err
	}
	// Delegate the scheme-specific exchange (e.g. VNC challenge/response)
	// to the chosen ClientAuth implementation.
	if err = auth.Handshake(c.c); err != nil {
		return err
	}

	// 7.1.3 SecurityResult Handshake: 0 = OK, 1 = failed (reason follows).
	var securityResult uint32
	if err = binary.Read(c.c, binary.BigEndian, &securityResult); err != nil {
		return err
	}
	if securityResult == 1 {
		return fmt.Errorf("security handshake failed: %s", c.readErrorReason())
	}

	// 7.3.1 ClientInit: shared-flag of 1 asks the server to leave other
	// clients connected; Exclusive requests they be disconnected.
	var sharedFlag uint8 = 1
	if c.config.Exclusive {
		sharedFlag = 0
	}
	if err = binary.Write(c.c, binary.BigEndian, sharedFlag); err != nil {
		return err
	}

	// 7.3.2 ServerInit: framebuffer geometry, pixel format, and desktop name.
	if err = binary.Read(c.c, binary.BigEndian, &c.FrameBufferWidth); err != nil {
		return err
	}
	if err = binary.Read(c.c, binary.BigEndian, &c.FrameBufferHeight); err != nil {
		return err
	}

	// Read the pixel format
	if err = readPixelFormat(c.c, &c.PixelFormat); err != nil {
		return err
	}

	// The desktop name is a length-prefixed byte string.
	var nameLength uint32
	if err = binary.Read(c.c, binary.BigEndian, &nameLength); err != nil {
		return err
	}
	nameBytes := make([]uint8, nameLength)
	if err = binary.Read(c.c, binary.BigEndian, &nameBytes); err != nil {
		return err
	}
	c.DesktopName = string(nameBytes)

	return nil
}
// mainLoop reads messages sent from the server and routes them to the
// proper channels for users of the client to read.
//
// The loop terminates — closing the connection via the deferred Close —
// when a read fails, when the server sends a message type that is neither
// a default message nor one registered in c.config.ServerMessages, or when
// a message fails to parse. Parsed messages are delivered to
// c.config.ServerMessageCh when it is set; the send is unbuffered here, so
// a slow receiver stalls the read loop.
func (c *ClientConn) mainLoop() {
	defer c.Close()

	// Build the map of available server messages: the RFC-defined defaults,
	// optionally extended/overridden by user-configured message types.
	typeMap := make(map[uint8]ServerMessage)

	defaultMessages := []ServerMessage{
		new(FramebufferUpdateMessage),
		new(SetColorMapEntriesMessage),
		new(BellMessage),
		new(ServerCutTextMessage),
	}
	for _, msg := range defaultMessages {
		typeMap[msg.Type()] = msg
	}

	if c.config.ServerMessages != nil {
		for _, msg := range c.config.ServerMessages {
			typeMap[msg.Type()] = msg
		}
	}

	for {
		// Each server-to-client message starts with a one-byte type.
		var messageType uint8
		if err := binary.Read(c.c, binary.BigEndian, &messageType); err != nil {
			break
		}

		msg, ok := typeMap[messageType]
		if !ok {
			// Unsupported message type! Bad! We cannot resynchronize the
			// stream without knowing the message length, so give up.
			break
		}

		parsedMsg, err := msg.Read(c, c.c)
		if err != nil {
			break
		}

		if c.config.ServerMessageCh == nil {
			continue
		}

		c.config.ServerMessageCh <- parsedMsg
	}
}
// readErrorReason reads a length-prefixed failure-reason string from the
// server (sent after a refused connection or failed security handshake).
// If the reason cannot be read, the placeholder "<error>" is returned so
// callers can still format an error message.
func (c *ClientConn) readErrorReason() string {
	var length uint32
	if err := binary.Read(c.c, binary.BigEndian, &length); err != nil {
		return "<error>"
	}

	reasonBytes := make([]uint8, length)
	if err := binary.Read(c.c, binary.BigEndian, &reasonBytes); err != nil {
		return "<error>"
	}

	return string(reasonBytes)
}
| apache-2.0 |
lukeadams/homebrew-cask | Casks/qlmarkdown.rb | 497 | cask 'qlmarkdown' do
  version '1.3.5'
  sha256 'a290edf5b6124cbd4e526217e0979a9011c8ef3b964a33458f5063d51a9b15f2'

  # Versioned release artifact; #{version} is interpolated by the cask DSL.
  url "https://github.com/toland/qlmarkdown/releases/download/v#{version}/QLMarkdown.qlgenerator.zip"
  # The checkpoint pins the appcast feed contents for stale-cask detection.
  appcast 'https://github.com/toland/qlmarkdown/releases.atom',
          checkpoint: '90c2dd6a28f257f169c067ad0f8b24b42cf0edd38955bfe0e629215cf8db74f7'
  name 'QLMarkdown'
  homepage 'https://github.com/toland/qlmarkdown'
  license :bsd

  # Installs the bundle as a QuickLook generator plugin.
  qlplugin 'QLMarkdown.qlgenerator'
end
| bsd-2-clause |
iamJoeTaylor/cdnjs | ajax/libs/autocomplete.js/0.0.1/autocomplete.jquery.js | 42317 | /*!
 * autocomplete.js 0.0.1
 * https://github.com/algolia/autocomplete.js
 * Copyright 2015 Algolia, Inc. and other contributors; Licensed MIT
 */
// Browserify loader prelude: `t` maps module id -> [factory, deps], `n` caches
// instantiated modules, `r` lists entry ids. `s(o)` lazily instantiates and
// returns module `o`'s exports, falling back to an ambient `require` if
// present. The bundled module map starts with module 1, the entry point.
(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
'use strict';
// Module 1 (entry point): re-export the jQuery plugin.
module.exports = require('./src/autocomplete/plugin.js');
},{"./src/autocomplete/plugin.js":9}],2:[function(require,module,exports){
'use strict';

// Module 2 (src/autocomplete/css.js): inline styles applied to the generated
// widget DOM so the library works without any external stylesheet.
var _ = require('../common/utils.js');

var css = {
  // Wrapper span placed around the input; positioning context for the menu.
  wrapper: {
    position: 'relative',
    display: 'inline-block'
  },
  // The hint input sits underneath the real input, same position.
  hint: {
    position: 'absolute',
    top: '0',
    left: '0',
    borderColor: 'transparent',
    boxShadow: 'none',
    // #741: fix hint opacity issue on iOS
    opacity: '1'
  },
  // Real input is transparent so the hint shows through behind it.
  input: {
    position: 'relative',
    verticalAlign: 'top',
    backgroundColor: 'transparent'
  },
  inputWithNoHint: {
    position: 'relative',
    verticalAlign: 'top'
  },
  // Dropdown menu anchored below the input; hidden until opened.
  dropdown: {
    position: 'absolute',
    top: '100%',
    left: '0',
    zIndex: '100',
    display: 'none'
  },
  suggestions: {
    display: 'block'
  },
  suggestion: {
    whiteSpace: 'nowrap',
    cursor: 'pointer'
  },
  suggestionChild: {
    whiteSpace: 'normal'
  },
  // Direction-dependent anchoring, selected via setLanguageDirection.
  ltr: {
    left: '0',
    right: 'auto'
  },
  rtl: {
    left: 'auto',
    right: '0'
  }
};

// ie specific styling
if (_.isMsie()) {
  // ie6-8 (and 9?) doesn't fire hover and click events for elements with
  // transparent backgrounds, for a workaround, use 1x1 transparent gif
  _.mixin(css.input, {
    backgroundImage: 'url(data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7)'
  });
}

// ie7 and under specific styling
if (_.isMsie() && _.isMsie() <= 7) {
  // if someone can tell me why this is necessary to align
  // the hint with the query in ie7, i'll send you $5 - @JakeHarding
  _.mixin(css.input, {marginTop: '-1px'});
}

module.exports = css;

},{"../common/utils.js":11}],3:[function(require,module,exports){
'use strict';
/* eslint-env jquery */

// Module 3 (src/autocomplete/dataset.js): a Dataset fetches suggestions from
// a `source` function and renders them into its own container element.
// jQuery .data() keys used to stash per-suggestion info on rendered nodes.
var datasetKey = 'aaDataset';
var valueKey = 'aaValue';
var datumKey = 'aaDatum';

var _ = require('../common/utils.js');
var html = require('./html.js');
var css = require('./css.js');
var EventEmitter = require('./event_emitter.js');

// constructor
// -----------

// o.source (required): function(query, cb) producing suggestions.
// o.name: dataset name (validated); o.display / o.displayKey: how to turn a
// suggestion object into its display string; o.templates: optional
// empty/header/footer/suggestion templates.
function Dataset(o) {
  o = o || {};
  o.templates = o.templates || {};

  if (!o.source) {
    $.error('missing source');
  }

  if (o.name && !isValidName(o.name)) {
    $.error('invalid dataset name: ' + o.name);
  }

  // tracks the last query the dataset was updated for
  this.query = null;

  this.highlight = !!o.highlight;
  this.name = o.name || _.getUniqueId();

  this.source = o.source;
  this.displayFn = getDisplayFn(o.display || o.displayKey);

  this.templates = getTemplates(o.templates, this.displayFn);

  this.$el = $(html.dataset.replace('%CLASS%', this.name));
}

// static methods
// --------------

// Read back the dataset name / display value / raw datum previously stored
// on a rendered suggestion node.
Dataset.extractDatasetName = function extractDatasetName(el) {
  return $(el).data(datasetKey);
};

Dataset.extractValue = function extractDatum(el) {
  return $(el).data(valueKey);
};

Dataset.extractDatum = function extractDatum(el) {
  return $(el).data(datumKey);
};

// instance methods
// ----------------

_.mixin(Dataset.prototype, EventEmitter, {

  // ### private

  // Render `suggestions` for `query` into this.$el; extra arguments are
  // forwarded to the templates. Emits 'rendered' when done.
  _render: function render(query, suggestions) {
    if (!this.$el) { return; }

    var that = this;
    var hasSuggestions;
    var args = [].slice.call(arguments, 2);

    this.$el.empty();
    hasSuggestions = suggestions && suggestions.length;

    if (!hasSuggestions && this.templates.empty) {
      this.$el
        .html(getEmptyHtml.apply(this, args))
        .prepend(that.templates.header ? getHeaderHtml.apply(this, args) : null)
        .append(that.templates.footer ? getFooterHtml.apply(this, args) : null);
    } else if (hasSuggestions) {
      this.$el
        .html(getSuggestionsHtml.apply(this, args))
        .prepend(that.templates.header ? getHeaderHtml.apply(this, args) : null)
        .append(that.templates.footer ? getFooterHtml.apply(this, args) : null);
    }

    this.trigger('rendered');

    function getEmptyHtml() {
      var args = [].slice.call(arguments, 0);
      args = [{ query: query, isEmpty: true }].concat(args);
      return that.templates.empty.apply(this, args);
    }

    function getSuggestionsHtml() {
      var args = [].slice.call(arguments, 0);
      var $suggestions;
      var nodes;

      $suggestions = $(html.suggestions).css(css.suggestions);

      // jQuery#append doesn't support arrays as the first argument
      // until version 1.8, see http://bugs.jquery.com/ticket/11231
      nodes = _.map(suggestions, getSuggestionNode);
      $suggestions.append.apply($suggestions, nodes);

      return $suggestions;

      // Build one suggestion node, stashing name/value/datum on it for the
      // static extract* helpers above.
      function getSuggestionNode(suggestion) {
        var $el;

        $el = $(html.suggestion)
          .append(that.templates.suggestion.apply(this, [suggestion].concat(args)))
          .data(datasetKey, that.name)
          .data(valueKey, that.displayFn(suggestion))
          .data(datumKey, suggestion);

        $el.children().each(function() { $(this).css(css.suggestionChild); });

        return $el;
      }
    }

    function getHeaderHtml() {
      var args = [].slice.call(arguments, 0);
      args = [{ query: query, isEmpty: !hasSuggestions }].concat(args);
      return that.templates.header.apply(this, args);
    }

    function getFooterHtml() {
      var args = [].slice.call(arguments, 0);
      args = [{ query: query, isEmpty: !hasSuggestions }].concat(args);
      return that.templates.footer.apply(this, args);
    }
  },

  // ### public

  getRoot: function getRoot() {
    return this.$el;
  },

  // Ask the source for suggestions matching `query` and render them.
  update: function update(query) {
    var that = this;

    this.query = query;
    this.canceled = false;
    this.source(query, render);

    function render(suggestions) {
      // if the update has been canceled or if the query has changed
      // do not render the suggestions as they've become outdated
      if (!that.canceled && query === that.query) {
        // concat all the other arguments that could have been passed
        // to the render function, and forward them to _render
        var args = [].slice.call(arguments, 1);
        args = [query, suggestions].concat(args);
        that._render.apply(that, args);
      }
    }
  },

  // Mark the in-flight update stale so its results are dropped on arrival.
  cancel: function cancel() {
    this.canceled = true;
  },

  clear: function clear() {
    this.cancel();
    this.$el.empty();
    this.trigger('rendered');
  },

  isEmpty: function isEmpty() {
    return this.$el.is(':empty');
  },

  destroy: function destroy() {
    this.$el = null;
  }
});

// helper functions
// ----------------

// Normalize `display` into a function mapping a suggestion to its string
// value; a string is treated as a property name (default: 'value').
function getDisplayFn(display) {
  display = display || 'value';

  return _.isFunction(display) ? display : displayFn;

  function displayFn(obj) { return obj[display]; }
}

// Wrap user templates; the default suggestion template just shows displayFn.
function getTemplates(templates, displayFn) {
  return {
    empty: templates.empty && _.templatify(templates.empty),
    header: templates.header && _.templatify(templates.header),
    footer: templates.footer && _.templatify(templates.footer),
    suggestion: templates.suggestion || suggestionTemplate
  };

  function suggestionTemplate(context) {
    return '<p>' + displayFn(context) + '</p>';
  }
}

function isValidName(str) {
  // dashes, underscores, letters, and numbers
  return (/^[_a-zA-Z0-9-]+$/).test(str);
}

module.exports = Dataset;

},{"../common/utils.js":11,"./css.js":2,"./event_emitter.js":6,"./html.js":7}],4:[function(require,module,exports){
'use strict';
/* eslint-env jquery */

// Module 4 (src/autocomplete/dropdown.js): the Dropdown owns the menu
// element, hosts one or more Datasets, and tracks the highlighted
// ("cursor") suggestion.
var _ = require('../common/utils.js');
var EventEmitter = require('./event_emitter.js');
var Dataset = require('./dataset.js');
var css = require('./css.js');

// constructor
// -----------

// o.menu (required): the menu element; o.datasets: dataset option objects.
function Dropdown(o) {
  var that = this;
  var onSuggestionClick;
  var onSuggestionMouseEnter;
  var onSuggestionMouseLeave;

  o = o || {};

  if (!o.menu) {
    $.error('menu is required');
  }

  this.isOpen = false;
  this.isEmpty = true;

  this.datasets = _.map(o.datasets, initializeDataset);

  // bound functions
  onSuggestionClick = _.bind(this._onSuggestionClick, this);
  onSuggestionMouseEnter = _.bind(this._onSuggestionMouseEnter, this);
  onSuggestionMouseLeave = _.bind(this._onSuggestionMouseLeave, this);

  // Delegated events under the '.aa' namespace so destroy() can unbind all.
  this.$menu = $(o.menu)
    .on('click.aa', '.aa-suggestion', onSuggestionClick)
    .on('mouseenter.aa', '.aa-suggestion', onSuggestionMouseEnter)
    .on('mouseleave.aa', '.aa-suggestion', onSuggestionMouseLeave);

  _.each(this.datasets, function(dataset) {
    that.$menu.append(dataset.getRoot());
    dataset.onSync('rendered', that._onRendered, that);
  });
}

// instance methods
// ----------------

_.mixin(Dropdown.prototype, EventEmitter, {

  // ### private

  _onSuggestionClick: function onSuggestionClick($e) {
    this.trigger('suggestionClicked', $($e.currentTarget));
  },

  // Hovering moves the cursor silently (no 'cursorMoved' event).
  _onSuggestionMouseEnter: function onSuggestionMouseEnter($e) {
    this._removeCursor();
    this._setCursor($($e.currentTarget), true);
  },

  _onSuggestionMouseLeave: function onSuggestionMouseLeave() {
    this._removeCursor();
  },

  // Fired after any dataset re-renders: recompute emptiness and show/hide.
  _onRendered: function onRendered() {
    this.isEmpty = _.every(this.datasets, isDatasetEmpty);

    if (this.isEmpty) {
      this._hide();
    } else if (this.isOpen) {
      this._show();
    }

    this.trigger('datasetRendered');

    function isDatasetEmpty(dataset) { return dataset.isEmpty(); }
  },

  _hide: function() {
    this.$menu.hide();
  },

  _show: function() {
    // can't use jQuery#show because $menu is a span element we want
    // display: block; not dislay: inline;
    this.$menu.css('display', 'block');
  },

  _getSuggestions: function getSuggestions() {
    return this.$menu.find('.aa-suggestion');
  },

  _getCursor: function getCursor() {
    return this.$menu.find('.aa-cursor').first();
  },

  _setCursor: function setCursor($el, silent) {
    $el.first().addClass('aa-cursor');
    if (!silent) {
      this.trigger('cursorMoved');
    }
  },

  _removeCursor: function removeCursor() {
    this._getCursor().removeClass('aa-cursor');
  },

  // Move the cursor by `increment` (+1/-1), wrapping through a "no cursor"
  // state at index -1 so up from the first item clears the highlight.
  _moveCursor: function moveCursor(increment) {
    var $suggestions;
    var $oldCursor;
    var newCursorIndex;
    var $newCursor;

    if (!this.isOpen) { return; }

    $oldCursor = this._getCursor();
    $suggestions = this._getSuggestions();

    this._removeCursor();

    // shifting before and after modulo to deal with -1 index
    newCursorIndex = $suggestions.index($oldCursor) + increment;
    newCursorIndex = (newCursorIndex + 1) % ($suggestions.length + 1) - 1;

    if (newCursorIndex === -1) {
      this.trigger('cursorRemoved');
      return;
    } else if (newCursorIndex < -1) {
      newCursorIndex = $suggestions.length - 1;
    }

    this._setCursor($newCursor = $suggestions.eq(newCursorIndex));

    // in the case of scrollable overflow
    // make sure the cursor is visible in the menu
    this._ensureVisible($newCursor);
  },

  // Scroll the menu so $el is fully inside the visible area.
  _ensureVisible: function ensureVisible($el) {
    var elTop;
    var elBottom;
    var menuScrollTop;
    var menuHeight;

    elTop = $el.position().top;
    elBottom = elTop + $el.outerHeight(true);
    menuScrollTop = this.$menu.scrollTop();
    menuHeight = this.$menu.height() +
      parseInt(this.$menu.css('paddingTop'), 10) +
      parseInt(this.$menu.css('paddingBottom'), 10);

    if (elTop < 0) {
      this.$menu.scrollTop(menuScrollTop + elTop);
    } else if (menuHeight < elBottom) {
      this.$menu.scrollTop(menuScrollTop + (elBottom - menuHeight));
    }
  },

  // ### public

  close: function close() {
    if (this.isOpen) {
      this.isOpen = false;

      this._removeCursor();
      this._hide();

      this.trigger('closed');
    }
  },

  open: function open() {
    if (!this.isOpen) {
      this.isOpen = true;

      if (!this.isEmpty) {
        this._show();
      }

      this.trigger('opened');
    }
  },

  setLanguageDirection: function setLanguageDirection(dir) {
    this.$menu.css(dir === 'ltr' ? css.ltr : css.rtl);
  },

  moveCursorUp: function moveCursorUp() {
    this._moveCursor(-1);
  },

  moveCursorDown: function moveCursorDown() {
    this._moveCursor(+1);
  },

  // Recover the {raw, value, datasetName} triple stored on a suggestion
  // node at render time; null when $el is empty.
  getDatumForSuggestion: function getDatumForSuggestion($el) {
    var datum = null;

    if ($el.length) {
      datum = {
        raw: Dataset.extractDatum($el),
        value: Dataset.extractValue($el),
        datasetName: Dataset.extractDatasetName($el)
      };
    }

    return datum;
  },

  getDatumForCursor: function getDatumForCursor() {
    return this.getDatumForSuggestion(this._getCursor().first());
  },

  getDatumForTopSuggestion: function getDatumForTopSuggestion() {
    return this.getDatumForSuggestion(this._getSuggestions().first());
  },

  update: function update(query) {
    _.each(this.datasets, updateDataset);

    function updateDataset(dataset) { dataset.update(query); }
  },

  empty: function empty() {
    _.each(this.datasets, clearDataset);
    this.isEmpty = true;

    function clearDataset(dataset) { dataset.clear(); }
  },

  isVisible: function isVisible() {
    return this.isOpen && !this.isEmpty;
  },

  destroy: function destroy() {
    this.$menu.off('.aa');

    this.$menu = null;

    _.each(this.datasets, destroyDataset);

    function destroyDataset(dataset) { dataset.destroy(); }
  }
});

// helper functions
// ----------------

Dropdown.Dataset = Dataset;

function initializeDataset(oDataset) {
  return new Dropdown.Dataset(oDataset);
}

module.exports = Dropdown;

},{"../common/utils.js":11,"./css.js":2,"./dataset.js":3,"./event_emitter.js":6}],5:[function(require,module,exports){
'use strict';
/* eslint-env jquery */

// Module 5 (src/autocomplete/event_bus.js): thin wrapper that re-emits
// internal events as jQuery events ('typeahead:<type>') on the input element
// so page code can listen with .on().
var namespace = 'typeahead:';

var _ = require('../common/utils.js');

// constructor
// -----------

// o.el (required): the element jQuery events are triggered on.
function EventBus(o) {
  if (!o || !o.el) {
    $.error('EventBus initialized without el');
  }

  this.$el = $(o.el);
}

// instance methods
// ----------------

_.mixin(EventBus.prototype, {

  // ### public

  // Trigger 'typeahead:<type>' with any extra arguments forwarded.
  trigger: function(type) {
    var args = [].slice.call(arguments, 1);

    this.$el.trigger(namespace + type, args);
  }
});

module.exports = EventBus;

},{"../common/utils.js":11}],6:[function(require,module,exports){
'use strict';

// Module 6 (src/autocomplete/event_emitter.js): minimal event-emitter mixin
// with separate synchronous and asynchronous callback queues per event type.
// Sync callbacks run immediately on trigger; async callbacks are deferred to
// the next tick, and are skipped entirely if any sync callback returns false.
var splitter = /\s+/;
var nextTick = getNextTick();

module.exports = {
  onSync: onSync,
  onAsync: onAsync,
  off: off,
  trigger: trigger
};

// Register `cb` under each whitespace-separated type in `types`, on the
// queue named by `method` ('sync' or 'async').
function on(method, types, cb, context) {
  var type;

  if (!cb) { return this; }

  types = types.split(splitter);
  cb = context ? bindContext(cb, context) : cb;

  this._callbacks = this._callbacks || {};

  while (type = types.shift()) {
    this._callbacks[type] = this._callbacks[type] || {sync: [], async: []};
    this._callbacks[type][method].push(cb);
  }

  return this;
}

function onAsync(types, cb, context) {
  return on.call(this, 'async', types, cb, context);
}

function onSync(types, cb, context) {
  return on.call(this, 'sync', types, cb, context);
}

// Remove ALL callbacks (sync and async) for each listed type.
function off(types) {
  var type;

  if (!this._callbacks) { return this; }

  types = types.split(splitter);

  while (type = types.shift()) {
    delete this._callbacks[type];
  }

  return this;
}

// Fire each listed type: run the sync queue now; if none of its callbacks
// returned false, schedule the async queue on the next tick.
function trigger(types) {
  var type;
  var callbacks;
  var args;
  var syncFlush;
  var asyncFlush;

  if (!this._callbacks) { return this; }

  types = types.split(splitter);
  args = [].slice.call(arguments, 1);

  while ((type = types.shift()) && (callbacks = this._callbacks[type])) { // eslint-disable-line
    syncFlush = getFlush(callbacks.sync, this, [type].concat(args));
    asyncFlush = getFlush(callbacks.async, this, [type].concat(args));

    if (syncFlush()) {
      nextTick(asyncFlush);
    }
  }

  return this;
}

// Build a closure that invokes `callbacks` in order, stopping early if one
// returns false; the closure returns true when nothing cancelled.
function getFlush(callbacks, context, args) {
  return flush;

  function flush() {
    var cancelled;

    for (var i = 0, len = callbacks.length; !cancelled && i < len; i += 1) {
      // only cancel if the callback explicitly returns false
      cancelled = callbacks[i].apply(context, args) === false;
    }

    return !cancelled;
  }
}

// Pick the best available "run on next tick" primitive.
function getNextTick() {
  var nextTickFn;

  if (window.setImmediate) { // IE10+
    nextTickFn = function nextTickSetImmediate(fn) {
      setImmediate(function() { fn(); });
    };
  } else { // old browsers
    nextTickFn = function nextTickSetTimeout(fn) {
      setTimeout(function() { fn(); }, 0);
    };
  }

  return nextTickFn;
}

// Function#bind fallback for very old browsers.
function bindContext(fn, context) {
  return fn.bind ?
    fn.bind(context) :
    function() { fn.apply(context, [].slice.call(arguments, 0)); };
}

},{}],7:[function(require,module,exports){
'use strict';

// Module 7 (src/autocomplete/html.js): HTML skeletons for the widget's DOM.
// %CLASS% in `dataset` is replaced with the dataset name at construction.
module.exports = {
  wrapper: '<span class="algolia-autocomplete"></span>',
  dropdown: '<span class="aa-dropdown-menu"></span>',
  dataset: '<div class="aa-dataset-%CLASS%"></div>',
  suggestions: '<span class="aa-suggestions"></span>',
  suggestion: '<div class="aa-suggestion"></div>'
};

},{}],8:[function(require,module,exports){
'use strict';
/* eslint-env jquery */

// Module 8 (src/autocomplete/input.js): wraps the real input element (and the
// optional hint input behind it), normalizes keyboard/input events into
// emitter events ('queryChanged', '<key>Keyed', …), and manages the hint.
var specialKeyCodeMap;

// Key codes translated into named emitter events.
specialKeyCodeMap = {
  9: 'tab',
  27: 'esc',
  37: 'left',
  39: 'right',
  13: 'enter',
  38: 'up',
  40: 'down'
};

var _ = require('../common/utils.js');
var EventEmitter = require('./event_emitter.js');

// constructor
// -----------

// o.input (required): the input element; o.hint: the hint element.
function Input(o) {
  var that = this;
  var onBlur;
  var onFocus;
  var onKeydown;
  var onInput;

  o = o || {};

  if (!o.input) {
    $.error('input is missing');
  }

  // bound functions
  onBlur = _.bind(this._onBlur, this);
  onFocus = _.bind(this._onFocus, this);
  onKeydown = _.bind(this._onKeydown, this);
  onInput = _.bind(this._onInput, this);

  this.$hint = $(o.hint);
  this.$input = $(o.input)
    .on('blur.aa', onBlur)
    .on('focus.aa', onFocus)
    .on('keydown.aa', onKeydown);

  // if no hint, noop all the hint related functions
  if (this.$hint.length === 0) {
    this.setHint =
      this.getHint =
      this.clearHint =
      this.clearHintIfInvalid = _.noop;
  }

  // ie7 and ie8 don't support the input event
  // ie9 doesn't fire the input event when characters are removed
  // not sure if ie10 is compatible
  if (!_.isMsie()) {
    this.$input.on('input.aa', onInput);
  } else {
    this.$input.on('keydown.aa keypress.aa cut.aa paste.aa', function($e) {
      // if a special key triggered this, ignore it
      if (specialKeyCodeMap[$e.which || $e.keyCode]) { return; }

      // give the browser a chance to update the value of the input
      // before checking to see if the query changed
      _.defer(_.bind(that._onInput, that, $e));
    });
  }

  // the query defaults to whatever the value of the input is
  // on initialization, it'll most likely be an empty string
  this.query = this.$input.val();

  // helps with calculating the width of the input's value
  this.$overflowHelper = buildOverflowHelper(this.$input);
}

// static methods
// --------------

Input.normalizeQuery = function(str) {
  // strips leading whitespace and condenses all whitespace
  return (str || '').replace(/^\s*/g, '').replace(/\s{2,}/g, ' ');
};

// instance methods
// ----------------

_.mixin(Input.prototype, EventEmitter, {

  // ### private

  _onBlur: function onBlur() {
    this.resetInputValue();
    this.trigger('blurred');
  },

  _onFocus: function onFocus() {
    this.trigger('focused');
  },

  // Translate special keys into '<name>Keyed' events (e.g. 'enterKeyed').
  _onKeydown: function onKeydown($e) {
    // which is normalized and consistent (but not for ie)
    var keyName = specialKeyCodeMap[$e.which || $e.keyCode];

    this._managePreventDefault(keyName, $e);
    if (keyName && this._shouldTrigger(keyName, $e)) {
      this.trigger(keyName + 'Keyed', $e);
    }
  },

  _onInput: function onInput() {
    this._checkInputValue();
  },

  // Swallow the browser default for tab-completion of a hint and for
  // up/down (which would otherwise move the text caret).
  _managePreventDefault: function managePreventDefault(keyName, $e) {
    var preventDefault;
    var hintValue;
    var inputValue;

    switch (keyName) {
    case 'tab':
      hintValue = this.getHint();
      inputValue = this.getInputValue();

      preventDefault = hintValue &&
        hintValue !== inputValue &&
        !withModifier($e);
      break;

    case 'up':
    case 'down':
      preventDefault = !withModifier($e);
      break;

    default:
      preventDefault = false;
    }

    if (preventDefault) {
      $e.preventDefault();
    }
  },

  _shouldTrigger: function shouldTrigger(keyName, $e) {
    var trigger;

    switch (keyName) {
    case 'tab':
      trigger = !withModifier($e);
      break;

    default:
      trigger = true;
    }

    return trigger;
  },

  // Compare the current input value with the last known query and emit
  // 'queryChanged' (semantic change) or 'whitespaceChanged' (only spacing
  // changed) accordingly.
  _checkInputValue: function checkInputValue() {
    var inputValue;
    var areEquivalent;
    var hasDifferentWhitespace;

    inputValue = this.getInputValue();
    areEquivalent = areQueriesEquivalent(inputValue, this.query);
    hasDifferentWhitespace = areEquivalent ?
      this.query.length !== inputValue.length : false;

    this.query = inputValue;

    if (!areEquivalent) {
      this.trigger('queryChanged', this.query);
    } else if (hasDifferentWhitespace) {
      this.trigger('whitespaceChanged', this.query);
    }
  },

  // ### public

  focus: function focus() {
    this.$input.focus();
  },

  blur: function blur() {
    this.$input.blur();
  },

  getQuery: function getQuery() {
    return this.query;
  },

  setQuery: function setQuery(query) {
    this.query = query;
  },

  getInputValue: function getInputValue() {
    return this.$input.val();
  },

  setInputValue: function setInputValue(value, silent) {
    this.$input.val(value);

    // silent prevents any additional events from being triggered
    if (silent) {
      this.clearHint();
    } else {
      this._checkInputValue();
    }
  },

  resetInputValue: function resetInputValue() {
    this.setInputValue(this.query, true);
  },

  getHint: function getHint() {
    return this.$hint.val();
  },

  setHint: function setHint(value) {
    this.$hint.val(value);
  },

  clearHint: function clearHint() {
    this.setHint('');
  },

  // Drop the hint unless the current value is a strict prefix of it and the
  // text does not overflow the visible input width.
  clearHintIfInvalid: function clearHintIfInvalid() {
    var val;
    var hint;
    var valIsPrefixOfHint;
    var isValid;

    val = this.getInputValue();
    hint = this.getHint();
    valIsPrefixOfHint = val !== hint && hint.indexOf(val) === 0;
    isValid = val !== '' && valIsPrefixOfHint && !this.hasOverflow();

    if (!isValid) {
      this.clearHint();
    }
  },

  getLanguageDirection: function getLanguageDirection() {
    return (this.$input.css('direction') || 'ltr').toLowerCase();
  },

  // True when the rendered value is (nearly) wider than the input box.
  hasOverflow: function hasOverflow() {
    // 2 is arbitrary, just picking a small number to handle edge cases
    var constraint = this.$input.width() - 2;

    this.$overflowHelper.text(this.getInputValue());

    return this.$overflowHelper.width() >= constraint;
  },

  isCursorAtEnd: function() {
    var valueLength;
    var selectionStart;
    var range;

    valueLength = this.$input.val().length;
    selectionStart = this.$input[0].selectionStart;

    if (_.isNumber(selectionStart)) {
      return selectionStart === valueLength;
    } else if (document.selection) {
      // NOTE: this won't work unless the input has focus, the good news
      // is this code should only get called when the input has focus
      range = document.selection.createRange();
      range.moveStart('character', -valueLength);

      return valueLength === range.text.length;
    }

    return true;
  },

  destroy: function destroy() {
    this.$hint.off('.aa');
    this.$input.off('.aa');

    this.$hint = this.$input = this.$overflowHelper = null;
  }
});

// helper functions
// ----------------

// Off-screen <pre> that mirrors the input's font metrics so the rendered
// width of the current value can be measured (used by hasOverflow).
function buildOverflowHelper($input) {
  return $('<pre aria-hidden="true"></pre>')
    .css({
      // position helper off-screen
      position: 'absolute',
      visibility: 'hidden',
      // avoid line breaks and whitespace collapsing
      whiteSpace: 'pre',
      // use same font css as input to calculate accurate width
      fontFamily: $input.css('font-family'),
      fontSize: $input.css('font-size'),
      fontStyle: $input.css('font-style'),
      fontVariant: $input.css('font-variant'),
      fontWeight: $input.css('font-weight'),
      wordSpacing: $input.css('word-spacing'),
      letterSpacing: $input.css('letter-spacing'),
      textIndent: $input.css('text-indent'),
      textRendering: $input.css('text-rendering'),
      textTransform: $input.css('text-transform')
    })
    .insertAfter($input);
}

function areQueriesEquivalent(a, b) {
  return Input.normalizeQuery(a) === Input.normalizeQuery(b);
}

function withModifier($e) {
  return $e.altKey || $e.ctrlKey || $e.metaKey || $e.shiftKey;
}

module.exports = Input;

},{"../common/utils.js":11,"./event_emitter.js":6}],9:[function(require,module,exports){
'use strict';
/* eslint-env jquery */

// Module 9 (src/autocomplete/plugin.js): registers the $.fn.autocomplete
// jQuery plugin. Each matched input gets a Typeahead instance stored under
// the 'aaAutocomplete' data key; subsequent calls dispatch by method name.
var attrsKey = 'aaAttrs';

var _ = require('../common/utils.js');
var Typeahead = require('./typeahead.js');
var EventBus = require('./event_bus.js');

var old;
var typeaheadKey;
var methods;

// Saved so noConflict() can restore whatever previously owned the name.
old = $.fn.typeahead;

typeaheadKey = 'aaAutocomplete';

methods = {
  // supported signatures:
  //  function(o, dataset, dataset, ...)
  //  function(o, [dataset, dataset, ...])
  initialize: function initialize(o, datasets) {
    datasets = _.isArray(datasets) ? datasets : [].slice.call(arguments, 1);

    o = o || {};

    return this.each(attach);

    function attach() {
      var $input = $(this);
      var eventBus = new EventBus({el: $input});
      var typeahead;

      typeahead = new Typeahead({
        input: $input,
        eventBus: eventBus,
        withHint: _.isUndefined(o.hint) ? true : !!o.hint,
        minLength: o.minLength,
        autoselect: o.autoselect,
        datasets: datasets
      });

      $input.data(typeaheadKey, typeahead);
    }
  },

  open: function open() {
    return this.each(openTypeahead);

    function openTypeahead() {
      var $input = $(this);
      var typeahead;

      if (typeahead = $input.data(typeaheadKey)) {
        typeahead.open();
      }
    }
  },

  close: function close() {
    return this.each(closeTypeahead);

    function closeTypeahead() {
      var $input = $(this);
      var typeahead;

      if (typeahead = $input.data(typeaheadKey)) {
        typeahead.close();
      }
    }
  },

  val: function val(newVal) {
    // mirror jQuery#val functionality: reads operate on first match,
    // write operates on all matches
    return !arguments.length ? getVal(this.first()) : this.each(setVal);

    function setVal() {
      var $input = $(this);
      var typeahead;

      if (typeahead = $input.data(typeaheadKey)) {
        typeahead.setVal(newVal);
      }
    }

    function getVal($input) {
      var typeahead;
      var query;

      if (typeahead = $input.data(typeaheadKey)) {
        query = typeahead.getVal();
      }

      return query;
    }
  },

  destroy: function destroy() {
    return this.each(unattach);

    function unattach() {
      var $input = $(this);
      var typeahead;

      if (typeahead = $input.data(typeaheadKey)) {
        typeahead.destroy();
        $input.removeData(typeaheadKey);
      }
    }
  }
};

// Plugin entry point: dispatch to a named method, or initialize by default.
$.fn.autocomplete = function(method) {
  var tts;

  // methods that should only act on intialized typeaheads
  if (methods[method] && method !== 'initialize') {
    // filter out non-typeahead inputs
    tts = this.filter(function() { return !!$(this).data(typeaheadKey); });
    return methods[method].apply(tts, [].slice.call(arguments, 1));
  }

  return methods.initialize.apply(this, arguments);
};

// Restore the previous $.fn.autocomplete and return the plugin namespace.
$.fn.autocomplete.noConflict = function noConflict() {
  $.fn.autocomplete = old;
  return this;
};

},{"../common/utils.js":11,"./event_bus.js":5,"./typeahead.js":10}],10:[function(require,module,exports){
'use strict';
/* eslint-env jquery */

// Module 10 (src/autocomplete/typeahead.js): the Typeahead glues Input and
// Dropdown together. (Only the constructor is visible in this chunk; the
// prototype mixin continues past the end of this file excerpt.)
var attrsKey = 'aaAttrs';

var _ = require('../common/utils.js');
var EventBus = require('./event_bus.js');
var Input = require('./input.js');
var Dropdown = require('./dropdown.js');
var html = require('./html.js');
var css = require('./css.js');

// constructor
// -----------

// THOUGHT: what if datasets could dynamically be added/removed?
// o.input (required); o.withHint, o.minLength, o.autoselect, o.datasets,
// o.eventBus as configured by the plugin's initialize method.
function Typeahead(o) {
  var $menu;
  var $input;
  var $hint;

  o = o || {};

  if (!o.input) {
    $.error('missing input');
  }

  this.isActivated = false;
  this.autoselect = !!o.autoselect;
  this.minLength = _.isNumber(o.minLength) ? o.minLength : 1;
  // buildDom wraps the input with the widget skeleton (wrapper/menu/hint);
  // it is defined later in this module, past the visible excerpt.
  this.$node = buildDom(o.input, o.withHint);

  $menu = this.$node.find('.aa-dropdown-menu');
  $input = this.$node.find('.aa-input');
  $hint = this.$node.find('.aa-hint');

  // #705: if there's scrollable overflow, ie doesn't support
  // blur cancellations when the scrollbar is clicked
  //
  // #351: preventDefault won't cancel blurs in ie <= 8
  $input.on('blur.aa', function($e) {
    var active;
    var isActive;
    var hasActive;

    active = document.activeElement;
    isActive = $menu.is(active);
    hasActive = $menu.has(active).length > 0;

    if (_.isMsie() && (isActive || hasActive)) {
      $e.preventDefault();
      // stop immediate in order to prevent Input#_onBlur from
      // getting exectued
      $e.stopImmediatePropagation();
      _.defer(function() { $input.focus(); });
    }
  });

  // #351: prevents input blur due to clicks within dropdown menu
  $menu.on('mousedown.aa', function($e) { $e.preventDefault(); });

  this.eventBus = o.eventBus || new EventBus({el: $input});

  // Wire dropdown events (sync except the render notification) into the
  // private handlers defined on the prototype below.
  this.dropdown = new Typeahead.Dropdown({menu: $menu, datasets: o.datasets})
    .onSync('suggestionClicked', this._onSuggestionClicked, this)
    .onSync('cursorMoved', this._onCursorMoved, this)
    .onSync('cursorRemoved', this._onCursorRemoved, this)
    .onSync('opened', this._onOpened, this)
    .onSync('closed', this._onClosed, this)
    .onAsync('datasetRendered', this._onDatasetRendered, this);

  // Wire keyboard/query events from the wrapped input.
  this.input = new Typeahead.Input({input: $input, hint: $hint})
    .onSync('focused', this._onFocused, this)
    .onSync('blurred', this._onBlurred, this)
    .onSync('enterKeyed', this._onEnterKeyed, this)
    .onSync('tabKeyed', this._onTabKeyed, this)
    .onSync('escKeyed', this._onEscKeyed, this)
    .onSync('upKeyed', this._onUpKeyed, this)
    .onSync('downKeyed', this._onDownKeyed, this)
    .onSync('leftKeyed', this._onLeftKeyed, this)
    .onSync('rightKeyed', this._onRightKeyed, this)
    .onSync('queryChanged', this._onQueryChanged, this)
    .onSync('whitespaceChanged', this._onWhitespaceChanged, this);

  this._setLanguageDirection();
}
// instance methods
// ----------------
_.mixin(Typeahead.prototype, {
    // ### private
    _onSuggestionClicked: function onSuggestionClicked(type, $el) {
        var datum;
        // NOTE: assignment inside the condition is intentional
        if (datum = this.dropdown.getDatumForSuggestion($el)) {
            this._select(datum);
        }
    },
    _onCursorMoved: function onCursorMoved() {
        var datum = this.dropdown.getDatumForCursor();
        // silent update (true): do not fire queryChanged while browsing
        this.input.setInputValue(datum.value, true);
        this.eventBus.trigger('cursorchanged', datum.raw, datum.datasetName);
    },
    _onCursorRemoved: function onCursorRemoved() {
        // cursor left the suggestions: restore what the user actually typed
        this.input.resetInputValue();
        this._updateHint();
    },
    _onDatasetRendered: function onDatasetRendered() {
        this._updateHint();
    },
    _onOpened: function onOpened() {
        this._updateHint();
        this.eventBus.trigger('opened');
    },
    _onClosed: function onClosed() {
        this.input.clearHint();
        this.eventBus.trigger('closed');
    },
    _onFocused: function onFocused() {
        this.isActivated = true;
        this.dropdown.open();
    },
    _onBlurred: function onBlurred() {
        this.isActivated = false;
        this.dropdown.empty();
        this.dropdown.close();
    },
    _onEnterKeyed: function onEnterKeyed(type, $e) {
        var cursorDatum;
        var topSuggestionDatum;
        cursorDatum = this.dropdown.getDatumForCursor();
        topSuggestionDatum = this.dropdown.getDatumForTopSuggestion();
        if (cursorDatum) {
            // a suggestion is highlighted: select it instead of submitting
            this._select(cursorDatum);
            $e.preventDefault();
        } else if (this.autoselect && topSuggestionDatum) {
            // no highlight, but autoselect mode picks the first suggestion
            this._select(topSuggestionDatum);
            $e.preventDefault();
        }
    },
    _onTabKeyed: function onTabKeyed(type, $e) {
        var datum;
        if (datum = this.dropdown.getDatumForCursor()) {
            this._select(datum);
            $e.preventDefault();
        } else {
            // no highlighted suggestion: Tab completes the hint instead
            this._autocomplete(true);
        }
    },
    _onEscKeyed: function onEscKeyed() {
        this.dropdown.close();
        this.input.resetInputValue();
    },
    _onUpKeyed: function onUpKeyed() {
        var query = this.input.getQuery();
        // arrow keys on an empty dropdown (re)populate it first
        if (this.dropdown.isEmpty && query.length >= this.minLength) {
            this.dropdown.update(query);
        } else {
            this.dropdown.moveCursorUp();
        }
        this.dropdown.open();
    },
    _onDownKeyed: function onDownKeyed() {
        var query = this.input.getQuery();
        if (this.dropdown.isEmpty && query.length >= this.minLength) {
            this.dropdown.update(query);
        } else {
            this.dropdown.moveCursorDown();
        }
        this.dropdown.open();
    },
    _onLeftKeyed: function onLeftKeyed() {
        // in RTL text the "forward" direction is left
        if (this.dir === 'rtl') {
            this._autocomplete();
        }
    },
    _onRightKeyed: function onRightKeyed() {
        if (this.dir === 'ltr') {
            this._autocomplete();
        }
    },
    _onQueryChanged: function onQueryChanged(e, query) {
        this.input.clearHintIfInvalid();
        if (query.length >= this.minLength) {
            this.dropdown.update(query);
        } else {
            this.dropdown.empty();
        }
        this.dropdown.open();
        this._setLanguageDirection();
    },
    _onWhitespaceChanged: function onWhitespaceChanged() {
        this._updateHint();
        this.dropdown.open();
    },
    _setLanguageDirection: function setLanguageDirection() {
        // keep the widget and dropdown aligned with the input's text direction
        var dir = this.input.getLanguageDirection();
        if (this.dir !== dir) {
            this.dir = dir;
            this.$node.css('direction', dir);
            this.dropdown.setLanguageDirection(dir);
        }
    },
    _updateHint: function updateHint() {
        var datum;
        var val;
        var query;
        var escapedQuery;
        var frontMatchRegEx;
        var match;
        datum = this.dropdown.getDatumForTopSuggestion();
        // only hint when the top suggestion is visible and the input text
        // is fully visible (a scrolled input would misalign the hint)
        if (datum && this.dropdown.isVisible() && !this.input.hasOverflow()) {
            val = this.input.getInputValue();
            query = Input.normalizeQuery(val);
            escapedQuery = _.escapeRegExChars(query);
            // match input value, then capture trailing text
            frontMatchRegEx = new RegExp('^(?:' + escapedQuery + ')(.+$)', 'i');
            match = frontMatchRegEx.exec(datum.value);
            // clear hint if there's no trailing text
            if (match) {
                this.input.setHint(val + match[1]);
            } else {
                this.input.clearHint();
            }
        } else {
            this.input.clearHint();
        }
    },
    _autocomplete: function autocomplete(laxCursor) {
        var hint;
        var query;
        var isCursorAtEnd;
        var datum;
        hint = this.input.getHint();
        query = this.input.getQuery();
        // laxCursor (Tab key) completes even if the caret isn't at the end
        isCursorAtEnd = laxCursor || this.input.isCursorAtEnd();
        if (hint && query !== hint && isCursorAtEnd) {
            datum = this.dropdown.getDatumForTopSuggestion();
            if (datum) {
                this.input.setInputValue(datum.value);
            }
            this.eventBus.trigger('autocompleted', datum.raw, datum.datasetName);
        }
    },
    _select: function select(datum) {
        this.input.setQuery(datum.value);
        this.input.setInputValue(datum.value, true);
        this._setLanguageDirection();
        this.eventBus.trigger('selected', datum.raw, datum.datasetName);
        this.dropdown.close();
        // #118: allow click event to bubble up to the body before removing
        // the suggestions otherwise we break event delegation
        _.defer(_.bind(this.dropdown.empty, this.dropdown));
    },
    // ### public
    open: function open() {
        // if the menu is not activated yet, we need to update
        // the underlying dropdown menu to trigger the search
        // otherwise we're not gonna see anything
        if (!this.isActivated) {
            var query = this.input.getInputValue();
            if (query.length >= this.minLength) {
                this.dropdown.update(query);
            } else {
                this.dropdown.empty();
            }
        }
        this.dropdown.open();
    },
    close: function close() {
        this.dropdown.close();
    },
    setVal: function setVal(val) {
        // expect val to be a string, so be safe, and coerce
        val = _.toStr(val);
        if (this.isActivated) {
            this.input.setInputValue(val);
        } else {
            // not focused: also update the stored query so no change fires
            this.input.setQuery(val);
            this.input.setInputValue(val, true);
        }
        this._setLanguageDirection();
    },
    getVal: function getVal() {
        return this.input.getQuery();
    },
    destroy: function destroy() {
        this.input.destroy();
        this.dropdown.destroy();
        destroyDomStructure(this.$node);
        this.$node = null;
    }
});
// Wraps the raw input element in the widget's DOM scaffolding:
// <wrapper> [hint clone] <input class=aa-input> <dropdown> </wrapper>.
// Returns the wrapper element. Original attribute values are stashed on
// the input (under attrsKey) so destroyDomStructure can restore them.
function buildDom(input, withHint) {
    var $input;
    var $wrapper;
    var $dropdown;
    var $hint;
    $input = $(input);
    $wrapper = $(html.wrapper).css(css.wrapper);
    $dropdown = $(html.dropdown).css(css.dropdown);
    // the hint is a visual clone of the input rendered behind it
    $hint = $input.clone().css(css.hint).css(getBackgroundStyles($input));
    $hint
        .val('')
        .removeData()
        .addClass('aa-hint')
        .removeAttr('id name placeholder required')
        .prop('readonly', true)
        .attr({autocomplete: 'off', spellcheck: 'false', tabindex: -1});
    // store the original values of the attrs that get modified
    // so modifications can be reverted on destroy
    $input.data(attrsKey, {
        dir: $input.attr('dir'),
        autocomplete: $input.attr('autocomplete'),
        spellcheck: $input.attr('spellcheck'),
        style: $input.attr('style')
    });
    $input
        .addClass('aa-input')
        .attr({autocomplete: 'off', spellcheck: false})
        .css(withHint ? css.input : css.inputWithNoHint);
    // ie7 does not like it when dir is set to auto
    try {
        if (!$input.attr('dir')) {
            $input.attr('dir', 'auto');
        }
    } catch (e) {
        // ignore
    }
    return $input
        .wrap($wrapper)
        .parent()
        .prepend(withHint ? $hint : null)
        .append($dropdown);
}
// Collect every background-related CSS property of the element so the
// hint clone can be styled to blend in behind the real input.
function getBackgroundStyles($el) {
    var suffixes = [
        'attachment', 'clip', 'color', 'image',
        'origin', 'position', 'repeat', 'size'
    ];
    var styles = {};
    var i;
    var suffix;
    var camelKey;
    for (i = 0; i < suffixes.length; i++) {
        suffix = suffixes[i];
        // e.g. 'background' + 'Clip' <- css('background-clip')
        camelKey = 'background' + suffix.charAt(0).toUpperCase() + suffix.slice(1);
        styles[camelKey] = $el.css('background-' + suffix);
    }
    return styles;
}
// Undoes buildDom: restores the input's original attributes, moves it back
// out of the wrapper, and removes the wrapper (hint + dropdown) from the DOM.
function destroyDomStructure($node) {
    var $input = $node.find('.aa-input');
    // need to remove attrs that weren't previously defined and
    // revert attrs that originally had a value
    _.each($input.data(attrsKey), function(val, key) {
        if (_.isUndefined(val)) {
            $input.removeAttr(key);
        } else {
            $input.attr(key, val);
        }
    });
    $input
        .detach()
        .removeData(attrsKey)
        .removeClass('aa-input')
        .insertAfter($node);
    $node.remove();
}
// Expose the concrete Dropdown/Input implementations on the constructor so
// integrators (and tests) can override them, then export the Typeahead type.
Typeahead.Dropdown = Dropdown;
Typeahead.Input = Input;
module.exports = Typeahead;
},{"../common/utils.js":11,"./css.js":2,"./dropdown.js":4,"./event_bus.js":5,"./html.js":7,"./input.js":8}],11:[function(require,module,exports){
'use strict';
/* eslint-env jquery */
// Underscore-style utility belt built on top of jQuery primitives.
module.exports = {
    // Returns the IE/Trident version string, or false for other browsers.
    isMsie: function() {
        // from https://github.com/ded/bowser/blob/master/bowser.js
        return (/(msie|trident)/i).test(navigator.userAgent) ?
            navigator.userAgent.match(/(msie |rv:)(\d+(.\d+)?)/i)[2] : false;
    },
    isBlankString: function(str) { return !str || /^\s*$/.test(str); },
    // http://stackoverflow.com/a/6969486
    escapeRegExChars: function(str) {
        return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, '\\$&');
    },
    isString: function(obj) { return typeof obj === 'string'; },
    isNumber: function(obj) { return typeof obj === 'number'; },
    isArray: $.isArray,
    isFunction: $.isFunction,
    isObject: $.isPlainObject,
    isUndefined: function(obj) { return typeof obj === 'undefined'; },
    // Coerces any value to a string; undefined/null become ''.
    toStr: function toStr(s) {
        return this.isUndefined(s) || s === null ? '' : s + '';
    },
    bind: $.proxy,
    // Iterates with callback(value, index) instead of jQuery's (index, value).
    each: function(collection, cb) {
        // stupid argument order for jQuery.each
        $.each(collection, reverseArgs);
        function reverseArgs(index, value) { return cb(value, index); }
    },
    map: $.map,
    filter: $.grep,
    // True when test(val, key, obj) is truthy for every entry;
    // returning false from $.each short-circuits the iteration.
    every: function(obj, test) {
        var result = true;
        if (!obj) { return result; }
        $.each(obj, function(key, val) {
            result = test.call(null, val, key, obj);
            if (!result) {
                return false;
            }
        });
        return !!result;
    },
    // True when test(val, key, obj) is truthy for at least one entry.
    some: function(obj, test) {
        var result = false;
        if (!obj) { return result; }
        $.each(obj, function(key, val) {
            // NOTE: assignment in the condition; truthy result stops $.each
            if (result = test.call(null, val, key, obj)) {
                return false;
            }
        });
        return !!result;
    },
    mixin: $.extend,
    // Monotonically increasing id, unique per page load.
    getUniqueId: (function() {
        var counter = 0;
        return function() { return counter++; };
    })(),
    // Accepts a function or a value; values become constant templates.
    templatify: function templatify(obj) {
        return $.isFunction(obj) ? obj : template;
        function template() { return String(obj); }
    },
    defer: function(fn) { setTimeout(fn, 0); },
    // Underscore-style debounce: trailing call by default, leading-edge
    // call when immediate is set.
    debounce: function(func, wait, immediate) {
        var timeout;
        var result;
        return function() {
            var context = this;
            var args = arguments;
            var later;
            var callNow;
            later = function() {
                timeout = null;
                if (!immediate) { result = func.apply(context, args); }
            };
            callNow = immediate && !timeout;
            clearTimeout(timeout);
            timeout = setTimeout(later, wait);
            if (callNow) { result = func.apply(context, args); }
            return result;
        };
    },
    // Underscore-style throttle: invokes at most once per wait interval,
    // with a trailing invocation scheduled for calls made mid-interval.
    throttle: function(func, wait) {
        var context;
        var args;
        var timeout;
        var result;
        var previous;
        var later;
        previous = 0;
        later = function() {
            previous = new Date();
            timeout = null;
            result = func.apply(context, args);
        };
        return function() {
            var now = new Date();
            var remaining = wait - (now - previous);
            context = this;
            args = arguments;
            if (remaining <= 0) {
                clearTimeout(timeout);
                timeout = null;
                previous = now;
                result = func.apply(context, args);
            } else if (!timeout) {
                timeout = setTimeout(later, remaining);
            }
            return result;
        };
    },
    noop: function() {}
};
},{}]},{},[1]);
| mit |
gandernack/televie | pma/libraries/plugins/import/upload/UploadProgress.php | 2268 | <?php
/* vim: set expandtab sw=4 ts=4 sts=4: */
/**
* Provides upload functionalities for the import plugins
*
* @package PhpMyAdmin
*/
namespace PMA\libraries\plugins\import\upload;
use PMA\libraries\plugins\UploadInterface;
/**
* Implementation for upload progress
*
* @package PhpMyAdmin
*/
class UploadProgress implements UploadInterface
{
    /**
     * Gets the specific upload ID Key
     *
     * @return string ID Key
     */
    public static function getIdKey()
    {
        return 'UPLOAD_IDENTIFIER';
    }

    /**
     * Returns upload status.
     *
     * This is implementation for upload progress
     *
     * @param string $id upload id
     *
     * @return array|null
     */
    public static function getUploadStatus($id)
    {
        global $SESSION_KEY;

        // An empty/whitespace-only id cannot be tracked.
        if (trim($id) == "") {
            return null;
        }

        // First sighting of this upload: seed a zeroed status record.
        if (!array_key_exists($id, $_SESSION[$SESSION_KEY])) {
            $_SESSION[$SESSION_KEY][$id] = array(
                'id' => $id,
                'finished' => false,
                'percent' => 0,
                'total' => 0,
                'complete' => 0,
                'plugin' => UploadProgress::getIdKey(),
            );
        }
        $ret = $_SESSION[$SESSION_KEY][$id];

        // Return the cached record when progress polling is disabled or
        // the upload already completed — no need to query the extension.
        if (!PMA_Import_progressCheck() || $ret['finished']) {
            return $ret;
        }

        // Live status from the PECL uploadprogress extension.
        $status = uploadprogress_get_info($id);

        if ($status) {
            if ($status['bytes_uploaded'] == $status['bytes_total']) {
                $ret['finished'] = true;
            } else {
                $ret['finished'] = false;
            }
            $ret['total'] = $status['bytes_total'];
            $ret['complete'] = $status['bytes_uploaded'];

            // Guard against division by zero for zero-length uploads.
            if ($ret['total'] > 0) {
                $ret['percent'] = $ret['complete'] / $ret['total'] * 100;
            }
        } else {
            // The extension no longer knows this id: treat as finished,
            // keeping the last known total as the completed byte count.
            $ret = array(
                'id' => $id,
                'finished' => true,
                'percent' => 100,
                'total' => $ret['total'],
                'complete' => $ret['total'],
                'plugin' => UploadProgress::getIdKey(),
            );
        }

        // Persist the refreshed record for the next poll.
        $_SESSION[$SESSION_KEY][$id] = $ret;
        return $ret;
    }
}
| gpl-2.0 |
Richard2ndQuadrant/ansible | lib/ansible/plugins/connection/funcd.py | 3591 | # Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# Based on chroot.py (c) 2013, Maykel Moya <mmoya@speedyrails.com>
# (c) 2013, Michael Scherer <misc@zarb.org>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# ---
# The func transport permit to use ansible over func. For people who have already setup
# func and that wish to play with ansible, this permit to move gradually to ansible
# without having to redo completely the setup of the network.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
HAVE_FUNC=False
try:
import func.overlord.client as fc
HAVE_FUNC=True
except ImportError:
pass
import os
from ansible.callbacks import vvv
from ansible import errors
import tempfile
import shutil
class Connection(object):
    ''' Func-based connections '''

    def __init__(self, runner, host, port, *args, **kwargs):
        """Record connection parameters for *host*.

        ``port`` is accepted for interface compatibility with other
        connection plugins but is not used: func manages its own transport.
        """
        self.runner = runner
        self.host = host
        self.has_pipelining = False
        # port is unused, this go on func
        self.port = port

    def connect(self, port=None):
        """Create the func overlord client for this host and return self.

        Raises AnsibleError when the func library is not importable.
        """
        if not HAVE_FUNC:
            raise errors.AnsibleError("func is not installed")
        self.client = fc.Client(self.host)
        return self

    def exec_command(self, cmd, become_user=None, sudoable=False,
                     executable='/bin/sh', in_data=None):
        ''' run a command on the remote minion '''
        if in_data:
            raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")
        # totally ignores privlege escalation
        vvv("EXEC %s" % (cmd), host=self.host)
        # func returns {host: (rc, stdout, stderr)}
        p = self.client.command.run(cmd)[self.host]
        return (p[0], p[1], p[2])

    def _normalize_path(self, path, prefix):
        """Anchor *path* under *prefix*, collapsing '..' so the result
        cannot escape the prefix directory."""
        if not path.startswith(os.path.sep):
            path = os.path.join(os.path.sep, path)
        normpath = os.path.normpath(path)
        return os.path.join(prefix, normpath[1:])

    def put_file(self, in_path, out_path):
        ''' transfer a file from local to remote '''
        out_path = self._normalize_path(out_path, '/')
        vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
        self.client.local.copyfile.send(in_path, out_path)

    def fetch_file(self, in_path, out_path):
        ''' fetch a file from remote to local '''
        in_path = self._normalize_path(in_path, '/')
        vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)
        # need to use a tmp dir due to difference of semantic for getfile
        # ( who take a # directory as destination) and fetch_file, who
        # take a file directly
        tmpdir = tempfile.mkdtemp(prefix="func_ansible")
        try:
            self.client.local.getfile.get(in_path, tmpdir)
            shutil.move(os.path.join(tmpdir, self.host, os.path.basename(in_path)),
                        out_path)
        finally:
            # BUGFIX: previously the temp dir leaked if getfile/move raised;
            # always clean it up.
            shutil.rmtree(tmpdir)

    def close(self):
        ''' terminate the connection; nothing to do here '''
        pass
HereSinceres/TypeScript | tests/cases/fourslash/completionListAfterRegularExpressionLiteral04.ts | 231 | /// <reference path="fourslash.ts" />
////let v = 100;
////let x = /absidey/ /**/
// Should not be blocked since there is a
// space separating us from the regex flags.
goTo.marker();
verify.completionListContains("v"); | apache-2.0 |
kissmetrics/spark | graphx/src/test/scala/org/apache/spark/graphx/EdgeRDDSuite.scala | 2491 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.graphx
import org.apache.spark.SparkFunSuite
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.Utils
class EdgeRDDSuite extends SparkFunSuite with LocalSparkContext {

  test("cache, getStorageLevel") {
    // test to see if getStorageLevel returns correct value after caching
    withSpark { sc =>
      // NOTE: an empty edge list is sufficient; caching behavior does not
      // depend on contents. (Removed a previously unused `verts` RDD.)
      val edges = EdgeRDD.fromEdges(sc.parallelize(List.empty[Edge[Int]]))
      assert(edges.getStorageLevel == StorageLevel.NONE)
      edges.cache()
      assert(edges.getStorageLevel == StorageLevel.MEMORY_ONLY)
    }
  }

  test("checkpointing") {
    withSpark { sc =>
      val edges = EdgeRDD.fromEdges(sc.parallelize(List.empty[Edge[Int]]))
      sc.setCheckpointDir(Utils.createTempDir().getCanonicalPath)
      edges.checkpoint()

      // EdgeRDD not yet checkpointed
      assert(!edges.isCheckpointed)
      assert(!edges.isCheckpointedAndMaterialized)
      assert(!edges.partitionsRDD.isCheckpointed)
      assert(!edges.partitionsRDD.isCheckpointedAndMaterialized)

      val data = edges.collect().toSeq // force checkpointing

      // EdgeRDD shows up as checkpointed, but internally it is not.
      // Only internal partitionsRDD is checkpointed.
      assert(edges.isCheckpointed)
      assert(!edges.isCheckpointedAndMaterialized)
      assert(edges.partitionsRDD.isCheckpointed)
      assert(edges.partitionsRDD.isCheckpointedAndMaterialized)

      assert(edges.collect().toSeq === data) // test checkpointed RDD
    }
  }
}
| apache-2.0 |
sinwailam193/node-inspector | front-end/ui/TextPrompt.js | 29135 | /*
* Copyright (C) 2008 Apple Inc. All rights reserved.
* Copyright (C) 2011 Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* @constructor
* @extends {WebInspector.Object}
* @implements {WebInspector.SuggestBoxDelegate}
* @param {function(!Element, !Range, boolean, function(!Array.<string>, number=))} completions
* @param {string=} stopCharacters
*/
WebInspector.TextPrompt = function(completions, stopCharacters)
{
    /**
     * @type {!Element|undefined}
     */
    this._proxyElement;
    this._proxyElementDisplay = "inline-block";
    // async completion provider, invoked as
    // completions(proxyElement, wordRange, force, callback)
    this._loadCompletions = completions;
    // characters that delimit the word currently being completed
    this._completionStopCharacters = stopCharacters || " =:[({;,!+-*/&|^<>.";
    this._autocompletionTimeout = WebInspector.TextPrompt.DefaultAutocompletionTimeout;
}

// Delay (ms) between typing and issuing the autocomplete request.
WebInspector.TextPrompt.DefaultAutocompletionTimeout = 250;

WebInspector.TextPrompt.Events = {
    ItemApplied: "text-prompt-item-applied",
    ItemAccepted: "text-prompt-item-accepted"
};
WebInspector.TextPrompt.prototype = {
/**
* @param {number} timeout
*/
setAutocompletionTimeout: function(timeout)
{
this._autocompletionTimeout = timeout;
},
get proxyElement()
{
return this._proxyElement;
},
/**
* @param {boolean} suggestBoxEnabled
*/
setSuggestBoxEnabled: function(suggestBoxEnabled)
{
this._suggestBoxEnabled = suggestBoxEnabled;
},
renderAsBlock: function()
{
this._proxyElementDisplay = "block";
},
/**
* Clients should never attach any event listeners to the |element|. Instead,
* they should use the result of this method to attach listeners for bubbling events.
*
* @param {!Element} element
* @return {!Element}
*/
attach: function(element)
{
return this._attachInternal(element);
},
/**
* Clients should never attach any event listeners to the |element|. Instead,
* they should use the result of this method to attach listeners for bubbling events
* or the |blurListener| parameter to register a "blur" event listener on the |element|
* (since the "blur" event does not bubble.)
*
* @param {!Element} element
* @param {function(!Event)} blurListener
* @return {!Element}
*/
attachAndStartEditing: function(element, blurListener)
{
this._attachInternal(element);
this._startEditing(blurListener);
return this.proxyElement;
},
/**
* @param {!Element} element
* @return {!Element}
*/
_attachInternal: function(element)
{
if (this.proxyElement)
throw "Cannot attach an attached TextPrompt";
this._element = element;
this._boundOnKeyDown = this.onKeyDown.bind(this);
this._boundOnInput = this.onInput.bind(this);
this._boundOnMouseWheel = this.onMouseWheel.bind(this);
this._boundSelectStart = this._selectStart.bind(this);
this._boundRemoveSuggestionAids = this._removeSuggestionAids.bind(this);
this._proxyElement = element.ownerDocument.createElement("span");
this._proxyElement.style.display = this._proxyElementDisplay;
element.parentElement.insertBefore(this.proxyElement, element);
this.proxyElement.appendChild(element);
this._element.classList.add("text-prompt");
this._element.addEventListener("keydown", this._boundOnKeyDown, false);
this._element.addEventListener("input", this._boundOnInput, false);
this._element.addEventListener("mousewheel", this._boundOnMouseWheel, false);
this._element.addEventListener("selectstart", this._boundSelectStart, false);
this._element.addEventListener("blur", this._boundRemoveSuggestionAids, false);
this._element.ownerDocument.defaultView.addEventListener("resize", this._boundRemoveSuggestionAids, false);
if (this._suggestBoxEnabled)
this._suggestBox = new WebInspector.SuggestBox(this);
return this.proxyElement;
},
detach: function()
{
this._removeFromElement();
this.proxyElement.parentElement.insertBefore(this._element, this.proxyElement);
this.proxyElement.remove();
delete this._proxyElement;
this._element.classList.remove("text-prompt");
WebInspector.restoreFocusFromElement(this._element);
},
/**
* @type {string}
*/
get text()
{
return this._element.textContent;
},
/**
* @param {string} x
*/
set text(x)
{
this._removeSuggestionAids();
if (!x) {
// Append a break element instead of setting textContent to make sure the selection is inside the prompt.
this._element.removeChildren();
this._element.createChild("br");
} else {
this._element.textContent = x;
}
this.moveCaretToEndOfPrompt();
this._element.scrollIntoView();
},
_removeFromElement: function()
{
this.clearAutoComplete(true);
this._element.removeEventListener("keydown", this._boundOnKeyDown, false);
this._element.removeEventListener("input", this._boundOnInput, false);
this._element.removeEventListener("selectstart", this._boundSelectStart, false);
this._element.removeEventListener("blur", this._boundRemoveSuggestionAids, false);
this._element.ownerDocument.defaultView.removeEventListener("resize", this._boundRemoveSuggestionAids, false);
if (this._isEditing)
this._stopEditing();
if (this._suggestBox)
this._suggestBox.removeFromElement();
},
/**
* @param {function(!Event)=} blurListener
*/
_startEditing: function(blurListener)
{
this._isEditing = true;
this._element.classList.add("editing");
if (blurListener) {
this._blurListener = blurListener;
this._element.addEventListener("blur", this._blurListener, false);
}
this._oldTabIndex = this._element.tabIndex;
if (this._element.tabIndex < 0)
this._element.tabIndex = 0;
WebInspector.setCurrentFocusElement(this._element);
if (!this.text)
this._updateAutoComplete();
},
_stopEditing: function()
{
this._element.tabIndex = this._oldTabIndex;
if (this._blurListener)
this._element.removeEventListener("blur", this._blurListener, false);
this._element.classList.remove("editing");
delete this._isEditing;
},
_removeSuggestionAids: function()
{
this.clearAutoComplete();
this.hideSuggestBox();
},
_selectStart: function()
{
if (this._selectionTimeout)
clearTimeout(this._selectionTimeout);
this._removeSuggestionAids();
/**
* @this {WebInspector.TextPrompt}
*/
function moveBackIfOutside()
{
delete this._selectionTimeout;
if (!this.isCaretInsidePrompt() && this._element.window().getSelection().isCollapsed) {
this.moveCaretToEndOfPrompt();
this.autoCompleteSoon();
}
}
this._selectionTimeout = setTimeout(moveBackIfOutside.bind(this), 100);
},
/**
* @param {boolean=} force
*/
_updateAutoComplete: function(force)
{
this.clearAutoComplete();
this.autoCompleteSoon(force);
},
/**
* @param {!Event} event
*/
onMouseWheel: function(event)
{
// Subclasses can implement.
},
/**
* @param {!Event} event
*/
onKeyDown: function(event)
{
var handled = false;
delete this._needUpdateAutocomplete;
switch (event.keyIdentifier) {
case "U+0009": // Tab
handled = this.tabKeyPressed(event);
break;
case "Left":
case "Home":
this._removeSuggestionAids();
break;
case "Right":
case "End":
if (this.isCaretAtEndOfPrompt())
handled = this.acceptAutoComplete();
else
this._removeSuggestionAids();
break;
case "U+001B": // Esc
if (this.isSuggestBoxVisible()) {
this._removeSuggestionAids();
handled = true;
}
break;
case "U+0020": // Space
if (event.ctrlKey && !event.metaKey && !event.altKey && !event.shiftKey) {
this._updateAutoComplete(true);
handled = true;
}
break;
case "Alt":
case "Meta":
case "Shift":
case "Control":
break;
}
if (!handled && this.isSuggestBoxVisible())
handled = this._suggestBox.keyPressed(event);
if (!handled)
this._needUpdateAutocomplete = true;
if (handled)
event.consume(true);
},
/**
* @param {!Event} event
*/
onInput: function(event)
{
if (this._needUpdateAutocomplete)
this._updateAutoComplete();
},
/**
* @return {boolean}
*/
acceptAutoComplete: function()
{
var result = false;
if (this.isSuggestBoxVisible())
result = this._suggestBox.acceptSuggestion();
if (!result)
result = this._acceptSuggestionInternal();
return result;
},
/**
* @param {boolean=} includeTimeout
*/
clearAutoComplete: function(includeTimeout)
{
if (includeTimeout && this._completeTimeout) {
clearTimeout(this._completeTimeout);
delete this._completeTimeout;
}
delete this._waitingForCompletions;
if (!this.autoCompleteElement)
return;
this.autoCompleteElement.remove();
delete this.autoCompleteElement;
delete this._userEnteredRange;
delete this._userEnteredText;
},
/**
* @param {boolean=} force
*/
autoCompleteSoon: function(force)
{
var immediately = this.isSuggestBoxVisible() || force;
if (!this._completeTimeout)
this._completeTimeout = setTimeout(this.complete.bind(this, force), immediately ? 0 : this._autocompletionTimeout);
},
/**
* @param {boolean=} force
* @param {boolean=} reverse
*/
complete: function(force, reverse)
{
this.clearAutoComplete(true);
var selection = this._element.window().getSelection();
if (!selection.rangeCount)
return;
var selectionRange = selection.getRangeAt(0);
var shouldExit;
if (!force && !this.isCaretAtEndOfPrompt() && !this.isSuggestBoxVisible())
shouldExit = true;
else if (!selection.isCollapsed)
shouldExit = true;
else if (!force) {
// BUG72018: Do not show suggest box if caret is followed by a non-stop character.
var wordSuffixRange = selectionRange.startContainer.rangeOfWord(selectionRange.endOffset, this._completionStopCharacters, this._element, "forward");
if (wordSuffixRange.toString().length)
shouldExit = true;
}
if (shouldExit) {
this.hideSuggestBox();
return;
}
var wordPrefixRange = selectionRange.startContainer.rangeOfWord(selectionRange.startOffset, this._completionStopCharacters, this._element, "backward");
this._waitingForCompletions = true;
this._loadCompletions(this.proxyElement, wordPrefixRange, force || false, this._completionsReady.bind(this, selection, wordPrefixRange, !!reverse));
},
disableDefaultSuggestionForEmptyInput: function()
{
this._disableDefaultSuggestionForEmptyInput = true;
},
/**
* @param {!Selection} selection
* @param {!Range} textRange
*/
_boxForAnchorAtStart: function(selection, textRange)
{
var rangeCopy = selection.getRangeAt(0).cloneRange();
var anchorElement = createElement("span");
anchorElement.textContent = "\u200B";
textRange.insertNode(anchorElement);
var box = anchorElement.boxInWindow(window);
anchorElement.remove();
selection.removeAllRanges();
selection.addRange(rangeCopy);
return box;
},
/**
* @param {!Array.<string>} completions
* @param {number} wordPrefixLength
*/
_buildCommonPrefix: function(completions, wordPrefixLength)
{
var commonPrefix = completions[0];
for (var i = 0; i < completions.length; ++i) {
var completion = completions[i];
var lastIndex = Math.min(commonPrefix.length, completion.length);
for (var j = wordPrefixLength; j < lastIndex; ++j) {
if (commonPrefix[j] !== completion[j]) {
commonPrefix = commonPrefix.substr(0, j);
break;
}
}
}
return commonPrefix;
},
/**
* @return {?Range}
* @suppressGlobalPropertiesCheck
*/
_createRange: function()
{
return document.createRange();
},
/**
 * Callback invoked by the completion provider (see _loadCompletions).
 * Validates that the prompt state still matches the request, updates the
 * suggest box, and — when the caret is at the end of the prompt — inserts
 * the selected completion's suffix as grey "auto-complete" text.
 * @param {!Selection} selection
 * @param {!Range} originalWordPrefixRange range of the word prefix at request time
 * @param {boolean} reverse
 * @param {!Array.<string>} completions
 * @param {number=} selectedIndex index to pre-select; defaults to 0
 */
_completionsReady: function(selection, originalWordPrefixRange, reverse, completions, selectedIndex)
{
// Ignore stale callbacks (a newer request superseded this one) and empty results.
if (!this._waitingForCompletions || !completions.length) {
this.hideSuggestBox();
return;
}
delete this._waitingForCompletions;
var selectionRange = selection.getRangeAt(0);
// Reconstruct the full word (prefix typed before the request + anything
// typed since); if the pieces no longer concatenate cleanly, the user
// edited in between and these completions are stale.
var fullWordRange = this._createRange();
fullWordRange.setStart(originalWordPrefixRange.startContainer, originalWordPrefixRange.startOffset);
fullWordRange.setEnd(selectionRange.endContainer, selectionRange.endOffset);
if (originalWordPrefixRange.toString() + selectionRange.toString() !== fullWordRange.toString())
return;
// -1 means "show the box but pre-select nothing" (empty-input opt-out).
selectedIndex = (this._disableDefaultSuggestionForEmptyInput && !this.text) ? -1 : (selectedIndex || 0);
this._userEnteredRange = fullWordRange;
this._userEnteredText = fullWordRange.toString();
if (this._suggestBox)
this._suggestBox.updateSuggestions(this._boxForAnchorAtStart(selection, fullWordRange), completions, selectedIndex, !this.isCaretAtEndOfPrompt(), this._userEnteredText);
if (selectedIndex === -1)
return;
var wordPrefixLength = originalWordPrefixRange.toString().length;
this._commonPrefix = this._buildCommonPrefix(completions, wordPrefixLength);
if (this.isCaretAtEndOfPrompt()) {
// Replace the typed word with: typed text + grey suffix span, and put
// the caret between them so further typing keeps narrowing the match.
var completionText = completions[selectedIndex];
var prefixText = this._userEnteredRange.toString();
var suffixText = completionText.substring(wordPrefixLength);
this._userEnteredRange.deleteContents();
this._element.normalize();
var finalSelectionRange = this._createRange();
var prefixTextNode = createTextNode(prefixText);
fullWordRange.insertNode(prefixTextNode);
this.autoCompleteElement = createElementWithClass("span", "auto-complete-text");
this.autoCompleteElement.textContent = suffixText;
prefixTextNode.parentNode.insertBefore(this.autoCompleteElement, prefixTextNode.nextSibling);
// Caret goes at the end of the user-typed prefix, before the grey suffix.
finalSelectionRange.setStart(prefixTextNode, wordPrefixLength);
finalSelectionRange.setEnd(prefixTextNode, wordPrefixLength);
selection.removeAllRanges();
selection.addRange(finalSelectionRange);
this.dispatchEventToListeners(WebInspector.TextPrompt.Events.ItemApplied);
}
},
/**
 * Tab handler: extends the input by the common prefix of all current
 * completions. If the suggest box is hidden the whole auto-complete is
 * accepted instead; otherwise only the common prefix is committed and
 * suggestions are refreshed.
 */
_completeCommonPrefix: function()
{
// Bail unless there is an active auto-complete whose common prefix still
// extends what the user has typed.
if (!this.autoCompleteElement || !this._commonPrefix || !this._userEnteredText || !this._commonPrefix.startsWith(this._userEnteredText))
return;
if (!this.isSuggestBoxVisible()) {
this.acceptAutoComplete();
return;
}
// Shrink the grey suffix to just the common prefix remainder, then accept it.
this.autoCompleteElement.textContent = this._commonPrefix.substring(this._userEnteredText.length);
this._acceptSuggestionInternal(true);
},
/**
 * Public entry point used by the suggest box to apply a completion;
 * delegates to _applySuggestion without an explicit prefix range.
 * @param {string} completionText
 * @param {boolean=} isIntermediateSuggestion true while merely highlighting
 *     an item (not yet committing it)
 */
applySuggestion: function(completionText, isIntermediateSuggestion)
{
this._applySuggestion(completionText, isIntermediateSuggestion);
},
/**
 * Replaces the user-entered word with |completionText| in the DOM and
 * repositions the caret: for an intermediate suggestion the word prefix is
 * kept selected (so typing replaces it); for a committed one the caret is
 * collapsed at the end of the completion.
 * @param {string} completionText
 * @param {boolean=} isIntermediateSuggestion
 * @param {!Range=} originalPrefixRange prefix range to measure; falls back
 *     to the cached user-entered text length
 */
_applySuggestion: function(completionText, isIntermediateSuggestion, originalPrefixRange)
{
var wordPrefixLength;
if (originalPrefixRange)
wordPrefixLength = originalPrefixRange.toString().length;
else
wordPrefixLength = this._userEnteredText ? this._userEnteredText.length : 0;
// NOTE(review): assumes _userEnteredRange was set by a prior
// _completionsReady call — verify callers cannot reach here before that.
this._userEnteredRange.deleteContents();
this._element.normalize();
var finalSelectionRange = this._createRange();
var completionTextNode = createTextNode(completionText);
this._userEnteredRange.insertNode(completionTextNode);
// Any grey auto-complete suffix is now stale; drop it.
if (this.autoCompleteElement) {
this.autoCompleteElement.remove();
delete this.autoCompleteElement;
}
if (isIntermediateSuggestion)
finalSelectionRange.setStart(completionTextNode, wordPrefixLength);
else
finalSelectionRange.setStart(completionTextNode, completionText.length);
finalSelectionRange.setEnd(completionTextNode, completionText.length);
var selection = this._element.window().getSelection();
selection.removeAllRanges();
selection.addRange(finalSelectionRange);
if (isIntermediateSuggestion)
this.dispatchEventToListeners(WebInspector.TextPrompt.Events.ItemApplied, { itemText: completionText });
},
/**
 * Commits the currently shown auto-complete suggestion (suggest box
 * protocol entry point).
 * @override
 */
acceptSuggestion: function()
{
this._acceptSuggestionInternal();
},
/**
 * Turns the grey auto-complete span into committed plain text and places
 * the caret after it.
 * @param {boolean=} prefixAccepted true when only the common prefix was
 *     accepted (Tab); keeps completing instead of closing the suggest box
 * @return {boolean} false if there was no attached auto-complete element
 */
_acceptSuggestionInternal: function(prefixAccepted)
{
if (!this.autoCompleteElement || !this.autoCompleteElement.parentNode)
return false;
// Swap the styled span for an ordinary text node with the same content.
var text = this.autoCompleteElement.textContent;
var textNode = createTextNode(text);
this.autoCompleteElement.parentNode.replaceChild(textNode, this.autoCompleteElement);
delete this.autoCompleteElement;
var finalSelectionRange = this._createRange();
finalSelectionRange.setStart(textNode, text.length);
finalSelectionRange.setEnd(textNode, text.length);
var selection = this._element.window().getSelection();
selection.removeAllRanges();
selection.addRange(finalSelectionRange);
if (!prefixAccepted) {
this.hideSuggestBox();
this.dispatchEventToListeners(WebInspector.TextPrompt.Events.ItemAccepted);
} else
// Prefix-only acceptance: immediately request fresh completions.
this.autoCompleteSoon(true);
return true;
},
hideSuggestBox: function()
{
if (this.isSuggestBoxVisible())
this._suggestBox.hide();
},
/**
* @return {boolean}
*/
isSuggestBoxVisible: function()
{
return this._suggestBox && this._suggestBox.visible();
},
/**
 * Whether the insertion caret is currently inside the prompt element.
 * @return {boolean}
 */
isCaretInsidePrompt: function()
{
return this._element.isInsertionCaretInside();
},
/**
 * Whether the collapsed caret sits at the logical end of the prompt text.
 * Text inside the grey auto-complete element does not count as "after"
 * the caret, so a caret just before the auto-complete suffix still
 * qualifies as being at the end.
 * @return {boolean}
 */
isCaretAtEndOfPrompt: function()
{
var selection = this._element.window().getSelection();
// A missing or non-collapsed selection is never an end-of-prompt caret.
if (!selection.rangeCount || !selection.isCollapsed)
return false;
var selectionRange = selection.getRangeAt(0);
var node = selectionRange.startContainer;
if (!node.isSelfOrDescendant(this._element))
return false;
// Caret in the middle of a text node cannot be at the end.
if (node.nodeType === Node.TEXT_NODE && selectionRange.startOffset < node.nodeValue.length)
return false;
// Walk forward through the prompt; any non-empty text after the first one
// found is only acceptable if it lives inside the auto-complete element.
var foundNextText = false;
while (node) {
if (node.nodeType === Node.TEXT_NODE && node.nodeValue.length) {
if (foundNextText && (!this.autoCompleteElement || !this.autoCompleteElement.isAncestor(node)))
return false;
foundNextText = true;
}
node = node.traverseNextNode(this._element);
}
return true;
},
/**
 * Whether the caret is on the first visual line of a (possibly multiline)
 * prompt, i.e. no newline character precedes it. Used by history
 * navigation to decide if ArrowUp should recall a previous entry.
 * @return {boolean}
 */
isCaretOnFirstLine: function()
{
var selection = this._element.window().getSelection();
var focusNode = selection.focusNode;
// Focus outside a direct text child (e.g. empty prompt) counts as first line.
if (!focusNode || focusNode.nodeType !== Node.TEXT_NODE || focusNode.parentNode !== this._element)
return true;
// A newline before the caret within the focused text node => not first line.
if (focusNode.textContent.substring(0, selection.focusOffset).indexOf("\n") !== -1)
return false;
// Scan earlier sibling text nodes for any newline.
focusNode = focusNode.previousSibling;
while (focusNode) {
if (focusNode.nodeType !== Node.TEXT_NODE)
return true;
if (focusNode.textContent.indexOf("\n") !== -1)
return false;
focusNode = focusNode.previousSibling;
}
return true;
},
/**
 * Mirror of isCaretOnFirstLine: whether no newline follows the caret, so
 * ArrowDown may advance history instead of moving within the text.
 * @return {boolean}
 */
isCaretOnLastLine: function()
{
var selection = this._element.window().getSelection();
var focusNode = selection.focusNode;
if (!focusNode || focusNode.nodeType !== Node.TEXT_NODE || focusNode.parentNode !== this._element)
return true;
// A newline after the caret within the focused text node => not last line.
if (focusNode.textContent.substring(selection.focusOffset).indexOf("\n") !== -1)
return false;
// Scan later sibling text nodes for any newline.
focusNode = focusNode.nextSibling;
while (focusNode) {
if (focusNode.nodeType !== Node.TEXT_NODE)
return true;
if (focusNode.textContent.indexOf("\n") !== -1)
return false;
focusNode = focusNode.nextSibling;
}
return true;
},
moveCaretToEndOfPrompt: function()
{
var selection = this._element.window().getSelection();
var selectionRange = this._createRange();
var offset = this._element.childNodes.length;
selectionRange.setStart(this._element, offset);
selectionRange.setEnd(this._element, offset);
selection.removeAllRanges();
selection.addRange(selectionRange);
},
/**
* @param {!Event} event
* @return {boolean}
*/
tabKeyPressed: function(event)
{
this._completeCommonPrefix();
// Consume the key.
return true;
},
__proto__: WebInspector.Object.prototype
}
/**
 * A TextPrompt that additionally keeps a navigable history of committed
 * entries (ArrowUp/ArrowDown, Ctrl+P/Ctrl+N on Mac).
 * @constructor
 * @extends {WebInspector.TextPrompt}
 * @param {function(!Element, !Range, boolean, function(!Array.<string>, number=))} completions
 * @param {string=} stopCharacters
 */
WebInspector.TextPromptWithHistory = function(completions, stopCharacters)
{
WebInspector.TextPrompt.call(this, completions, stopCharacters);
/**
 * Committed history entries, oldest first.
 * @type {!Array.<string>}
 */
this._data = [];
/**
 * 1-based entry in the history stack.
 * 1 means "not navigating" (the uncommitted, currently-typed text).
 * @type {number}
 */
this._historyOffset = 1;
/**
 * Whether to coalesce duplicate items in the history, default is true.
 * @type {boolean}
 */
this._coalesceHistoryDupes = true;
}
WebInspector.TextPromptWithHistory.prototype = {
/**
 * The raw history array, oldest entry first.
 * @return {!Array.<string>}
 */
get historyData()
{
// FIXME: do we need to copy this?
return this._data;
},
/**
 * Controls whether pushHistoryItem drops an entry equal to the newest one.
 * @param {boolean} x
 */
setCoalesceHistoryDupes: function(x)
{
this._coalesceHistoryDupes = x;
},
/**
 * Replaces the history with a copy of |data| and resets navigation.
 * @param {!Array.<string>} data
 */
setHistoryData: function(data)
{
this._data = [].concat(data);
this._historyOffset = 1;
},
/**
 * Pushes a committed text into the history.
 * @param {string} text
 */
pushHistoryItem: function(text)
{
// If the top entry is the user's uncommitted scratch text (pushed by
// _pushCurrentText during navigation), discard it first.
if (this._uncommittedIsTop) {
this._data.pop();
delete this._uncommittedIsTop;
}
this._historyOffset = 1;
if (this._coalesceHistoryDupes && text === this._currentHistoryItem())
return;
this._data.push(text);
},
/**
 * Pushes the current (uncommitted) text into the history.
 * Called when navigation starts so the in-progress text is not lost.
 */
_pushCurrentText: function()
{
if (this._uncommittedIsTop)
this._data.pop(); // Throw away obsolete uncommitted text.
this._uncommittedIsTop = true;
this.clearAutoComplete(true);
this._data.push(this.text);
},
/**
 * Steps one entry back in time (towards older entries).
 * @return {string|undefined} undefined when already at the oldest entry
 */
_previous: function()
{
if (this._historyOffset > this._data.length)
return undefined;
// Leaving the "live" text: preserve it as the (uncommitted) top entry.
if (this._historyOffset === 1)
this._pushCurrentText();
++this._historyOffset;
return this._currentHistoryItem();
},
/**
 * Steps one entry forward in time (towards the uncommitted text).
 * @return {string|undefined} undefined when not navigating
 */
_next: function()
{
if (this._historyOffset === 1)
return undefined;
--this._historyOffset;
return this._currentHistoryItem();
},
/**
 * Entry at the current navigation offset (offset 1 = newest).
 * @return {string|undefined}
 */
_currentHistoryItem: function()
{
return this._data[this._data.length - this._historyOffset];
},
/**
 * History navigation keys; everything else falls through to TextPrompt.
 * NOTE(review): event.keyIdentifier is a deprecated, WebKit-only property —
 * confirm the embedder still provides it before reusing this code.
 * @override
 */
onKeyDown: function(event)
{
var newText;
var isPrevious;
switch (event.keyIdentifier) {
case "Up":
// Only navigate when the caret cannot move up within the text itself.
if (!this.isCaretOnFirstLine() || this.isSuggestBoxVisible())
break;
newText = this._previous();
isPrevious = true;
break;
case "Down":
if (!this.isCaretOnLastLine() || this.isSuggestBoxVisible())
break;
newText = this._next();
break;
case "U+0050": // Ctrl+P = Previous
if (WebInspector.isMac() && event.ctrlKey && !event.metaKey && !event.altKey && !event.shiftKey) {
newText = this._previous();
isPrevious = true;
}
break;
case "U+004E": // Ctrl+N = Next
if (WebInspector.isMac() && event.ctrlKey && !event.metaKey && !event.altKey && !event.shiftKey)
newText = this._next();
break;
}
if (newText !== undefined) {
event.consume(true);
this.text = newText;
if (isPrevious) {
// Recalled multiline entries: place the caret at the end of the
// first line so repeated ArrowUp keeps walking the history.
var firstNewlineIndex = this.text.indexOf("\n");
if (firstNewlineIndex === -1)
this.moveCaretToEndOfPrompt();
else {
var selection = this._element.window().getSelection();
var selectionRange = this._createRange();
selectionRange.setStart(this._element.firstChild, firstNewlineIndex);
selectionRange.setEnd(this._element.firstChild, firstNewlineIndex);
selection.removeAllRanges();
selection.addRange(selectionRange);
}
}
return;
}
WebInspector.TextPrompt.prototype.onKeyDown.apply(this, arguments);
},
__proto__: WebInspector.TextPrompt.prototype
}
| bsd-2-clause |
klas/joomla-cms | tests/unit/stubs/discover1/challenger.php | 53 | <?php /* @codingStandardsIgnoreFile Empty file */ ?>
| gpl-2.0 |
taoguan/hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueueManager.java | 14349 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.xml.sax.SAXException;
import com.google.common.annotations.VisibleForTesting;
/**
* Maintains a list of queues as well as scheduling parameters for each queue,
* such as guaranteed share allocations, from the fair scheduler config file.
*
*/
@Private
@Unstable
public class QueueManager {
public static final Log LOG = LogFactory.getLog(
QueueManager.class.getName());
public static final String ROOT_QUEUE = "root";
private final FairScheduler scheduler;
// Leaf queues are also tracked separately for fast iteration by the
// scheduler; CopyOnWriteArrayList allows lock-free reads.
private final Collection<FSLeafQueue> leafQueues =
new CopyOnWriteArrayList<FSLeafQueue>();
// Full-name -> queue map for both parents and leaves. All structural
// mutations and lookups synchronize on this map.
private final Map<String, FSQueue> queues = new HashMap<String, FSQueue>();
private FSParentQueue rootQueue;
public QueueManager(FairScheduler scheduler) {
this.scheduler = scheduler;
}
public FSParentQueue getRootQueue() {
return rootQueue;
}
// Sets up the root queue and the default leaf queue; must be called before
// any other queue operation.
public void initialize(Configuration conf) throws IOException,
SAXException, AllocationConfigurationException, ParserConfigurationException {
rootQueue = new FSParentQueue("root", scheduler, null);
queues.put(rootQueue.getName(), rootQueue);
// Create the default queue
getLeafQueue(YarnConfiguration.DEFAULT_QUEUE_NAME, true);
}
/**
 * Get a leaf queue by name, creating it if the create param is true and is necessary.
 * If the queue is not or can not be a leaf queue, i.e. it already exists as a
 * parent queue, or one of the parents in its name is already a leaf queue,
 * null is returned.
 *
 * The root part of the name is optional, so a queue underneath the root
 * named "queue1" could be referred to as just "queue1", and a queue named
 * "queue2" underneath a parent named "parent1" that is underneath the root
 * could be referred to as just "parent1.queue2".
 */
public FSLeafQueue getLeafQueue(String name, boolean create) {
FSQueue queue = getQueue(name, create, FSQueueType.LEAF);
if (queue instanceof FSParentQueue) {
return null;
}
return (FSLeafQueue) queue;
}
/**
 * Remove a leaf queue if empty
 * @param name name of the queue
 * @return true if queue was removed or false otherwise
 */
public boolean removeLeafQueue(String name) {
name = ensureRootPrefix(name);
// Passing PARENT is deliberate: the existing leaf is "incompatible" with
// a hypothetical parent of the same name, so the helper removes it if empty.
return removeEmptyIncompatibleQueues(name, FSQueueType.PARENT);
}
/**
 * Get a parent queue by name, creating it if the create param is true and is necessary.
 * If the queue is not or can not be a parent queue, i.e. it already exists as a
 * leaf queue, or one of the parents in its name is already a leaf queue,
 * null is returned.
 *
 * The root part of the name is optional, so a queue underneath the root
 * named "queue1" could be referred to as just "queue1", and a queue named
 * "queue2" underneath a parent named "parent1" that is underneath the root
 * could be referred to as just "parent1.queue2".
 */
public FSParentQueue getParentQueue(String name, boolean create) {
FSQueue queue = getQueue(name, create, FSQueueType.PARENT);
if (queue instanceof FSLeafQueue) {
return null;
}
return (FSParentQueue) queue;
}
// Shared lookup/creation path for both leaf and parent queues.
private FSQueue getQueue(String name, boolean create, FSQueueType queueType) {
name = ensureRootPrefix(name);
synchronized (queues) {
FSQueue queue = queues.get(name);
if (queue == null && create) {
// if the queue doesn't exist,create it and return
queue = createQueue(name, queueType);
// Update steady fair share for all queues
if (queue != null) {
rootQueue.recomputeSteadyShares();
}
}
return queue;
}
}
/**
 * Creates a leaf or parent queue based on what is specified in 'queueType'
 * and places it in the tree. Creates any parents that don't already exist.
 *
 * @return
 * the created queue, if successful. null if not allowed (one of the parent
 * queues in the queue name is already a leaf queue)
 */
private FSQueue createQueue(String name, FSQueueType queueType) {
List<String> newQueueNames = new ArrayList<String>();
newQueueNames.add(name);
int sepIndex = name.length();
FSParentQueue parent = null;
// Move up the queue tree until we reach one that exists.
// The root queue always exists (see initialize), so this terminates with
// a non-null parent before sepIndex reaches -1 in practice.
while (sepIndex != -1) {
int prevSepIndex = sepIndex;
sepIndex = name.lastIndexOf('.', sepIndex-1);
String node = name.substring(sepIndex+1, prevSepIndex);
if (!isQueueNameValid(node)) {
throw new InvalidQueueNameException("Illegal node name at offset " +
(sepIndex+1) + " for queue name " + name);
}
FSQueue queue;
String curName = null;
curName = name.substring(0, sepIndex);
queue = queues.get(curName);
if (queue == null) {
newQueueNames.add(curName);
} else {
if (queue instanceof FSParentQueue) {
parent = (FSParentQueue)queue;
break;
} else {
// An ancestor is already a leaf; the requested queue is not allowed.
return null;
}
}
}
// At this point, parent refers to the deepest existing parent of the
// queue to create.
// Now that we know everything worked out, make all the queues
// and add them to the map.
AllocationConfiguration queueConf = scheduler.getAllocationConfiguration();
FSLeafQueue leafQueue = null;
// newQueueNames holds the missing ancestry deepest-first; iterate in
// reverse to create parents before children.
for (int i = newQueueNames.size()-1; i >= 0; i--) {
String queueName = newQueueNames.get(i);
if (i == 0 && queueType != FSQueueType.PARENT) {
leafQueue = new FSLeafQueue(name, scheduler, parent);
try {
leafQueue.setPolicy(queueConf.getDefaultSchedulingPolicy());
} catch (AllocationConfigurationException ex) {
LOG.warn("Failed to set default scheduling policy "
+ queueConf.getDefaultSchedulingPolicy() + " on new leaf queue.", ex);
}
parent.addChildQueue(leafQueue);
queues.put(leafQueue.getName(), leafQueue);
leafQueues.add(leafQueue);
leafQueue.updatePreemptionVariables();
return leafQueue;
} else {
FSParentQueue newParent = new FSParentQueue(queueName, scheduler, parent);
try {
newParent.setPolicy(queueConf.getDefaultSchedulingPolicy());
} catch (AllocationConfigurationException ex) {
LOG.warn("Failed to set default scheduling policy "
+ queueConf.getDefaultSchedulingPolicy() + " on new parent queue.", ex);
}
parent.addChildQueue(newParent);
queues.put(newParent.getName(), newParent);
newParent.updatePreemptionVariables();
parent = newParent;
}
}
// Only reached when queueType == PARENT: the deepest created parent is the result.
return parent;
}
/**
 * Make way for the given queue if possible, by removing incompatible
 * queues with no apps in them. Incompatibility could be due to
 * (1) queueToCreate being currently a parent but needs to change to leaf
 * (2) queueToCreate being currently a leaf but needs to change to parent
 * (3) an existing leaf queue in the ancestry of queueToCreate.
 *
 * We will never remove the root queue or the default queue in this way.
 *
 * @return true if we can create queueToCreate or it already exists.
 */
private boolean removeEmptyIncompatibleQueues(String queueToCreate,
FSQueueType queueType) {
queueToCreate = ensureRootPrefix(queueToCreate);
// Ensure queueToCreate is not root and doesn't have the default queue in its
// ancestry.
if (queueToCreate.equals(ROOT_QUEUE) ||
queueToCreate.startsWith(
ROOT_QUEUE + "." + YarnConfiguration.DEFAULT_QUEUE_NAME + ".")) {
return false;
}
FSQueue queue = queues.get(queueToCreate);
// Queue exists already.
if (queue != null) {
if (queue instanceof FSLeafQueue) {
if (queueType == FSQueueType.LEAF) {
// if queue is already a leaf then return true
return true;
}
// remove incompatibility since queue is a leaf currently
// needs to change to a parent.
return removeQueueIfEmpty(queue);
} else {
if (queueType == FSQueueType.PARENT) {
return true;
}
// If it's an existing parent queue and needs to change to leaf,
// remove it if it's empty.
return removeQueueIfEmpty(queue);
}
}
// Queue doesn't exist already. Check if the new queue would be created
// under an existing leaf queue. If so, try removing that leaf queue.
int sepIndex = queueToCreate.length();
sepIndex = queueToCreate.lastIndexOf('.', sepIndex-1);
while (sepIndex != -1) {
String prefixString = queueToCreate.substring(0, sepIndex);
FSQueue prefixQueue = queues.get(prefixString);
if (prefixQueue != null && prefixQueue instanceof FSLeafQueue) {
return removeQueueIfEmpty(prefixQueue);
}
sepIndex = queueToCreate.lastIndexOf('.', sepIndex-1);
}
return true;
}
/**
 * Remove the queue if it and its descendents are all empty.
 * @param queue
 * @return true if removed, false otherwise
 */
private boolean removeQueueIfEmpty(FSQueue queue) {
if (isEmpty(queue)) {
removeQueue(queue);
return true;
}
return false;
}
/**
 * Remove a queue and all its descendents.
 */
private void removeQueue(FSQueue queue) {
if (queue instanceof FSLeafQueue) {
leafQueues.remove(queue);
} else {
// Recursively remove children; each removal shrinks the live child list.
List<FSQueue> childQueues = queue.getChildQueues();
while (!childQueues.isEmpty()) {
removeQueue(childQueues.get(0));
}
}
queues.remove(queue.getName());
FSParentQueue parent = queue.getParent();
parent.removeChildQueue(queue);
}
/**
 * Returns true if there are no applications, running or not, in the given
 * queue or any of its descendents.
 */
protected boolean isEmpty(FSQueue queue) {
if (queue instanceof FSLeafQueue) {
FSLeafQueue leafQueue = (FSLeafQueue)queue;
return queue.getNumRunnableApps() == 0 &&
leafQueue.getNumNonRunnableApps() == 0;
} else {
for (FSQueue child : queue.getChildQueues()) {
if (!isEmpty(child)) {
return false;
}
}
return true;
}
}
/**
 * Gets a queue by name.
 */
public FSQueue getQueue(String name) {
name = ensureRootPrefix(name);
synchronized (queues) {
return queues.get(name);
}
}
/**
 * Return whether a queue exists already.
 */
public boolean exists(String name) {
name = ensureRootPrefix(name);
synchronized (queues) {
return queues.containsKey(name);
}
}
/**
 * Get a collection of all leaf queues
 */
public Collection<FSLeafQueue> getLeafQueues() {
synchronized (queues) {
// NOTE(review): returns the live copy-on-write collection, not a snapshot;
// callers see subsequent additions/removals.
return leafQueues;
}
}
/**
 * Get a collection of all queues
 */
public Collection<FSQueue> getQueues() {
// NOTE(review): unlike the other accessors this does not synchronize on
// queues and returns a live view — confirm callers tolerate that.
return queues.values();
}
// Normalizes a queue name to its fully-qualified "root.*" form.
private String ensureRootPrefix(String name) {
if (!name.startsWith(ROOT_QUEUE + ".") && !name.equals(ROOT_QUEUE)) {
name = ROOT_QUEUE + "." + name;
}
return name;
}
// Re-applies a (re)loaded allocation configuration: creates configured
// queues, removes incompatible empty ones, and refreshes metrics/policies.
public void updateAllocationConfiguration(AllocationConfiguration queueConf) {
// Create leaf queues and the parent queues in a leaf's ancestry if they do not exist
for (String name : queueConf.getConfiguredQueues().get(FSQueueType.LEAF)) {
if (removeEmptyIncompatibleQueues(name, FSQueueType.LEAF)) {
getLeafQueue(name, true);
}
}
// At this point all leaves and 'parents with at least one child' would have been created.
// Now create parents with no configured leaf.
for (String name : queueConf.getConfiguredQueues().get(
FSQueueType.PARENT)) {
if (removeEmptyIncompatibleQueues(name, FSQueueType.PARENT)) {
getParentQueue(name, true);
}
}
for (FSQueue queue : queues.values()) {
// Update queue metrics
FSQueueMetrics queueMetrics = queue.getMetrics();
queueMetrics.setMinShare(queue.getMinShare());
queueMetrics.setMaxShare(queue.getMaxShare());
// Set scheduling policies
try {
SchedulingPolicy policy = queueConf.getSchedulingPolicy(queue.getName());
policy.initialize(scheduler.getClusterResource());
queue.setPolicy(policy);
} catch (AllocationConfigurationException ex) {
LOG.warn("Cannot apply configured scheduling policy to queue "
+ queue.getName(), ex);
}
}
// Update steady fair shares for all queues
rootQueue.recomputeSteadyShares();
// Update the fair share preemption timeouts and preemption for all queues
// recursively
rootQueue.updatePreemptionVariables();
}
/**
 * Check whether queue name is valid,
 * return true if it is valid, otherwise return false.
 */
@VisibleForTesting
boolean isQueueNameValid(String node) {
// A name is invalid if empty or if it has leading/trailing whitespace.
return !node.isEmpty() && node.equals(node.trim());
}
}
| apache-2.0 |
Sambego/KunstmaanBundlesCMS | src/Kunstmaan/AdminBundle/Resources/ui/vendor_bower/moment/src/locale/sl.js | 3940 | //! moment.js locale configuration
//! locale : slovenian (sl)
//! author : Robert Sedovšek : https://github.com/sedovsek
import moment from '../moment';
/**
 * Builds Slovenian relative-time strings with the correct plural form.
 * The code distinguishes four grammatical numbers, selected by the count:
 * 1 (singular), 2 (dual), 3-4 (small plural), and everything else.
 *
 * @param {number} number the amount to render
 * @param {boolean} withoutSuffix true when the phrase stands alone; false
 *     inside the "čez %s" / "%s nazaj" templates (selects a different
 *     word form for the singular minute/hour)
 * @param {string} key moment duration key: m, mm, h, hh, dd, MM or yy
 * @returns {string|undefined} the localized phrase, or undefined for keys
 *     this helper does not handle (matching the original switch fallthrough)
 */
function translate(number, withoutSuffix, key) {
    // Word forms indexed by grammatical number: [1, 2, 3-4, other].
    var pluralForms = {
        mm: ['minuta', 'minuti', 'minute', 'minut'],
        hh: ['ura', 'uri', 'ure', 'ur'],
        MM: ['mesec', 'meseca', 'mesece', 'mesecev'],
        yy: ['leto', 'leti', 'leta', 'let']
    };
    if (key === 'm') {
        return withoutSuffix ? 'ena minuta' : 'eno minuto';
    }
    if (key === 'h') {
        return withoutSuffix ? 'ena ura' : 'eno uro';
    }
    if (key === 'dd') {
        return number + ' ' + (number === 1 ? 'dan' : 'dni');
    }
    if (pluralForms.hasOwnProperty(key)) {
        var forms = pluralForms[key];
        var form;
        if (number === 1) {
            form = forms[0];
        } else if (number === 2) {
            form = forms[1];
        } else if (number === 3 || number === 4) {
            form = forms[2];
        } else {
            form = forms[3];
        }
        return number + ' ' + form;
    }
    return undefined;
}
// Slovenian locale definition. The m/mm/h/hh/dd/MM/yy relative-time keys
// delegate to translate() above for grammatical-number handling.
export default moment.defineLocale('sl', {
months : 'januar_februar_marec_april_maj_junij_julij_avgust_september_oktober_november_december'.split('_'),
monthsShort : 'jan._feb._mar._apr._maj._jun._jul._avg._sep._okt._nov._dec.'.split('_'),
weekdays : 'nedelja_ponedeljek_torek_sreda_četrtek_petek_sobota'.split('_'),
weekdaysShort : 'ned._pon._tor._sre._čet._pet._sob.'.split('_'),
weekdaysMin : 'ne_po_to_sr_če_pe_so'.split('_'),
longDateFormat : {
LT : 'H:mm',
LTS : 'LT:ss',
L : 'DD. MM. YYYY',
LL : 'D. MMMM YYYY',
LLL : 'D. MMMM YYYY LT',
LLLL : 'dddd, D. MMMM YYYY LT'
},
calendar : {
sameDay : '[danes ob] LT',
nextDay : '[jutri ob] LT',
// Weekday names decline differently by gender, so next/last week phrases
// are chosen per day-of-week.
nextWeek : function () {
switch (this.day()) {
case 0:
return '[v] [nedeljo] [ob] LT';
case 3:
return '[v] [sredo] [ob] LT';
case 6:
return '[v] [soboto] [ob] LT';
case 1:
case 2:
case 4:
case 5:
return '[v] dddd [ob] LT';
}
},
lastDay : '[včeraj ob] LT',
lastWeek : function () {
switch (this.day()) {
case 0:
case 3:
case 6:
return '[prejšnja] dddd [ob] LT';
case 1:
case 2:
case 4:
case 5:
return '[prejšnji] dddd [ob] LT';
}
},
sameElse : 'L'
},
relativeTime : {
future : 'čez %s',
past : '%s nazaj',
s : 'nekaj sekund',
m : translate,
mm : translate,
h : translate,
hh : translate,
d : 'en dan',
dd : translate,
M : 'en mesec',
MM : translate,
y : 'eno leto',
yy : translate
},
ordinalParse: /\d{1,2}\./,
ordinal : '%d.',
week : {
dow : 1, // Monday is the first day of the week.
doy : 7 // The week that contains Jan 1st is the first week of the year.
}
});
| mit |
morethanthemes/newsplus-lite | site/core/modules/user/src/Controller/UserAuthenticationController.php | 13399 | <?php
namespace Drupal\user\Controller;
use Drupal\Core\Access\CsrfTokenGenerator;
use Drupal\Core\Controller\ControllerBase;
use Drupal\Core\DependencyInjection\ContainerInjectionInterface;
use Drupal\Core\Flood\FloodInterface;
use Drupal\Core\Routing\RouteProviderInterface;
use Drupal\user\UserAuthInterface;
use Drupal\user\UserInterface;
use Drupal\user\UserStorageInterface;
use Psr\Log\LoggerInterface;
use Symfony\Component\DependencyInjection\ContainerInterface;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\HttpKernel\Exception\AccessDeniedHttpException;
use Symfony\Component\HttpKernel\Exception\BadRequestHttpException;
use Symfony\Component\Serializer\Encoder\JsonEncoder;
use Symfony\Component\Serializer\Serializer;
/**
* Provides controllers for login, login status and logout via HTTP requests.
*/
class UserAuthenticationController extends ControllerBase implements ContainerInjectionInterface {
/**
* String sent in responses, to describe the user as being logged in.
*
* @var string
*/
const LOGGED_IN = 1;
/**
* String sent in responses, to describe the user as being logged out.
*
* @var string
*/
const LOGGED_OUT = 0;
/**
* The flood controller.
*
* @var \Drupal\Core\Flood\FloodInterface
*/
protected $flood;
/**
* The user storage.
*
* @var \Drupal\user\UserStorageInterface
*/
protected $userStorage;
/**
* The CSRF token generator.
*
* @var \Drupal\Core\Access\CsrfTokenGenerator
*/
protected $csrfToken;
/**
* The user authentication.
*
* @var \Drupal\user\UserAuthInterface
*/
protected $userAuth;
/**
* The route provider.
*
* @var \Drupal\Core\Routing\RouteProviderInterface
*/
protected $routeProvider;
/**
* The serializer.
*
* @var \Symfony\Component\Serializer\Serializer
*/
protected $serializer;
/**
* The available serialization formats.
*
* @var array
*/
protected $serializerFormats = [];
/**
* A logger instance.
*
* @var \Psr\Log\LoggerInterface
*/
protected $logger;
/**
 * Constructs a new UserAuthenticationController object.
 *
 * Pure dependency injection: each collaborator is stored on the instance
 * in constructor-argument order, nothing else happens here.
 *
 * @param \Drupal\Core\Flood\FloodInterface $flood
 *   The flood controller, used to throttle repeated login attempts.
 * @param \Drupal\user\UserStorageInterface $user_storage
 *   The user entity storage.
 * @param \Drupal\Core\Access\CsrfTokenGenerator $csrf_token
 *   The CSRF token generator.
 * @param \Drupal\user\UserAuthInterface $user_auth
 *   The user authentication service.
 * @param \Drupal\Core\Routing\RouteProviderInterface $route_provider
 *   The route provider, used to derive the logout token path.
 * @param \Symfony\Component\Serializer\Serializer $serializer
 *   The serializer for request/response bodies.
 * @param array $serializer_formats
 *   The available serialization formats.
 * @param \Psr\Log\LoggerInterface $logger
 *   A logger instance.
 */
public function __construct(FloodInterface $flood, UserStorageInterface $user_storage, CsrfTokenGenerator $csrf_token, UserAuthInterface $user_auth, RouteProviderInterface $route_provider, Serializer $serializer, array $serializer_formats, LoggerInterface $logger) {
  $this->flood = $flood;
  $this->userStorage = $user_storage;
  $this->csrfToken = $csrf_token;
  $this->userAuth = $user_auth;
  $this->routeProvider = $route_provider;
  $this->serializer = $serializer;
  $this->serializerFormats = $serializer_formats;
  $this->logger = $logger;
}
/**
 * {@inheritdoc}
 */
public static function create(ContainerInterface $container) {
// Prefer the full serializer registered by the serialization module.
if ($container->hasParameter('serializer.formats') && $container->has('serializer')) {
$serializer = $container->get('serializer');
$formats = $container->getParameter('serializer.formats');
}
else {
// The serialization module may not be installed; fall back to a minimal
// JSON-only serializer so login over HTTP still works.
$formats = ['json'];
$encoders = [new JsonEncoder()];
$serializer = new Serializer([], $encoders);
}
return new static(
$container->get('flood'),
$container->get('entity_type.manager')->getStorage('user'),
$container->get('csrf_token'),
$container->get('user.auth'),
$container->get('router.route_provider'),
$serializer,
$formats,
$container->get('logger.factory')->get('user')
);
}
/**
 * Logs in a user.
 *
 * Decodes the credentials from the request body, enforces flood control,
 * authenticates, and on success finalizes the session and returns basic
 * account metadata plus CSRF and logout tokens. Failures raise
 * BadRequestHttpException and register flood events.
 *
 * @param \Symfony\Component\HttpFoundation\Request $request
 * The request.
 *
 * @return \Symfony\Component\HttpFoundation\Response
 * A response which contains the ID and CSRF token.
 */
public function login(Request $request) {
$format = $this->getRequestFormat($request);
$content = $request->getContent();
$credentials = $this->serializer->decode($content, $format);
// Validate presence of both fields with specific error messages.
if (!isset($credentials['name']) && !isset($credentials['pass'])) {
throw new BadRequestHttpException('Missing credentials.');
}
if (!isset($credentials['name'])) {
throw new BadRequestHttpException('Missing credentials.name.');
}
if (!isset($credentials['pass'])) {
throw new BadRequestHttpException('Missing credentials.pass.');
}
// Throws if this IP/account pair has exceeded the allowed attempt rate.
$this->floodControl($request, $credentials['name']);
if ($this->userIsBlocked($credentials['name'])) {
throw new BadRequestHttpException('The user has not been activated or is blocked.');
}
if ($uid = $this->userAuth->authenticate($credentials['name'], $credentials['pass'])) {
// Successful login clears this account's failed-attempt counter.
$this->flood->clear('user.http_login', $this->getLoginFloodIdentifier($request, $credentials['name']));
/** @var \Drupal\user\UserInterface $user */
$user = $this->userStorage->load($uid);
$this->userLoginFinalize($user);
// Send basic metadata about the logged in user.
// Each field is included only if the account may view it.
$response_data = [];
if ($user->get('uid')->access('view', $user)) {
$response_data['current_user']['uid'] = $user->id();
}
if ($user->get('roles')->access('view', $user)) {
$response_data['current_user']['roles'] = $user->getRoles();
}
if ($user->get('name')->access('view', $user)) {
$response_data['current_user']['name'] = $user->getAccountName();
}
$response_data['csrf_token'] = $this->csrfToken->get('rest');
$logout_route = $this->routeProvider->getRouteByName('user.logout.http');
// Trim '/' off path to match \Drupal\Core\Access\CsrfAccessCheck.
$logout_path = ltrim($logout_route->getPath(), '/');
$response_data['logout_token'] = $this->csrfToken->get($logout_path);
$encoded_response_data = $this->serializer->encode($response_data, $format);
return new Response($encoded_response_data);
}
// Authentication failed: record per-user (if identifiable) flood event.
$flood_config = $this->config('user.flood');
if ($identifier = $this->getLoginFloodIdentifier($request, $credentials['name'])) {
$this->flood->register('user.http_login', $flood_config->get('user_window'), $identifier);
}
// Always register an IP-based failed login event.
$this->flood->register('user.failed_login_ip', $flood_config->get('ip_window'));
throw new BadRequestHttpException('Sorry, unrecognized username or password.');
}
/**
* Resets a user password.
*
* @param \Symfony\Component\HttpFoundation\Request $request
* The request.
*
* @return \Symfony\Component\HttpFoundation\Response
* The response object.
*/
  public function resetPassword(Request $request) {
    $format = $this->getRequestFormat($request);
    $content = $request->getContent();
    $credentials = $this->serializer->decode($content, $format);
    // Check if a name or mail is provided.
    if (!isset($credentials['name']) && !isset($credentials['mail'])) {
      throw new BadRequestHttpException('Missing credentials.name or credentials.mail');
    }
    // Load by name if provided.
    if (isset($credentials['name'])) {
      $users = $this->userStorage->loadByProperties(['name' => trim($credentials['name'])]);
    }
    elseif (isset($credentials['mail'])) {
      $users = $this->userStorage->loadByProperties(['mail' => trim($credentials['mail'])]);
    }
    /** @var \Drupal\Core\Session\AccountInterface $account */
    // reset() yields FALSE for an empty result set; the check below covers it.
    $account = reset($users);
    if ($account && $account->id()) {
      if ($this->userIsBlocked($account->getAccountName())) {
        throw new BadRequestHttpException('The user has not been activated or is blocked.');
      }
      // Send the password reset email.
      $mail = _user_mail_notify('password_reset', $account, $account->getPreferredLangcode());
      if (empty($mail)) {
        throw new BadRequestHttpException('Unable to send email. Contact the site administrator if the problem persists.');
      }
      else {
        $this->logger->notice('Password reset instructions mailed to %name at %email.', ['%name' => $account->getAccountName(), '%email' => $account->getEmail()]);
        return new Response();
      }
    }
    // Error if no users found with provided name or mail.
    throw new BadRequestHttpException('Unrecognized username or email address.');
  }
/**
* Verifies if the user is blocked.
*
* @param string $name
* The username.
*
* @return bool
* TRUE if the user is blocked, otherwise FALSE.
*/
protected function userIsBlocked($name) {
return user_is_blocked($name);
}
/**
* Finalizes the user login.
*
* @param \Drupal\user\UserInterface $user
* The user.
*/
protected function userLoginFinalize(UserInterface $user) {
user_login_finalize($user);
}
/**
* Logs out a user.
*
* @return \Symfony\Component\HttpFoundation\Response
* The response object.
*/
public function logout() {
$this->userLogout();
return new Response(NULL, 204);
}
/**
* Logs the user out.
*/
protected function userLogout() {
user_logout();
}
/**
* Checks whether a user is logged in or not.
*
* @return \Symfony\Component\HttpFoundation\Response
* The response.
*/
public function loginStatus() {
if ($this->currentUser()->isAuthenticated()) {
$response = new Response(self::LOGGED_IN);
}
else {
$response = new Response(self::LOGGED_OUT);
}
$response->headers->set('Content-Type', 'text/plain');
return $response;
}
/**
* Gets the format of the current request.
*
* @param \Symfony\Component\HttpFoundation\Request $request
* The current request.
*
* @return string
* The format of the request.
*/
protected function getRequestFormat(Request $request) {
$format = $request->getRequestFormat();
if (!in_array($format, $this->serializerFormats)) {
throw new BadRequestHttpException("Unrecognized format: $format.");
}
return $format;
}
/**
* Enforces flood control for the current login request.
*
* @param \Symfony\Component\HttpFoundation\Request $request
* The current request.
* @param string $username
* The user name sent for login credentials.
*/
  protected function floodControl(Request $request, $username) {
    $flood_config = $this->config('user.flood');
    // IP-based check first: it applies even for unknown usernames.
    if (!$this->flood->isAllowed('user.failed_login_ip', $flood_config->get('ip_limit'), $flood_config->get('ip_window'))) {
      throw new AccessDeniedHttpException('Access is blocked because of IP based flood prevention.', NULL, Response::HTTP_TOO_MANY_REQUESTS);
    }
    // Per-user check only when the username maps to an active account.
    if ($identifier = $this->getLoginFloodIdentifier($request, $username)) {
      // Don't allow login if the limit for this user has been reached.
      // Default is to allow 5 failed attempts every 6 hours.
      if (!$this->flood->isAllowed('user.http_login', $flood_config->get('user_limit'), $flood_config->get('user_window'), $identifier)) {
        if ($flood_config->get('uid_only')) {
          $error_message = sprintf('There have been more than %s failed login attempts for this account. It is temporarily blocked. Try again later or request a new password.', $flood_config->get('user_limit'));
        }
        else {
          $error_message = 'Too many failed login attempts from your IP address. This IP address is temporarily blocked.';
        }
        throw new AccessDeniedHttpException($error_message, NULL, Response::HTTP_TOO_MANY_REQUESTS);
      }
    }
  }
/**
* Gets the login identifier for user login flood control.
*
* @param \Symfony\Component\HttpFoundation\Request $request
* The current request.
* @param string $username
* The username supplied in login credentials.
*
* @return string
* The login identifier or if the user does not exist an empty string.
*/
protected function getLoginFloodIdentifier(Request $request, $username) {
$flood_config = $this->config('user.flood');
$accounts = $this->userStorage->loadByProperties(['name' => $username, 'status' => 1]);
if ($account = reset($accounts)) {
if ($flood_config->get('uid_only')) {
// Register flood events based on the uid only, so they apply for any
// IP address. This is the most secure option.
$identifier = $account->id();
}
else {
// The default identifier is a combination of uid and IP address. This
// is less secure but more resistant to denial-of-service attacks that
// could lock out all users with public user names.
$identifier = $account->id() . '-' . $request->getClientIp();
}
return $identifier;
}
return '';
}
}
| gpl-2.0 |
ninamanalo19/react-native-sliding-up-panel | node_modules/react-native/node_modules/babel-preset-fbjs/node_modules/babel-plugin-transform-es2015-for-of/node_modules/babel-runtime/core-js/math/hypot.js | 91 | module.exports = { "default": require("core-js/library/fn/math/hypot"), __esModule: true }; | mit |
zdary/intellij-community | plugins/groovy/src/org/jetbrains/plugins/groovy/refactoring/classMembers/GrDependantMembersCollector.java | 1960 | /*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.refactoring.classMembers;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiMember;
import com.intellij.psi.PsiMethod;
import com.intellij.refactoring.classMembers.DependentMembersCollectorBase;
import org.jetbrains.plugins.groovy.lang.psi.GrReferenceElement;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMember;
/**
* @author Max Medvedev
*/
public class GrDependantMembersCollector extends DependentMembersCollectorBase<GrMember, PsiClass> {
  public GrDependantMembersCollector(PsiClass clazz, PsiClass superClass) {
    super(clazz, superClass);
  }

  /**
   * Visits every member reference inside {@code member} and records the
   * referenced class members that are not already available via the
   * target superclass.
   */
  @Override
  public void collect(GrMember member) {
    member.accept(new GrClassMemberReferenceVisitor(getClazz()) {
      @Override
      protected void visitClassMemberReferenceElement(GrMember classMember, GrReferenceElement ref) {
        if (existsInSuperClass(classMember)) {
          return;
        }
        myCollection.add(classMember);
      }
    });
  }

  /**
   * Returns true when the superclass already exposes a method with the same
   * signature, so the member does not need to be carried over.
   */
  private boolean existsInSuperClass(PsiMember classMember) {
    final PsiClass superClass = getSuperClass();
    if (superClass == null || !(classMember instanceof PsiMethod)) {
      return false;
    }
    return superClass.findMethodBySignature((PsiMethod)classMember, true) != null;
  }
}
| apache-2.0 |
xumaolin/moodle | lib/dml/oci_native_moodle_database.php | 75242 | <?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* Native oci class representing moodle database interface.
*
* @package core_dml
* @copyright 2008 Petr Skoda (http://skodak.org)
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
defined('MOODLE_INTERNAL') || die();
require_once(__DIR__.'/moodle_database.php');
require_once(__DIR__.'/oci_native_moodle_recordset.php');
require_once(__DIR__.'/oci_native_moodle_temptables.php');
/**
* Native oci class representing moodle database interface.
*
* One complete reference for PHP + OCI:
* http://www.oracle.com/technology/tech/php/underground-php-oracle-manual.html
*
* @package core_dml
* @copyright 2008 Petr Skoda (http://skodak.org)
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
class oci_native_moodle_database extends moodle_database {
protected $oci = null;
/** @var To store stmt errors and enable get_last_error() to detect them.*/
private $last_stmt_error = null;
/** @var Default value initialised in connect method, we need the driver to be present.*/
private $commit_status = null;
/** @var To handle oci driver default verbosity.*/
private $last_error_reporting;
/** @var To store unique_session_id. Needed for temp tables unique naming.*/
private $unique_session_id;
/**
* Detects if all needed PHP stuff installed.
* Note: can be used before connect()
* @return mixed true if ok, string if something
*/
public function driver_installed() {
if (!extension_loaded('oci8')) {
return get_string('ociextensionisnotpresentinphp', 'install');
}
return true;
}
/**
* Returns database family type - describes SQL dialect
* Note: can be used before connect()
* @return string db family name (mysql, postgres, mssql, oracle, etc.)
*/
public function get_dbfamily() {
return 'oracle';
}
/**
* Returns more specific database driver type
* Note: can be used before connect()
* @return string db type mysqli, pgsql, oci, mssql, sqlsrv
*/
protected function get_dbtype() {
return 'oci';
}
/**
* Returns general database library name
* Note: can be used before connect()
* @return string db type pdo, native
*/
protected function get_dblibrary() {
return 'native';
}
/**
* Returns localised database type name
* Note: can be used before connect()
* @return string
*/
public function get_name() {
return get_string('nativeoci', 'install');
}
/**
* Returns localised database configuration help.
* Note: can be used before connect()
* @return string
*/
public function get_configuration_help() {
return get_string('nativeocihelp', 'install');
}
/**
* Diagnose database and tables, this function is used
* to verify database and driver settings, db engine types, etc.
*
* @return string null means everything ok, string means problem found.
*/
public function diagnose() {
return null;
}
/**
* Connect to db
* Must be called before other methods.
* @param string $dbhost The database host.
* @param string $dbuser The database username.
* @param string $dbpass The database username's password.
* @param string $dbname The name of the database being connected to.
* @param mixed $prefix string means moodle db prefix, false used for external databases where prefix not used
* @param array $dboptions driver specific options
* @return bool true
* @throws dml_connection_exception if error
*/
    public function connect($dbhost, $dbuser, $dbpass, $dbname, $prefix, array $dboptions=null) {
        // Table prefix is mandatory for Moodle-managed (non-external) DBs.
        if ($prefix == '' and !$this->external) {
            //Enforce prefixes for everybody but mysql
            throw new dml_exception('prefixcannotbeempty', $this->get_dbfamily());
        }
        if (!$this->external and strlen($prefix) > 2) {
            //Max prefix length for Oracle is 2cc
            $a = (object)array('dbfamily'=>'oracle', 'maxlength'=>2);
            throw new dml_exception('prefixtoolong', $a);
        }
        $driverstatus = $this->driver_installed();
        if ($driverstatus !== true) {
            throw new dml_exception('dbdriverproblem', $driverstatus);
        }
        // Autocommit ON by default.
        // Switching to OFF (OCI_DEFAULT), when playing with transactions
        // please note this thing is not defined if oracle driver not present in PHP
        // which means it can not be used as default value of object property!
        $this->commit_status = OCI_COMMIT_ON_SUCCESS;
        $this->store_settings($dbhost, $dbuser, $dbpass, $dbname, $prefix, $dboptions);
        unset($this->dboptions['dbsocket']);
        // NOTE: use of ', ", / and \ is very problematic, even native oracle tools seem to have
        // problems with these, so just forget them and do not report problems into tracker...
        if (empty($this->dbhost)) {
            // old style full address (TNS)
            $dbstring = $this->dbname;
        } else {
            if (empty($this->dboptions['dbport'])) {
                $this->dboptions['dbport'] = 1521;
            }
            $dbstring = '//'.$this->dbhost.':'.$this->dboptions['dbport'].'/'.$this->dbname;
        }
        // Buffer any connect-time noise so it can be reported as the error.
        ob_start();
        if (empty($this->dboptions['dbpersist'])) {
            $this->oci = oci_new_connect($this->dbuser, $this->dbpass, $dbstring, 'AL32UTF8');
        } else {
            $this->oci = oci_pconnect($this->dbuser, $this->dbpass, $dbstring, 'AL32UTF8');
        }
        $dberr = ob_get_contents();
        ob_end_clean();
        if ($this->oci === false) {
            $this->oci = null;
            $e = oci_error();
            if (isset($e['message'])) {
                $dberr = $e['message'];
            }
            throw new dml_connection_exception($dberr);
        }
        // Make sure moodle package is installed - now required.
        if (!$this->oci_package_installed()) {
            try {
                $this->attempt_oci_package_install();
            } catch (Exception $e) {
                // Ignore problems, only the result counts,
                // admins have to fix it manually if necessary.
            }
            if (!$this->oci_package_installed()) {
                throw new dml_exception('dbdriverproblem', 'Oracle PL/SQL Moodle support package MOODLELIB is not installed! Database administrator has to execute /lib/dml/oci_native_moodle_package.sql script.');
            }
        }
        // get unique session id, to be used later for temp tables stuff
        $sql = 'SELECT DBMS_SESSION.UNIQUE_SESSION_ID() FROM DUAL';
        $this->query_start($sql, null, SQL_QUERY_AUX);
        $stmt = $this->parse_query($sql);
        $result = oci_execute($stmt, $this->commit_status);
        $this->query_end($result, $stmt);
        $records = null;
        oci_fetch_all($stmt, $records, 0, -1, OCI_FETCHSTATEMENT_BY_ROW);
        oci_free_statement($stmt);
        $this->unique_session_id = reset($records[0]);
        //note: do not send "ALTER SESSION SET NLS_NUMERIC_CHARACTERS='.,'" !
        //      instead fix our PHP code to convert "," to "." properly!
        // Connection stabilised and configured, going to instantiate the temptables controller
        $this->temptables = new oci_native_moodle_temptables($this, $this->unique_session_id);
        return true;
    }
/**
* Close database connection and release all resources
* and memory (especially circular memory references).
* Do NOT use connect() again, create a new instance if needed.
*/
public function dispose() {
parent::dispose(); // Call parent dispose to write/close session and other common stuff before closing connection
if ($this->oci) {
oci_close($this->oci);
$this->oci = null;
}
}
/**
* Called before each db query.
* @param string $sql
* @param array array of parameters
* @param int $type type of query
* @param mixed $extrainfo driver specific extra information
* @return void
*/
    protected function query_start($sql, array $params=null, $type, $extrainfo=null) {
        parent::query_start($sql, $params, $type, $extrainfo);
        // oci driver tends to send debug to output, we do not need that ;-)
        // Remember the previous level so query_end() can restore it.
        $this->last_error_reporting = error_reporting(0);
    }
/**
* Called immediately after each db query.
* @param mixed db specific result
* @return void
*/
    protected function query_end($result, $stmt=null) {
        // reset original debug level (silenced in query_start())
        error_reporting($this->last_error_reporting);
        if ($stmt and $result === false) {
            // Look for stmt error and store it for get_last_error()
            if (is_resource($stmt)) {
                $e = oci_error($stmt);
                if ($e !== false) {
                    $this->last_stmt_error = $e['message'];
                }
            }
            // Free the failed statement here; callers free it on success.
            oci_free_statement($stmt);
        }
        parent::query_end($result);
    }
/**
* Returns database server info array
* @return array Array containing 'description' and 'version' info
*/
    public function get_server_info() {
        static $info = null; // TODO: move to real object property
        if (is_null($info)) {
            $this->query_start("--oci_server_version()", null, SQL_QUERY_AUX);
            $description = oci_server_version($this->oci);
            $this->query_end(true);
            // Pull the dotted version number out of the banner text.
            preg_match('/(\d+\.)+\d+/', $description, $matches);
            $info = array('description'=>$description, 'version'=>$matches[0]);
        }
        return $info;
    }
/**
* Converts short table name {tablename} to real table name
* supporting temp tables ($this->unique_session_id based) if detected
*
* @param string sql
* @return string sql
*/
protected function fix_table_names($sql) {
if (preg_match_all('/\{([a-z][a-z0-9_]*)\}/', $sql, $matches)) {
foreach($matches[0] as $key=>$match) {
$name = $matches[1][$key];
if ($this->temptables && $this->temptables->is_temptable($name)) {
$sql = str_replace($match, $this->temptables->get_correct_name($name), $sql);
} else {
$sql = str_replace($match, $this->prefix.$name, $sql);
}
}
}
return $sql;
}
/**
* Returns supported query parameter types
* @return int bitmask of accepted SQL_PARAMS_*
*/
protected function allowed_param_types() {
return SQL_PARAMS_NAMED;
}
/**
* Returns last error reported by database engine.
* @return string error message
*/
public function get_last_error() {
$error = false;
// First look for any previously saved stmt error
if (!empty($this->last_stmt_error)) {
$error = $this->last_stmt_error;
$this->last_stmt_error = null;
} else { // Now try connection error
$e = oci_error($this->oci);
if ($e !== false) {
$error = $e['message'];
}
}
return $error;
}
/**
* Prepare the statement for execution
* @throws dml_connection_exception
* @param string $sql
* @return resource
*/
protected function parse_query($sql) {
$stmt = oci_parse($this->oci, $sql);
if ($stmt == false) {
throw new dml_connection_exception('Can not parse sql query'); //TODO: maybe add better info
}
return $stmt;
}
/**
* Make sure there are no reserved words in param names...
* @param string $sql
* @param array $params
* @return array ($sql, $params) updated query and parameters
*/
    protected function tweak_param_names($sql, array $params) {
        if (empty($params)) {
            return array($sql, $params);
        }
        $newparams = array();
        $searcharr = array(); // search => replace pairs
        foreach ($params as $name => $value) {
            // Keep the name within the 30 chars limit always (prefixing/replacing)
            // Prefix 'o_' so param names can never collide with reserved words.
            if (strlen($name) <= 28) {
                $newname = 'o_' . $name;
            } else {
                // Too long to prefix: replace the first two chars instead.
                $newname = 'o_' . substr($name, 2);
            }
            $newparams[$newname] = $value;
            $searcharr[':' . $name] = ':' . $newname;
        }
        // sort by length desc to avoid potential str_replace() overlap
        // (a short name that is a prefix of a longer one must go last)
        uksort($searcharr, array('oci_native_moodle_database', 'compare_by_length_desc'));
        $sql = str_replace(array_keys($searcharr), $searcharr, $sql);
        return array($sql, $newparams);
    }
/**
* Return tables in database WITHOUT current prefix
* @param bool $usecache if true, returns list of cached tables.
* @return array of table names in lowercase and without prefix
*/
    public function get_tables($usecache=true) {
        if ($usecache and $this->tables !== null) {
            return $this->tables;
        }
        $this->tables = array();
        // Escape '_' in the prefix so LIKE treats it literally.
        $prefix = str_replace('_', "\\_", strtoupper($this->prefix));
        // 'BIN$%' names are Oracle recycle-bin leftovers, skip them.
        $sql = "SELECT TABLE_NAME
                  FROM CAT
                 WHERE TABLE_TYPE='TABLE'
                       AND TABLE_NAME NOT LIKE 'BIN\$%'
                       AND TABLE_NAME LIKE '$prefix%' ESCAPE '\\'";
        $this->query_start($sql, null, SQL_QUERY_AUX);
        $stmt = $this->parse_query($sql);
        $result = oci_execute($stmt, $this->commit_status);
        $this->query_end($result, $stmt);
        $records = null;
        oci_fetch_all($stmt, $records, 0, -1, OCI_ASSOC);
        oci_free_statement($stmt);
        $records = array_map('strtolower', $records['TABLE_NAME']);
        foreach ($records as $tablename) {
            // Strip the prefix so callers see short table names.
            if ($this->prefix !== false && $this->prefix !== '') {
                if (strpos($tablename, $this->prefix) !== 0) {
                    continue;
                }
                $tablename = substr($tablename, strlen($this->prefix));
            }
            $this->tables[$tablename] = $tablename;
        }
        // Add the currently available temptables
        $this->tables = array_merge($this->tables, $this->temptables->get_temptables());
        return $this->tables;
    }
/**
* Return table indexes - everything lowercased.
* @param string $table The table we want to get indexes from.
* @return array An associative array of indexes containing 'unique' flag and 'columns' being indexed
*/
public function get_indexes($table) {
$indexes = array();
$tablename = strtoupper($this->prefix.$table);
$sql = "SELECT i.INDEX_NAME, i.UNIQUENESS, c.COLUMN_POSITION, c.COLUMN_NAME, ac.CONSTRAINT_TYPE
FROM ALL_INDEXES i
JOIN ALL_IND_COLUMNS c ON c.INDEX_NAME=i.INDEX_NAME
LEFT JOIN ALL_CONSTRAINTS ac ON (ac.TABLE_NAME=i.TABLE_NAME AND ac.CONSTRAINT_NAME=i.INDEX_NAME AND ac.CONSTRAINT_TYPE='P')
WHERE i.TABLE_NAME = '$tablename'
ORDER BY i.INDEX_NAME, c.COLUMN_POSITION";
$stmt = $this->parse_query($sql);
$result = oci_execute($stmt, $this->commit_status);
$this->query_end($result, $stmt);
$records = null;
oci_fetch_all($stmt, $records, 0, -1, OCI_FETCHSTATEMENT_BY_ROW);
oci_free_statement($stmt);
foreach ($records as $record) {
if ($record['CONSTRAINT_TYPE'] === 'P') {
//ignore for now;
continue;
}
$indexname = strtolower($record['INDEX_NAME']);
if (!isset($indexes[$indexname])) {
$indexes[$indexname] = array('primary' => ($record['CONSTRAINT_TYPE'] === 'P'),
'unique' => ($record['UNIQUENESS'] === 'UNIQUE'),
'columns' => array());
}
$indexes[$indexname]['columns'][] = strtolower($record['COLUMN_NAME']);
}
return $indexes;
}
/**
* Returns detailed information about columns in table. This information is cached internally.
* @param string $table name
* @param bool $usecache
* @return array array of database_column_info objects indexed with column names
*/
    public function get_columns($table, $usecache=true) {
        if ($usecache) {
            // Cache key is per-dbfamily/per-settings so unrelated installs do
            // not share column metadata.
            $properties = array('dbfamily' => $this->get_dbfamily(), 'settings' => $this->get_settings_hash());
            $cache = cache::make('core', 'databasemeta', $properties);
            if ($data = $cache->get($table)) {
                return $data;
            }
        }
        if (!$table) { // table not specified, return empty array directly
            return array();
        }
        $structure = array();
        // We give precedence to CHAR_LENGTH for VARCHAR2 columns over WIDTH because the former is always
        // BYTE based and, for cross-db operations, we want CHAR based results. See MDL-29415
        // Instead of guessing sequence based exclusively on name, check tables against user_triggers to
        // ensure the table has a 'before each row' trigger to assume 'id' is auto_increment. MDL-32365
        $sql = "SELECT CNAME, COLTYPE, nvl(CHAR_LENGTH, WIDTH) AS WIDTH, SCALE, PRECISION, NULLS, DEFAULTVAL,
                  DECODE(NVL(TRIGGER_NAME, '0'), '0', '0', '1') HASTRIGGER
                  FROM COL c
             LEFT JOIN USER_TAB_COLUMNS u ON (u.TABLE_NAME = c.TNAME AND u.COLUMN_NAME = c.CNAME AND u.DATA_TYPE = 'VARCHAR2')
             LEFT JOIN USER_TRIGGERS t ON (t.TABLE_NAME = c.TNAME AND TRIGGER_TYPE = 'BEFORE EACH ROW' AND c.CNAME = 'ID')
                 WHERE TNAME = UPPER('{" . $table . "}')
              ORDER BY COLNO";
        list($sql, $params, $type) = $this->fix_sql_params($sql, null);
        $this->query_start($sql, null, SQL_QUERY_AUX);
        $stmt = $this->parse_query($sql);
        $result = oci_execute($stmt, $this->commit_status);
        $this->query_end($result, $stmt);
        $records = null;
        oci_fetch_all($stmt, $records, 0, -1, OCI_FETCHSTATEMENT_BY_ROW);
        oci_free_statement($stmt);
        if (!$records) {
            return array();
        }
        foreach ($records as $rawcolumn) {
            $rawcolumn = (object)$rawcolumn;
            $info = new stdClass();
            $info->name = strtolower($rawcolumn->CNAME);
            $info->auto_increment = ((int)$rawcolumn->HASTRIGGER) ? true : false;
            $matches = null;
            // --- Character types ---
            if ($rawcolumn->COLTYPE === 'VARCHAR2'
             or $rawcolumn->COLTYPE === 'VARCHAR'
             or $rawcolumn->COLTYPE === 'NVARCHAR2'
             or $rawcolumn->COLTYPE === 'NVARCHAR'
             or $rawcolumn->COLTYPE === 'CHAR'
             or $rawcolumn->COLTYPE === 'NCHAR') {
                $info->type          = $rawcolumn->COLTYPE;
                $info->meta_type     = 'C';
                $info->max_length    = $rawcolumn->WIDTH;
                $info->scale         = null;
                $info->not_null      = ($rawcolumn->NULLS === 'NOT NULL');
                $info->has_default   = !is_null($rawcolumn->DEFAULTVAL);
                if ($info->has_default) {
                    // this is hacky :-(
                    // Oracle stores defaults as raw text, sometimes padded.
                    if ($rawcolumn->DEFAULTVAL === 'NULL') {
                        $info->default_value = null;
                    } else if ($rawcolumn->DEFAULTVAL === "' ' ") { // Sometimes it's stored with trailing space
                        $info->default_value = "";
                    } else if ($rawcolumn->DEFAULTVAL === "' '") { // Sometimes it's stored without trailing space
                        $info->default_value = "";
                    } else {
                        $info->default_value = trim($rawcolumn->DEFAULTVAL); // remove trailing space
                        $info->default_value = substr($info->default_value, 1, strlen($info->default_value)-2); //trim ''
                    }
                } else {
                    $info->default_value = null;
                }
                $info->primary_key   = false;
                $info->binary        = false;
                $info->unsigned      = null;
                $info->unique        = null;
            // --- Numeric types ---
            } else if ($rawcolumn->COLTYPE === 'NUMBER') {
                $info->type       = $rawcolumn->COLTYPE;
                $info->max_length = $rawcolumn->PRECISION;
                $info->binary     = false;
                if (!is_null($rawcolumn->SCALE) && $rawcolumn->SCALE == 0) { // null in oracle scale allows decimals => not integer
                    // integer
                    if ($info->name === 'id') {
                        $info->primary_key   = true;
                        $info->meta_type     = 'R';
                        $info->unique        = true;
                        $info->has_default   = false;
                    } else {
                        $info->primary_key   = false;
                        $info->meta_type     = 'I';
                        $info->unique        = null;
                    }
                    $info->scale = 0;
                } else {
                    //float
                    $info->meta_type     = 'N';
                    $info->primary_key   = false;
                    $info->unsigned      = null;
                    $info->unique        = null;
                    $info->scale         = $rawcolumn->SCALE;
                }
                $info->not_null      = ($rawcolumn->NULLS === 'NOT NULL');
                $info->has_default   = !is_null($rawcolumn->DEFAULTVAL);
                if ($info->has_default) {
                    $info->default_value = trim($rawcolumn->DEFAULTVAL); // remove trailing space
                } else {
                    $info->default_value = null;
                }
            } else if ($rawcolumn->COLTYPE === 'FLOAT') {
                $info->type       = $rawcolumn->COLTYPE;
                // Convert binary digits of precision to decimal digits.
                $info->max_length = (int)($rawcolumn->PRECISION * 3.32193);
                $info->primary_key   = false;
                $info->meta_type     = 'N';
                $info->unique        = null;
                $info->not_null      = ($rawcolumn->NULLS === 'NOT NULL');
                $info->has_default   = !is_null($rawcolumn->DEFAULTVAL);
                if ($info->has_default) {
                    $info->default_value = trim($rawcolumn->DEFAULTVAL); // remove trailing space
                } else {
                    $info->default_value = null;
                }
            // --- Large character objects ---
            } else if ($rawcolumn->COLTYPE === 'CLOB'
                    or $rawcolumn->COLTYPE === 'NCLOB') {
                $info->type          = $rawcolumn->COLTYPE;
                $info->meta_type     = 'X';
                $info->max_length    = -1;
                $info->scale         = null;
                $info->scale         = null;
                $info->not_null      = ($rawcolumn->NULLS === 'NOT NULL');
                $info->has_default   = !is_null($rawcolumn->DEFAULTVAL);
                if ($info->has_default) {
                    // this is hacky :-(
                    if ($rawcolumn->DEFAULTVAL === 'NULL') {
                        $info->default_value = null;
                    } else if ($rawcolumn->DEFAULTVAL === "' ' ") { // Sometimes it's stored with trailing space
                        $info->default_value = "";
                    } else if ($rawcolumn->DEFAULTVAL === "' '") { // Other times it's stored without trailing space
                        $info->default_value = "";
                    } else {
                        $info->default_value = trim($rawcolumn->DEFAULTVAL); // remove trailing space
                        $info->default_value = substr($info->default_value, 1, strlen($info->default_value)-2); //trim ''
                    }
                } else {
                    $info->default_value = null;
                }
                $info->primary_key   = false;
                $info->binary        = false;
                $info->unsigned      = null;
                $info->unique        = null;
            // --- Binary large objects ---
            } else if ($rawcolumn->COLTYPE === 'BLOB') {
                $info->type          = $rawcolumn->COLTYPE;
                $info->meta_type     = 'B';
                $info->max_length    = -1;
                $info->scale         = null;
                $info->scale         = null;
                $info->not_null      = ($rawcolumn->NULLS === 'NOT NULL');
                $info->has_default   = !is_null($rawcolumn->DEFAULTVAL);
                if ($info->has_default) {
                    // this is hacky :-(
                    if ($rawcolumn->DEFAULTVAL === 'NULL') {
                        $info->default_value = null;
                    } else if ($rawcolumn->DEFAULTVAL === "' ' ") { // Sometimes it's stored with trailing space
                        $info->default_value = "";
                    } else if ($rawcolumn->DEFAULTVAL === "' '") { // Sometimes it's stored without trailing space
                        $info->default_value = "";
                    } else {
                        $info->default_value = trim($rawcolumn->DEFAULTVAL); // remove trailing space
                        $info->default_value = substr($info->default_value, 1, strlen($info->default_value)-2); //trim ''
                    }
                } else {
                    $info->default_value = null;
                }
                $info->primary_key   = false;
                $info->binary        = true;
                $info->unsigned      = null;
                $info->unique        = null;
            } else {
                // unknown type - sorry
                $info->type          = $rawcolumn->COLTYPE;
                $info->meta_type     = '?';
            }
            $structure[$info->name] = new database_column_info($info);
        }
        if ($usecache) {
            $cache->set($table, $structure);
        }
        return $structure;
    }
/**
* Normalise values based in RDBMS dependencies (booleans, LOBs...)
*
* @param database_column_info $column column metadata corresponding with the value we are going to normalise
* @param mixed $value value we are going to normalise
* @return mixed the normalised value
*/
    protected function normalise_value($column, $value) {
        $this->detect_objects($value);
        if (is_bool($value)) { // Always, convert boolean to int
            $value = (int)$value;
        } else if ($column->meta_type == 'B') { // BLOB detected, we return 'blob' array instead of raw value to allow
            if (!is_null($value)) {             // binding/executing code later to know about its nature
                $value = array('blob' => $value);
            }
        } else if ($column->meta_type == 'X' && strlen($value) > 4000) { // CLOB detected (>4000 optimisation), we return 'clob'
            if (!is_null($value)) {                                     // array instead of raw value to allow binding/
                $value = array('clob' => (string)$value);               // executing code later to know about its nature
            }
        } else if ($value === '') {
            // Oracle treats '' as NULL, so empty strings in numeric columns
            // must become 0 to keep NOT NULL semantics working.
            if ($column->meta_type == 'I' or $column->meta_type == 'F' or $column->meta_type == 'N') {
                $value = 0; // prevent '' problems in numeric fields
            }
        }
        return $value;
    }
/**
* Transforms the sql and params in order to emulate the LIMIT clause available in other DBs
*
* @param string $sql the SQL select query to execute.
* @param array $params array of sql parameters
* @param int $limitfrom return a subset of records, starting at this point (optional, required if $limitnum is set).
* @param int $limitnum return a subset comprising this many records (optional, required if $limitfrom is set).
* @return array with the transformed sql and params updated
*/
    private function get_limit_sql($sql, array $params = null, $limitfrom=0, $limitnum=0) {
        list($limitfrom, $limitnum) = $this->normalise_limit_from_num($limitfrom, $limitnum);
        // TODO: Add the /*+ FIRST_ROWS */ hint if there isn't another hint
        // Oracle (pre-12c) has no LIMIT/OFFSET, so emulate it with nested
        // subqueries over the ROWNUM pseudocolumn.
        if ($limitfrom and $limitnum) {
            // Both offset and count: inner query caps total rows, outer
            // query skips the first $limitfrom of them.
            $sql = "SELECT oracle_o.*
                      FROM (SELECT oracle_i.*, rownum AS oracle_rownum
                              FROM ($sql) oracle_i
                             WHERE rownum <= :oracle_num_rows
                            ) oracle_o
                     WHERE oracle_rownum > :oracle_skip_rows";
            $params['oracle_num_rows'] = $limitfrom + $limitnum;
            $params['oracle_skip_rows'] = $limitfrom;
        } else if ($limitfrom and !$limitnum) {
            // Offset only: materialise ROWNUM, then skip the first rows.
            $sql = "SELECT oracle_o.*
                      FROM (SELECT oracle_i.*, rownum AS oracle_rownum
                              FROM ($sql) oracle_i
                            ) oracle_o
                     WHERE oracle_rownum > :oracle_skip_rows";
            $params['oracle_skip_rows'] = $limitfrom;
        } else if (!$limitfrom and $limitnum) {
            // Count only: a single ROWNUM filter is enough.
            $sql = "SELECT *
                      FROM ($sql)
                     WHERE rownum <= :oracle_num_rows";
            $params['oracle_num_rows'] = $limitnum;
        }
        return array($sql, $params);
    }
/**
* This function will handle all the column values before being inserted/updated to DB for Oracle
* installations. This is because the "special feature" of Oracle where the empty string is
* equal to NULL and this presents a problem with all our currently NOT NULL default '' fields.
* (and with empties handling in general)
*
* Note that this function is 100% private and should be used, exclusively by DML functions
* in this file. Also, this is considered a DIRTY HACK to be removed when possible.
*
* This function is private and must not be used outside this driver at all
*
* @param $table string the table where the record is going to be inserted/updated (without prefix)
* @param $field string the field where the record is going to be inserted/updated
* @param $value mixed the value to be inserted/updated
*/
private function oracle_dirty_hack ($table, $field, $value) {
// General bound parameter, just hack the spaces and pray it will work.
if (!$table) {
if ($value === '') {
return ' ';
} else if (is_bool($value)) {
return (int)$value;
} else {
return $value;
}
}
// Get metadata
$columns = $this->get_columns($table);
if (!isset($columns[$field])) {
if ($value === '') {
return ' ';
} else if (is_bool($value)) {
return (int)$value;
} else {
return $value;
}
}
$column = $columns[$field];
// !! This paragraph explains behaviour before Moodle 2.0:
//
// For Oracle DB, empty strings are converted to NULLs in DB
// and this breaks a lot of NOT NULL columns currently Moodle. In the future it's
// planned to move some of them to NULL, if they must accept empty values and this
// piece of code will become less and less used. But, for now, we need it.
// What we are going to do is to examine all the data being inserted and if it's
// an empty string (NULL for Oracle) and the field is defined as NOT NULL, we'll modify
// such data in the best form possible ("0" for booleans and numbers and " " for the
// rest of strings. It isn't optimal, but the only way to do so.
// In the opposite, when retrieving records from Oracle, we'll decode " " back to
// empty strings to allow everything to work properly. DIRTY HACK.
// !! These paragraphs explain the rationale about the change for Moodle 2.5:
//
// Before Moodle 2.0, we only used to apply this DIRTY HACK to NOT NULL columns, as
// stated above, but it causes one problem in NULL columns where both empty strings
// and real NULLs are stored as NULLs, being impossible to differentiate them when
// being retrieved from DB.
//
// So, starting with Moodle 2.0, we are going to apply the DIRTY HACK to all the
// CHAR/CLOB columns no matter of their nullability. That way, when retrieving
// NULLABLE fields we'll get proper empties and NULLs differentiated, so we'll be able
// to rely in NULL/empty/content contents without problems, until now that wasn't
// possible at all.
//
// One space DIRTY HACK is now applied automatically for all query parameters
// and results. The only problem is string concatenation where the glue must
// be specified as "' '" sql fragment.
//
// !! Conclusions:
//
// From Moodle 2.5 onwards, ALL empty strings in Oracle DBs will be stored as
// 1-whitespace char, ALL NULLs as NULLs and, obviously, content as content. And
// those 1-whitespace chars will be converted back to empty strings by all the
// get_field/record/set() functions transparently and any SQL needing direct handling
// of empties will have to use placeholders or sql_isempty() helper function.
// If the field isn't VARCHAR or CLOB, skip
if ($column->meta_type != 'C' and $column->meta_type != 'X') {
return $value;
}
// If the value isn't empty, skip
if (!empty($value)) {
return $value;
}
// Now, we have one empty value, going to be inserted to one VARCHAR2 or CLOB field
// Try to get the best value to be inserted
// The '0' string doesn't need any transformation, skip
if ($value === '0') {
return $value;
}
// Transformations start
if (gettype($value) == 'boolean') {
return '0'; // Transform false to '0' that evaluates the same for PHP
} else if (gettype($value) == 'integer') {
return '0'; // Transform 0 to '0' that evaluates the same for PHP
} else if ($value === '') {
return ' '; // Transform '' to ' ' that DON'T EVALUATE THE SAME
// (we'll transform back again on get_records_XXX functions and others)!!
}
// Fail safe to original value
return $value;
}
/**
* Helper function to order by string length desc
*
* @param $a string first element to compare
* @param $b string second element to compare
     * @return int negative if $a goes first (is longer), positive if $b goes first, 0 if order doesn't matter
*/
private function compare_by_length_desc($a, $b) {
return strlen($b) - strlen($a);
}
/**
* Is db in unicode mode?
* @return bool
*/
    public function setup_is_unicodedb() {
        // Ask the server for its database character set; only AL32UTF8
        // (Oracle's full UTF-8 implementation) is accepted as unicode here.
        $sql = "SELECT VALUE
                  FROM NLS_DATABASE_PARAMETERS
                 WHERE PARAMETER = 'NLS_CHARACTERSET'";
        $this->query_start($sql, null, SQL_QUERY_AUX);
        $stmt = $this->parse_query($sql);
        $result = oci_execute($stmt, $this->commit_status);
        $this->query_end($result, $stmt);
        $records = null;
        // Fetch by column: $records['VALUE'] holds the parameter value(s).
        oci_fetch_all($stmt, $records, 0, -1, OCI_FETCHSTATEMENT_BY_COLUMN);
        oci_free_statement($stmt);
        return (isset($records['VALUE'][0]) and $records['VALUE'][0] === 'AL32UTF8');
    }
/**
* Do NOT use in code, to be used by database_manager only!
* @param string|array $sql query
* @return bool true
* @throws ddl_change_structure_exception A DDL specific exception is thrown for any errors.
*/
    public function change_database_structure($sql) {
        $this->get_manager(); // Includes DDL exceptions classes ;-)
        // Accept a single statement or an array of statements.
        $sqls = (array)$sql;
        try {
            foreach ($sqls as $sql) {
                $this->query_start($sql, null, SQL_QUERY_STRUCTURE);
                $stmt = $this->parse_query($sql);
                $result = oci_execute($stmt, $this->commit_status);
                $this->query_end($result, $stmt);
                oci_free_statement($stmt);
            }
        } catch (ddl_change_structure_exception $e) {
            // The structure may have changed partially, so caches are stale
            // either way: reset before rethrowing.
            $this->reset_caches();
            throw $e;
        }
        $this->reset_caches();
        return true;
    }
    /**
     * Binds all the $params to the given parsed statement, applying LOB
     * descriptors and the Oracle dirty hack where needed.
     *
     * @param resource $stmt OCI statement (already parsed).
     * @param array $params query parameters, keyed by placeholder name.
     * @param string $tablename table (without prefix) used to look up column
     *               metadata, or null when no metadata is available.
     * @return array LOB descriptors that must be freed after execution.
     */
    protected function bind_params($stmt, array $params=null, $tablename=null) {
        $descriptors = array();
        if ($params) {
            $columns = array();
            if ($tablename) {
                $columns = $this->get_columns($tablename);
            }
            foreach($params as $key => $value) {
                // Decouple column name and param name as far as sometimes they aren't the same
                if ($key == 'o_newfieldtoset') { // found case where column and key diverge, handle that
                    $columnname = key($value); // columnname is the key of the array
                    $params[$key] = $value[$columnname]; // set the proper value in the $params array and
                    $value = $value[$columnname]; // set the proper value in the $value variable
                } else {
                    $columnname = preg_replace('/^o_/', '', $key); // Default columnname (for DB introspecting is key), but...
                }
                // Continue processing
                // Now, handle already detected LOBs
                if (is_array($value)) { // Let's go to bind special cases (lob descriptors)
                    if (isset($value['clob'])) {
                        $lob = oci_new_descriptor($this->oci, OCI_DTYPE_LOB);
                        oci_bind_by_name($stmt, $key, $lob, -1, SQLT_CLOB);
                        $lob->writeTemporary($this->oracle_dirty_hack($tablename, $columnname, $params[$key]['clob']), OCI_TEMP_CLOB);
                        $descriptors[] = $lob;
                        continue; // Column binding finished, go to next one
                    } else if (isset($value['blob'])) {
                        $lob = oci_new_descriptor($this->oci, OCI_DTYPE_LOB);
                        oci_bind_by_name($stmt, $key, $lob, -1, SQLT_BLOB);
                        $lob->writeTemporary($params[$key]['blob'], OCI_TEMP_BLOB);
                        $descriptors[] = $lob;
                        continue; // Column binding finished, go to next one
                    }
                } else {
                    // If, at this point, the param value > 4000 (bytes), let's assume it's a clob
                    // passed in an arbitrary sql (not processed by normalise_value() ever,
                    // and let's handle it as such. This will provide proper binding of CLOBs in
                    // conditions and other raw SQLs not covered by the above function.
                    if (strlen($value) > 4000) {
                        $lob = oci_new_descriptor($this->oci, OCI_DTYPE_LOB);
                        oci_bind_by_name($stmt, $key, $lob, -1, SQLT_CLOB);
                        $lob->writeTemporary($this->oracle_dirty_hack($tablename, $columnname, $params[$key]), OCI_TEMP_CLOB);
                        $descriptors[] = $lob;
                        continue; // Param binding finished, go to next one.
                    }
                }
                // TODO: Put proper types and length is possible (enormous speedup)
                // Arrived here, continue with standard processing, using metadata if possible
                if (isset($columns[$columnname])) {
                    $type = $columns[$columnname]->meta_type;
                    $maxlength = $columns[$columnname]->max_length;
                } else {
                    $type = '?';
                    $maxlength = -1;
                }
                // Bind by metadata type; note oci_bind_by_name() takes the
                // value by reference, hence binding $params[$key] directly.
                switch ($type) {
                    case 'I':
                    case 'R':
                        // TODO: Optimise
                        oci_bind_by_name($stmt, $key, $params[$key]);
                        break;
                    case 'N':
                    case 'F':
                        // TODO: Optimise
                        oci_bind_by_name($stmt, $key, $params[$key]);
                        break;
                    case 'B':
                        // TODO: Only arrive here if BLOB is null: Bind if so, else exception!
                        // don't break here
                    case 'X':
                        // TODO: Only arrive here if CLOB is null or <= 4000 cc, else exception
                        // don't break here
                    default: // Bind as CHAR (applying dirty hack)
                        // TODO: Optimise
                        $params[$key] = $this->oracle_dirty_hack($tablename, $columnname, $params[$key]);
                        oci_bind_by_name($stmt, $key, $params[$key]);
                }
            }
        }
        return $descriptors;
    }
protected function free_descriptors($descriptors) {
foreach ($descriptors as $descriptor) {
oci_free_descriptor($descriptor);
}
}
/**
* This function is used to convert all the Oracle 1-space defaults to the empty string
* like a really DIRTY HACK to allow it to work better until all those NOT NULL DEFAULT ''
* fields will be out from Moodle.
     * @param string $item the string to be converted to '' (empty string) if it's ' ' (one space)
     * @param mixed $key the key of the array in case we are using this function from array_walk,
     *                   defaults to null for other (direct) uses
* @return boolean always true (the converted variable is returned by reference)
*/
public static function onespace2empty(&$item, $key=null) {
$item = ($item === ' ') ? '' : $item;
return true;
}
/**
* Execute general sql query. Should be used only when no other method suitable.
* Do NOT use this to make changes in db structure, use database_manager methods instead!
* @param string $sql query
* @param array $params query parameters
* @return bool true
* @throws dml_exception A DML specific exception is thrown for any errors.
*/
    public function execute($sql, array $params=null) {
        list($sql, $params, $type) = $this->fix_sql_params($sql, $params);
        // Reject multi-statement queries (also catches unbound parameters).
        if (strpos($sql, ';') !== false) {
            throw new coding_exception('moodle_database::execute() Multiple sql statements found or bound parameters not used properly in query!');
        }
        // Adjust placeholder names for oci binding (see tweak_param_names()).
        list($sql, $params) = $this->tweak_param_names($sql, $params);
        $this->query_start($sql, $params, SQL_QUERY_UPDATE);
        $stmt = $this->parse_query($sql);
        $this->bind_params($stmt, $params);
        $result = oci_execute($stmt, $this->commit_status);
        $this->query_end($result, $stmt);
        oci_free_statement($stmt);
        return true;
    }
/**
* Get a single database record as an object using a SQL statement.
*
* The SQL statement should normally only return one record.
* It is recommended to use get_records_sql() if more matches possible!
*
* @param string $sql The SQL string you wish to be executed, should normally only return one record.
* @param array $params array of sql parameters
* @param int $strictness IGNORE_MISSING means compatible mode, false returned if record not found, debug message if more found;
* IGNORE_MULTIPLE means return first, ignore multiple records found(not recommended);
* MUST_EXIST means throw exception if no record or multiple records found
* @return mixed a fieldset object containing the first matching record, false or exception if error not found depending on mode
* @throws dml_exception A DML specific exception is thrown for any errors.
*/
public function get_record_sql($sql, array $params=null, $strictness=IGNORE_MISSING) {
$strictness = (int)$strictness;
if ($strictness == IGNORE_MULTIPLE) {
// do not limit here - ORA does not like that
$rs = $this->get_recordset_sql($sql, $params);
$result = false;
foreach ($rs as $rec) {
$result = $rec;
break;
}
$rs->close();
return $result;
}
return parent::get_record_sql($sql, $params, $strictness);
}
/**
* Get a number of records as a moodle_recordset using a SQL statement.
*
* Since this method is a little less readable, use of it should be restricted to
* code where it's possible there might be large datasets being returned. For known
* small datasets use get_records_sql - it leads to simpler code.
*
* The return type is like:
* @see function get_recordset.
*
* @param string $sql the SQL select query to execute.
* @param array $params array of sql parameters
* @param int $limitfrom return a subset of records, starting at this point (optional, required if $limitnum is set).
* @param int $limitnum return a subset comprising this many records (optional, required if $limitfrom is set).
* @return moodle_recordset instance
* @throws dml_exception A DML specific exception is thrown for any errors.
*/
    public function get_recordset_sql($sql, array $params=null, $limitfrom=0, $limitnum=0) {
        list($sql, $params, $type) = $this->fix_sql_params($sql, $params);
        // Emulate LIMIT with rownum wrappers, then adjust placeholder names.
        list($rawsql, $params) = $this->get_limit_sql($sql, $params, $limitfrom, $limitnum);
        list($rawsql, $params) = $this->tweak_param_names($rawsql, $params);
        $this->query_start($rawsql, $params, SQL_QUERY_SELECT);
        $stmt = $this->parse_query($rawsql);
        $this->bind_params($stmt, $params);
        $result = oci_execute($stmt, $this->commit_status);
        $this->query_end($result, $stmt);
        // NOTE(review): the statement is intentionally not freed here — it is
        // handed over to the recordset (presumably freed on close; confirm in
        // oci_native_moodle_recordset).
        return $this->create_recordset($stmt);
    }
protected function create_recordset($stmt) {
return new oci_native_moodle_recordset($stmt);
}
/**
* Get a number of records as an array of objects using a SQL statement.
*
* Return value is like:
* @see function get_records.
*
* @param string $sql the SQL select query to execute. The first column of this SELECT statement
* must be a unique value (usually the 'id' field), as it will be used as the key of the
* returned array.
* @param array $params array of sql parameters
* @param int $limitfrom return a subset of records, starting at this point (optional, required if $limitnum is set).
* @param int $limitnum return a subset comprising this many records (optional, required if $limitfrom is set).
* @return array of objects, or empty array if no records were found
* @throws dml_exception A DML specific exception is thrown for any errors.
*/
    public function get_records_sql($sql, array $params=null, $limitfrom=0, $limitnum=0) {
        list($sql, $params, $type) = $this->fix_sql_params($sql, $params);
        // Emulate LIMIT with rownum wrappers, then adjust placeholder names.
        list($rawsql, $params) = $this->get_limit_sql($sql, $params, $limitfrom, $limitnum);
        list($rawsql, $params) = $this->tweak_param_names($rawsql, $params);
        $this->query_start($rawsql, $params, SQL_QUERY_SELECT);
        $stmt = $this->parse_query($rawsql);
        $this->bind_params($stmt, $params);
        $result = oci_execute($stmt, $this->commit_status);
        $this->query_end($result, $stmt);
        $records = null;
        oci_fetch_all($stmt, $records, 0, -1, OCI_FETCHSTATEMENT_BY_ROW);
        oci_free_statement($stmt);
        $return = array();
        foreach ($records as $row) {
            // Normalise column names to lowercase (Oracle reports them uppercase).
            $row = array_change_key_case($row, CASE_LOWER);
            // Drop the helper column added by the LIMIT emulation (if present).
            unset($row['oracle_rownum']);
            // Convert the 1-space dirty hack back to real empty strings.
            array_walk($row, array('oci_native_moodle_database', 'onespace2empty'));
            // First column is used as the array key, so it must be unique.
            $id = reset($row);
            if (isset($return[$id])) {
                $colname = key($row);
                debugging("Did you remember to make the first column something unique in your call to get_records? Duplicate value '$id' found in column '$colname'.", DEBUG_DEVELOPER);
            }
            $return[$id] = (object)$row;
        }
        return $return;
    }
/**
* Selects records and return values (first field) as an array using a SQL statement.
*
* @param string $sql The SQL query
* @param array $params array of sql parameters
* @return array of values
* @throws dml_exception A DML specific exception is thrown for any errors.
*/
    public function get_fieldset_sql($sql, array $params=null) {
        list($sql, $params, $type) = $this->fix_sql_params($sql, $params);
        list($sql, $params) = $this->tweak_param_names($sql, $params);
        $this->query_start($sql, $params, SQL_QUERY_SELECT);
        $stmt = $this->parse_query($sql);
        $this->bind_params($stmt, $params);
        $result = oci_execute($stmt, $this->commit_status);
        $this->query_end($result, $stmt);
        $records = null;
        // Fetch by column: $records ends up as array(COLNAME => array(values)).
        oci_fetch_all($stmt, $records, 0, -1, OCI_FETCHSTATEMENT_BY_COLUMN);
        oci_free_statement($stmt);
        // Keep only the first column's values.
        $return = reset($records);
        // Convert the 1-space dirty hack back to real empty strings.
        array_walk($return, array('oci_native_moodle_database', 'onespace2empty'));
        return $return;
    }
/**
* Insert new record into database, as fast as possible, no safety checks, lobs not supported.
* @param string $table name
* @param mixed $params data record as object or array
     * @param bool $returnid return id of inserted record
* @param bool $bulk true means repeated inserts expected
* @param bool $customsequence true if 'id' included in $params, disables $returnid
* @return bool|int true or new id
* @throws dml_exception A DML specific exception is thrown for any errors.
*/
public function insert_record_raw($table, $params, $returnid=true, $bulk=false, $customsequence=false) {
if (!is_array($params)) {
$params = (array)$params;
}
$returning = "";
if ($customsequence) {
if (!isset($params['id'])) {
throw new coding_exception('moodle_database::insert_record_raw() id field must be specified if custom sequences used.');
}
$returnid = false;
} else {
unset($params['id']);
if ($returnid) {
$returning = " RETURNING id INTO :oracle_id"; // crazy name nobody is ever going to use or parameter ;-)
}
}
if (empty($params)) {
throw new coding_exception('moodle_database::insert_record_raw() no fields found.');
}
$fields = implode(',', array_keys($params));
$values = array();
foreach ($params as $pname => $value) {
$values[] = ":$pname";
}
$values = implode(',', $values);
$sql = "INSERT INTO {" . $table . "} ($fields) VALUES ($values)";
list($sql, $params, $type) = $this->fix_sql_params($sql, $params);
$sql .= $returning;
$id = null;
// note we don't need tweak_param_names() here. Placeholders are safe column names. MDL-28080
// list($sql, $params) = $this->tweak_param_names($sql, $params);
$this->query_start($sql, $params, SQL_QUERY_INSERT);
$stmt = $this->parse_query($sql);
$descriptors = $this->bind_params($stmt, $params, $table);
if ($returning) {
oci_bind_by_name($stmt, ":oracle_id", $id, 10, SQLT_INT);
}
$result = oci_execute($stmt, $this->commit_status);
$this->free_descriptors($descriptors);
$this->query_end($result, $stmt);
oci_free_statement($stmt);
if (!$returnid) {
return true;
}
if (!$returning) {
die('TODO - implement oracle 9.2 insert support'); //TODO
}
return (int)$id;
}
/**
* Insert a record into a table and return the "id" field if required.
*
* Some conversions and safety checks are carried out. Lobs are supported.
* If the return ID isn't required, then this just reports success as true/false.
* $data is an object containing needed data
* @param string $table The database table to be inserted into
* @param object $data A data object with values for one or more fields in the record
* @param bool $returnid Should the id of the newly created record entry be returned? If this option is not requested then true/false is returned.
* @return bool|int true or new id
* @throws dml_exception A DML specific exception is thrown for any errors.
*/
public function insert_record($table, $dataobject, $returnid=true, $bulk=false) {
$dataobject = (array)$dataobject;
$columns = $this->get_columns($table);
if (empty($columns)) {
throw new dml_exception('ddltablenotexist', $table);
}
$cleaned = array();
foreach ($dataobject as $field=>$value) {
if ($field === 'id') {
continue;
}
if (!isset($columns[$field])) { // Non-existing table field, skip it
continue;
}
$column = $columns[$field];
$cleaned[$field] = $this->normalise_value($column, $value);
}
return $this->insert_record_raw($table, $cleaned, $returnid, $bulk);
}
/**
* Import a record into a table, id field is required.
* Safety checks are NOT carried out. Lobs are supported.
*
* @param string $table name of database table to be inserted into
* @param object $dataobject A data object with values for one or more fields in the record
* @return bool true
* @throws dml_exception A DML specific exception is thrown for any errors.
*/
public function import_record($table, $dataobject) {
$dataobject = (array)$dataobject;
$columns = $this->get_columns($table);
$cleaned = array();
foreach ($dataobject as $field=>$value) {
if (!isset($columns[$field])) {
continue;
}
$column = $columns[$field];
$cleaned[$field] = $this->normalise_value($column, $value);
}
return $this->insert_record_raw($table, $cleaned, false, true, true);
}
/**
* Update record in database, as fast as possible, no safety checks, lobs not supported.
* @param string $table name
* @param mixed $params data record as object or array
     * @param bool $bulk true means repeated updates expected
* @return bool true
* @throws dml_exception A DML specific exception is thrown for any errors.
*/
    public function update_record_raw($table, $params, $bulk=false) {
        $params = (array)$params;
        if (!isset($params['id'])) {
            throw new coding_exception('moodle_database::update_record_raw() id field must be specified.');
        }
        if (empty($params)) {
            throw new coding_exception('moodle_database::update_record_raw() no fields found.');
        }
        // Build the "field = :field" SET fragments; id is only used in WHERE.
        $sets = array();
        foreach ($params as $field=>$value) {
            if ($field == 'id') {
                continue;
            }
            $sets[] = "$field = :$field";
        }
        $sets = implode(',', $sets);
        $sql = "UPDATE {" . $table . "} SET $sets WHERE id=:id";
        list($sql, $params, $type) = $this->fix_sql_params($sql, $params);
        // note we don't need tweak_param_names() here. Placeholders are safe column names. MDL-28080
        // list($sql, $params) = $this->tweak_param_names($sql, $params);
        $this->query_start($sql, $params, SQL_QUERY_UPDATE);
        $stmt = $this->parse_query($sql);
        // Bind with table metadata so LOB values get proper descriptors.
        $descriptors = $this->bind_params($stmt, $params, $table);
        $result = oci_execute($stmt, $this->commit_status);
        $this->free_descriptors($descriptors);
        $this->query_end($result, $stmt);
        oci_free_statement($stmt);
        return true;
    }
/**
* Update a record in a table
*
* $dataobject is an object containing needed data
* Relies on $dataobject having a variable "id" to
* specify the record to update
*
* @param string $table The database table to be checked against.
* @param object $dataobject An object with contents equal to fieldname=>fieldvalue. Must have an entry for 'id' to map to the table specified.
     * @param bool $bulk true means repeated updates expected
* @return bool true
* @throws dml_exception A DML specific exception is thrown for any errors.
*/
public function update_record($table, $dataobject, $bulk=false) {
$dataobject = (array)$dataobject;
$columns = $this->get_columns($table);
$cleaned = array();
foreach ($dataobject as $field=>$value) {
if (!isset($columns[$field])) {
continue;
}
$column = $columns[$field];
$cleaned[$field] = $this->normalise_value($column, $value);
}
$this->update_record_raw($table, $cleaned, $bulk);
return true;
}
/**
* Set a single field in every table record which match a particular WHERE clause.
*
* @param string $table The database table to be checked against.
* @param string $newfield the field to set.
* @param string $newvalue the value to set the field to.
* @param string $select A fragment of SQL to be used in a where clause in the SQL call.
* @param array $params array of sql parameters
* @return bool true
* @throws dml_exception A DML specific exception is thrown for any errors.
*/
    public function set_field_select($table, $newfield, $newvalue, $select, array $params=null) {
        if ($select) {
            $select = "WHERE $select";
        }
        if (is_null($params)) {
            $params = array();
        }
        // Get column metadata
        $columns = $this->get_columns($table);
        $column = $columns[$newfield];
        // Normalise the new value (booleans, LOB wrapping...) using metadata.
        $newvalue = $this->normalise_value($column, $newvalue);
        list($select, $params, $type) = $this->fix_sql_params($select, $params);
        if (is_bool($newvalue)) {
            $newvalue = (int)$newvalue; // prevent "false" problems
        }
        if (is_null($newvalue)) {
            $newsql = "$newfield = NULL";
        } else {
            // Set the param to array ($newfield => $newvalue) and key to 'newfieldtoset'
            // name in the build sql. Later, bind_params() will detect the value array and
            // perform the needed modifications to allow the query to work. Note that
            // 'newfieldtoset' is one arbitrary name that hopefully won't be used ever
            // in order to avoid problems where the same field is used both in the set clause and in
            // the conditions. This was breaking badly in drivers using NAMED params like oci.
            $params['newfieldtoset'] = array($newfield => $newvalue);
            $newsql = "$newfield = :newfieldtoset";
        }
        $sql = "UPDATE {" . $table . "} SET $newsql $select";
        list($sql, $params, $type) = $this->fix_sql_params($sql, $params);
        list($sql, $params) = $this->tweak_param_names($sql, $params);
        $this->query_start($sql, $params, SQL_QUERY_UPDATE);
        $stmt = $this->parse_query($sql);
        // Bind with table metadata so LOB values get proper descriptors.
        $descriptors = $this->bind_params($stmt, $params, $table);
        $result = oci_execute($stmt, $this->commit_status);
        $this->free_descriptors($descriptors);
        $this->query_end($result, $stmt);
        oci_free_statement($stmt);
        return true;
    }
/**
* Delete one or more records from a table which match a particular WHERE clause.
*
* @param string $table The database table to be checked against.
* @param string $select A fragment of SQL to be used in a where clause in the SQL call (used to define the selection criteria).
* @param array $params array of sql parameters
* @return bool true
* @throws dml_exception A DML specific exception is thrown for any errors.
*/
    public function delete_records_select($table, $select, array $params=null) {
        if ($select) {
            $select = "WHERE $select";
        }
        $sql = "DELETE FROM {" . $table . "} $select";
        list($sql, $params, $type) = $this->fix_sql_params($sql, $params);
        // Adjust placeholder names for oci binding (see tweak_param_names()).
        list($sql, $params) = $this->tweak_param_names($sql, $params);
        $this->query_start($sql, $params, SQL_QUERY_UPDATE);
        $stmt = $this->parse_query($sql);
        $this->bind_params($stmt, $params);
        $result = oci_execute($stmt, $this->commit_status);
        $this->query_end($result, $stmt);
        oci_free_statement($stmt);
        return true;
    }
function sql_null_from_clause() {
return ' FROM dual';
}
public function sql_bitand($int1, $int2) {
return 'bitand((' . $int1 . '), (' . $int2 . '))';
}
public function sql_bitnot($int1) {
return '((0 - (' . $int1 . ')) - 1)';
}
public function sql_bitor($int1, $int2) {
return 'MOODLELIB.BITOR(' . $int1 . ', ' . $int2 . ')';
}
public function sql_bitxor($int1, $int2) {
return 'MOODLELIB.BITXOR(' . $int1 . ', ' . $int2 . ')';
}
/**
* Returns the SQL text to be used in order to perform module '%'
* operation - remainder after division
*
* @param integer int1 first integer in the operation
* @param integer int2 second integer in the operation
* @return string the piece of SQL code to be used in your statement.
*/
public function sql_modulo($int1, $int2) {
return 'MOD(' . $int1 . ', ' . $int2 . ')';
}
public function sql_cast_char2int($fieldname, $text=false) {
if (!$text) {
return ' CAST(' . $fieldname . ' AS INT) ';
} else {
return ' CAST(' . $this->sql_compare_text($fieldname) . ' AS INT) ';
}
}
public function sql_cast_char2real($fieldname, $text=false) {
if (!$text) {
return ' CAST(' . $fieldname . ' AS FLOAT) ';
} else {
return ' CAST(' . $this->sql_compare_text($fieldname) . ' AS FLOAT) ';
}
}
/**
* Returns 'LIKE' part of a query.
*
* @param string $fieldname usually name of the table column
* @param string $param usually bound query parameter (?, :named)
* @param bool $casesensitive use case sensitive search
     * @param bool $accentsensitive use accent sensitive search (not all databases support accent insensitive)
* @param bool $notlike true means "NOT LIKE"
* @param string $escapechar escape char for '%' and '_'
* @return string SQL code fragment
*/
public function sql_like($fieldname, $param, $casesensitive = true, $accentsensitive = true, $notlike = false, $escapechar = '\\') {
if (strpos($param, '%') !== false) {
debugging('Potential SQL injection detected, sql_like() expects bound parameters (? or :named)');
}
$LIKE = $notlike ? 'NOT LIKE' : 'LIKE';
// no accent sensitiveness here for now, sorry
if ($casesensitive) {
return "$fieldname $LIKE $param ESCAPE '$escapechar'";
} else {
return "LOWER($fieldname) $LIKE LOWER($param) ESCAPE '$escapechar'";
}
}
public function sql_concat() {
$arr = func_get_args();
if (empty($arr)) {
return " ' ' ";
}
foreach ($arr as $k => $v) {
if ($v === "' '") {
$arr[$k] = "'*OCISP*'"; // New mega hack.
}
}
$s = $this->recursive_concat($arr);
return " MOODLELIB.UNDO_MEGA_HACK($s) ";
}
public function sql_concat_join($separator="' '", $elements = array()) {
if ($separator === "' '") {
$separator = "'*OCISP*'"; // New mega hack.
}
foreach ($elements as $k => $v) {
if ($v === "' '") {
$elements[$k] = "'*OCISP*'"; // New mega hack.
}
}
for ($n = count($elements)-1; $n > 0 ; $n--) {
array_splice($elements, $n, 0, $separator);
}
if (empty($elements)) {
return " ' ' ";
}
$s = $this->recursive_concat($elements);
return " MOODLELIB.UNDO_MEGA_HACK($s) ";
}
/**
* Constructs 'IN()' or '=' sql fragment
*
* Method overriding {@link moodle_database::get_in_or_equal} to be able to get
* more than 1000 elements working, to avoid ORA-01795. We use a pivoting technique
* to be able to transform the params into virtual rows, so the original IN()
* expression gets transformed into a subquery. Once more, be noted that we shouldn't
* be using ever get_in_or_equal() with such number of parameters (proper subquery and/or
* chunking should be used instead).
*
* @param mixed $items A single value or array of values for the expression.
* @param int $type Parameter bounding type : SQL_PARAMS_QM or SQL_PARAMS_NAMED.
* @param string $prefix Named parameter placeholder prefix (a unique counter value is appended to each parameter name).
* @param bool $equal True means we want to equate to the constructed expression, false means we don't want to equate to it.
* @param mixed $onemptyitems This defines the behavior when the array of items provided is empty. Defaults to false,
* meaning throw exceptions. Other values will become part of the returned SQL fragment.
* @throws coding_exception | dml_exception
* @return array A list containing the constructed sql fragment and an array of parameters.
*/
public function get_in_or_equal($items, $type=SQL_PARAMS_QM, $prefix='param', $equal=true, $onemptyitems=false) {
list($sql, $params) = parent::get_in_or_equal($items, $type, $prefix, $equal, $onemptyitems);
// Less than 1000 elements, nothing to do.
if (count($params) < 1000) {
return array($sql, $params); // Return unmodified.
}
// Extract the interesting parts of the sql to rewrite.
if (preg_match('!(^.*IN \()([^\)]*)(.*)$!', $sql, $matches) === false) {
return array($sql, $params); // Return unmodified.
}
$instart = $matches[1];
$insql = $matches[2];
$inend = $matches[3];
$newsql = '';
// Some basic verification about the matching going ok.
$insqlarr = explode(',', $insql);
if (count($insqlarr) !== count($params)) {
return array($sql, $params); // Return unmodified.
}
// Arrived here, we need to chunk and pivot the params, building a new sql (params remain the same).
$addunionclause = false;
while ($chunk = array_splice($insqlarr, 0, 125)) { // Each chunk will handle up to 125 (+125 +1) elements (DECODE max is 255).
$chunksize = count($chunk);
if ($addunionclause) {
$newsql .= "\n UNION ALL";
}
$newsql .= "\n SELECT DECODE(pivot";
$counter = 1;
foreach ($chunk as $element) {
$newsql .= ",\n {$counter}, " . trim($element);
$counter++;
}
$newsql .= ")";
$newsql .= "\n FROM dual";
$newsql .= "\n CROSS JOIN (SELECT LEVEL AS pivot FROM dual CONNECT BY LEVEL <= {$chunksize})";
$addunionclause = true;
}
// Rebuild the complete IN() clause and return it.
return array($instart . $newsql . $inend, $params);
}
/**
* Mega hacky magic to work around crazy Oracle NULL concats.
* @param array $args
* @return string
*/
protected function recursive_concat(array $args) {
$count = count($args);
if ($count == 1) {
$arg = reset($args);
return $arg;
}
if ($count == 2) {
$args[] = "' '";
// No return here intentionally.
}
$first = array_shift($args);
$second = array_shift($args);
$third = $this->recursive_concat($args);
return "MOODLELIB.TRICONCAT($first, $second, $third)";
}
/**
* Returns the SQL for returning searching one string for the location of another.
*/
public function sql_position($needle, $haystack) {
return "INSTR(($haystack), ($needle))";
}
/**
* Returns the SQL to know if one field is empty.
*
* @param string $tablename Name of the table (without prefix). Not used for now but can be
* necessary in the future if we want to use some introspection using
* meta information against the DB.
* @param string $fieldname Name of the field we are going to check
* @param bool $nullablefield For specifying if the field is nullable (true) or no (false) in the DB.
* @param bool $textfield For specifying if it is a text (also called clob) field (true) or a varchar one (false)
* @return string the sql code to be added to check for empty values
*/
public function sql_isempty($tablename, $fieldname, $nullablefield, $textfield) {
if ($textfield) {
return " (".$this->sql_compare_text($fieldname)." = ' ') ";
} else {
return " ($fieldname = ' ') ";
}
}
    /**
     * Returns an SQL fragment suitable for sorting on a CLOB column.
     *
     * Oracle cannot ORDER BY a LOB directly, so only the first $numchars
     * characters (extracted with dbms_lob.substr) are used as the sort key.
     *
     * @param string $fieldname Name of the CLOB field to sort by.
     * @param int $numchars Number of leading characters used for sorting.
     * @return string The SQL fragment.
     */
    public function sql_order_by_text($fieldname, $numchars=32) {
        return 'dbms_lob.substr(' . $fieldname . ', ' . $numchars . ',1)';
    }
/**
* Is the required OCI server package installed?
* @return bool
*/
    protected function oci_package_installed() {
        // Look for a valid compiled body of the helper package; a missing or
        // invalid package means attempt_oci_package_install() must be run.
        $sql = "SELECT 1
                  FROM user_objects
                 WHERE object_type = 'PACKAGE BODY'
                   AND object_name = 'MOODLELIB'
                   AND status = 'VALID'";
        // Raw OCI calls bracketed by query_start()/query_end() so perf logging
        // and error handling stay consistent with the rest of the driver.
        $this->query_start($sql, null, SQL_QUERY_AUX);
        $stmt = $this->parse_query($sql);
        $result = oci_execute($stmt, $this->commit_status);
        $this->query_end($result, $stmt);
        $records = null;
        oci_fetch_all($stmt, $records, 0, -1, OCI_FETCHSTATEMENT_BY_ROW);
        oci_free_statement($stmt);
        // Any row returned means the package body exists and is valid.
        return isset($records[0]) && reset($records[0]) ? true : false;
    }
/**
* Try to add required moodle package into oracle server.
*/
    protected function attempt_oci_package_install() {
        $sqls = file_get_contents(__DIR__.'/oci_native_moodle_package.sql');
        // The package file separates statements with a lone "/" line
        // (SQL*Plus style); split on those to get individual statements.
        $sqls = preg_split('/^\/$/sm', $sqls);
        foreach ($sqls as $sql) {
            $sql = trim($sql);
            // Skip empty fragments and SQL*Plus-only directives.
            if ($sql === '' or $sql === 'SHOW ERRORS') {
                continue;
            }
            $this->change_database_structure($sql);
        }
    }
/**
* Does this driver support tool_replace?
*
* @since Moodle 2.8
* @return bool
*/
    public function replace_all_text_supported() {
        return true; // The Oracle driver fully supports the DB search & replace tool.
    }
    /**
     * Does this driver support session locking?
     * @return bool true - implemented via the MOODLELIB PL/SQL helper package.
     */
    public function session_lock_supported() {
        return true;
    }
/**
* Obtain session lock
* @param int $rowid id of the row with session record
* @param int $timeout max allowed time to wait for the lock in seconds
* @return void
*/
    public function get_session_lock($rowid, $timeout) {
        parent::get_session_lock($rowid, $timeout);
        // Lock name is unique per database + table prefix + session row.
        $fullname = $this->dbname.'-'.$this->prefix.'-session-'.$rowid;
        $sql = 'SELECT MOODLELIB.GET_LOCK(:lockname, :locktimeout) FROM DUAL';
        $params = array('lockname' => $fullname , 'locktimeout' => $timeout);
        $this->query_start($sql, $params, SQL_QUERY_AUX);
        $stmt = $this->parse_query($sql);
        $this->bind_params($stmt, $params);
        $result = oci_execute($stmt, $this->commit_status);
        if ($result === false) { // Any failure in get_lock() raises error, causing return of bool false
            // Translate the OCI failure into the standard "session wait" exception.
            throw new dml_sessionwait_exception();
        }
        $this->query_end($result, $stmt);
        oci_free_statement($stmt);
    }
    /**
     * Release a session lock previously taken by get_session_lock().
     * @param int $rowid id of the row with session record
     * @return void
     */
    public function release_session_lock($rowid) {
        // Nothing to release if DB sessions were never used by this connection.
        if (!$this->used_for_db_sessions) {
            return;
        }
        parent::release_session_lock($rowid);
        // Must match the lock name format used in get_session_lock().
        $fullname = $this->dbname.'-'.$this->prefix.'-session-'.$rowid;
        $params = array('lockname' => $fullname);
        $sql = 'SELECT MOODLELIB.RELEASE_LOCK(:lockname) FROM DUAL';
        $this->query_start($sql, $params, SQL_QUERY_AUX);
        $stmt = $this->parse_query($sql);
        $this->bind_params($stmt, $params);
        $result = oci_execute($stmt, $this->commit_status);
        $this->query_end($result, $stmt);
        oci_free_statement($stmt);
    }
/**
* Driver specific start of real database transaction,
* this can not be used directly in code.
* @return void
*/
    protected function begin_transaction() {
        // OCI_DEFAULT suppresses autocommit, so following statements become
        // part of an explicit transaction until commit/rollback.
        $this->commit_status = OCI_DEFAULT; //Done! ;-)
    }
/**
* Driver specific commit of real database transaction,
* this can not be used directly in code.
* @return void
*/
    protected function commit_transaction() {
        $this->query_start('--oracle_commit', NULL, SQL_QUERY_AUX);
        $result = oci_commit($this->oci);
        // Restore autocommit behaviour for subsequent statements.
        $this->commit_status = OCI_COMMIT_ON_SUCCESS;
        $this->query_end($result);
    }
/**
* Driver specific abort of real database transaction,
* this can not be used directly in code.
* @return void
*/
    protected function rollback_transaction() {
        $this->query_start('--oracle_rollback', NULL, SQL_QUERY_AUX);
        $result = oci_rollback($this->oci);
        // Restore autocommit behaviour for subsequent statements.
        $this->commit_status = OCI_COMMIT_ON_SUCCESS;
        $this->query_end($result);
    }
}
| gpl-3.0 |
ogahara/esp8266-devkit | Espressif/examples/nodemcu-firmware/lua_modules/mcp23008/mcp23008.lua | 4973 | ---
-- @description Expands the ESP8266's GPIO to 8 more I/O pins via I2C with the MCP23008 I/O Expander
-- MCP23008 Datasheet: http://ww1.microchip.com/downloads/en/DeviceDoc/21919e.pdf
-- Tested on NodeMCU 0.9.5 build 20150213.
-- @date March 02, 2015
-- @author Miguel
-- GitHub: https://github.com/AllAboutEE
-- YouTube: https://www.youtube.com/user/AllAboutEE
-- Website: http://AllAboutEE.com
--------------------------------------------------------------------------------
local moduleName = ...
local M = {}
_G[moduleName] = M
-- Constant device address (base; low bits OR'ed in by M.begin()).
local MCP23008_ADDRESS = 0x20
-- Registers' address as defined in the MCP23008's datasheet
local MCP23008_IODIR = 0x00
local MCP23008_IPOL = 0x01
local MCP23008_GPINTEN = 0x02
local MCP23008_DEFVAL = 0x03
local MCP23008_INTCON = 0x04
local MCP23008_IOCON = 0x05
local MCP23008_GPPU = 0x06
local MCP23008_INTF = 0x07
local MCP23008_INTCAP = 0x08
local MCP23008_GPIO = 0x09
local MCP23008_OLAT = 0x0A
-- Default bus id for i2c communication
local id = 0
-- pin modes for I/O direction
M.INPUT = 1
M.OUTPUT = 0
-- pin states for I/O i.e. on/off
M.HIGH = 1
M.LOW = 0
-- Weak pull-up resistor state
M.ENABLE = 1
M.DISABLE = 0
---
-- @name write
-- @description Writes one byte to a register
-- @param registerAddress The register where we'll write data
-- @param data The data to write to the register
-- @return void
----------------------------------------------------------
local function write(registerAddress, data)
    -- Single I2C write transaction: address byte, register pointer, data byte.
    i2c.start(id)
    i2c.address(id,MCP23008_ADDRESS,i2c.TRANSMITTER) -- send MCP's address and write bit
    i2c.write(id,registerAddress)
    i2c.write(id,data)
    i2c.stop(id)
end
---
-- @name read
-- @description Reads the value of a register
-- @param registerAddress The register address from which to read
-- @return The byte stored in the register
----------------------------------------------------------
local function read(registerAddress)
    -- Tell the MCP which register you want to read from (write-then-read
    -- sequence with a repeated start in between).
    i2c.start(id)
    i2c.address(id,MCP23008_ADDRESS,i2c.TRANSMITTER) -- send MCP's address and write bit
    i2c.write(id,registerAddress)
    i2c.stop(id)
    i2c.start(id)
    -- Read the data from the register
    i2c.address(id,MCP23008_ADDRESS,i2c.RECEIVER) -- send the MCP's address and read bit
    local data = 0x00
    data = i2c.read(id,1) -- we expect only one byte of data
    i2c.stop(id)
    return string.byte(data) -- i2c.read returns a string so we convert to it's int value
end
---
--! @name begin
--! @description Sets the MCP23008 device address's last three bits.
-- Note: The address is defined as binary 0100[A2][A1][A0] where
-- A2, A1, and A0 are defined by the connection of the pins,
-- e.g. if the pins are connected all to GND then the parameter address
-- will need to be 0x0.
-- @param address The 3 least significant bits (LSB) of the address
-- @param pinSDA The pin to use for SDA
-- @param pinSCL The pin to use for SCL
-- @param speed The speed of the I2C signal
-- @return void
---------------------------------------------------------------------------
function M.begin(address,pinSDA,pinSCL,speed)
    -- OR the A2..A0 hardware-strap bits into the fixed 0b0100xxx base address.
    -- NOTE(review): repeated calls accumulate bits in MCP23008_ADDRESS rather
    -- than replacing them - presumably begin() is only called once; verify.
    MCP23008_ADDRESS = bit.bor(MCP23008_ADDRESS,address)
    i2c.setup(id,pinSDA,pinSCL,speed)
end
---
-- @name writeGPIO
-- @description Writes a byte of data to the GPIO register
-- @param dataByte The byte of data to write
-- @return void
----------------------------------------------------------
function M.writeGPIO(dataByte)
    write(MCP23008_GPIO,dataByte) -- set all 8 port pins in one register write
end
---
-- @name readGPIO
-- @description reads a byte of data from the GPIO register
-- @return One byte of data
----------------------------------------------------------
function M.readGPIO()
    return read(MCP23008_GPIO) -- current logic level of all 8 port pins
end
---
-- @name writeIODIR
-- @description Writes one byte of data to the IODIR register
-- @param dataByte The byte of data to write
-- @return void
----------------------------------------------------------
function M.writeIODIR(dataByte)
    write(MCP23008_IODIR,dataByte) -- per-bit direction: 1 = input, 0 = output
end
---
-- @name readIODIR
-- @description Reads a byte from the IODIR register
-- @return The byte of data in IODIR
----------------------------------------------------------
function M.readIODIR()
    return read(MCP23008_IODIR) -- per-bit direction: 1 = input, 0 = output
end
---
-- @name writeGPPU
-- @description Writes a byte of data to the
-- PULL-UP RESISTOR CONFIGURATION (GPPU)REGISTER
-- @param databyte the value to write to the GPPU register.
-- each bit in this byte is assigned to an individual GPIO pin
-- @return void
----------------------------------------------------------------
function M.writeGPPU(dataByte)
    write(MCP23008_GPPU,dataByte) -- per-bit internal pull-up enable (1 = on)
end
---
-- @name readGPPU
-- @description Reads the GPPU (Pull-UP resistors register) byte
-- @return The GPPU byte i.e. state of all internal pull-up resistors
-------------------------------------------------------------------
function M.readGPPU()
    return read(MCP23008_GPPU) -- per-bit internal pull-up state (1 = on)
end
return M
| gpl-3.0 |
zommuter/libgdx | gdx/src/com/badlogic/gdx/graphics/VertexAttributes.java | 8139 | /*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.graphics;
import java.util.Iterator;
import java.util.NoSuchElementException;
import com.badlogic.gdx.utils.GdxRuntimeException;
/** Instances of this class specify the vertex attributes of a mesh. VertexAttributes are used by {@link Mesh} instances to define
* its vertex structure. Vertex attributes have an order. The order is specified by the order they are added to this class.
*
* @author mzechner, Xoppa */
public final class VertexAttributes implements Iterable<VertexAttribute>, Comparable<VertexAttributes> {
	/** The usage of a vertex attribute.
	 * 
	 * @author mzechner */
	public static final class Usage {
		// Power-of-two bit flags so multiple usages can be OR'ed into the
		// mask returned by VertexAttributes#getMask().
		public static final int Position = 1;
		public static final int ColorUnpacked = 2;
		public static final int ColorPacked = 4;
		public static final int Normal = 8;
		public static final int TextureCoordinates = 16;
		public static final int Generic = 32;
		public static final int BoneWeight = 64;
		public static final int Tangent = 128;
		public static final int BiNormal = 256;
	}
	/** the attributes in the order they were specified **/
	private final VertexAttribute[] attributes;
	/** the size of a single vertex in bytes **/
	public final int vertexSize;
	/** cache of the value calculated by {@link #getMask()} (-1 = not yet computed) **/
	private long mask = -1;
	// Lazily created reusable iterable (see ReadonlyIterable below).
	private ReadonlyIterable<VertexAttribute> iterable;
	/** Constructor, sets the vertex attributes in a specific order */
	public VertexAttributes (VertexAttribute... attributes) {
		if (attributes.length == 0) throw new IllegalArgumentException("attributes must be >= 1");
		// Defensive copy so later mutation of the caller's array has no effect.
		VertexAttribute[] list = new VertexAttribute[attributes.length];
		for (int i = 0; i < attributes.length; i++)
			list[i] = attributes[i];
		this.attributes = list;
		// Also assigns each attribute's byte offset as a side effect.
		vertexSize = calculateOffsets();
	}
	/** Returns the offset for the first VertexAttribute with the specified usage.
	 * @param usage The usage of the VertexAttribute.
	 * @param defaultIfNotFound value returned when no attribute has that usage. */
	public int getOffset (int usage, int defaultIfNotFound) {
		VertexAttribute vertexAttribute = findByUsage(usage);
		if (vertexAttribute == null) return defaultIfNotFound;
		// Byte offset converted to a float-sized (4 byte) component offset.
		return vertexAttribute.offset / 4;
	}
	/** Returns the offset for the first VertexAttribute with the specified usage.
	 * @param usage The usage of the VertexAttribute. */
	public int getOffset (int usage) {
		return getOffset(usage, 0);
	}
	/** Returns the first VertexAttribute for the given usage.
	 * @param usage The usage of the VertexAttribute to find. */
	public VertexAttribute findByUsage (int usage) {
		int len = size();
		for (int i = 0; i < len; i++)
			if (get(i).usage == usage) return get(i);
		return null;
	}
	/** Assigns each attribute's byte offset and returns the total vertex size in bytes. */
	private int calculateOffsets () {
		int count = 0;
		for (int i = 0; i < attributes.length; i++) {
			VertexAttribute attribute = attributes[i];
			attribute.offset = count;
			// A packed color is stored as 4 bytes total, everything else as
			// 4 bytes (one float) per component.
			if (attribute.usage == VertexAttributes.Usage.ColorPacked)
				count += 4;
			else
				count += 4 * attribute.numComponents;
		}
		return count;
	}
	/** @return the number of attributes */
	public int size () {
		return attributes.length;
	}
	/** @param index the index
	 * @return the VertexAttribute at the given index */
	public VertexAttribute get (int index) {
		return attributes[index];
	}
	public String toString () {
		StringBuilder builder = new StringBuilder();
		builder.append("[");
		for (int i = 0; i < attributes.length; i++) {
			builder.append("(");
			builder.append(attributes[i].alias);
			builder.append(", ");
			builder.append(attributes[i].usage);
			builder.append(", ");
			builder.append(attributes[i].numComponents);
			builder.append(", ");
			builder.append(attributes[i].offset);
			builder.append(")");
			builder.append("\n");
		}
		builder.append("]");
		return builder.toString();
	}
	@Override
	public boolean equals (final Object obj) {
		if (obj == this) return true;
		if (!(obj instanceof VertexAttributes)) return false;
		VertexAttributes other = (VertexAttributes)obj;
		if (this.attributes.length != other.attributes.length) return false;
		// Order matters: attribute lists are only equal element-by-element.
		for (int i = 0; i < attributes.length; i++) {
			if (!attributes[i].equals(other.attributes[i])) return false;
		}
		return true;
	}
	@Override
	public int hashCode () {
		// Accumulate in a long, then fold the high bits into the low 32.
		long result = 61 * attributes.length;
		for (int i = 0; i < attributes.length; i++)
			result = result * 61 + attributes[i].hashCode();
		return (int)(result ^ (result >> 32));
	}
	/** Calculates a mask based on the contained {@link VertexAttribute} instances. The mask is a bit-wise or of each attributes
	 * {@link VertexAttribute#usage}.
	 * @return the mask */
	public long getMask () {
		// Computed lazily and cached; -1 marks "not yet computed".
		if (mask == -1) {
			long result = 0;
			for (int i = 0; i < attributes.length; i++) {
				result |= attributes[i].usage;
			}
			mask = result;
		}
		return mask;
	}
	@Override
	public int compareTo (VertexAttributes o) {
		// Cheap discriminators first (count, usage mask), then a per-attribute
		// field-by-field comparison from the last attribute backwards.
		if (attributes.length != o.attributes.length) return attributes.length - o.attributes.length;
		final long m1 = getMask();
		final long m2 = o.getMask();
		if (m1 != m2) return m1 < m2 ? -1 : 1;
		for (int i = attributes.length - 1; i >= 0; --i) {
			final VertexAttribute va0 = attributes[i];
			final VertexAttribute va1 = o.attributes[i];
			if (va0.usage != va1.usage) return va0.usage - va1.usage;
			if (va0.unit != va1.unit) return va0.unit - va1.unit;
			if (va0.numComponents != va1.numComponents) return va0.numComponents - va1.numComponents;
			if (va0.normalized != va1.normalized) return va0.normalized ? 1 : -1;
			if (va0.type != va1.type) return va0.type - va1.type;
		}
		return 0;
	}
	@Override
	public Iterator<VertexAttribute> iterator () {
		if (iterable == null) iterable = new ReadonlyIterable<VertexAttribute>(attributes);
		return iterable.iterator();
	}
	/** Array-backed iterator that forbids removal and detects (non-thread-safe) nested use. */
	static private class ReadonlyIterator<T> implements Iterator<T>, Iterable<T> {
		private final T[] array;
		int index;
		boolean valid = true;
		public ReadonlyIterator (T[] array) {
			this.array = array;
		}
		@Override
		public boolean hasNext () {
			if (!valid) throw new GdxRuntimeException("#iterator() cannot be used nested.");
			return index < array.length;
		}
		@Override
		public T next () {
			if (index >= array.length) throw new NoSuchElementException(String.valueOf(index));
			if (!valid) throw new GdxRuntimeException("#iterator() cannot be used nested.");
			return array[index++];
		}
		@Override
		public void remove () {
			throw new GdxRuntimeException("Remove not allowed.");
		}
		public void reset () {
			index = 0;
		}
		@Override
		public Iterator<T> iterator () {
			return this;
		}
	}
	/** Allocation-free iterable: alternates between two reusable iterators so
	 * one level of nested iteration works without creating garbage. */
	static private class ReadonlyIterable<T> implements Iterable<T> {
		private final T[] array;
		private ReadonlyIterator iterator1, iterator2;
		public ReadonlyIterable (T[] array) {
			this.array = array;
		}
		@Override
		public Iterator<T> iterator () {
			if (iterator1 == null) {
				iterator1 = new ReadonlyIterator(array);
				iterator2 = new ReadonlyIterator(array);
			}
			// Hand out whichever iterator is not currently in use, invalidating the other.
			if (!iterator1.valid) {
				iterator1.index = 0;
				iterator1.valid = true;
				iterator2.valid = false;
				return iterator1;
			}
			iterator2.index = 0;
			iterator2.valid = true;
			iterator1.valid = false;
			return iterator2;
		}
	}
}
| apache-2.0 |
bkrukowski/phantomjs | src/qt/qtbase/src/opengl/qglbuffer.cpp | 16669 | /****************************************************************************
**
** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the QtOpenGL module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include <QtOpenGL/qgl.h>
#include <QtOpenGL/private/qgl_p.h>
#include <private/qopenglextensions_p.h>
#include <QtCore/qatomic.h>
#include "qglbuffer.h"
QT_BEGIN_NAMESPACE
/*!
\class QGLBuffer
\inmodule QtOpenGL
\brief The QGLBuffer class provides functions for creating and managing GL buffer objects.
\since 4.7
\obsolete
\ingroup painting-3D
Buffer objects are created in the GL server so that the
client application can avoid uploading vertices, indices,
texture image data, etc every time they are needed.
QGLBuffer objects can be copied around as a reference to the
underlying GL buffer object:
\code
QGLBuffer buffer1(QGLBuffer::IndexBuffer);
buffer1.create();
QGLBuffer buffer2 = buffer1;
\endcode
QGLBuffer performs a shallow copy when objects are copied in this
manner, but does not implement copy-on-write semantics. The original
object will be affected whenever the copy is modified.
\note This class has been deprecated in favor of QOpenGLBuffer.
*/
/*!
\enum QGLBuffer::Type
This enum defines the type of GL buffer object to create with QGLBuffer.
\value VertexBuffer Vertex buffer object for use when specifying
vertex arrays.
\value IndexBuffer Index buffer object for use with \c{glDrawElements()}.
\value PixelPackBuffer Pixel pack buffer object for reading pixel
data from the GL server (for example, with \c{glReadPixels()}).
Not supported under OpenGL/ES.
\value PixelUnpackBuffer Pixel unpack buffer object for writing pixel
data to the GL server (for example, with \c{glTexImage2D()}).
Not supported under OpenGL/ES.
*/
/*!
\enum QGLBuffer::UsagePattern
This enum defines the usage pattern of a QGLBuffer object.
\value StreamDraw The data will be set once and used a few times
for drawing operations. Under OpenGL/ES 1.1 this is identical
to StaticDraw.
\value StreamRead The data will be set once and used a few times
for reading data back from the GL server. Not supported
under OpenGL/ES.
\value StreamCopy The data will be set once and used a few times
for reading data back from the GL server for use in further
drawing operations. Not supported under OpenGL/ES.
\value StaticDraw The data will be set once and used many times
for drawing operations.
\value StaticRead The data will be set once and used many times
for reading data back from the GL server. Not supported
under OpenGL/ES.
\value StaticCopy The data will be set once and used many times
for reading data back from the GL server for use in further
drawing operations. Not supported under OpenGL/ES.
\value DynamicDraw The data will be modified repeatedly and used
many times for drawing operations.
\value DynamicRead The data will be modified repeatedly and used
many times for reading data back from the GL server.
Not supported under OpenGL/ES.
\value DynamicCopy The data will be modified repeatedly and used
many times for reading data back from the GL server for
use in further drawing operations. Not supported under OpenGL/ES.
*/
/*!
\enum QGLBuffer::Access
This enum defines the access mode for QGLBuffer::map().
\value ReadOnly The buffer will be mapped for reading only.
\value WriteOnly The buffer will be mapped for writing only.
\value ReadWrite The buffer will be mapped for reading and writing.
*/
// Implicitly shared private data for QGLBuffer (manual refcounting, see
// QGLBuffer's copy constructor / destructor / operator=).
class QGLBufferPrivate
{
public:
    QGLBufferPrivate(QGLBuffer::Type t)
        : ref(1),
          type(t),
          guard(0),
          usagePattern(QGLBuffer::StaticDraw),
          actualUsagePattern(QGLBuffer::StaticDraw),
          funcs(0)
    {
    }

    QAtomicInt ref;                          // shared-data reference count
    QGLBuffer::Type type;                    // buffer binding target
    QGLSharedResourceGuardBase *guard;       // owns the GL buffer id, 0 until create()
    QGLBuffer::UsagePattern usagePattern;
    QGLBuffer::UsagePattern actualUsagePattern;
    QOpenGLExtensions *funcs;                // resolved GL entry points, set in create()
};
/*!
Constructs a new buffer object of type QGLBuffer::VertexBuffer.
Note: this constructor just creates the QGLBuffer instance. The actual
buffer object in the GL server is not created until create() is called.
\sa create()
*/
QGLBuffer::QGLBuffer()
    : d_ptr(new QGLBufferPrivate(QGLBuffer::VertexBuffer)) // default to a vertex buffer
{
}
/*!
Constructs a new buffer object of \a type.
Note: this constructor just creates the QGLBuffer instance. The actual
buffer object in the GL server is not created until create() is called.
\sa create()
*/
QGLBuffer::QGLBuffer(QGLBuffer::Type type)
    : d_ptr(new QGLBufferPrivate(type)) // GL object itself is created later, in create()
{
}
/*!
Constructs a shallow copy of \a other.
Note: QGLBuffer does not implement copy-on-write semantics,
so \a other will be affected whenever the copy is modified.
*/
QGLBuffer::QGLBuffer(const QGLBuffer &other)
    : d_ptr(other.d_ptr)
{
    d_ptr->ref.ref(); // shallow copy: share the private data and bump its refcount
}
#define ctx QGLContext::currentContext();
/*!
Destroys this buffer object, including the storage being
used in the GL server.
*/
QGLBuffer::~QGLBuffer()
{
    // Only the last reference tears down the GL object and the shared data.
    if (!d_ptr->ref.deref()) {
        destroy();
        delete d_ptr;
    }
}
/*!
Assigns a shallow copy of \a other to this object.
Note: QGLBuffer does not implement copy-on-write semantics,
so \a other will be affected whenever the copy is modified.
*/
QGLBuffer &QGLBuffer::operator=(const QGLBuffer &other)
{
    if (d_ptr != other.d_ptr) {
        // Ref the new data first so self-assignment through aliases stays safe,
        // then drop (and possibly destroy) our old shared data.
        other.d_ptr->ref.ref();
        if (!d_ptr->ref.deref()) {
            destroy();
            delete d_ptr;
        }
        d_ptr = other.d_ptr;
    }
    return *this;
}
/*!
Returns the type of buffer represented by this object.
*/
QGLBuffer::Type QGLBuffer::type() const
{
    Q_D(const QGLBuffer);
    return d->type; // fixed at construction time
}
/*!
Returns the usage pattern for this buffer object.
The default value is StaticDraw.
\sa setUsagePattern()
*/
QGLBuffer::UsagePattern QGLBuffer::usagePattern() const
{
    Q_D(const QGLBuffer);
    return d->usagePattern;
}
/*!
Sets the usage pattern for this buffer object to \a value.
This function must be called before allocate() or write().
\sa usagePattern(), allocate(), write()
*/
void QGLBuffer::setUsagePattern(QGLBuffer::UsagePattern value)
{
    Q_D(QGLBuffer);
    // Takes effect on the next allocate()/write(); see actualUsagePattern.
    d->usagePattern = d->actualUsagePattern = value;
}
#undef ctx
namespace {
// Deleter invoked by the shared-resource guard when the buffer's context
// group goes away or the guard is freed.
void freeBufferFunc(QGLContext *ctx, GLuint id)
{
    Q_ASSERT(ctx);
    ctx->contextHandle()->functions()->glDeleteBuffers(1, &id);
}
}
/*!
Creates the buffer object in the GL server. Returns \c true if
the object was created; false otherwise.
This function must be called with a current QGLContext.
The buffer will be bound to and can only be used in
that context (or any other context that is shared with it).
This function will return false if the GL implementation
does not support buffers, or there is no current QGLContext.
\sa isCreated(), allocate(), write(), destroy()
*/
bool QGLBuffer::create()
{
    Q_D(QGLBuffer);
    // Already created and still alive in the GL server: nothing to do.
    if (d->guard && d->guard->id())
        return true;
    QGLContext *ctx = const_cast<QGLContext *>(QGLContext::currentContext());
    if (ctx) {
        // (Re)resolve GL entry points against the current context.
        delete d->funcs;
        d->funcs = new QOpenGLExtensions(ctx->contextHandle());
        if (!d->funcs->hasOpenGLFeature(QOpenGLFunctions::Buffers))
            return false;
        GLuint bufferId = 0;
        d->funcs->glGenBuffers(1, &bufferId);
        if (bufferId) {
            // Drop any stale guard, then tie the new buffer id's lifetime to
            // the context's sharing group via a fresh resource guard.
            if (d->guard)
                d->guard->free();
            d->guard = createSharedResourceGuard(ctx, bufferId, freeBufferFunc);
            return true;
        }
    }
    return false;
}
#define ctx QGLContext::currentContext()
/*!
Returns \c true if this buffer has been created; false otherwise.
\sa create(), destroy()
*/
bool QGLBuffer::isCreated() const
{
    Q_D(const QGLBuffer);
    // Guard exists and still holds a live GL id.
    return d->guard && d->guard->id();
}
/*!
Destroys this buffer object, including the storage being
used in the GL server. All references to the buffer will
become invalid.
*/
void QGLBuffer::destroy()
{
    Q_D(QGLBuffer);
    if (d->guard) {
        // Guard's deleter takes care of glDeleteBuffers in the right context.
        d->guard->free();
        d->guard = 0;
    }
}
/*!
Reads the \a count bytes in this buffer starting at \a offset
into \a data. Returns \c true on success; false if reading from
the buffer is not supported. Buffer reading is not supported
under OpenGL/ES.
It is assumed that this buffer has been bound to the current context.
\sa write(), bind()
*/
bool QGLBuffer::read(int offset, void *data, int count)
{
#if !defined(QT_OPENGL_ES)
    Q_D(QGLBuffer);
    if (!d->funcs->hasOpenGLFeature(QOpenGLFunctions::Buffers) || !d->guard->id())
        return false;
    while (d->funcs->glGetError() != GL_NO_ERROR) ; // Clear error state.
    d->funcs->glGetBufferSubData(d->type, offset, count, data);
    // Success means the read raised no new GL error.
    return d->funcs->glGetError() == GL_NO_ERROR;
#else
    // glGetBufferSubData does not exist under OpenGL/ES.
    Q_UNUSED(offset);
    Q_UNUSED(data);
    Q_UNUSED(count);
    return false;
#endif
}
/*!
Replaces the \a count bytes of this buffer starting at \a offset
with the contents of \a data. Any other bytes in the buffer
will be left unmodified.
It is assumed that create() has been called on this buffer and that
it has been bound to the current context.
\sa create(), read(), allocate()
*/
void QGLBuffer::write(int offset, const void *data, int count)
{
#ifndef QT_NO_DEBUG
    if (!isCreated())
        // Fixed copy-paste in the diagnostic: it previously reported
        // "QGLBuffer::allocate()" even though this is write().
        qWarning("QGLBuffer::write(): buffer not created");
#endif
    Q_D(QGLBuffer);
    // Silently a no-op when the buffer was never created (matches allocate()).
    if (d->guard && d->guard->id())
        d->funcs->glBufferSubData(d->type, offset, count, data);
}
/*!
Allocates \a count bytes of space to the buffer, initialized to
the contents of \a data. Any previous contents will be removed.
It is assumed that create() has been called on this buffer and that
it has been bound to the current context.
\sa create(), read(), write()
*/
void QGLBuffer::allocate(const void *data, int count)
{
#ifndef QT_NO_DEBUG
    if (!isCreated())
        qWarning("QGLBuffer::allocate(): buffer not created");
#endif
    Q_D(QGLBuffer);
    // glBufferData discards any previous storage before allocating count bytes.
    if (d->guard && d->guard->id())
        d->funcs->glBufferData(d->type, count, data, d->actualUsagePattern);
}
/*!
\fn void QGLBuffer::allocate(int count)
\overload
Allocates \a count bytes of space to the buffer. Any previous
contents will be removed.
It is assumed that create() has been called on this buffer and that
it has been bound to the current context.
\sa create(), write()
*/
/*!
Binds the buffer associated with this object to the current
GL context. Returns \c false if binding was not possible, usually because
type() is not supported on this GL implementation.
The buffer must be bound to the same QGLContext current when create()
was called, or to another QGLContext that is sharing with it.
Otherwise, false will be returned from this function.
\sa release(), create()
*/
bool QGLBuffer::bind()
{
#ifndef QT_NO_DEBUG
    if (!isCreated())
        qWarning("QGLBuffer::bind(): buffer not created");
#endif
    Q_D(const QGLBuffer);
    GLuint bufferId = d->guard ? d->guard->id() : 0;
    if (bufferId) {
        // A buffer id is only valid within the sharing group it was created in.
        if (d->guard->group() != QOpenGLContextGroup::currentContextGroup()) {
#ifndef QT_NO_DEBUG
            qWarning("QGLBuffer::bind: buffer is not valid in the current context");
#endif
            return false;
        }
        d->funcs->glBindBuffer(d->type, bufferId);
        return true;
    } else {
        return false;
    }
}
/*!
Releases the buffer associated with this object from the
current GL context.
This function must be called with the same QGLContext current
as when bind() was called on the buffer.
\sa bind()
*/
void QGLBuffer::release()
{
#ifndef QT_NO_DEBUG
    if (!isCreated())
        qWarning("QGLBuffer::release(): buffer not created");
#endif
    Q_D(const QGLBuffer);
    // Unbind by binding id 0 to this buffer's target.
    if (d->guard && d->guard->id())
        d->funcs->glBindBuffer(d->type, 0);
}
#undef ctx
/*!
Releases the buffer associated with \a type in the current
QGLContext.
This function is a direct call to \c{glBindBuffer(type, 0)}
for use when the caller does not know which QGLBuffer has
been bound to the context but wants to make sure that it
is released.
\code
QGLBuffer::release(QGLBuffer::VertexBuffer);
\endcode
*/
void QGLBuffer::release(QGLBuffer::Type type)
{
    // Static variant: unconditionally unbind whatever buffer holds this target.
    if (QOpenGLContext *ctx = QOpenGLContext::currentContext())
        ctx->functions()->glBindBuffer(GLenum(type), 0);
}
#define ctx QGLContext::currentContext()
/*!
Returns the GL identifier associated with this buffer; zero if
the buffer has not been created.
\sa isCreated()
*/
GLuint QGLBuffer::bufferId() const
{
    Q_D(const QGLBuffer);
    return d->guard ? d->guard->id() : 0; // 0 = not created
}
#ifndef GL_BUFFER_SIZE
#define GL_BUFFER_SIZE 0x8764
#endif
/*!
Returns the size of the data in this buffer, for reading operations.
Returns -1 if fetching the buffer size is not supported, or the
buffer has not been created.
It is assumed that this buffer has been bound to the current context.
\sa isCreated(), bind()
*/
int QGLBuffer::size() const
{
    Q_D(const QGLBuffer);
    if (!d->guard || !d->guard->id())
        return -1;
    // Query the GL server; value stays -1 if the query is unsupported.
    GLint value = -1;
    d->funcs->glGetBufferParameteriv(d->type, GL_BUFFER_SIZE, &value);
    return value;
}
/*!
Maps the contents of this buffer into the application's memory
space and returns a pointer to it. Returns null if memory
mapping is not possible. The \a access parameter indicates the
type of access to be performed.
It is assumed that create() has been called on this buffer and that
it has been bound to the current context.
This function is only supported under OpenGL/ES if the
\c{GL_OES_mapbuffer} extension is present.
\sa unmap(), create(), bind()
*/
void *QGLBuffer::map(QGLBuffer::Access access)
{
    Q_D(QGLBuffer);
#ifndef QT_NO_DEBUG
    if (!isCreated())
        qWarning("QGLBuffer::map(): buffer not created");
#endif
    if (!d->guard || !d->guard->id())
        return 0;
    // Returns 0 when mapping is unsupported (e.g. ES without GL_OES_mapbuffer).
    return d->funcs->glMapBuffer(d->type, access);
}
/*!
    Unmaps the buffer after it was mapped into the application's
    memory space with a previous call to map().  Returns \c true if
    the unmap succeeded; false otherwise.

    It is assumed that this buffer has been bound to the current context,
    and that it was previously mapped with map().

    This function is only supported under OpenGL/ES if the
    \c{GL_OES_mapbuffer} extension is present.

    \sa map()
*/
bool QGLBuffer::unmap()
{
    Q_D(QGLBuffer);
#ifndef QT_NO_DEBUG
    if (!isCreated())
        qWarning("QGLBuffer::unmap(): buffer not created");
#endif
    // An uncreated buffer can never be mapped, so unmapping it fails.
    if (d->guard && d->guard->id())
        return d->funcs->glUnmapBuffer(d->type) == GL_TRUE;
    return false;
}
QT_END_NAMESPACE
| bsd-3-clause |
dakshshah96/cdnjs | ajax/libs/bootstrap-select/1.12.1/js/i18n/defaults-cro_CRO.min.js | 977 | /*!
* Bootstrap-select v1.12.1 (http://silviomoreto.github.io/bootstrap-select)
*
* Copyright 2013-2016 bootstrap-select
* Licensed under MIT (https://github.com/silviomoreto/bootstrap-select/blob/master/LICENSE)
*/
!function(a,b){"function"==typeof define&&define.amd?define(["jquery"],function(a){return b(a)}):"object"==typeof module&&module.exports?module.exports=b(require("jquery")):b(a.jQuery)}(this,function(a){!function(a){a.fn.selectpicker.defaults={noneSelectedText:"Odaberite stavku",noneResultsText:"Nema rezultata pretrage {0}",countSelectedText:function(a,b){return 1==a?"{0} stavka selektirana":"{0} stavke selektirane"},maxOptionsText:function(a,b){return[1==a?"Limit je postignut ({n} stvar maximalno)":"Limit je postignut ({n} stavke maksimalno)",1==b?"Grupni limit je postignut ({n} stvar maksimalno)":"Grupni limit je postignut ({n} stavke maksimalno)"]},selectAllText:"Selektiraj sve",deselectAllText:"Deselektiraj sve",multipleSeparator:", "}}(a)}); | mit |
ricklon/Arduino | arduino-core/src/processing/app/windows/ListComPortsParser.java | 2644 | /*
* This file is part of Arduino.
*
* Copyright 2015 Arduino LLC (http://www.arduino.cc/)
*
* Arduino is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* As a special exception, you may use this file as part of a free software
* library without restriction. Specifically, if other files instantiate
* templates or use macros or inline functions from this file, or you compile
* this file and link it with other files to produce an executable, this
* file does not by itself cause the resulting executable to be covered by
* the GNU General Public License. This exception does not however
* invalidate any other reasons why the executable file might be covered by
* the GNU General Public License.
*/
package processing.app.windows;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Interprets the output of listComPorts.exe
* <p>
* https://github.com/todbot/usbSearch/
*/
public class ListComPortsParser {
private final Pattern vidRegExp;
private final Pattern pidRegExp;
public ListComPortsParser() {
vidRegExp = Pattern.compile("VID_(\\w\\w\\w\\w)");
pidRegExp = Pattern.compile("PID_(\\w\\w\\w\\w)");
}
public String extractVIDAndPID(String output, String serial) throws IOException {
BufferedReader reader = new BufferedReader(new StringReader(output));
String line;
while ((line = reader.readLine()) != null) {
String[] lineParts = line.split(" ");
if (lineParts.length > 0 && lineParts[0].toUpperCase().equals(serial.toUpperCase())) {
String vidPidPart = lineParts[lineParts.length - 1];
Matcher vidMatcher = vidRegExp.matcher(vidPidPart);
Matcher pidMatcher = pidRegExp.matcher(vidPidPart);
if (vidMatcher.find() && pidMatcher.find()) {
return ("0x" + vidMatcher.group(1) + "_0x" + pidMatcher.group(1)).toUpperCase();
}
}
}
return null;
}
}
| lgpl-2.1 |
Socratacom/OpenDataTV | wp-content/plugins/wordpress-seo/admin/class-import-woothemes-seo.php | 4359 | <?php
/**
* @package WPSEO\Admin\Import\External
*/
/**
 * class WPSEO_Import_WooThemes_SEO
 *
 * Class with functionality to import WP SEO settings from WooThemes SEO
 */
class WPSEO_Import_WooThemes_SEO extends WPSEO_Import_External {

	/**
	 * Holds the WPSEO Title Options
	 *
	 * @var array
	 */
	private $options;

	/**
	 * Class constructor
	 *
	 * Runs each individual import step, persists the collected title options
	 * under 'wpseo_titles' and sets the success message.
	 */
	public function __construct() {
		parent::__construct();

		$this->import_home();
		$this->import_option( 'seo_woo_single_layout', 'post' );
		$this->import_option( 'seo_woo_page_layout', 'page' );
		$this->import_archive_option();
		$this->import_custom_values( 'seo_woo_meta_home_desc', 'metadesc-home-wpseo' );
		$this->import_custom_values( 'seo_woo_meta_home_key', 'metakey-home-wpseo' );
		$this->import_metas();

		update_option( 'wpseo_titles', $this->options );

		$this->set_msg( __( 'WooThemes SEO framework settings & data successfully imported.', 'wordpress-seo' ) );
	}

	/**
	 * Import a Woo layout option and translate it into a WPSEO title template.
	 *
	 * @param string $option    Name of the Woo option that stores the layout letter.
	 * @param string $post_type Post type (or 'tax-<name>' key) the template applies to.
	 */
	private function import_option( $option, $post_type ) {
		// Woo stores the layout choice as a single letter; map it to a template.
		$templates = array(
			'a' => '%%title%% %%sep%% %%sitename%%',
			'b' => '%%title%%',
			'c' => '%%sitename%% %%sep%% %%title%%',
			'd' => '%%title%% %%sep%% %%sitedesc%%',
			'e' => '%%sitename%% %%sep%% %%title%% %%sep%% %%sitedesc%%',
		);

		$layout = get_option( $option );
		if ( isset( $templates[ $layout ] ) ) {
			$this->options[ 'title-' . $post_type ] = $templates[ $layout ];
		}

		$this->perhaps_delete( $option );
	}

	/**
	 * Import the archive layout for all public taxonomies.
	 *
	 * Replace-mode is suspended during the loop so the shared
	 * 'seo_woo_archive_layout' option is only deleted once, afterwards.
	 */
	private function import_archive_option() {
		$restore_replace = false;
		if ( $this->replace ) {
			$this->replace   = false;
			$restore_replace = true;
		}

		$taxonomies = get_taxonomies( array( 'public' => true ), 'names' );
		if ( is_array( $taxonomies ) ) {
			foreach ( $taxonomies as $tax ) {
				$this->import_option( 'seo_woo_archive_layout', 'tax-' . $tax );
			}
		}

		if ( $restore_replace ) {
			$this->replace = true;
			$this->perhaps_delete( 'seo_woo_archive_layout' );
		}
	}

	/**
	 * Import custom descriptions and meta keys
	 *
	 * @param string $option Woo option name; '<option>_custom' holds the custom value.
	 * @param string $key    WPSEO option key the value is stored under.
	 */
	private function import_custom_values( $option, $key ) {
		// Value 'c' means the user picked the "custom" choice, whose text
		// lives in the sibling '<option>_custom' option.
		if ( get_option( $option ) == 'c' ) {
			$this->options[ $key ] = get_option( $option . '_custom' );
		}

		$this->perhaps_delete( $option );
		$this->perhaps_delete( $option . '_custom' );
	}

	/**
	 * Imports the WooThemes SEO homepage settings
	 */
	private function import_home() {
		$layout = get_option( 'seo_woo_home_layout' );
		if ( 'a' == $layout ) {
			$this->options['title-home-wpseo'] = '%%sitename%% %%sep%% %%sitedesc%%';
		}
		elseif ( 'b' == $layout ) {
			// Layout 'b' embeds the user-configurable "paged" separator string.
			$this->options['title-home-wpseo'] = '%%sitename%% ' . get_option( 'seo_woo_paged_var' ) . ' %%pagenum%%';
		}
		elseif ( 'c' == $layout ) {
			$this->options['title-home-wpseo'] = '%%sitedesc%%';
		}

		$this->perhaps_delete( 'seo_woo_home_layout' );
	}

	/**
	 * Import meta values if they're applicable
	 */
	private function import_metas() {
		WPSEO_Meta::replace_meta( 'seo_follow', WPSEO_Meta::$meta_prefix . 'meta-robots-nofollow', $this->replace );
		WPSEO_Meta::replace_meta( 'seo_noindex', WPSEO_Meta::$meta_prefix . 'meta-robots-noindex', $this->replace );

		// Titles, descriptions and keywords are only imported when WooSEO was
		// configured to use its own values for them.
		if ( get_option( 'seo_woo_wp_title' ) == 'true' ) {
			WPSEO_Meta::replace_meta( 'seo_title', WPSEO_Meta::$meta_prefix . 'title', $this->replace );
		}
		if ( get_option( 'seo_woo_meta_single_desc' ) == 'b' ) {
			WPSEO_Meta::replace_meta( 'seo_description', WPSEO_Meta::$meta_prefix . 'metadesc', $this->replace );
		}
		if ( get_option( 'seo_woo_meta_single_key' ) == 'b' ) {
			WPSEO_Meta::replace_meta( 'seo_keywords', WPSEO_Meta::$meta_prefix . 'metakeywords', $this->replace );
		}

		foreach ( array( 'seo_woo_wp_title', 'seo_woo_meta_single_desc', 'seo_woo_meta_single_key' ) as $option ) {
			$this->perhaps_delete( $option );
		}
	}
}
better0332/kubernetes | federation/client/clientset_generated/federation_internalclientset/fake/clientset_generated.go | 4446 | /*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package fake
import (
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/client-go/discovery"
fakediscovery "k8s.io/client-go/discovery/fake"
"k8s.io/client-go/testing"
clientset "k8s.io/kubernetes/federation/client/clientset_generated/federation_internalclientset"
autoscalinginternalversion "k8s.io/kubernetes/federation/client/clientset_generated/federation_internalclientset/typed/autoscaling/internalversion"
fakeautoscalinginternalversion "k8s.io/kubernetes/federation/client/clientset_generated/federation_internalclientset/typed/autoscaling/internalversion/fake"
batchinternalversion "k8s.io/kubernetes/federation/client/clientset_generated/federation_internalclientset/typed/batch/internalversion"
fakebatchinternalversion "k8s.io/kubernetes/federation/client/clientset_generated/federation_internalclientset/typed/batch/internalversion/fake"
coreinternalversion "k8s.io/kubernetes/federation/client/clientset_generated/federation_internalclientset/typed/core/internalversion"
fakecoreinternalversion "k8s.io/kubernetes/federation/client/clientset_generated/federation_internalclientset/typed/core/internalversion/fake"
extensionsinternalversion "k8s.io/kubernetes/federation/client/clientset_generated/federation_internalclientset/typed/extensions/internalversion"
fakeextensionsinternalversion "k8s.io/kubernetes/federation/client/clientset_generated/federation_internalclientset/typed/extensions/internalversion/fake"
federationinternalversion "k8s.io/kubernetes/federation/client/clientset_generated/federation_internalclientset/typed/federation/internalversion"
fakefederationinternalversion "k8s.io/kubernetes/federation/client/clientset_generated/federation_internalclientset/typed/federation/internalversion/fake"
)
// NewSimpleClientset returns a clientset that will respond with the provided objects.
// It's backed by a very simple object tracker that processes creates, updates and deletions as-is,
// without applying any validations and/or defaults. It shouldn't be considered a replacement
// for a real clientset and is mostly useful in simple unit tests.
func NewSimpleClientset(objects ...runtime.Object) *Clientset {
// Seed the in-memory object tracker with every provided object; a bad
// object (e.g. one the scheme cannot handle) aborts construction loudly.
o := testing.NewObjectTracker(registry, scheme, codecs.UniversalDecoder())
for _, obj := range objects {
if err := o.Add(obj); err != nil {
panic(err)
}
}
// Route every verb on every resource to the tracker, and serve all watch
// requests from a fresh fake watcher.
fakePtr := testing.Fake{}
fakePtr.AddReactor("*", "*", testing.ObjectReaction(o, registry.RESTMapper()))
fakePtr.AddWatchReactor("*", testing.DefaultWatchReactor(watch.NewFake(), nil))
return &Clientset{fakePtr}
}
// Clientset implements clientset.Interface. Meant to be embedded into a
// struct to get a default implementation. This makes faking out just the method
// you want to test easier.
type Clientset struct {
testing.Fake
}
// Discovery returns a fake DiscoveryInterface that records its actions on
// this clientset's embedded Fake.
func (c *Clientset) Discovery() discovery.DiscoveryInterface {
return &fakediscovery.FakeDiscovery{Fake: &c.Fake}
}
// Compile-time assertion that *Clientset satisfies clientset.Interface.
var _ clientset.Interface = &Clientset{}
// Core retrieves the CoreClient
// (a fake implementation recording its actions on the shared Fake).
func (c *Clientset) Core() coreinternalversion.CoreInterface {
return &fakecoreinternalversion.FakeCore{Fake: &c.Fake}
}
// Autoscaling retrieves the AutoscalingClient
// (a fake implementation recording its actions on the shared Fake).
func (c *Clientset) Autoscaling() autoscalinginternalversion.AutoscalingInterface {
return &fakeautoscalinginternalversion.FakeAutoscaling{Fake: &c.Fake}
}
// Batch retrieves the BatchClient
// (a fake implementation recording its actions on the shared Fake).
func (c *Clientset) Batch() batchinternalversion.BatchInterface {
return &fakebatchinternalversion.FakeBatch{Fake: &c.Fake}
}
// Extensions retrieves the ExtensionsClient
// (a fake implementation recording its actions on the shared Fake).
func (c *Clientset) Extensions() extensionsinternalversion.ExtensionsInterface {
return &fakeextensionsinternalversion.FakeExtensions{Fake: &c.Fake}
}
// Federation retrieves the FederationClient
// (a fake implementation recording its actions on the shared Fake).
func (c *Clientset) Federation() federationinternalversion.FederationInterface {
return &fakefederationinternalversion.FakeFederation{Fake: &c.Fake}
}
| apache-2.0 |
KaleRoberts/Angular2_Heroes | node_modules/angular2/ts/src/common/pipes/uppercase_pipe.ts | 732 | import {isString, CONST, isBlank} from 'angular2/src/facade/lang';
import {PipeTransform, WrappedValue, Injectable, Pipe} from 'angular2/core';
import {InvalidPipeArgumentException} from './invalid_pipe_argument_exception';
/**
 * Implements uppercase transforms to text.
 *
 * ### Example
 *
 * {@example core/pipes/ts/lowerupper_pipe/lowerupper_pipe_example.ts region='LowerUpperPipe'}
 */
@CONST()
@Pipe({name: 'uppercase'})
@Injectable()
export class UpperCasePipe implements PipeTransform {
  /**
   * Upper-cases `value`. Blank values are passed through unchanged, while a
   * non-blank, non-string input raises an InvalidPipeArgumentException.
   */
  transform(value: string, args: any[] = null): string {
    if (isBlank(value)) {
      return value;
    }
    if (isString(value)) {
      return value.toUpperCase();
    }
    throw new InvalidPipeArgumentException(UpperCasePipe, value);
  }
}
| mit |
codedellemc/rexray | vendor/github.com/gophercloud/gophercloud/acceptance/openstack/orchestration/v1/stackresources_test.go | 1872 | // +build acceptance
package v1
import (
"testing"
"github.com/gophercloud/gophercloud"
"github.com/gophercloud/gophercloud/openstack/orchestration/v1/stackresources"
"github.com/gophercloud/gophercloud/openstack/orchestration/v1/stacks"
"github.com/gophercloud/gophercloud/pagination"
th "github.com/gophercloud/gophercloud/testhelper"
)
// TestStackResources exercises the Heat stack-resources API end to end:
// it creates a stack, waits for CREATE_COMPLETE, then fetches a single
// resource, its metadata, and the full resource listing before tearing down.
func TestStackResources(t *testing.T) {
// Create a provider client for making the HTTP requests.
// See common.go in this directory for more information.
client := newClient(t)
stackName := "postman_stack_2"
createOpts := stacks.CreateOpts{
Name: stackName,
Template: template,
Timeout: 5,
}
stack, err := stacks.Create(client, createOpts).Extract()
th.AssertNoErr(t, err)
t.Logf("Created stack: %+v\n", stack)
// Always delete the stack at the end, even if an assertion fails.
defer func() {
err := stacks.Delete(client, stackName, stack.ID).ExtractErr()
th.AssertNoErr(t, err)
t.Logf("Deleted stack (%s)", stackName)
}()
// Poll until the stack reports CREATE_COMPLETE (up to 60 seconds).
// NOTE(review): WaitFor's error result is not asserted before continuing;
// consider adding th.AssertNoErr(t, err) here — TODO confirm intent.
err = gophercloud.WaitFor(60, func() (bool, error) {
getStack, err := stacks.Get(client, stackName, stack.ID).Extract()
if err != nil {
return false, err
}
if getStack.Status == "CREATE_COMPLETE" {
return true, nil
}
return false, nil
})
resourceName := "hello_world"
resource, err := stackresources.Get(client, stackName, stack.ID, resourceName).Extract()
th.AssertNoErr(t, err)
t.Logf("Got stack resource: %+v\n", resource)
metadata, err := stackresources.Metadata(client, stackName, stack.ID, resourceName).Extract()
th.AssertNoErr(t, err)
t.Logf("Got stack resource metadata: %+v\n", metadata)
// List all resources; returning false from the page callback stops
// pagination after the first page.
err = stackresources.List(client, stackName, stack.ID, stackresources.ListOpts{}).EachPage(func(page pagination.Page) (bool, error) {
resources, err := stackresources.ExtractResources(page)
th.AssertNoErr(t, err)
t.Logf("resources: %+v\n", resources)
return false, nil
})
th.AssertNoErr(t, err)
}
| apache-2.0 |