code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the Apache License Version 2.0.
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
package com.microsoft.uprove;
import java.util.Arrays;
/**
* Specifies a U-Prove token.
*/
public class UProveToken {

    // Token fields. Byte arrays are stored and returned by reference
    // (no defensive copies), preserving the original contract; callers
    // must not mutate arrays they pass in or receive.
    private byte[] issuerParametersUID;
    private byte[] publicKey;
    private byte[] tokenInformation;
    private byte[] proverInformation;
    private byte[] sigmaZ;
    private byte[] sigmaC;
    private byte[] sigmaR;
    private boolean isDeviceProtected = false;

    /**
     * Constructs a new, empty U-Prove token. All fields start as
     * {@code null} and the token is not Device-protected.
     */
    public UProveToken() {
        super();
    }

    /**
     * Constructs a new U-Prove token.
     * @param issuerParametersUID an issuer parameters UID.
     * @param publicKey a public key.
     * @param tokenInformation a token information value.
     * @param proverInformation a prover information value.
     * @param sigmaZ a sigmaZ value.
     * @param sigmaC a sigmaC value.
     * @param sigmaR a sigmaR value.
     * @param isDeviceProtected indicates if the token is Device-protected.
     */
    public UProveToken(byte[] issuerParametersUID, byte[] publicKey,
            byte[] tokenInformation, byte[] proverInformation,
            byte[] sigmaZ, byte[] sigmaC,
            byte[] sigmaR, boolean isDeviceProtected) {
        super();
        this.issuerParametersUID = issuerParametersUID;
        this.publicKey = publicKey;
        this.tokenInformation = tokenInformation;
        this.proverInformation = proverInformation;
        this.sigmaZ = sigmaZ;
        this.sigmaC = sigmaC;
        this.sigmaR = sigmaR;
        this.isDeviceProtected = isDeviceProtected;
    }

    /**
     * Gets the issuer parameters UID value.
     * @return the issuerParameters UID value.
     */
    public byte[] getIssuerParametersUID() {
        return issuerParametersUID;
    }

    /**
     * Sets the issuer parameters UID value.
     * @param issuerParametersUID the issuerParameters UID value to set.
     */
    public void setIssuerParametersUID(byte[] issuerParametersUID) {
        this.issuerParametersUID = issuerParametersUID;
    }

    /**
     * Gets the public key value.
     * @return the publicKey value.
     */
    public byte[] getPublicKey() {
        return publicKey;
    }

    /**
     * Sets the public key value.
     * @param publicKey the public key value to set.
     */
    public void setPublicKey(byte[] publicKey) {
        this.publicKey = publicKey;
    }

    /**
     * Gets the token information value.
     * @return the token information value.
     */
    public byte[] getTokenInformation() {
        return tokenInformation;
    }

    /**
     * Sets the token information value.
     * @param tokenInformation the token information value to set.
     */
    public void setTokenInformation(byte[] tokenInformation) {
        this.tokenInformation = tokenInformation;
    }

    /**
     * Gets the prover information value.
     * @return the prover information value.
     */
    public byte[] getProverInformation() {
        return proverInformation;
    }

    /**
     * Sets the prover information value.
     * @param proverInformation the prover information value to set.
     */
    public void setProverInformation(byte[] proverInformation) {
        this.proverInformation = proverInformation;
    }

    /**
     * Gets the sigmaZ value.
     * @return the sigmaZ value.
     */
    public byte[] getSigmaZ() {
        return sigmaZ;
    }

    /**
     * Sets the sigmaZ value.
     * @param sigmaZ the sigmaZ value to set.
     */
    public void setSigmaZ(byte[] sigmaZ) {
        this.sigmaZ = sigmaZ;
    }

    /**
     * Gets the sigmaC value.
     * @return the sigmaC value.
     */
    public byte[] getSigmaC() {
        return sigmaC;
    }

    /**
     * Sets the sigmaC value.
     * @param sigmaC the sigmaC value to set.
     */
    public void setSigmaC(byte[] sigmaC) {
        this.sigmaC = sigmaC;
    }

    /**
     * Gets the sigmaR value.
     * @return the sigmaR value.
     */
    public byte[] getSigmaR() {
        return sigmaR;
    }

    /**
     * Sets the sigmaR value.
     * @param sigmaR the sigmaR value to set.
     */
    public void setSigmaR(byte[] sigmaR) {
        this.sigmaR = sigmaR;
    }

    /**
     * Returns true if the token is Device-protected, false otherwise.
     * (Package-private, as in the original API.)
     * @return the Device-protected boolean.
     */
    boolean isDeviceProtected() {
        return isDeviceProtected;
    }

    /**
     * Sets the boolean indicating if the token is Device-protected.
     * (Package-private, as in the original API.)
     * @param isDeviceProtected true if the token is Device-protected.
     */
    void setIsDeviceProtected(boolean isDeviceProtected) {
        this.isDeviceProtected = isDeviceProtected;
    }

    /**
     * Indicates whether some other object is "equal to" this one.
     * Two tokens are equal when all seven byte-array fields are
     * element-wise equal and the Device-protected flags match.
     * @param o the reference object with which to compare.
     * @return <code>true</code> if this object is the same as the
     *         <code>o</code> argument; <code>false</code> otherwise.
     */
    @Override
    public boolean equals(final Object o) {
        if (o == this) {
            return true;
        }
        if (!(o instanceof UProveToken)) {
            return false;
        }
        UProveToken upt = (UProveToken) o;
        return
            Arrays.equals(this.issuerParametersUID, upt.issuerParametersUID) &&
            Arrays.equals(this.publicKey, upt.publicKey) &&
            Arrays.equals(this.tokenInformation, upt.tokenInformation) &&
            Arrays.equals(this.proverInformation, upt.proverInformation) &&
            Arrays.equals(this.sigmaZ, upt.sigmaZ) &&
            Arrays.equals(this.sigmaC, upt.sigmaC) &&
            Arrays.equals(this.sigmaR, upt.sigmaR) &&
            this.isDeviceProtected == upt.isDeviceProtected;
    }

    /**
     * Returns a hash code value for the object, consistent with
     * {@link #equals(Object)} (equal tokens produce equal hashes
     * because both are computed from the same array contents).
     * @return a hash code value for the object.
     */
    @Override
    public int hashCode() {
        int result = 237;
        result = 201 * result + Arrays.hashCode(this.issuerParametersUID);
        result = 201 * result + Arrays.hashCode(this.publicKey);
        result = 201 * result + Arrays.hashCode(this.tokenInformation);
        result = 201 * result + Arrays.hashCode(this.proverInformation);
        result = 201 * result + Arrays.hashCode(this.sigmaZ);
        result = 201 * result + Arrays.hashCode(this.sigmaC);
        result = 201 * result + Arrays.hashCode(this.sigmaR);
        result = result + (this.isDeviceProtected ? 201 : 0);
        return result;
    }
}
| albdum/uprove | src/main/java/com/microsoft/uprove/UProveToken.java | Java | apache-2.0 | 6,515 |
<?php
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
namespace Google\Service\ServiceNetworking;
class LabelDescriptor extends \Google\Model
{
  /**
   * Human-readable description for the label.
   *
   * @var string
   */
  public $description;
  /**
   * The label key.
   *
   * @var string
   */
  public $key;
  /**
   * The value type of the label, as a string (schema defined by the
   * Service Networking API; not validated here).
   *
   * @var string
   */
  public $valueType;
  /**
   * @param string $description
   */
  public function setDescription($description)
  {
    $this->description = $description;
  }
  /**
   * @return string
   */
  public function getDescription()
  {
    return $this->description;
  }
  /**
   * @param string $key
   */
  public function setKey($key)
  {
    $this->key = $key;
  }
  /**
   * @return string
   */
  public function getKey()
  {
    return $this->key;
  }
  /**
   * @param string $valueType
   */
  public function setValueType($valueType)
  {
    $this->valueType = $valueType;
  }
  /**
   * @return string
   */
  public function getValueType()
  {
    return $this->valueType;
  }
}
// Adding a class alias for backwards compatibility with the previous class name.
class_alias(LabelDescriptor::class, 'Google_Service_ServiceNetworking_LabelDescriptor');
| googleapis/google-api-php-client-services | src/ServiceNetworking/LabelDescriptor.php | PHP | apache-2.0 | 1,643 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.olio.webapp.cache;
/**
* The cache interface provides all operations necessary for the cache.
* We could have extended java.util.Map but that would make a lot of
* unnecessary work for the scope of this project. We can always implement that
* interface later if desired.
*/
public interface Cache {
    /**
     * Gets the cached value based on a key.
     * @param key The key
     * @return The cached object, or null if none is available
     */
    Object get(String key);
    /**
     * Sets a cached item using a key.
     * @param key The key
     * @param value The object to cache.
     */
    void put(String key, Object value);
    /**
     * Sets a cached item using a key.
     * @param key The key
     * @param value The object to cache.
     * @param timeToLive Time to cache this object in seconds
     */
    void put(String key, Object value, long timeToLive);
    /**
     * Invalidates a cached item using a key.
     * @param key The key of the entry to invalidate
     * @return success
     */
    boolean invalidate(String key);
    /**
     * Check if cache needs refresh based on existence of cached object and of Semaphore.
     * @param cacheObjPresent false if the cache object for this key exists
     * @param key The key
     * @return true if the cache object needs a refresh
     */
    boolean needsRefresh (boolean cacheObjPresent, String key);
    /**
     * Signals that a refresh for the given key has completed.
     * @param key The key of the refreshed entry
     * @param timeToNextRefresh time until the next refresh is due —
     *        presumably in seconds, like timeToLive; TODO confirm
     *        against the implementations (not visible here)
     * @throws CacheException if the refresh completion cannot be recorded
     */
    void doneRefresh (String key, long timeToNextRefresh) throws CacheException;
    /**
     * @return true if this cache is local — NOTE(review): exact semantics
     *         (per-JVM vs. distributed) depend on the implementation; confirm
     */
    boolean isLocal();
}
| shanti/olio | webapp/java/trunk/ws/apps/webapp/src/java/org/apache/olio/webapp/cache/Cache.java | Java | apache-2.0 | 2,298 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// script.aculo.us unittest.js v1.8.0_pre1, Fri Oct 12 21:34:51 +0200 2007
// Copyright (c) 2005-2007 Thomas Fuchs (http://script.aculo.us, http://mir.aculo.us)
// (c) 2005-2007 Jon Tirsen (http://www.tirsen.com)
// (c) 2005-2007 Michael Schuerig (http://www.schuerig.de/michael/)
//
// script.aculo.us is freely distributable under the terms of an MIT-style license.
// For details, see the script.aculo.us web site: http://script.aculo.us/
// experimental, Firefox-only
// Simulates a mouse event on `element` (experimental, Firefox-only:
// relies on document.createEvent("MouseEvents")/initMouseEvent).
// eventName: e.g. 'click'; arguments[2]: optional overrides for
// pointer position, buttons and modifier keys.
Event.simulateMouse = function(element, eventName) {
  // Merge caller-supplied options over the defaults.
  var options = Object.extend({
    pointerX: 0,
    pointerY: 0,
    buttons:  0,
    ctrlKey:  false,
    altKey:   false,
    shiftKey: false,
    metaKey:  false
  }, arguments[2] || {});
  var oEvent = document.createEvent("MouseEvents");
  // Argument order is fixed by the DOM initMouseEvent signature;
  // client and screen coordinates are both set to the pointer position.
  oEvent.initMouseEvent(eventName, true, true, document.defaultView,
    options.buttons, options.pointerX, options.pointerY, options.pointerX, options.pointerY,
    options.ctrlKey, options.altKey, options.shiftKey, options.metaKey, 0, $(element));
  // Drop a small red marker at the simulated pointer position (visual debug aid),
  // replacing any marker left by a previous call.
  if(this.mark) Element.remove(this.mark);
  this.mark = document.createElement('div');
  this.mark.appendChild(document.createTextNode(" "));
  document.body.appendChild(this.mark);
  this.mark.style.position = 'absolute';
  this.mark.style.top = options.pointerY + "px";
  this.mark.style.left = options.pointerX + "px";
  this.mark.style.width = "5px";
  this.mark.style.height = "5px"; // was "5px;" — stray ';' inside the value made the CSS invalid
  this.mark.style.borderTop = "1px solid red"; // was "1px solid red;" and missing statement ';'
  this.mark.style.borderLeft = "1px solid red";
  // In step mode, pause before dispatching so a human can observe the marker.
  if(this.step)
    alert('['+new Date().getTime().toString()+'] '+eventName+'/'+Test.Unit.inspect(options));
  $(element).dispatchEvent(oEvent);
};
// Note: Due to a fix in Firefox 1.0.5/6 that probably fixed "too much", this doesn't work in 1.0.6 or DP2.
// You need to downgrade to 1.0.4 for now to get this working
// See https://bugzilla.mozilla.org/show_bug.cgi?id=289940 for the fix that fixed too much
// Simulates a single keyboard event on `element` (Firefox-only: relies on
// the non-standard document.createEvent("KeyEvents")/initKeyEvent API —
// see the compatibility note above).
// eventName: e.g. 'keypress'; arguments[2]: optional overrides for
// modifier keys, keyCode and charCode.
Event.simulateKey = function(element, eventName) {
  // Merge caller-supplied options over the defaults.
  var options = Object.extend({
    ctrlKey: false,
    altKey: false,
    shiftKey: false,
    metaKey: false,
    keyCode: 0,
    charCode: 0
  }, arguments[2] || {});
  var oEvent = document.createEvent("KeyEvents");
  // Argument order is fixed by Gecko's initKeyEvent signature.
  oEvent.initKeyEvent(eventName, true, true, window,
    options.ctrlKey, options.altKey, options.shiftKey, options.metaKey,
    options.keyCode, options.charCode );
  $(element).dispatchEvent(oEvent);
};
// Simulates typing the string `command` into `element`, dispatching one
// 'keypress' event per character via Event.simulateKey.
Event.simulateKeys = function(element, command) {
  var total = command.length;
  var position = 0;
  while (position < total) {
    Event.simulateKey(element, 'keypress', { charCode: command.charCodeAt(position) });
    position++;
  }
};
// Root namespaces for the unit-testing framework.
var Test = {}; // was missing ';' — relied on automatic semicolon insertion
Test.Unit = {};
// security exception workaround: reuse Prototype's Object.inspect
// for formatting values in assertion messages.
Test.Unit.inspect = Object.inspect;
// Renders test progress and results as an HTML table inside the element
// identified by `log`. Every method is a no-op when that element is absent.
Test.Unit.Logger = Class.create();
Test.Unit.Logger.prototype = {
  // log: id or DOM element that will receive the result table.
  initialize: function(log) {
    this.log = $(log);
    if (this.log) {
      this._createLogTable();
    }
  },
  // Opens a new result row (status / name / message cells) for the named test.
  start: function(testName) {
    if (!this.log) return;
    this.testName = testName;
    this.lastLogLine = document.createElement('tr');
    this.statusCell = document.createElement('td');
    this.nameCell = document.createElement('td');
    this.nameCell.className = "nameCell";
    this.nameCell.appendChild(document.createTextNode(testName));
    this.messageCell = document.createElement('td');
    this.lastLogLine.appendChild(this.statusCell);
    this.lastLogLine.appendChild(this.nameCell);
    this.lastLogLine.appendChild(this.messageCell);
    this.loglines.appendChild(this.lastLogLine);
  },
  // Closes the current row with a status (used as CSS class too) and summary.
  finish: function(status, summary) {
    if (!this.log) return;
    this.lastLogLine.className = status;
    this.statusCell.innerHTML = status;
    this.messageCell.innerHTML = this._toHTML(summary);
    this.addLinksToResults();
  },
  // Replaces the message cell of the current row.
  message: function(message) {
    if (!this.log) return;
    this.messageCell.innerHTML = this._toHTML(message);
  },
  // Writes the overall run summary above the table.
  summary: function(summary) {
    if (!this.log) return;
    this.logsummary.innerHTML = this._toHTML(summary);
  },
  // Builds the empty summary div + result table and caches references to them.
  _createLogTable: function() {
    this.log.innerHTML =
      '<div id="logsummary"></div>' +
      '<table id="logtable">' +
      '<thead><tr><th>Status</th><th>Test</th><th>Message</th></tr></thead>' +
      '<tbody id="loglines"></tbody>' +
      '</table>';
    this.logsummary = $('logsummary')
    this.loglines = $('loglines');
  },
  // HTML-escapes text and turns newlines into <br/>.
  _toHTML: function(txt) {
    return txt.escapeHTML().replace(/\n/g,"<br/>");
  },
  // Makes test names clickable: a failed test re-runs alone (via the
  // ?tests= query parameter), a passed test re-runs the whole suite.
  addLinksToResults: function(){
    $$("tr.failed .nameCell").each( function(td){ // todo: limit to children of this.log
      td.title = "Run only this test"
      Event.observe(td, 'click', function(){ window.location.search = "?tests=" + td.innerHTML;});
    });
    $$("tr.passed .nameCell").each( function(td){ // todo: limit to children of this.log
      td.title = "Run all tests"
      Event.observe(td, 'click', function(){ window.location.search = "";});
    });
  }
}
// Collects the test* functions of a testcases object into Testcase
// instances, runs them sequentially (honoring wait()-based async tests),
// logs progress via Test.Unit.Logger, and can POST the overall result
// to a results URL.
Test.Unit.Runner = Class.create();
Test.Unit.Runner.prototype = {
  // testcases: object whose test*-named properties are test functions;
  // optional "setup"/"teardown" properties run around each test.
  // arguments[1]: options — testLog (element id), test/tests filters,
  // context/titles (used by Test.context), resultsURL.
  initialize: function(testcases) {
    this.options = Object.extend({
      testLog: 'testlog'
    }, arguments[1] || {});
    // Query-string parameters override the supplied options.
    this.options.resultsURL = this.parseResultsURLQueryParameter();
    this.options.tests = this.parseTestsQueryParameter();
    if (this.options.testLog) {
      this.options.testLog = $(this.options.testLog) || null;
    }
    if(this.options.tests) {
      // Explicit ?tests= list: keep only names starting with "test".
      this.tests = [];
      for(var i = 0; i < this.options.tests.length; i++) {
        if(/^test/.test(this.options.tests[i])) {
          this.tests.push(new Test.Unit.Testcase(this.options.tests[i], testcases[this.options.tests[i]], testcases["setup"], testcases["teardown"]));
        }
      }
    } else {
      if (this.options.test) {
        // Single named test.
        this.tests = [new Test.Unit.Testcase(this.options.test, testcases[this.options.test], testcases["setup"], testcases["teardown"])];
      } else {
        // Default: every test*-named property of the testcases object.
        this.tests = [];
        for(var testcase in testcases) {
          if(/^test/.test(testcase)) {
            this.tests.push(
              new Test.Unit.Testcase(
                this.options.context ? ' -> ' + this.options.titles[testcase] : testcase,
                testcases[testcase], testcases["setup"], testcases["teardown"]
              ));
          }
        }
      }
    }
    this.currentTest = 0;
    this.logger = new Test.Unit.Logger(this.options.testLog);
    // Deferred so the page finishes loading before tests start.
    setTimeout(this.runTests.bind(this), 1000);
  },
  parseResultsURLQueryParameter: function() {
    return window.location.search.parseQuery()["resultsURL"];
  },
  // Returns the comma-separated ?tests= list as an array, or undefined.
  parseTestsQueryParameter: function(){
    if (window.location.search.parseQuery()["tests"]){
      return window.location.search.parseQuery()["tests"].split(',');
    };
  },
  // Returns:
  // "ERROR" if there was an error,
  // "FAILURE" if there was a failure, or
  // "SUCCESS" if there was neither
  getResult: function() {
    var hasFailure = false;
    for(var i=0;i<this.tests.length;i++) {
      if (this.tests[i].errors > 0) {
        return "ERROR";
      }
      if (this.tests[i].failures > 0) {
        hasFailure = true;
      }
    }
    if (hasFailure) {
      return "FAILURE";
    } else {
      return "SUCCESS";
    }
  },
  // Reports the overall result to the configured resultsURL (synchronous GET).
  postResults: function() {
    if (this.options.resultsURL) {
      new Ajax.Request(this.options.resultsURL,
        { method: 'get', parameters: 'result=' + this.getResult(), asynchronous: false });
    }
  },
  // Runs the current test; reschedules itself while a test is waiting
  // (async), otherwise logs the outcome and advances to the next test.
  runTests: function() {
    var test = this.tests[this.currentTest];
    if (!test) {
      // finished!
      this.postResults();
      this.logger.summary(this.summary());
      return;
    }
    if(!test.isWaiting) {
      this.logger.start(test.name);
    }
    test.run();
    if(test.isWaiting) {
      this.logger.message("Waiting for " + test.timeToWait + "ms");
      setTimeout(this.runTests.bind(this), test.timeToWait || 1000);
    } else {
      this.logger.finish(test.status(), test.summary());
      this.currentTest++;
      // tail recursive, hopefully the browser will skip the stackframe
      this.runTests();
    }
  },
  // Aggregates counts across all testcases into a one-line summary.
  summary: function() {
    var assertions = 0;
    var failures = 0;
    var errors = 0;
    var messages = [];
    for(var i=0;i<this.tests.length;i++) {
      assertions += this.tests[i].assertions;
      failures += this.tests[i].failures;
      errors += this.tests[i].errors;
    }
    return (
      (this.options.context ? this.options.context + ': ': '') +
      this.tests.length + " tests, " +
      assertions + " assertions, " +
      failures + " failures, " +
      errors + " errors");
  }
}
Test.Unit.Assertions = Class.create();
// Mixin providing the assertion vocabulary plus pass/fail/error
// bookkeeping. Each assertion wraps its check in try/catch so a thrown
// exception is recorded as an error rather than aborting the test.
// Fix: the original object literal defined `benchmark` twice with
// byte-identical bodies (the second silently overwrote the first);
// the duplicate has been removed.
Test.Unit.Assertions.prototype = {
  initialize: function() {
    this.assertions = 0;
    this.failures = 0;
    this.errors = 0;
    this.messages = [];
  },
  // One-line tally followed by all recorded messages.
  summary: function() {
    return (
      this.assertions + " assertions, " +
      this.failures + " failures, " +
      this.errors + " errors" + "\n" +
      this.messages.join("\n"));
  },
  pass: function() {
    this.assertions++;
  },
  fail: function(message) {
    this.failures++;
    this.messages.push("Failure: " + message);
  },
  info: function(message) {
    this.messages.push("Info: " + message);
  },
  // Records a thrown exception (a framework/test error, not an assertion failure).
  error: function(error) {
    this.errors++;
    this.messages.push(error.name + ": "+ error.message + "(" + Test.Unit.inspect(error) +")");
  },
  // 'failed' takes precedence over 'error'; otherwise 'passed'.
  status: function() {
    if (this.failures > 0) return 'failed';
    if (this.errors > 0) return 'error';
    return 'passed';
  },
  // Truthiness check; optional custom message as second argument.
  assert: function(expression) {
    var message = arguments[1] || 'assert: got "' + Test.Unit.inspect(expression) + '"';
    try { expression ? this.pass() :
      this.fail(message); }
    catch(e) { this.error(e); }
  },
  // Loose equality (==).
  assertEqual: function(expected, actual) {
    var message = arguments[2] || "assertEqual";
    try { (expected == actual) ? this.pass() :
      this.fail(message + ': expected "' + Test.Unit.inspect(expected) +
        '", actual "' + Test.Unit.inspect(actual) + '"'); }
    catch(e) { this.error(e); }
  },
  // Compares `expected` against actual.inspect() (Prototype's representation).
  assertInspect: function(expected, actual) {
    var message = arguments[2] || "assertInspect";
    try { (expected == actual.inspect()) ? this.pass() :
      this.fail(message + ': expected "' + Test.Unit.inspect(expected) +
        '", actual "' + Test.Unit.inspect(actual) + '"'); }
    catch(e) { this.error(e); }
  },
  // Element-wise == comparison of two enumerables of equal length.
  assertEnumEqual: function(expected, actual) {
    var message = arguments[2] || "assertEnumEqual";
    try { $A(expected).length == $A(actual).length &&
      expected.zip(actual).all(function(pair) { return pair[0] == pair[1] }) ?
        this.pass() : this.fail(message + ': expected ' + Test.Unit.inspect(expected) +
          ', actual ' + Test.Unit.inspect(actual)); }
    catch(e) { this.error(e); }
  },
  assertNotEqual: function(expected, actual) {
    var message = arguments[2] || "assertNotEqual";
    try { (expected != actual) ? this.pass() :
      this.fail(message + ': got "' + Test.Unit.inspect(actual) + '"'); }
    catch(e) { this.error(e); }
  },
  // Strict equality (===).
  assertIdentical: function(expected, actual) {
    var message = arguments[2] || "assertIdentical";
    try { (expected === actual) ? this.pass() :
      this.fail(message + ': expected "' + Test.Unit.inspect(expected) +
        '", actual "' + Test.Unit.inspect(actual) + '"'); }
    catch(e) { this.error(e); }
  },
  assertNotIdentical: function(expected, actual) {
    var message = arguments[2] || "assertNotIdentical";
    try { !(expected === actual) ? this.pass() :
      this.fail(message + ': expected "' + Test.Unit.inspect(expected) +
        '", actual "' + Test.Unit.inspect(actual) + '"'); }
    catch(e) { this.error(e); }
  },
  // Passes for both null and undefined (loose == null).
  assertNull: function(obj) {
    var message = arguments[1] || 'assertNull';
    try { (obj==null) ? this.pass() :
      this.fail(message + ': got "' + Test.Unit.inspect(obj) + '"'); }
    catch(e) { this.error(e); }
  },
  // Passes when `actual` matches the regex built from `expected`.
  assertMatch: function(expected, actual) {
    var message = arguments[2] || 'assertMatch';
    var regex = new RegExp(expected);
    try { (regex.exec(actual)) ? this.pass() :
      this.fail(message + ' : regex: "' + Test.Unit.inspect(expected) + ' did not match: ' + Test.Unit.inspect(actual) + '"'); }
    catch(e) { this.error(e); }
  },
  // Checks the inline style only (display: none), not computed visibility.
  assertHidden: function(element) {
    var message = arguments[1] || 'assertHidden';
    this.assertEqual("none", element.style.display, message);
  },
  assertNotNull: function(object) {
    var message = arguments[1] || 'assertNotNull';
    this.assert(object != null, message);
  },
  // Compares the object's constructor against the expected constructor function.
  assertType: function(expected, actual) {
    var message = arguments[2] || 'assertType';
    try {
      (actual.constructor == expected) ? this.pass() :
      this.fail(message + ': expected "' + Test.Unit.inspect(expected) +
        '", actual "' + (actual.constructor) + '"'); }
    catch(e) { this.error(e); }
  },
  assertNotOfType: function(expected, actual) {
    var message = arguments[2] || 'assertNotOfType';
    try {
      (actual.constructor != expected) ? this.pass() :
      this.fail(message + ': expected "' + Test.Unit.inspect(expected) +
        '", actual "' + (actual.constructor) + '"'); }
    catch(e) { this.error(e); }
  },
  assertInstanceOf: function(expected, actual) {
    var message = arguments[2] || 'assertInstanceOf';
    try {
      (actual instanceof expected) ? this.pass() :
      this.fail(message + ": object was not an instance of the expected type"); }
    catch(e) { this.error(e); }
  },
  assertNotInstanceOf: function(expected, actual) {
    var message = arguments[2] || 'assertNotInstanceOf';
    try {
      !(actual instanceof expected) ? this.pass() :
      this.fail(message + ": object was an instance of the not expected type"); }
    catch(e) { this.error(e); }
  },
  // Passes when obj[method] exists and is a function.
  assertRespondsTo: function(method, obj) {
    var message = arguments[2] || 'assertRespondsTo';
    try {
      (obj[method] && typeof obj[method] == 'function') ? this.pass() :
      this.fail(message + ": object doesn't respond to [" + method + "]"); }
    catch(e) { this.error(e); }
  },
  // Calls obj[method]() (or obj['is'+Method]()) and passes when it returns truthy.
  assertReturnsTrue: function(method, obj) {
    var message = arguments[2] || 'assertReturnsTrue';
    try {
      var m = obj[method];
      if(!m) m = obj['is'+method.charAt(0).toUpperCase()+method.slice(1)];
      m() ? this.pass() :
      this.fail(message + ": method returned false"); }
    catch(e) { this.error(e); }
  },
  assertReturnsFalse: function(method, obj) {
    var message = arguments[2] || 'assertReturnsFalse';
    try {
      var m = obj[method];
      if(!m) m = obj['is'+method.charAt(0).toUpperCase()+method.slice(1)];
      !m() ? this.pass() :
      this.fail(message + ": method returned true"); }
    catch(e) { this.error(e); }
  },
  // Passes when `method` throws; exceptionName of null accepts any exception.
  assertRaise: function(exceptionName, method) {
    var message = arguments[2] || 'assertRaise';
    try {
      method();
      this.fail(message + ": exception expected but none was raised"); }
    catch(e) {
      ((exceptionName == null) || (e.name==exceptionName)) ? this.pass() : this.error(e);
    }
  },
  // First argument is a collection of elements; remaining arguments are
  // CSS-style expressions each element must match, positionally.
  assertElementsMatch: function() {
    var expressions = $A(arguments), elements = $A(expressions.shift());
    if (elements.length != expressions.length) {
      this.fail('assertElementsMatch: size mismatch: ' + elements.length + ' elements, ' + expressions.length + ' expressions');
      return false;
    }
    elements.zip(expressions).all(function(pair, index) {
      var element = $(pair.first()), expression = pair.last();
      if (element.match(expression)) return true;
      this.fail('assertElementsMatch: (in index ' + index + ') expected ' + expression.inspect() + ' but got ' + element.inspect());
    }.bind(this)) && this.pass();
  },
  assertElementMatches: function(element, expression) {
    this.assertElementsMatch([element], expression);
  },
  // Times `iterations` runs of `operation`, logs the duration via info(),
  // and returns the elapsed milliseconds.
  benchmark: function(operation, iterations) {
    var startAt = new Date();
    (iterations || 1).times(operation);
    var timeTaken = ((new Date())-startAt);
    this.info((arguments[2] || 'Operation') + ' finished ' +
       iterations + ' iterations in ' + (timeTaken/1000)+'s' );
    return timeTaken;
  },
  // Walks up the parent chain; hidden if any ancestor has display:none.
  _isVisible: function(element) {
    element = $(element);
    if(!element.parentNode) return true;
    this.assertNotNull(element);
    if(element.style && Element.getStyle(element, 'display') == 'none')
      return false;
    return this._isVisible(element.parentNode);
  },
  // NOTE(review): ("" || arguments[1]) always evaluates to arguments[1]
  // (appending "undefined" when no message is given) — kept as-is.
  assertNotVisible: function(element) {
    this.assert(!this._isVisible(element), Test.Unit.inspect(element) + " was not hidden and didn't have a hidden parent either. " + ("" || arguments[1]));
  },
  assertVisible: function(element) {
    this.assert(this._isVisible(element), Test.Unit.inspect(element) + " was not visible. " + ("" || arguments[1]));
  }
}
// A single named test with optional setup/teardown, mixing in the
// assertion vocabulary from Test.Unit.Assertions. Supports async tests
// via wait(), which swaps in a continuation and flags the runner to
// reschedule this testcase.
Test.Unit.Testcase = Class.create();
Object.extend(Object.extend(Test.Unit.Testcase.prototype, Test.Unit.Assertions.prototype), {
  initialize: function(name, test, setup, teardown) {
    Test.Unit.Assertions.prototype.initialize.bind(this)();
    this.name = name;
    if(typeof test == 'string') {
      // BDD-style string spec: rewrite ".shouldXxx(" calls to pass `this`
      // (the testcase) as the assertion scope, then compile via eval.
      test = test.gsub(/(\.should[^\(]+\()/,'#{0}this,');
      test = test.gsub(/(\.should[^\(]+)\(this,\)/,'#{1}(this)');
      this.test = function() {
        eval('with(this){'+test+'}');
      }
    } else {
      this.test = test || function() {};
    }
    this.setup = setup || function() {};
    this.teardown = teardown || function() {};
    this.isWaiting = false;
    this.timeToWait = 1000;
  },
  // Suspends the test for `time` ms; `nextPart` becomes the test body
  // that runs when the runner re-enters this testcase.
  wait: function(time, nextPart) {
    this.isWaiting = true;
    this.test = nextPart;
    this.timeToWait = time;
  },
  // Runs setup (only on first entry, not on wait-resume), then the test
  // body; teardown runs unless the test is (still) waiting. Any thrown
  // exception is recorded via error().
  run: function() {
    try {
      try {
        if (!this.isWaiting) this.setup.bind(this)();
        this.isWaiting = false;
        this.test.bind(this)();
      } finally {
        if(!this.isWaiting) {
          this.teardown.bind(this)();
        }
      }
    }
    catch(e) { this.error(e); }
  }
});
// *EXPERIMENTAL* BDD-style testing to please non-technical folk
// This draws many ideas from RSpec http://rspec.rubyforge.org/
// Installs shouldXxx methods on Array/String/Number/Boolean prototypes,
// each delegating to the corresponding assertXxx method on the testcase
// scope passed as the first argument (inserted by Testcase's rewrite).
Test.setupBDDExtensionMethods = function(){
  // Maps BDD method names to the underlying assertion methods.
  var METHODMAP = {
    shouldEqual: 'assertEqual',
    shouldNotEqual: 'assertNotEqual',
    shouldEqualEnum: 'assertEnumEqual',
    shouldBeA: 'assertType',
    shouldNotBeA: 'assertNotOfType',
    shouldBeAn: 'assertType',
    shouldNotBeAn: 'assertNotOfType',
    shouldBeNull: 'assertNull',
    shouldNotBeNull: 'assertNotNull',
    shouldBe: 'assertReturnsTrue',
    shouldNotBe: 'assertReturnsFalse',
    shouldRespondTo: 'assertRespondsTo'
  };
  // Invokes the assertion on the testcase scope, appending the receiver
  // (`object`, the value the shouldXxx was called on) as the last argument.
  var makeAssertion = function(assertion, args, object) {
    this[assertion].apply(this,(args || []).concat([object]));
  }
  Test.BDDMethods = {};
  $H(METHODMAP).each(function(pair) {
    Test.BDDMethods[pair.key] = function() {
      var args = $A(arguments);
      var scope = args.shift();
      makeAssertion.apply(scope, [pair.value, args, this]); };
  });
  [Array.prototype, String.prototype, Number.prototype, Boolean.prototype].each(
    function(p){ Object.extend(p, Test.BDDMethods) }
  );
}
// Compiles a spec object into a Test.Unit.Runner suite: each non-setup/
// teardown property becomes a testNNN function whose body is the spec
// function's source, statement by statement (stringified so Testcase can
// rewrite the shouldXxx calls).
Test.context = function(name, spec, log){
  Test.setupBDDExtensionMethods();
  var compiledSpec = {};
  var titles = {};
  for(specName in spec) {
    switch(specName){
      case "setup":
      case "teardown":
        compiledSpec[specName] = spec[specName];
        break;
      default:
        // Derive a test* name from the spec title, e.g. "adds items" -> testAdds-items.
        var testName = 'test'+specName.gsub(/\s+/,'-').camelize();
        // Take the function source minus its first and last lines (the
        // wrapper), stripping indentation from each statement.
        var body = spec[specName].toString().split('\n').slice(1);
        if(/^\{/.test(body[0])) body = body.slice(1);
        body.pop();
        body = body.map(function(statement){
          return statement.strip()
        });
        compiledSpec[testName] = body.join('\n');
        titles[testName] = specName;
    }
  }
  new Test.Unit.Runner(compiledSpec, { titles: titles, testLog: log || 'testlog', context: name });
}; | shanti/olio | webapp/rails/trunk/vendor/plugins/rspec/story_server/prototype/javascripts/unittest.js | JavaScript | apache-2.0 | 21,009 |
// Copyright 2004 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry.vlib.ejb;
import java.rmi.RemoteException;
import javax.ejb.EJBObject;
/**
* Remote interface for the BookQuery stateless session bean.
*
* @version $Id$
* @author Howard Lewis Ship
*
**/
public interface IBookQuery extends EJBObject
{
    /**
     * Returns the total number of result rows in the query.
     *
     * @return the result count of the most recent query
     * @throws RemoteException on remote-invocation failure
     **/
    public int getResultCount() throws RemoteException;
    /**
     * Returns a selected subset of the results.
     *
     * @param offset index of the first result to return
     * @param length maximum number of results to return
     * @throws RemoteException on remote-invocation failure
     **/
    public Book[] get(int offset, int length) throws RemoteException;
    /**
     * Performs a query of books with the matching title and (optionally) publisher.
     *
     * @param parameters defines subset of books to return.
     * @param sortOrdering order of items in result set.
     * @return presumably the number of matching rows, like the other
     *         query methods — TODO confirm against the bean implementation
     **/
    public int masterQuery(MasterQueryParameters parameters, SortOrdering sortOrdering) throws RemoteException;
    /**
     * Queries on books owned by a given person.
     *
     * @param ownerPK primary key of the owner
     * @param sortOrdering order of items in result set.
     **/
    public int ownerQuery(Integer ownerPK, SortOrdering sortOrdering) throws RemoteException;
    /**
     * Queries on books held by a given person.
     *
     * @param holderPK primary key of the holder
     * @param sortOrdering order of items in result set.
     **/
    public int holderQuery(Integer holderPK, SortOrdering sortOrdering) throws RemoteException;
    /**
     * Queries the list of books held by the borrower but not owned by the borrower.
     *
     * @param borrowerPK primary key of the borrower
     * @param sortOrdering order of items in result set.
     **/
    public int borrowerQuery(Integer borrowerPK, SortOrdering sortOrdering) throws RemoteException;
} | apache/tapestry3 | tapestry-examples/VlibBeans/src/org/apache/tapestry/vlib/ejb/IBookQuery.java | Java | apache-2.0 | 2,080 |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.structuralsearch.impl.matcher.compiler;
import com.intellij.codeInsight.template.Template;
import com.intellij.codeInsight.template.TemplateManager;
import com.intellij.dupLocator.util.NodeFilter;
import com.intellij.lang.Language;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.LanguageFileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiErrorElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiRecursiveElementWalkingVisitor;
import com.intellij.psi.impl.source.tree.LeafElement;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.structuralsearch.*;
import com.intellij.structuralsearch.impl.matcher.CompiledPattern;
import com.intellij.structuralsearch.impl.matcher.MatcherImplUtil;
import com.intellij.structuralsearch.impl.matcher.PatternTreeContext;
import com.intellij.structuralsearch.impl.matcher.filters.LexicalNodesFilter;
import com.intellij.structuralsearch.impl.matcher.handlers.MatchingHandler;
import com.intellij.structuralsearch.impl.matcher.handlers.SubstitutionHandler;
import com.intellij.structuralsearch.impl.matcher.predicates.*;
import com.intellij.structuralsearch.plugin.ui.Configuration;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.SmartList;
import gnu.trove.TIntArrayList;
import gnu.trove.TIntHashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Compiles a structural search pattern ({@link MatchOptions}) into a
 * {@link CompiledPattern}: substitutes template variables with prefixed
 * identifiers, parses the resulting text, and registers the matching
 * handlers and predicates derived from the variable constraints.
 */
public class PatternCompiler {
  // Assigned only when the application runs in unit-test mode (see compilePattern());
  // read back by getLastFindPlan().
  private static CompileContext lastTestingContext;

  /**
   * Compiles the search pattern held by {@code options} for the language of its file type.
   * When the optimizing search helper scanned candidate files, the compiled pattern's
   * scope is narrowed to the files that are also inside the options' search scope.
   *
   * @throws MalformedPatternException if the pattern cannot be parsed or references
   *                                   variables without a registered handler
   * @throws NoMatchFoundException     if scope optimization shows that no file can match
   */
  public static CompiledPattern compilePattern(final Project project, final MatchOptions options)
    throws MalformedPatternException, NoMatchFoundException, UnsupportedOperationException {
    FileType fileType = options.getFileType();
    assert fileType instanceof LanguageFileType;
    Language language = ((LanguageFileType)fileType).getLanguage();
    StructuralSearchProfile profile = StructuralSearchUtil.getProfileByLanguage(language);
    assert profile != null;
    CompiledPattern result = profile.createCompiledPattern();
    final String[] prefixes = result.getTypedVarPrefixes();
    assert prefixes.length > 0;
    final CompileContext context = new CompileContext(result, options, project);
    if (ApplicationManager.getApplication().isUnitTestMode()) lastTestingContext = context;
    try {
      List<PsiElement> elements = compileByAllPrefixes(project, options, result, context, prefixes);
      final CompiledPattern pattern = context.getPattern();
      checkForUnknownVariables(pattern, elements);
      pattern.setNodes(elements);
      // Narrow the search scope to the candidate files found during optimization.
      if (context.getSearchHelper().doOptimizing() && context.getSearchHelper().isScannedSomething()) {
        final Set<PsiFile> set = context.getSearchHelper().getFilesSetToScan();
        final List<PsiFile> filesToScan = new SmartList<>();
        final GlobalSearchScope scope = (GlobalSearchScope)options.getScope();
        for (final PsiFile file : set) {
          // Skip candidates outside the user-selected scope.
          if (!scope.contains(file.getVirtualFile())) {
            continue;
          }
          filesToScan.add(file);
        }
        if (filesToScan.size() == 0) {
          throw new NoMatchFoundException(SSRBundle.message("ssr.will.not.find.anything", scope.getDisplayName()));
        }
        result.setScope(new LocalSearchScope(PsiUtilCore.toPsiElementArray(filesToScan)));
      }
    } finally {
      context.clear();
    }
    return result;
  }

  /**
   * Verifies that every typed-variable leaf in the compiled elements has a registered
   * handler; throws {@link MalformedPatternException} otherwise.
   */
  private static void checkForUnknownVariables(final CompiledPattern pattern, List<PsiElement> elements) {
    for (PsiElement element : elements) {
      element.accept(new PsiRecursiveElementWalkingVisitor() {
        @Override
        public void visitElement(PsiElement element) {
          // Elements that already carry a handler were fully processed during compilation.
          if (element.getUserData(CompiledPattern.HANDLER_KEY) != null) {
            return;
          }
          super.visitElement(element);
          if (!(element instanceof LeafElement) || !pattern.isTypedVar(element)) {
            return;
          }
          final MatchingHandler handler = pattern.getHandler(pattern.getTypedVarString(element));
          if (handler == null) {
            throw new MalformedPatternException();
          }
        }
      });
    }
  }

  /**
   * Test-only accessor for the search plan produced by the last compilation.
   * NOTE(review): relies on {@link #lastTestingContext} having been set, i.e.
   * compilePattern() ran in unit-test mode; otherwise this throws an NPE.
   */
  public static String getLastFindPlan() {
    return ((TestModeOptimizingSearchHelper)lastTestingContext.getSearchHelper()).getSearchPlan();
  }

  /**
   * Compiles the pattern using the first applicable prefix for every variable.
   * If the parse tree contains error elements (beyond those sitting right after
   * variables or at the pattern end), retries with per-variable prefix combinations
   * via {@link #compileByPrefixes}; falls back to the first-prefix compilation when
   * no combination parses cleanly.
   */
  @NotNull
  private static List<PsiElement> compileByAllPrefixes(Project project,
                                                       MatchOptions options,
                                                       CompiledPattern pattern,
                                                       CompileContext context,
                                                       String[] applicablePrefixes) throws MalformedPatternException {
    if (applicablePrefixes.length == 0) {
      return Collections.emptyList();
    }
    List<PsiElement> elements = doCompile(project, options, pattern, new ConstantPrefixProvider(applicablePrefixes[0]), context);
    if (elements.isEmpty()) {
      return elements;
    }
    final PsiFile file = elements.get(0).getContainingFile();
    if (file == null) {
      return elements;
    }
    final PsiElement last = elements.get(elements.size() - 1);
    // One regex per prefix, matching "<prefix><identifier>" occurrences in leaf text.
    final Pattern[] patterns = new Pattern[applicablePrefixes.length];
    for (int i = 0; i < applicablePrefixes.length; i++) {
      patterns[i] = Pattern.compile(StructuralSearchUtil.shieldRegExpMetaChars(applicablePrefixes[i]) + "\\w+\\b");
    }
    final int[] varEndOffsets = findAllTypedVarOffsets(file, patterns);
    final int patternEndOffset = last.getTextRange().getEndOffset();
    if (elements.size() == 0 ||
        checkErrorElements(file, patternEndOffset, patternEndOffset, varEndOffsets, true) != Boolean.TRUE) {
      // No problematic error elements — the first-prefix compilation is good enough.
      return elements;
    }
    final int varCount = varEndOffsets.length;
    final String[] prefixSequence = new String[varCount];
    for (int i = 0; i < varCount; i++) {
      prefixSequence[i] = applicablePrefixes[0];
    }
    final List<PsiElement> finalElements =
      compileByPrefixes(project, options, pattern, context, applicablePrefixes, patterns, prefixSequence, 0);
    return finalElements != null
           ? finalElements
           : doCompile(project, options, pattern, new ConstantPrefixProvider(applicablePrefixes[0]), context);
  }

  /**
   * Recursively tries each applicable prefix for the variable at {@code index},
   * backtracking over the remaining variables. Returns the compiled elements of the
   * first combination that parses without significant errors, or {@code null} when
   * no combination does. A combination whose only errors sit directly after
   * variables is remembered as an alternative and retried last.
   */
  @Nullable
  private static List<PsiElement> compileByPrefixes(Project project,
                                                    MatchOptions options,
                                                    CompiledPattern pattern,
                                                    CompileContext context,
                                                    String[] applicablePrefixes,
                                                    Pattern[] substitutionPatterns,
                                                    String[] prefixSequence,
                                                    int index) throws MalformedPatternException {
    if (index >= prefixSequence.length) {
      // All variables have a prefix assigned: compile and validate the full sequence.
      final List<PsiElement> elements = doCompile(project, options, pattern, new ArrayPrefixProvider(prefixSequence), context);
      if (elements.isEmpty()) {
        return elements;
      }
      final PsiElement parent = elements.get(0).getParent();
      final PsiElement last = elements.get(elements.size() - 1);
      final int[] varEndOffsets = findAllTypedVarOffsets(parent.getContainingFile(), substitutionPatterns);
      final int patternEndOffset = last.getTextRange().getEndOffset();
      return checkErrorElements(parent, patternEndOffset, patternEndOffset, varEndOffsets, false) != Boolean.TRUE
             ? elements
             : null;
    }
    String[] alternativeVariant = null;
    for (String applicablePrefix : applicablePrefixes) {
      prefixSequence[index] = applicablePrefix;
      List<PsiElement> elements = doCompile(project, options, pattern, new ArrayPrefixProvider(prefixSequence), context);
      if (elements.isEmpty()) {
        return elements;
      }
      final PsiFile file = elements.get(0).getContainingFile();
      if (file == null) {
        return elements;
      }
      final int[] varEndOffsets = findAllTypedVarOffsets(file, substitutionPatterns);
      final int offset = varEndOffsets[index];
      final int patternEndOffset = elements.get(elements.size() - 1).getTextRange().getEndOffset();
      final Boolean result = checkErrorElements(file, offset, patternEndOffset, varEndOffsets, false);
      if (result == Boolean.TRUE) {
        // This prefix breaks the parse — try the next one.
        continue;
      }
      if (result == Boolean.FALSE || (result == null && alternativeVariant == null)) {
        final List<PsiElement> finalElements =
          compileByPrefixes(project, options, pattern, context, applicablePrefixes, substitutionPatterns, prefixSequence, index + 1);
        if (finalElements != null) {
          if (result == Boolean.FALSE) {
            return finalElements;
          }
          // Only "soft" errors (right after variables): remember as a fallback.
          alternativeVariant = new String[prefixSequence.length];
          System.arraycopy(prefixSequence, 0, alternativeVariant, 0, prefixSequence.length);
        }
      }
    }
    return alternativeVariant != null ?
           compileByPrefixes(project, options, pattern, context, applicablePrefixes, substitutionPatterns, alternativeVariant, index + 1) :
           null;
  }

  /**
   * Returns the sorted end offsets of every typed-variable occurrence
   * (prefix + identifier) found in the file's leaf elements.
   */
  @NotNull
  private static int[] findAllTypedVarOffsets(final PsiFile file, final Pattern[] substitutionPatterns) {
    final TIntHashSet result = new TIntHashSet();
    file.accept(new PsiRecursiveElementWalkingVisitor() {
      @Override
      public void visitElement(PsiElement element) {
        super.visitElement(element);
        if (element instanceof LeafElement) {
          final String text = element.getText();
          for (Pattern pattern : substitutionPatterns) {
            final Matcher matcher = pattern.matcher(text);
            while (matcher.find()) {
              // Record the offset just past the variable occurrence.
              result.add(element.getTextRange().getStartOffset() + matcher.end());
            }
          }
        }
      }
    });
    final int[] resultArray = result.toArray();
    Arrays.sort(resultArray);
    return resultArray;
  }

  /**
   * Inspects the subtree of {@code element} for parse errors.
   *
   * @return {@code Boolean.FALSE} when there are no error elements before {@code offset},
   *         except possibly at {@code patternEndOffset};
   *         {@code null} when the only error elements are located directly after template
   *         variables or at the end of the pattern;
   *         {@code Boolean.TRUE} otherwise. With {@code strict}, errors after variables
   *         also count as real errors.
   */
  @Nullable
  private static Boolean checkErrorElements(PsiElement element,
                                            final int offset,
                                            final int patternEndOffset,
                                            final int[] varEndOffsets,
                                            final boolean strict) {
    final TIntArrayList errorOffsets = new TIntArrayList();
    final boolean[] containsErrorTail = {false};
    final TIntHashSet varEndOffsetsSet = new TIntHashSet(varEndOffsets);
    element.accept(new PsiRecursiveElementWalkingVisitor() {
      @Override
      public void visitErrorElement(PsiErrorElement element) {
        super.visitErrorElement(element);
        final int startOffset = element.getTextRange().getStartOffset();
        // In strict mode every error counts; otherwise errors right after a variable are tolerated.
        if ((strict || !varEndOffsetsSet.contains(startOffset)) && startOffset != patternEndOffset) {
          errorOffsets.add(startOffset);
        }
        if (startOffset == offset) {
          containsErrorTail[0] = true;
        }
      }
    });
    for (int i = 0; i < errorOffsets.size(); i++) {
      final int errorOffset = errorOffsets.get(i);
      if (errorOffset <= offset) {
        return true;
      }
    }
    return containsErrorTail[0] ? null : false;
  }

  /** Maps a typed-variable index to the prefix used when building the physical pattern text. */
  private interface PrefixProvider {
    String getPrefix(int varIndex);
  }

  /** Uses the same prefix for every variable. */
  private static class ConstantPrefixProvider implements PrefixProvider {
    private final String myPrefix;
    ConstantPrefixProvider(String prefix) {
      myPrefix = prefix;
    }
    @Override
    public String getPrefix(int varIndex) {
      return myPrefix;
    }
  }

  /** Uses an individually chosen prefix per variable index. */
  private static class ArrayPrefixProvider implements PrefixProvider {
    private final String[] myPrefixes;
    ArrayPrefixProvider(String[] prefixes) {
      myPrefixes = prefixes;
    }
    @Override
    public String getPrefix(int varIndex) {
      // null signals "no prefix available" and makes doCompile fail the pattern.
      if (varIndex >= myPrefixes.length) return null;
      return myPrefixes[varIndex];
    }
  }

  /**
   * Core compilation step: rebuilds the pattern text by replacing each template segment
   * (variable) with its prefixed name, registers a {@link SubstitutionHandler} plus
   * predicates for every variable constraint (including the special context variable),
   * parses the text, runs the {@link GlobalCompilingVisitor}, and returns the
   * non-lexical top-level elements.
   */
  private static List<PsiElement> doCompile(Project project,
                                            MatchOptions options,
                                            CompiledPattern result,
                                            PrefixProvider prefixProvider,
                                            CompileContext context) throws MalformedPatternException {
    result.clearHandlers();
    final StringBuilder buf = new StringBuilder();
    Template template = TemplateManager.getInstance(project).createTemplate("","",options.getSearchPattern());
    int segmentsCount = template.getSegmentsCount();
    String text = template.getTemplateText();
    int prevOffset = 0;
    for(int i=0;i<segmentsCount;++i) {
      final int offset = template.getSegmentOffset(i);
      final String name = template.getSegmentName(i);
      final String prefix = prefixProvider.getPrefix(i);
      if (prefix == null) {
        throw new MalformedPatternException();
      }
      // Copy the literal text before this variable, then the prefixed variable name.
      buf.append(text.substring(prevOffset,offset));
      buf.append(prefix);
      buf.append(name);
      MatchVariableConstraint constraint = options.getVariableConstraint(name);
      if (constraint==null) {
        // No constraint has been edited for this variable yet — register a default one.
        constraint = new MatchVariableConstraint();
        constraint.setName( name );
        options.addVariableConstraint(constraint);
      }
      SubstitutionHandler handler = result.createSubstitutionHandler(
        name,
        prefix + name,
        constraint.isPartOfSearchResults(),
        constraint.getMinCount(),
        constraint.getMaxCount(),
        constraint.isGreedy()
      );
      if(constraint.isWithinHierarchy()) {
        handler.setSubtype(true);
      }
      if(constraint.isStrictlyWithinHierarchy()) {
        handler.setStrictSubtype(true);
      }
      MatchPredicate predicate;
      if (!StringUtil.isEmptyOrSpaces(constraint.getRegExp())) {
        predicate = new RegExpPredicate(
          constraint.getRegExp(),
          options.isCaseSensitiveMatch(),
          name,
          constraint.isWholeWordsOnly(),
          constraint.isPartOfSearchResults()
        );
        if (constraint.isInvertRegExp()) {
          predicate = new NotPredicate(predicate);
        }
        addPredicate(handler,predicate);
      }
      if (constraint.isReference()) {
        predicate = new ReferencePredicate( constraint.getNameOfReferenceVar() );
        if (constraint.isInvertReference()) {
          predicate = new NotPredicate(predicate);
        }
        addPredicate(handler,predicate);
      }
      addExtensionPredicates(options, constraint, handler);
      addScriptConstraint(project, name, constraint, handler);
      if (!StringUtil.isEmptyOrSpaces(constraint.getContainsConstraint())) {
        predicate = new ContainsPredicate(name, constraint.getContainsConstraint());
        if (constraint.isInvertContainsConstraint()) {
          predicate = new NotPredicate(predicate);
        }
        addPredicate(handler,predicate);
      }
      if (!StringUtil.isEmptyOrSpaces(constraint.getWithinConstraint())) {
        // The "within" constraint is only expected on the context variable,
        // which is handled below — reaching this point indicates a bug.
        assert false;
      }
      prevOffset = offset;
    }
    // Handle the implicit context variable that represents the whole pattern.
    MatchVariableConstraint constraint = options.getVariableConstraint(Configuration.CONTEXT_VAR_NAME);
    if (constraint != null) {
      SubstitutionHandler handler = result.createSubstitutionHandler(
        Configuration.CONTEXT_VAR_NAME,
        Configuration.CONTEXT_VAR_NAME,
        constraint.isPartOfSearchResults(),
        constraint.getMinCount(),
        constraint.getMaxCount(),
        constraint.isGreedy()
      );
      if (!StringUtil.isEmptyOrSpaces(constraint.getWithinConstraint())) {
        MatchPredicate predicate = new WithinPredicate(constraint.getWithinConstraint(), options.getFileType(), project);
        if (constraint.isInvertWithinConstraint()) {
          predicate = new NotPredicate(predicate);
        }
        addPredicate(handler,predicate);
      }
      addExtensionPredicates(options, constraint, handler);
      addScriptConstraint(project, Configuration.CONTEXT_VAR_NAME, constraint, handler);
    }
    // Append the trailing literal text after the last variable.
    buf.append(text.substring(prevOffset,text.length()));
    PsiElement[] matchStatements;
    try {
      matchStatements = MatcherImplUtil.createTreeFromText(buf.toString(), PatternTreeContext.Block, options.getFileType(),
                                                           options.getDialect(), options.getPatternContext(), project, false);
      if (matchStatements.length==0) throw new MalformedPatternException();
    } catch (IncorrectOperationException e) {
      throw new MalformedPatternException(e.getMessage());
    }
    NodeFilter filter = LexicalNodesFilter.getInstance();
    GlobalCompilingVisitor compilingVisitor = new GlobalCompilingVisitor();
    compilingVisitor.compile(matchStatements,context);
    List<PsiElement> elements = new SmartList<>();
    for (PsiElement matchStatement : matchStatements) {
      // Keep only non-lexical (semantically meaningful) top-level elements.
      if (!filter.accepts(matchStatement)) {
        elements.add(matchStatement);
      }
    }
    new DeleteNodesAction(compilingVisitor.getLexicalNodes()).run();
    return elements;
  }

  /** Adds predicates contributed through the MatchPredicateProvider extension point. */
  private static void addExtensionPredicates(MatchOptions options, MatchVariableConstraint constraint, SubstitutionHandler handler) {
    Set<MatchPredicate> predicates = new LinkedHashSet<>();
    for (MatchPredicateProvider matchPredicateProvider : Extensions.getExtensions(MatchPredicateProvider.EP_NAME)) {
      matchPredicateProvider.collectPredicates(constraint, handler.getName(), options, predicates);
    }
    for (MatchPredicate matchPredicate : predicates) {
      addPredicate(handler, matchPredicate);
    }
  }

  /**
   * Validates the (quoted) script constraint and attaches a {@link ScriptPredicate}.
   * The length check (> 2) skips empty quoted scripts.
   */
  private static void addScriptConstraint(Project project, String name, MatchVariableConstraint constraint, SubstitutionHandler handler)
    throws MalformedPatternException {
    if (constraint.getScriptCodeConstraint()!= null && constraint.getScriptCodeConstraint().length() > 2) {
      final String script = StringUtil.unquoteString(constraint.getScriptCodeConstraint());
      final String problem = ScriptSupport.checkValidScript(script);
      if (problem != null) {
        throw new MalformedPatternException("Script constraint for " + constraint.getName() + " has problem " + problem);
      }
      addPredicate(handler, new ScriptPredicate(project, name, script));
    }
  }

  /** Attaches a predicate to the handler, AND-combining it with any existing predicate. */
  private static void addPredicate(SubstitutionHandler handler, MatchPredicate predicate) {
    if (handler.getPredicate()==null) {
      handler.setPredicate(predicate);
    } else {
      handler.setPredicate(new AndPredicate(handler.getPredicate(), predicate));
    }
  }
}
} | apixandru/intellij-community | platform/structuralsearch/source/com/intellij/structuralsearch/impl/matcher/compiler/PatternCompiler.java | Java | apache-2.0 | 20,077 |
package examples.model;
import java.util.ArrayList;
import java.util.Collection;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.OneToMany;
/**
 * JPA entity representing a department with a collection of employees.
 * The one-to-many association is bidirectional and mapped by the
 * {@code department} field on {@code Employee} (the owning side).
 */
@Entity
public class Department {

    /** Primary key; assigned externally — no generation strategy is configured. */
    @Id
    private int id;

    private String name;

    // Inverse side of the bidirectional association; Employee holds the foreign key.
    @OneToMany(mappedBy="department")
    private Collection<Employee> employees;

    public Department() {
        // Initialize eagerly so getEmployees() never returns null on a new instance.
        employees = new ArrayList<Employee>();
    }

    public int getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    /** Returns the live (mutable) employee collection managed by this entity. */
    public Collection<Employee> getEmployees() {
        return employees;
    }

    // @Override added: this overrides Object.toString and should be marked as such.
    @Override
    public String toString() {
        return "Department no: " + getId() +
               ", name: " + getName();
    }
}
| velmuruganvelayutham/jpa | examples/Chapter7/02-namedQueryExample/src/model/examples/model/Department.java | Java | apache-2.0 | 759 |
export const FILLPATTERN = {
none: '',
crossHatched: 'Crosshatched',
hatched: 'Hatched',
solid: 'Solid'
};
export const STROKEPATTERN = {
none: '',
dashed: 'Dashed',
dotted: 'Dotted',
solid: 'Solid'
};
export const ALTITUDEMODE = {
NONE: '',
ABSOLUTE: 'Absolute',
RELATIVE_TO_GROUND: 'Relative to ground',
CLAMP_TO_GROUND: 'Clamp to ground'
};
export const ICONSIZE = {
none: '',
verySmall: 'Very Small',
small: 'Small',
medium: 'Medium',
large: 'Large',
extraLarge: 'Extra Large'
};
export const ACMATTRIBUTES = {
innerRadius: 'Inner Radius',
leftAzimuth: 'Left Azimuth',
rightAzimuth: 'Right Azimuth',
minAlt: 'Minimum Altitude',
maxAlt: 'Maximum Altitude',
leftWidth: 'Left Width',
rightWidth: 'Right Width',
radius: 'Radius',
turn: 'Turn',
width: 'Width'
};
export const WMSVERSION = {
WMS: 'none',
WMS1_1: '1.0',
WMS1_1_1: '1.1.1',
WMS1_3_0: '1.3.0'
};
export const WMTSVERSION = {
WMTS: 'none',
WMTS1_0_0: '1.0.0'
};
| missioncommand/emp3-web | src/validation/js/constants/PropertyConstants.js | JavaScript | apache-2.0 | 997 |
๏ปฟ// ----------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// ----------------------------------------------------------------------------
using System;
using System.IO;
namespace Microsoft.WindowsAzure.MobileServices
{
/// <summary>
/// Provides access to platform specific functionality for the current client platform.
/// </summary>
/// <summary>
/// Provides access to platform specific functionality for the current client platform.
/// </summary>
public class CurrentPlatform : IPlatform
{
    /// <summary>
    /// You must call this method from your application in order to ensure
    /// that this platform specific assembly is included in your app.
    /// </summary>
    public static void Init()
    {
    }

    /// <summary>
    /// Returns a platform-specific implementation of application storage.
    /// </summary>
    IApplicationStorage IPlatform.ApplicationStorage
    {
        get { return ApplicationStorage.Instance; }
    }

    /// <summary>
    /// Returns a platform-specific implementation of platform information.
    /// </summary>
    IPlatformInformation IPlatform.PlatformInformation
    {
        get { return PlatformInformation.Instance; }
    }

    /// <summary>
    /// Returns a platform-specific implementation of a utility class
    /// that provides functionality for manipulating
    /// <see cref="System.Linq.Expressions.Expression"/> instances.
    /// </summary>
    IExpressionUtility IPlatform.ExpressionUtility
    {
        get { return ExpressionUtility.Instance; }
    }

    /// <summary>
    /// Returns a platform-specific implementation of a utility class
    /// that provides functionality for platform-specifc push capabilities.
    /// </summary>
    IPushUtility IPlatform.PushUtility { get { return Microsoft.WindowsAzure.MobileServices.PushUtility.Instance; } }

    /// <summary>
    /// Returns a platform-specific path for storing offline databases
    /// that are not fully-qualified.
    /// </summary>
    string IPlatform.DefaultDatabasePath
    {
        get
        {
            return Environment.GetFolderPath(Environment.SpecialFolder.Personal);
        }
    }

    /// <summary>
    /// Retrieves an ApplicationStorage where all items stored are segmented from other stored items
    /// </summary>
    /// <param name="name">The name of the segemented area in application storage</param>
    /// <returns>The specific instance of that segment</returns>
    IApplicationStorage IPlatform.GetNamedApplicationStorage(string name)
    {
        return new ApplicationStorage(name);
    }

    /// <summary>
    /// Ensures that a file exists, creating it if necessary
    /// </summary>
    /// <param name="path">The fully-qualified pathname to check</param>
    public void EnsureFileExists(string path)
    {
        if (!File.Exists(path))
        {
            // BUGFIX: File.Create returns an open FileStream; dispose it immediately
            // so the OS handle is released instead of lingering until finalization
            // (the previous code leaked the stream).
            File.Create(path).Dispose();
        }
    }
}
} | MatkovIvan/azure-mobile-apps-net-client | src/Microsoft.Azure.Mobile.Client/Platforms/ios/CurrentPlatform.cs | C# | apache-2.0 | 3,139 |
<?php
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
namespace Google\Service\ChromePolicy;
/**
 * Model describing a single field of a Chrome policy schema.
 * Generated Google API client model; the paired *Type/*DataType properties drive
 * (de)serialization in the \Google\Collection / \Google\Model base machinery.
 */
class GoogleChromePolicyV1PolicySchemaFieldDescription extends \Google\Collection
{
  // Property used as the iterable collection by \Google\Collection.
  protected $collection_key = 'nestedFieldDescriptions';
  /**
   * @var string
   */
  public $description;
  /**
   * @var string
   */
  public $field;
  protected $fieldDependenciesType = GoogleChromePolicyV1PolicySchemaFieldDependencies::class;
  protected $fieldDependenciesDataType = 'array';
  /**
   * @var string
   */
  public $inputConstraint;
  protected $knownValueDescriptionsType = GoogleChromePolicyV1PolicySchemaFieldKnownValueDescription::class;
  protected $knownValueDescriptionsDataType = 'array';
  // Self-referential: nested fields are described by this same model type.
  protected $nestedFieldDescriptionsType = GoogleChromePolicyV1PolicySchemaFieldDescription::class;
  protected $nestedFieldDescriptionsDataType = 'array';

  /**
   * @param string
   */
  public function setDescription($description)
  {
    $this->description = $description;
  }
  /**
   * @return string
   */
  public function getDescription()
  {
    return $this->description;
  }
  /**
   * @param string
   */
  public function setField($field)
  {
    $this->field = $field;
  }
  /**
   * @return string
   */
  public function getField()
  {
    return $this->field;
  }
  /**
   * @param GoogleChromePolicyV1PolicySchemaFieldDependencies[]
   */
  public function setFieldDependencies($fieldDependencies)
  {
    $this->fieldDependencies = $fieldDependencies;
  }
  /**
   * @return GoogleChromePolicyV1PolicySchemaFieldDependencies[]
   */
  public function getFieldDependencies()
  {
    return $this->fieldDependencies;
  }
  /**
   * @param string
   */
  public function setInputConstraint($inputConstraint)
  {
    $this->inputConstraint = $inputConstraint;
  }
  /**
   * @return string
   */
  public function getInputConstraint()
  {
    return $this->inputConstraint;
  }
  /**
   * @param GoogleChromePolicyV1PolicySchemaFieldKnownValueDescription[]
   */
  public function setKnownValueDescriptions($knownValueDescriptions)
  {
    $this->knownValueDescriptions = $knownValueDescriptions;
  }
  /**
   * @return GoogleChromePolicyV1PolicySchemaFieldKnownValueDescription[]
   */
  public function getKnownValueDescriptions()
  {
    return $this->knownValueDescriptions;
  }
  /**
   * @param GoogleChromePolicyV1PolicySchemaFieldDescription[]
   */
  public function setNestedFieldDescriptions($nestedFieldDescriptions)
  {
    $this->nestedFieldDescriptions = $nestedFieldDescriptions;
  }
  /**
   * @return GoogleChromePolicyV1PolicySchemaFieldDescription[]
   */
  public function getNestedFieldDescriptions()
  {
    return $this->nestedFieldDescriptions;
  }
}
// Backwards-compatibility alias for the pre-namespaced class name used by older
// versions of the generated client.
class_alias(GoogleChromePolicyV1PolicySchemaFieldDescription::class, 'Google_Service_ChromePolicy_GoogleChromePolicyV1PolicySchemaFieldDescription');
| googleapis/google-api-php-client-services | src/ChromePolicy/GoogleChromePolicyV1PolicySchemaFieldDescription.php | PHP | apache-2.0 | 3,468 |
<?php
namespace EventEspresso\core\services\commands\registration;
use EventEspresso\core\services\commands\Command;
// Guard against direct script access: only run when loaded through the
// Event Espresso bootstrap (which defines EVENT_ESPRESSO_VERSION).
if ( ! defined( 'EVENT_ESPRESSO_VERSION' ) ) {
    exit( 'No direct script access allowed' );
}
/**
* Class SingleRegistrationCommand
* DTO for passing data a single EE_Registration object to a CommandHandler
*
* @package Event Espresso
* @author Brent Christensen
* @since 4.9.0
*/
abstract class SingleRegistrationCommand extends Command
{

    /**
     * The single registration this command carries to its CommandHandler.
     *
     * @var \EE_Registration $registration
     */
    private $registration;

    /**
     * SingleRegistrationCommand constructor.
     * (Previous docblock named the wrong class — CancelRegistrationAndTicketLineItemCommand.)
     *
     * @param \EE_Registration $registration the registration this command operates on
     */
    public function __construct(
        \EE_Registration $registration
    ) {
        $this->registration = $registration;
    }

    /**
     * Returns the registration supplied at construction time.
     *
     * @return \EE_Registration
     */
    public function registration()
    {
        return $this->registration;
    }
}
// End of file SingleRegistrationCommand.php
// Location: /SingleRegistrationCommand.php | yoanngern/iahm_2016 | wp-content/plugins/event-espresso-core-reg/core/services/commands/registration/SingleRegistrationCommand.php | PHP | apache-2.0 | 998 |
<?php
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
/**
* Service definition for FactCheckTools (v1alpha1).
*
* <p>
</p>
*
* <p>
* For more information about this service, see the API
* <a href="https://developers.google.com/fact-check/tools/api/" target="_blank">Documentation</a>
* </p>
*
* @author Google, Inc.
*/
class Google_Service_FactCheckTools extends Google_Service
{
  /** View your email address. */
  const USERINFO_EMAIL =
      "https://www.googleapis.com/auth/userinfo.email";

  // Resource collections exposed by this service (populated in the constructor).
  public $claims;
  public $pages;

  /**
   * Constructs the internal representation of the FactCheckTools service.
   * The nested arrays below are generated REST descriptors: each method entry
   * maps to an endpoint path, HTTP verb, and its accepted parameters.
   *
   * @param Google_Client $client The client used to deliver requests.
   * @param string $rootUrl The root URL used for requests to the service.
   */
  public function __construct(Google_Client $client, $rootUrl = null)
  {
    parent::__construct($client);
    $this->rootUrl = $rootUrl ?: 'https://factchecktools.googleapis.com/';
    $this->servicePath = '';
    $this->batchPath = 'batch';
    $this->version = 'v1alpha1';
    $this->serviceName = 'factchecktools';

    // claims:search — read-only claim search with paging and filters.
    $this->claims = new Google_Service_FactCheckTools_Resource_Claims(
        $this,
        $this->serviceName,
        'claims',
        array(
          'methods' => array(
            'search' => array(
              'path' => 'v1alpha1/claims:search',
              'httpMethod' => 'GET',
              'parameters' => array(
                'languageCode' => array(
                  'location' => 'query',
                  'type' => 'string',
                ),
                'maxAgeDays' => array(
                  'location' => 'query',
                  'type' => 'integer',
                ),
                'offset' => array(
                  'location' => 'query',
                  'type' => 'integer',
                ),
                'pageSize' => array(
                  'location' => 'query',
                  'type' => 'integer',
                ),
                'pageToken' => array(
                  'location' => 'query',
                  'type' => 'string',
                ),
                'query' => array(
                  'location' => 'query',
                  'type' => 'string',
                ),
                'reviewPublisherSiteFilter' => array(
                  'location' => 'query',
                  'type' => 'string',
                ),
              ),
            ),
          )
        )
    );
    // pages — CRUD over ClaimReview markup pages.
    $this->pages = new Google_Service_FactCheckTools_Resource_Pages(
        $this,
        $this->serviceName,
        'pages',
        array(
          'methods' => array(
            'create' => array(
              'path' => 'v1alpha1/pages',
              'httpMethod' => 'POST',
              'parameters' => array(),
            ),'delete' => array(
              'path' => 'v1alpha1/{+name}',
              'httpMethod' => 'DELETE',
              'parameters' => array(
                'name' => array(
                  'location' => 'path',
                  'type' => 'string',
                  'required' => true,
                ),
              ),
            ),'get' => array(
              'path' => 'v1alpha1/{+name}',
              'httpMethod' => 'GET',
              'parameters' => array(
                'name' => array(
                  'location' => 'path',
                  'type' => 'string',
                  'required' => true,
                ),
              ),
            ),'list' => array(
              'path' => 'v1alpha1/pages',
              'httpMethod' => 'GET',
              'parameters' => array(
                'offset' => array(
                  'location' => 'query',
                  'type' => 'integer',
                ),
                'organization' => array(
                  'location' => 'query',
                  'type' => 'string',
                ),
                'pageSize' => array(
                  'location' => 'query',
                  'type' => 'integer',
                ),
                'pageToken' => array(
                  'location' => 'query',
                  'type' => 'string',
                ),
                'url' => array(
                  'location' => 'query',
                  'type' => 'string',
                ),
              ),
            ),'update' => array(
              'path' => 'v1alpha1/{+name}',
              'httpMethod' => 'PUT',
              'parameters' => array(
                'name' => array(
                  'location' => 'path',
                  'type' => 'string',
                  'required' => true,
                ),
              ),
            ),
          )
        )
    );
  }
}
| tsugiproject/tsugi | vendor/google/apiclient-services/src/Google/Service/FactCheckTools.php | PHP | apache-2.0 | 5,227 |
# ******************************************************************************
# Copyright 2014-2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************
| NervanaSystems/neon | neon/backends/kernels/cuda/__init__.py | Python | apache-2.0 | 748 |
/**
* Copyright (c) 2010 RedEngine Ltd, http://www.redengine.co.nz. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package net.stickycode.deploy.sample.helloworld;
/**
 * Sample hello-world component that can be invoked directly ({@link #hello()})
 * or run as a task ({@link #run()}).
 */
public class HelloWorld implements Runnable {

  /** Prints "Hello World!" to standard output. */
  public void hello() {
    System.out.println("Hello World!");
  }

  /**
   * Prints an embedded-world greeting and then sleeps for five seconds.
   *
   * @throws RuntimeException wrapping an {@link InterruptedException} if the
   *         sleeping thread is interrupted
   */
  @Override
  public void run() {
    System.out.println("Hello Embedded World!");
    try {
      Thread.sleep(5000);
    }
    catch (InterruptedException e) {
      // BUGFIX: restore the interrupt status before rethrowing so callers and
      // thread pools can still observe that the thread was interrupted.
      Thread.currentThread().interrupt();
      throw new RuntimeException(e);
    }
  }
}
| walterDurin/stickycode | net.stickycode.deploy.samples/sticky-deploy-sample-helloworld/src/main/java/net/stickycode/deploy/sample/helloworld/HelloWorld.java | Java | apache-2.0 | 1,097 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.parser.rule;
import lombok.Getter;
import org.apache.shardingsphere.infra.rule.identifier.scope.GlobalRule;
import org.apache.shardingsphere.parser.config.SQLParserRuleConfiguration;
import org.apache.shardingsphere.sql.parser.api.CacheOption;
/**
 * SQL parser rule.
 *
 * <p>Global rule carrying the SQL-parser configuration: whether SQL comments are
 * parsed, plus the cache options for parsed SQL statements and parse trees.
 * Getters are generated by Lombok's {@code @Getter}.</p>
 */
@Getter
public final class SQLParserRule implements GlobalRule {

    // Whether SQL comments should be parsed (true) rather than ignored.
    private final boolean sqlCommentParseEnabled;

    // Cache configuration for compiled SQL statements.
    private final CacheOption sqlStatementCache;

    // Cache configuration for raw parse trees.
    private final CacheOption parseTreeCache;

    public SQLParserRule(final SQLParserRuleConfiguration ruleConfig) {
        sqlCommentParseEnabled = ruleConfig.isSqlCommentParseEnabled();
        sqlStatementCache = ruleConfig.getSqlStatementCache();
        parseTreeCache = ruleConfig.getParseTreeCache();
    }

    /** Rule type identifier: the simple class name. */
    @Override
    public String getType() {
        return SQLParserRule.class.getSimpleName();
    }
}
| apache/incubator-shardingsphere | shardingsphere-kernel/shardingsphere-parser/shardingsphere-parser-core/src/main/java/org/apache/shardingsphere/parser/rule/SQLParserRule.java | Java | apache-2.0 | 1,720 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.algebricks.runtime.operators.sort;
import java.nio.ByteBuffer;
import java.util.List;
import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
import org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputPushRuntime;
import org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputRuntimeFactory;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.INormalizedKeyComputer;
import org.apache.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.resources.IDeallocatable;
import org.apache.hyracks.api.util.CleanupUtils;
import org.apache.hyracks.dataflow.common.io.GeneratedRunFileReader;
import org.apache.hyracks.dataflow.std.buffermanager.EnumFreeSlotPolicy;
import org.apache.hyracks.dataflow.std.sort.Algorithm;
import org.apache.hyracks.dataflow.std.sort.ExternalSortRunGenerator;
import org.apache.hyracks.dataflow.std.sort.ExternalSortRunMerger;
/**
 * Factory for a push runtime that sorts the tuples flowing through a single
 * operator pipeline. Sorting is delegated to {@link ExternalSortRunGenerator}
 * and {@link ExternalSortRunMerger}, so input exceeding the frame budget
 * spills to run files and is merged back when the input is closed.
 */
public class MicroSortRuntimeFactory extends AbstractOneInputOneOutputRuntimeFactory {
    private static final long serialVersionUID = 1L;
    // Maximum number of frames the sorter may hold in memory before spilling.
    private final int framesLimit;
    // Indices of the fields to sort on.
    private final int[] sortFields;
    // Optional normalized-key computer factories (may be null); the merger uses only the first.
    private final INormalizedKeyComputerFactory[] keyNormalizerFactories;
    // One comparator factory per sort field.
    private final IBinaryComparatorFactory[] comparatorFactories;
    /**
     * Convenience constructor wrapping a single (possibly null) normalized-key
     * factory into the array form expected by the main constructor.
     */
    public MicroSortRuntimeFactory(int[] sortFields, INormalizedKeyComputerFactory firstKeyNormalizerFactory,
            IBinaryComparatorFactory[] comparatorFactories, int[] projectionList, int framesLimit) {
        this(sortFields, firstKeyNormalizerFactory != null
                ? new INormalizedKeyComputerFactory[] { firstKeyNormalizerFactory } : null, comparatorFactories,
                projectionList, framesLimit);
    }
    /**
     * Main constructor.
     *
     * @param projectionList must be null; projection push-down is not implemented here.
     */
    public MicroSortRuntimeFactory(int[] sortFields, INormalizedKeyComputerFactory[] keyNormalizerFactories,
            IBinaryComparatorFactory[] comparatorFactories, int[] projectionList, int framesLimit) {
        super(projectionList);
        // Obs: the projection list is currently ignored.
        if (projectionList != null) {
            throw new NotImplementedException("Cannot push projection into InMemorySortRuntime.");
        }
        this.sortFields = sortFields;
        this.keyNormalizerFactories = keyNormalizerFactories;
        this.comparatorFactories = comparatorFactories;
        this.framesLimit = framesLimit;
    }
    @Override
    public AbstractOneInputOneOutputPushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx)
            throws HyracksDataException {
        // Register with the task context so buffers are reclaimed even if the
        // runtime is never closed through the normal path.
        InMemorySortPushRuntime pushRuntime = new InMemorySortPushRuntime(ctx);
        ctx.registerDeallocatable(pushRuntime);
        return pushRuntime;
    }
    private class InMemorySortPushRuntime extends AbstractOneInputOneOutputPushRuntime implements IDeallocatable {
        final IHyracksTaskContext ctx;
        // Accumulates incoming frames, sorting in memory and spilling runs when full.
        ExternalSortRunGenerator runsGenerator = null;
        // Merges spilled runs into the final sorted output; reused across open/close cycles.
        ExternalSortRunMerger runsMerger = null;
        // Wraps the downstream writer during the final flush or merge phase.
        IFrameWriter wrappingWriter = null;
        private InMemorySortPushRuntime(IHyracksTaskContext ctx) {
            this.ctx = ctx;
        }
        @Override
        public void open() throws HyracksDataException {
            // Lazily create the run generator on first open; subsequent opens reuse it.
            if (runsGenerator == null) {
                runsGenerator = new ExternalSortRunGenerator(ctx, sortFields, keyNormalizerFactories,
                        comparatorFactories, outputRecordDesc, Algorithm.MERGE_SORT, EnumFreeSlotPolicy.LAST_FIT,
                        framesLimit, Integer.MAX_VALUE);
            }
            // next writer will be opened later when preparing the merger
            isOpen = true;
            runsGenerator.open();
            runsGenerator.getSorter().reset();
        }
        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            runsGenerator.nextFrame(buffer);
        }
        @Override
        public void close() throws HyracksDataException {
            // Emits the sorted result downstream: directly from the in-memory
            // sorter when nothing spilled, otherwise via a run merge.
            Throwable failure = null;
            if (isOpen) {
                try {
                    if (!failed) {
                        runsGenerator.close();
                        createOrResetRunsMerger();
                        if (runsGenerator.getRuns().isEmpty()) {
                            // No spilled runs: flush the in-memory sorter straight through.
                            wrappingWriter = runsMerger.prepareSkipMergingFinalResultWriter(writer);
                            wrappingWriter.open();
                            if (runsGenerator.getSorter().hasRemaining()) {
                                runsGenerator.getSorter().flush(wrappingWriter);
                            }
                        } else {
                            // Spilled: merge all run files into the final writer.
                            wrappingWriter = runsMerger.prepareFinalMergeResultWriter(writer);
                            wrappingWriter.open();
                            runsMerger.process(wrappingWriter);
                        }
                    }
                } catch (Throwable th) {
                    failure = th;
                    fail(th);
                } finally {
                    // Close the wrapping writer in all cases, accumulating any
                    // secondary failure into the primary one.
                    failure = CleanupUtils.close(wrappingWriter, failure);
                    wrappingWriter = null;
                }
            }
            isOpen = false;
            if (failure != null) {
                throw HyracksDataException.create(failure);
            }
        }
        @Override
        public void fail() throws HyracksDataException {
            failed = true;
            // clean up the runs if some have been generated. double close should be idempotent.
            if (runsGenerator != null) {
                List<GeneratedRunFileReader> runs = runsGenerator.getRuns();
                for (int i = 0, size = runs.size(); i < size; i++) {
                    try {
                        runs.get(i).close();
                    } catch (Throwable th) {
                        // ignore
                    }
                }
            }
            if (wrappingWriter != null) {
                wrappingWriter.fail();
            }
        }
        @Override
        public void deallocate() {
            // Invoked by the task context; releases the sorter's buffers.
            if (runsGenerator != null) {
                try {
                    runsGenerator.getSorter().close();
                } catch (Exception e) {
                    // ignore
                }
            }
        }
        private void createOrResetRunsMerger() {
            if (runsMerger == null) {
                IBinaryComparator[] comparators = new IBinaryComparator[comparatorFactories.length];
                for (int i = 0; i < comparatorFactories.length; ++i) {
                    comparators[i] = comparatorFactories[i].createBinaryComparator();
                }
                // Only the first normalized-key computer is used for merging.
                INormalizedKeyComputer nmkComputer =
                        keyNormalizerFactories == null ? null : keyNormalizerFactories[0].createNormalizedKeyComputer();
                runsMerger = new ExternalSortRunMerger(ctx, runsGenerator.getRuns(), sortFields, comparators,
                        nmkComputer, outputRecordDesc, framesLimit, Integer.MAX_VALUE);
            } else {
                // Reuse the merger, pointing it at the current set of runs.
                runsMerger.reset(runsGenerator.getRuns());
            }
        }
    }
}
| apache/incubator-asterixdb | hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/sort/MicroSortRuntimeFactory.java | Java | apache-2.0 | 8,262 |
package gov.va.medora.mdws.emrsvc;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="fromDate" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="toDate" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="nNotes" type="{http://www.w3.org/2001/XMLSchema}int"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "fromDate",
    "toDate",
    "nNotes"
})
@XmlRootElement(name = "getDischargeSummaries")
public class GetDischargeSummaries {

    // Request payload fields, marshalled in the propOrder declared above.
    protected String fromDate;
    protected String toDate;
    protected int nNotes;

    /**
     * Gets the fromDate value.
     *
     * @return possible object is {@link String }
     */
    public String getFromDate() {
        return this.fromDate;
    }

    /**
     * Sets the fromDate value.
     *
     * @param newFromDate allowed object is {@link String }
     */
    public void setFromDate(String newFromDate) {
        this.fromDate = newFromDate;
    }

    /**
     * Gets the toDate value.
     *
     * @return possible object is {@link String }
     */
    public String getToDate() {
        return this.toDate;
    }

    /**
     * Sets the toDate value.
     *
     * @param newToDate allowed object is {@link String }
     */
    public void setToDate(String newToDate) {
        this.toDate = newToDate;
    }

    /**
     * Gets the nNotes value (presumably the number of notes to retrieve —
     * confirm against the service documentation).
     */
    public int getNNotes() {
        return this.nNotes;
    }

    /**
     * Sets the nNotes value.
     */
    public void setNNotes(int newNNotes) {
        this.nNotes = newNNotes;
    }

}
| VHAINNOVATIONS/TheDailyPlan | LegacyApp/tdpWeb/src/main/java/gov/va/medora/mdws/emrsvc/GetDischargeSummaries.java | Java | apache-2.0 | 2,528 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.test;
import static java.util.regex.Matcher.quoteReplacement;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.log4j.Appender;
import org.apache.log4j.FileAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.SimpleLayout;
import org.apache.log4j.WriterAppender;
import org.apache.pig.ExecType;
import org.apache.pig.ExecTypeProvider;
import org.apache.pig.LoadCaster;
import org.apache.pig.PigException;
import org.apache.pig.PigServer;
import org.apache.pig.ResourceSchema.ResourceFieldSchema;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRCompiler;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRConfiguration;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MROperPlan;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
import org.apache.pig.backend.hadoop.executionengine.tez.TezResourceManager;
import org.apache.pig.backend.hadoop.executionengine.util.MapRedUtil;
import org.apache.pig.builtin.Utf8StorageConverter;
import org.apache.pig.data.BagFactory;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DataType;
import org.apache.pig.data.DefaultBagFactory;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.io.FileLocalizer;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
import org.apache.pig.impl.util.LogUtils;
import org.apache.pig.newplan.logical.optimizer.LogicalPlanPrinter;
import org.apache.pig.newplan.logical.optimizer.SchemaResetter;
import org.apache.pig.newplan.logical.optimizer.UidResetter;
import org.apache.pig.newplan.logical.relational.LogToPhyTranslationVisitor;
import org.apache.pig.newplan.logical.relational.LogicalPlan;
import org.apache.pig.newplan.logical.relational.LogicalSchema;
import org.apache.pig.newplan.logical.relational.LogicalSchema.LogicalFieldSchema;
import org.apache.pig.newplan.logical.visitor.DanglingNestedNodeRemover;
import org.apache.pig.newplan.logical.visitor.SortInfoSetter;
import org.apache.pig.newplan.logical.visitor.StoreAliasSetter;
import org.apache.pig.parser.ParserException;
import org.apache.pig.parser.QueryParserDriver;
import org.apache.pig.tools.grunt.GruntParser;
import org.apache.pig.tools.pigstats.ScriptState;
import org.apache.spark.package$;
import org.junit.Assert;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
public class Util {
    // Shared factories used by the tuple/bag construction helpers below.
    private static BagFactory mBagFactory = BagFactory.getInstance();
    private static TupleFactory mTupleFactory = TupleFactory.getInstance();
    // Commonly-checked system state
    // =================
    // True when running on Windows; used to normalize path separators.
    public static final boolean WINDOWS /* borrowed from Path.WINDOWS, Shell.WINDOWS */
            = System.getProperty("os.name").startsWith("Windows");
    // Root directory for test build artifacts (overridable via -Dtest.build.dir).
    public static final String TEST_DIR = System.getProperty("test.build.dir", "build/test");
// Helper Functions
// =================
static public Tuple loadFlatTuple(Tuple t, int[] input) throws ExecException {
for (int i = 0; i < input.length; i++) {
t.set(i, new Integer(input[i]));
}
return t;
}
static public Tuple loadTuple(Tuple t, String[] input) throws ExecException {
for (int i = 0; i < input.length; i++) {
t.set(i, input[i]);
}
return t;
}
static public Tuple loadTuple(Tuple t, DataByteArray[] input) throws ExecException {
for (int i = 0; i < input.length; i++) {
t.set(i, input[i]);
}
return t;
}
static public Tuple loadNestTuple(Tuple t, int[] input) throws ExecException {
DataBag bag = BagFactory.getInstance().newDefaultBag();
for(int i = 0; i < input.length; i++) {
Tuple f = TupleFactory.getInstance().newTuple(1);
f.set(0, input[i]);
bag.add(f);
}
t.set(0, bag);
return t;
}
static public Tuple loadNestTuple(Tuple t, long[] input) throws ExecException {
DataBag bag = BagFactory.getInstance().newDefaultBag();
for(int i = 0; i < input.length; i++) {
Tuple f = TupleFactory.getInstance().newTuple(1);
f.set(0, new Long(input[i]));
bag.add(f);
}
t.set(0, bag);
return t;
}
// this one should handle String, DataByteArray, Long, Integer etc..
static public <T> Tuple loadNestTuple(Tuple t, T[] input) throws ExecException {
DataBag bag = BagFactory.getInstance().newDefaultBag();
for(int i = 0; i < input.length; i++) {
Tuple f = TupleFactory.getInstance().newTuple(1);
f.set(0, input[i]);
bag.add(f);
}
t.set(0, bag);
return t;
}
/**
* Create an array of tuple bags with specified size created by splitting
* the input array of primitive types
*
* @param input Array of primitive types
* @param bagSize The number of tuples to be split and copied into each bag
*
* @return an array of tuple bags with each bag containing bagSize tuples split from the input
*/
    static public <T> Tuple[] splitCreateBagOfTuples(T[] input, int bagSize)
            throws ExecException {
        List<Tuple> result = new ArrayList<Tuple>();
        for (int from = 0; from < input.length; from += bagSize) {
            Tuple t = TupleFactory.getInstance().newTuple(1);
            // Last chunk may be shorter than bagSize.
            int to = from + bagSize < input.length ? from + bagSize
                    : input.length;
            T[] array = Arrays.copyOfRange(input, from, to);
            result.add(loadNestTuple(t, array));
        }
        return result.toArray(new Tuple[0]);
    }
    /** Appends each element of {@code b} as a new field of {@code t}. */
    static public <T>void addToTuple(Tuple t, T[] b)
    {
        for(int i = 0; i < b.length; i++)
            t.append(b[i]);
    }
    /** Builds a tuple directly backed by the given values (no defensive copy). */
    static public Tuple buildTuple(Object... args) throws ExecException {
        return TupleFactory.getInstance().newTupleNoCopy(Lists.newArrayList(args));
    }
    /**
     * Builds a tuple whose fields are the binary (DataByteArray) encodings of
     * the given values. Nulls stay null; a value whose conversion fails is
     * also mapped to null (the ExecException is deliberately swallowed).
     */
    static public Tuple buildBinTuple(final Object... args) throws IOException {
        return TupleFactory.getInstance().newTuple(Lists.transform(
                Lists.newArrayList(args), new Function<Object, DataByteArray>() {
                    @Override
                    public DataByteArray apply(Object o) {
                        if (o == null) {
                            return null;
                        }
                        try {
                            return new DataByteArray(DataType.toBytes(o));
                        } catch (ExecException e) {
                            return null;
                        }
                    }
                }));
    }
static public <T>Tuple createTuple(T[] s)
{
Tuple t = mTupleFactory.newTuple();
addToTuple(t, s);
return t;
}
static public DataBag createBag(Tuple[] t)
{
DataBag b = mBagFactory.newDefaultBag();
for(int i = 0; i < t.length; i++)b.add(t[i]);
return b;
}
static public<T> DataBag createBagOfOneColumn(T[] input) throws ExecException {
DataBag result = mBagFactory.newDefaultBag();
for (int i = 0; i < input.length; i++) {
Tuple t = mTupleFactory.newTuple(1);
t.set(0, input[i]);
result.add(t);
}
return result;
}
static public Map<String, Object> createMap(String[] contents)
{
Map<String, Object> m = new HashMap<String, Object>();
for(int i = 0; i < contents.length; ) {
m.put(contents[i], contents[i+1]);
i += 2;
}
return m;
}
static public<T> DataByteArray[] toDataByteArrays(T[] input) {
DataByteArray[] dbas = new DataByteArray[input.length];
for (int i = 0; i < input.length; i++) {
dbas[i] = (input[i] == null)?null:new DataByteArray(input[i].toString().getBytes());
}
return dbas;
}
static public Tuple loadNestTuple(Tuple t, int[][] input) throws ExecException {
for (int i = 0; i < input.length; i++) {
DataBag bag = BagFactory.getInstance().newDefaultBag();
Tuple f = loadFlatTuple(TupleFactory.getInstance().newTuple(input[i].length), input[i]);
bag.add(f);
t.set(i, bag);
}
return t;
}
static public Tuple loadTuple(Tuple t, String[][] input) throws ExecException {
for (int i = 0; i < input.length; i++) {
DataBag bag = BagFactory.getInstance().newDefaultBag();
Tuple f = loadTuple(TupleFactory.getInstance().newTuple(input[i].length), input[i]);
bag.add(f);
t.set(i, bag);
}
return t;
}
/**
* Helper to remove colons (if any exist) from paths to sanitize them for
* consumption by hdfs.
*
* @param origPath original path name
* @return String sanitized path with anything prior to : removed
* @throws IOException
*/
static public String removeColon(String origPath)
{
return origPath.replaceAll(":", "");
}
/**
* Helper to convert \r\n to \n for cross-platform string
* matching with checked-in baselines.
*
* @param origPath original string
* @return String newline-standardized string
* @throws IOException
*/
static public String standardizeNewline(String origPath)
{
return origPath.replaceAll("\r\n", "\n");
}
/**
* Helper to create a temporary file with given input data for use in test cases.
*
* @param tmpFilenamePrefix file-name prefix
* @param tmpFilenameSuffix file-name suffix
* @param inputData input for test cases, each string in inputData[] is written
* on one line
* @return {@link File} handle to the created temporary file
* @throws IOException
*/
static public File createInputFile(String tmpFilenamePrefix,
String tmpFilenameSuffix,
String[] inputData)
throws IOException {
File f = File.createTempFile(tmpFilenamePrefix, tmpFilenameSuffix);
f.deleteOnExit();
writeToFile(f, inputData);
return f;
}
static public File createLocalInputFile(String filename, String[] inputData)
throws IOException {
File f = new File(filename);
f.deleteOnExit();
writeToFile(f, inputData);
return f;
}
public static void writeToFile(File f, String[] inputData) throws
IOException {
PrintWriter pw = new PrintWriter(new OutputStreamWriter(new
FileOutputStream(f), "UTF-8"));
for (int i=0; i<inputData.length; i++){
pw.print(inputData[i]);
pw.print("\n");
}
pw.close();
}
/**
* Helper to create a dfs file on the Minicluster DFS with given
* input data for use in test cases.
*
* @param miniCluster reference to the Minicluster where the file should be created
* @param fileName pathname of the file to be created
* @param inputData input for test cases, each string in inputData[] is written
* on one line
* @throws IOException
*/
static public void createInputFile(MiniGenericCluster miniCluster, String fileName,
String[] inputData)
throws IOException {
FileSystem fs = miniCluster.getFileSystem();
createInputFile(fs, fileName, inputData);
}
static public void createInputFile(FileSystem fs, String fileName,
String[] inputData) throws IOException {
if(Util.WINDOWS){
fileName = fileName.replace('\\','/');
}
if(fs.exists(new Path(fileName))) {
throw new IOException("File " + fileName + " already exists on the FileSystem");
}
FSDataOutputStream stream = fs.create(new Path(fileName));
PrintWriter pw = new PrintWriter(new OutputStreamWriter(stream, "UTF-8"));
for (int i=0; i<inputData.length; i++){
pw.print(inputData[i]);
pw.print("\n");
}
pw.close();
}
    /**
     * Reads all lines of {@code fileName} from the given FileSystem. If the
     * path is a directory, every file in it whose name does not start with
     * '_' is read (this skips Hadoop bookkeeping files such as _SUCCESS —
     * presumably the intent; confirm against callers).
     *
     * @return the lines of all matching files, in listing order
     * @throws IOException if the path does not exist
     */
    static public String[] readOutput(FileSystem fs, String fileName) throws IOException {
        if(Util.WINDOWS){
            fileName = fileName.replace('\\','/');
        }
        Path path = new Path(fileName);
        if(!fs.exists(path)) {
            throw new IOException("Path " + fileName + " does not exist on the FileSystem");
        }
        FileStatus fileStatus = fs.getFileStatus(path);
        FileStatus[] files;
        if (fileStatus.isDirectory()) {
            // Skip names beginning with '_' (e.g. _logs, _SUCCESS).
            files = fs.listStatus(path, new PathFilter() {
                @Override
                public boolean accept(Path p) {
                    return !p.getName().startsWith("_");
                }
            });
        } else {
            files = new FileStatus[] { fileStatus };
        }
        List<String> result = new ArrayList<String>();
        for (FileStatus f : files) {
            FSDataInputStream stream = fs.open(f.getPath());
            BufferedReader br = new BufferedReader(new InputStreamReader(stream, "UTF-8"));
            String line;
            while ((line = br.readLine()) != null) {
                result.add(line);
            }
            // NOTE(review): not closed in a finally, so a read failure leaks
            // the stream — acceptable for tests, but worth confirming.
            br.close();
        }
        return result.toArray(new String[result.size()]);
    }
/**
* Helper to create a dfs file on the MiniCluster dfs. This returns an
* outputstream that can be used in test cases to write data.
*
* @param cluster
* reference to the MiniCluster where the file should be created
* @param fileName
* pathname of the file to be created
* @return OutputStream to write any data to the file created on the
* MiniCluster.
* @throws IOException
*/
    static public OutputStream createInputFile(MiniGenericCluster cluster,
            String fileName) throws IOException {
        FileSystem fs = cluster.getFileSystem();
        // Normalize Windows separators so the DFS sees forward slashes.
        if(Util.WINDOWS){
            fileName = fileName.replace('\\','/');
        }
        if (fs.exists(new Path(fileName))) {
            throw new IOException("File " + fileName
                    + " already exists on the minicluster");
        }
        // Caller is responsible for closing the returned stream.
        return fs.create(new Path(fileName));
    }
/**
* Helper to create an empty temp file on local file system
* which will be deleted on exit
* @param prefix
* @param suffix
* @return File denoting a newly-created empty file
* @throws IOException
*/
static public File createTempFileDelOnExit(String prefix, String suffix)
throws IOException {
File tmpFile = File.createTempFile(prefix, suffix);
tmpFile.deleteOnExit();
return tmpFile;
}
/**
* Helper to remove a dfs file from the minicluster DFS
*
* @param miniCluster reference to the Minicluster where the file should be deleted
* @param fileName pathname of the file to be deleted
* @throws IOException
*/
    static public void deleteFile(MiniGenericCluster miniCluster, String fileName)
    throws IOException {
        FileSystem fs = miniCluster.getFileSystem();
        if(Util.WINDOWS){
            fileName = fileName.replace('\\','/');
        }
        // Recursive delete; fs.delete is a no-op if the path does not exist.
        fs.delete(new Path(fileName), true);
    }
    /**
     * Deletes a dfs file from the MiniCluster DFS quietly
     *
     * @param miniCluster the MiniCluster where the file should be deleted
     * @param fileName the path of the file to be deleted
     */
    public static void deleteQuietly(MiniGenericCluster miniCluster, String fileName) {
        try {
            deleteFile(miniCluster, fileName);
        } catch (IOException ignored) {
            // Best-effort cleanup: failures are deliberately swallowed.
        }
    }
    /**
     * Deletes {@code fileName} (recursively) on the FileSystem configured in
     * the given PigContext's properties.
     */
    static public void deleteFile(PigContext pigContext, String fileName)
    throws IOException {
        Configuration conf = ConfigurationUtil.toConfiguration(
                pigContext.getProperties());
        FileSystem fs = FileSystem.get(conf);
        if(Util.WINDOWS){
            fileName = fileName.replace('\\','/');
        }
        fs.delete(new Path(fileName), true);
    }
    /**
     * Returns true if {@code fileName} exists on the FileSystem configured in
     * the given PigContext's properties.
     */
    static public boolean exists(PigContext pigContext, String fileName)
    throws IOException {
        Configuration conf = ConfigurationUtil.toConfiguration(
                pigContext.getProperties());
        FileSystem fs = FileSystem.get(conf);
        if(Util.WINDOWS){
            fileName = fileName.replace('\\','/');
        }
        return fs.exists(new Path(fileName));
    }
/**
* Helper function to check if the result of a Pig Query is in line with
* expected results.
*
* @param actualResults Result of the executed Pig query
* @param expectedResults Expected results Array to validate against
*/
    static public void checkQueryOutputs(Iterator<Tuple> actualResults,
                                    Tuple[] expectedResults) {
        // Delegate to the List overload.
        checkQueryOutputs(actualResults, Arrays.asList(expectedResults));
    }
    /**
     * Helper function to check if the result of a Pig Query is in line with
     * expected results.
     *
     * @param actualResults Result of the executed Pig query
     * @param expectedResults Expected results List to validate against
     */
    static public void checkQueryOutputs(Iterator<Tuple> actualResults,
            List<Tuple> expectedResults) {
        // Delegate to the Iterator overload; null means "no row-count check".
        checkQueryOutputs(actualResults, expectedResults.iterator(), null );
    }
    /**
     * Helper function to check if the result of a Pig Query is in line with
     * expected results, comparing the two iterators element by element.
     *
     * @param actualResults Result of the executed Pig query
     * @param expectedResults Expected results to validate against
     * @param expectedRows if non-null, also assert the total number of rows
     */
    static public void checkQueryOutputs(Iterator<Tuple> actualResults,
            Iterator<Tuple> expectedResults, Integer expectedRows) {
        int count = 0;
        while (expectedResults.hasNext()) {
            Tuple expected = expectedResults.next();
            Assert.assertTrue("Actual result has less records than expected results", actualResults.hasNext());
            Tuple actual = actualResults.next();
            // If this tuple contains any bags, bags will be sorted before comparisons
            if( !expected.equals(actual) ) {
                // Using string comparisons since error message is more readable
                // (only showing the part which differs)
                Assert.assertEquals(expected.toString(), actual.toString());
                // if above goes through, simply failing with object comparisons
                Assert.assertEquals(expected, actual);
            }
            count++;
        }
        Assert.assertFalse("Actual result has more records than expected results", actualResults.hasNext());
        if (expectedRows != null) {
            Assert.assertEquals((int)expectedRows, count);
        }
    }
/**
* Helper function to check if the result of a Pig Query is in line with
* expected results. It sorts actual and expected results before comparison
*
* @param actualResultsIt Result of the executed Pig query
* @param expectedResList Expected results to validate against
*/
static public void checkQueryOutputsAfterSort(Iterator<Tuple> actualResultsIt,
List<Tuple> expectedResList) {
List<Tuple> actualResList = new ArrayList<Tuple>();
while(actualResultsIt.hasNext()){
actualResList.add(actualResultsIt.next());
}
checkQueryOutputsAfterSort(actualResList, expectedResList);
}
/**
* Helper function to check if the result of Pig Query is in line with expected results.
* It sorts actual and expected results before comparison.
* The tuple size in the tuple list can vary. Pass by a two-dimension array, it will be converted to be a tuple list.
* e.g. expectedTwoDimensionObjects is [{{10, "will_join", 10, "will_join"}, {11, "will_not_join", null}, {null, 12, "will_not_join"}}],
* the field size of these 3 tuples are [4,3,3]
*
* @param actualResultsIt
* @param expectedTwoDimensionObjects represents a tuple list, in which the tuple can have variable size.
*/
static public void checkQueryOutputsAfterSort(Iterator<Tuple> actualResultsIt,
Object[][] expectedTwoDimensionObjects) {
List<Tuple> expectedResTupleList = new ArrayList<Tuple>();
for (int i = 0; i < expectedTwoDimensionObjects.length; ++i) {
Tuple t = TupleFactory.getInstance().newTuple();
for (int j = 0; j < expectedTwoDimensionObjects[i].length; ++j) {
t.append(expectedTwoDimensionObjects[i][j]);
}
expectedResTupleList.add(t);
}
checkQueryOutputsAfterSort(actualResultsIt, expectedResTupleList);
}
static public void checkQueryOutputsAfterSort(
List<Tuple> actualResList, List<Tuple> expectedResList) {
Collections.sort(actualResList);
Collections.sort(expectedResList);
checkQueryOutputs(actualResList.iterator(), expectedResList);
}
/**
* Check if subStr is a subString of str . calls org.junit.Assert.fail if it is not
* @param str
* @param subStr
*/
    static public void checkStrContainsSubStr(String str, String subStr){
        if(!str.contains(subStr)){
            fail("String '"+ subStr + "' is not a substring of '" + str + "'");
        }
    }
    /**
     * Check if query plan for alias argument produces exception with expected
     * error message in expectedErr argument.
     * @param query
     * @param alias
     * @param expectedErr
     * @throws IOException
     */
    static public void checkExceptionMessage(String query, String alias, String expectedErr)
            throws IOException {
        PigServer pig = new PigServer(ExecType.LOCAL);
        boolean foundEx = false;
        try{
            Util.registerMultiLineQuery(pig, query);
            // explain forces the frontend to fully build the plan for the alias.
            pig.explain(alias, System.out);
        }catch(FrontendException e){
            foundEx = true;
            checkMessageInException(e, expectedErr);
        }
        if(!foundEx)
            fail("No exception thrown. Exception is expected.");
    }
    /**
     * Unwraps the root PigException from a FrontendException and asserts its
     * message contains {@code expectedErr}.
     */
    public static void checkMessageInException(FrontendException e,
            String expectedErr) {
        PigException pigEx = LogUtils.getPigException(e);
        String message = pigEx.getMessage();
        checkErrorMessageContainsExpected(message, expectedErr);
    }
    /** Fails unless {@code message} contains {@code expectedMessage}. */
    public static void checkErrorMessageContainsExpected(String message, String expectedMessage){
        if(!message.contains(expectedMessage)){
            String msg = "Expected error message containing '"
                    + expectedMessage + "' but got '" + message + "'" ;
            fail(msg);
        }
    }
static private String getFSMkDirCommand(String fileName) {
Path parentDir = new Path(fileName).getParent();
String mkdirCommand = parentDir.getName().isEmpty() ? "" : "fs -mkdir -p " + parentDir + "\n";
return mkdirCommand;
}
/**
* Utility method to copy a file form local filesystem to the dfs on
* the minicluster for testing in mapreduce mode
* @param cluster a reference to the minicluster
* @param localFileName the pathname of local file
* @param fileNameOnCluster the name with which the file should be created on the minicluster
* @throws IOException
*/
    static public void copyFromLocalToCluster(MiniGenericCluster cluster,
            String localFileName, String fileNameOnCluster) throws IOException {
        if(Util.WINDOWS){
            // Local paths with a drive letter keep backslashes; others are
            // normalized to forward slashes. Cluster paths always use '/'.
            if (!localFileName.contains(":")) {
                localFileName = localFileName.replace('\\','/');
            } else {
                localFileName = localFileName.replace('/','\\');
            }
            fileNameOnCluster = fileNameOnCluster.replace('\\','/');
        }
        // Drive the copy through grunt so it behaves exactly like a user's
        // "fs -put", creating parent directories first when needed.
        PigServer ps = new PigServer(cluster.getExecType(), cluster.getProperties());
        String script = getFSMkDirCommand(fileNameOnCluster) + "fs -put " + localFileName + " " + fileNameOnCluster;
        GruntParser parser = new GruntParser(new StringReader(script), ps);
        parser.setInteractive(false);
        try {
            parser.parseStopOnError();
        } catch (org.apache.pig.tools.pigscript.parser.ParseException e) {
            throw new IOException(e);
        }
    }
    /** Copies a local file to another local path. */
    static public void copyFromLocalToLocal(String fromLocalFileName,
            String toLocalFileName) throws IOException {
        FileUtils.copyFile(new File(fromLocalFileName), new File(toLocalFileName));
    }
/**
 * Copies a single file from the minicluster's dfs to the local filesystem.
 *
 * @param cluster the minicluster to read from
 * @param fileNameOnCluster path of the file on the cluster
 * @param localFileName destination path on the local filesystem
 * @throws IOException if the cluster path is missing or is a directory
 */
static public void copyFromClusterToLocal(MiniGenericCluster cluster,
        String fileNameOnCluster, String localFileName) throws IOException {
    if(Util.WINDOWS){
        fileNameOnCluster = fileNameOnCluster.replace('\\','/');
        localFileName = localFileName.replace('\\','/');
    }
    File parent = new File(localFileName).getParentFile();
    // Guard against a bare filename with no parent directory.
    if (parent != null && !parent.exists()) {
        parent.mkdirs();
    }
    FileSystem fs = FileSystem.get(ConfigurationUtil.toConfiguration(
            cluster.getProperties()));
    Path clusterPath = new Path(fileNameOnCluster);
    // Validate the source BEFORE opening the local writer; the original
    // created (and leaked) an empty local file when validation failed.
    if (!fs.exists(clusterPath)) {
        throw new IOException("File " + fileNameOnCluster + " does not exists on the minicluster");
    }
    FileStatus fst = fs.getFileStatus(clusterPath);
    if (fst.isDirectory()) {
        throw new IOException("Only files from cluster can be copied locally," +
                " " + fileNameOnCluster + " is a directory");
    }
    PrintWriter writer = null;
    BufferedReader reader = null;
    try {
        writer = new PrintWriter(new FileWriter(localFileName));
        reader = new BufferedReader(new InputStreamReader(fs.open(clusterPath)));
        String line;
        while ((line = reader.readLine()) != null) {
            writer.println(line);
        }
    } finally {
        // Close both streams even when a read/write fails mid-copy.
        if (reader != null) {
            reader.close();
        }
        if (writer != null) {
            writer.close();
        }
    }
}
/**
 * Dumps expected and actual query results to stdout for manual comparison.
 */
static public void printQueryOutput(Iterator<Tuple> actualResults,
        Tuple[] expectedResults) {
    System.out.println("Expected :");
    for (int i = 0; i < expectedResults.length; i++) {
        System.out.println(expectedResults[i].toString());
    }
    System.out.println("---End----");
    System.out.println("Actual :");
    for (Iterator<Tuple> it = actualResults; it.hasNext();) {
        System.out.println(it.next().toString());
    }
    System.out.println("---End----");
}
/**
 * Helper method to replace all occurrences of "\" with "\\" in a
 * string. This is useful to fix the file path string on Windows
 * where "\" is used as the path separator.
 *
 * @param str Any string
 * @return The resulting string
 */
public static String encodeEscape(String str) {
    // This is a literal substitution, so String.replace is sufficient;
    // the original routed it through regex replaceAll + quoteReplacement.
    return str.replace("\\", "\\\\");
}
/**
 * Converts a local file path into a URI usable by the given exec type:
 * for MAPREDUCE/TEZ/SPARK the file is shipped to the cluster via
 * FileLocalizer.hadoopify; for local mode the path is returned as-is.
 *
 * @throws IllegalStateException for an unrecognized exec type
 */
public static String generateURI(String filename, PigContext context)
throws IOException {
if(Util.WINDOWS){
filename = filename.replace('\\','/');
}
if (context.getExecType() == ExecType.MAPREDUCE || context.getExecType().name().equals("TEZ") ||
context.getExecType().name().equals("SPARK")) {
return FileLocalizer.hadoopify(filename, context);
} else if (context.getExecType().isLocal()) {
return filename;
} else {
throw new IllegalStateException("ExecType: " + context.getExecType());
}
}
/**
 * Parses a Pig constant literal (e.g. "(1,'a')") into its Java value
 * using a throwaway query parser.
 */
public static Object getPigConstant(String pigConstantAsString) throws ParserException {
QueryParserDriver queryParser = new QueryParserDriver( new PigContext(),
"util", new HashMap<String, String>() ) ;
return queryParser.parseConstant(pigConstantAsString);
}
/**
 * Parses an array of Pig tuple-constant strings into a list of Tuples.
 *
 * @param tupleConstants tuple literals such as "(1,'a')"
 * @return the parsed tuples, in input order
 * @throws ParserException if any literal fails to parse
 */
public static List<Tuple> getTuplesFromConstantTupleStrings(String[] tupleConstants) throws ParserException {
    List<Tuple> result = new ArrayList<Tuple>(tupleConstants.length);
    for (String constant : tupleConstants) {
        result.add((Tuple) getPigConstant(constant));
    }
    return result;
}
/**
* Parse list of strings in to list of tuples, convert quoted strings into
* DataByteArray
* @param tupleConstants
* @return
* @throws ParserException
* @throws ExecException
*/
public static List<Tuple> getTuplesFromConstantTupleStringAsByteArray(String[] tupleConstants)
throws ParserException, ExecException {
List<Tuple> tuples = getTuplesFromConstantTupleStrings(tupleConstants);
// Post-process each tuple so String fields become DataByteArray fields.
for(Tuple t : tuples){
convertStringToDataByteArray(t);
}
return tuples;
}
/**
 * Convert String objects in argument t to DataByteArray objects,
 * recursing into nested Tuples and DataBags.
 *
 * @param t tuple to convert in place (null tolerated)
 * @throws ExecException on tuple access errors
 */
private static void convertStringToDataByteArray(Tuple t) throws ExecException {
    if (t == null) {
        return;
    }
    for (int i = 0; i < t.size(); i++) {
        Object col = t.get(i);
        if (col == null) {
            continue;
        }
        if (col instanceof String) {
            // col is known non-null here; the original re-checked null in a
            // dead ternary.
            t.set(i, new DataByteArray((String) col));
        } else if (col instanceof Tuple) {
            convertStringToDataByteArray((Tuple) col);
        } else if (col instanceof DataBag) {
            Iterator<Tuple> it = ((DataBag) col).iterator();
            while (it.hasNext()) {
                convertStringToDataByteArray(it.next());
            }
        }
    }
}
/**
 * Creates a temp file containing the given lines; see the two-arg overload.
 */
public static File createFile(String[] data) throws Exception{
return createFile(null,data);
}
/**
 * Writes {@code data} (one element per line) to {@code filePath}, or to a
 * fresh temp file when the path is null/empty. Parent directories are
 * created as needed and the file is marked delete-on-exit.
 *
 * @return the written file
 */
public static File createFile(String filePath, String[] data) throws Exception {
    File f;
    if (null == filePath || filePath.isEmpty()) {
        f = File.createTempFile("tmp", "");
    } else {
        f = new File(filePath);
    }
    File parentDir = f.getParentFile();
    if (parentDir != null && !parentDir.exists()) {
        parentDir.mkdirs();
    }
    f.deleteOnExit();
    PrintWriter pw = new PrintWriter(f);
    try {
        for (String line : data) {
            pw.println(line);
        }
    } finally {
        // Close even if a write fails so the file handle is not leaked.
        pw.close();
    }
    return f;
}
/**
 * Run default set of optimizer rules on new logical plan
 * @param lp
 * @return optimized logical plan
 * @throws FrontendException
 */
public static LogicalPlan optimizeNewLP(
        LogicalPlan lp)
        throws FrontendException {
    // Renamed the local from "DanglingNestedNodeRemover" — it shadowed the
    // class name, which is both confusing and against naming convention.
    DanglingNestedNodeRemover danglingNestedNodeRemover = new DanglingNestedNodeRemover(lp);
    danglingNestedNodeRemover.visit();
    UidResetter uidResetter = new UidResetter(lp);
    uidResetter.visit();
    SchemaResetter schemaResetter =
            new SchemaResetter(lp, true /*disable duplicate uid check*/);
    schemaResetter.visit();
    StoreAliasSetter storeAliasSetter = new StoreAliasSetter(lp);
    storeAliasSetter.visit();
    // run optimizer
    org.apache.pig.newplan.logical.optimizer.LogicalPlanOptimizer optimizer =
            new org.apache.pig.newplan.logical.optimizer.LogicalPlanOptimizer(lp, 100, null);
    optimizer.optimize();
    SortInfoSetter sortInfoSetter = new SortInfoSetter(lp);
    sortInfoSetter.visit();
    return lp;
}
/**
* migrate old LP(logical plan) to new LP, optimize it, and build physical
* plan
* @param lp
* @param pc PigContext
* @return physical plan
* @throws Exception
*/
public static PhysicalPlan buildPhysicalPlanFromNewLP(
LogicalPlan lp, PigContext pc)
throws Exception {
// Translate the (already optimized) logical plan to a physical plan.
LogToPhyTranslationVisitor visitor = new LogToPhyTranslationVisitor(lp);
visitor.setPigContext(pc);
visitor.visit();
return visitor.getPhysicalPlan();
}
/**
 * Compiles a physical plan into a MapReduce operator plan without running
 * the MR-level optimizer (see buildMRPlanWithOptimizer for that).
 */
public static MROperPlan buildMRPlan(PhysicalPlan pp, PigContext pc) throws Exception{
MRCompiler comp = new MRCompiler(pp, pc);
comp.compile();
comp.aggregateScalarsFiles();
comp.connectSoftLink();
return comp.getMRPlan();
}
/**
 * Compiles a physical plan into an optimized MapReduce operator plan via
 * the full MapReduceLauncher pipeline.
 */
public static MROperPlan buildMRPlanWithOptimizer(PhysicalPlan pp, PigContext pc) throws Exception {
MapRedUtil.checkLeafIsStore(pp, pc);
MapReduceLauncher launcher = new MapReduceLauncher();
return launcher.compile(pp,pc);
}
/**
 * Full pipeline convenience: parse the query, optimize the logical plan,
 * build the physical plan, and compile it to an optimized MR plan.
 */
public static MROperPlan buildMRPlan(String query, PigContext pc) throws Exception {
    LogicalPlan logicalPlan = Util.parse(query, pc);
    Util.optimizeNewLP(logicalPlan);
    PhysicalPlan physicalPlan = Util.buildPhysicalPlanFromNewLP(logicalPlan, pc);
    return Util.buildMRPlanWithOptimizer(physicalPlan, pc);
}
/**
 * Registers a (possibly multi-line) query by writing it to a temp script
 * file and handing the script to the PigServer.
 */
public static void registerMultiLineQuery(PigServer pigServer, String query) throws IOException {
    File f = File.createTempFile("tmp", "");
    // Don't leave the temp script behind after the test JVM exits.
    f.deleteOnExit();
    PrintWriter pw = new PrintWriter(f);
    try {
        pw.println(query);
    } finally {
        pw.close();
    }
    pigServer.registerScript(f.getCanonicalPath());
}
/**
 * Runs the given command under $JAVA_HOME/bin and returns its exit code.
 */
public static int executeJavaCommand(String cmd) throws Exception {
return executeJavaCommandAndReturnInfo(cmd).exitCode;
}
/**
 * Drains an InputStream on a background thread, accumulating the lines
 * (joined by '\n') into {@link #getMessage()}.
 */
public static class ReadStream implements Runnable {
    InputStream is;
    Thread thread;
    // Published snapshot of everything read so far.
    String message = "";
    public ReadStream(InputStream is) {
        this.is = is;
    }
    /** Starts the background drain thread. */
    public void start() {
        thread = new Thread(this);
        thread.start();
    }
    @Override
    public void run() {
        // Accumulate in a StringBuilder instead of repeated String
        // concatenation, which was O(n^2) for long process output.
        StringBuilder sb = new StringBuilder();
        try {
            InputStreamReader isr = new InputStreamReader(is);
            BufferedReader br = new BufferedReader(isr);
            String s;
            while ((s = br.readLine()) != null) {
                if (sb.length() > 0) {
                    sb.append('\n');
                }
                sb.append(s);
                // Keep the field current for callers polling mid-run.
                message = sb.toString();
            }
            is.close();
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
    /** Returns the output captured so far. */
    public String getMessage() {
        return message;
    }
}
/**
 * Runs {@code cmd} (prefixed with $JAVA_HOME/bin when JAVA_HOME is set)
 * and returns its exit code plus complete stdout/stderr contents.
 */
public static ProcessReturnInfo executeJavaCommandAndReturnInfo(String cmd)
        throws Exception {
    String javaHome = System.getenv("JAVA_HOME");
    if (javaHome != null) {
        String fileSeparator = System.getProperty("file.separator");
        cmd = javaHome + fileSeparator + "bin" + fileSeparator + cmd;
    }
    Process cmdProc = Runtime.getRuntime().exec(cmd);
    ProcessReturnInfo pri = new ProcessReturnInfo();
    ReadStream stdoutStream = new ReadStream(cmdProc.getInputStream());
    ReadStream stderrStream = new ReadStream(cmdProc.getErrorStream());
    stdoutStream.start();
    stderrStream.start();
    pri.exitCode = cmdProc.waitFor();
    // Fix for a race: waitFor() only waits for process exit, not for the
    // reader threads to finish draining, so messages could be truncated.
    stdoutStream.thread.join();
    stderrStream.thread.join();
    pri.stdoutContents = stdoutStream.getMessage();
    pri.stderrContents = stderrStream.getMessage();
    return pri;
}
/** Exit code and captured output of an executed subprocess. */
public static class ProcessReturnInfo {
    public int exitCode;
    public String stderrContents;
    public String stdoutContents;
    @Override
    public String toString() {
        // Fixed: the original string was missing the closing "]".
        return "[Exit code: " + exitCode + ", stdout: <" + stdoutContents + ">, " +
                "stderr: <" + stderrContents + ">]";
    }
}
/**
 * Recursively deletes {@code path} and everything under it.
 *
 * @return true if the final delete of {@code path} itself succeeded
 */
static public boolean deleteDirectory(File path) {
    if (path.exists()) {
        File[] files = path.listFiles();
        // listFiles() returns null on I/O error or if path is not a
        // directory; the original dereferenced it unconditionally (NPE).
        if (files != null) {
            for (File file : files) {
                if (file.isDirectory()) {
                    deleteDirectory(file);
                } else {
                    file.delete();
                }
            }
        }
    }
    return path.delete();
}
/**
* Creates an input file on the filesystem configured in the PigContext.
* @param pigContext
* @param fileName
* @param input lines to write, one per array element
* @throws IOException
*/
public static void createInputFile(PigContext pigContext,
String fileName, String[] input) throws IOException {
Configuration conf = ConfigurationUtil.toConfiguration(
pigContext.getProperties());
// Delegate to the FileSystem-based overload defined elsewhere in this class.
createInputFile(FileSystem.get(conf), fileName, input);
}
/**
 * Reads the lines of {@code fileName} from the filesystem configured in
 * the PigContext (delegates to the FileSystem-based overload).
 */
public static String[] readOutput(PigContext pigContext,
String fileName) throws IOException {
Configuration conf = ConfigurationUtil.toConfiguration(
pigContext.getProperties());
return readOutput(FileSystem.get(conf), fileName);
}
/**
 * Pretty-prints a logical plan to stderr.
 */
public static void printPlan(LogicalPlan logicalPlan) throws Exception {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    PrintStream printStream = new PrintStream(buffer);
    new LogicalPlanPrinter(logicalPlan, printStream).visit();
    System.err.println(buffer.toString());
}
/**
 * Pretty-prints a physical plan (verbose text explain) to stderr.
 */
public static void printPlan(PhysicalPlan physicalPlan) throws Exception {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    PrintStream printStream = new PrintStream(buffer);
    physicalPlan.explain(printStream, "text", true);
    System.err.println(buffer.toString());
}
/**
 * Reads a delimited text file into a list of Tuples, one tuple per line,
 * splitting each line on the given (regex) delimiter.
 */
public static List<Tuple> readFile2TupleList(String file, String delimiter) throws IOException{
    List<Tuple> tuples = new ArrayList<Tuple>();
    BufferedReader reader = new BufferedReader(
            new InputStreamReader(new FileInputStream(file)));
    try {
        String line;
        while ((line = reader.readLine()) != null) {
            String[] tokens = line.split(delimiter);
            tuples.add(TupleFactory.getInstance().newTuple(Arrays.asList(tokens)));
        }
    } finally {
        // Close the reader even when a read throws (the original leaked it).
        reader.close();
    }
    return tuples;
}
/**
* Delete the existing logFile for the class and set the logging to a
* use a new log file and set log level to DEBUG
* @param clazz class for which the log file is being set
* @param logFile current log file
* @return new log file
* @throws Exception
*/
public static File resetLog(Class<?> clazz, File logFile) throws Exception {
if (logFile != null)
logFile.delete();
Logger logger = Logger.getLogger(clazz);
// Replace any previous appenders so only the fresh file receives output.
logger.removeAllAppenders();
logger.setLevel(Level.DEBUG);
SimpleLayout layout = new SimpleLayout();
File newLogFile = File.createTempFile("log", "");
// append=false, bufferedIO=false, bufferSize=0: unbuffered fresh file so
// tests can read messages immediately.
FileAppender appender = new FileAppender(layout, newLogFile.toString(),
false, false, 0);
logger.addAppender(appender);
return newLogFile;
}
/**
 * Check if logFile (does not/)contains the given list of messages.
 * @param logFile
 * @param messages
 * @param expected if true, the messages are expected in the logFile,
 *      otherwise messages should not be there in the log
 */
public static void checkLogFileMessage(File logFile, String[] messages, boolean expected) {
    BufferedReader reader = null;
    try {
        reader = new BufferedReader(new FileReader(logFile));
        // StringBuilder instead of O(n^2) String concatenation.
        StringBuilder logMessage = new StringBuilder();
        String line;
        while ((line = reader.readLine()) != null) {
            logMessage.append(line).append("\n");
        }
        for (String message : messages) {
            boolean present = logMessage.indexOf(message) != -1;
            if (expected && !present) {
                // Also fixes the missing space between "in" and "log".
                fail("The message " + message + " is not present in " +
                        "log file contents: " + logMessage);
            } else if (!expected && present) {
                fail("The message " + message + " is present in " +
                        "log file contents: " + logMessage);
            }
        }
    } catch (IOException e) {
        fail("caught exception while checking log message :" + e);
    } finally {
        // The original leaked the reader whenever readLine/fail threw.
        if (reader != null) {
            try {
                reader.close();
            } catch (IOException ignored) {
                // best-effort close; nothing useful to do here
            }
        }
    }
}
/**
 * Builds the logical plan for a query by registering it in batch mode and
 * invoking PigServer's private buildLp() via reflection.
 */
public static LogicalPlan buildLp(PigServer pigServer, String query)
throws Exception {
pigServer.setBatchOn();
pigServer.registerQuery( query );
// buildLp is non-public on PigServer, so open it up reflectively.
java.lang.reflect.Method buildLp = pigServer.getClass().getDeclaredMethod("buildLp");
buildLp.setAccessible(true);
return (LogicalPlan ) buildLp.invoke( pigServer );
}
/**
 * Builds the optimized physical plan for a query via the server's
 * execution engine.
 */
public static PhysicalPlan buildPp(PigServer pigServer, String query)
throws Exception {
LogicalPlan lp = buildLp( pigServer, query );
lp.optimize(pigServer.getPigContext());
return ((HExecutionEngine)pigServer.getPigContext().getExecutionEngine()).compile(lp,
pigServer.getPigContext().getProperties());
}
/**
 * Parses a Pig Latin query into a validated (but unoptimized) logical plan.
 */
public static LogicalPlan parse(String query, PigContext pc) throws FrontendException {
Map<String, String> fileNameMap = new HashMap<String, String>();
QueryParserDriver parserDriver = new QueryParserDriver( pc, "test", fileNameMap );
org.apache.pig.newplan.logical.relational.LogicalPlan lp = parserDriver.parse( query );
lp.validate(pc, "test", false);
return lp;
}
/**
 * Parses a Pig Latin query into a validated logical plan.
 * NOTE(review): this is currently byte-for-byte identical to parse();
 * presumably a preprocessing step was intended here — confirm before
 * consolidating the two.
 */
public static LogicalPlan parseAndPreprocess(String query, PigContext pc) throws FrontendException {
Map<String, String> fileNameMap = new HashMap<String, String>();
QueryParserDriver parserDriver = new QueryParserDriver( pc, "test", fileNameMap );
org.apache.pig.newplan.logical.relational.LogicalPlan lp = parserDriver.parse( query );
lp.validate(pc, "test", false);
return lp;
}
/**
 * Replaces any alias in given schema that has name that starts with
 * "NullAlias" with null. It does a case insensitive comparison of
 * the alias name, and recurses into nested field schemas.
 * @param sch schema to patch in place (null tolerated)
 */
public static void schemaReplaceNullAlias(Schema sch){
    if (sch == null) {
        return;
    }
    for (FieldSchema fieldSchema : sch.getFields()) {
        String alias = fieldSchema.alias;
        if (alias != null && alias.toLowerCase().startsWith("nullalias")) {
            fieldSchema.alias = null;
        }
        schemaReplaceNullAlias(fieldSchema.schema);
    }
}
/**
 * Array-based convenience wrapper for the List-based
 * checkQueryOutputsAfterSort overload.
 */
static public void checkQueryOutputsAfterSort(Iterator<Tuple> actualResultsIt,
        Tuple[] expectedResArray) {
    List<Tuple> expected = new ArrayList<Tuple>(Arrays.asList(expectedResArray));
    checkQueryOutputsAfterSort(actualResultsIt, expected);
}
/**
 * Recursively replaces every DataBag field in the tuple with a sorted bag
 * so that bag contents compare deterministically regardless of the order
 * they were produced in.
 */
static public void convertBagToSortedBag(Tuple t) {
for (int i=0;i<t.size();i++) {
Object obj = null;
try {
obj = t.get(i);
} catch (ExecException e) {
// shall not happen
}
if (obj instanceof DataBag) {
DataBag bag = (DataBag)obj;
Iterator<Tuple> iter = bag.iterator();
// Rebuild the bag in sorted form, recursing into each inner tuple.
DataBag sortedBag = DefaultBagFactory.getInstance().newSortedBag(null);
while (iter.hasNext()) {
Tuple t2 = iter.next();
sortedBag.add(t2);
convertBagToSortedBag(t2);
}
try {
t.set(i, sortedBag);
} catch (ExecException e) {
// shall not happen
}
}
}
}
/**
 * Convenience overload: parses {@code schemaString} into a LogicalSchema
 * and delegates to the schema-based variant.
 */
static public void checkQueryOutputsAfterSortRecursive(Iterator<Tuple> actualResultsIt,
String[] expectedResArray, String schemaString) throws IOException {
LogicalSchema resultSchema = org.apache.pig.impl.util.Utils.parseSchema(schemaString);
checkQueryOutputsAfterSortRecursive(actualResultsIt, expectedResArray, resultSchema);
}
/**
* Helper function to check if the result of a Pig Query is in line with
* expected results. It sorts actual and expected string results before comparison
*
* @param actualResultsIt Result of the executed Pig query
* @param expectedResArray Expected string results to validate against
* @param schema fieldSchema of expecteResArray
* @throws IOException
*/
static public void checkQueryOutputsAfterSortRecursive(Iterator<Tuple> actualResultsIt,
String[] expectedResArray, LogicalSchema schema) throws IOException {
// Wrap the row schema in a tuple field schema so the Utf8 caster can
// convert each expected string into a typed Tuple.
LogicalFieldSchema fs = new LogicalFieldSchema("tuple", schema, DataType.TUPLE);
ResourceFieldSchema rfs = new ResourceFieldSchema(fs);
LoadCaster caster = new Utf8StorageConverter();
List<Tuple> actualResList = new ArrayList<Tuple>();
while(actualResultsIt.hasNext()){
actualResList.add(actualResultsIt.next());
}
List<Tuple> expectedResList = new ArrayList<Tuple>();
for (String str : expectedResArray) {
Tuple newTuple = caster.bytesToTuple(str.getBytes(), rfs);
expectedResList.add(newTuple);
}
// Sort inner bags, then the outer lists, so comparison is order-insensitive.
for (Tuple t : actualResList) {
convertBagToSortedBag(t);
}
for (Tuple t : expectedResList) {
convertBagToSortedBag(t);
}
Collections.sort(actualResList);
Collections.sort(expectedResList);
Assert.assertEquals("Comparing actual and expected results. ",
expectedResList, actualResList);
}
/**
 * Reads an entire text file into a String, normalizing every line ending
 * to "\n" (a trailing newline is always appended).
 */
public static String readFile(File file) throws IOException {
    BufferedReader reader = new BufferedReader(new FileReader(file));
    // StringBuilder avoids the original's O(n^2) String concatenation.
    StringBuilder result = new StringBuilder();
    try {
        String line;
        while ((line = reader.readLine()) != null) {
            result.append(line).append("\n");
        }
    } finally {
        // Close even when readLine throws (the original leaked the reader).
        reader.close();
    }
    return result.toString();
}
/**
* this removes the signature from the serialized plan changing the way the
* unique signature is generated should not break this test
* @param plan the plan to canonicalize
* @return the cleaned up plan
*/
public static String removeSignature(String plan) {
// Blank out the signature slot ("','','<sig>','scope','true'))" ->
// "','','','scope','true'))") so plans compare equal across runs.
return plan.replaceAll("','','[^']*','scope','true'\\)\\)", "','','','scope','true'))");
}
/**
 * True unless the running Hadoop is exactly version 0.20.2.
 */
public static boolean isHadoop203plus() {
    String version = org.apache.hadoop.util.VersionInfo.getVersion();
    return !version.matches("\\b0\\.20\\.2\\b");
}
/**
 * True when the running Hadoop is a 0.20.205.x release.
 */
public static boolean isHadoop205() {
    String version = org.apache.hadoop.util.VersionInfo.getVersion();
    return version.matches("\\b0\\.20\\.205\\..+");
}
/**
 * True when the running Hadoop reports a 1.x version.
 * NOTE(review): the regex "\\b1\\.*\\..+" reads as "1", zero-or-more
 * literal dots, a dot, then anything — presumably "\\b1\\.\\d+\\..+" was
 * intended; it happens to match 1.x.y strings, so verify before changing.
 */
public static boolean isHadoop1_x() {
String version = org.apache.hadoop.util.VersionInfo.getVersion();
if (version.matches("\\b1\\.*\\..+"))
return true;
return false;
}
/**
 * True when the Spark on the classpath is version 2.2 or later.
 * The character-class intersection [\d&&[^01]] accepts a single minor
 * digit 2-9; [\d]{2,} accepts two-or-more-digit minors (>= 10).
 */
public static boolean isSpark2_2_plus() throws IOException {
String sparkVersion = package$.MODULE$.SPARK_VERSION();
return sparkVersion != null && sparkVersion.matches("2\\.([\\d&&[^01]]|[\\d]{2,})\\..*");
}
/**
 * When {@code toSort} is true, sorts inner bags and then the tuple list so
 * results can be compared order-insensitively; otherwise does nothing.
 */
public static void sortQueryOutputsIfNeed(List<Tuple> actualResList, boolean toSort){
    // Idiom fix: "toSort == true" replaced with the boolean itself.
    if (toSort) {
        for (Tuple t : actualResList) {
            Util.convertBagToSortedBag(t);
        }
        Collections.sort(actualResList);
    }
}
/**
 * Dispatches to the sorted or unsorted comparison variant depending on
 * {@code checkAfterSort}.
 */
public static void checkQueryOutputs(Iterator<Tuple> actualResults, List<Tuple> expectedResults, boolean checkAfterSort) {
if (checkAfterSort) {
checkQueryOutputsAfterSort(actualResults, expectedResults);
} else {
checkQueryOutputs(actualResults, expectedResults);
}
}
/**
 * Schema-aware dispatcher: compares results with or without sorting
 * depending on {@code checkAfterSort}.
 */
static public void checkQueryOutputs(Iterator<Tuple> actualResultsIt,
String[] expectedResArray, LogicalSchema schema, boolean
checkAfterSort) throws IOException {
if (checkAfterSort) {
checkQueryOutputsAfterSortRecursive(actualResultsIt,
expectedResArray, schema);
} else {
checkQueryOutputs(actualResultsIt,
expectedResArray, schema);
}
}
/**
 * Compares actual results against expected tuple strings (cast through the
 * given schema) preserving tuple order; only inner bags are sorted so bag
 * ordering differences do not cause false failures.
 */
static void checkQueryOutputs(Iterator<Tuple> actualResultsIt,
String[] expectedResArray, LogicalSchema schema) throws IOException {
// Wrap the row schema as a tuple field so Utf8StorageConverter can parse
// each expected string into a typed Tuple.
LogicalFieldSchema fs = new LogicalFieldSchema("tuple", schema, DataType.TUPLE);
ResourceFieldSchema rfs = new ResourceFieldSchema(fs);
LoadCaster caster = new Utf8StorageConverter();
List<Tuple> actualResList = new ArrayList<Tuple>();
while (actualResultsIt.hasNext()) {
actualResList.add(actualResultsIt.next());
}
List<Tuple> expectedResList = new ArrayList<Tuple>();
for (String str : expectedResArray) {
Tuple newTuple = caster.bytesToTuple(str.getBytes(), rfs);
expectedResList.add(newTuple);
}
for (Tuple t : actualResList) {
convertBagToSortedBag(t);
}
for (Tuple t : expectedResList) {
convertBagToSortedBag(t);
}
Assert.assertEquals("Comparing actual and expected results. ",
expectedResList, actualResList);
}
/**
 * Asserts the default/requested/estimated/runtime reducer-parallelism
 * values recorded in the job configuration.
 */
public static void assertParallelValues(long defaultParallel,
long requestedParallel,
long estimatedParallel,
long runtimeParallel,
Configuration conf) {
assertConfLong(conf, "pig.info.reducers.default.parallel", defaultParallel);
assertConfLong(conf, "pig.info.reducers.requested.parallel", requestedParallel);
assertConfLong(conf, "pig.info.reducers.estimated.parallel", estimatedParallel);
assertConfLong(conf, MRConfiguration.REDUCE_TASKS, runtimeParallel);
}
/**
 * Asserts a long-valued configuration parameter (-1 when unset).
 */
public static void assertConfLong(Configuration conf, String param, long expected) {
assertEquals("Unexpected value found in configs for " + param, expected, conf.getLong(param, -1));
}
/**
* Returns a PathFilter that filters out filenames that start with _
* (e.g. _SUCCESS and _logs marker files).
* @return PathFilter
*/
public static PathFilter getSuccessMarkerPathFilter() {
return new PathFilter() {
@Override
public boolean accept(Path p) {
return !p.getName().startsWith("_");
}
};
}
/**
 *
 * @param expected
 *            Exception class that is expected to be thrown
 * @param found
 *            Exception that occurred in the test
 * @param message
 *            expected String to verify against
 */
public static void assertExceptionAndMessage(Class<?> expected,
        Exception found, String message) {
    assertEquals(expected, found.getClass());
    // Fixed argument order: JUnit's assertEquals takes (expected, actual);
    // the original passed the actual message first, producing misleading
    // failure output.
    assertEquals(message, found.getMessage());
}
/**
* Called to reset ThreadLocal or static states that PigServer depends on
* when a test suite has testcases switching between LOCAL and MAPREDUCE/TEZ
* execution modes
*/
public static void resetStateForExecModeSwitch() {
FileLocalizer.setInitialized(false);
// For tez testing, we want to avoid TezResourceManager/LocalResource reuse
// (when switching between local and mapreduce/tez)
TezResourceManager.dropInstance();
// TODO: once we have Tez local mode, we can get rid of this. For now,
// if we run this test suite in Tez mode and there are some tests
// in LOCAL mode, we need to set ScriptState to
// null to force ScriptState gets initialized every time.
ScriptState.start(null);
}
/**
 * True when the exec type is classic MapReduce.
 */
public static boolean isMapredExecType(ExecType execType) {
return execType == ExecType.MAPREDUCE;
}
/**
 * True for any Tez-flavored exec type (e.g. "tez", "tez_local").
 */
public static boolean isTezExecType(ExecType execType) {
    return execType.name().toLowerCase().startsWith("tez");
}
/**
 * True for any Spark-flavored exec type (e.g. "spark", "spark_local").
 */
public static boolean isSparkExecType(ExecType execType) {
    return execType.name().toLowerCase().startsWith("spark");
}
/**
 * Locates the single pig*h1.jar / pig*h2.jar (excluding the "all" jar) in
 * the working directory, choosing the hadoop suffix from the
 * "hadoopversion" system property.
 *
 * @throws RuntimeException when zero or multiple candidate jars are found
 */
public static String findPigJarName() {
    // Yoda-style equals guards against the property being unset; the
    // original NPE'd when -Dhadoopversion was not supplied.
    final String suffix = "20".equals(System.getProperty("hadoopversion")) ? "1" : "2";
    File baseDir = new File(".");
    String[] jarNames = baseDir.list(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            if (!name.matches("pig.*h" + suffix + "\\.jar")) {
                return false;
            }
            if (name.contains("all")) {
                return false;
            }
            return true;
        }
    });
    if (jarNames == null || jarNames.length != 1) {
        throw new RuntimeException("Cannot find pig.jar");
    }
    return jarNames[0];
}
/**
 * Maps the "test.exec.type" system property to the matching local exec
 * type ("tez" -> tez_local, "spark" -> spark_local, otherwise plain local).
 */
public static ExecType getLocalTestMode() throws Exception {
String execType = System.getProperty("test.exec.type");
if (execType != null) {
if (execType.equals("tez")) {
return ExecTypeProvider.fromString("tez_local");
} else if (execType.equals("spark")) {
return ExecTypeProvider.fromString("spark_local");
}
}
return ExecTypeProvider.fromString("local");
}
/**
 * Attaches a named WriterAppender to the log4j loggers of the given
 * classes so tests can capture their log output via {@code writer}.
 */
public static void createLogAppender(String appenderName, Writer writer, Class...clazzes) {
WriterAppender writerAppender = new WriterAppender(new PatternLayout("%d [%t] %-5p %c %x - %m%n"), writer);
writerAppender.setName(appenderName);
for (Class clazz : clazzes) {
Logger logger = Logger.getLogger(clazz);
logger.addAppender(writerAppender);
}
}
/**
 * Closes and detaches the named appender (added via createLogAppender)
 * from the loggers of the given classes.
 */
public static void removeLogAppender(String appenderName, Class...clazzes) {
    for (Class clazz : clazzes) {
        Logger logger = Logger.getLogger(clazz);
        Appender appender = logger.getAppender(appenderName);
        // Guard against a logger that never had this appender attached;
        // the original NPE'd on appender.close().
        if (appender != null) {
            appender.close();
            logger.removeAppender(appenderName);
        }
    }
}
/**
 * Returns the first "part-*" output file under the given dfs directory.
 * Assumes at least one part file exists.
 */
public static Path getFirstPartFile(Path path) throws Exception {
FileStatus[] parts = FileSystem.get(path.toUri(), new Configuration()).listStatus(path,
new PathFilter() {
@Override
public boolean accept(Path path) {
return path.getName().startsWith("part-");
}
});
return parts[0].getPath();
}
/**
 * Returns the first "part-*" output file in the given local directory.
 * Assumes at least one part file exists.
 */
public static File getFirstPartFile(File dir) throws Exception {
File[] parts = dir.listFiles(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.startsWith("part-");
}; // stray semicolon tolerated by javac
});
return parts[0];
}
@SuppressWarnings("rawtypes")
/**
 * Returns a per-test-class scratch directory under TEST_DIR/testdata.
 */
public static String getTestDirectory(Class testClass) {
return TEST_DIR + Path.SEPARATOR + "testdata" + Path.SEPARATOR +testClass.getSimpleName();
}
}
| apache/pig | test/org/apache/pig/test/Util.java | Java | apache-2.0 | 58,248 |
/*!
* LESS - Leaner CSS v1.7.0
* http://lesscss.org
*
* Copyright (c) 2009-2014, Alexis Sellier <self@cloudhead.net>
* Licensed under the Apache v2 License.
*
*/
/** * @license Apache v2
*/
(function (window, undefined) {//
// Stub out `require` in the browser
//
// Browser shim for require(): resolves ids like "./tree" against the
// window.less namespace (so require('./tree') -> window.less.tree).
function require(arg) {
return window.less[arg.split('/')[1]];
};
// (Re)create window.less when it is missing, or when it has been shadowed
// by a DOM element whose id is "less" (such elements have a nodeType).
if (typeof(window.less) === 'undefined' || typeof(window.less.nodeType) !== 'undefined') { window.less = {}; }
less = window.less;
tree = window.less.tree = {};
less.mode = 'browser';
// NOTE: these declarations are hoisted above the assignments just above,
// so the assignments target the local vars, not globals.
var less, tree;
// Node.js does not have a header file added which defines less
if (less === undefined) {
less = exports;
tree = require('./tree');
less.mode = 'node';
}
//
// less.js - parser
//
// A relatively straight-forward predictive parser.
// There is no tokenization/lexing stage, the input is parsed
// in one sweep.
//
// To make the parser fast enough to run in the browser, several
// optimization had to be made:
//
// - Matching and slicing on a huge input is often cause of slowdowns.
// The solution is to chunkify the input into smaller strings.
// The chunks are stored in the `chunks` var,
// `j` holds the current chunk index, and `currentPos` holds
// the index of the current chunk in relation to `input`.
// This gives us an almost 4x speed-up.
//
// - In many cases, we don't need to match individual tokens;
// for example, if a value doesn't hold any variables, operations
// or dynamic references, the parser can effectively 'skip' it,
// treating it as a literal.
// An example would be '1px solid #000' - which evaluates to itself,
// we don't need to know what the individual components are.
// The drawback, of course is that you don't get the benefits of
// syntax-checking on the CSS. This gives us a 50% speed-up in the parser,
// and a smaller speed-up in the code-gen.
//
//
// Token matching is done with the `$` function, which either takes
// a terminal string or regexp, or a non-terminal function to call.
// It also takes care of moving all the indices forwards.
//
//
less.Parser = function Parser(env) {
var input, // LeSS input string
i, // current index in `input`
j, // current chunk
saveStack = [], // holds state for backtracking
furthest, // furthest index the parser has gone to
chunks, // chunkified input
current, // current chunk
currentPos, // index of current chunk, in `input`
parser,
parsers,
rootFilename = env && env.filename;
// Top parser on an import tree must be sure there is one "env"
// which will then be passed around by reference.
if (!(env instanceof tree.parseEnv)) {
env = new tree.parseEnv(env);
}
// Import bookkeeping for the parser: tracks search paths, files in flight,
// parsed roots and raw contents, and performs the async load+parse of each
// @import target.
var imports = this.imports = {
paths: env.paths || [], // Search paths, when importing
queue: [], // Files which haven't been imported yet
files: env.files, // Holds the imported parse trees
contents: env.contents, // Holds the imported file contents
contentsIgnoredChars: env.contentsIgnoredChars, // lines inserted, not in the original less
mime: env.mime, // MIME type of .less files
error: null, // Error in parsing/evaluating an import
// Queue `path` for import; `callback(e, root, importedPreviously, fullPath)`
// fires once the file has been loaded and (unless inline) parsed.
push: function (path, currentFileInfo, importOptions, callback) {
var parserImports = this;
this.queue.push(path);
var fileParsedFunc = function (e, root, fullPath) {
parserImports.queue.splice(parserImports.queue.indexOf(path), 1); // Remove the path from the queue
var importedPreviously = fullPath === rootFilename;
parserImports.files[fullPath] = root; // Store the root
// Remember only the first import error encountered.
if (e && !parserImports.error) { parserImports.error = e; }
callback(e, root, importedPreviously, fullPath);
};
if (less.Parser.importer) {
less.Parser.importer(path, currentFileInfo, fileParsedFunc, env);
} else {
less.Parser.fileLoader(path, currentFileInfo, function(e, contents, fullPath, newFileInfo) {
if (e) {fileParsedFunc(e); return;}
// Each import parses in a child env so file-specific state is isolated.
var newEnv = new tree.parseEnv(env);
newEnv.currentFileInfo = newFileInfo;
newEnv.processImports = false;
newEnv.contents[fullPath] = contents;
// Reference imports propagate their "reference" flag downwards.
if (currentFileInfo.reference || importOptions.reference) {
newFileInfo.reference = true;
}
if (importOptions.inline) {
fileParsedFunc(null, contents, fullPath);
} else {
new(less.Parser)(newEnv).parse(contents, function (e, root) {
fileParsedFunc(e, root, fullPath);
});
}
}, env);
}
}
};
// Snapshot the parser position (chunk text, absolute index i, chunk index j)
// so a speculative parse can backtrack on failure.
function save() { currentPos = i; saveStack.push( { current: current, i: i, j: j }); }
// Backtrack to the most recent save().
function restore() { var state = saveStack.pop(); current = state.current; currentPos = i = state.i; j = state.j; }
// Commit a speculative parse: drop the most recent save() without moving.
function forget() { saveStack.pop(); }
// Re-align `current` so that it starts at absolute index i.
function sync() {
if (i > currentPos) {
current = current.slice(i - currentPos);
currentPos = i;
}
}
// True for space, LF or TAB at `pos` (note: CR is not whitespace here).
function isWhitespace(str, pos) {
var code = str.charCodeAt(pos | 0);
return (code <= 32) && (code === 32 || code === 10 || code === 9);
}
//
// Parse from a token, regexp or string, and move forward if match
//
// Match `tok` (a single-character string or a RegExp) at the current
// position; on success advance past the match and any trailing whitespace
// and return the matched text (or the regexp's capture array).
function $(tok) {
var tokType = typeof tok,
match, length;
// Either match a single character in the input,
// or match a regexp in the current chunk (`current`).
//
if (tokType === "string") {
if (input.charAt(i) !== tok) {
return null;
}
skipWhitespace(1);
return tok;
}
// regexp
sync ();
if (! (match = tok.exec(current))) {
return null;
}
length = match[0].length;
// The match is confirmed, add the match length to `i`,
// and consume any extra white-space characters (' ' || '\n')
// which come after that. The reason for this is that LeSS's
// grammar is mostly white-space insensitive.
//
skipWhitespace(length);
if(typeof(match) === 'string') {
return match;
} else {
// Single-group matches collapse to the string; multi-group returns array.
return match.length === 1 ? match[0] : match;
}
}
// Specialization of $(tok)
// Regexp-only fast path of $(tok): inlines sync(), matches `tok` against
// the current chunk, advances on success, returns null on failure.
function $re(tok) {
if (i > currentPos) {
current = current.slice(i - currentPos);
currentPos = i;
}
var m = tok.exec(current);
if (!m) {
return null;
}
skipWhitespace(m[0].length);
if(typeof m === "string") {
return m;
}
return m.length === 1 ? m[0] : m;
}
var _$re = $re;
// Specialization of $(tok)
// Single-character fast path of $(tok): consume `tok` (plus trailing
// whitespace) if it is the next input character, else return null.
function $char(tok) {
if (input.charAt(i) !== tok) {
return null;
}
skipWhitespace(1);
return tok;
}
// Advance `i` by `length`, then consume any run of space/LF/TAB/CR,
// keeping `current`/`currentPos` in sync and rolling over to the next
// chunk when the current one is exhausted. Returns true-ish when the
// position changed.
function skipWhitespace(length) {
var oldi = i, oldj = j,
curr = i - currentPos,
endIndex = i + current.length - curr,
mem = (i += length),
inp = input,
c;
for (; i < endIndex; i++) {
c = inp.charCodeAt(i);
if (c > 32) {
break;
}
// Stop on any control char that is not space/LF/TAB/CR.
if ((c !== 32) && (c !== 10) && (c !== 9) && (c !== 13)) {
break;
}
}
current = current.slice(length + i - mem + curr);
currentPos = i;
if (!current.length && (j < chunks.length - 1)) {
current = chunks[++j];
skipWhitespace(0); // skip space at the beginning of a chunk
return true; // things changed
}
return oldi !== i || oldj !== j;
}
// Like $(arg) / arg(), but throws a syntax error (via error()) instead of
// returning null when the expected token or production is absent.
function expect(arg, msg) {
// some older browsers return typeof 'function' for RegExp
var result = (Object.prototype.toString.call(arg) === '[object Function]') ? arg.call(parsers) : $(arg);
if (result) {
return result;
}
error(msg || (typeof(arg) === 'string' ? "expected '" + arg + "' got '" + input.charAt(i) + "'"
: "unexpected token"));
}
// Specialization of expect()
// Single-character variant of expect(): consume `arg` or throw.
function expectChar(arg, msg) {
if (input.charAt(i) === arg) {
skipWhitespace(1);
return arg;
}
error(msg || "expected '" + arg + "' got '" + input.charAt(i) + "'");
}
// Throw a parse error annotated with the current input index and a type
// (defaults to 'Syntax'); callers convert this into a LessError.
function error(msg, type) {
var e = new Error(msg);
e.index = i;
e.type = type || 'Syntax';
throw e;
}
// Same as $(), but don't change the state of the parser,
// just return the match.
// Non-consuming lookahead: test a character against the next input char,
// or a regexp against the current chunk, without moving the parser.
function peek(tok) {
if (typeof(tok) === 'string') {
return input.charAt(i) === tok;
} else {
return tok.test(current);
}
}
// Specialization of peek()
// Non-consuming single-character lookahead.
function peekChar(tok) {
return input.charAt(i) === tok;
}
// Return the source text an error belongs to: the imported file's stored
// contents when the error came from a different file, else this input.
function getInput(e, env) {
if (e.filename && env.currentFileInfo.filename && (e.filename !== env.currentFileInfo.filename)) {
return parser.imports.contents[e.filename];
} else {
return input;
}
}
// Map an absolute character index in `inputStream` to a 0-based
// line/column pair. Column is counted back to the previous newline;
// line is only computed when `index` is numeric, otherwise it is null.
function getLocation(index, inputStream) {
    var column = -1,
        pos = index + 1;
    while (--pos >= 0 && inputStream.charAt(pos) !== '\n') {
        column++;
    }
    var line = null;
    if (typeof index === 'number') {
        line = (inputStream.slice(0, index).match(/\n/g) || "").length;
    }
    return {
        line: line,
        column: column
    };
}
// Build the { lineNumber, fileName } record attached to rules when
// env.dumpLineNumbers is enabled. Outside the browser/rhino modes the
// filename is resolved to an absolute path.
function getDebugInfo(index, inputStream, env) {
var filename = env.currentFileInfo.filename;
if(less.mode !== 'browser' && less.mode !== 'rhino') {
filename = require('path').resolve(filename);
}
return {
lineNumber: getLocation(index, inputStream).line + 1,
fileName: filename
};
}
// Normalise an internal error object `e` into a LessError carrying
// location info (1-based line, column, source extract) resolved against
// the file the error originated from.
function LessError(e, env) {
var input = getInput(e, env),
loc = getLocation(e.index, input),
line = loc.line,
col = loc.column,
callLine = e.call && getLocation(e.call, input).line,
lines = input.split('\n');
this.type = e.type || 'Syntax';
this.message = e.message;
this.filename = e.filename || env.currentFileInfo.filename;
this.index = e.index;
this.line = typeof(line) === 'number' ? line + 1 : null;
// NOTE(review): when e.call is absent, callLine is undefined so
// callLine + 1 evaluates to NaN; kept as-is to preserve behavior.
this.callLine = callLine + 1;
this.callExtract = lines[callLine];
this.stack = e.stack;
this.column = col;
// The offending line with one line of context on either side.
this.extract = [
lines[line - 1],
lines[line],
lines[line + 1]
];
}
// Make LessError a subclass of Error (pre-ES6 prototype-chain style).
LessError.prototype = new Error();
LessError.prototype.constructor = LessError;
// Keep a reference to the (possibly defaulted) environment on the parser.
this.env = env = env || {};
// The optimization level dictates the thoroughness of the parser,
// the lower the number, the less nodes it will create in the tree.
// This could matter for debugging, or if you want to access
// the individual nodes in the tree. Defaults to 1 when not supplied.
this.optimization = ('optimization' in this.env) ? this.env.optimization : 1;
//
// The Parser
//
parser = {
imports: imports,
//
// Parse an input string into an abstract syntax tree,
// @param str A string containing 'less' markup
// @param callback call `callback` when done.
// @param [additionalData] An optional map which can contains vars - a map (key, value) of variables to apply
//
parse: function (str, callback, additionalData) {
var root, line, lines, error = null, globalVars, modifyVars, preText = "";
i = j = currentPos = furthest = 0;
globalVars = (additionalData && additionalData.globalVars) ? less.Parser.serializeVars(additionalData.globalVars) + '\n' : '';
modifyVars = (additionalData && additionalData.modifyVars) ? '\n' + less.Parser.serializeVars(additionalData.modifyVars) : '';
if (globalVars || (additionalData && additionalData.banner)) {
preText = ((additionalData && additionalData.banner) ? additionalData.banner : "") + globalVars;
parser.imports.contentsIgnoredChars[env.currentFileInfo.filename] = preText.length;
}
str = str.replace(/\r\n/g, '\n');
// Remove potential UTF Byte Order Mark
input = str = preText + str.replace(/^\uFEFF/, '') + modifyVars;
parser.imports.contents[env.currentFileInfo.filename] = str;
// Split the input into chunks.
chunks = (function (input) {
var len = input.length, level = 0, parenLevel = 0,
lastOpening, lastOpeningParen, lastMultiComment, lastMultiCommentEndBrace,
chunks = [], emitFrom = 0,
parserCurrentIndex, currentChunkStartIndex, cc, cc2, matched;
// Record a chunker error in the enclosing `error` variable (the chunker
// then returns undefined so the caller can bail out with it).
function fail(msg, index) {
error = new(LessError)({
index: index || parserCurrentIndex,
type: 'Parse',
message: msg,
filename: env.currentFileInfo.filename
}, env);
}
// Push input[emitFrom .. parserCurrentIndex] as a new chunk. Spans
// shorter than 512 chars are held back (merged into the next emit)
// unless `force` is set, so the parser works on reasonably sized pieces.
function emitChunk(force) {
var len = parserCurrentIndex - emitFrom;
if (((len < 512) && !force) || !len) {
return;
}
chunks.push(input.slice(emitFrom, parserCurrentIndex + 1));
emitFrom = parserCurrentIndex + 1;
}
for (parserCurrentIndex = 0; parserCurrentIndex < len; parserCurrentIndex++) {
cc = input.charCodeAt(parserCurrentIndex);
if (((cc >= 97) && (cc <= 122)) || (cc < 34)) {
// a-z or whitespace
continue;
}
switch (cc) {
case 40: // (
parenLevel++;
lastOpeningParen = parserCurrentIndex;
continue;
case 41: // )
if (--parenLevel < 0) {
return fail("missing opening `(`");
}
continue;
case 59: // ;
if (!parenLevel) { emitChunk(); }
continue;
case 123: // {
level++;
lastOpening = parserCurrentIndex;
continue;
case 125: // }
if (--level < 0) {
return fail("missing opening `{`");
}
if (!level && !parenLevel) { emitChunk(); }
continue;
case 92: // \
if (parserCurrentIndex < len - 1) { parserCurrentIndex++; continue; }
return fail("unescaped `\\`");
case 34:
case 39:
case 96: // ", ' and `
matched = 0;
currentChunkStartIndex = parserCurrentIndex;
for (parserCurrentIndex = parserCurrentIndex + 1; parserCurrentIndex < len; parserCurrentIndex++) {
cc2 = input.charCodeAt(parserCurrentIndex);
if (cc2 > 96) { continue; }
if (cc2 == cc) { matched = 1; break; }
if (cc2 == 92) { // \
if (parserCurrentIndex == len - 1) {
return fail("unescaped `\\`");
}
parserCurrentIndex++;
}
}
if (matched) { continue; }
return fail("unmatched `" + String.fromCharCode(cc) + "`", currentChunkStartIndex);
case 47: // /, check for comment
if (parenLevel || (parserCurrentIndex == len - 1)) { continue; }
cc2 = input.charCodeAt(parserCurrentIndex + 1);
if (cc2 == 47) {
// //, find lnfeed
for (parserCurrentIndex = parserCurrentIndex + 2; parserCurrentIndex < len; parserCurrentIndex++) {
cc2 = input.charCodeAt(parserCurrentIndex);
if ((cc2 <= 13) && ((cc2 == 10) || (cc2 == 13))) { break; }
}
} else if (cc2 == 42) {
// /*, find */
lastMultiComment = currentChunkStartIndex = parserCurrentIndex;
for (parserCurrentIndex = parserCurrentIndex + 2; parserCurrentIndex < len - 1; parserCurrentIndex++) {
cc2 = input.charCodeAt(parserCurrentIndex);
if (cc2 == 125) { lastMultiCommentEndBrace = parserCurrentIndex; }
if (cc2 != 42) { continue; }
if (input.charCodeAt(parserCurrentIndex + 1) == 47) { break; }
}
if (parserCurrentIndex == len - 1) {
return fail("missing closing `*/`", currentChunkStartIndex);
}
parserCurrentIndex++;
}
continue;
case 42: // *, check for unmatched */
if ((parserCurrentIndex < len - 1) && (input.charCodeAt(parserCurrentIndex + 1) == 47)) {
return fail("unmatched `/*`");
}
continue;
}
}
if (level !== 0) {
if ((lastMultiComment > lastOpening) && (lastMultiCommentEndBrace > lastMultiComment)) {
return fail("missing closing `}` or `*/`", lastOpening);
} else {
return fail("missing closing `}`", lastOpening);
}
} else if (parenLevel !== 0) {
return fail("missing closing `)`", lastOpeningParen);
}
emitChunk(true);
return chunks;
})(str);
if (error) {
return callback(new(LessError)(error, env));
}
current = chunks[0];
// Start with the primary rule.
// The whole syntax tree is held under a Ruleset node,
// with the `root` property set to true, so no `{}` are
// output. The callback is called when the input is parsed.
try {
root = new(tree.Ruleset)(null, this.parsers.primary());
root.root = true;
root.firstRoot = true;
} catch (e) {
return callback(new(LessError)(e, env));
}
// Wrap root.eval: evaluate the tree against an eval environment and
// serialise it to CSS, running plugin visitors, optional source-map
// generation and optional clean-css minification (node only).
root.toCSS = (function (evaluate) {
return function (options, variables) {
options = options || {};
var evaldRoot,
css,
evalEnv = new tree.evalEnv(options);
//
// Allows setting variables with a hash, so:
//
// `{ color: new(tree.Color)('#f01') }` will become:
//
// new(tree.Rule)('@color',
// new(tree.Value)([
// new(tree.Expression)([
// new(tree.Color)('#f01')
// ])
// ])
// )
//
if (typeof(variables) === 'object' && !Array.isArray(variables)) {
variables = Object.keys(variables).map(function (k) {
var value = variables[k];
// Coerce plain values into Rule(Value(Expression(...))) shape.
if (! (value instanceof tree.Value)) {
if (! (value instanceof tree.Expression)) {
value = new(tree.Expression)([value]);
}
value = new(tree.Value)([value]);
}
return new(tree.Rule)('@' + k, value, false, null, 0);
});
evalEnv.frames = [new(tree.Ruleset)(null, variables)];
}
try {
// Visitor pipeline: pre-eval plugins run on the raw tree, the rest
// (join selectors, process :extend, emit CSS) on the evaluated tree.
var preEvalVisitors = [],
visitors = [
new(tree.joinSelectorVisitor)(),
new(tree.processExtendsVisitor)(),
new(tree.toCSSVisitor)({compress: Boolean(options.compress)})
], i, root = this;
if (options.plugins) {
for(i =0; i < options.plugins.length; i++) {
if (options.plugins[i].isPreEvalVisitor) {
preEvalVisitors.push(options.plugins[i]);
} else {
if (options.plugins[i].isPreVisitor) {
visitors.splice(0, 0, options.plugins[i]);
} else {
visitors.push(options.plugins[i]);
}
}
}
}
for(i = 0; i < preEvalVisitors.length; i++) {
preEvalVisitors[i].run(root);
}
evaldRoot = evaluate.call(root, evalEnv);
for(i = 0; i < visitors.length; i++) {
visitors[i].run(evaldRoot);
}
if (options.sourceMap) {
evaldRoot = new tree.sourceMapOutput(
{
contentsIgnoredCharsMap: parser.imports.contentsIgnoredChars,
writeSourceMap: options.writeSourceMap,
rootNode: evaldRoot,
contentsMap: parser.imports.contents,
sourceMapFilename: options.sourceMapFilename,
sourceMapURL: options.sourceMapURL,
outputFilename: options.sourceMapOutputFilename,
sourceMapBasepath: options.sourceMapBasepath,
sourceMapRootpath: options.sourceMapRootpath,
outputSourceFiles: options.outputSourceFiles,
sourceMapGenerator: options.sourceMapGenerator
});
}
css = evaldRoot.toCSS({
compress: Boolean(options.compress),
dumpLineNumbers: env.dumpLineNumbers,
strictUnits: Boolean(options.strictUnits),
numPrecision: 8});
} catch (e) {
throw new(LessError)(e, env);
}
// Post-processing: clean-css (node only), or simple trim when compressing.
if (options.cleancss && less.mode === 'node') {
var CleanCSS = require('clean-css'),
cleancssOptions = options.cleancssOptions || {};
if (cleancssOptions.keepSpecialComments === undefined) {
cleancssOptions.keepSpecialComments = "*";
}
cleancssOptions.processImport = false;
cleancssOptions.noRebase = true;
if (cleancssOptions.noAdvanced === undefined) {
cleancssOptions.noAdvanced = true;
}
return new CleanCSS(cleancssOptions).minify(css);
} else if (options.compress) {
return css.replace(/(^(\s)+)|((\s)+$)/g, "");
} else {
return css;
}
};
})(root.eval);
// If `i` is smaller than the `input.length - 1`,
// it means the parser wasn't able to parse the whole
// string, so we've got a parsing error.
//
// We try to extract a \n delimited string,
// showing the line where the parse error occured.
// We split it up into two parts (the part which parsed,
// and the part which didn't), so we can color them differently.
if (i < input.length - 1) {
i = furthest;
var loc = getLocation(i, input);
lines = input.split('\n');
line = loc.line + 1;
error = {
type: "Parse",
message: "Unrecognised input",
index: i,
filename: env.currentFileInfo.filename,
line: line,
column: loc.column,
extract: [
lines[line - 2],
lines[line - 1],
lines[line]
]
};
}
var finish = function (e) {
    // Report the first error seen (an existing parse error wins over the
    // argument, which wins over import errors); otherwise hand the
    // finished root ruleset to the callback.
    e = error || e || parser.imports.error;
    if (!e) {
        return callback(null, root);
    }
    if (!(e instanceof LessError)) {
        e = new(LessError)(e, env);
    }
    return callback(e);
};
if (env.processImports !== false) {
new tree.importVisitor(this.imports, finish)
.run(root);
} else {
return finish();
}
},
//
// Here in, the parsing rules/functions
//
// The basic structure of the syntax tree generated is as follows:
//
// Ruleset -> Rule -> Value -> Expression -> Entity
//
// Here's some LESS code:
//
// .class {
// color: #fff;
// border: 1px solid #000;
// width: @w + 4px;
// > .child {...}
// }
//
// And here's what the parse tree might look like:
//
// Ruleset (Selector '.class', [
// Rule ("color", Value ([Expression [Color #fff]]))
// Rule ("border", Value ([Expression [Dimension 1px][Keyword "solid"][Color #000]]))
// Rule ("width", Value ([Expression [Operation "+" [Variable "@w"][Dimension 4px]]]))
// Ruleset (Selector [Element '>', '.child'], [...])
// ])
//
// In general, most rules will try to parse a token with the `$()` function, and if the return
// value is truly, will return a new node, of the relevant type. Sometimes, we need to check
// first, before parsing, that's when we use `peek()`.
//
parsers: parsers = {
//
// The `primary` rule is the *entry* and *exit* point of the parser.
// The rules here can appear at any level of the parse tree.
//
// The recursive nature of the grammar is an interplay between the `block`
// rule, which represents `{ ... }`, the `ruleset` rule, and this `primary` rule,
// as represented by this simplified grammar:
//
// primary โ (ruleset | rule)+
// ruleset โ selector+ block
// block โ '{' primary '}'
//
// Only at one point is the primary rule not called from the
// block rule: at the root level.
//
primary: function () {
// Parse a sequence of top-level nodes (extends, mixin definitions and
// calls, rules, rulesets, comments, ruleset calls, directives) until
// the input or the enclosing block ends.
var mixin = this.mixin, $re = _$re, root = [], node;
while (current)
{
node = this.extendRule() || mixin.definition() || this.rule() || this.ruleset() ||
mixin.call() || this.comment() || this.rulesetCall() || this.directive();
if (node) {
root.push(node);
} else {
// Nothing matched: swallow stray whitespace or semicolons, or stop.
if (!($re(/^[\s\n]+/) || $re(/^;+/))) {
break;
}
}
// A `}` ends the enclosing block; let the block rule consume it.
if (peekChar('}')) {
break;
}
}
return root;
},
// We create a Comment node for CSS comments `/* */`,
// but keep the LeSS comments `//` silent, by just skipping
// over them.
comment: function () {
// Parse a comment. `//` comments produce a silent Comment node
// (second arg true); `/* ... */` comments a regular one.
var comment;
if (input.charAt(i) !== '/') { return; }
if (input.charAt(i + 1) === '/') {
return new(tree.Comment)($re(/^\/\/.*/), true, i, env.currentFileInfo);
}
// Block comment, optionally swallowing one trailing newline.
comment = $re(/^\/\*(?:[^*]|\*+[^\/*])*\*+\/\n?/);
if (comment) {
return new(tree.Comment)(comment, false, i, env.currentFileInfo);
}
},
comments: function () {
var comment, comments = [];
while(true) {
comment = this.comment();
if (!comment) {
break;
}
comments.push(comment);
}
return comments;
},
//
// Entities are tokens which can be found inside an Expression
//
entities: {
//
// A string, which supports escaping " and '
//
// "milky way" 'he\'s the one!'
//
quoted: function () {
// A quoted string, e.g. "milky way" or 'he\'s the one!'.
// A leading `~` marks an escaped string (emitted without quotes).
var str, j = i, e, index = i;
if (input.charAt(j) === '~') { j++; e = true; } // Escaped strings
if (input.charAt(j) !== '"' && input.charAt(j) !== "'") { return; }
if (e) { $char('~'); }
// Group 1 matches double-quoted content, group 2 single-quoted.
str = $re(/^"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'/);
if (str) {
return new(tree.Quoted)(str[0], str[1] || str[2], e, index, env.currentFileInfo);
}
},
//
// A catch-all word, such as:
//
// black border-collapse
//
keyword: function () {
var k;
k = $re(/^%|^[_A-Za-z-][_A-Za-z0-9-]*/);
if (k) {
var color = tree.Color.fromKeyword(k);
if (color) {
return color;
}
return new(tree.Keyword)(k);
}
},
//
// A function call
//
// rgb(255, 0, 255)
//
// We also try to catch IE's `alpha()`, but let the `alpha` parser
// deal with the details.
//
// The arguments are parsed with the `entities.arguments` parser.
//
call: function () {
// A function call, e.g. rgb(255, 0, 255). `url(...)` is explicitly
// rejected here (handled by parsers.url), and IE's `alpha(...)` is
// delegated to the alpha parser.
var name, nameLC, args, alpha_ret, index = i;
// Peek the name on `current` without consuming, so we can bail out
// cheaply when there is no call here.
name = /^([\w-]+|%|progid:[\w\.]+)\(/.exec(current);
if (!name) { return; }
name = name[1];
nameLC = name.toLowerCase();
if (nameLC === 'url') {
return null;
}
i += name.length;
if (nameLC === 'alpha') {
alpha_ret = parsers.alpha();
if(typeof alpha_ret !== 'undefined') {
return alpha_ret;
}
}
$char('('); // Parse the '(' and consume whitespace.
args = this.arguments();
if (! $char(')')) {
return;
}
// NOTE(review): `name` is always truthy at this point (checked above);
// the guard is redundant but preserved.
if (name) { return new(tree.Call)(name, args, index, env.currentFileInfo); }
},
arguments: function () {
var args = [], arg;
while (true) {
arg = this.assignment() || parsers.expression();
if (!arg) {
break;
}
args.push(arg);
if (! $char(',')) {
break;
}
}
return args;
},
literal: function () {
return this.dimension() ||
this.color() ||
this.quoted() ||
this.unicodeDescriptor();
},
// Assignments are argument entities for calls.
// They are present in ie filter properties as shown below.
//
// filter: progid:DXImageTransform.Microsoft.Alpha( *opacity=50* )
//
assignment: function () {
// An `ident=value` argument entity, as used inside IE filter
// properties, e.g. progid:DXImageTransform.Microsoft.Alpha(opacity=50).
var key, value;
// Lookahead ensures an '=' (with at most one space) follows the key.
key = $re(/^\w+(?=\s?=)/i);
if (!key) {
return;
}
if (!$char('=')) {
return;
}
value = parsers.entity();
if (value) {
return new(tree.Assignment)(key, value);
}
},
//
// Parse url() tokens
//
// We use a specific rule for urls, because they don't really behave like
// standard function calls. The difference is that the argument doesn't have
// to be enclosed within a string, so it can't be parsed as an Expression.
//
url: function () {
// A url() token. The argument need not be a quoted string, so it
// can't go through the normal expression parser: bare content is
// matched raw (allowing escaped parens/quotes) and wrapped Anonymous.
var value;
if (input.charAt(i) !== 'u' || !$re(/^url\(/)) {
return;
}
value = this.quoted() || this.variable() ||
$re(/^(?:(?:\\[\(\)'"])|[^\(\)'"])+/) || "";
expectChar(')');
// Quoted/Variable nodes pass through; raw text becomes Anonymous.
return new(tree.URL)((value.value != null || value instanceof tree.Variable)
? value : new(tree.Anonymous)(value), env.currentFileInfo);
},
//
// A Variable entity, such as `@fink`, in
//
// width: @fink + 2px
//
// We use a different parser for variable definitions,
// see `parsers.variable`.
//
variable: function () {
var name, index = i;
if (input.charAt(i) === '@' && (name = $re(/^@@?[\w-]+/))) {
return new(tree.Variable)(name, index, env.currentFileInfo);
}
},
// A variable entity useing the protective {} e.g. @{var}
variableCurly: function () {
var curly, index = i;
if (input.charAt(i) === '@' && (curly = $re(/^@\{([\w-]+)\}/))) {
return new(tree.Variable)("@" + curly[1], index, env.currentFileInfo);
}
},
//
// A Hexadecimal color
//
// #4F3C2F
//
// `rgb` and `hsl` colors are parsed through the `entities.call` parser.
//
color: function () {
var rgb;
if (input.charAt(i) === '#' && (rgb = $re(/^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})/))) {
return new(tree.Color)(rgb[1]);
}
},
//
// A Dimension, that is, a number and a unit
//
// 0.5em 95%
//
dimension: function () {
// A number with an optional unit, e.g. 0.5em or 95%.
var value, c = input.charCodeAt(i);
//Is the first char of the dimension 0-9, '.', '+' or '-'
// (codes 43-57 minus 44 ',' and 47 '/'). Cheap bail-out before regex.
if ((c > 57 || c < 43) || c === 47 || c == 44) {
return;
}
value = $re(/^([+-]?\d*\.?\d+)(%|[a-z]+)?/);
if (value) {
return new(tree.Dimension)(value[1], value[2]);
}
},
//
// A unicode descriptor, as is used in unicode-range
//
// U+0?? or U+00A1-00A9
//
unicodeDescriptor: function () {
var ud;
ud = $re(/^U\+[0-9a-fA-F?]+(\-[0-9a-fA-F?]+)?/);
if (ud) {
return new(tree.UnicodeDescriptor)(ud[0]);
}
},
//
// JavaScript code to be evaluated
//
// `window.location.href`
//
javascript: function () {
// Backtick-quoted JavaScript to be evaluated, e.g. `window.location.href`.
// A leading `~` marks an escaped (unquoted-output) variant.
var str, j = i, e;
if (input.charAt(j) === '~') { j++; e = true; } // Escaped strings
if (input.charAt(j) !== '`') { return; }
// Honour env.javascriptEnabled === false (undefined means allowed).
if (env.javascriptEnabled !== undefined && !env.javascriptEnabled) {
error("You are using JavaScript, which has been disabled.");
}
if (e) { $char('~'); }
str = $re(/^`([^`]*)`/);
if (str) {
return new(tree.JavaScript)(str[1], i, e);
}
}
},
//
// The variable part of a variable definition. Used in the `rule` parser
//
// @fink:
//
variable: function () {
var name;
if (input.charAt(i) === '@' && (name = $re(/^(@[\w-]+)\s*:/))) { return name[1]; }
},
//
// The variable part of a variable definition. Used in the `rule` parser
//
// @fink();
//
rulesetCall: function () {
var name;
if (input.charAt(i) === '@' && (name = $re(/^(@[\w-]+)\s*\(\s*\)\s*;/))) {
return new tree.RulesetCall(name[1]);
}
},
//
// extend syntax - used to extend selectors
//
extend: function(isRule) {
// Parse `:extend(sel [all], ...)`; with isRule set, the rule form
// `&:extend(...)` which also requires a terminating `;`.
var elements, e, index = i, option, extendList, extend;
if (!(isRule ? $re(/^&:extend\(/) : $re(/^:extend\(/))) { return; }
do {
option = null;
elements = null;
// Gather selector elements until the optional trailing `all` flag.
while (! (option = $re(/^(all)(?=\s*(\)|,))/))) {
e = this.element();
if (!e) { break; }
if (elements) { elements.push(e); } else { elements = [ e ]; }
}
option = option && option[1];
extend = new(tree.Extend)(new(tree.Selector)(elements), option, index);
if (extendList) { extendList.push(extend); } else { extendList = [ extend ]; }
} while($char(","));
expect(/^\)/);
if (isRule) {
expect(/^;/);
}
return extendList;
},
//
// extendRule - used in a rule to extend all the parent selectors
//
extendRule: function() {
// The rule-position form of extend: `&:extend(...)` terminated by `;`.
return this.extend(true);
},
//
// Mixins
//
mixin: {
//
// A Mixin call, with an optional argument list
//
// #mixins > .square(#fff);
// .rounded(4px, black);
// .button;
//
// The `while` loop is there because mixins can be
// namespaced, but we only support the child and descendant
// selector for now.
//
call: function () {
// A mixin call, possibly namespaced (`#ns > .mixin(...)`), with an
// optional argument list and optional !important. The position is
// saved up front and restored if this turns out not to be a call.
var s = input.charAt(i), important = false, index = i, elemIndex,
elements, elem, e, c, args;
if (s !== '.' && s !== '#') { return; }
save(); // stop us absorbing part of an invalid selector
// Collect `.name`/`#name` elements joined by the `>` combinator.
while (true) {
elemIndex = i;
e = $re(/^[#.](?:[\w-]|\\(?:[A-Fa-f0-9]{1,6} ?|[^A-Fa-f0-9]))+/);
if (!e) {
break;
}
elem = new(tree.Element)(c, e, elemIndex, env.currentFileInfo);
if (elements) { elements.push(elem); } else { elements = [ elem ]; }
c = $char('>');
}
if (elements) {
if ($char('(')) {
args = this.args(true).args;
expectChar(')');
}
if (parsers.important()) {
important = true;
}
// Only commit when a proper terminator follows.
if (parsers.end()) {
forget();
return new(tree.mixin.Call)(elements, args, index, env.currentFileInfo, important);
}
}
restore();
},
args: function (isCall) {
// Parse a mixin argument list; isCall distinguishes call-site
// arguments from definition parameters. Returns { args, variadic }.
// Lists may be comma- OR semicolon-separated (never mixed), and
// entries may be named (`@name: value`) or variadic (`...`).
var parsers = parser.parsers, entities = parsers.entities,
returner = { args:null, variadic: false },
expressions = [], argsSemiColon = [], argsComma = [],
isSemiColonSeperated, expressionContainsNamed, name, nameLoop, value, arg;
save();
while (true) {
if (isCall) {
arg = parsers.detachedRuleset() || parsers.expression();
} else {
parsers.comments();
// Definition-side bare `...` — variadic with no name.
if (input.charAt(i) === '.' && $re(/^\.{3}/)) {
returner.variadic = true;
if ($char(";") && !isSemiColonSeperated) {
isSemiColonSeperated = true;
}
(isSemiColonSeperated ? argsSemiColon : argsComma)
.push({ variadic: true });
break;
}
arg = entities.variable() || entities.literal() || entities.keyword();
}
if (!arg) {
break;
}
nameLoop = null;
if (arg.throwAwayComments) {
arg.throwAwayComments();
}
value = arg;
var val = null;
if (isCall) {
// Variable
if (arg.value && arg.value.length == 1) {
val = arg.value[0];
}
} else {
val = arg;
}
if (val && val instanceof tree.Variable) {
if ($char(':')) {
// Named argument `@name: value`.
if (expressions.length > 0) {
if (isSemiColonSeperated) {
error("Cannot mix ; and , as delimiter types");
}
expressionContainsNamed = true;
}
// we do not support setting a ruleset as a default variable - it doesn't make sense
// However if we do want to add it, there is nothing blocking it, just don't error
// and remove isCall dependency below
value = (isCall && parsers.detachedRuleset()) || parsers.expression();
if (!value) {
if (isCall) {
error("could not understand value for named argument");
} else {
restore();
returner.args = [];
return returner;
}
}
nameLoop = (name = val.name);
} else if (!isCall && $re(/^\.{3}/)) {
// Named variadic parameter `@name...`.
returner.variadic = true;
if ($char(";") && !isSemiColonSeperated) {
isSemiColonSeperated = true;
}
(isSemiColonSeperated ? argsSemiColon : argsComma)
.push({ name: arg.name, variadic: true });
break;
} else if (!isCall) {
name = nameLoop = val.name;
value = null;
}
}
if (value) {
expressions.push(value);
}
argsComma.push({ name:nameLoop, value:value });
if ($char(',')) {
continue;
}
// A `;` switches (permanently) to semicolon-separated mode, where
// accumulated comma expressions collapse into a single Value.
if ($char(';') || isSemiColonSeperated) {
if (expressionContainsNamed) {
error("Cannot mix ; and , as delimiter types");
}
isSemiColonSeperated = true;
if (expressions.length > 1) {
value = new(tree.Value)(expressions);
}
argsSemiColon.push({ name:name, value:value });
name = null;
expressions = [];
expressionContainsNamed = false;
}
}
forget();
returner.args = isSemiColonSeperated ? argsSemiColon : argsComma;
return returner;
},
//
// A Mixin definition, with a list of parameters
//
// .rounded (@radius: 2px, @color) {
// ...
// }
//
// Until we have a finer grained state-machine, we have to
// do a look-ahead, to make sure we don't have a mixin call.
// See the `rule` function for more information.
//
// We start by matching `.rounded (`, and then proceed on to
// the argument list, which has optional default values.
// We store the parameters in `params`, with a `value` key,
// if there is a value, such as in the case of `@radius`.
//
// Once we've got our params list, and a closing `)`, we parse
// the `{...}` block.
//
definition: function () {
// A mixin definition: `.name (params) [when guard] { ... }`.
// Requires a look-ahead to distinguish from a mixin call (see the
// `rule` function); restores the position whenever parsing fails.
var name, params = [], match, ruleset, cond, variadic = false;
if ((input.charAt(i) !== '.' && input.charAt(i) !== '#') ||
peek(/^[^{]*\}/)) {
return;
}
save();
match = $re(/^([#.](?:[\w-]|\\(?:[A-Fa-f0-9]{1,6} ?|[^A-Fa-f0-9]))+)\s*\(/);
if (match) {
name = match[1];
var argInfo = this.args(false);
params = argInfo.args;
variadic = argInfo.variadic;
// .mixincall("@{a}");
// looks a bit like a mixin definition..
// also
// .mixincall(@a: {rule: set;});
// so we have to be nice and restore
if (!$char(')')) {
furthest = i;
restore();
return;
}
parsers.comments();
if ($re(/^when/)) { // Guard
cond = expect(parsers.conditions, 'expected condition');
}
ruleset = parsers.block();
if (ruleset) {
forget();
return new(tree.mixin.Definition)(name, params, ruleset, cond, variadic);
} else {
restore();
}
} else {
forget();
}
}
},
//
// Entities are the smallest recognized token,
// and can be found inside a rule's value.
//
entity: function () {
// The smallest recognised token found inside a rule's value,
// tried in rough order of likelihood.
var entities = this.entities;
return entities.literal() || entities.variable() || entities.url() ||
entities.call() || entities.keyword() || entities.javascript() ||
this.comment();
},
//
// A Rule terminator. Note that we use `peek()` to check for '}',
// because the `block` rule will be expecting it, but we still need to make sure
// it's there, if ';' was ommitted.
//
end: function () {
// A rule terminator: a consumed `;`, or a peeked `}` (which the
// enclosing block rule will consume itself).
return $char(';') || peekChar('}');
},
//
// IE's alpha function
//
// alpha(opacity=88)
//
alpha: function () {
// IE's legacy alpha function, e.g. alpha(opacity=88). Called from
// entities.call after the `alpha` name has already been consumed.
var value;
if (! $re(/^\(opacity=/i)) { return; }
value = $re(/^\d+/) || this.entities.variable();
if (value) {
expectChar(')');
return new(tree.Alpha)(value);
}
},
//
// A Selector Element
//
// div
// + h1
// #socks
// input[type="text"]
//
// Elements are the building blocks for Selectors,
// they are made out of a `Combinator` (see combinator rule),
// and an element name, such as a tag a class, or `*`.
//
element: function () {
// A selector element: an optional combinator followed by a tag,
// class, id, `*`, `&`, attribute, paren group, or interpolation.
var e, c, v, index = i;
c = this.combinator();
e = $re(/^(?:\d+\.\d+|\d+)%/) || $re(/^(?:[.#]?|:*)(?:[\w-]|[^\x00-\x9f]|\\(?:[A-Fa-f0-9]{1,6} ?|[^A-Fa-f0-9]))+/) ||
$char('*') || $char('&') || this.attribute() || $re(/^\([^()@]+\)/) || $re(/^[\.#](?=@)/) ||
this.entities.variableCurly();
if (! e) {
// Fallback: a parenthesised selector, e.g. `(.a .b)`.
save();
if ($char('(')) {
if ((v = this.selector()) && $char(')')) {
e = new(tree.Paren)(v);
forget();
} else {
restore();
}
} else {
forget();
}
}
if (e) { return new(tree.Element)(c, e, index, env.currentFileInfo); }
},
//
// Combinators combine elements together, in a Selector.
//
// Because our parser isn't white-space sensitive, special care
// has to be taken, when parsing the descendant combinator, ` `,
// as it's an empty space. We have to check the previous character
// in the input, to see if it's a ` ` character. More info on how
// we deal with this in *combinator.js*.
//
combinator: function () {
// The combinator joining two selector elements: one of > + ~ | ^ ^^,
// a descendant space (detected from the preceding character, since
// the parser is not whitespace-sensitive), or none.
var c = input.charAt(i);
if (c === '>' || c === '+' || c === '~' || c === '|' || c === '^') {
i++;
// `^^` is a distinct combinator.
if (input.charAt(i) === '^') {
c = '^^';
i++;
}
while (isWhitespace(input, i)) { i++; }
return new(tree.Combinator)(c);
} else if (isWhitespace(input, i - 1)) {
return new(tree.Combinator)(" ");
} else {
return new(tree.Combinator)(null);
}
},
//
// A CSS selector (see selector below)
// with less extensions e.g. the ability to extend and guard
//
lessSelector: function () {
// A selector allowing Less extensions (:extend and `when` guards).
return this.selector(true);
},
//
// A CSS Selector
//
// .class > div + h1
// li a:hover
//
// Selectors are made out of one or more Elements, see above.
//
selector: function (isLess) {
// A CSS selector: one or more elements; with isLess set, also allows
// trailing :extend(...) lists and a trailing `when` guard condition.
var index = i, $re = _$re, elements, extendList, c, e, extend, when, condition;
while ((isLess && (extend = this.extend())) || (isLess && (when = $re(/^when/))) || (e = this.element())) {
if (when) {
condition = expect(this.conditions, 'expected condition');
} else if (condition) {
error("CSS guard can only be used at the end of selector");
} else if (extend) {
if (extendList) { extendList.push(extend); } else { extendList = [ extend ]; }
} else {
if (extendList) { error("Extend can only be used at the end of selector"); }
c = input.charAt(i);
if (elements) { elements.push(e); } else { elements = [ e ]; }
e = null;
}
// A structural character ends the selector.
if (c === '{' || c === '}' || c === ';' || c === ',' || c === ')') {
break;
}
}
if (elements) { return new(tree.Selector)(elements, extendList, condition, index, env.currentFileInfo); }
if (extendList) { error("Extend must be used to extend a selector, it cannot be used on its own"); }
},
attribute: function () {
// An attribute selector, e.g. [type="text"] or [data-@{var}~=x].
if (! $char('[')) { return; }
var entities = this.entities,
key, val, op;
// Key: variable interpolation or a (possibly namespaced) identifier.
if (!(key = entities.variableCurly())) {
key = expect(/^(?:[_A-Za-z0-9-\*]*\|)?(?:[_A-Za-z0-9-]|\\.)+/);
}
op = $re(/^[|~*$^]?=/);
if (op) {
val = entities.quoted() || $re(/^[0-9]+%/) || $re(/^[\w-]+/) || entities.variableCurly();
}
expectChar(']');
return new(tree.Attribute)(key, op, val);
},
//
// The `block` rule is used by `ruleset` and `mixin.definition`.
// It's a wrapper around the `primary` rule, with added `{}`.
//
block: function () {
var content;
if ($char('{') && (content = this.primary()) && $char('}')) {
return content;
}
},
blockRuleset: function() {
var block = this.block();
if (block) {
block = new tree.Ruleset(null, block);
}
return block;
},
detachedRuleset: function() {
var blockRuleset = this.blockRuleset();
if (blockRuleset) {
return new tree.DetachedRuleset(blockRuleset);
}
},
//
// div, .class, body > p {...}
//
ruleset: function () {
// A ruleset: a comma-separated selector list followed by a block,
// e.g. `div, .class, body > p {...}`. Backtracks when no block follows.
var selectors, s, rules, debugInfo;
save();
if (env.dumpLineNumbers) {
debugInfo = getDebugInfo(i, input, env);
}
while (true) {
s = this.lessSelector();
if (!s) {
break;
}
if (selectors) { selectors.push(s); } else { selectors = [ s ]; }
this.comments();
// Guards are restricted to single-selector rulesets (checked both
// before and after the comma so either order errors out).
if (s.condition && selectors.length > 1) {
error("Guards are only currently allowed on a single selector.");
}
if (! $char(',')) { break; }
if (s.condition) {
error("Guards are only currently allowed on a single selector.");
}
this.comments();
}
if (selectors && (rules = this.block())) {
forget();
var ruleset = new(tree.Ruleset)(selectors, rules, env.strictImports);
if (env.dumpLineNumbers) {
ruleset.debugInfo = debugInfo;
}
return ruleset;
} else {
// Backtrack
furthest = i;
restore();
}
},
rule: function (tryAnonymous) {
// A declaration: `property: value` or `@var: value`. On failure with
// a parsed value, retries once with the anonymous/value order swapped
// (tryAnonymous flags the retry to prevent infinite recursion).
var name, value, startOfRule = i, c = input.charAt(startOfRule), important, merge, isVariable;
if (c === '.' || c === '#' || c === '&') { return; }
save();
name = this.variable() || this.ruleProperty();
if (name) {
// this.variable() yields a string name; ruleProperty() an array.
isVariable = typeof name === "string";
if (isVariable) {
value = this.detachedRuleset();
}
if (!value) {
// prefer to try to parse first if its a variable or we are compressing
// but always fallback on the other one
value = !tryAnonymous && (env.compress || isVariable) ?
(this.value() || this.anonymousValue()) :
(this.anonymousValue() || this.value());
important = this.important();
// a name returned by this.ruleProperty() is always an array of the form:
// [string-1, ..., string-n, ""] or [string-1, ..., string-n, "+"]
// where each item is a tree.Keyword or tree.Variable
merge = !isVariable && name.pop().value;
}
if (value && this.end()) {
forget();
return new (tree.Rule)(name, value, important, merge, startOfRule, env.currentFileInfo);
} else {
furthest = i;
restore();
if (value && !tryAnonymous) {
return this.rule(true);
}
}
} else {
forget();
}
},
anonymousValue: function () {
// Fast path for simple values: grab everything up to the next `;`
// that contains none of the characters needing real parsing.
var match;
match = /^([^@+\/'"*`(;{}-]*);/.exec(current);
if (match) {
// Advance to the `;` itself; end()/ $char(';') consumes it later.
i += match[0].length - 1;
return new(tree.Anonymous)(match[1]);
}
},
//
// An @import directive
//
// @import "lib";
//
// Depending on our environemnt, importing is done differently:
// In the browser, it's an XHR request, in Node, it would be a
// file-system operation. The function used for importing is
// stored in `import`, which we pass to the Import constructor.
//
"import": function () {
// An @import directive, e.g. `@import (options) "lib" media;`.
// Restores the position unless the full form (through `;`) parses.
var path, features, index = i;
save();
var dir = $re(/^@import?\s+/);
var options = (dir ? this.importOptions() : null) || {};
if (dir && (path = this.entities.quoted() || this.entities.url())) {
features = this.mediaFeatures();
if ($char(';')) {
forget();
features = features && new(tree.Value)(features);
return new(tree.Import)(path, features, options, index, env.currentFileInfo);
}
}
restore();
},
importOptions: function() {
// A parenthesised, comma-separated @import option list. Note the
// stored flags are inverted for two keywords: `css` → less:false,
// `once` → multiple:false.
var o, options = {}, optionName, value;
// list of options, surrounded by parens
if (! $char('(')) { return null; }
do {
o = this.importOption();
if (o) {
optionName = o;
value = true;
switch(optionName) {
case "css":
optionName = "less";
value = false;
break;
case "once":
optionName = "multiple";
value = false;
break;
}
options[optionName] = value;
if (! $char(',')) { break; }
}
} while (o);
expectChar(')');
return options;
},
importOption: function() {
var opt = $re(/^(less|css|multiple|once|inline|reference)/);
if (opt) {
return opt[1];
}
},
mediaFeature: function () {
// One media feature: a sequence of keywords/variables and/or
// parenthesised `(property: value)` terms, wrapped in an Expression.
var entities = this.entities, nodes = [], e, p;
do {
e = entities.keyword() || entities.variable();
if (e) {
nodes.push(e);
} else if ($char('(')) {
p = this.property();
e = this.value();
if ($char(')')) {
if (p && e) {
nodes.push(new(tree.Paren)(new(tree.Rule)(p, e, null, null, i, env.currentFileInfo, true)));
} else if (e) {
nodes.push(new(tree.Paren)(e));
} else {
return null;
}
} else { return null; }
}
} while (e);
if (nodes.length > 0) {
return new(tree.Expression)(nodes);
}
},
mediaFeatures: function () {
// A comma-separated list of media features (or bare variables);
// returns null when nothing matched.
var entities = this.entities, features = [], e;
do {
e = this.mediaFeature();
if (e) {
features.push(e);
if (! $char(',')) { break; }
} else {
e = entities.variable();
if (e) {
features.push(e);
if (! $char(',')) { break; }
}
}
} while (e);
return features.length > 0 ? features : null;
},
media: function () {
var features, rules, media, debugInfo;
if (env.dumpLineNumbers) {
debugInfo = getDebugInfo(i, input, env);
}
if ($re(/^@media/)) {
features = this.mediaFeatures();
rules = this.block();
if (rules) {
media = new(tree.Media)(rules, features, i, env.currentFileInfo);
if (env.dumpLineNumbers) {
media.debugInfo = debugInfo;
}
return media;
}
}
},
//
// A CSS Directive
//
//     @charset "utf-8";
//
// Generic handler for at-rules that do not have a dedicated parser.
// Classifies the directive into one of three shapes: identifier
// (@charset), expression (@namespace), or free-form-prelude-plus-block
// (@supports, @page, ...), then builds a Directive node.
directive: function () {
    var index = i, name, value, rules, nonVendorSpecificName,
        hasIdentifier, hasExpression, hasUnknown, hasBlock = true;
    if (input.charAt(i) !== '@') { return; }
    // @import and @media have dedicated parsers; try them first.
    value = this['import']() || this.media();
    if (value) {
        return value;
    }
    save();
    name = $re(/^@[a-z-]+/);
    // NOTE(review): this early return leaves the save() above without a
    // matching restore()/forget() — presumably the saver stack tolerates
    // it, but verify against the save/restore implementation.
    if (!name) { return; }
    // Strip a vendor prefix (e.g. @-webkit-keyframes -> @keyframes) so
    // the switch below matches the canonical directive name.
    nonVendorSpecificName = name;
    if (name.charAt(1) == '-' && name.indexOf('-', 2) > 0) {
        nonVendorSpecificName = "@" + name.slice(name.indexOf('-', 2) + 1);
    }
    switch(nonVendorSpecificName) {
        /*
        case "@font-face":
        case "@viewport":
        case "@top-left":
        case "@top-left-corner":
        case "@top-center":
        case "@top-right":
        case "@top-right-corner":
        case "@bottom-left":
        case "@bottom-left-corner":
        case "@bottom-center":
        case "@bottom-right":
        case "@bottom-right-corner":
        case "@left-top":
        case "@left-middle":
        case "@left-bottom":
        case "@right-top":
        case "@right-middle":
        case "@right-bottom":
            hasBlock = true;
            break;
        */
        case "@charset":
            hasIdentifier = true;
            hasBlock = false;
            break;
        case "@namespace":
            hasExpression = true;
            hasBlock = false;
            break;
        case "@keyframes":
            hasIdentifier = true;
            break;
        case "@host":
        case "@page":
        case "@document":
        case "@supports":
            hasUnknown = true;
            break;
    }
    if (hasIdentifier) {
        value = this.entity();
        if (!value) {
            error("expected " + name + " identifier");
        }
    } else if (hasExpression) {
        value = this.expression();
        if (!value) {
            error("expected " + name + " expression");
        }
    } else if (hasUnknown) {
        // Free-form prelude: everything up to '{' or ';' as-is.
        value = ($re(/^[^{;]+/) || '').trim();
        if (value) {
            value = new(tree.Anonymous)(value);
        }
    }
    if (hasBlock) {
        rules = this.blockRuleset();
    }
    // Accept either "name prelude { rules }" or "name value;".
    if (rules || (!hasBlock && value && $char(';'))) {
        forget();
        return new(tree.Directive)(name, value, rules, index, env.currentFileInfo,
            env.dumpLineNumbers ? getDebugInfo(index, input, env) : null);
    }
    restore();
},
//
// A Value is a comma-delimited list of Expressions
//
// font-family: Baskerville, Georgia, serif;
//
// In a Rule, a Value represents everything after the `:`,
// and before the `;`.
//
value: function () {
var e, expressions = [];
do {
e = this.expression();
if (e) {
expressions.push(e);
if (! $char(',')) { break; }
}
} while(e);
if (expressions.length > 0) {
return new(tree.Value)(expressions);
}
},
important: function () {
if (input.charAt(i) === '!') {
return $re(/^! *important/);
}
},
sub: function () {
var a, e;
if ($char('(')) {
a = this.addition();
if (a) {
e = new(tree.Expression)([a]);
expectChar(')');
e.parens = true;
return e;
}
}
},
// Parses a chain of '*' / '/' operations, left-associative. Returns
// the nested Operation tree, or the single operand when no operator
// follows.
multiplication: function () {
    var m, a, op, operation, isSpaced;
    m = this.operand();
    if (m) {
        isSpaced = isWhitespace(input, i - 1);
        while (true) {
            // Stop before "//" or "/*" so comments are not parsed as division.
            if (peek(/^\/[*\/]/)) {
                break;
            }
            op = $char('/') || $char('*');
            if (!op) { break; }
            a = this.operand();
            if (!a) { break; }
            // Mark both sides as operands inside an operation (affects
            // strict-math output elsewhere).
            m.parensInOp = true;
            a.parensInOp = true;
            operation = new(tree.Operation)(op, [operation || m, a], isSpaced);
            isSpaced = isWhitespace(input, i - 1);
        }
        return operation || m;
    }
},
// Parses a chain of '+' / '-' operations, left-associative, on top of
// multiplication(). Unspaced "-"/"+" are only treated as operators when
// the left side was not followed by whitespace (to distinguish from
// signed values).
addition: function () {
    var m, a, op, operation, isSpaced;
    m = this.multiplication();
    if (m) {
        isSpaced = isWhitespace(input, i - 1);
        while (true) {
            op = $re(/^[-+]\s+/) || (!isSpaced && ($char('+') || $char('-')));
            if (!op) {
                break;
            }
            a = this.multiplication();
            if (!a) {
                break;
            }
            m.parensInOp = true;
            a.parensInOp = true;
            operation = new(tree.Operation)(op, [operation || m, a], isSpaced);
            isSpaced = isWhitespace(input, i - 1);
        }
        return operation || m;
    }
},
// Parses a comma-separated ("or") list of guard conditions, e.g.
// `(a > 1), (b < 2)`. Returns a nested 'or' Condition tree, or the
// single condition.
conditions: function () {
    var a, b, index = i, condition;
    a = this.condition();
    if (a) {
        while (true) {
            // Only treat ',' as "or" when another condition follows.
            if (!peek(/^,\s*(not\s*)?\(/) || !$char(',')) {
                break;
            }
            b = this.condition();
            if (!b) {
                break;
            }
            condition = new(tree.Condition)('or', condition || a, b, index);
        }
        return condition || a;
    }
},
// Parses a single guard condition: `not? ( lhs [op rhs] ) [and ...]`.
// A bare `(value)` becomes `value = true`.
condition: function () {
    var entities = this.entities, index = i, negate = false,
        a, b, c, op;
    if ($re(/^not/)) { negate = true; }
    expectChar('(');
    a = this.addition() || entities.keyword() || entities.quoted();
    if (a) {
        op = $re(/^(?:>=|<=|=<|[<=>])/);
        if (op) {
            b = this.addition() || entities.keyword() || entities.quoted();
            if (b) {
                c = new(tree.Condition)(op, a, b, index, negate);
            } else {
                error('expected expression');
            }
        } else {
            // Truthiness check: `(@a)` is sugar for `(@a = true)`.
            c = new(tree.Condition)('=', a, new(tree.Keyword)('true'), index, negate);
        }
        expectChar(')');
        // NOTE(review): the 'and' Condition below is built without the
        // index/negate arguments passed elsewhere — confirm this is intended.
        return $re(/^and/) ? new(tree.Condition)('and', c, this.condition()) : c;
    }
},
//
// An operand is anything that can be part of an operation,
// such as a Color, or a Variable
//
// A leading '-' before '@' or '(' negates the operand.
operand: function () {
    var entities = this.entities,
        p = input.charAt(i + 1), negate;
    if (input.charAt(i) === '-' && (p === '@' || p === '(')) { negate = $char('-'); }
    var o = this.sub() || entities.dimension() ||
        entities.color() || entities.variable() ||
        entities.call();
    if (negate) {
        // NOTE(review): if the '-' was consumed but no operand parsed,
        // `o` is undefined here and `o.parensInOp` would throw — confirm
        // the grammar makes that unreachable.
        o.parensInOp = true;
        o = new(tree.Negative)(o);
    }
    return o;
},
//
// Expressions either represent mathematical operations,
// or white-space delimited Entities.
//
//     1px solid black
//     @var * 2
//
// Returns an Expression node over the collected entities, or
// undefined when nothing matched.
expression: function () {
    var entities = [], e, delim;
    do {
        e = this.addition() || this.entity();
        if (e) {
            entities.push(e);
            // operations do not allow keyword "/" dimension (e.g. small/20px) so we support that here
            if (!peek(/^\/[\/*]/)) {
                delim = $char('/');
                if (delim) {
                    // Keep the '/' as a literal token in the expression.
                    entities.push(new(tree.Anonymous)(delim));
                }
            }
        }
    } while (e);
    if (entities.length > 0) {
        return new(tree.Expression)(entities);
    }
},
property: function () {
var name = $re(/^(\*?-?[_a-zA-Z0-9-]+)\s*:/);
if (name) {
return name[1];
}
},
// Parses a rule property that may contain interpolated variables,
// e.g. `@{prefix}-width:`. Returns an array of Keyword/Variable
// particle nodes, or undefined when the cursor is not on a property.
ruleProperty: function () {
    var c = current, name = [], index = [], length = 0, s, k;
    // Match `re` against the local cursor string `c`, recording the
    // absolute input index of each captured particle.
    function match(re) {
        var a = re.exec(c);
        if (a) {
            index.push(i + length);
            length += a[0].length;
            c = c.slice(a[1].length);
            return name.push(a[1]);
        }
    }
    // Optional IE star hack, then word / @{interpolation} particles.
    match(/^(\*?)/);
    while (match(/^((?:[\w-]+)|(?:@\{[\w-]+\}))/)); // !
    if ((name.length > 1) && match(/^\s*((?:\+_|\+)?)\s*:/)) {
        // at last, we have the complete match now. move forward,
        // convert name particles to tree objects and return:
        skipWhitespace(length);
        // Drop the empty leading particle when there was no star hack.
        if (name[0] === '') {
            name.shift();
            index.shift();
        }
        for (k = 0; k < name.length; k++) {
            s = name[k];
            name[k] = (s.charAt(0) !== '@')
                ? new(tree.Keyword)(s)
                : new(tree.Variable)('@' + s.slice(2, -1),
                    index[k], env.currentFileInfo);
        }
        return name;
    }
}
}
};
return parser;
};
// Serializes a {name: value} map into Less variable declarations,
// e.g. {a: '1px'} -> "@a: 1px;". A leading '@' and a trailing ';'
// are added only when missing.
less.Parser.serializeVars = function (vars) {
    var out = '';
    for (var key in vars) {
        if (!Object.hasOwnProperty.call(vars, key)) {
            continue;
        }
        var val = vars[key];
        var needsAt = key[0] !== '@';
        var needsSemi = ('' + val).slice(-1) !== ';';
        out += (needsAt ? '@' : '') + key + ': ' + val + (needsSemi ? ';' : '');
    }
    return out;
};
(function (tree) {
tree.functions = {
rgb: function (r, g, b) {
return this.rgba(r, g, b, 1.0);
},
rgba: function (r, g, b, a) {
var rgb = [r, g, b].map(function (c) { return scaled(c, 255); });
a = number(a);
return new(tree.Color)(rgb, a);
},
hsl: function (h, s, l) {
return this.hsla(h, s, l, 1.0);
},
// hsla(h, s, l, a): converts HSL + alpha to a Color via rgba().
// Uses the standard CSS HSL->RGB conversion (m1/m2 interpolation).
hsla: function (h, s, l, a) {
    // Piecewise hue interpolation between m1 and m2.
    function hue(h) {
        h = h < 0 ? h + 1 : (h > 1 ? h - 1 : h);
        if (h * 6 < 1) { return m1 + (m2 - m1) * h * 6; }
        else if (h * 2 < 1) { return m2; }
        else if (h * 3 < 2) { return m1 + (m2 - m1) * (2/3 - h) * 6; }
        else { return m1; }
    }
    // Normalize: hue to [0,1) turns, s/l/a clamped to [0,1].
    h = (number(h) % 360) / 360;
    s = clamp(number(s)); l = clamp(number(l)); a = clamp(number(a));
    var m2 = l <= 0.5 ? l * (s + 1) : l + s - l * s;
    var m1 = l * 2 - m2;
    return this.rgba(hue(h + 1/3) * 255,
        hue(h) * 255,
        hue(h - 1/3) * 255,
        a);
},
// hsv(h, s, v): fully-opaque alias for hsva().
hsv: function(h, s, v) {
    return this.hsva(h, s, v, 1.0);
},
// hsva(h, s, v, a): converts HSV + alpha to a Color, selecting the
// RGB channel ordering per 60-degree hue sector.
hsva: function(h, s, v, a) {
    h = ((number(h) % 360) / 360) * 360;
    s = number(s); v = number(v); a = number(a);
    var i, f;
    i = Math.floor((h / 60) % 6);  // hue sector 0..5
    f = (h / 60) - i;              // position within the sector
    // Candidate channel values; perm picks which goes to r/g/b.
    var vs = [v,
        v * (1 - s),
        v * (1 - f * s),
        v * (1 - (1 - f) * s)];
    var perm = [[0, 3, 1],
        [2, 0, 1],
        [1, 0, 3],
        [1, 2, 0],
        [3, 1, 0],
        [0, 1, 2]];
    return this.rgba(vs[perm[i][0]] * 255,
        vs[perm[i][1]] * 255,
        vs[perm[i][2]] * 255,
        a);
},
// --- Channel accessors: each extracts one component of a color. ---
// Hue in degrees (rounded).
hue: function (color) {
    return new(tree.Dimension)(Math.round(color.toHSL().h));
},
// HSL saturation as a percentage.
saturation: function (color) {
    return new(tree.Dimension)(Math.round(color.toHSL().s * 100), '%');
},
// HSL lightness as a percentage.
lightness: function (color) {
    return new(tree.Dimension)(Math.round(color.toHSL().l * 100), '%');
},
// HSV hue in degrees (rounded).
hsvhue: function(color) {
    return new(tree.Dimension)(Math.round(color.toHSV().h));
},
// HSV saturation as a percentage.
hsvsaturation: function (color) {
    return new(tree.Dimension)(Math.round(color.toHSV().s * 100), '%');
},
// HSV value as a percentage.
hsvvalue: function (color) {
    return new(tree.Dimension)(Math.round(color.toHSV().v * 100), '%');
},
// Raw 0..255 channel values.
red: function (color) {
    return new(tree.Dimension)(color.rgb[0]);
},
green: function (color) {
    return new(tree.Dimension)(color.rgb[1]);
},
blue: function (color) {
    return new(tree.Dimension)(color.rgb[2]);
},
// Alpha channel, 0..1.
alpha: function (color) {
    return new(tree.Dimension)(color.toHSL().a);
},
// Perceptual luma, scaled by alpha, as a percentage.
luma: function (color) {
    return new(tree.Dimension)(Math.round(color.luma() * color.alpha * 100), '%');
},
// Relative luminance using the BT.709 channel coefficients
// (0.2126 / 0.7152 / 0.0722), scaled by alpha, as a percentage.
luminance: function (color) {
    var luminance =
        (0.2126 * color.rgb[0] / 255)
        + (0.7152 * color.rgb[1] / 255)
        + (0.0722 * color.rgb[2] / 255);
    return new(tree.Dimension)(Math.round(luminance * color.alpha * 100), '%');
},
// --- HSL adjusters: shift one HSL channel by `amount` percent and
// --- rebuild the color via the hsla() helper. ---
saturate: function (color, amount) {
    // filter: saturate(3.2);
    // should be kept as is, so check for color
    if (!color.rgb) {
        return null;
    }
    var hsl = color.toHSL();
    hsl.s += amount.value / 100;
    hsl.s = clamp(hsl.s);
    return hsla(hsl);
},
desaturate: function (color, amount) {
    var hsl = color.toHSL();
    hsl.s -= amount.value / 100;
    hsl.s = clamp(hsl.s);
    return hsla(hsl);
},
lighten: function (color, amount) {
    var hsl = color.toHSL();
    hsl.l += amount.value / 100;
    hsl.l = clamp(hsl.l);
    return hsla(hsl);
},
darken: function (color, amount) {
    var hsl = color.toHSL();
    hsl.l -= amount.value / 100;
    hsl.l = clamp(hsl.l);
    return hsla(hsl);
},
// fadein/fadeout adjust alpha relatively; fade sets it absolutely.
fadein: function (color, amount) {
    var hsl = color.toHSL();
    hsl.a += amount.value / 100;
    hsl.a = clamp(hsl.a);
    return hsla(hsl);
},
fadeout: function (color, amount) {
    var hsl = color.toHSL();
    hsl.a -= amount.value / 100;
    hsl.a = clamp(hsl.a);
    return hsla(hsl);
},
fade: function (color, amount) {
    var hsl = color.toHSL();
    hsl.a = amount.value / 100;
    hsl.a = clamp(hsl.a);
    return hsla(hsl);
},
// Rotates the hue by `amount` degrees, wrapping into [0, 360).
spin: function (color, amount) {
    var hsl = color.toHSL();
    var hue = (hsl.h + amount.value) % 360;
    hsl.h = hue < 0 ? 360 + hue : hue;
    return hsla(hsl);
},
//
// Copyright (c) 2006-2009 Hampton Catlin, Nathan Weizenbaum, and Chris Eppstein
// http://sass-lang.com
//
// mix(color1, color2, weight): weighted average of two colors.
// `weight` is the percentage of color1 (default 50); the per-channel
// weight is additionally corrected by the alpha difference, following
// the SASS algorithm credited above.
mix: function (color1, color2, weight) {
    if (!weight) {
        weight = new(tree.Dimension)(50);
    }
    var p = weight.value / 100.0;
    var w = p * 2 - 1;
    var a = color1.toHSL().a - color2.toHSL().a;
    // Alpha-compensated channel weight for color1; w2 for color2.
    var w1 = (((w * a == -1) ? w : (w + a) / (1 + w * a)) + 1) / 2.0;
    var w2 = 1 - w1;
    var rgb = [color1.rgb[0] * w1 + color2.rgb[0] * w2,
        color1.rgb[1] * w1 + color2.rgb[1] * w2,
        color1.rgb[2] * w1 + color2.rgb[2] * w2];
    // Alpha mixes linearly by the raw weight.
    var alpha = color1.alpha * p + color2.alpha * (1 - p);
    return new(tree.Color)(rgb, alpha);
},
// greyscale(color): full desaturation.
greyscale: function (color) {
    return this.desaturate(color, new(tree.Dimension)(100));
},
// contrast(color, dark, light, threshold): picks whichever of
// `light`/`dark` contrasts better with `color`, based on whether the
// color's luma falls below `threshold` (default 0.43).
contrast: function (color, dark, light, threshold) {
    // filter: contrast(3.2);
    // should be kept as is, so check for color
    if (!color.rgb) {
        return null;
    }
    if (typeof light === 'undefined') {
        light = this.rgba(255, 255, 255, 1.0);
    }
    if (typeof dark === 'undefined') {
        dark = this.rgba(0, 0, 0, 1.0);
    }
    //Figure out which is actually light and dark!
    if (dark.luma() > light.luma()) {
        var t = light;
        light = dark;
        dark = t;
    }
    if (typeof threshold === 'undefined') {
        threshold = 0.43;
    } else {
        threshold = number(threshold);
    }
    if (color.luma() < threshold) {
        return light;
    } else {
        return dark;
    }
},
// e(str): emits the string unquoted/unescaped (JavaScript results use
// their evaluated value).
e: function (str) {
    return new(tree.Anonymous)(str instanceof tree.JavaScript ? str.evaluated : str);
},
// escape(str): URI-encodes a string, additionally escaping the
// characters encodeURI leaves alone that are unsafe in URLs:
// = : # ; ( ).
escape: function (str) {
    return new(tree.Anonymous)(encodeURI(str.value).replace(/=/g, "%3D").replace(/:/g, "%3A").replace(/#/g, "%23").replace(/;/g, "%3B").replace(/\(/g, "%28").replace(/\)/g, "%29"));
},
// replace(string, pattern, replacement, flags): regex replace on a
// quoted string; the original quote character is preserved.
replace: function (string, pattern, replacement, flags) {
    var result = string.value;
    result = result.replace(new RegExp(pattern.value, flags ? flags.value : ''), replacement.value);
    return new(tree.Quoted)(string.quote || '', result, string.escaped);
},
// %(format, args...): printf-like formatting. %s inserts a node's raw
// value, %d/%a insert its CSS form; uppercase variants URI-encode the
// inserted value. %% emits a literal '%'.
'%': function (string /* arg, arg, ...*/) {
    var args = Array.prototype.slice.call(arguments, 1),
        result = string.value;
    for (var i = 0; i < args.length; i++) {
        /*jshint loopfunc:true */
        result = result.replace(/%[sda]/i, function(token) {
            var value = token.match(/s/i) ? args[i].value : args[i].toCSS();
            return token.match(/[A-Z]$/) ? encodeURIComponent(value) : value;
        });
    }
    result = result.replace(/%%/g, '%');
    return new(tree.Quoted)(string.quote || '', result, string.escaped);
},
// unit(val, unit): replaces (or strips, when `unit` is omitted) the
// unit of a Dimension without converting the value.
unit: function (val, unit) {
    if(!(val instanceof tree.Dimension)) {
        throw { type: "Argument", message: "the first argument to unit must be a number" + (val instanceof tree.Operation ? ". Have you forgotten parenthesis?" : "") };
    }
    if (unit) {
        if (unit instanceof tree.Keyword) {
            unit = unit.value;
        } else {
            unit = unit.toCSS();
        }
    } else {
        unit = "";
    }
    return new(tree.Dimension)(val.value, unit);
},
// convert(val, unit): unit-converts the value (cm->px etc.).
convert: function (val, unit) {
    return val.convertTo(unit.value);
},
// round(n, f): rounds to `f` decimal places (default 0).
// NOTE(review): toFixed returns a *string* which is handed to
// tree.Dimension — presumably its constructor coerces; verify.
round: function (n, f) {
    var fraction = typeof(f) === "undefined" ? 0 : f.value;
    return _math(function(num) { return num.toFixed(fraction); }, null, n);
},
pi: function () {
    return new(tree.Dimension)(Math.PI);
},
// mod(a, b): remainder, keeping a's unit.
mod: function(a, b) {
    return new(tree.Dimension)(a.value % b.value, a.unit);
},
// pow(x, y): exponentiation, keeping x's unit. Accepts plain numbers
// (used internally) or Dimensions.
pow: function(x, y) {
    if (typeof x === "number" && typeof y === "number") {
        x = new(tree.Dimension)(x);
        y = new(tree.Dimension)(y);
    } else if (!(x instanceof tree.Dimension) || !(y instanceof tree.Dimension)) {
        throw { type: "Argument", message: "arguments must be numbers" };
    }
    return new(tree.Dimension)(Math.pow(x.value, y.value), x.unit);
},
// Shared implementation for min()/max(). Keeps one "best" candidate
// per comparable unit group; when all arguments collapse to a single
// group the winner is returned, otherwise a CSS min()/max() call is
// emitted verbatim.
_minmax: function (isMin, args) {
    args = Array.prototype.slice.call(args);
    switch(args.length) {
        case 0: throw { type: "Argument", message: "one or more arguments required" };
    }
    var i, j, current, currentUnified, referenceUnified, unit, unitStatic, unitClone,
        order = [], // elems only contains original argument values.
        values = {}; // key is the unit.toString() for unified tree.Dimension values,
    // value is the index into the order array.
    for (i = 0; i < args.length; i++) {
        current = args[i];
        if (!(current instanceof tree.Dimension)) {
            // Flatten list arguments (e.g. a Value node) into the queue.
            if(Array.isArray(args[i].value)) {
                Array.prototype.push.apply(args, Array.prototype.slice.call(args[i].value));
            }
            continue;
        }
        // Unitless values borrow the first seen unit (unitClone) so
        // they can be compared against dimensioned ones.
        currentUnified = current.unit.toString() === "" && unitClone !== undefined ? new(tree.Dimension)(current.value, unitClone).unify() : current.unify();
        unit = currentUnified.unit.toString() === "" && unitStatic !== undefined ? unitStatic : currentUnified.unit.toString();
        unitStatic = unit !== "" && unitStatic === undefined || unit !== "" && order[0].unify().unit.toString() === "" ? unit : unitStatic;
        unitClone = unit !== "" && unitClone === undefined ? current.unit.toString() : unitClone;
        j = values[""] !== undefined && unit !== "" && unit === unitStatic ? values[""] : values[unit];
        if (j === undefined) {
            if(unitStatic !== undefined && unit !== unitStatic) {
                throw{ type: "Argument", message: "incompatible types" };
            }
            values[unit] = order.length;
            order.push(current);
            continue;
        }
        referenceUnified = order[j].unit.toString() === "" && unitClone !== undefined ? new(tree.Dimension)(order[j].value, unitClone).unify() : order[j].unify();
        if ( isMin && currentUnified.value < referenceUnified.value ||
            !isMin && currentUnified.value > referenceUnified.value) {
            order[j] = current;
        }
    }
    if (order.length == 1) {
        return order[0];
    }
    // Incomparable units: emit a literal CSS min()/max() expression.
    // NOTE(review): `this.env` inside the map callback refers to the
    // callback's own `this`, not the functions object — verify this
    // produces the intended env (it is likely undefined here).
    args = order.map(function (a) { return a.toCSS(this.env); }).join(this.env.compress ? "," : ", ");
    return new(tree.Anonymous)((isMin ? "min" : "max") + "(" + args + ")");
},
min: function () {
    return this._minmax(true, arguments);
},
max: function () {
    return this._minmax(false, arguments);
},
"get-unit": function (n) {
return new(tree.Anonymous)(n.unit);
},
argb: function (color) {
return new(tree.Anonymous)(color.toARGB());
},
percentage: function (n) {
return new(tree.Dimension)(n.value * 100, '%');
},
color: function (n) {
if (n instanceof tree.Quoted) {
var colorCandidate = n.value,
returnColor;
returnColor = tree.Color.fromKeyword(colorCandidate);
if (returnColor) {
return returnColor;
}
if (/^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})/.test(colorCandidate)) {
return new(tree.Color)(colorCandidate.slice(1));
}
throw { type: "Argument", message: "argument must be a color keyword or 3/6 digit hex e.g. #FFF" };
} else {
throw { type: "Argument", message: "argument must be a string" };
}
},
// --- Type/unit predicates: each returns tree.True or tree.False. ---
iscolor: function (n) {
    return this._isa(n, tree.Color);
},
isnumber: function (n) {
    return this._isa(n, tree.Dimension);
},
isstring: function (n) {
    return this._isa(n, tree.Quoted);
},
iskeyword: function (n) {
    return this._isa(n, tree.Keyword);
},
isurl: function (n) {
    return this._isa(n, tree.URL);
},
ispixel: function (n) {
    return this.isunit(n, 'px');
},
ispercentage: function (n) {
    return this.isunit(n, '%');
},
isem: function (n) {
    return this.isunit(n, 'em');
},
// isunit(n, unit): true when n is a Dimension with the given unit;
// `unit` may be a node (.value) or a raw string.
isunit: function (n, unit) {
    return (n instanceof tree.Dimension) && n.unit.is(unit.value || unit) ? tree.True : tree.False;
},
// Shared instanceof check behind the is*() predicates.
_isa: function (n, Type) {
    return (n instanceof Type) ? tree.True : tree.False;
},
// tint(color, amount): mix with white.
tint: function(color, amount) {
    return this.mix(this.rgb(255,255,255), color, amount);
},
// shade(color, amount): mix with black.
shade: function(color, amount) {
    return this.mix(this.rgb(0, 0, 0), color, amount);
},
// extract(values, index): 1-based list indexing; a non-list value is
// treated as a one-element list ([values][0] === values).
extract: function(values, index) {
    index = index.value - 1; // (1-based index)
    // handle non-array values as an array of length 1
    // return 'undefined' if index is invalid
    return Array.isArray(values.value)
        ? values.value[index] : Array(values)[index];
},
// length(values): number of list elements; 1 for a scalar.
length: function(values) {
    var n = Array.isArray(values.value) ? values.value.length : 1;
    return new tree.Dimension(n);
},
"data-uri": function(mimetypeNode, filePathNode) {
if (typeof window !== 'undefined') {
return new tree.URL(filePathNode || mimetypeNode, this.currentFileInfo).eval(this.env);
}
var mimetype = mimetypeNode.value;
var filePath = (filePathNode && filePathNode.value);
var fs = require('fs'),
path = require('path'),
useBase64 = false;
if (arguments.length < 2) {
filePath = mimetype;
}
if (this.env.isPathRelative(filePath)) {
if (this.currentFileInfo.relativeUrls) {
filePath = path.join(this.currentFileInfo.currentDirectory, filePath);
} else {
filePath = path.join(this.currentFileInfo.entryPath, filePath);
}
}
// detect the mimetype if not given
if (arguments.length < 2) {
var mime;
try {
mime = require('mime');
} catch (ex) {
mime = tree._mime;
}
mimetype = mime.lookup(filePath);
// use base 64 unless it's an ASCII or UTF-8 format
var charset = mime.charsets.lookup(mimetype);
useBase64 = ['US-ASCII', 'UTF-8'].indexOf(charset) < 0;
if (useBase64) { mimetype += ';base64'; }
}
else {
useBase64 = /;base64$/.test(mimetype);
}
var buf = fs.readFileSync(filePath);
// IE8 cannot handle a data-uri larger than 32KB. If this is exceeded
// and the --ieCompat flag is enabled, return a normal url() instead.
var DATA_URI_MAX_KB = 32,
fileSizeInKB = parseInt((buf.length / 1024), 10);
if (fileSizeInKB >= DATA_URI_MAX_KB) {
if (this.env.ieCompat !== false) {
if (!this.env.silent) {
console.warn("Skipped data-uri embedding of %s because its size (%dKB) exceeds IE8-safe %dKB!", filePath, fileSizeInKB, DATA_URI_MAX_KB);
}
return new tree.URL(filePathNode || mimetypeNode, this.currentFileInfo).eval(this.env);
}
}
buf = useBase64 ? buf.toString('base64')
: encodeURIComponent(buf);
var uri = "\"data:" + mimetype + ',' + buf + "\"";
return new(tree.URL)(new(tree.Anonymous)(uri));
},
"svg-gradient": function(direction) {
function throwArgumentDescriptor() {
throw { type: "Argument", message: "svg-gradient expects direction, start_color [start_position], [color position,]..., end_color [end_position]" };
}
if (arguments.length < 3) {
throwArgumentDescriptor();
}
var stops = Array.prototype.slice.call(arguments, 1),
gradientDirectionSvg,
gradientType = "linear",
rectangleDimension = 'x="0" y="0" width="1" height="1"',
useBase64 = true,
renderEnv = {compress: false},
returner,
directionValue = direction.toCSS(renderEnv),
i, color, position, positionValue, alpha;
switch (directionValue) {
case "to bottom":
gradientDirectionSvg = 'x1="0%" y1="0%" x2="0%" y2="100%"';
break;
case "to right":
gradientDirectionSvg = 'x1="0%" y1="0%" x2="100%" y2="0%"';
break;
case "to bottom right":
gradientDirectionSvg = 'x1="0%" y1="0%" x2="100%" y2="100%"';
break;
case "to top right":
gradientDirectionSvg = 'x1="0%" y1="100%" x2="100%" y2="0%"';
break;
case "ellipse":
case "ellipse at center":
gradientType = "radial";
gradientDirectionSvg = 'cx="50%" cy="50%" r="75%"';
rectangleDimension = 'x="-50" y="-50" width="101" height="101"';
break;
default:
throw { type: "Argument", message: "svg-gradient direction must be 'to bottom', 'to right', 'to bottom right', 'to top right' or 'ellipse at center'" };
}
returner = '<?xml version="1.0" ?>' +
'<svg xmlns="http://www.w3.org/2000/svg" version="1.1" width="100%" height="100%" viewBox="0 0 1 1" preserveAspectRatio="none">' +
'<' + gradientType + 'Gradient id="gradient" gradientUnits="userSpaceOnUse" ' + gradientDirectionSvg + '>';
for (i = 0; i < stops.length; i+= 1) {
if (stops[i].value) {
color = stops[i].value[0];
position = stops[i].value[1];
} else {
color = stops[i];
position = undefined;
}
if (!(color instanceof tree.Color) || (!((i === 0 || i+1 === stops.length) && position === undefined) && !(position instanceof tree.Dimension))) {
throwArgumentDescriptor();
}
positionValue = position ? position.toCSS(renderEnv) : i === 0 ? "0%" : "100%";
alpha = color.alpha;
returner += '<stop offset="' + positionValue + '" stop-color="' + color.toRGB() + '"' + (alpha < 1 ? ' stop-opacity="' + alpha + '"' : '') + '/>';
}
returner += '</' + gradientType + 'Gradient>' +
'<rect ' + rectangleDimension + ' fill="url(#gradient)" /></svg>';
if (useBase64) {
try {
returner = require('./encoder').encodeBase64(returner); // TODO browser implementation
} catch(e) {
useBase64 = false;
}
}
returner = "'data:image/svg+xml" + (useBase64 ? ";base64" : "") + "," + returner + "'";
return new(tree.URL)(new(tree.Anonymous)(returner));
}
};
// these static methods are used as a fallback when the optional 'mime' dependency is missing
tree._mime = {
    // this map is intentionally incomplete
    // if you want more, install 'mime' dep
    _types: {
        '.htm' : 'text/html',
        '.html': 'text/html',
        '.gif' : 'image/gif',
        '.jpg' : 'image/jpeg',
        '.jpeg': 'image/jpeg',
        '.png' : 'image/png'
    },
    // Maps a file path to a mimetype via its extension; throws when
    // the extension is not in the table above.
    lookup: function (filepath) {
        var ext = require('path').extname(filepath),
            type = tree._mime._types[ext];
        if (type === undefined) {
            throw new Error('Optional dependency "mime" is required for ' + ext);
        }
        return type;
    },
    charsets: {
        lookup: function (type) {
            // assumes all text types are UTF-8
            return type && (/^text\//).test(type) ? 'UTF-8' : '';
        }
    }
};
// Math
// Maps Less math function names onto the same-named Math.* functions.
// The value is the result unit: null = keep the argument's unit,
// "" = unitless result, "rad" = radians.
var mathFunctions = {
    // name,  unit
    ceil:  null,
    floor: null,
    sqrt:  null,
    abs:   null,
    tan:   "",
    sin:   "",
    cos:   "",
    atan:  "rad",
    asin:  "rad",
    acos:  "rad"
};
// Applies a numeric function `fn` to a Dimension `n`, producing a new
// Dimension in `unit` (or n's own unit when `unit` is null).
function _math(fn, unit, n) {
    if (!(n instanceof tree.Dimension)) {
        throw { type: "Argument", message: "argument must be a number" };
    }
    if (unit == null) {
        unit = n.unit;
    } else {
        // Normalize to canonical units before applying fn.
        n = n.unify();
    }
    return new(tree.Dimension)(fn(parseFloat(n.value)), unit);
}
// ~ End of Math
// Color Blending
// ref: http://www.w3.org/TR/compositing-1
// Applies a separable blend `mode` per channel, compositing with the
// alpha-aware formula from the spec referenced above.
function colorBlend(mode, color1, color2) {
    var ab = color1.alpha, cb, // backdrop
        as = color2.alpha, cs, // source
        ar, cr, r = [];        // result
    ar = as + ab * (1 - as);
    for (var i = 0; i < 3; i++) {
        cb = color1.rgb[i] / 255;
        cs = color2.rgb[i] / 255;
        cr = mode(cb, cs);
        if (ar) {
            cr = (as * cs + ab * (cb
                - as * (cb + cs - cr))) / ar;
        }
        r[i] = cr * 255;
    }
    return new(tree.Color)(r, ar);
}
// Per-channel blend formulas (cb = backdrop, cs = source), each
// operating on values in [0, 1].
var colorBlendMode = {
    multiply: function(cb, cs) {
        return cb * cs;
    },
    screen: function(cb, cs) {
        return cb + cs - cb * cs;
    },
    overlay: function(cb, cs) {
        cb *= 2;
        return (cb <= 1)
            ? colorBlendMode.multiply(cb, cs)
            : colorBlendMode.screen(cb - 1, cs);
    },
    softlight: function(cb, cs) {
        var d = 1, e = cb;
        if (cs > 0.5) {
            e = 1;
            d = (cb > 0.25) ? Math.sqrt(cb)
                : ((16 * cb - 12) * cb + 4) * cb;
        }
        return cb - (1 - 2 * cs) * e * (d - cb);
    },
    hardlight: function(cb, cs) {
        // overlay with the operands swapped, per the spec.
        return colorBlendMode.overlay(cs, cb);
    },
    difference: function(cb, cs) {
        return Math.abs(cb - cs);
    },
    exclusion: function(cb, cs) {
        return cb + cs - 2 * cb * cs;
    },
    // non-w3c functions:
    average: function(cb, cs) {
        return (cb + cs) / 2;
    },
    negation: function(cb, cs) {
        return 1 - Math.abs(cb + cs - 1);
    }
};
// ~ End of Color Blending
// Backing state for the default() mixin-guard function: eval() reads
// whatever value/error was staged via value()/error(), and reset()
// clears both.
tree.defaultFunc = {
    eval: function () {
        var v = this.value_, e = this.error_;
        if (e) {
            throw e;
        }
        // Loose != covers both null and undefined (unset state).
        if (v != null) {
            return v ? tree.True : tree.False;
        }
    },
    value: function (v) {
        this.value_ = v;
    },
    error: function (e) {
        this.error_ = e;
    },
    reset: function () {
        this.value_ = this.error_ = null;
    }
};
// Registers the generated function groups onto tree.functions:
// Math wrappers, color-blend wrappers, and default().
function initFunctions() {
    var f, tf = tree.functions;
    // math
    for (f in mathFunctions) {
        if (mathFunctions.hasOwnProperty(f)) {
            // Bind the matching Math.* function and its result unit.
            tf[f] = _math.bind(null, Math[f], mathFunctions[f]);
        }
    }
    // color blending
    for (f in colorBlendMode) {
        if (colorBlendMode.hasOwnProperty(f)) {
            tf[f] = colorBlend.bind(null, colorBlendMode[f]);
        }
    }
    // default
    f = tree.defaultFunc;
    tf["default"] = f.eval.bind(f);
} initFunctions();
// Convenience wrapper: rebuild a Color from an HSL(A) record.
function hsla(color) {
    return tree.functions.hsla(color.h, color.s, color.l, color.a);
}
// Scale a percentage Dimension onto [0, size]; anything else goes
// through number().
function scaled(n, size) {
    return (n instanceof tree.Dimension && n.unit.is('%'))
        ? parseFloat(n.value * size / 100)
        : number(n);
}
// Coerce a Dimension or plain number to a float; '%' dimensions map
// onto [0, 1].
function number(n) {
    if (n instanceof tree.Dimension) {
        return parseFloat(n.unit.is('%') ? n.value / 100 : n.value);
    }
    if (typeof (n) === 'number') {
        return n;
    }
    throw {
        error: "RuntimeError",
        message: "color functions take numbers as parameters"
    };
}
// Clamp a value into the closed interval [0, 1].
function clamp(val) {
    return Math.max(0, Math.min(1, val));
}
// Rounds `value` to env.numPrecision decimal places; passes it
// through unchanged when no precision is configured.
tree.fround = function(env, value) {
    var p;
    if (env && (env.numPrecision != null)) {
        p = Math.pow(10, env.numPrecision);
        return Math.round(value * p) / p;
    } else {
        return value;
    }
};
// Call context for built-in functions: carries env and file info,
// and exposes every tree.functions entry via the prototype chain.
tree.functionCall = function(env, currentFileInfo) {
    this.env = env;
    this.currentFileInfo = currentFileInfo;
};
tree.functionCall.prototype = tree.functions;
})(require('./tree'));
(function (tree) {
    // CSS named-color table: maps each color keyword to its 6-digit
    // hex value. Keys are lowercase; lookups are done elsewhere
    // (e.g. tree.Color.fromKeyword).
    tree.colors = {
        'aliceblue':'#f0f8ff',
        'antiquewhite':'#faebd7',
        'aqua':'#00ffff',
        'aquamarine':'#7fffd4',
        'azure':'#f0ffff',
        'beige':'#f5f5dc',
        'bisque':'#ffe4c4',
        'black':'#000000',
        'blanchedalmond':'#ffebcd',
        'blue':'#0000ff',
        'blueviolet':'#8a2be2',
        'brown':'#a52a2a',
        'burlywood':'#deb887',
        'cadetblue':'#5f9ea0',
        'chartreuse':'#7fff00',
        'chocolate':'#d2691e',
        'coral':'#ff7f50',
        'cornflowerblue':'#6495ed',
        'cornsilk':'#fff8dc',
        'crimson':'#dc143c',
        'cyan':'#00ffff',
        'darkblue':'#00008b',
        'darkcyan':'#008b8b',
        'darkgoldenrod':'#b8860b',
        'darkgray':'#a9a9a9',
        'darkgrey':'#a9a9a9',
        'darkgreen':'#006400',
        'darkkhaki':'#bdb76b',
        'darkmagenta':'#8b008b',
        'darkolivegreen':'#556b2f',
        'darkorange':'#ff8c00',
        'darkorchid':'#9932cc',
        'darkred':'#8b0000',
        'darksalmon':'#e9967a',
        'darkseagreen':'#8fbc8f',
        'darkslateblue':'#483d8b',
        'darkslategray':'#2f4f4f',
        'darkslategrey':'#2f4f4f',
        'darkturquoise':'#00ced1',
        'darkviolet':'#9400d3',
        'deeppink':'#ff1493',
        'deepskyblue':'#00bfff',
        'dimgray':'#696969',
        'dimgrey':'#696969',
        'dodgerblue':'#1e90ff',
        'firebrick':'#b22222',
        'floralwhite':'#fffaf0',
        'forestgreen':'#228b22',
        'fuchsia':'#ff00ff',
        'gainsboro':'#dcdcdc',
        'ghostwhite':'#f8f8ff',
        'gold':'#ffd700',
        'goldenrod':'#daa520',
        'gray':'#808080',
        'grey':'#808080',
        'green':'#008000',
        'greenyellow':'#adff2f',
        'honeydew':'#f0fff0',
        'hotpink':'#ff69b4',
        'indianred':'#cd5c5c',
        'indigo':'#4b0082',
        'ivory':'#fffff0',
        'khaki':'#f0e68c',
        'lavender':'#e6e6fa',
        'lavenderblush':'#fff0f5',
        'lawngreen':'#7cfc00',
        'lemonchiffon':'#fffacd',
        'lightblue':'#add8e6',
        'lightcoral':'#f08080',
        'lightcyan':'#e0ffff',
        'lightgoldenrodyellow':'#fafad2',
        'lightgray':'#d3d3d3',
        'lightgrey':'#d3d3d3',
        'lightgreen':'#90ee90',
        'lightpink':'#ffb6c1',
        'lightsalmon':'#ffa07a',
        'lightseagreen':'#20b2aa',
        'lightskyblue':'#87cefa',
        'lightslategray':'#778899',
        'lightslategrey':'#778899',
        'lightsteelblue':'#b0c4de',
        'lightyellow':'#ffffe0',
        'lime':'#00ff00',
        'limegreen':'#32cd32',
        'linen':'#faf0e6',
        'magenta':'#ff00ff',
        'maroon':'#800000',
        'mediumaquamarine':'#66cdaa',
        'mediumblue':'#0000cd',
        'mediumorchid':'#ba55d3',
        'mediumpurple':'#9370d8',
        'mediumseagreen':'#3cb371',
        'mediumslateblue':'#7b68ee',
        'mediumspringgreen':'#00fa9a',
        'mediumturquoise':'#48d1cc',
        'mediumvioletred':'#c71585',
        'midnightblue':'#191970',
        'mintcream':'#f5fffa',
        'mistyrose':'#ffe4e1',
        'moccasin':'#ffe4b5',
        'navajowhite':'#ffdead',
        'navy':'#000080',
        'oldlace':'#fdf5e6',
        'olive':'#808000',
        'olivedrab':'#6b8e23',
        'orange':'#ffa500',
        'orangered':'#ff4500',
        'orchid':'#da70d6',
        'palegoldenrod':'#eee8aa',
        'palegreen':'#98fb98',
        'paleturquoise':'#afeeee',
        'palevioletred':'#d87093',
        'papayawhip':'#ffefd5',
        'peachpuff':'#ffdab9',
        'peru':'#cd853f',
        'pink':'#ffc0cb',
        'plum':'#dda0dd',
        'powderblue':'#b0e0e6',
        'purple':'#800080',
        'red':'#ff0000',
        'rosybrown':'#bc8f8f',
        'royalblue':'#4169e1',
        'saddlebrown':'#8b4513',
        'salmon':'#fa8072',
        'sandybrown':'#f4a460',
        'seagreen':'#2e8b57',
        'seashell':'#fff5ee',
        'sienna':'#a0522d',
        'silver':'#c0c0c0',
        'skyblue':'#87ceeb',
        'slateblue':'#6a5acd',
        'slategray':'#708090',
        'slategrey':'#708090',
        'snow':'#fffafa',
        'springgreen':'#00ff7f',
        'steelblue':'#4682b4',
        'tan':'#d2b48c',
        'teal':'#008080',
        'thistle':'#d8bfd8',
        'tomato':'#ff6347',
        'turquoise':'#40e0d0',
        'violet':'#ee82ee',
        'wheat':'#f5deb3',
        'white':'#ffffff',
        'whitesmoke':'#f5f5f5',
        'yellow':'#ffff00',
        'yellowgreen':'#9acd32'
    };
})(require('./tree'));
(function (tree) {
    // Renders source-position debug info in the format(s) selected by
    // env.dumpLineNumbers: 'comments', 'mediaquery' (SASS-style), or
    // 'all' (both, joined by lineSeperator). Empty when compressing.
    tree.debugInfo = function(env, ctx, lineSeperator) {
        var result="";
        if (env.dumpLineNumbers && !env.compress) {
            switch(env.dumpLineNumbers) {
                case 'comments':
                    result = tree.debugInfo.asComment(ctx);
                    break;
                case 'mediaquery':
                    result = tree.debugInfo.asMediaQuery(ctx);
                    break;
                case 'all':
                    result = tree.debugInfo.asComment(ctx) + (lineSeperator || "") + tree.debugInfo.asMediaQuery(ctx);
                    break;
            }
        }
        return result;
    };
    // "/* line N, file */" comment form.
    tree.debugInfo.asComment = function(ctx) {
        return '/* line ' + ctx.debugInfo.lineNumber + ', ' + ctx.debugInfo.fileName + ' */\n';
    };
    // SASS-compatible "@media -sass-debug-info" form; special
    // characters in the file URL are backslash-escaped.
    tree.debugInfo.asMediaQuery = function(ctx) {
        return '@media -sass-debug-info{filename{font-family:' +
            ('file://' + ctx.debugInfo.fileName).replace(/([.:\/\\])/g, function (a) {
                if (a == '\\') {
                    a = '\/';
                }
                return '\\' + a;
            }) +
            '}line{font-family:\\00003' + ctx.debugInfo.lineNumber + '}}\n';
    };
    // Returns the first truthy result of fun over obj's elements,
    // or null.
    tree.find = function (obj, fun) {
        for (var i = 0, r; i < obj.length; i++) {
            r = fun.call(obj, obj[i]);
            if (r) { return r; }
        }
        return null;
    };
    // Serializes a node value for embedding into JavaScript: lists
    // become bracketed comma-joined CSS, scalars plain CSS.
    tree.jsify = function (obj) {
        if (Array.isArray(obj.value) && (obj.value.length > 1)) {
            return '[' + obj.value.map(function (v) { return v.toCSS(false); }).join(', ') + ']';
        } else {
            return obj.toCSS(false);
        }
    };
    // Shared toCSS implementation: runs genCSS against a simple
    // string-accumulating output object.
    tree.toCSS = function (env) {
        var strs = [];
        this.genCSS(env, {
            add: function(chunk, fileInfo, index) {
                strs.push(chunk);
            },
            isEmpty: function () {
                return strs.length === 0;
            }
        });
        return strs.join('');
    };
    // Emits "{ rules }" for a ruleset, honoring env.compress and
    // indenting by env.tabLevel in the pretty-printed form.
    tree.outputRuleset = function (env, output, rules) {
        var ruleCnt = rules.length, i;
        // `| 0` coerces an unset tabLevel to 0.
        env.tabLevel = (env.tabLevel | 0) + 1;
        // Compressed
        if (env.compress) {
            output.add('{');
            for (i = 0; i < ruleCnt; i++) {
                rules[i].genCSS(env, output);
            }
            output.add('}');
            env.tabLevel--;
            return;
        }
        // Non-compressed
        var tabSetStr = '\n' + Array(env.tabLevel).join("  "), tabRuleStr = tabSetStr + "  ";
        if (!ruleCnt) {
            output.add(" {" + tabSetStr + '}');
        } else {
            output.add(" {" + tabRuleStr);
            rules[0].genCSS(env, output);
            for (i = 1; i < ruleCnt; i++) {
                output.add(tabRuleStr);
                rules[i].genCSS(env, output);
            }
            output.add(tabSetStr + '}');
        }
        env.tabLevel--;
    };
})(require('./tree'));
(function (tree) {
    // IE-style `alpha(opacity=...)` filter value node.
    tree.Alpha = function (val) {
        this.value = val;
    };
    tree.Alpha.prototype = {
        type: "Alpha",
        // Let the visitor rewrite the wrapped value (it may be a variable
        // or expression node).
        accept: function (visitor) {
            this.value = visitor.visit(this.value);
        },
        // Evaluate the wrapped value when it is itself evaluable;
        // otherwise this node is already in its final form.
        eval: function (env) {
            return this.value.eval ? new tree.Alpha(this.value.eval(env)) : this;
        },
        // Emit `alpha(opacity=<value>)`; plain primitives are written
        // verbatim, nodes render themselves.
        genCSS: function (env, output) {
            output.add("alpha(opacity=");
            var v = this.value;
            if (v.genCSS) {
                v.genCSS(env, output);
            } else {
                output.add(v);
            }
            output.add(")");
        },
        toCSS: tree.toCSS
    };
})(require('../tree'));
(function (tree) {
    // A chunk of raw output text carried through the tree untouched.
    // Accepts either a plain string or another node, in which case that
    // node's .value is lifted out.
    tree.Anonymous = function (string, index, currentFileInfo, mapLines) {
        this.value = string.value || string;
        this.index = index;
        this.mapLines = mapLines;
        this.currentFileInfo = currentFileInfo;
    };
    tree.Anonymous.prototype = {
        type: "Anonymous",
        // Already fully evaluated; produce an equivalent copy.
        eval: function () {
            return new tree.Anonymous(this.value, this.index, this.currentFileInfo, this.mapLines);
        },
        // Order by rendered CSS text; anything that cannot render sorts
        // as "less than".
        compare: function (x) {
            if (!x.toCSS) {
                return -1;
            }
            var mine = this.toCSS(),
                theirs = x.toCSS();
            return mine === theirs ? 0 : (mine < theirs ? -1 : 1);
        },
        genCSS: function (env, output) {
            output.add(this.value, this.currentFileInfo, this.index, this.mapLines);
        },
        toCSS: tree.toCSS
    };
})(require('../tree'));
(function (tree) {
    // A `key=value` pair, e.g. inside IE filter() arguments.
    tree.Assignment = function (key, val) {
        this.key = key;
        this.value = val;
    };
    tree.Assignment.prototype = {
        type: "Assignment",
        accept: function (visitor) {
            this.value = visitor.visit(this.value);
        },
        // Evaluate the right-hand side when possible; otherwise this node
        // is already final.
        eval: function (env) {
            return this.value.eval
                ? new(tree.Assignment)(this.key, this.value.eval(env))
                : this;
        },
        // Emit `key=` followed by the rendered (or literal) value.
        genCSS: function (env, output) {
            output.add(this.key + '=');
            var val = this.value;
            if (val.genCSS) {
                val.genCSS(env, output);
            } else {
                output.add(val);
            }
        },
        toCSS: tree.toCSS
    };
})(require('../tree'));
(function (tree) {
    //
    // A function call node.
    //
    tree.Call = function (name, args, index, currentFileInfo) {
        this.name = name;
        this.args = args;
        this.index = index;
        this.currentFileInfo = currentFileInfo;
    };
    tree.Call.prototype = {
        type: "Call",
        // Let the visitor rewrite each argument node in place.
        accept: function (visitor) {
            if (this.args) {
                this.args = visitor.visitArray(this.args);
            }
        },
        //
        // When evaluating a function call,
        // we either find the function in `tree.functions` [1],
        // in which case we call it, passing the evaluated arguments,
        // if this returns null or we cannot find the function, we
        // simply print it out as it appeared originally [2].
        //
        // The *functions.js* file contains the built-in functions.
        //
        // The reason why we evaluate the arguments, is in the case where
        // we try to pass a variable to a function, like: `saturate(@color)`.
        // The function should receive the value, not the variable.
        //
        eval: function (env) {
            var args = this.args.map(function (a) { return a.eval(env); }),
                nameLC = this.name.toLowerCase(),
                result, func;
            if (nameLC in tree.functions) { // 1.
                try {
                    func = new tree.functionCall(env, this.currentFileInfo);
                    result = func[nameLC].apply(func, args);
                    // A null/undefined result means "fall through to CSS
                    // output" rather than an error.
                    if (result != null) {
                        return result;
                    }
                } catch (e) {
                    // Re-wrap with call-site context so the error reports
                    // the function name, index, and filename.
                    throw { type: e.type || "Runtime",
                        message: "error evaluating function `" + this.name + "`" +
                            (e.message ? ': ' + e.message : ''),
                        index: this.index, filename: this.currentFileInfo.filename };
                }
            }
            return new tree.Call(this.name, args, this.index, this.currentFileInfo);
        },
        // Emit `name(arg1, arg2, ...)` with comma-separated arguments.
        genCSS: function (env, output) {
            output.add(this.name + "(", this.currentFileInfo, this.index);
            for(var i = 0; i < this.args.length; i++) {
                this.args[i].genCSS(env, output);
                if (i + 1 < this.args.length) {
                    output.add(", ");
                }
            }
            output.add(")");
        },
        toCSS: tree.toCSS
    };
})(require('../tree'));
(function (tree) {
    //
    // RGB Colors - #ff0014, #eee
    //
    tree.Color = function (rgb, a) {
        //
        // The end goal here, is to parse the arguments
        // into an integer triplet, such as `128, 255, 0`
        //
        // This facilitates operations and conversions.
        //
        if (Array.isArray(rgb)) {
            this.rgb = rgb;
        } else if (rgb.length == 6) {
            // "rrggbb" hex string: split into byte pairs.
            this.rgb = rgb.match(/.{2}/g).map(function (c) {
                return parseInt(c, 16);
            });
        } else {
            // Shorthand "rgb" hex string: duplicate each digit ("e" -> "ee").
            this.rgb = rgb.split('').map(function (c) {
                return parseInt(c + c, 16);
            });
        }
        // Alpha defaults to fully opaque unless given as a number.
        this.alpha = typeof(a) === 'number' ? a : 1;
    };
    var transparentKeyword = "transparent";
    tree.Color.prototype = {
        type: "Color",
        eval: function () { return this; },
        // WCAG relative luminance: linearize each sRGB channel, then take
        // the standard weighted sum. Used by contrast-style functions.
        luma: function () {
            var r = this.rgb[0] / 255,
                g = this.rgb[1] / 255,
                b = this.rgb[2] / 255;
            r = (r <= 0.03928) ? r / 12.92 : Math.pow(((r + 0.055) / 1.055), 2.4);
            g = (g <= 0.03928) ? g / 12.92 : Math.pow(((g + 0.055) / 1.055), 2.4);
            b = (b <= 0.03928) ? b / 12.92 : Math.pow(((b + 0.055) / 1.055), 2.4);
            return 0.2126 * r + 0.7152 * g + 0.0722 * b;
        },
        genCSS: function (env, output) {
            output.add(this.toCSS(env));
        },
        toCSS: function (env, doNotCompress) {
            var compress = env && env.compress && !doNotCompress,
                alpha = tree.fround(env, this.alpha);
            // If we have some transparency, the only way to represent it
            // is via `rgba`. Otherwise, we use the hex representation,
            // which has better compatibility with older browsers.
            // Values are capped between `0` and `255`, rounded and zero-padded.
            if (alpha < 1) {
                // The literal keyword `transparent` round-trips as itself.
                if (alpha === 0 && this.isTransparentKeyword) {
                    return transparentKeyword;
                }
                return "rgba(" + this.rgb.map(function (c) {
                    return clamp(Math.round(c), 255);
                }).concat(clamp(alpha, 1))
                    .join(',' + (compress ? '' : ' ')) + ")";
            } else {
                var color = this.toRGB();
                if (compress) {
                    var splitcolor = color.split('');
                    // Convert color to short format
                    if (splitcolor[1] === splitcolor[2] && splitcolor[3] === splitcolor[4] && splitcolor[5] === splitcolor[6]) {
                        color = '#' + splitcolor[1] + splitcolor[3] + splitcolor[5];
                    }
                }
                return color;
            }
        },
        //
        // Operations have to be done per-channel, if not,
        // channels will spill onto each other. Once we have
        // our result, in the form of an integer triplet,
        // we create a new Color node to hold the result.
        //
        operate: function (env, op, other) {
            var rgb = [];
            // Source-over compositing for the alpha channel.
            var alpha = this.alpha * (1 - other.alpha) + other.alpha;
            for (var c = 0; c < 3; c++) {
                rgb[c] = tree.operate(env, op, this.rgb[c], other.rgb[c]);
            }
            return new(tree.Color)(rgb, alpha);
        },
        // "#rrggbb" hex string for this color (ignores alpha).
        toRGB: function () {
            return toHex(this.rgb);
        },
        // Convert to HSL; h in degrees [0,360), s/l/a in [0,1].
        toHSL: function () {
            var r = this.rgb[0] / 255,
                g = this.rgb[1] / 255,
                b = this.rgb[2] / 255,
                a = this.alpha;
            var max = Math.max(r, g, b), min = Math.min(r, g, b);
            var h, s, l = (max + min) / 2, d = max - min;
            if (max === min) {
                // Achromatic: hue is undefined, conventionally zero.
                h = s = 0;
            } else {
                s = l > 0.5 ? d / (2 - max - min) : d / (max + min);
                switch (max) {
                    case r: h = (g - b) / d + (g < b ? 6 : 0); break;
                    case g: h = (b - r) / d + 2; break;
                    case b: h = (r - g) / d + 4; break;
                }
                h /= 6;
            }
            return { h: h * 360, s: s, l: l, a: a };
        },
        //Adapted from http://mjijackson.com/2008/02/rgb-to-hsl-and-rgb-to-hsv-color-model-conversion-algorithms-in-javascript
        toHSV: function () {
            var r = this.rgb[0] / 255,
                g = this.rgb[1] / 255,
                b = this.rgb[2] / 255,
                a = this.alpha;
            var max = Math.max(r, g, b), min = Math.min(r, g, b);
            var h, s, v = max;
            var d = max - min;
            if (max === 0) {
                s = 0;
            } else {
                s = d / max;
            }
            if (max === min) {
                h = 0;
            } else {
                switch(max){
                    case r: h = (g - b) / d + (g < b ? 6 : 0); break;
                    case g: h = (b - r) / d + 2; break;
                    case b: h = (r - g) / d + 4; break;
                }
                h /= 6;
            }
            return { h: h * 360, s: s, v: v, a: a };
        },
        // "#aarrggbb" hex string (alpha scaled to 0-255 and prepended).
        toARGB: function () {
            return toHex([this.alpha * 255].concat(this.rgb));
        },
        // 0 when all channels and alpha are equal, -1 otherwise (also -1
        // when the other node is not a color).
        compare: function (x) {
            if (!x.rgb) {
                return -1;
            }
            return (x.rgb[0] === this.rgb[0] &&
                x.rgb[1] === this.rgb[1] &&
                x.rgb[2] === this.rgb[2] &&
                x.alpha === this.alpha) ? 0 : -1;
        }
    };
    // Build a Color from a named keyword ("blue", "transparent", ...).
    // Returns undefined for unknown keywords.
    tree.Color.fromKeyword = function(keyword) {
        keyword = keyword.toLowerCase();
        if (tree.colors.hasOwnProperty(keyword)) {
            // detect named color
            return new(tree.Color)(tree.colors[keyword].slice(1));
        }
        if (keyword === transparentKeyword) {
            var transparent = new(tree.Color)([0, 0, 0], 0);
            transparent.isTransparentKeyword = true;
            return transparent;
        }
    };
    // Render an array of channel values as a zero-padded hex string.
    function toHex(v) {
        return '#' + v.map(function (c) {
            c = clamp(Math.round(c), 255);
            return (c < 16 ? '0' : '') + c.toString(16);
        }).join('');
    }
    // Clamp v into the inclusive range [0, max].
    function clamp(v, max) {
        return Math.min(Math.max(v, 0), max);
    }
})(require('../tree'));
(function (tree) {
    // A CSS comment node. `silent` marks `//` line comments, which never
    // reach the CSS output; `/* ... */` comments may, depending on options.
    tree.Comment = function (value, silent, index, currentFileInfo) {
        this.value = value;
        this.silent = !!silent;
        // Fix: `index` was accepted but never stored, so genCSS passed
        // `undefined` to output.add and the comment's source position was
        // lost (every other node type stores its index).
        this.index = index;
        this.currentFileInfo = currentFileInfo;
    };
    tree.Comment.prototype = {
        type: "Comment",
        genCSS: function (env, output) {
            if (this.debugInfo) {
                output.add(tree.debugInfo(env, this), this.currentFileInfo, this.index);
            }
            output.add(this.value.trim()); //TODO shouldn't need to trim, we shouldn't grab the \n
        },
        toCSS: tree.toCSS,
        // A comment is suppressed when it is a `//` comment, when it comes
        // from a reference import that was never used, or when compressing
        // (unless it is a `/*!` "important" comment).
        isSilent: function(env) {
            var isReference = (this.currentFileInfo && this.currentFileInfo.reference && !this.isReferenced),
                isCompressed = env.compress && !this.value.match(/^\/\*!/);
            return this.silent || isReference || isCompressed;
        },
        eval: function () { return this; },
        markReferenced: function () {
            this.isReferenced = true;
        }
    };
})(require('../tree'));
(function (tree) {
    // A guard condition, e.g. `when (@a > 1)`; `negate` models `not (...)`.
    tree.Condition = function (op, l, r, i, negate) {
        this.op = op.trim();
        this.lvalue = l;
        this.rvalue = r;
        this.index = i;
        this.negate = negate;
    };
    tree.Condition.prototype = {
        type: "Condition",
        accept: function (visitor) {
            this.lvalue = visitor.visit(this.lvalue);
            this.rvalue = visitor.visit(this.rvalue);
        },
        // Evaluate both sides, then apply the operator. 'and'/'or' combine
        // the operand values by JS truthiness; comparison operators use the
        // nodes' compare() protocol (trying the left side first, then the
        // right). Unknown operators fall through and yield undefined.
        eval: function (env) {
            var a = this.lvalue.eval(env),
                b = this.rvalue.eval(env);
            var i = this.index, result;
            result = (function (op) {
                switch (op) {
                    case 'and':
                        return a && b;
                    case 'or':
                        return a || b;
                    default:
                        if (a.compare) {
                            result = a.compare(b);
                        } else if (b.compare) {
                            result = b.compare(a);
                        } else {
                            // Neither operand supports ordering.
                            throw { type: "Type",
                                message: "Unable to perform comparison",
                                index: i };
                        }
                        // Map the three-way compare result onto the operator.
                        switch (result) {
                            case -1: return op === '<' || op === '=<' || op === '<=';
                            case 0: return op === '=' || op === '>=' || op === '=<' || op === '<=';
                            case 1: return op === '>' || op === '>=';
                        }
                }
            })(this.op);
            return this.negate ? !result : result;
        }
    };
})(require('../tree'));
(function (tree) {
    // A ruleset detached from any selector, assignable to a variable and
    // invoked later; optionally closes over the frames it was declared in.
    tree.DetachedRuleset = function (ruleset, frames) {
        this.ruleset = ruleset;
        this.frames = frames;
    };
    tree.DetachedRuleset.prototype = {
        type: "DetachedRuleset",
        accept: function (visitor) {
            this.ruleset = visitor.visit(this.ruleset);
        },
        // Capture the current scope chain unless one was captured already.
        eval: function (env) {
            var capturedFrames = this.frames || env.frames.slice(0);
            return new tree.DetachedRuleset(this.ruleset, capturedFrames);
        },
        // Evaluate the stored ruleset, prepending any captured frames so
        // declaration-site scope wins over call-site scope.
        callEval: function (env) {
            var callEnv = this.frames
                ? new(tree.evalEnv)(env, this.frames.concat(env.frames))
                : env;
            return this.ruleset.eval(callEnv);
        }
    };
})(require('../tree'));
(function (tree) {
    //
    // A number with a unit
    //
    tree.Dimension = function (value, unit) {
        this.value = parseFloat(value);
        // Normalize `unit` to a tree.Unit instance (a bare string becomes
        // a single-numerator unit; absent means unitless).
        this.unit = (unit && unit instanceof tree.Unit) ? unit :
            new(tree.Unit)(unit ? [unit] : undefined);
    };
    tree.Dimension.prototype = {
        type: "Dimension",
        accept: function (visitor) {
            this.unit = visitor.visit(this.unit);
        },
        eval: function (env) {
            return this;
        },
        // Grey-scale color with all three channels set to this value.
        toColor: function () {
            return new(tree.Color)([this.value, this.value, this.value]);
        },
        genCSS: function (env, output) {
            if ((env && env.strictUnits) && !this.unit.isSingular()) {
                throw new Error("Multiple units in dimension. Correct the units or use the unit function. Bad unit: "+this.unit.toString());
            }
            var value = tree.fround(env, this.value),
                strValue = String(value);
            if (value !== 0 && value < 0.000001 && value > -0.000001) {
                // would be output 1e-6 etc.
                strValue = value.toFixed(20).replace(/0+$/, "");
            }
            if (env && env.compress) {
                // Zero values doesn't need a unit
                if (value === 0 && this.unit.isLength()) {
                    output.add(strValue);
                    return;
                }
                // Float values doesn't need a leading zero
                if (value > 0 && value < 1) {
                    strValue = (strValue).substr(1);
                }
            }
            output.add(strValue);
            this.unit.genCSS(env, output);
        },
        toCSS: tree.toCSS,
        // In an operation between two Dimensions,
        // we default to the first Dimension's unit,
        // so `1px + 2` will yield `3px`.
        operate: function (env, op, other) {
            /*jshint noempty:false */
            var value = tree.operate(env, op, this.value, other.value),
                unit = this.unit.clone();
            if (op === '+' || op === '-') {
                if (unit.numerator.length === 0 && unit.denominator.length === 0) {
                    // Left side is unitless: adopt the right side's unit.
                    unit.numerator = other.unit.numerator.slice(0);
                    unit.denominator = other.unit.denominator.slice(0);
                } else if (other.unit.numerator.length === 0 && unit.denominator.length === 0) {
                    // do nothing
                } else {
                    // Convert the right side into the left side's units;
                    // under strictUnits any remaining mismatch is an error.
                    other = other.convertTo(this.unit.usedUnits());
                    if(env.strictUnits && other.unit.toString() !== unit.toString()) {
                        throw new Error("Incompatible units. Change the units or use the unit function. Bad units: '" + unit.toString() +
                            "' and '" + other.unit.toString() + "'.");
                    }
                    value = tree.operate(env, op, this.value, other.value);
                }
            } else if (op === '*') {
                // Multiplication concatenates unit factors, then cancels.
                unit.numerator = unit.numerator.concat(other.unit.numerator).sort();
                unit.denominator = unit.denominator.concat(other.unit.denominator).sort();
                unit.cancel();
            } else if (op === '/') {
                // Division flips the right side's numerator/denominator.
                unit.numerator = unit.numerator.concat(other.unit.denominator).sort();
                unit.denominator = unit.denominator.concat(other.unit.numerator).sort();
                unit.cancel();
            }
            return new(tree.Dimension)(value, unit);
        },
        // Three-way compare (-1/0/1); returns -1 for non-Dimensions or
        // dimensions whose units cannot be unified.
        compare: function (other) {
            if (other instanceof tree.Dimension) {
                var a, b,
                    aValue, bValue;
                if (this.unit.isEmpty() || other.unit.isEmpty()) {
                    a = this;
                    b = other;
                } else {
                    a = this.unify();
                    b = other.unify();
                    if (a.unit.compare(b.unit) !== 0) {
                        return -1;
                    }
                }
                aValue = a.value;
                bValue = b.value;
                if (bValue > aValue) {
                    return -1;
                } else if (bValue < aValue) {
                    return 1;
                } else {
                    return 0;
                }
            } else {
                return -1;
            }
        },
        // Normalize to canonical units (px / s / rad) for comparison.
        unify: function () {
            return this.convertTo({ length: 'px', duration: 's', angle: 'rad' });
        },
        // Convert this dimension's units per `conversions`, either a map of
        // {group: targetUnit} or a single target unit string (whose group
        // is looked up in tree.UnitConversions).
        convertTo: function (conversions) {
            var value = this.value, unit = this.unit.clone(),
                i, groupName, group, targetUnit, derivedConversions = {}, applyUnit;
            if (typeof conversions === 'string') {
                for(i in tree.UnitConversions) {
                    if (tree.UnitConversions[i].hasOwnProperty(conversions)) {
                        derivedConversions = {};
                        derivedConversions[i] = conversions;
                    }
                }
                conversions = derivedConversions;
            }
            applyUnit = function (atomicUnit, denominator) {
                /*jshint loopfunc:true */
                if (group.hasOwnProperty(atomicUnit)) {
                    // Scale the value by the ratio of conversion factors;
                    // denominator units scale inversely.
                    if (denominator) {
                        value = value / (group[atomicUnit] / group[targetUnit]);
                    } else {
                        value = value * (group[atomicUnit] / group[targetUnit]);
                    }
                    return targetUnit;
                }
                return atomicUnit;
            };
            for (groupName in conversions) {
                if (conversions.hasOwnProperty(groupName)) {
                    targetUnit = conversions[groupName];
                    group = tree.UnitConversions[groupName];
                    unit.map(applyUnit);
                }
            }
            unit.cancel();
            return new(tree.Dimension)(value, unit);
        }
    };
    // http://www.w3.org/TR/css3-values/#absolute-lengths
    // Conversion factors to each group's base unit (m / s / turn-fraction).
    tree.UnitConversions = {
        length: {
            'm': 1,
            'cm': 0.01,
            'mm': 0.001,
            'in': 0.0254,
            'px': 0.0254 / 96,
            'pt': 0.0254 / 72,
            'pc': 0.0254 / 72 * 12
        },
        duration: {
            's': 1,
            'ms': 0.001
        },
        angle: {
            'rad': 1/(2*Math.PI),
            'deg': 1/360,
            'grad': 1/400,
            'turn': 1
        }
    };
    // A compound unit: sorted lists of numerator and denominator atomic
    // units, plus an optional backup unit remembered after full cancellation.
    tree.Unit = function (numerator, denominator, backupUnit) {
        this.numerator = numerator ? numerator.slice(0).sort() : [];
        this.denominator = denominator ? denominator.slice(0).sort() : [];
        this.backupUnit = backupUnit;
    };
    tree.Unit.prototype = {
        type: "Unit",
        clone: function () {
            return new tree.Unit(this.numerator.slice(0), this.denominator.slice(0), this.backupUnit);
        },
        // Only a single unit name is ever emitted to CSS; the backup unit
        // is used when everything cancelled out (unless strictUnits).
        genCSS: function (env, output) {
            if (this.numerator.length >= 1) {
                output.add(this.numerator[0]);
            } else
            if (this.denominator.length >= 1) {
                output.add(this.denominator[0]);
            } else
            if ((!env || !env.strictUnits) && this.backupUnit) {
                output.add(this.backupUnit);
            }
        },
        toCSS: tree.toCSS,
        // Canonical "a*b/c" form used for equality checks.
        toString: function () {
            var i, returnStr = this.numerator.join("*");
            for (i = 0; i < this.denominator.length; i++) {
                returnStr += "/" + this.denominator[i];
            }
            return returnStr;
        },
        compare: function (other) {
            return this.is(other.toString()) ? 0 : -1;
        },
        is: function (unitString) {
            return this.toString() === unitString;
        },
        isLength: function () {
            return Boolean(this.toCSS().match(/px|em|%|in|cm|mm|pc|pt|ex/));
        },
        isEmpty: function () {
            return this.numerator.length === 0 && this.denominator.length === 0;
        },
        isSingular: function() {
            return this.numerator.length <= 1 && this.denominator.length === 0;
        },
        // Apply callback to every atomic unit; the second argument tells
        // the callback whether it is a denominator unit.
        map: function(callback) {
            var i;
            for (i = 0; i < this.numerator.length; i++) {
                this.numerator[i] = callback(this.numerator[i], false);
            }
            for (i = 0; i < this.denominator.length; i++) {
                this.denominator[i] = callback(this.denominator[i], true);
            }
        },
        // Map of {groupName: first atomic unit of that group found here}.
        usedUnits: function() {
            var group, result = {}, mapUnit;
            mapUnit = function (atomicUnit) {
                /*jshint loopfunc:true */
                if (group.hasOwnProperty(atomicUnit) && !result[groupName]) {
                    result[groupName] = atomicUnit;
                }
                return atomicUnit;
            };
            for (var groupName in tree.UnitConversions) {
                if (tree.UnitConversions.hasOwnProperty(groupName)) {
                    group = tree.UnitConversions[groupName];
                    this.map(mapUnit);
                }
            }
            return result;
        },
        // Cancel matching numerator/denominator units (px/px -> unitless),
        // remembering the first unit seen as a backup for output.
        cancel: function () {
            var counter = {}, atomicUnit, i, backup;
            for (i = 0; i < this.numerator.length; i++) {
                atomicUnit = this.numerator[i];
                if (!backup) {
                    backup = atomicUnit;
                }
                counter[atomicUnit] = (counter[atomicUnit] || 0) + 1;
            }
            for (i = 0; i < this.denominator.length; i++) {
                atomicUnit = this.denominator[i];
                if (!backup) {
                    backup = atomicUnit;
                }
                counter[atomicUnit] = (counter[atomicUnit] || 0) - 1;
            }
            this.numerator = [];
            this.denominator = [];
            // Rebuild from the net counts: positive -> numerator,
            // negative -> denominator.
            for (atomicUnit in counter) {
                if (counter.hasOwnProperty(atomicUnit)) {
                    var count = counter[atomicUnit];
                    if (count > 0) {
                        for (i = 0; i < count; i++) {
                            this.numerator.push(atomicUnit);
                        }
                    } else if (count < 0) {
                        for (i = 0; i < -count; i++) {
                            this.denominator.push(atomicUnit);
                        }
                    }
                }
            }
            if (this.numerator.length === 0 && this.denominator.length === 0 && backup) {
                this.backupUnit = backup;
            }
            this.numerator.sort();
            this.denominator.sort();
        }
    };
})(require('../tree'));
(function (tree) {
    // An @-rule directive such as `@charset` or `@font-face`, optionally
    // carrying a value and/or a block of rules.
    tree.Directive = function (name, value, rules, index, currentFileInfo, debugInfo) {
        this.name = name;
        this.value = value;
        if (rules) {
            this.rules = rules;
            this.rules.allowImports = true;
        }
        this.index = index;
        this.currentFileInfo = currentFileInfo;
        this.debugInfo = debugInfo;
    };
    tree.Directive.prototype = {
        type: "Directive",
        accept: function (visitor) {
            var value = this.value, rules = this.rules;
            // Fix: the visited results were previously assigned only to
            // locals and discarded, so visitors could never rewrite this
            // node's children (every sibling node type stores them back).
            if (rules) {
                this.rules = visitor.visit(rules);
            }
            if (value) {
                this.value = visitor.visit(value);
            }
        },
        // Emit `@name [value] { rules }` or `@name [value];` when bodyless.
        genCSS: function (env, output) {
            var value = this.value, rules = this.rules;
            output.add(this.name, this.currentFileInfo, this.index);
            if (value) {
                output.add(' ');
                value.genCSS(env, output);
            }
            if (rules) {
                tree.outputRuleset(env, output, [rules]);
            } else {
                output.add(';');
            }
        },
        toCSS: tree.toCSS,
        eval: function (env) {
            var value = this.value, rules = this.rules;
            if (value) {
                value = value.eval(env);
            }
            if (rules) {
                rules = rules.eval(env);
                rules.root = true;
            }
            return new(tree.Directive)(this.name, value, rules,
                this.index, this.currentFileInfo, this.debugInfo);
        },
        // Ruleset-style lookups are delegated to the inner ruleset, if any.
        variable: function (name) { if (this.rules) return tree.Ruleset.prototype.variable.call(this.rules, name); },
        find: function () { if (this.rules) return tree.Ruleset.prototype.find.apply(this.rules, arguments); },
        rulesets: function () { if (this.rules) return tree.Ruleset.prototype.rulesets.apply(this.rules); },
        markReferenced: function () {
            var i, rules;
            this.isReferenced = true;
            if (this.rules) {
                rules = this.rules.rules;
                for (i = 0; i < rules.length; i++) {
                    if (rules[i].markReferenced) {
                        rules[i].markReferenced();
                    }
                }
            }
        }
    };
})(require('../tree'));
(function (tree) {
    // One piece of a selector: a combinator plus a value (tag, class,
    // attribute node, etc.). Strings are trimmed; absent values become "".
    tree.Element = function (combinator, value, index, currentFileInfo) {
        this.combinator = combinator instanceof tree.Combinator ?
            combinator : new(tree.Combinator)(combinator);
        if (typeof(value) === 'string') {
            this.value = value.trim();
        } else if (value) {
            this.value = value;
        } else {
            this.value = "";
        }
        this.index = index;
        this.currentFileInfo = currentFileInfo;
    };
    tree.Element.prototype = {
        type: "Element",
        accept: function (visitor) {
            var value = this.value;
            this.combinator = visitor.visit(this.combinator);
            // Plain-string values are not nodes and cannot be visited.
            if (typeof value === "object") {
                this.value = visitor.visit(value);
            }
        },
        eval: function (env) {
            return new(tree.Element)(this.combinator,
                this.value.eval ? this.value.eval(env) : this.value,
                this.index,
                this.currentFileInfo);
        },
        genCSS: function (env, output) {
            output.add(this.toCSS(env), this.currentFileInfo, this.index);
        },
        toCSS: function (env) {
            var value = (this.value.toCSS ? this.value.toCSS(env) : this.value);
            // A bare `&` with no value renders nothing at all.
            if (value === '' && this.combinator.value.charAt(0) === '&') {
                return '';
            } else {
                return this.combinator.toCSS(env || {}) + value;
            }
        }
    };
    // An attribute selector, e.g. `[href^="http"]`.
    tree.Attribute = function (key, op, value) {
        this.key = key;
        this.op = op;
        this.value = value;
    };
    tree.Attribute.prototype = {
        type: "Attribute",
        eval: function (env) {
            return new(tree.Attribute)(this.key.eval ? this.key.eval(env) : this.key,
                this.op, (this.value && this.value.eval) ? this.value.eval(env) : this.value);
        },
        genCSS: function (env, output) {
            output.add(this.toCSS(env));
        },
        toCSS: function (env) {
            var value = this.key.toCSS ? this.key.toCSS(env) : this.key;
            // `op` and `value` are only present for `[key op value]` forms;
            // a bare `[key]` has neither.
            if (this.op) {
                value += this.op;
                value += (this.value.toCSS ? this.value.toCSS(env) : this.value);
            }
            return '[' + value + ']';
        }
    };
    // A selector combinator (descendant, `>`, `+`, `~`, ...). A single
    // space is preserved as-is; everything else is trimmed.
    tree.Combinator = function (value) {
        if (value === ' ') {
            this.value = ' ';
        } else {
            this.value = value ? value.trim() : "";
        }
    };
    tree.Combinator.prototype = {
        type: "Combinator",
        // Output spelling for each combinator in pretty-printed mode.
        _outputMap: {
            ''  : '',
            ' ' : ' ',
            ':' : ' :',
            '+' : ' + ',
            '~' : ' ~ ',
            '>' : ' > ',
            '|' : '|',
            '^' : ' ^ ',
            '^^' : ' ^^ '
        },
        // Same, with surrounding whitespace dropped for compressed output.
        _outputMapCompressed: {
            ''  : '',
            ' ' : ' ',
            ':' : ' :',
            '+' : '+',
            '~' : '~',
            '>' : '>',
            '|' : '|',
            '^' : '^',
            '^^' : '^^'
        },
        genCSS: function (env, output) {
            output.add((env.compress ? this._outputMapCompressed : this._outputMap)[this.value]);
        },
        toCSS: tree.toCSS
    };
})(require('../tree'));
(function (tree) {
    // A space-separated list of values, e.g. `1px solid black`.
    tree.Expression = function (value) { this.value = value; };
    tree.Expression.prototype = {
        type: "Expression",
        accept: function (visitor) {
            if (this.value) {
                this.value = visitor.visitArray(this.value);
            }
        },
        // Evaluate each member. Parenthesis bookkeeping: `parens` without
        // `parensInOp` toggles the env's in-parenthesis state (affecting
        // math mode); `parens` with `parensInOp` re-wraps the result in a
        // Paren node when math is off, unless the single child was itself
        // already parenthesized (doubleParen).
        eval: function (env) {
            var returnValue,
                inParenthesis = this.parens && !this.parensInOp,
                doubleParen = false;
            if (inParenthesis) {
                env.inParenthesis();
            }
            if (this.value.length > 1) {
                returnValue = new(tree.Expression)(this.value.map(function (e) {
                    return e.eval(env);
                }));
            } else if (this.value.length === 1) {
                if (this.value[0].parens && !this.value[0].parensInOp) {
                    doubleParen = true;
                }
                returnValue = this.value[0].eval(env);
            } else {
                // Empty expression evaluates to itself.
                returnValue = this;
            }
            if (inParenthesis) {
                env.outOfParenthesis();
            }
            if (this.parens && this.parensInOp && !(env.isMathOn()) && !doubleParen) {
                returnValue = new(tree.Paren)(returnValue);
            }
            return returnValue;
        },
        // Members are rendered separated by single spaces.
        genCSS: function (env, output) {
            for(var i = 0; i < this.value.length; i++) {
                this.value[i].genCSS(env, output);
                if (i + 1 < this.value.length) {
                    output.add(" ");
                }
            }
        },
        toCSS: tree.toCSS,
        // Drop comment nodes picked up during parsing.
        throwAwayComments: function () {
            this.value = this.value.filter(function(v) {
                return !(v instanceof tree.Comment);
            });
        }
    };
})(require('../tree'));
(function (tree) {
    // An `:extend(selector [all])` node. Each instance gets a unique
    // object_id; parent_ids tracks chains created by the extend visitor.
    tree.Extend = function Extend(selector, option, index) {
        this.selector = selector;
        this.option = option;
        this.index = index;
        this.object_id = tree.Extend.next_id++;
        this.parent_ids = [this.object_id];
        switch(option) {
            case "all":
                // "all" allows matching in the middle of a larger selector.
                this.allowBefore = true;
                this.allowAfter = true;
                break;
            default:
                this.allowBefore = false;
                this.allowAfter = false;
                break;
        }
    };
    // Monotonic counter backing object_id allocation.
    tree.Extend.next_id = 0;
    tree.Extend.prototype = {
        type: "Extend",
        accept: function (visitor) {
            this.selector = visitor.visit(this.selector);
        },
        eval: function (env) {
            return new(tree.Extend)(this.selector.eval(env), this.option, this.index);
        },
        // Note: clone ignores its env argument and copies the un-evaluated
        // selector; the copy receives a fresh object_id.
        clone: function (env) {
            return new(tree.Extend)(this.selector, this.option, this.index);
        },
        // Flatten the owning ruleset's selectors into a single synthetic
        // selector stored on this.selfSelectors, inserting a descendant
        // combinator between adjacent selectors as genCSS would.
        findSelfSelectors: function (selectors) {
            var selfElements = [],
                i,
                selectorElements;
            for(i = 0; i < selectors.length; i++) {
                selectorElements = selectors[i].elements;
                // duplicate the logic in genCSS function inside the selector node.
                // future TODO - move both logics into the selector joiner visitor
                if (i > 0 && selectorElements.length && selectorElements[0].combinator.value === "") {
                    selectorElements[0].combinator.value = ' ';
                }
                selfElements = selfElements.concat(selectors[i].elements);
            }
            this.selfSelectors = [{ elements: selfElements }];
        }
    };
})(require('../tree'));
(function (tree) {
    //
    // CSS @import node
    //
    // The general strategy here is that we don't want to wait
    // for the parsing to be completed, before we start importing
    // the file. That's because in the context of a browser,
    // most of the time will be spent waiting for the server to respond.
    //
    // On creation, we push the import path to our import queue, though
    // `import.push`, we also pass it a callback, which it'll call once
    // the file has been fetched, and parsed.
    //
    tree.Import = function (path, features, options, index, currentFileInfo) {
        this.options = options;
        this.index = index;
        this.path = path;
        this.features = features;
        this.currentFileInfo = currentFileInfo;
        // Decide whether this is a plain-CSS import (left in the output)
        // or a Less import (inlined at eval time). Explicit options win;
        // otherwise a ".css" style path implies a CSS import.
        if (this.options.less !== undefined || this.options.inline) {
            this.css = !this.options.less || this.options.inline;
        } else {
            var pathValue = this.getPath();
            if (pathValue && /css([\?;].*)?$/.test(pathValue)) {
                this.css = true;
            }
        }
    };
    //
    // The actual import node doesn't return anything, when converted to CSS.
    // The reason is that it's used at the evaluation stage, so that the rules
    // it imports can be treated like any other rules.
    //
    // In `eval`, we make sure all Import nodes get evaluated, recursively, so
    // we end up with a flat structure, which can easily be imported in the parent
    // ruleset.
    //
    tree.Import.prototype = {
        type: "Import",
        accept: function (visitor) {
            if (this.features) {
                this.features = visitor.visit(this.features);
            }
            this.path = visitor.visit(this.path);
            if (!this.options.inline && this.root) {
                this.root = visitor.visit(this.root);
            }
        },
        // Only CSS imports are emitted; Less imports were already inlined.
        genCSS: function (env, output) {
            if (this.css) {
                output.add("@import ", this.currentFileInfo, this.index);
                this.path.genCSS(env, output);
                if (this.features) {
                    output.add(" ");
                    this.features.genCSS(env, output);
                }
                output.add(';');
            }
        },
        toCSS: tree.toCSS,
        // Resolve the import target to a string, appending ".less" to bare
        // extensionless paths. Returns null for unsupported path nodes.
        getPath: function () {
            if (this.path instanceof tree.Quoted) {
                var path = this.path.value;
                return (this.css !== undefined || /(\.[a-z]*$)|([\?;].*)$/.test(path)) ? path : path + '.less';
            } else if (this.path instanceof tree.URL) {
                return this.path.value.value;
            }
            return null;
        },
        // Partial evaluation used by the import visitor: only the path is
        // evaluated so the file can be fetched early.
        evalForImport: function (env) {
            return new(tree.Import)(this.path.eval(env), this.features, this.options, this.index, this.currentFileInfo);
        },
        // Evaluate and normalize the path, making it relative to rootpath
        // when applicable (URLs are left untouched).
        evalPath: function (env) {
            var path = this.path.eval(env);
            var rootpath = this.currentFileInfo && this.currentFileInfo.rootpath;
            if (!(path instanceof tree.URL)) {
                if (rootpath) {
                    var pathValue = path.value;
                    // Add the base path if the import is relative
                    if (pathValue && env.isPathRelative(pathValue)) {
                        path.value = rootpath +pathValue;
                    }
                }
                path.value = env.normalizePath(path.value);
            }
            return path;
        },
        eval: function (env) {
            var ruleset, features = this.features && this.features.eval(env);
            // `skip` may be a deferred decision (function) set by the import
            // visitor, e.g. for `@import (reference)` or duplicate imports.
            if (this.skip) {
                if (typeof this.skip === "function") {
                    this.skip = this.skip();
                }
                if (this.skip) {
                    return [];
                }
            }
            if (this.options.inline) {
                //todo needs to reference css file not import
                var contents = new(tree.Anonymous)(this.root, 0, {filename: this.importedFilename}, true);
                return this.features ? new(tree.Media)([contents], this.features.value) : [contents];
            } else if (this.css) {
                // CSS import: keep an @import statement in the output.
                var newImport = new(tree.Import)(this.evalPath(env), features, this.options, this.index);
                if (!newImport.css && this.error) {
                    throw this.error;
                }
                return newImport;
            } else {
                // Less import: splice the parsed rules into the parent,
                // wrapped in @media when the import carried features.
                ruleset = new(tree.Ruleset)(null, this.root.rules.slice(0));
                ruleset.evalImports(env);
                return this.features ? new(tree.Media)(ruleset.rules, this.features.value) : ruleset.rules;
            }
        }
    };
})(require('../tree'));
(function (tree) {
    // Inline JavaScript evaluation node, e.g. `` `1 + 1` `` in Less source.
    tree.JavaScript = function (string, index, escaped) {
        this.escaped = escaped;
        this.expression = string;
        this.index = index;
    };
    tree.JavaScript.prototype = {
        type: "JavaScript",
        eval: function (env) {
            var result,
                that = this,
                context = {};
            // Substitute @{name} interpolations with the jsify'd value of
            // the corresponding Less variable before compiling.
            var expression = this.expression.replace(/@\{([\w-]+)\}/g, function (_, name) {
                return tree.jsify(new(tree.Variable)('@' + name, that.index).eval(env));
            });
            try {
                expression = new(Function)('return (' + expression + ')');
            } catch (e) {
                throw { message: "JavaScript evaluation error: " + e.message + " from `" + expression + "`" ,
                    index: this.index };
            }
            // Expose the innermost frame's variables (sans the leading `@`)
            // to the script as lazily-evaluated `.toJS()`-able objects.
            var variables = env.frames[0].variables();
            for (var k in variables) {
                if (variables.hasOwnProperty(k)) {
                    /*jshint loopfunc:true */
                    context[k.slice(1)] = {
                        value: variables[k].value,
                        toJS: function () {
                            return this.value.eval(env).toCSS();
                        }
                    };
                }
            }
            try {
                // The variable map is made available as `this` inside the script.
                result = expression.call(context);
            } catch (e) {
                throw { message: "JavaScript evaluation error: '" + e.name + ': ' + e.message.replace(/["]/g, "'") + "'" ,
                    index: this.index };
            }
            // Map the JS result back onto a tree node by type.
            if (typeof(result) === 'number') {
                return new(tree.Dimension)(result);
            } else if (typeof(result) === 'string') {
                return new(tree.Quoted)('"' + result + '"', result, this.escaped, this.index);
            } else if (Array.isArray(result)) {
                return new(tree.Anonymous)(result.join(', '));
            } else {
                return new(tree.Anonymous)(result);
            }
        }
    };
})(require('../tree'));
(function (tree) {
    // A bare CSS keyword such as `solid` or `inherit`.
    tree.Keyword = function (value) { this.value = value; };
    tree.Keyword.prototype = {
        type: "Keyword",
        eval: function () { return this; },
        genCSS: function (env, output) {
            // A lone `%` only makes sense after a number; reject it here.
            if (this.value === '%') {
                throw { type: "Syntax", message: "Invalid % without number" };
            }
            output.add(this.value);
        },
        toCSS: tree.toCSS,
        // 0 when equal to another keyword, 1 for a different keyword,
        // -1 when the other node is not a keyword at all.
        compare: function (other) {
            if (!(other instanceof tree.Keyword)) {
                return -1;
            }
            return other.value === this.value ? 0 : 1;
        }
    };
    // Shared singletons for boolean keywords.
    tree.True = new(tree.Keyword)('true');
    tree.False = new(tree.Keyword)('false');
})(require('../tree'));
(function (tree) {
    // An @media block: its features plus a single wrapper ruleset holding
    // the body, selected by a synthetic `&` selector.
    tree.Media = function (value, features, index, currentFileInfo) {
        this.index = index;
        this.currentFileInfo = currentFileInfo;
        var selectors = this.emptySelectors();
        this.features = new(tree.Value)(features);
        this.rules = [new(tree.Ruleset)(selectors, value)];
        this.rules[0].allowImports = true;
    };
    tree.Media.prototype = {
        type: "Media",
        accept: function (visitor) {
            if (this.features) {
                this.features = visitor.visit(this.features);
            }
            if (this.rules) {
                this.rules = visitor.visitArray(this.rules);
            }
        },
        genCSS: function (env, output) {
            output.add('@media ', this.currentFileInfo, this.index);
            this.features.genCSS(env, output);
            tree.outputRuleset(env, output, this.rules);
        },
        toCSS: tree.toCSS,
        // Evaluation collects all (possibly nested) media blocks into
        // env.mediaBlocks, tracking nesting via env.mediaPath; the
        // outermost call stitches them back together in evalTop.
        eval: function (env) {
            if (!env.mediaBlocks) {
                env.mediaBlocks = [];
                env.mediaPath = [];
            }
            var media = new(tree.Media)(null, [], this.index, this.currentFileInfo);
            if(this.debugInfo) {
                this.rules[0].debugInfo = this.debugInfo;
                media.debugInfo = this.debugInfo;
            }
            // Media features must be evaluated with strict math on;
            // restore the previous setting afterwards.
            var strictMathBypass = false;
            if (!env.strictMath) {
                strictMathBypass = true;
                env.strictMath = true;
            }
            try {
                media.features = this.features.eval(env);
            }
            finally {
                if (strictMathBypass) {
                    env.strictMath = false;
                }
            }
            env.mediaPath.push(media);
            env.mediaBlocks.push(media);
            // Evaluate the body with the wrapper ruleset pushed as a frame.
            env.frames.unshift(this.rules[0]);
            media.rules = [this.rules[0].eval(env)];
            env.frames.shift();
            env.mediaPath.pop();
            return env.mediaPath.length === 0 ? media.evalTop(env) :
                media.evalNested(env);
        },
        // Ruleset-style lookups delegate to the wrapper ruleset.
        variable: function (name) { return tree.Ruleset.prototype.variable.call(this.rules[0], name); },
        find: function () { return tree.Ruleset.prototype.find.apply(this.rules[0], arguments); },
        rulesets: function () { return tree.Ruleset.prototype.rulesets.apply(this.rules[0]); },
        // Build the synthetic `&` selector used to wrap the media body.
        emptySelectors: function() {
            var el = new(tree.Element)('', '&', this.index, this.currentFileInfo),
                sels = [new(tree.Selector)([el], null, null, this.index, this.currentFileInfo)];
            sels[0].mediaEmpty = true;
            return sels;
        },
        markReferenced: function () {
            var i, rules = this.rules[0].rules;
            this.rules[0].markReferenced();
            this.isReferenced = true;
            for (i = 0; i < rules.length; i++) {
                if (rules[i].markReferenced) {
                    rules[i].markReferenced();
                }
            }
        },
        // Called for the outermost media block: if nesting produced
        // multiple blocks, wrap them all in a fresh ruleset.
        evalTop: function (env) {
            var result = this;
            // Render all dependent Media blocks.
            if (env.mediaBlocks.length > 1) {
                var selectors = this.emptySelectors();
                result = new(tree.Ruleset)(selectors, env.mediaBlocks);
                result.multiMedia = true;
            }
            delete env.mediaBlocks;
            delete env.mediaPath;
            return result;
        },
        // Called for a nested media block: combine this block's features
        // with every ancestor's, producing the cartesian product of the
        // comma-separated (OR) alternatives joined by `and`.
        evalNested: function (env) {
            var i, value,
                path = env.mediaPath.concat([this]);
            // Extract the media-query conditions separated with `,` (OR).
            for (i = 0; i < path.length; i++) {
                value = path[i].features instanceof tree.Value ?
                    path[i].features.value : path[i].features;
                path[i] = Array.isArray(value) ? value : [value];
            }
            // Trace all permutations to generate the resulting media-query.
            //
            // (a, b and c) with nested (d, e) ->
            //    a and d
            //    a and e
            //    b and c and d
            //    b and c and e
            this.features = new(tree.Value)(this.permute(path).map(function (path) {
                path = path.map(function (fragment) {
                    return fragment.toCSS ? fragment : new(tree.Anonymous)(fragment);
                });
                for(i = path.length - 1; i > 0; i--) {
                    path.splice(i, 0, new(tree.Anonymous)("and"));
                }
                return new(tree.Expression)(path);
            }));
            // Fake a tree-node that doesn't output anything.
            return new(tree.Ruleset)([], []);
        },
        // Cartesian product of an array of arrays.
        permute: function (arr) {
            if (arr.length === 0) {
                return [];
            } else if (arr.length === 1) {
                return arr[0];
            } else {
                var result = [];
                var rest = this.permute(arr.slice(1));
                for (var i = 0; i < rest.length; i++) {
                    for (var j = 0; j < arr[0].length; j++) {
                        result.push([arr[0][j]].concat(rest[i]));
                    }
                }
                return result;
            }
        },
        // Re-wrap the body in a ruleset carrying the given selectors
        // (used when bubbling @media out of a nested ruleset).
        bubbleSelectors: function (selectors) {
            if (!selectors)
                return;
            this.rules = [new(tree.Ruleset)(selectors.slice(0), [this.rules[0]])];
        }
    };
})(require('../tree'));
(function (tree) {
// Namespace for the mixin-related node types.
tree.mixin = {};
// A mixin invocation, e.g. `.mixin(1, 2);`.
// `elements` make up the selector being called; an empty argument list is
// normalised to null so `this.arguments` can simply be truth-tested.
tree.mixin.Call = function (elements, args, index, currentFileInfo, important) {
    this.selector = new(tree.Selector)(elements);
    this.arguments = args && args.length ? args : null;
    this.index = index;
    this.currentFileInfo = currentFileInfo;
    this.important = important;
};
// Prototype for mixin calls.
tree.mixin.Call.prototype = {
type: "MixinCall",
// Visitor hook for the call's selector and argument list.
accept: function (visitor) {
if (this.selector) {
this.selector = visitor.visit(this.selector);
}
if (this.arguments) {
this.arguments = visitor.visitArray(this.arguments);
}
},
// Resolve the call: scan every frame for matching definitions, filter
// them through argument and guard matching (including the two-subpass
// `default()` handling below), evaluate the accepted definitions and
// return the concatenated rules. Throws when nothing matches.
eval: function (env) {
var mixins, mixin, args, rules = [], match = false, i, m, f, isRecursive, isOneFound, rule,
candidates = [], candidate, conditionResult = [], defaultFunc = tree.defaultFunc,
defaultResult, defNone = 0, defTrue = 1, defFalse = 2, count;
// Evaluate the actual arguments once, up front.
args = this.arguments && this.arguments.map(function (a) {
return { name: a.name, value: a.value.eval(env) };
});
for (i = 0; i < env.frames.length; i++) {
if ((mixins = env.frames[i].find(this.selector)).length > 0) {
isOneFound = true;
// To make `default()` function independent of definition order we have two "subpasses" here.
// At first we evaluate each guard *twice* (with `default() == true` and `default() == false`),
// and build candidate list with corresponding flags. Then, when we know all possible matches,
// we make a final decision.
for (m = 0; m < mixins.length; m++) {
mixin = mixins[m];
// Skip a plain ruleset that is already being evaluated on the
// frame stack (direct-recursion guard).
isRecursive = false;
for(f = 0; f < env.frames.length; f++) {
if ((!(mixin instanceof tree.mixin.Definition)) && mixin === (env.frames[f].originalRuleset || env.frames[f])) {
isRecursive = true;
break;
}
}
if (isRecursive) {
continue;
}
if (mixin.matchArgs(args, env)) {
candidate = {mixin: mixin, group: defNone};
if (mixin.matchCondition) {
// Evaluate the guard with default() forced to false (f==0)
// and then to true (f==1), and classify the candidate.
for (f = 0; f < 2; f++) {
defaultFunc.value(f);
conditionResult[f] = mixin.matchCondition(args, env);
}
if (conditionResult[0] || conditionResult[1]) {
if (conditionResult[0] != conditionResult[1]) {
candidate.group = conditionResult[1] ?
defTrue : defFalse;
}
candidates.push(candidate);
}
}
else {
candidates.push(candidate);
}
match = true;
}
}
defaultFunc.reset();
// Count candidates per group to decide how default() resolves.
count = [0, 0, 0];
for (m = 0; m < candidates.length; m++) {
count[candidates[m].group]++;
}
if (count[defNone] > 0) {
// Unconditional matches exist, so default() resolves to false.
defaultResult = defFalse;
} else {
defaultResult = defTrue;
if ((count[defTrue] + count[defFalse]) > 1) {
throw { type: 'Runtime',
message: 'Ambiguous use of `default()` found when matching for `'
+ this.format(args) + '`',
index: this.index, filename: this.currentFileInfo.filename };
}
}
// Expand every accepted candidate into rules.
for (m = 0; m < candidates.length; m++) {
candidate = candidates[m].group;
if ((candidate === defNone) || (candidate === defaultResult)) {
try {
mixin = candidates[m].mixin;
if (!(mixin instanceof tree.mixin.Definition)) {
// A plain ruleset called as a mixin is wrapped in an
// anonymous, parameterless definition.
// NOTE(review): the originalRuleset lookup below indexes
// `mixins[m]` while iterating `candidates` — the two arrays
// need not line up once guards filtered candidates; confirm
// against upstream before relying on it.
mixin = new tree.mixin.Definition("", [], mixin.rules, null, false);
mixin.originalRuleset = mixins[m].originalRuleset || mixins[m];
}
Array.prototype.push.apply(
rules, mixin.evalCall(env, args, this.important).rules);
} catch (e) {
throw { message: e.message, index: this.index, filename: this.currentFileInfo.filename, stack: e.stack };
}
}
}
if (match) {
// For `reference` imports, propagate the referenced flag onto the
// produced rules so they are emitted.
if (!this.currentFileInfo || !this.currentFileInfo.reference) {
for (i = 0; i < rules.length; i++) {
rule = rules[i];
if (rule.markReferenced) {
rule.markReferenced();
}
}
}
return rules;
}
}
}
// Nothing matched: distinguish "found but no signature/guard matched"
// from "no such mixin at all".
if (isOneFound) {
throw { type: 'Runtime',
message: 'No matching definition was found for `' + this.format(args) + '`',
index: this.index, filename: this.currentFileInfo.filename };
} else {
throw { type: 'Name',
message: this.selector.toCSS().trim() + " is undefined",
index: this.index, filename: this.currentFileInfo.filename };
}
},
// Render the call as `selector(name:value, ...)` for error messages.
format: function (args) {
return this.selector.toCSS().trim() + '(' +
(args ? args.map(function (a) {
var argValue = "";
if (a.name) {
argValue += a.name + ":";
}
if (a.value.toCSS) {
argValue += a.value.toCSS();
} else {
argValue += "???";
}
return argValue;
}).join(', ') : "") + ")";
}
};
// A mixin definition, e.g. `.mixin(@a; @b: 2; @rest...) { ... }`.
// `params` is the parameter list, `condition` an optional guard, `variadic`
// is true when the last parameter is `...`, and `frames` (when present) are
// the lexical frames the definition closes over.
tree.mixin.Definition = function (name, params, rules, condition, variadic, frames) {
this.name = name;
// NOTE(review): this.index / this.currentFileInfo are still undefined at
// this point, so the generated selector carries no position info — confirm
// whether that is intentional upstream.
this.selectors = [new(tree.Selector)([new(tree.Element)(null, name, this.index, this.currentFileInfo)])];
this.params = params;
this.condition = condition;
this.variadic = variadic;
this.arity = params.length;
this.rules = rules;
this._lookups = {};
// Number of parameters the caller must supply: anything without a
// default value (pattern literals also count as required).
this.required = params.reduce(function (count, p) {
if (!p.name || (p.name && !p.value)) { return count + 1; }
else { return count; }
}, 0);
this.parent = tree.Ruleset.prototype;
this.frames = frames;
};
// Prototype for mixin definitions.
tree.mixin.Definition.prototype = {
type: "MixinDefinition",
// Visitor hook for parameters, body rules and the guard condition.
accept: function (visitor) {
if (this.params && this.params.length) {
this.params = visitor.visitArray(this.params);
}
this.rules = visitor.visitArray(this.rules);
if (this.condition) {
this.condition = visitor.visit(this.condition);
}
},
// Scope helpers, delegated to Ruleset's implementations.
variable: function (name) { return this.parent.variable.call(this, name); },
variables: function () { return this.parent.variables.call(this); },
find: function () { return this.parent.find.apply(this, arguments); },
rulesets: function () { return this.parent.rulesets.apply(this); },
// Bind the call's arguments to this definition's parameters.
// Returns a frame (Ruleset) holding one Rule per bound parameter and
// fills `evaldArguments` positionally with the evaluated values.
// Named arguments are consumed first, the remainder is bound
// positionally; defaults evaluate in the mixin's own scope (mixinEnv)
// and a variadic parameter swallows all remaining arguments.
evalParams: function (env, mixinEnv, args, evaldArguments) {
/*jshint boss:true */
var frame = new(tree.Ruleset)(null, null),
varargs, arg,
params = this.params.slice(0),
i, j, val, name, isNamedFound, argIndex, argsLength = 0;
mixinEnv = new tree.evalEnv(mixinEnv, [frame].concat(mixinEnv.frames));
if (args) {
args = args.slice(0);
argsLength = args.length;
// First pass: pull out named arguments (e.g. `@b: 2`).
for(i = 0; i < argsLength; i++) {
arg = args[i];
if (name = (arg && arg.name)) {
isNamedFound = false;
for(j = 0; j < params.length; j++) {
if (!evaldArguments[j] && name === params[j].name) {
evaldArguments[j] = arg.value.eval(env);
frame.prependRule(new(tree.Rule)(name, arg.value.eval(env)));
isNamedFound = true;
break;
}
}
if (isNamedFound) {
// Remove the consumed argument so the positional pass below
// does not see it again.
args.splice(i, 1);
i--;
continue;
} else {
throw { type: 'Runtime', message: "Named argument for " + this.name +
' ' + args[i].name + ' not found' };
}
}
}
}
// Second pass: positional binding of whatever is left.
argIndex = 0;
for (i = 0; i < params.length; i++) {
if (evaldArguments[i]) { continue; }
arg = args && args[argIndex];
if (name = params[i].name) {
if (params[i].variadic) {
// A named variadic parameter collects every remaining
// argument into a single Expression.
varargs = [];
for (j = argIndex; j < argsLength; j++) {
varargs.push(args[j].value.eval(env));
}
frame.prependRule(new(tree.Rule)(name, new(tree.Expression)(varargs).eval(env)));
} else {
val = arg && arg.value;
if (val) {
val = val.eval(env);
} else if (params[i].value) {
// Fall back to the declared default, evaluated in the
// mixin scope so it may reference other parameters.
val = params[i].value.eval(mixinEnv);
frame.resetCache();
} else {
throw { type: 'Runtime', message: "wrong number of arguments for " + this.name +
' (' + argsLength + ' for ' + this.arity + ')' };
}
frame.prependRule(new(tree.Rule)(name, val));
evaldArguments[i] = val;
}
}
// An anonymous variadic tail still records the remaining values in
// evaldArguments (they end up in @arguments).
if (params[i].variadic && args) {
for (j = argIndex; j < argsLength; j++) {
evaldArguments[j] = args[j].value.eval(env);
}
}
argIndex++;
}
return frame;
},
// Close over the current frames so the definition can later be invoked
// from an unrelated scope.
eval: function (env) {
return new tree.mixin.Definition(this.name, this.params, this.rules, this.condition, this.variadic, this.frames || env.frames.slice(0));
},
// Invoke the mixin: bind parameters, expose @arguments, evaluate the
// body in the combined scope, optionally marking every rule !important.
evalCall: function (env, args, important) {
var _arguments = [],
mixinFrames = this.frames ? this.frames.concat(env.frames) : env.frames,
frame = this.evalParams(env, new(tree.evalEnv)(env, mixinFrames), args, _arguments),
rules, ruleset;
frame.prependRule(new(tree.Rule)('@arguments', new(tree.Expression)(_arguments).eval(env)));
rules = this.rules.slice(0);
ruleset = new(tree.Ruleset)(null, rules);
ruleset.originalRuleset = this;
ruleset = ruleset.eval(new(tree.evalEnv)(env, [this, frame].concat(mixinFrames)));
if (important) {
ruleset = this.parent.makeImportant.apply(ruleset);
}
return ruleset;
},
// Evaluate the guard condition (if any) with the parameter bindings in
// scope; returns true when the guard passes or there is no guard.
matchCondition: function (args, env) {
if (this.condition && !this.condition.eval(
new(tree.evalEnv)(env,
[this.evalParams(env, new(tree.evalEnv)(env, this.frames.concat(env.frames)), args, [])] // the parameter variables
.concat(this.frames) // the parent namespace/mixin frames
.concat(env.frames)))) { // the current environment frames
return false;
}
return true;
},
// Cheap arity/pattern pre-filter run before guard evaluation.
matchArgs: function (args, env) {
var argsLength = (args && args.length) || 0, len;
if (! this.variadic) {
if (argsLength < this.required) { return false; }
if (argsLength > this.params.length) { return false; }
} else {
if (argsLength < (this.required - 1)) { return false; }
}
// Literal (pattern) parameters must match the argument text exactly.
len = Math.min(argsLength, this.arity);
for (var i = 0; i < len; i++) {
if (!this.params[i].name && !this.params[i].variadic) {
if (args[i].value.eval(env).toCSS() != this.params[i].value.eval(env).toCSS()) {
return false;
}
}
}
return true;
}
};
})(require('../tree'));
(function (tree) {
    // Unary minus applied to a node, e.g. `-@a`.
    tree.Negative = function (node) {
        this.value = node;
    };
    tree.Negative.prototype = {
        type: "Negative",
        accept: function (visitor) {
            this.value = visitor.visit(this.value);
        },
        // Output a literal leading '-' followed by the operand's CSS.
        genCSS: function (env, output) {
            output.add('-');
            this.value.genCSS(env, output);
        },
        toCSS: tree.toCSS,
        // With maths enabled, fold `-x` into the operation `x * -1`;
        // otherwise keep the node and evaluate only its operand.
        eval: function (env) {
            if (env.isMathOn()) {
                var negated = new(tree.Operation)('*', [new(tree.Dimension)(-1), this.value]);
                return negated.eval(env);
            }
            return new(tree.Negative)(this.value.eval(env));
        }
    };
})(require('../tree'));
(function (tree) {
    // A binary arithmetic operation such as `@a + @b`.
    tree.Operation = function (op, operands, isSpaced) {
        this.op = op.trim();
        this.operands = operands;
        this.isSpaced = isSpaced;
    };
    tree.Operation.prototype = {
        type: "Operation",
        accept: function (visitor) {
            this.operands = visitor.visit(this.operands);
        },
        // Evaluate both operands first; with maths off, rebuild the node
        // unreduced. With maths on, coerce a Dimension to a Color when the
        // other side is a Color, then delegate to the operand's `operate`.
        eval: function (env) {
            var left = this.operands[0].eval(env),
                right = this.operands[1].eval(env);
            if (!env.isMathOn()) {
                return new(tree.Operation)(this.op, [left, right], this.isSpaced);
            }
            if (left instanceof tree.Dimension && right instanceof tree.Color) {
                left = left.toColor();
            }
            if (right instanceof tree.Dimension && left instanceof tree.Color) {
                right = right.toColor();
            }
            if (!left.operate) {
                throw { type: "Operation",
                    message: "Operation on an invalid type" };
            }
            return left.operate(env, this.op, right);
        },
        // Emit `a op b`, padding the operator with spaces when the source
        // had them.
        genCSS: function (env, output) {
            this.operands[0].genCSS(env, output);
            if (this.isSpaced) { output.add(" "); }
            output.add(this.op);
            if (this.isSpaced) { output.add(" "); }
            this.operands[1].genCSS(env, output);
        },
        toCSS: tree.toCSS
    };
    // Plain numeric helper shared by node types implementing `operate`.
    tree.operate = function (env, op, a, b) {
        switch (op) {
            case '+': return a + b;
            case '-': return a - b;
            case '*': return a * b;
            case '/': return a / b;
        }
    };
})(require('../tree'));
(function (tree) {
    // A parenthesised expression, e.g. `(1 + 2)`.
    var Paren = tree.Paren = function (node) {
        this.value = node;
    };
    Paren.prototype = {
        type: "Paren",
        accept: function (visitor) {
            this.value = visitor.visit(this.value);
        },
        // Wrap the inner node's CSS in literal parentheses.
        genCSS: function (env, output) {
            output.add('(');
            this.value.genCSS(env, output);
            output.add(')');
        },
        toCSS: tree.toCSS,
        // The parentheses survive evaluation; only the inner node is
        // evaluated.
        eval: function (env) {
            return new Paren(this.value.eval(env));
        }
    };
})(require('../tree'));
(function (tree) {
    // A quoted string, e.g. `"foo"` or `~"escaped"`. Escaped strings drop
    // their surrounding quotes on output.
    tree.Quoted = function (str, content, escaped, index, currentFileInfo) {
        this.escaped = escaped;
        this.value = content || '';
        this.quote = str.charAt(0);
        this.index = index;
        this.currentFileInfo = currentFileInfo;
    };
    tree.Quoted.prototype = {
        type: "Quoted",
        // Print the value, surrounded by the original quote character
        // unless the string is escaped.
        genCSS: function (env, output) {
            if (this.escaped) {
                output.add(this.value);
            } else {
                output.add(this.quote, this.currentFileInfo, this.index);
                output.add(this.value);
                output.add(this.quote);
            }
        },
        toCSS: tree.toCSS,
        // Interpolate embedded JavaScript (`...`) and variable references
        // (@{name}) inside the string, returning a fresh Quoted node.
        eval: function (env) {
            var self = this;
            var interpolated = this.value
                .replace(/`([^`]+)`/g, function (_, exp) {
                    return new(tree.JavaScript)(exp, self.index, true).eval(env).value;
                })
                .replace(/@\{([\w-]+)\}/g, function (_, name) {
                    var v = new(tree.Variable)('@' + name, self.index, self.currentFileInfo).eval(env, true);
                    return (v instanceof tree.Quoted) ? v.value : v.toCSS();
                });
            return new(tree.Quoted)(this.quote + interpolated + this.quote, interpolated, this.escaped, this.index, this.currentFileInfo);
        },
        // Lexicographic comparison on rendered CSS; -1 when `x` cannot be
        // rendered at all.
        compare: function (x) {
            if (!x.toCSS) {
                return -1;
            }
            var left = this.toCSS(),
                right = x.toCSS();
            if (left === right) {
                return 0;
            }
            return left < right ? -1 : 1;
        }
    };
})(require('../tree'));
(function (tree) {
// A declaration such as `color: red;`, or a variable definition `@a: 1;`.
tree.Rule = function (name, value, important, merge, index, currentFileInfo, inline) {
this.name = name;
// Normalise so `value` is always a Value node (or a Ruleset, for
// detached rulesets).
this.value = (value instanceof tree.Value || value instanceof tree.Ruleset) ? value : new(tree.Value)([value]);
this.important = important ? ' ' + important.trim() : '';
this.merge = merge;
this.index = index;
this.currentFileInfo = currentFileInfo;
this.inline = inline || false;
// `name` may be an array of interpolated parts; only a plain string
// starting with '@' marks a variable definition.
this.variable = name.charAt && (name.charAt(0) === '@');
};
tree.Rule.prototype = {
type: "Rule",
accept: function (visitor) {
this.value = visitor.visit(this.value);
},
// Emit `name: value[ !important];`, omitting the trailing semicolon for
// inline rules and for the last rule of a compressed block.
genCSS: function (env, output) {
output.add(this.name + (env.compress ? ':' : ': '), this.currentFileInfo, this.index);
try {
this.value.genCSS(env, output);
}
catch(e) {
// Attach position information before re-throwing.
e.index = this.index;
e.filename = this.currentFileInfo.filename;
throw e;
}
output.add(this.important + ((this.inline || (env.lastRule && env.compress)) ? "" : ";"), this.currentFileInfo, this.index);
},
toCSS: tree.toCSS,
// Evaluate the (possibly interpolated) name and the value. `font`
// declarations are evaluated with strict math forced on, because `/`
// is meaningful syntax in the font shorthand.
eval: function (env) {
var strictMathBypass = false, name = this.name, evaldValue;
if (typeof name !== "string") {
// expand 'primitive' name directly to get
// things faster (~10% for benchmark.less):
name = (name.length === 1)
&& (name[0] instanceof tree.Keyword)
? name[0].value : evalName(env, name);
}
if (name === "font" && !env.strictMath) {
strictMathBypass = true;
env.strictMath = true;
}
try {
evaldValue = this.value.eval(env);
// A detached ruleset may only be assigned to a variable, never to
// a CSS property.
if (!this.variable && evaldValue.type === "DetachedRuleset") {
throw { message: "Rulesets cannot be evaluated on a property.",
index: this.index, filename: this.currentFileInfo.filename };
}
return new(tree.Rule)(name,
evaldValue,
this.important,
this.merge,
this.index, this.currentFileInfo, this.inline);
}
catch(e) {
if (typeof e.index !== 'number') {
e.index = this.index;
e.filename = this.currentFileInfo.filename;
}
throw e;
}
finally {
// Always restore the caller's strictMath setting.
if (strictMathBypass) {
env.strictMath = false;
}
}
},
// Return a copy of this rule flagged !important.
makeImportant: function () {
return new(tree.Rule)(this.name,
this.value,
"!important",
this.merge,
this.index, this.currentFileInfo, this.inline);
}
};
// Render an interpolated name (array of nodes) to a plain string.
function evalName(env, name) {
var value = "", i, n = name.length,
output = {add: function (s) {value += s;}};
for (i = 0; i < n; i++) {
name[i].eval(env).genCSS(env, output);
}
return value;
}
})(require('../tree'));
(function (tree) {
    // A call to a detached ruleset held in a variable, e.g. `@rules();`.
    tree.RulesetCall = function (variable) {
        this.variable = variable;
    };
    tree.RulesetCall.prototype = {
        type: "RulesetCall",
        // Nothing to visit: the node only stores a variable name.
        accept: function (visitor) {
        },
        // Look the variable up in the current scope, then evaluate the
        // detached ruleset it references.
        eval: function (env) {
            var detached = new(tree.Variable)(this.variable).eval(env);
            return detached.callEval(env);
        }
    };
})(require('../tree'));
(function (tree) {
// A block of rules with a selector list, e.g. `.a, .b { ... }`. The root
// node of a stylesheet is also a Ruleset (with `root`/`firstRoot` set).
tree.Ruleset = function (selectors, rules, strictImports) {
this.selectors = selectors;
this.rules = rules;
this._lookups = {};
this.strictImports = strictImports;
};
tree.Ruleset.prototype = {
type: "Ruleset",
// Visitor hook. Once `paths` exists (after join-selector processing)
// the paths are visited instead of the raw selectors.
accept: function (visitor) {
if (this.paths) {
visitor.visitArray(this.paths, true);
} else if (this.selectors) {
this.selectors = visitor.visitArray(this.selectors);
}
if (this.rules && this.rules.length) {
this.rules = visitor.visitArray(this.rules);
}
},
// Evaluate the ruleset: evaluate selectors (including CSS guards),
// expand imports, mixin calls and detached-ruleset calls in place,
// evaluate the remaining rules, and fold `&`-only child rulesets in.
eval: function (env) {
var thisSelectors = this.selectors, selectors,
selCnt, selector, i, defaultFunc = tree.defaultFunc, hasOnePassingSelector = false;
if (thisSelectors && (selCnt = thisSelectors.length)) {
selectors = [];
// default() is not available inside CSS guards; arm the error.
defaultFunc.error({
type: "Syntax",
message: "it is currently only allowed in parametric mixin guards,"
});
for (i = 0; i < selCnt; i++) {
selector = thisSelectors[i].eval(env);
selectors.push(selector);
if (selector.evaldCondition) {
hasOnePassingSelector = true;
}
}
defaultFunc.reset();
} else {
hasOnePassingSelector = true;
}
var rules = this.rules ? this.rules.slice(0) : null,
ruleset = new(tree.Ruleset)(selectors, rules, this.strictImports),
rule, subRule;
ruleset.originalRuleset = this;
ruleset.root = this.root;
ruleset.firstRoot = this.firstRoot;
ruleset.allowImports = this.allowImports;
if(this.debugInfo) {
ruleset.debugInfo = this.debugInfo;
}
// Every CSS guard failed: drop the body entirely.
if (!hasOnePassingSelector) {
rules.length = 0;
}
// push the current ruleset to the frames stack
var envFrames = env.frames;
envFrames.unshift(ruleset);
// currrent selectors
var envSelectors = env.selectors;
if (!envSelectors) {
env.selectors = envSelectors = [];
}
envSelectors.unshift(this.selectors);
// Evaluate imports
if (ruleset.root || ruleset.allowImports || !ruleset.strictImports) {
ruleset.evalImports(env);
}
// Store the frames around mixin definitions,
// so they can be evaluated like closures when the time comes.
var rsRules = ruleset.rules, rsRuleCnt = rsRules ? rsRules.length : 0;
for (i = 0; i < rsRuleCnt; i++) {
if (rsRules[i] instanceof tree.mixin.Definition || rsRules[i] instanceof tree.DetachedRuleset) {
rsRules[i] = rsRules[i].eval(env);
}
}
var mediaBlockCount = (env.mediaBlocks && env.mediaBlocks.length) || 0;
// Evaluate mixin calls.
for (i = 0; i < rsRuleCnt; i++) {
if (rsRules[i] instanceof tree.mixin.Call) {
/*jshint loopfunc:true */
rules = rsRules[i].eval(env).filter(function(r) {
if ((r instanceof tree.Rule) && r.variable) {
// do not pollute the scope if the variable is
// already there. consider returning false here
// but we need a way to "return" variable from mixins
return !(ruleset.variable(r.name));
}
return true;
});
// Splice the produced rules in place of the call and keep the
// loop indices in step with the mutated array.
rsRules.splice.apply(rsRules, [i, 1].concat(rules));
rsRuleCnt += rules.length - 1;
i += rules.length-1;
ruleset.resetCache();
} else if (rsRules[i] instanceof tree.RulesetCall) {
/*jshint loopfunc:true */
rules = rsRules[i].eval(env).rules.filter(function(r) {
if ((r instanceof tree.Rule) && r.variable) {
// do not pollute the scope at all
return false;
}
return true;
});
rsRules.splice.apply(rsRules, [i, 1].concat(rules));
rsRuleCnt += rules.length - 1;
i += rules.length-1;
ruleset.resetCache();
}
}
// Evaluate everything else
for (i = 0; i < rsRules.length; i++) {
rule = rsRules[i];
if (! (rule instanceof tree.mixin.Definition || rule instanceof tree.DetachedRuleset)) {
rsRules[i] = rule = rule.eval ? rule.eval(env) : rule;
}
}
// Evaluate everything else
for (i = 0; i < rsRules.length; i++) {
rule = rsRules[i];
// for rulesets, check if it is a css guard and can be removed
if (rule instanceof tree.Ruleset && rule.selectors && rule.selectors.length === 1) {
// check if it can be folded in (e.g. & where)
if (rule.selectors[0].isJustParentSelector()) {
rsRules.splice(i--, 1);
for(var j = 0; j < rule.rules.length; j++) {
subRule = rule.rules[j];
if (!(subRule instanceof tree.Rule) || !subRule.variable) {
rsRules.splice(++i, 0, subRule);
}
}
}
}
}
// Pop the stack
envFrames.shift();
envSelectors.shift();
// Give media blocks created during this evaluation our selectors so
// they keep the enclosing selector context when hoisted.
if (env.mediaBlocks) {
for (i = mediaBlockCount; i < env.mediaBlocks.length; i++) {
env.mediaBlocks[i].bubbleSelectors(selectors);
}
}
return ruleset;
},
// Expand @import rules in place (mutates this.rules).
evalImports: function(env) {
var rules = this.rules, i, importRules;
if (!rules) { return; }
for (i = 0; i < rules.length; i++) {
if (rules[i] instanceof tree.Import) {
importRules = rules[i].eval(env);
if (importRules && importRules.length) {
rules.splice.apply(rules, [i, 1].concat(importRules));
i+= importRules.length-1;
} else {
rules.splice(i, 1, importRules);
}
this.resetCache();
}
}
},
// Return a copy with every contained rule marked !important.
makeImportant: function() {
return new tree.Ruleset(this.selectors, this.rules.map(function (r) {
if (r.makeImportant) {
return r.makeImportant();
} else {
return r;
}
}), this.strictImports);
},
// A plain ruleset only matches a mixin call with no arguments.
matchArgs: function (args) {
return !args || args.length === 0;
},
// lets you call a css selector with a guard
matchCondition: function (args, env) {
var lastSelector = this.selectors[this.selectors.length-1];
if (!lastSelector.evaldCondition) {
return false;
}
if (lastSelector.condition &&
!lastSelector.condition.eval(
new(tree.evalEnv)(env,
env.frames))) {
return false;
}
return true;
},
// Drop the memoised variable/ruleset/find caches after mutation.
resetCache: function () {
this._rulesets = null;
this._variables = null;
this._lookups = {};
},
// Lazily build and cache a map of variable name -> Rule.
variables: function () {
if (!this._variables) {
this._variables = !this.rules ? {} : this.rules.reduce(function (hash, r) {
if (r instanceof tree.Rule && r.variable === true) {
hash[r.name] = r;
}
return hash;
}, {});
}
return this._variables;
},
variable: function (name) {
return this.variables()[name];
},
// Child rulesets and mixin definitions (potential mixin targets).
rulesets: function () {
if (!this.rules) { return null; }
var _Ruleset = tree.Ruleset, _MixinDefinition = tree.mixin.Definition,
filtRules = [], rules = this.rules, cnt = rules.length,
i, rule;
for (i = 0; i < cnt; i++) {
rule = rules[i];
if ((rule instanceof _Ruleset) || (rule instanceof _MixinDefinition)) {
filtRules.push(rule);
}
}
return filtRules;
},
// Insert a rule at the front of the rule list (used for parameter
// frames so later definitions shadow earlier ones).
prependRule: function (rule) {
var rules = this.rules;
if (rules) { rules.unshift(rule); } else { this.rules = [ rule ]; }
},
// Find child rulesets matching `selector`, recursing into partial
// matches; results are memoised per selector CSS in _lookups.
find: function (selector, self) {
self = self || this;
var rules = [], match,
key = selector.toCSS();
if (key in this._lookups) { return this._lookups[key]; }
this.rulesets().forEach(function (rule) {
if (rule !== self) {
for (var j = 0; j < rule.selectors.length; j++) {
match = selector.match(rule.selectors[j]);
if (match) {
// Partial match: continue searching inside `rule` for
// the unmatched tail of the selector.
if (selector.elements.length > match) {
Array.prototype.push.apply(rules, rule.find(
new(tree.Selector)(selector.elements.slice(match)), self));
} else {
rules.push(rule);
}
break;
}
}
}
});
this._lookups[key] = rules;
return rules;
},
// Serialise the ruleset: rules first, then nested rulesets/media/
// directives, honouring env.compress and the current tab level.
genCSS: function (env, output) {
var i, j,
ruleNodes = [],
rulesetNodes = [],
rulesetNodeCnt,
debugInfo, // Line number debugging
rule,
path;
env.tabLevel = (env.tabLevel || 0);
if (!this.root) {
env.tabLevel++;
}
var tabRuleStr = env.compress ? '' : Array(env.tabLevel + 1).join("  "),
tabSetStr = env.compress ? '' : Array(env.tabLevel).join("  "),
sep;
// Partition children into plain rules and block-producing nodes.
for (i = 0; i < this.rules.length; i++) {
rule = this.rules[i];
if (rule.rules || (rule instanceof tree.Media) || rule instanceof tree.Directive || (this.root && rule instanceof tree.Comment)) {
rulesetNodes.push(rule);
} else {
ruleNodes.push(rule);
}
}
// If this is the root node, we don't render
// a selector, or {}.
if (!this.root) {
debugInfo = tree.debugInfo(env, this, tabSetStr);
if (debugInfo) {
output.add(debugInfo);
output.add(tabSetStr);
}
var paths = this.paths, pathCnt = paths.length,
pathSubCnt;
sep = env.compress ? ',' : (',\n' + tabSetStr);
for (i = 0; i < pathCnt; i++) {
path = paths[i];
if (!(pathSubCnt = path.length)) { continue; }
if (i > 0) { output.add(sep); }
env.firstSelector = true;
path[0].genCSS(env, output);
env.firstSelector = false;
for (j = 1; j < pathSubCnt; j++) {
path[j].genCSS(env, output);
}
}
output.add((env.compress ? '{' : ' {\n') + tabRuleStr);
}
// Compile rules and rulesets
for (i = 0; i < ruleNodes.length; i++) {
rule = ruleNodes[i];
// @page{ directive ends up with root elements inside it, a mix of rules and rulesets
// In this instance we do not know whether it is the last property
if (i + 1 === ruleNodes.length && (!this.root || rulesetNodes.length === 0 || this.firstRoot)) {
env.lastRule = true;
}
if (rule.genCSS) {
rule.genCSS(env, output);
} else if (rule.value) {
output.add(rule.value.toString());
}
if (!env.lastRule) {
output.add(env.compress ? '' : ('\n' + tabRuleStr));
} else {
env.lastRule = false;
}
}
if (!this.root) {
output.add((env.compress ? '}' : '\n' + tabSetStr + '}'));
env.tabLevel--;
}
sep = (env.compress ? "" : "\n") + (this.root ? tabRuleStr : tabSetStr);
rulesetNodeCnt = rulesetNodes.length;
if (rulesetNodeCnt) {
if (ruleNodes.length && sep) { output.add(sep); }
rulesetNodes[0].genCSS(env, output);
for (i = 1; i < rulesetNodeCnt; i++) {
if (sep) { output.add(sep); }
rulesetNodes[i].genCSS(env, output);
}
}
if (!output.isEmpty() && !env.compress && this.firstRoot) {
output.add('\n');
}
},
toCSS: tree.toCSS,
// Flag every selector as referenced (for the `reference` import option).
markReferenced: function () {
if (!this.selectors) {
return;
}
for (var s = 0; s < this.selectors.length; s++) {
this.selectors[s].markReferenced();
}
},
// Join each of `selectors` against the parent `context`, appending the
// resulting selector paths to `paths`.
joinSelectors: function (paths, context, selectors) {
for (var s = 0; s < selectors.length; s++) {
this.joinSelector(paths, context, selectors[s]);
}
},
// Combine one selector with its parent context, expanding every `&`
// occurrence against each parent path (cartesian multiplication).
joinSelector: function (paths, context, selector) {
var i, j, k,
hasParentSelector, newSelectors, el, sel, parentSel,
newSelectorPath, afterParentJoin, newJoinedSelector,
newJoinedSelectorEmpty, lastSelector, currentElements,
selectorsMultiplied;
for (i = 0; i < selector.elements.length; i++) {
el = selector.elements[i];
if (el.value === '&') {
hasParentSelector = true;
}
}
// No `&`: simply nest under each parent path (or stand alone).
if (!hasParentSelector) {
if (context.length > 0) {
for (i = 0; i < context.length; i++) {
paths.push(context[i].concat(selector));
}
}
else {
paths.push([selector]);
}
return;
}
// The paths are [[Selector]]
// The first list is a list of comma seperated selectors
// The inner list is a list of inheritance seperated selectors
// e.g.
// .a, .b {
//   .c {
//   }
// }
// == [[.a] [.c]] [[.b] [.c]]
//
// the elements from the current selector so far
currentElements = [];
// the current list of new selectors to add to the path.
// We will build it up. We initiate it with one empty selector as we "multiply" the new selectors
// by the parents
newSelectors = [[]];
for (i = 0; i < selector.elements.length; i++) {
el = selector.elements[i];
// non parent reference elements just get added
if (el.value !== "&") {
currentElements.push(el);
} else {
// the new list of selectors to add
selectorsMultiplied = [];
// merge the current list of non parent selector elements
// on to the current list of selectors to add
if (currentElements.length > 0) {
this.mergeElementsOnToSelectors(currentElements, newSelectors);
}
// loop through our current selectors
for (j = 0; j < newSelectors.length; j++) {
sel = newSelectors[j];
// if we don't have any parent paths, the & might be in a mixin so that it can be used
// whether there are parents or not
if (context.length === 0) {
// the combinator used on el should now be applied to the next element instead so that
// it is not lost
if (sel.length > 0) {
sel[0].elements = sel[0].elements.slice(0);
sel[0].elements.push(new(tree.Element)(el.combinator, '', el.index, el.currentFileInfo));
}
selectorsMultiplied.push(sel);
}
else {
// and the parent selectors
for (k = 0; k < context.length; k++) {
parentSel = context[k];
// We need to put the current selectors
// then join the last selector's elements on to the parents selectors
// our new selector path
newSelectorPath = [];
// selectors from the parent after the join
afterParentJoin = [];
newJoinedSelectorEmpty = true;
//construct the joined selector - if & is the first thing this will be empty,
// if not newJoinedSelector will be the last set of elements in the selector
if (sel.length > 0) {
newSelectorPath = sel.slice(0);
lastSelector = newSelectorPath.pop();
newJoinedSelector = selector.createDerived(lastSelector.elements.slice(0));
newJoinedSelectorEmpty = false;
}
else {
newJoinedSelector = selector.createDerived([]);
}
//put together the parent selectors after the join
if (parentSel.length > 1) {
afterParentJoin = afterParentJoin.concat(parentSel.slice(1));
}
if (parentSel.length > 0) {
newJoinedSelectorEmpty = false;
// join the elements so far with the first part of the parent
newJoinedSelector.elements.push(new(tree.Element)(el.combinator, parentSel[0].elements[0].value, el.index, el.currentFileInfo));
newJoinedSelector.elements = newJoinedSelector.elements.concat(parentSel[0].elements.slice(1));
}
if (!newJoinedSelectorEmpty) {
// now add the joined selector
newSelectorPath.push(newJoinedSelector);
}
// and the rest of the parent
newSelectorPath = newSelectorPath.concat(afterParentJoin);
// add that to our new set of selectors
selectorsMultiplied.push(newSelectorPath);
}
}
}
// our new selectors has been multiplied, so reset the state
newSelectors = selectorsMultiplied;
currentElements = [];
}
}
// if we have any elements left over (e.g. .a& .b == .b)
// add them on to all the current selectors
if (currentElements.length > 0) {
this.mergeElementsOnToSelectors(currentElements, newSelectors);
}
for (i = 0; i < newSelectors.length; i++) {
if (newSelectors[i].length > 0) {
paths.push(newSelectors[i]);
}
}
},
// Append `elements` onto the last selector of every path in `selectors`
// (creating a new selector when a path is empty).
mergeElementsOnToSelectors: function(elements, selectors) {
var i, sel;
if (selectors.length === 0) {
selectors.push([ new(tree.Selector)(elements) ]);
return;
}
for (i = 0; i < selectors.length; i++) {
sel = selectors[i];
// if the previous thing in sel is a parent this needs to join on to it
if (sel.length > 0) {
sel[sel.length - 1] = sel[sel.length - 1].createDerived(sel[sel.length - 1].elements.concat(elements));
}
else {
sel.push(new(tree.Selector)(elements));
}
}
}
};
})(require('../tree'));
(function (tree) {
tree.Selector = function (elements, extendList, condition, index, currentFileInfo, isReferenced) {
this.elements = elements;
this.extendList = extendList;
this.condition = condition;
this.currentFileInfo = currentFileInfo || {};
this.isReferenced = isReferenced;
if (!condition) {
this.evaldCondition = true;
}
};
tree.Selector.prototype = {
type: "Selector",
accept: function (visitor) {
if (this.elements) {
this.elements = visitor.visitArray(this.elements);
}
if (this.extendList) {
this.extendList = visitor.visitArray(this.extendList);
}
if (this.condition) {
this.condition = visitor.visit(this.condition);
}
},
createDerived: function(elements, extendList, evaldCondition) {
evaldCondition = (evaldCondition != null) ? evaldCondition : this.evaldCondition;
var newSelector = new(tree.Selector)(elements, extendList || this.extendList, null, this.index, this.currentFileInfo, this.isReferenced);
newSelector.evaldCondition = evaldCondition;
newSelector.mediaEmpty = this.mediaEmpty;
return newSelector;
},
match: function (other) {
var elements = this.elements,
len = elements.length,
olen, i;
other.CacheElements();
olen = other._elements.length;
if (olen === 0 || len < olen) {
return 0;
} else {
for (i = 0; i < olen; i++) {
if (elements[i].value !== other._elements[i]) {
return 0;
}
}
}
return olen; // return number of matched elements
},
CacheElements: function(){
var css = '', len, v, i;
if( !this._elements ){
len = this.elements.length;
for(i = 0; i < len; i++){
v = this.elements[i];
css += v.combinator.value;
if( !v.value.value ){
css += v.value;
continue;
}
if( typeof v.value.value !== "string" ){
css = '';
break;
}
css += v.value.value;
}
this._elements = css.match(/[,&#\.\w-]([\w-]|(\\.))*/g);
if (this._elements) {
if (this._elements[0] === "&") {
this._elements.shift();
}
} else {
this._elements = [];
}
}
},
isJustParentSelector: function() {
return !this.mediaEmpty &&
this.elements.length === 1 &&
this.elements[0].value === '&' &&
(this.elements[0].combinator.value === ' ' || this.elements[0].combinator.value === '');
},
eval: function (env) {
var evaldCondition = this.condition && this.condition.eval(env),
elements = this.elements, extendList = this.extendList;
elements = elements && elements.map(function (e) { return e.eval(env); });
extendList = extendList && extendList.map(function(extend) { return extend.eval(env); });
return this.createDerived(elements, extendList, evaldCondition);
},
genCSS: function (env, output) {
var i, element;
if ((!env || !env.firstSelector) && this.elements[0].combinator.value === "") {
output.add(' ', this.currentFileInfo, this.index);
}
if (!this._css) {
//TODO caching? speed comparison?
for(i = 0; i < this.elements.length; i++) {
element = this.elements[i];
element.genCSS(env, output);
}
}
},
toCSS: tree.toCSS,
// Flag this selector as used, so reference-imported files only output
// the parts that are actually referenced.
markReferenced: function () {
    this.isReferenced = true;
},
// Output-eligibility with respect to reference imports: always true for
// normal files, otherwise only once markReferenced() has been called.
getIsReferenced: function() {
    return !this.currentFileInfo.reference || this.isReferenced;
},
// Whether the guard condition (if any) evaluated truthy.
getIsOutput: function() {
    return this.evaldCondition;
}
};
})(require('../tree'));
(function (tree) {
    // A UnicodeDescriptor wraps a unicode-range token (e.g. `U+0-7F`)
    // and passes it through to the output verbatim.
    tree.UnicodeDescriptor = function (value) {
        this.value = value;
    };
    tree.UnicodeDescriptor.prototype = {
        type: "UnicodeDescriptor",
        // Evaluation is the identity - nothing inside the token to resolve.
        eval: function () {
            return this;
        },
        // Emit the raw token into the output stream.
        genCSS: function (env, output) {
            output.add(this.value);
        },
        toCSS: tree.toCSS
    };
})(require('../tree'));
(function (tree) {
    // A url(...) token. `isEvald` marks a URL whose path has already been
    // rewritten, so eval() applies rootpath/urlArgs at most once.
    tree.URL = function (val, currentFileInfo, isEvald) {
        this.value = val;
        this.currentFileInfo = currentFileInfo;
        this.isEvald = isEvald;
    };
    tree.URL.prototype = {
        type: "Url",
        accept: function (visitor) {
            this.value = visitor.visit(this.value);
        },
        genCSS: function (env, output) {
            output.add("url(");
            this.value.genCSS(env, output);
            output.add(")");
        },
        toCSS: tree.toCSS,
        eval: function (ctx) {
            var val = this.value.eval(ctx),
                rootpath;
            if (!this.isEvald) {
                // Add the base path if the URL is relative
                rootpath = this.currentFileInfo && this.currentFileInfo.rootpath;
                if (rootpath && typeof val.value === "string" && ctx.isPathRelative(val.value)) {
                    if (!val.quote) {
                        // unquoted url(): escape characters that would
                        // otherwise terminate or break the token
                        rootpath = rootpath.replace(/[\(\)'"\s]/g, function(match) { return "\\"+match; });
                    }
                    val.value = rootpath + val.value;
                }
                val.value = ctx.normalizePath(val.value);
                // Add url args if enabled (e.g. cache-busting query), but
                // never to data: URIs; insert before any #fragment.
                if (ctx.urlArgs) {
                    if (!val.value.match(/^\s*data:/)) {
                        var delimiter = val.value.indexOf('?') === -1 ? '?' : '&';
                        var urlArgs = delimiter + ctx.urlArgs;
                        if (val.value.indexOf('#') !== -1) {
                            val.value = val.value.replace('#', urlArgs + '#');
                        } else {
                            val.value += urlArgs;
                        }
                    }
                }
            }
            // mark the result evaluated so the rewrite happens only once
            return new(tree.URL)(val, this.currentFileInfo, true);
        }
    };
})(require('../tree'));
(function (tree) {
    // A Value holds a comma-separated list of expressions
    // (e.g. `font: a, b, c`).
    tree.Value = function (value) {
        this.value = value;
    };
    tree.Value.prototype = {
        type: "Value",
        accept: function (visitor) {
            if (this.value) {
                this.value = visitor.visitArray(this.value);
            }
        },
        // A single-item list collapses to that item's own evaluation;
        // otherwise each item is evaluated into a fresh Value node.
        eval: function (env) {
            var items = this.value;
            if (items.length === 1) {
                return items[0].eval(env);
            }
            var evaluated = items.map(function (item) {
                return item.eval(env);
            });
            return new(tree.Value)(evaluated);
        },
        // Emit items separated by "," (compressed) or ", ".
        genCSS: function (env, output) {
            var separator = (env && env.compress) ? ',' : ', ';
            var count = this.value.length;
            for (var i = 0; i < count; i++) {
                this.value[i].genCSS(env, output);
                if (i + 1 < count) {
                    output.add(separator);
                }
            }
        },
        toCSS: tree.toCSS
    };
})(require('../tree'));
(function (tree) {
    /**
     * A variable reference (e.g. `@width`), resolved against the frame
     * stack at eval time. A `@@name` reference is one level of indirection:
     * the inner variable is evaluated first and its value becomes the name
     * to look up.
     */
    tree.Variable = function (name, index, currentFileInfo) {
        this.name = name;
        this.index = index;
        this.currentFileInfo = currentFileInfo || {};
    };
    tree.Variable.prototype = {
        type: "Variable",
        eval: function (env) {
            var variable, name = this.name;
            if (name.indexOf('@@') === 0) {
                // propagate index/fileInfo so errors in the indirect lookup
                // are reported at this reference's location
                name = '@' + new(tree.Variable)(name.slice(1), this.index, this.currentFileInfo).eval(env).value;
            }
            // `evaluating` guards against `@a: @a;`-style cycles while we
            // walk the frames below.
            if (this.evaluating) {
                throw { type: 'Name',
                    message: "Recursive variable definition for " + name,
                    // fixed: was `this.currentFileInfo.file`, a property that
                    // never exists (parseEnv only ever sets `filename`)
                    filename: this.currentFileInfo.filename,
                    index: this.index };
            }
            this.evaluating = true;
            variable = tree.find(env.frames, function (frame) {
                var v = frame.variable(name);
                if (v) {
                    return v.value.eval(env);
                }
            });
            // always clear the guard, even when the lookup failed; otherwise
            // an undefined-variable error would leave the node poisoned and a
            // later retry would misreport "Recursive variable definition"
            this.evaluating = false;
            if (variable) {
                return variable;
            }
            throw { type: 'Name',
                message: "variable " + name + " is undefined",
                filename: this.currentFileInfo.filename,
                index: this.index };
        }
    };
})(require('../tree'));
(function (tree) {
    // Options copied verbatim from the caller-supplied options object onto
    // a parseEnv instance (see copyFromOriginal below).
    var parseCopyProperties = [
        'paths', // option - unmodified - paths to search for imports on
        'optimization', // option - optimization level (for the chunker)
        'files', // list of files that have been imported, used for import-once
        'contents', // map - filename to contents of all the files
        'contentsIgnoredChars', // map - filename to lines at the beginning of each file to ignore
        'relativeUrls', // option - whether to adjust URL's to be relative
        'rootpath', // option - rootpath to append to URL's
        'strictImports', // option -
        'insecure', // option - whether to allow imports from insecure ssl hosts
        'dumpLineNumbers', // option - whether to dump line numbers
        'compress', // option - whether to compress
        'processImports', // option - whether to process imports. if false then imports will not be imported
        'syncImport', // option - whether to import synchronously
        'javascriptEnabled',// option - whether JavaScript is enabled. if undefined, defaults to true
        'mime', // browser only - mime type for sheet import
        'useFileCache', // browser only - whether to use the per file session cache
        'currentFileInfo' // information about the current file - for error reporting and importing and making urls relative etc.
    ];
    //currentFileInfo = {
    // 'relativeUrls' - option - whether to adjust URL's to be relative
    // 'filename' - full resolved filename of current file
    // 'rootpath' - path to append to normal URLs for this node
    // 'currentDirectory' - path to the current file, absolute
    // 'rootFilename' - filename of the base file
    // 'entryPath' - absolute path to the entry file
    // 'reference' - whether the file should not be output and only output parts that are referenced
    //};
    // Environment used while parsing. Builds a default currentFileInfo for
    // the entry file when the caller did not supply one.
    tree.parseEnv = function(options) {
        copyFromOriginal(options, this, parseCopyProperties);
        if (!this.contents) { this.contents = {}; }
        if (!this.contentsIgnoredChars) { this.contentsIgnoredChars = {}; }
        if (!this.files) { this.files = {}; }
        if (!this.currentFileInfo) {
            var filename = (options && options.filename) || "input";
            // directory part of the filename (everything up to the last / or \)
            var entryPath = filename.replace(/[^\/\\]*$/, "");
            if (options) {
                // consumed: prevent the filename leaking into nested envs
                options.filename = null;
            }
            this.currentFileInfo = {
                filename: filename,
                relativeUrls: this.relativeUrls,
                rootpath: (options && options.rootpath) || "",
                currentDirectory: entryPath,
                entryPath: entryPath,
                rootFilename: filename
            };
        }
    };
    // Options copied verbatim onto an evalEnv instance.
    var evalCopyProperties = [
        'silent', // whether to swallow errors and warnings
        'verbose', // whether to log more activity
        'compress', // whether to compress
        'yuicompress', // whether to compress with the outside tool yui compressor
        'ieCompat', // whether to enforce IE compatibility (IE8 data-uri)
        'strictMath', // whether math has to be within parenthesis
        'strictUnits', // whether units need to evaluate correctly
        'cleancss', // whether to compress with clean-css
        'sourceMap', // whether to output a source map
        'importMultiple', // whether we are currently importing multiple copies
        'urlArgs' // whether to add args into url tokens
    ];
    // Environment used while evaluating; `frames` is the scope stack.
    tree.evalEnv = function(options, frames) {
        copyFromOriginal(options, this, evalCopyProperties);
        this.frames = frames || [];
    };
    // Parenthesis nesting tracking: math is only "on" inside parens when
    // strictMath is enabled (see isMathOn).
    tree.evalEnv.prototype.inParenthesis = function () {
        if (!this.parensStack) {
            this.parensStack = [];
        }
        this.parensStack.push(true);
    };
    tree.evalEnv.prototype.outOfParenthesis = function () {
        this.parensStack.pop();
    };
    tree.evalEnv.prototype.isMathOn = function () {
        return this.strictMath ? (this.parensStack && this.parensStack.length) : true;
    };
    // A path is relative unless it starts with a scheme (e.g. "http:") or "/".
    tree.evalEnv.prototype.isPathRelative = function (path) {
        return !/^(?:[a-z-]+:|\/)/.test(path);
    };
    // Collapses "." and ".." segments; ".." is kept when there is nothing
    // left to pop (or the previous segment is itself "..").
    tree.evalEnv.prototype.normalizePath = function( path ) {
        var
          segments = path.split("/").reverse(),
          segment;
        path = [];
        while (segments.length !== 0 ) {
            segment = segments.pop();
            switch( segment ) {
                case ".":
                    break;
                case "..":
                    if ((path.length === 0) || (path[path.length - 1] === "..")) {
                        path.push( segment );
                    } else {
                        path.pop();
                    }
                    break;
                default:
                    path.push( segment );
                    break;
            }
        }
        return path.join("/");
    };
    //todo - do the same for the toCSS env
    //tree.toCSSEnv = function (options) {
    //};
    // Copies only whitelisted own-properties from `original` to `destination`.
    var copyFromOriginal = function(original, destination, propertiesToCopy) {
        if (!original) { return; }
        for(var i = 0; i < propertiesToCopy.length; i++) {
            if (original.hasOwnProperty(propertiesToCopy[i])) {
                destination[propertiesToCopy[i]] = original[propertiesToCopy[i]];
            }
        }
    };
})(require('./tree'));
(function (tree) {
    // Shared, reused args object handed to every visit function - avoids
    // allocating one per node. visitDeeper is reset before each visit.
    var _visitArgs = { visitDeeper: true },
        _hasIndexed = false;
    function _noop(node) {
        return node;
    }
    // add .typeIndex to tree node types for lookup table
    function indexNodeTypes(parent, ticker) {
        var key, child;
        for (key in parent) {
            if (parent.hasOwnProperty(key)) {
                child = parent[key];
                switch (typeof child) {
                    case "function":
                        // ignore bound functions directly on tree which do not have a prototype
                        // or aren't nodes
                        if (child.prototype && child.prototype.type) {
                            child.prototype.typeIndex = ticker++;
                        }
                        break;
                    case "object":
                        ticker = indexNodeTypes(child, ticker);
                        break;
                }
            }
        }
        return ticker;
    }
    /**
     * Generic tree walker. `implementation` supplies visitXxx /
     * visitXxxOut callbacks; the matching callback for each node type is
     * resolved once and cached in _visitFnCache (in/out pairs stored at
     * typeIndex*2 and typeIndex*2+1).
     */
    tree.visitor = function(implementation) {
        this._implementation = implementation;
        this._visitFnCache = [];
        if (!_hasIndexed) {
            // one-time, process-wide numbering of node types
            indexNodeTypes(tree, 1);
            _hasIndexed = true;
        }
    };
    tree.visitor.prototype = {
        // Visit a single node: dispatch to the implementation's callback,
        // optionally replace the node (impl.isReplacing), then recurse via
        // node.accept unless the callback cleared visitArgs.visitDeeper.
        visit: function(node) {
            if (!node) {
                return node;
            }
            var nodeTypeIndex = node.typeIndex;
            if (!nodeTypeIndex) {
                // not an indexed tree node - nothing to dispatch on
                return node;
            }
            var visitFnCache = this._visitFnCache,
                impl = this._implementation,
                aryIndx = nodeTypeIndex << 1,
                outAryIndex = aryIndx | 1,
                func = visitFnCache[aryIndx],
                funcOut = visitFnCache[outAryIndex],
                visitArgs = _visitArgs,
                fnName;
            visitArgs.visitDeeper = true;
            if (!func) {
                // first time we see this node type: resolve and cache both
                // the "in" and "out" callbacks (or _noop placeholders)
                fnName = "visit" + node.type;
                func = impl[fnName] || _noop;
                funcOut = impl[fnName + "Out"] || _noop;
                visitFnCache[aryIndx] = func;
                visitFnCache[outAryIndex] = funcOut;
            }
            if (func !== _noop) {
                var newNode = func.call(impl, node, visitArgs);
                if (impl.isReplacing) {
                    node = newNode;
                }
            }
            if (visitArgs.visitDeeper && node && node.accept) {
                node.accept(this);
            }
            // fixed: use strict !== for consistency with the check above
            // (both operands are function references)
            if (funcOut !== _noop) {
                funcOut.call(impl, node);
            }
            return node;
        },
        // Visit every node in `nodes`. In replacing mode the callback's
        // return values are collected (arrays returned by a callback are
        // flattened in; empty arrays drop the node).
        visitArray: function(nodes, nonReplacing) {
            if (!nodes) {
                return nodes;
            }
            var cnt = nodes.length, i;
            // Non-replacing
            if (nonReplacing || !this._implementation.isReplacing) {
                for (i = 0; i < cnt; i++) {
                    this.visit(nodes[i]);
                }
                return nodes;
            }
            // Replacing
            var out = [];
            for (i = 0; i < cnt; i++) {
                var evald = this.visit(nodes[i]);
                // duck-type arrays via .splice (matches original behavior)
                if (!evald.splice) {
                    out.push(evald);
                } else if (evald.length) {
                    this.flatten(evald, out);
                }
            }
            return out;
        },
        // Recursively flatten nested arrays of nodes into `out`.
        flatten: function(arr, out) {
            if (!out) {
                out = [];
            }
            var cnt, i, item,
                nestedCnt, j, nestedItem;
            for (i = 0, cnt = arr.length; i < cnt; i++) {
                item = arr[i];
                if (!item.splice) {
                    out.push(item);
                    continue;
                }
                for (j = 0, nestedCnt = item.length; j < nestedCnt; j++) {
                    nestedItem = item[j];
                    if (!nestedItem.splice) {
                        out.push(nestedItem);
                    } else if (nestedItem.length) {
                        this.flatten(nestedItem, out);
                    }
                }
            }
            return out;
        }
    };
})(require('./tree'));
(function (tree) {
    // Walks the tree resolving @import statements (possibly asynchronously
    // via `importer.push`). `finish` is called once the root visit AND all
    // outstanding imports have completed (tracked by importCount).
    // onceFileDetectionMap implements import-once; recursionDetector guards
    // against circular imports (copied per sub-visitor).
    tree.importVisitor = function(importer, finish, evalEnv, onceFileDetectionMap, recursionDetector) {
        this._visitor = new tree.visitor(this);
        this._importer = importer;
        this._finish = finish;
        this.env = evalEnv || new tree.evalEnv();
        this.importCount = 0;
        this.onceFileDetectionMap = onceFileDetectionMap || {};
        this.recursionDetector = {};
        if (recursionDetector) {
            // shallow copy so nested visitors can add entries independently
            for(var fullFilename in recursionDetector) {
                if (recursionDetector.hasOwnProperty(fullFilename)) {
                    this.recursionDetector[fullFilename] = true;
                }
            }
        }
    };
    tree.importVisitor.prototype = {
        isReplacing: true,
        run: function (root) {
            var error;
            try {
                // process the contents
                this._visitor.visit(root);
            }
            catch(e) {
                // defer the error: finish must still fire exactly once
                error = e;
            }
            this.isFinished = true;
            // if imports are still in flight, the last subFinish fires _finish
            if (this.importCount === 0) {
                this._finish(error);
            }
        },
        visitImport: function (importNode, visitArgs) {
            var importVisitor = this,
                evaldImportNode,
                inlineCSS = importNode.options.inline;
            if (!importNode.css || inlineCSS) {
                try {
                    evaldImportNode = importNode.evalForImport(this.env);
                } catch(e){
                    if (!e.filename) { e.index = importNode.index; e.filename = importNode.currentFileInfo.filename; }
                    // attempt to eval properly and treat as css
                    importNode.css = true;
                    // if that fails, this error will be thrown
                    importNode.error = e;
                }
                if (evaldImportNode && (!evaldImportNode.css || inlineCSS)) {
                    importNode = evaldImportNode;
                    this.importCount++;
                    var env = new tree.evalEnv(this.env, this.env.frames.slice(0));
                    if (importNode.options.multiple) {
                        env.importMultiple = true;
                    }
                    // asynchronous: the callback may run after run() returns
                    this._importer.push(importNode.getPath(), importNode.currentFileInfo, importNode.options, function (e, root, importedAtRoot, fullPath) {
                        if (e && !e.filename) { e.index = importNode.index; e.filename = importNode.currentFileInfo.filename; }
                        if (!env.importMultiple) {
                            // import-once: skip statically if already seen at
                            // root, else decide lazily at output time
                            if (importedAtRoot) {
                                importNode.skip = true;
                            } else {
                                importNode.skip = function() {
                                    if (fullPath in importVisitor.onceFileDetectionMap) {
                                        return true;
                                    }
                                    importVisitor.onceFileDetectionMap[fullPath] = true;
                                    return false;
                                };
                            }
                        }
                        var subFinish = function(e) {
                            importVisitor.importCount--;
                            // last completed import triggers the overall finish
                            if (importVisitor.importCount === 0 && importVisitor.isFinished) {
                                importVisitor._finish(e);
                            }
                        };
                        if (root) {
                            importNode.root = root;
                            importNode.importedFilename = fullPath;
                            var duplicateImport = importedAtRoot || fullPath in importVisitor.recursionDetector;
                            // recurse into the imported file unless it is an
                            // inline include or a duplicate we already walked
                            if (!inlineCSS && (env.importMultiple || !duplicateImport)) {
                                importVisitor.recursionDetector[fullPath] = true;
                                new(tree.importVisitor)(importVisitor._importer, subFinish, env, importVisitor.onceFileDetectionMap, importVisitor.recursionDetector)
                                    .run(root);
                                return;
                            }
                        }
                        subFinish();
                    });
                }
            }
            visitArgs.visitDeeper = false;
            return importNode;
        },
        // Rules contain no imports - prune the walk.
        visitRule: function (ruleNode, visitArgs) {
            visitArgs.visitDeeper = false;
            return ruleNode;
        },
        // The remaining visitors maintain env.frames as a scope stack so
        // evalForImport sees the correct variables.
        visitDirective: function (directiveNode, visitArgs) {
            this.env.frames.unshift(directiveNode);
            return directiveNode;
        },
        visitDirectiveOut: function (directiveNode) {
            this.env.frames.shift();
        },
        visitMixinDefinition: function (mixinDefinitionNode, visitArgs) {
            this.env.frames.unshift(mixinDefinitionNode);
            return mixinDefinitionNode;
        },
        visitMixinDefinitionOut: function (mixinDefinitionNode) {
            this.env.frames.shift();
        },
        visitRuleset: function (rulesetNode, visitArgs) {
            this.env.frames.unshift(rulesetNode);
            return rulesetNode;
        },
        visitRulesetOut: function (rulesetNode) {
            this.env.frames.shift();
        },
        visitMedia: function (mediaNode, visitArgs) {
            this.env.frames.unshift(mediaNode.ruleset);
            return mediaNode;
        },
        visitMediaOut: function (mediaNode) {
            this.env.frames.shift();
        }
    };
})(require('./tree'));
(function (tree) {
    // Computes, for every ruleset, the full selector paths produced by
    // nesting (parent selectors joined onto child selectors). Results are
    // stored on rulesetNode.paths; `contexts` is a stack of parent paths.
    tree.joinSelectorVisitor = function() {
        this.contexts = [[]];
        this._visitor = new tree.visitor(this);
    };
    tree.joinSelectorVisitor.prototype = {
        run: function (root) {
            return this._visitor.visit(root);
        },
        // Declarations and mixin definitions contain no nested selectors
        // that need joining here - prune the walk.
        visitRule: function (ruleNode, visitArgs) {
            visitArgs.visitDeeper = false;
        },
        visitMixinDefinition: function (mixinDefinitionNode, visitArgs) {
            visitArgs.visitDeeper = false;
        },
        visitRuleset: function (rulesetNode, visitArgs) {
            var context = this.contexts[this.contexts.length - 1],
                paths = [], selectors;
            // `paths` becomes the parent context for nested rulesets
            this.contexts.push(paths);
            if (! rulesetNode.root) {
                selectors = rulesetNode.selectors;
                if (selectors) {
                    // drop selectors whose guard evaluated falsy; the inner
                    // assignment also nulls the local when none remain
                    selectors = selectors.filter(function(selector) { return selector.getIsOutput(); });
                    rulesetNode.selectors = selectors.length ? selectors : (selectors = null);
                    if (selectors) { rulesetNode.joinSelectors(paths, context, selectors); }
                }
                // no selectors left means the ruleset can never be output
                if (!selectors) { rulesetNode.rules = null; }
                rulesetNode.paths = paths;
            }
        },
        visitRulesetOut: function (rulesetNode) {
            this.contexts.length = this.contexts.length - 1;
        },
        visitMedia: function (mediaNode, visitArgs) {
            var context = this.contexts[this.contexts.length - 1];
            // a media block acts as a root when there is no enclosing
            // selector scope (or the enclosing context is itself multi-media)
            mediaNode.rules[0].root = (context.length === 0 || context[0].multiMedia);
        }
    };
})(require('./tree'));
(function (tree) {
    // Final pass before CSS generation: strips variables, mixin
    // definitions, extends, silent comments and unreferenced nodes;
    // hoists nested rulesets; merges and de-duplicates declarations.
    tree.toCSSVisitor = function(env) {
        this._visitor = new tree.visitor(this);
        this._env = env;
    };
    tree.toCSSVisitor.prototype = {
        isReplacing: true,
        run: function (root) {
            return this._visitor.visit(root);
        },
        // Variable declarations never reach the output.
        visitRule: function (ruleNode, visitArgs) {
            if (ruleNode.variable) {
                return [];
            }
            return ruleNode;
        },
        visitMixinDefinition: function (mixinNode, visitArgs) {
            // mixin definitions do not get eval'd - this means they keep state
            // so we have to clear that state here so it isn't used if toCSS is called twice
            mixinNode.frames = [];
            return [];
        },
        // Extends were applied in the extend pass - drop the nodes.
        visitExtend: function (extendNode, visitArgs) {
            return [];
        },
        visitComment: function (commentNode, visitArgs) {
            if (commentNode.isSilent(this._env)) {
                return [];
            }
            return commentNode;
        },
        visitMedia: function(mediaNode, visitArgs) {
            mediaNode.accept(this._visitor);
            visitArgs.visitDeeper = false;
            // a media block emptied by the visit above is dropped entirely
            if (!mediaNode.rules.length) {
                return [];
            }
            return mediaNode;
        },
        visitDirective: function(directiveNode, visitArgs) {
            if (directiveNode.currentFileInfo.reference && !directiveNode.isReferenced) {
                return [];
            }
            if (directiveNode.name === "@charset") {
                // Only output the debug info together with subsequent @charset definitions
                // a comment (or @media statement) before the actual @charset directive would
                // be considered illegal css as it has to be on the first line
                if (this.charset) {
                    if (directiveNode.debugInfo) {
                        var comment = new tree.Comment("/* " + directiveNode.toCSS(this._env).replace(/\n/g, "")+" */\n");
                        comment.debugInfo = directiveNode.debugInfo;
                        return this._visitor.visit(comment);
                    }
                    // duplicate @charset: silently dropped
                    return [];
                }
                this.charset = true;
            }
            return directiveNode;
        },
        // CSS forbids bare declarations at the top level - fail fast.
        checkPropertiesInRoot: function(rules) {
            var ruleNode;
            for(var i = 0; i < rules.length; i++) {
                ruleNode = rules[i];
                if (ruleNode instanceof tree.Rule && !ruleNode.variable) {
                    throw { message: "properties must be inside selector blocks, they cannot be in the root.",
                        index: ruleNode.index, filename: ruleNode.currentFileInfo ? ruleNode.currentFileInfo.filename : null};
                }
            }
        },
        // Hoists nested rulesets out to the top level, prunes unreferenced
        // selector paths, merges/de-duplicates rules, and decides whether
        // the ruleset itself survives into the output.
        visitRuleset: function (rulesetNode, visitArgs) {
            var rule, rulesets = [];
            if (rulesetNode.firstRoot) {
                this.checkPropertiesInRoot(rulesetNode.rules);
            }
            if (! rulesetNode.root) {
                if (rulesetNode.paths) {
                    // keep only paths that are referenced and guard-enabled;
                    // also normalize a leading descendant combinator away
                    rulesetNode.paths = rulesetNode.paths
                        .filter(function(p) {
                            var i;
                            if (p[0].elements[0].combinator.value === ' ') {
                                p[0].elements[0].combinator = new(tree.Combinator)('');
                            }
                            for(i = 0; i < p.length; i++) {
                                if (p[i].getIsReferenced() && p[i].getIsOutput()) {
                                    return true;
                                }
                            }
                            return false;
                        });
                }
                // Compile rules and rulesets
                var nodeRules = rulesetNode.rules, nodeRuleCnt = nodeRules ? nodeRules.length : 0;
                // note: index only advances when nothing was spliced out
                for (var i = 0; i < nodeRuleCnt; ) {
                    rule = nodeRules[i];
                    if (rule && rule.rules) {
                        // visit because we are moving them out from being a child
                        rulesets.push(this._visitor.visit(rule));
                        nodeRules.splice(i, 1);
                        nodeRuleCnt--;
                        continue;
                    }
                    i++;
                }
                // accept the visitor to remove rules and refactor itself
                // then we can decide now whether we want it or not
                if (nodeRuleCnt > 0) {
                    rulesetNode.accept(this._visitor);
                } else {
                    rulesetNode.rules = null;
                }
                visitArgs.visitDeeper = false;
                // re-read rules after each mutating helper - they may splice
                nodeRules = rulesetNode.rules;
                if (nodeRules) {
                    this._mergeRules(nodeRules);
                    nodeRules = rulesetNode.rules;
                }
                if (nodeRules) {
                    this._removeDuplicateRules(nodeRules);
                    nodeRules = rulesetNode.rules;
                }
                // now decide whether we keep the ruleset
                if (nodeRules && nodeRules.length > 0 && rulesetNode.paths.length > 0) {
                    rulesets.splice(0, 0, rulesetNode);
                }
            } else {
                rulesetNode.accept(this._visitor);
                visitArgs.visitDeeper = false;
                if (rulesetNode.firstRoot || (rulesetNode.rules && rulesetNode.rules.length > 0)) {
                    rulesets.splice(0, 0, rulesetNode);
                }
            }
            if (rulesets.length === 1) {
                return rulesets[0];
            }
            return rulesets;
        },
        // Removes later duplicates of an identical name+CSS declaration,
        // scanning backwards so the last occurrence wins.
        _removeDuplicateRules: function(rules) {
            if (!rules) { return; }
            // remove duplicates
            var ruleCache = {},
                ruleList, rule, i;
            for(i = rules.length - 1; i >= 0 ; i--) {
                rule = rules[i];
                if (rule instanceof tree.Rule) {
                    if (!ruleCache[rule.name]) {
                        ruleCache[rule.name] = rule;
                    } else {
                        ruleList = ruleCache[rule.name];
                        // lazily expand the cache entry from a node to a
                        // list of CSS strings once a second occurrence shows up
                        if (ruleList instanceof tree.Rule) {
                            ruleList = ruleCache[rule.name] = [ruleCache[rule.name].toCSS(this._env)];
                        }
                        var ruleCSS = rule.toCSS(this._env);
                        if (ruleList.indexOf(ruleCSS) !== -1) {
                            rules.splice(i, 1);
                        } else {
                            ruleList.push(ruleCSS);
                        }
                    }
                }
            }
        },
        // Combines declarations flagged with +/+_ merge into a single
        // declaration: "+" groups become comma-separated, consecutive
        // values within a group are space-separated expressions.
        _mergeRules: function (rules) {
            if (!rules) { return; }
            var groups = {},
                parts,
                rule,
                key;
            for (var i = 0; i < rules.length; i++) {
                rule = rules[i];
                if ((rule instanceof tree.Rule) && rule.merge) {
                    // group by name + importance
                    key = [rule.name,
                        rule.important ? "!" : ""].join(",");
                    if (!groups[key]) {
                        groups[key] = [];
                    } else {
                        // later members are folded into the first - remove them
                        rules.splice(i--, 1);
                    }
                    groups[key].push(rule);
                }
            }
            Object.keys(groups).map(function (k) {
                function toExpression(values) {
                    return new (tree.Expression)(values.map(function (p) {
                        return p.value;
                    }));
                }
                function toValue(values) {
                    return new (tree.Value)(values.map(function (p) {
                        return p;
                    }));
                }
                parts = groups[k];
                if (parts.length > 1) {
                    rule = parts[0];
                    var spacedGroups = [];
                    var lastSpacedGroup = [];
                    parts.map(function (p) {
                        if (p.merge==="+") {
                            // "+" starts a new comma-separated group
                            if (lastSpacedGroup.length > 0) {
                                spacedGroups.push(toExpression(lastSpacedGroup));
                            }
                            lastSpacedGroup = [];
                        }
                        lastSpacedGroup.push(p);
                    });
                    spacedGroups.push(toExpression(lastSpacedGroup));
                    rule.value = toValue(spacedGroups);
                }
            });
        }
    };
})(require('./tree'));
(function (tree) {
/*jshint loopfunc:true */
// First pass of :extend processing: finds every extend in the tree,
// attaches it to its selector path and ruleset, and collects all extends
// per scope (root / @media / directive) on allExtendsStack.
tree.extendFinderVisitor = function() {
    this._visitor = new tree.visitor(this);
    this.contexts = [];
    this.allExtendsStack = [[]];
};
tree.extendFinderVisitor.prototype = {
    run: function (root) {
        root = this._visitor.visit(root);
        root.allExtends = this.allExtendsStack[0];
        return root;
    },
    // Declarations and mixin definitions cannot contain extends - prune.
    visitRule: function (ruleNode, visitArgs) {
        visitArgs.visitDeeper = false;
    },
    visitMixinDefinition: function (mixinDefinitionNode, visitArgs) {
        visitArgs.visitDeeper = false;
    },
    visitRuleset: function (rulesetNode, visitArgs) {
        if (rulesetNode.root) {
            return;
        }
        var i, j, extend, allSelectorsExtendList = [], extendList;
        // get &:extend(.a); rules which apply to all selectors in this ruleset
        var rules = rulesetNode.rules, ruleCnt = rules ? rules.length : 0;
        for(i = 0; i < ruleCnt; i++) {
            if (rulesetNode.rules[i] instanceof tree.Extend) {
                allSelectorsExtendList.push(rules[i]);
                rulesetNode.extendOnEveryPath = true;
            }
        }
        // now find every selector and apply the extends that apply to all extends
        // and the ones which apply to an individual extend
        var paths = rulesetNode.paths;
        for(i = 0; i < paths.length; i++) {
            var selectorPath = paths[i],
                selector = selectorPath[selectorPath.length - 1],
                selExtendList = selector.extendList;
            extendList = selExtendList ? selExtendList.slice(0).concat(allSelectorsExtendList)
                                       : allSelectorsExtendList;
            if (extendList) {
                // clone so per-path state (selfSelectors etc.) stays separate
                extendList = extendList.map(function(allSelectorsExtend) {
                    return allSelectorsExtend.clone();
                });
            }
            for(j = 0; j < extendList.length; j++) {
                this.foundExtends = true;
                extend = extendList[j];
                extend.findSelfSelectors(selectorPath);
                extend.ruleset = rulesetNode;
                if (j === 0) { extend.firstExtendOnThisSelectorPath = true; }
                this.allExtendsStack[this.allExtendsStack.length-1].push(extend);
            }
        }
        this.contexts.push(rulesetNode.selectors);
    },
    visitRulesetOut: function (rulesetNode) {
        if (!rulesetNode.root) {
            this.contexts.length = this.contexts.length - 1;
        }
    },
    // @media and directives each get their own extend scope.
    visitMedia: function (mediaNode, visitArgs) {
        mediaNode.allExtends = [];
        this.allExtendsStack.push(mediaNode.allExtends);
    },
    visitMediaOut: function (mediaNode) {
        this.allExtendsStack.length = this.allExtendsStack.length - 1;
    },
    visitDirective: function (directiveNode, visitArgs) {
        directiveNode.allExtends = [];
        this.allExtendsStack.push(directiveNode.allExtends);
    },
    visitDirectiveOut: function (directiveNode) {
        this.allExtendsStack.length = this.allExtendsStack.length - 1;
    }
};
// Second pass of :extend processing: chains extends together and then
// applies them to matching selector paths.
tree.processExtendsVisitor = function() {
    this._visitor = new tree.visitor(this);
};
tree.processExtendsVisitor.prototype = {
    run: function(root) {
        // find all extends first; skip the whole pass if there are none
        var extendFinder = new tree.extendFinderVisitor();
        extendFinder.run(root);
        if (!extendFinder.foundExtends) { return root; }
        // resolve extends-of-extends before applying anything
        root.allExtends = root.allExtends.concat(this.doExtendChaining(root.allExtends, root.allExtends));
        this.allExtendsStack = [root.allExtends];
        return this._visitor.visit(root);
    },
doExtendChaining: function (extendsList, extendsListTarget, iterationCount) {
    //
    // chaining is different from normal extension.. if we extend an extend then we are not just copying, altering and pasting
    // the selector we would do normally, but we are also adding an extend with the same target selector
    // this means this new extend can then go and alter other extends
    //
    // this method deals with all the chaining work - without it, extend is flat and doesn't work on other extend selectors
    // this is also the most expensive.. and a match on one selector can cause an extension of a selector we had already processed if
    // we look at each selector at a time, as is done in visitRuleset
    var extendIndex, targetExtendIndex, matches, extendsToAdd = [], newSelector, extendVisitor = this, selectorPath, extend, targetExtend, newExtend;
    iterationCount = iterationCount || 0;
    //loop through comparing every extend with every target extend.
    // a target extend is the one on the ruleset we are looking at copy/edit/pasting in place
    // e.g.  .a:extend(.b) {}  and .b:extend(.c) {} then the first extend extends the second one
    // and the second is the target.
    // the separation into two lists allows us to process a subset of chains with a bigger set, as is the
    // case when processing media queries
    for(extendIndex = 0; extendIndex < extendsList.length; extendIndex++){
        for(targetExtendIndex = 0; targetExtendIndex < extendsListTarget.length; targetExtendIndex++){
            extend = extendsList[extendIndex];
            targetExtend = extendsListTarget[targetExtendIndex];
            // look for circular references
            if( extend.parent_ids.indexOf( targetExtend.object_id ) >= 0 ){ continue; }
            // find a match in the target extends self selector (the bit before :extend)
            selectorPath = [targetExtend.selfSelectors[0]];
            matches = extendVisitor.findMatch(extend, selectorPath);
            if (matches.length) {
                // we found a match, so for each self selector..
                extend.selfSelectors.forEach(function(selfSelector) {
                    // process the extend as usual
                    newSelector = extendVisitor.extendSelector(matches, selectorPath, selfSelector);
                    // but now we create a new extend from it
                    newExtend = new(tree.Extend)(targetExtend.selector, targetExtend.option, 0);
                    newExtend.selfSelectors = newSelector;
                    // add the extend onto the list of extends for that selector
                    newSelector[newSelector.length-1].extendList = [newExtend];
                    // record that we need to add it.
                    extendsToAdd.push(newExtend);
                    newExtend.ruleset = targetExtend.ruleset;
                    //remember its parents for circular references
                    newExtend.parent_ids = newExtend.parent_ids.concat(targetExtend.parent_ids, extend.parent_ids);
                    // only process the selector once.. if we have :extend(.a,.b) then multiple
                    // extends will look at the same selector path, so when extending
                    // we know that any others will be duplicates in terms of what is added to the css
                    if (targetExtend.firstExtendOnThisSelectorPath) {
                        newExtend.firstExtendOnThisSelectorPath = true;
                        targetExtend.ruleset.paths.push(newSelector);
                    }
                });
            }
        }
    }
    if (extendsToAdd.length) {
        // try to detect circular references to stop a stack overflow.
        // may no longer be needed.
        this.extendChainCount++;
        if (iterationCount > 100) {
            var selectorOne = "{unable to calculate}";
            var selectorTwo = "{unable to calculate}";
            try
            {
                selectorOne = extendsToAdd[0].selfSelectors[0].toCSS();
                selectorTwo = extendsToAdd[0].selector.toCSS();
            }
            catch(e) {}
            throw {message: "extend circular reference detected. One of the circular extends is currently:"+selectorOne+":extend(" + selectorTwo+")"};
        }
        // now process the new extends on the existing rules so that we can handle a extending b extending c extending d extending e...
        return extendsToAdd.concat(extendVisitor.doExtendChaining(extendsToAdd, extendsListTarget, iterationCount+1));
    } else {
        return extendsToAdd;
    }
},
// Declarations, mixin definitions and selectors contain nothing that
// needs extending at this point - prune the walk.
visitRule: function (ruleNode, visitArgs) {
    visitArgs.visitDeeper = false;
},
visitMixinDefinition: function (mixinDefinitionNode, visitArgs) {
    visitArgs.visitDeeper = false;
},
visitSelector: function (selectorNode, visitArgs) {
    visitArgs.visitDeeper = false;
},
// Applies the extends in the current scope to every matching selector
// path of this ruleset, appending the generated selector paths.
visitRuleset: function (rulesetNode, visitArgs) {
    if (rulesetNode.root) {
        return;
    }
    var matches, pathIndex, extendIndex, allExtends = this.allExtendsStack[this.allExtendsStack.length-1], selectorsToAdd = [], extendVisitor = this, selectorPath;
    // look at each selector path in the ruleset, find any extend matches and then copy, find and replace
    for(extendIndex = 0; extendIndex < allExtends.length; extendIndex++) {
        for(pathIndex = 0; pathIndex < rulesetNode.paths.length; pathIndex++) {
            selectorPath = rulesetNode.paths[pathIndex];
            // extending extends happens initially, before the main pass
            if (rulesetNode.extendOnEveryPath) { continue; }
            var extendList = selectorPath[selectorPath.length-1].extendList;
            if (extendList && extendList.length) { continue; }
            matches = this.findMatch(allExtends[extendIndex], selectorPath);
            if (matches.length) {
                // add one new selector path per self selector of the extend
                allExtends[extendIndex].selfSelectors.forEach(function(selfSelector) {
                    selectorsToAdd.push(extendVisitor.extendSelector(matches, selectorPath, selfSelector));
                });
            }
        }
    }
    rulesetNode.paths = rulesetNode.paths.concat(selectorsToAdd);
},
findMatch: function (extend, haystackSelectorPath) {
    //
    // look through the haystack selector path to try and find the needle - extend.selector
    // returns an array of selector matches that can then be replaced
    //
    // Matches are non-overlapping; each match records where it starts
    // (pathIndex/index) and ends (endPathIndex/endPathElementIndex).
    var haystackSelectorIndex, hackstackSelector, hackstackElementIndex, haystackElement,
        targetCombinator, i,
        extendVisitor = this,
        needleElements = extend.selector.elements,
        potentialMatches = [], potentialMatch, matches = [];
    // loop through the haystack elements
    for(haystackSelectorIndex = 0; haystackSelectorIndex < haystackSelectorPath.length; haystackSelectorIndex++) {
        hackstackSelector = haystackSelectorPath[haystackSelectorIndex];
        for(hackstackElementIndex = 0; hackstackElementIndex < hackstackSelector.elements.length; hackstackElementIndex++) {
            haystackElement = hackstackSelector.elements[hackstackElementIndex];
            // if we allow elements before our match we can add a potential match every time. otherwise only at the first element.
            if (extend.allowBefore || (haystackSelectorIndex === 0 && hackstackElementIndex === 0)) {
                potentialMatches.push({pathIndex: haystackSelectorIndex, index: hackstackElementIndex, matched: 0, initialCombinator: haystackElement.combinator});
            }
            // advance every in-flight potential match by this element
            for(i = 0; i < potentialMatches.length; i++) {
                potentialMatch = potentialMatches[i];
                // selectors add " " onto the first element. When we use & it joins the selectors together, but if we don't
                // then each selector in haystackSelectorPath has a space before it added in the toCSS phase. so we need to work out
                // what the resulting combinator will be
                targetCombinator = haystackElement.combinator.value;
                if (targetCombinator === '' && hackstackElementIndex === 0) {
                    targetCombinator = ' ';
                }
                // if we don't match, null our match to indicate failure
                if (!extendVisitor.isElementValuesEqual(needleElements[potentialMatch.matched].value, haystackElement.value) ||
                    (potentialMatch.matched > 0 && needleElements[potentialMatch.matched].combinator.value !== targetCombinator)) {
                    potentialMatch = null;
                } else {
                    potentialMatch.matched++;
                }
                // if we are still valid and have finished, test whether we have elements after and whether these are allowed
                if (potentialMatch) {
                    potentialMatch.finished = potentialMatch.matched === needleElements.length;
                    if (potentialMatch.finished &&
                        (!extend.allowAfter && (hackstackElementIndex+1 < hackstackSelector.elements.length || haystackSelectorIndex+1 < haystackSelectorPath.length))) {
                        potentialMatch = null;
                    }
                }
                // if null we remove, if not, we are still valid, so either push as a valid match or continue
                if (potentialMatch) {
                    if (potentialMatch.finished) {
                        potentialMatch.length = needleElements.length;
                        potentialMatch.endPathIndex = haystackSelectorIndex;
                        potentialMatch.endPathElementIndex = hackstackElementIndex + 1; // index after end of match
                        potentialMatches.length = 0; // we don't allow matches to overlap, so start matching again
                        matches.push(potentialMatch);
                    }
                } else {
                    potentialMatches.splice(i, 1);
                    i--;
                }
            }
        }
    }
    return matches;
},
// Structural equality of two element values: plain strings compare
// directly; attribute selectors compare op/key/value; nested selectors
// compare element-by-element (with the usual ''-vs-' ' leniency on the
// first combinator). Anything else is considered unequal.
isElementValuesEqual: function(elementValue1, elementValue2) {
    if (typeof elementValue1 === "string" || typeof elementValue2 === "string") {
        return elementValue1 === elementValue2;
    }
    if (elementValue1 instanceof tree.Attribute) {
        if (elementValue1.op !== elementValue2.op || elementValue1.key !== elementValue2.key) {
            return false;
        }
        if (!elementValue1.value || !elementValue2.value) {
            // equal only if both are value-less
            if (elementValue1.value || elementValue2.value) {
                return false;
            }
            return true;
        }
        // unwrap quoted values before comparing
        elementValue1 = elementValue1.value.value || elementValue1.value;
        elementValue2 = elementValue2.value.value || elementValue2.value;
        return elementValue1 === elementValue2;
    }
    elementValue1 = elementValue1.value;
    elementValue2 = elementValue2.value;
    if (elementValue1 instanceof tree.Selector) {
        if (!(elementValue2 instanceof tree.Selector) || elementValue1.elements.length !== elementValue2.elements.length) {
            return false;
        }
        for(var i = 0; i <elementValue1.elements.length; i++) {
            if (elementValue1.elements[i].combinator.value !== elementValue2.elements[i].combinator.value) {
                // the first combinator may be '' on one side and ' ' on the
                // other - those render identically, so treat as equal
                if (i !== 0 || (elementValue1.elements[i].combinator.value || ' ') !== (elementValue2.elements[i].combinator.value || ' ')) {
                    return false;
                }
            }
            if (!this.isElementValuesEqual(elementValue1.elements[i].value, elementValue2.elements[i].value)) {
                return false;
            }
        }
        return true;
    }
    return false;
},
//for a set of matches, replace each match with the replacement selector
// Builds a new selector path by copying the unmatched stretches of
// selectorPath and splicing in replacementSelector wherever a match was
// found (the match's initial combinator is preserved on the first
// replacement element). The two "current*" indices track how far along
// the original path has been consumed.
extendSelector:function (matches, selectorPath, replacementSelector) {
    var currentSelectorPathIndex = 0,
        currentSelectorPathElementIndex = 0,
        path = [],
        matchIndex,
        selector,
        firstElement,
        match,
        newElements;
    for (matchIndex = 0; matchIndex < matches.length; matchIndex++) {
        match = matches[matchIndex];
        selector = selectorPath[match.pathIndex];
        firstElement = new tree.Element(
            match.initialCombinator,
            replacementSelector.elements[0].value,
            replacementSelector.elements[0].index,
            replacementSelector.elements[0].currentFileInfo
        );
        // flush any partially consumed selector before jumping ahead
        if (match.pathIndex > currentSelectorPathIndex && currentSelectorPathElementIndex > 0) {
            path[path.length - 1].elements = path[path.length - 1].elements.concat(selectorPath[currentSelectorPathIndex].elements.slice(currentSelectorPathElementIndex));
            currentSelectorPathElementIndex = 0;
            currentSelectorPathIndex++;
        }
        newElements = selector.elements
            .slice(currentSelectorPathElementIndex, match.index)
            .concat([firstElement])
            .concat(replacementSelector.elements.slice(1));
        if (currentSelectorPathIndex === match.pathIndex && matchIndex > 0) {
            // second match inside the same selector: extend the last one
            path[path.length - 1].elements =
                path[path.length - 1].elements.concat(newElements);
        } else {
            // copy the untouched selectors in between, then start a new one
            path = path.concat(selectorPath.slice(currentSelectorPathIndex, match.pathIndex));
            path.push(new tree.Selector(
                newElements
            ));
        }
        currentSelectorPathIndex = match.endPathIndex;
        currentSelectorPathElementIndex = match.endPathElementIndex;
        if (currentSelectorPathElementIndex >= selectorPath[currentSelectorPathIndex].elements.length) {
            currentSelectorPathElementIndex = 0;
            currentSelectorPathIndex++;
        }
    }
    // append whatever remains of the original path after the last match
    if (currentSelectorPathIndex < selectorPath.length && currentSelectorPathElementIndex > 0) {
        path[path.length - 1].elements = path[path.length - 1].elements.concat(selectorPath[currentSelectorPathIndex].elements.slice(currentSelectorPathElementIndex));
        currentSelectorPathIndex++;
    }
    path = path.concat(selectorPath.slice(currentSelectorPathIndex, selectorPath.length));
    return path;
},
visitRulesetOut: function (rulesetNode) {
// Intentionally empty - rulesets need no teardown when the visitor leaves them.
},
visitMedia: function (mediaNode, visitArgs) {
var newAllExtends = mediaNode.allExtends.concat(this.allExtendsStack[this.allExtendsStack.length-1]);
newAllExtends = newAllExtends.concat(this.doExtendChaining(newAllExtends, mediaNode.allExtends));
this.allExtendsStack.push(newAllExtends);
},
visitMediaOut: function (mediaNode) {
this.allExtendsStack.length = this.allExtendsStack.length - 1;
},
visitDirective: function (directiveNode, visitArgs) {
var newAllExtends = directiveNode.allExtends.concat(this.allExtendsStack[this.allExtendsStack.length-1]);
newAllExtends = newAllExtends.concat(this.doExtendChaining(newAllExtends, directiveNode.allExtends));
this.allExtendsStack.push(newAllExtends);
},
visitDirectiveOut: function (directiveNode) {
this.allExtendsStack.length = this.allExtendsStack.length - 1;
}
};
})(require('./tree'));
(function (tree) {
// Collects generated CSS chunks while recording source-map mappings back to
// the original .less inputs. `options` supplies the root AST node, the map of
// input filenames to contents, output/map filenames and path options.
tree.sourceMapOutput = function (options) {
this._css = [];
this._rootNode = options.rootNode;
this._writeSourceMap = options.writeSourceMap;
this._contentsMap = options.contentsMap;
// Count of banner/vars characters prepended per file, to subtract from indices.
this._contentsIgnoredCharsMap = options.contentsIgnoredCharsMap;
this._sourceMapFilename = options.sourceMapFilename;
this._outputFilename = options.outputFilename;
this._sourceMapURL = options.sourceMapURL;
if (options.sourceMapBasepath) {
// Normalize to forward slashes so prefix stripping works on Windows paths.
this._sourceMapBasepath = options.sourceMapBasepath.replace(/\\/g, '/');
}
this._sourceMapRootpath = options.sourceMapRootpath;
this._outputSourceFiles = options.outputSourceFiles;
// Allow injection of a generator (for tests/embedding); default to source-map's.
this._sourceMapGeneratorConstructor = options.sourceMapGenerator || require("source-map").SourceMapGenerator;
// Ensure the rootpath ends with a slash so it can be prepended to filenames.
if (this._sourceMapRootpath && this._sourceMapRootpath.charAt(this._sourceMapRootpath.length-1) !== '/') {
this._sourceMapRootpath += '/';
}
this._lineNumber = 0;
this._column = 0;
};
// Produces the filename as it should appear in the source map: forward
// slashes, the configured basepath stripped, and the rootpath prepended.
tree.sourceMapOutput.prototype.normalizeFilename = function(filename) {
    var normalized = filename.replace(/\\/g, '/');
    var base = this._sourceMapBasepath;
    if (base && normalized.indexOf(base) === 0) {
        normalized = normalized.substring(base.length);
        var first = normalized.charAt(0);
        if (first === '\\' || first === '/') {
            normalized = normalized.substring(1);
        }
    }
    return (this._sourceMapRootpath || "") + normalized;
};
// Appends a chunk of generated CSS and, when fileInfo is given, records a
// source-map mapping from the chunk's output position back to the position
// `index` in the original input file. `mapLines` maps every line of a
// multi-line chunk individually instead of just its start.
tree.sourceMapOutput.prototype.add = function(chunk, fileInfo, index, mapLines) {
//ignore adding empty strings
if (!chunk) {
return;
}
var lines,
sourceLines,
columns,
sourceColumns,
i;
if (fileInfo) {
var inputSource = this._contentsMap[fileInfo.filename];
// remove vars/banner added to the top of the file
if (this._contentsIgnoredCharsMap[fileInfo.filename]) {
// adjust the index
index -= this._contentsIgnoredCharsMap[fileInfo.filename];
if (index < 0) { index = 0; }
// adjust the source
inputSource = inputSource.slice(this._contentsIgnoredCharsMap[fileInfo.filename]);
}
// Truncate at the index: the line/column of the original position fall out
// of the split below (line = number of pieces, column = last piece length).
inputSource = inputSource.substring(0, index);
sourceLines = inputSource.split("\n");
sourceColumns = sourceLines[sourceLines.length-1];
}
lines = chunk.split("\n");
columns = lines[lines.length-1];
if (fileInfo) {
if (!mapLines) {
// Single mapping for the chunk start only.
this._sourceMapGenerator.addMapping({ generated: { line: this._lineNumber + 1, column: this._column},
original: { line: sourceLines.length, column: sourceColumns.length},
source: this.normalizeFilename(fileInfo.filename)});
} else {
// One mapping per output line; columns only apply to the first line.
for(i = 0; i < lines.length; i++) {
this._sourceMapGenerator.addMapping({ generated: { line: this._lineNumber + i + 1, column: i === 0 ? this._column : 0},
original: { line: sourceLines.length + i, column: i === 0 ? sourceColumns.length : 0},
source: this.normalizeFilename(fileInfo.filename)});
}
}
}
// Advance the output cursor past the chunk.
if (lines.length === 1) {
this._column += columns.length;
} else {
this._lineNumber += lines.length - 1;
this._column = columns.length;
}
this._css.push(chunk);
};
// True while no CSS chunks have been emitted yet.
tree.sourceMapOutput.prototype.isEmpty = function() {
    return !this._css.length;
};
// Renders the whole AST to CSS while building the source map, then appends
// the sourceMappingURL comment. The map is either handed to the
// writeSourceMap callback or inlined as a data: URI.
tree.sourceMapOutput.prototype.toCSS = function(env) {
this._sourceMapGenerator = new this._sourceMapGeneratorConstructor({ file: this._outputFilename, sourceRoot: null });
if (this._outputSourceFiles) {
// Embed the full source of every input file into the map.
for(var filename in this._contentsMap) {
if (this._contentsMap.hasOwnProperty(filename))
{
var source = this._contentsMap[filename];
// Strip any injected banner/vars so embedded sources match the mappings.
if (this._contentsIgnoredCharsMap[filename]) {
source = source.slice(this._contentsIgnoredCharsMap[filename]);
}
this._sourceMapGenerator.setSourceContent(this.normalizeFilename(filename), source);
}
}
}
// genCSS calls back into this.add(), populating _css and the mappings.
this._rootNode.genCSS(env, this);
if (this._css.length > 0) {
var sourceMapURL,
sourceMapContent = JSON.stringify(this._sourceMapGenerator.toJSON());
if (this._sourceMapURL) {
sourceMapURL = this._sourceMapURL;
} else if (this._sourceMapFilename) {
sourceMapURL = this.normalizeFilename(this._sourceMapFilename);
}
// With an external writer the URL computed above is kept; otherwise the
// whole map is inlined into the stylesheet as a data URI.
if (this._writeSourceMap) {
this._writeSourceMap(sourceMapContent);
} else {
sourceMapURL = "data:application/json," + encodeURIComponent(sourceMapContent);
}
if (sourceMapURL) {
this._css.push("/*# sourceMappingURL=" + sourceMapURL + " */");
}
}
return this._css.join('');
};
})(require('./tree'));
//
// browser.js - client-side engine
//
/*global less, window, document, XMLHttpRequest, location */
// ---- client-side engine bootstrap: environment detection and defaults ----
var isFileProtocol = /^(file|chrome(-extension)?|resource|qrc|app):/.test(location.protocol);
// Local hosts, explicit ports and file-like protocols imply development mode.
less.env = less.env || (location.hostname == '127.0.0.1' ||
location.hostname == '0.0.0.0' ||
location.hostname == 'localhost' ||
(location.port &&
location.port.length > 0) ||
isFileProtocol ? 'development'
: 'production');
var logLevel = {
debug: 3,
info: 2,
errors: 1,
none: 0
};
// The amount of logging in the javascript console.
// 3 - Debug, information and errors
// 2 - Information and errors
// 1 - Errors
// 0 - None
// Defaults to 2
less.logLevel = typeof(less.logLevel) != 'undefined' ? less.logLevel : (less.env === 'development' ? logLevel.debug : logLevel.errors);
// Load styles asynchronously (default: false)
//
// This is set to `false` by default, so that the body
// doesn't start loading before the stylesheets are parsed.
// Setting this to `true` can result in flickering.
//
less.async = less.async || false;
less.fileAsync = less.fileAsync || false;
// Interval between watch polls
less.poll = less.poll || (isFileProtocol ? 1000 : 1500);
//Setup user functions
// Any functions the page registered on less.functions become available
// inside .less expressions.
if (less.functions) {
for(var func in less.functions) {
if (less.functions.hasOwnProperty(func)) {
less.tree.functions[func] = less.functions[func];
}
}
}
// Opt-in debug info via the URL hash, e.g. #!dumpLineNumbers:comments
var dumpLineNumbers = /!dumpLineNumbers:(comments|mediaquery|all)/.exec(location.hash);
if (dumpLineNumbers) {
less.dumpLineNumbers = dumpLineNumbers[1];
}
// <style>/<link> type attribute values treated as LESS sources.
var typePattern = /^text\/(x-)?less$/;
// localStorage handle (set later, production only) and per-href text cache.
var cache = null;
var fileCache = {};
// Write a "less:"-prefixed message to the console when one exists and the
// configured verbosity is at least `level`.
function log(str, level) {
    if (typeof console !== 'undefined' && less.logLevel >= level) {
        console.log('less: ' + str);
    }
}
// Derive a DOM-id-safe identifier from a stylesheet href by stripping the
// protocol/host, root slash and extension, then sanitizing the rest.
function extractId(href) {
    var id = href.replace(/^[a-z-]+:\/+?[^\/]+/, ''); // drop protocol & domain
    id = id.replace(/^\//, '');                       // drop root /
    id = id.replace(/\.[a-zA-Z]+$/, '');              // drop simple extension
    id = id.replace(/[^\.\w-]+/g, '-');               // replace illegal characters
    return id.replace(/\./g, ':');                    // dots -> colons (valid id)
}
// Formats a parse/eval error (with up to three lines of source context from
// e.extract) and reports it through the console logger.
function errorConsole(e, rootHref) {
var template = '{line} {content}';
var filename = e.filename || rootHref;
var errors = [];
var content = (e.type || "Syntax") + "Error: " + (e.message || 'There is an error in your .less file') +
" in " + filename + " ";
// i is 0/1/2 for the line before/at/after the error; {class} is unused in
// the console template but kept for symmetry with errorHTML.
var errorline = function (e, i, classname) {
if (e.extract[i] !== undefined) {
errors.push(template.replace(/\{line\}/, (parseInt(e.line, 10) || 0) + (i - 1))
.replace(/\{class\}/, classname)
.replace(/\{content\}/, e.extract[i]));
}
};
if (e.extract) {
errorline(e, 0, '');
errorline(e, 1, 'line');
errorline(e, 2, '');
content += 'on line ' + e.line + ', column ' + (e.column + 1) + ':\n' +
errors.join('\n');
} else if (e.stack) {
content += e.stack;
}
log(content, logLevel.errors);
}
// Injects compiled CSS into the document as a <style id="less:..."> node,
// replacing any previous node for the same sheet unless the content is
// unchanged, and optionally persists the result to localStorage.
function createCSS(styles, sheet, lastModified) {
// Strip the query-string
var href = sheet.href || '';
// If there is no title set, use the filename, minus the extension
var id = 'less:' + (sheet.title || extractId(href));
// If this has already been inserted into the DOM, we may need to replace it
var oldCss = document.getElementById(id);
var keepOldCss = false;
// Create a new stylesheet node for insertion or (if necessary) replacement
var css = document.createElement('style');
css.setAttribute('type', 'text/css');
if (sheet.media) {
css.setAttribute('media', sheet.media);
}
css.id = id;
if (css.styleSheet) { // IE
try {
css.styleSheet.cssText = styles;
} catch (e) {
throw new(Error)("Couldn't reassign styleSheet.cssText.");
}
} else {
css.appendChild(document.createTextNode(styles));
// If new contents match contents of oldCss, don't replace oldCss
keepOldCss = (oldCss !== null && oldCss.childNodes.length > 0 && css.childNodes.length > 0 &&
oldCss.firstChild.nodeValue === css.firstChild.nodeValue);
}
var head = document.getElementsByTagName('head')[0];
// If there is no oldCss, just append; otherwise, only append if we need
// to replace oldCss with an updated stylesheet
if (oldCss === null || keepOldCss === false) {
// Insert right after the owning <link> so the cascade order matches the
// original stylesheet order; fall back to appending to <head>.
var nextEl = sheet && sheet.nextSibling || null;
if (nextEl) {
nextEl.parentNode.insertBefore(css, nextEl);
} else {
head.appendChild(css);
}
}
if (oldCss && keepOldCss === false) {
oldCss.parentNode.removeChild(oldCss);
}
// Don't update the local store if the file wasn't modified
if (lastModified && cache) {
log('saving ' + href + ' to cache.', logLevel.info);
try {
cache.setItem(href, styles);
cache.setItem(href + ':timestamp', lastModified);
} catch(e) {
//TODO - could do with adding more robust error handling
// localStorage may be full or disabled; compilation still succeeded, so
// failing to cache is only logged, never rethrown.
log('failed to save', logLevel.errors);
}
}
}
// Run the user-supplied less.postProcessor hook (if any) over the generated
// CSS; a falsy return value leaves the styles unchanged.
function postProcessCSS(styles) {
    var processor = less.postProcessor;
    if (processor && typeof processor === 'function') {
        styles = processor.call(styles, styles) || styles;
    }
    return styles;
}
// Renders a parse/eval error as an HTML panel injected at the top of <body>
// (development mode only), including up to three context lines from e.extract.
function errorHTML(e, rootHref) {
var id = 'less-error-message:' + extractId(rootHref || "");
var template = '<li><label>{line}</label><pre class="{class}">{content}</pre></li>';
var elem = document.createElement('div'), timer, content, errors = [];
var filename = e.filename || rootHref;
var filenameNoPath = filename.match(/([^\/]+(\?.*)?)$/)[1];
elem.id = id;
elem.className = "less-error-message";
content = '<h3>' + (e.type || "Syntax") + "Error: " + (e.message || 'There is an error in your .less file') +
'</h3>' + '<p>in <a href="' + filename + '">' + filenameNoPath + "</a> ";
// i is 0/1/2 for the line before/at/after the error; classname highlights
// the offending line via the .line CSS rule below.
var errorline = function (e, i, classname) {
if (e.extract[i] !== undefined) {
errors.push(template.replace(/\{line\}/, (parseInt(e.line, 10) || 0) + (i - 1))
.replace(/\{class\}/, classname)
.replace(/\{content\}/, e.extract[i]));
}
};
if (e.extract) {
errorline(e, 0, '');
errorline(e, 1, 'line');
errorline(e, 2, '');
content += 'on line ' + e.line + ', column ' + (e.column + 1) + ':</p>' +
'<ul>' + errors.join('') + '</ul>';
} else if (e.stack) {
content += '<br/>' + e.stack.split('\n').slice(1).join('<br/>');
}
elem.innerHTML = content;
// CSS for error messages
createCSS([
'.less-error-message ul, .less-error-message li {',
'list-style-type: none;',
'margin-right: 15px;',
'padding: 4px 0;',
'margin: 0;',
'}',
'.less-error-message label {',
'font-size: 12px;',
'margin-right: 15px;',
'padding: 4px 0;',
'color: #cc7777;',
'}',
'.less-error-message pre {',
'color: #dd6666;',
'padding: 4px 0;',
'margin: 0;',
'display: inline-block;',
'}',
'.less-error-message pre.line {',
'color: #ff0000;',
'}',
'.less-error-message h3 {',
'font-size: 20px;',
'font-weight: bold;',
'padding: 15px 0 5px 0;',
'margin: 0;',
'}',
'.less-error-message a {',
'color: #10a',
'}',
'.less-error-message .error {',
'color: red;',
'font-weight: bold;',
'padding-bottom: 2px;',
'border-bottom: 1px dashed red;',
'}'
].join('\n'), { title: 'error-message' });
elem.style.cssText = [
"font-family: Arial, sans-serif",
"border: 1px solid #e00",
"background-color: #eee",
"border-radius: 5px",
"-webkit-border-radius: 5px",
"-moz-border-radius: 5px",
"color: #e00",
"padding: 15px",
"margin-bottom: 15px"
].join(';');
if (less.env == 'development') {
// <body> may not exist yet while parsing happens in <head>; poll until it
// does, then insert (or replace a previous panel with the same id).
timer = setInterval(function () {
if (document.body) {
if (document.getElementById(id)) {
document.body.replaceChild(elem, document.getElementById(id));
} else {
document.body.insertBefore(elem, document.body.firstChild);
}
clearInterval(timer);
}
}, 10);
}
}
// Report an error through the configured channel: the HTML panel (default),
// the console, or a user-supplied reporting callback.
function error(e, rootHref) {
    var mode = less.errorReporting || "html";
    if (mode === "html") {
        errorHTML(e, rootHref);
    } else if (mode === "console") {
        errorConsole(e, rootHref);
    } else if (typeof mode === 'function') {
        less.errorReporting("add", e, rootHref);
    }
}
// Remove the error panel previously injected for this stylesheet, if present.
function removeErrorHTML(path) {
    var elem = document.getElementById('less-error-message:' + extractId(path));
    if (elem) {
        elem.parentNode.removeChild(elem);
    }
}
// Console output cannot be retracted, so there is nothing to clean up; this
// exists only so removeError can dispatch uniformly across reporting modes.
function removeErrorConsole(path) {
//no action
}
// Clear any previously reported error for `path`, dispatching on the same
// reporting mode used by error().
function removeError(path) {
    var mode = less.errorReporting || "html";
    if (mode === "html") {
        removeErrorHTML(path);
    } else if (mode === "console") {
        removeErrorConsole(path);
    } else if (typeof mode === 'function') {
        less.errorReporting("remove", path);
    }
}
// Compiles every inline <style type="text/less"> element on the page in
// place, replacing its contents with the generated CSS.
function loadStyles(modifyVars) {
var styles = document.getElementsByTagName('style'),
style;
for (var i = 0; i < styles.length; i++) {
style = styles[i];
if (style.type.match(typePattern)) {
var env = new less.tree.parseEnv(less),
lessText = style.innerHTML || '';
// Use the page URL (sans fragment) as the virtual filename for errors.
env.filename = document.location.href.replace(/#.*$/, '');
if (modifyVars || less.globalVars) {
env.useFileCache = true;
}
/*jshint loopfunc:true */
// use closure to store current value of i
var callback = (function(style) {
return function (e, cssAST) {
if (e) {
return error(e, "inline");
}
var css = cssAST.toCSS(less);
// Mark as plain CSS so the browser applies it and we skip it next pass.
style.type = 'text/css';
if (style.styleSheet) {
style.styleSheet.cssText = css;
} else {
style.innerHTML = css;
}
};
})(style);
new(less.Parser)(env).parse(lessText, callback, {globalVars: less.globalVars, modifyVars: modifyVars});
}
}
}
// Split a URL into host part, normalized directory list, path, file URL and
// full URL. Relative URLs (or IE's partial hrefs) are resolved against
// baseUrl. "." and ".." segments are collapsed.
function extractUrlParts(url, baseUrl) {
    // urlParts[1] = protocol&hostname || /
    // urlParts[2] = / if path relative to host base
    // urlParts[3] = directories
    // urlParts[4] = filename
    // urlParts[5] = parameters
    var urlPartsRegex = /^((?:[a-z-]+:)?\/+?(?:[^\/\?#]*\/)|([\/\\]))?((?:[^\/\\\?#]*[\/\\])*)([^\/\\\?#]*)([#\?].*)?$/i;
    var urlParts = url.match(urlPartsRegex);
    if (!urlParts) {
        throw new Error("Could not parse sheet href - '" + url + "'");
    }
    // Stylesheets in IE don't always return the full path; borrow the missing
    // protocol/host and base directories from the page URL.
    if (!urlParts[1] || urlParts[2]) {
        var baseUrlParts = baseUrl.match(urlPartsRegex);
        if (!baseUrlParts) {
            throw new Error("Could not parse page url - '" + baseUrl + "'");
        }
        urlParts[1] = urlParts[1] || baseUrlParts[1] || "";
        if (!urlParts[2]) {
            urlParts[3] = baseUrlParts[3] + urlParts[3];
        }
    }
    var directories = [];
    if (urlParts[3]) {
        directories = urlParts[3].replace(/\\/g, "/").split("/");
        // Drop "." segments first so a following ".." never absorbs one.
        var i = 0;
        while (i < directories.length) {
            if (directories[i] === ".") {
                directories.splice(i, 1);
            } else {
                i++;
            }
        }
        // Collapse "dir/.." pairs (a leading ".." has nothing to collapse).
        i = 0;
        while (i < directories.length) {
            if (directories[i] === ".." && i > 0) {
                directories.splice(i - 1, 2);
                i -= 1;
            } else {
                i++;
            }
        }
    }
    var returner = {};
    returner.hostPart = urlParts[1];
    returner.directories = directories;
    returner.path = urlParts[1] + directories.join("/");
    returner.fileUrl = returner.path + (urlParts[4] || "");
    returner.url = returner.fileUrl + (urlParts[5] || "");
    return returner;
}
// Build the relative path ("../" climbs plus directory descents) that leads
// from baseUrl's directory to url's directory; empty when hosts differ.
function pathDiff(url, baseUrl) {
    var urlParts = extractUrlParts(url),
        baseParts = extractUrlParts(baseUrl);
    if (urlParts.hostPart !== baseParts.hostPart) {
        return "";
    }
    // Find the length of the shared directory prefix.
    var maxLen = Math.max(baseParts.directories.length, urlParts.directories.length), shared;
    for (shared = 0; shared < maxLen; shared++) {
        if (baseParts.directories[shared] !== urlParts.directories[shared]) { break; }
    }
    var climb = baseParts.directories.slice(shared),
        descend = urlParts.directories.slice(shared),
        diff = "", j;
    // Directory lists carry a trailing "" entry, hence the length-1 bounds.
    for (j = 0; j < climb.length - 1; j++) { diff += "../"; }
    for (j = 0; j < descend.length - 1; j++) { diff += descend[j] + "/"; }
    return diff;
}
// Returns an XHR object, preferring the native implementation; old IE on
// file: URLs needs the ActiveX fallback. Returns null when neither exists.
function getXMLHttpRequest() {
if (window.XMLHttpRequest && (window.location.protocol !== "file:" || !window.ActiveXObject)) {
return new XMLHttpRequest();
} else {
try {
/*global ActiveXObject */
return new ActiveXObject("Microsoft.XMLHTTP");
} catch (e) {
log("browser doesn't support AJAX.", logLevel.errors);
return null;
}
}
}
// Fetches `url` via XHR, sync or async depending on protocol/settings, and
// invokes callback(responseText, lastModified) on success or
// errback(status, url) on failure.
function doXHR(url, type, callback, errback) {
var xhr = getXMLHttpRequest();
// file: URLs have their own async flag since sync XHR is the IE-safe default there.
var async = isFileProtocol ? less.fileAsync : less.async;
if (typeof(xhr.overrideMimeType) === 'function') {
xhr.overrideMimeType('text/css');
}
log("XHR: Getting '" + url + "'", logLevel.debug);
// Cache-busting query param ensures edits are picked up during development.
xhr.open('GET', url+"?_nocache="+Math.random(), async);
xhr.setRequestHeader('Accept', type || 'text/x-less, text/css; q=0.9, */*; q=0.5');
xhr.send(null);
function handleResponse(xhr, callback, errback) {
if (xhr.status >= 200 && xhr.status < 300) {
callback(xhr.responseText,
xhr.getResponseHeader("Last-Modified"));
} else if (typeof(errback) === 'function') {
errback(xhr.status, url);
}
}
if (isFileProtocol && !less.fileAsync) {
// Synchronous file: request completed during send(); status 0 means success here.
if (xhr.status === 0 || (xhr.status >= 200 && xhr.status < 300)) {
callback(xhr.responseText);
} else {
errback(xhr.status, url);
}
} else if (async) {
xhr.onreadystatechange = function () {
if (xhr.readyState == 4) {
handleResponse(xhr, callback, errback);
}
};
} else {
handleResponse(xhr, callback, errback);
}
}
// Resolves and fetches a .less file (import or root sheet), building the
// fileInfo record (paths, rootpath, relative-url settings) that the parser
// needs, then invokes callback(err, data, href, newFileInfo, webInfo).
function loadFile(originalHref, currentFileInfo, callback, env, modifyVars) {
// Relative imports are resolved against the importing file's directory.
if (currentFileInfo && currentFileInfo.currentDirectory && !/^([a-z-]+:)?\//.test(originalHref)) {
originalHref = currentFileInfo.currentDirectory + originalHref;
}
// sheet may be set to the stylesheet for the initial load or a collection of properties including
// some env variables for imports
var hrefParts = extractUrlParts(originalHref, window.location.href);
var href = hrefParts.url;
var newFileInfo = {
currentDirectory: hrefParts.path,
filename: href
};
if (currentFileInfo) {
// Imported file: inherit entry/root context from the importer.
newFileInfo.entryPath = currentFileInfo.entryPath;
newFileInfo.rootpath = currentFileInfo.rootpath;
newFileInfo.rootFilename = currentFileInfo.rootFilename;
newFileInfo.relativeUrls = currentFileInfo.relativeUrls;
} else {
// Root stylesheet: this file defines the entry context.
newFileInfo.entryPath = hrefParts.path;
newFileInfo.rootpath = less.rootpath || hrefParts.path;
newFileInfo.rootFilename = href;
newFileInfo.relativeUrls = env.relativeUrls;
}
if (newFileInfo.relativeUrls) {
if (env.rootpath) {
newFileInfo.rootpath = extractUrlParts(env.rootpath + pathDiff(hrefParts.path, newFileInfo.entryPath)).path;
} else {
newFileInfo.rootpath = hrefParts.path;
}
}
// Serve from the in-memory cache when re-rendering (e.g. modifyVars).
if (env.useFileCache && fileCache[href]) {
try {
var lessText = fileCache[href];
callback(null, lessText, href, newFileInfo, { lastModified: new Date() });
} catch (e) {
callback(e, null, href);
}
return;
}
doXHR(href, env.mime, function (data, lastModified) {
// per file cache
fileCache[href] = data;
// Use remote copy (re-parse)
try {
callback(null, data, href, newFileInfo, { lastModified: lastModified });
} catch (e) {
callback(e, null, href);
}
}, function (status, url) {
callback({ type: 'File', message: "'" + url + "' wasn't found (" + status + ")" }, null, href);
});
}
// Loads and parses one <link> stylesheet. Uses the localStorage copy when the
// server's Last-Modified matches the cached timestamp; otherwise parses the
// fetched source and hands the AST to the callback.
function loadStyleSheet(sheet, callback, reload, remaining, modifyVars) {
var env = new less.tree.parseEnv(less);
env.mime = sheet.type;
if (modifyVars || less.globalVars) {
env.useFileCache = true;
}
loadFile(sheet.href, null, function(e, data, path, newFileInfo, webInfo) {
if (webInfo) {
webInfo.remaining = remaining;
var css = cache && cache.getItem(path),
timestamp = cache && cache.getItem(path + ':timestamp');
if (!reload && timestamp && webInfo.lastModified &&
(new(Date)(webInfo.lastModified).valueOf() ===
new(Date)(timestamp).valueOf())) {
// Use local copy
createCSS(css, sheet);
webInfo.local = true;
// No AST (second arg null): caller distinguishes via webInfo.local.
callback(null, null, data, sheet, webInfo, path);
return;
}
}
//TODO add tests around how this behaves when reloading
removeError(path);
if (data) {
env.currentFileInfo = newFileInfo;
new(less.Parser)(env).parse(data, function (e, root) {
if (e) { return callback(e, null, null, sheet); }
try {
callback(e, root, data, sheet, webInfo, path);
} catch (e) {
// An exception inside the caller's handler is reported the same way
// as a parse error rather than escaping the async boundary.
callback(e, null, null, sheet);
}
}, {modifyVars: modifyVars, globalVars: less.globalVars});
} else {
callback(e, null, null, sheet, webInfo, path);
}
}, env, modifyVars);
}
// Start a load for every registered sheet; the "remaining" argument counts
// down to 0 so the callback can tell when the final sheet has finished.
function loadStyleSheets(callback, reload, modifyVars) {
    for (var i = 0; i < less.sheets.length; i++) {
        var sheet = less.sheets[i];
        loadStyleSheet(sheet, callback, reload, less.sheets.length - (i + 1), modifyVars);
    }
}
// Configures optimization level per environment and, in development, starts
// the polling timer that recompiles sheets while watch mode is enabled.
function initRunningMode(){
if (less.env === 'development') {
less.optimization = 0;
less.watchTimer = setInterval(function () {
// The timer always runs; watchMode gates whether a poll actually reloads.
if (less.watchMode) {
loadStyleSheets(function (e, root, _, sheet, env) {
if (e) {
error(e, sheet.href);
} else if (root) {
var styles = root.toCSS(less);
styles = postProcessCSS(styles);
createCSS(styles, sheet, env.lastModified);
}
});
}
}, less.poll);
} else {
less.optimization = 3;
}
}
//
// Watch mode
//
// Enable watch mode. The first call also forces the development environment
// and starts the polling timer; later calls only (re)set the flag.
less.watch = function () {
    var firstTime = !less.watchMode;
    if (firstTime) {
        less.env = 'development';
        initRunningMode();
    }
    this.watchMode = true;
    return true;
};
// Disable watch mode and stop the polling timer.
less.unwatch = function () {
    clearInterval(less.watchTimer);
    this.watchMode = false;
    return false;
};
// A "#!watch" fragment in the URL turns watch mode on at load time.
if (/!watch/.test(location.hash)) {
less.watch();
}
// localStorage caching is only used outside development; accessing
// localStorage can throw (e.g. privacy mode), hence the try/catch.
if (less.env != 'development') {
try {
cache = (typeof(window.localStorage) === 'undefined') ? null : window.localStorage;
} catch (_) {}
}
//
// Get all <link> tags with the 'rel' attribute set to "stylesheet/less"
//
var links = document.getElementsByTagName('link');
less.sheets = [];
for (var i = 0; i < links.length; i++) {
// Accept rel="stylesheet/less" or rel="stylesheet" with a LESS MIME type.
if (links[i].rel === 'stylesheet/less' || (links[i].rel.match(/stylesheet/) &&
(links[i].type.match(typePattern)))) {
less.sheets.push(links[i]);
}
}
//
// With this function, it's possible to alter variables and re-render
// CSS without reloading less-files
//
// `record` maps variable names to new values; sources come from fileCache.
less.modifyVars = function(record) {
less.refresh(false, record);
};
// Recompile every registered sheet plus inline styles. `reload` bypasses the
// localStorage cache; `modifyVars` overrides variables for this render.
less.refresh = function (reload, modifyVars) {
var startTime, endTime;
startTime = endTime = new Date();
loadStyleSheets(function (e, root, _, sheet, env) {
if (e) {
return error(e, sheet.href);
}
if (env.local) {
// Cached CSS was already injected by loadStyleSheet; nothing to generate.
log("loading " + sheet.href + " from cache.", logLevel.info);
} else {
log("parsed " + sheet.href + " successfully.", logLevel.debug);
var styles = root.toCSS(less);
styles = postProcessCSS(styles);
createCSS(styles, sheet, env.lastModified);
}
log("css for " + sheet.href + " generated in " + (new Date() - endTime) + 'ms', logLevel.info);
// remaining counts down per sheet; 0 marks the final one.
if (env.remaining === 0) {
log("less has finished. css generated in " + (new Date() - startTime) + 'ms', logLevel.info);
}
endTime = new Date();
}, reload, modifyVars);
loadStyles(modifyVars);
};
// Public aliases and initial render on page load (forced reload in development).
less.refreshStyles = loadStyles;
less.Parser.fileLoader = loadFile;
less.refresh(less.env === 'development');
// amd.js
//
// Define Less as an AMD module.
if (typeof define === "function" && define.amd) {
define(function () { return less; } );
}
})(window); | lnc2014/sxg | ace-v1.3/build/lib/less.js | JavaScript | apache-2.0 | 280,205 |
package org.devspark.aws.tools;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.devspark.aws.lambdasupport.endpoint.annotations.apigateway.ApiGateway;
import org.devspark.aws.lambdasupport.endpoint.annotations.apigateway.Resource;
import org.devspark.aws.lambdasupport.endpoint.annotations.apigateway.ResourceMethod;
import org.devspark.aws.tools.model.resources.EndpointResource;
import org.devspark.aws.tools.model.resources.EndpointResourceMethod;
import org.devspark.aws.tools.model.resources.EndpointResourceMethodParameter;
import org.devspark.aws.tools.model.resources.EndpointResponse;
import org.devspark.aws.tools.model.resources.EndpointResponseHeader;
import org.devspark.aws.tools.model.resources.EndpointResponseSchema;
import org.devspark.aws.tools.swagger.SwaggerFileWriter;
import org.devspark.aws.tools.swagger.VelocitySwaggerFileWriter;
import org.reflections.ReflectionUtils;
import org.reflections.Reflections;
import org.reflections.scanners.SubTypesScanner;
import org.reflections.scanners.TypeAnnotationsScanner;
import org.reflections.util.ClasspathHelper;
import org.reflections.util.ConfigurationBuilder;
/**
 * Maven mojo that scans {@code basePackage} for {@link Resource} /
 * {@link ApiGateway} annotated types and generates a Swagger file describing
 * the discovered API Gateway endpoints.
 */
@Mojo(name = "apigateway-deployer")
public class AWSAPIGatewayDeployer extends AbstractMojo {

    /** Root package scanned for annotated endpoint types. */
    @Parameter(property = "base-package")
    private String basePackage;

    /**
     * Matches path parameters such as {@code {userId}} inside a resource name.
     * Fixed: the previous pattern used the character class {@code [a-zA-A]},
     * a typo that only matched lowercase letters and the literal 'A', so
     * parameters containing other uppercase letters were silently dropped.
     * Compiled once since the pattern is constant.
     */
    private static final Pattern PATH_PARAM_PATTERN = Pattern.compile("\\{[a-zA-Z]*\\}");

    private SwaggerFileWriter fileWriter = new VelocitySwaggerFileWriter();

    /**
     * Scans the classpath, collects the endpoint resources and writes the
     * Swagger file.
     *
     * @throws MojoExecutionException if no {@link ApiGateway} annotated type exists.
     */
    @Override
    public void execute() throws MojoExecutionException, MojoFailureException {
        Reflections reflections = new Reflections(new ConfigurationBuilder()
                .setUrls(ClasspathHelper.forPackage(basePackage)).setScanners(
                        new SubTypesScanner(), new TypeAnnotationsScanner()));

        Set<Class<?>> resources = reflections.getTypesAnnotatedWith(Resource.class);
        Set<Class<?>> apis = reflections.getTypesAnnotatedWith(ApiGateway.class);

        Map<String, EndpointResource> endpointResources = getEndpointResources(resources);
        String apiName = getApiName(apis);

        fileWriter.createSwaggerFile(
                new ArrayList<EndpointResource>(endpointResources.values()), apiName);
    }

    /**
     * Returns the API name declared by the single {@link ApiGateway} type.
     * Fails fast with a descriptive build error when none is found (the
     * previous code threw a bare {@code NoSuchElementException}); multiple
     * annotated types are reported but the first one wins.
     */
    private String getApiName(Set<Class<?>> apis) throws MojoExecutionException {
        if (apis.isEmpty()) {
            throw new MojoExecutionException(
                    "No @ApiGateway annotated type found in package: " + basePackage);
        }
        if (apis.size() > 1) {
            getLog().warn("Invalid number of @ApiGateway found.");
        }
        return apis.iterator().next().getAnnotationsByType(ApiGateway.class)[0].name();
    }

    /**
     * Builds one {@link EndpointResource} per resource path, aggregating the
     * methods of every scanned type. Methods annotated with a nested
     * {@link Resource} contribute a sub-path of the owning type's resource.
     */
    @SuppressWarnings("unchecked")
    private Map<String, EndpointResource> getEndpointResources(Set<Class<?>> resources) {
        Map<String, EndpointResource> endpointResources = new HashMap<String, EndpointResource>();
        for (Class<?> type : resources) {
            Set<Method> resourceMethods = ReflectionUtils.getAllMethods(type,
                    ReflectionUtils.withAnnotation(ResourceMethod.class));
            if (resourceMethods.isEmpty()) {
                // Message fixed: it is @ResourceMethod that is looked up here,
                // not @Resource as the old warning claimed.
                getLog().warn("No methods annotated with @ResourceMethod found in type: "
                        + type.getName());
                continue;
            }
            for (Method method : resourceMethods) {
                Resource methodResource = method.getAnnotation(Resource.class);
                String resourceName = type.getAnnotationsByType(Resource.class)[0].name();
                if (methodResource != null) {
                    resourceName = resourceName + "/" + methodResource.name();
                }
                EndpointResourceMethod endpointMethod = createMethodResource(method, resourceName);
                EndpointResource endpointResource = endpointResources.get(resourceName);
                if (endpointResource == null) {
                    endpointResource = new EndpointResource();
                    endpointResource.setName(resourceName);
                    endpointResource.setMethods(new ArrayList<EndpointResourceMethod>());
                    endpointResources.put(resourceName, endpointResource);
                }
                endpointResource.getMethods().add(endpointMethod);
            }
        }
        return endpointResources;
    }

    /** Maps one annotated method to its Swagger method description. */
    private EndpointResourceMethod createMethodResource(Method method, String resourceName) {
        EndpointResourceMethod endpointMethod = new EndpointResourceMethod();
        ResourceMethod resourceMethod = method.getAnnotation(ResourceMethod.class);
        endpointMethod.setVerb(resourceMethod.httpMethod().name());
        endpointMethod.setParameters(getParameters(resourceName));
        endpointMethod.setProduces(Arrays.asList("application/json"));
        endpointMethod.setResponses(getMethodResponses());
        return endpointMethod;
    }

    //TODO: Replace mocked list with the generation of the responses of the method.
    /**
     * Returns the (currently mocked) response list: a single 200 response
     * with an empty schema. Fixed: the response was fully built but never
     * added to the list, so every method ended up with no responses at all.
     */
    private List<EndpointResponse> getMethodResponses() {
        List<EndpointResponse> responses = new ArrayList<EndpointResponse>();
        EndpointResponse successfulResponse = new EndpointResponse();
        successfulResponse.setHttpStatus("200");
        successfulResponse.setDescription("200 response");
        successfulResponse.setHeaders(new EndpointResponseHeader());
        EndpointResponseSchema schema = new EndpointResponseSchema();
        schema.setRef("#/definitions/Empty");
        successfulResponse.setSchema(schema);
        responses.add(successfulResponse);
        return responses;
    }

    /**
     * Extracts the {@code {param}} placeholders of a resource path as Swagger
     * path parameters (all marked required strings for now).
     */
    private List<EndpointResourceMethodParameter> getParameters(String resourceName) {
        List<EndpointResourceMethodParameter> parameters = new ArrayList<EndpointResourceMethodParameter>();
        Matcher m = PATH_PARAM_PATTERN.matcher(resourceName);
        while (m.find()) {
            EndpointResourceMethodParameter parameter = new EndpointResourceMethodParameter();
            // Strip the surrounding braces from the matched placeholder.
            parameter.setName(m.group(0).replaceAll("\\{*\\}*", ""));
            //TODO: Review how to populate the parameter metadata.
            parameter.setRequired(true);
            parameter.setType("string");
            parameter.setIn("path");
            parameters.add(parameter);
        }
        return parameters;
    }
}
| devspark-com/aws-lambda-deploy | src/main/java/org/devspark/aws/tools/AWSAPIGatewayDeployer.java | Java | apache-2.0 | 5,913 |
<?php
// Application configuration: database, session, caching, hit counters and
// payment-gateway credentials. Values here override framework defaults.
//--------------------------------
//db_mysqli, db_pdo
$var->db_driver = 'db_mysqli';
//$var->db_driver = 'db_pdo';
//for pdo: mysql, sqlite, ...
//$var->db_engine = 'mysql';
$var->db_host = 'localhost';
$var->db_database = 'ada';
// NOTE(review): database credentials are committed in plain text; consider
// moving them to an untracked/local config or environment variables.
$var->db_user = 'ad';
$var->db_pass = 'dddd';
//$var->db_database = 'kargosha';
//$var->db_user = 'root';
//$var->db_pass = '';
// Table name prefix ("perfix" is a long-standing key name; renaming it would
// break consumers, so it is left as-is).
$var->db_perfix = 'x_';
$var->db_set_utf8 = true;
//--------------------------------
// Unique key used to namespace this application's session data.
$var->session_unique = 'kargosha';
//--------------------------------
$var->auto_lang = false;
$var->multi_domain = false;
//------------ News letter { --------------------
$var->use_mailer_lite_api = true;
$var->mailer_lite_api_key = '';
$var->mailer_lite_group_id = '';
//------------ } News letter --------------------
//--------------------------------
// Page/image caching toggles and lifetimes (seconds).
$var->cache_page = false;
$var->cache_image = false;
$var->cache_page_time = 1*24*60*60; // 1 day = 1 days * 24 hours * 60 mins * 60 secs
$var->cache_image_time = 1*24*60*60; // 1 day = 1 days * 24 hours * 60 mins * 60 secs
$var->cookie_expire_time = 30*24*60*60; // 30 day = 30 days * 24 hours * 60 mins * 60 secs
//--------------------------------
// Visit statistics: master counter, online users, referer/request logging.
$var->hit_counter = true;
$var->hit_online = true;
$var->hit_referers = false;
$var->hit_requests = false;
//--------------------------------
//--------------------------------bank: Beh Pardakht - Bank e Mellat
$var->bank_mellat_terminal = 0;
$var->bank_mellat_user = '';
$var->bank_mellat_pass = '';
$var->callBackUrl = "http://site.com/?a=transaction.callBack";
$var->bank_startpayUrl = "https://bpm.shaparak.ir/pgwchannel/startpay.mellat";
$var->bank_nusoap_client = "https://bpm.shaparak.ir/pgwchannel/services/pgw?wsdl";
$var->bank_namespace = "http://interfaces.core.sw.bps.com/";
//--------------------------------
// Zarinpal payment gateway settings.
$var->zarinpal_merchant_code = "AAAA-BBBB-CCCC-DDDD";
$var->zarinpal_callBackUrl = "http://site.com/?a=transaction_zarinpal.callBack";
//--------------------------------
?>
| ariax/phpcms | core/config.php | PHP | apache-2.0 | 1,959 |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.actions;
import com.intellij.CommonBundle;
import com.intellij.history.LocalHistory;
import com.intellij.history.LocalHistoryAction;
import com.intellij.ide.IdeBundle;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.application.WriteActionAware;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.UndoConfirmationPolicy;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.SmartPointerManager;
import com.intellij.psi.SmartPsiElementPointer;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
 * Creates PSI elements inside an undoable write command, reporting any failure
 * to the user through a modal error dialog.
 *
 * @author peter
 */
public abstract class ElementCreator implements WriteActionAware {
    private static final Logger LOG = Logger.getInstance(ElementCreator.class);

    private final Project myProject;
    private final @NlsContexts.DialogTitle String myErrorTitle;

    protected ElementCreator(Project project, @NotNull @NlsContexts.DialogTitle String errorTitle) {
        myProject = project;
        myErrorTitle = errorTitle;
    }

    /** Performs the actual element creation; invoked inside the command. */
    protected abstract PsiElement @NotNull [] create(@NotNull String newName) throws Exception;

    /** Returns the user-visible command name for creating {@code newName}. */
    @NlsContexts.Command
    @NotNull
    protected abstract String getActionName(@NotNull String newName);

    /**
     * Runs {@link #create} under a named command (and a write action when
     * {@link #startInWriteAction()} is true). Shows an error dialog and returns
     * an empty array if the name is empty or creation throws.
     */
    public @NotNull PsiElement @NotNull [] tryCreate(@NotNull final String inputString) {
        if (inputString.isEmpty()) {
            Messages.showMessageDialog(myProject, IdeBundle.message("error.name.should.be.specified"), CommonBundle.getErrorTitle(),
                                       Messages.getErrorIcon());
            return PsiElement.EMPTY_ARRAY;
        }

        // Smart pointers survive the PSI changes performed by the command,
        // unlike the raw elements returned by create().
        Ref<List<SmartPsiElementPointer<?>>> pointersRef = Ref.create();
        Exception failure = executeCommand(getActionName(inputString), () -> {
            SmartPointerManager pointerManager = SmartPointerManager.getInstance(myProject);
            PsiElement[] createdElements = create(inputString);
            pointersRef.set(ContainerUtil.map(createdElements, pointerManager::createSmartPsiElementPointer));
        });
        if (failure != null) {
            handleException(failure);
            return PsiElement.EMPTY_ARRAY;
        }

        return ContainerUtil.mapNotNull(pointersRef.get(), SmartPsiElementPointer::getElement).toArray(PsiElement.EMPTY_ARRAY);
    }

    /**
     * Executes {@code invokeCreate} inside a command with local-history tracking.
     *
     * @return the exception thrown by {@code invokeCreate}, or null on success.
     */
    @Nullable
    private Exception executeCommand(@NotNull @NlsContexts.Command String commandName, @NotNull ThrowableRunnable<? extends Exception> invokeCreate) {
        final Ref<Exception> failure = Ref.create();
        CommandProcessor.getInstance().executeCommand(myProject, () -> {
            LocalHistoryAction action = LocalHistory.getInstance().startAction(commandName);
            try {
                if (startInWriteAction()) {
                    WriteAction.run(invokeCreate);
                }
                else {
                    invokeCreate.run();
                }
            }
            catch (Exception ex) {
                failure.set(ex);
            }
            finally {
                action.finish();
            }
        }, commandName, null, UndoConfirmationPolicy.REQUEST_CONFIRMATION);
        return failure.get();
    }

    private void handleException(Exception t) {
        LOG.info(t);
        Messages.showMessageDialog(myProject, getErrorMessage(t), myErrorTitle, Messages.getErrorIcon());
    }

    /** Produces a user-presentable message, falling back to {@code t.toString()}. */
    public static @NlsContexts.DialogMessage String getErrorMessage(Throwable t) {
        String message = CreateElementActionBase.filterMessage(t.getMessage());
        return StringUtil.isEmpty(message) ? t.toString() : message;
    }
}
| jwren/intellij-community | platform/lang-api/src/com/intellij/ide/actions/ElementCreator.java | Java | apache-2.0 | 4,462 |
// Copyright 2018-2020 Authors of Cilium
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package datapath
import (
"net"
"github.com/cilium/cilium/pkg/cidr"
)
// NodeAddressingFamily is the node addressing information for a particular
// address family (IPv4 or IPv6).
type NodeAddressingFamily interface {
	// Router is the address that will act as the router on each node where
	// an agent is running on. Endpoints have a default route that points
	// to this address.
	Router() net.IP
	// PrimaryExternal is the primary external address of the node. Nodes
	// must be able to reach each other via this address.
	PrimaryExternal() net.IP
	// AllocationCIDR is the CIDR used for IP allocation of all endpoints
	// on the node.
	AllocationCIDR() *cidr.CIDR
	// LocalAddresses lists all local addresses; the error is non-nil if the
	// addresses could not be determined.
	LocalAddresses() ([]net.IP, error)
	// LoadBalancerNodeAddresses lists all addresses on which HostPort and
	// NodePort services should be responded to.
	LoadBalancerNodeAddresses() []net.IP
}
// NodeAddressing implements addressing of a node, split by address family.
type NodeAddressing interface {
	// IPv6 returns the IPv6 addressing information of the node.
	IPv6() NodeAddressingFamily
	// IPv4 returns the IPv4 addressing information of the node.
	IPv4() NodeAddressingFamily
}
| tgraf/cilium | pkg/datapath/node_addressing.go | GO | apache-2.0 | 1,642 |
package com.zswxsqxt.wf.dao;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Repository;
import cn.org.rapid_framework.page.Page;
import com.opendata.common.base.BaseHibernateDao;
import com.zswxsqxt.wf.model.WfActivity;
import com.zswxsqxt.wf.model.WfProject;
import com.zswxsqxt.wf.query.WfActivityQuery;
/**
 * DAO for the workflow activity ("process node") table.
 */
@Repository
public class WfActivityDao extends BaseHibernateDao<WfActivity,String>
{
    // Tells the generic base DAO which entity class this DAO manages.
    public Class getEntityClass()
    {
        return WfActivity.class;
    }

    /**
     * Pages through the workflow-activity table using the criteria carried by a
     * {@link WfActivityQuery}. Every non-empty / non-null field of the query
     * contributes an AND condition; {@code name} is matched with LIKE, all
     * other fields with equality.
     *
     * @param query    the filter criteria; may be null for an unfiltered query.
     * @param pageSize number of rows per page.
     * @param pageNum  page index, as interpreted by the base DAO's findByHql.
     * @return the requested page of WfActivity rows, ordered by the requested
     *         sort column (falling back to {@code orderNum asc}).
     */
    public Page findPage(WfActivityQuery query,int pageSize,int pageNum)
    {
        StringBuilder hql=new StringBuilder();
        hql.append(" from WfActivity ett where 1=1");
        // Positional HQL parameters, added in the same order as the '?'
        // placeholders appended above.
        List param=new ArrayList();
        if(query!=null)
        {
            if(!StringUtils.isEmpty(query.getId()))
            {
                hql.append(" and ett.id=?");
                param.add(query.getId());
            }
            if(!StringUtils.isEmpty(query.getName()))
            {
                // Substring match on the activity name.
                hql.append(" and ett.name like ?");
                param.add("%"+query.getName()+"%");
            }
            if(query.getOrderNum()!=null)
            {
                hql.append(" and ett.orderNum=?");
                param.add(query.getOrderNum());
            }
            if(query.getActType()!=null)
            {
                hql.append(" and ett.actType=?");
                param.add(query.getActType());
            }
            if(query.getActFlag()!=null)
            {
                hql.append(" and ett.actFlag=?");
                param.add(query.getActFlag());
            }
            if(!StringUtils.isEmpty(query.getDescription()))
            {
                hql.append(" and ett.description=?");
                param.add(query.getDescription());
            }
            if(!StringUtils.isEmpty(query.getUrl()))
            {
                hql.append(" and ett.url=?");
                param.add(query.getUrl());
            }
            if(!StringUtils.isEmpty(query.getGroupFlag()))
            {
                hql.append(" and ett.groupFlag=?");
                param.add(query.getGroupFlag());
            }
            if(!StringUtils.isEmpty(query.getExtFiled3()))
            {
                hql.append(" and ett.extFiled3=?");
                param.add(query.getExtFiled3());
            }
            if(query.getTs()!=null)
            {
                hql.append(" and ett.ts=?");
                param.add(query.getTs());
            }
            if(query.getWfProject()!=null)
            {
                // Filter by the owning workflow's id.
                hql.append(" and ett.wfProject.id=?");
                param.add(query.getWfProject().getId());
            }
            if(query.getWfInstance()!=null)
            {
                hql.append(" and ett.wfInstance=?");
                param.add(query.getWfInstance());
            }
        }
        // Bug fix: the sort-column handling originally dereferenced
        // query.getSortColumns() outside the 'query != null' guard above, so a
        // null query caused a NullPointerException instead of an unfiltered,
        // default-ordered page.
        // NOTE(review): sortColumns is concatenated into the HQL string; ensure
        // it is never taken directly from user input.
        if(query!=null && !StringUtils.isEmpty(query.getSortColumns())){
            if(!query.getSortColumns().equals("ts")){
                // Caller-specified column first, then timestamp as tie-breaker.
                hql.append(" order by ett."+query.getSortColumns()+" , ett.ts desc ");
            }else{
                hql.append(" order by ett.orderNum asc ");
            }
        }else{
            hql.append(" order by ett.orderNum asc ");
        }
        return super.findByHql(hql.toString(), pageSize, pageNum, param.toArray());
    }

    /**
     * Returns all activities of the given workflow, sorted ascending by their
     * node order.
     *
     * @param proId the workflow (WfProject) id.
     * @return the ordered activities, or null when the workflow has none —
     *         callers must handle the null return.
     */
    public List<WfActivity> getWfActivity(String proId){
        String hql = "from WfActivity where wfProject.id = ? order by orderNum asc";
        List<WfActivity> list = super.findFastByHql(hql, proId);
        if(list.size()>0){
            return list;
        }else{
            return null;
        }
    }
}
| TuWei1992/zswxsqxt | src/main/zswxsqxt/com/zswxsqxt/wf/dao/WfActivityDao.java | Java | apache-2.0 | 3,115 |
/**
* Copyright 2015-2016 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.swarm.plugin.maven;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.eclipse.aether.repository.RemoteRepository;
import org.wildfly.swarm.bootstrap.util.BootstrapProperties;
import org.wildfly.swarm.fractionlist.FractionList;
import org.wildfly.swarm.spi.api.SwarmProperties;
import org.wildfly.swarm.tools.ArtifactSpec;
import org.wildfly.swarm.tools.BuildTool;
import org.wildfly.swarm.tools.DependencyManager;
import org.wildfly.swarm.tools.FractionDescriptor;
import org.wildfly.swarm.tools.FractionUsageAnalyzer;
import org.wildfly.swarm.tools.exec.SwarmExecutor;
import org.wildfly.swarm.tools.exec.SwarmProcess;
/**
* @author Bob McWhirter
* @author Ken Finnigan
*/
@Mojo(name = "start",
requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME,
requiresDependencyCollection = ResolutionScope.COMPILE_PLUS_RUNTIME)
public class StartMojo extends AbstractSwarmMojo {
@Parameter(alias = "stdoutFile", property = "swarm.stdout")
public File stdoutFile;
@Parameter(alias = "stderrFile", property = "swarm.stderr" )
public File stderrFile;
@Parameter(alias = "useUberJar", defaultValue = "${wildfly-swarm.useUberJar}")
public boolean useUberJar;
@Parameter(alias = "debug", property = SwarmProperties.DEBUG_PORT)
public Integer debugPort;
@Parameter(alias = "jvmArguments", property = "swarm.jvmArguments")
public List<String> jvmArguments = new ArrayList<>();
@Parameter(alias = "arguments" )
public List<String> arguments = new ArrayList<>();
@Parameter(property = "swarm.arguments", defaultValue = "")
public String argumentsProp;
boolean waitForProcess;
@SuppressWarnings({"unchecked", "ThrowableResultOfMethodCallIgnored"})
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
initProperties(true);
initEnvironment();
final SwarmExecutor executor;
if (this.useUberJar) {
executor = uberJarExecutor();
} else if (this.project.getPackaging().equals("war")) {
executor = warExecutor();
} else if (this.project.getPackaging().equals("jar")) {
executor = jarExecutor();
} else {
throw new MojoExecutionException("Unsupported packaging: " + this.project.getPackaging());
}
executor.withJVMArguments( this.jvmArguments );
if ( this.argumentsProp != null ) {
StringTokenizer args = new StringTokenizer(this.argumentsProp);
while ( args.hasMoreTokens() ) {
this.arguments.add( args.nextToken() );
}
}
executor.withArguments( this.arguments );
final SwarmProcess process;
try {
process = executor.withDebug(debugPort)
.withProperties(this.properties)
.withStdoutFile(this.stdoutFile != null ? this.stdoutFile.toPath() : null)
.withStderrFile(this.stderrFile != null ? this.stderrFile.toPath() : null)
.withEnvironment(this.environment)
.withWorkingDirectory(this.project.getBasedir().toPath())
.withProperty("remote.maven.repo",
String.join(",",
this.project.getRemoteProjectRepositories().stream()
.map(RemoteRepository::getUrl)
.collect(Collectors.toList())))
.execute();
Runtime.getRuntime().addShutdownHook( new Thread(()->{
try {
// Sleeping for a few millis will give time to shutdown gracefully
Thread.sleep(100L);
process.stop( 10, TimeUnit.SECONDS );
} catch (InterruptedException e) {
}
}));
process.awaitReadiness(2, TimeUnit.MINUTES);
if (!process.isAlive()) {
throw new MojoFailureException("Process failed to start");
}
if (process.getError() != null) {
throw new MojoFailureException("Error starting process", process.getError());
}
} catch (IOException e) {
throw new MojoFailureException("unable to execute", e);
} catch (InterruptedException e) {
throw new MojoFailureException("Error waiting for deployment", e);
}
List<SwarmProcess> procs = (List<SwarmProcess>) getPluginContext().get("swarm-process");
if (procs == null) {
procs = new ArrayList<>();
getPluginContext().put("swarm-process", procs);
}
procs.add(process);
if (waitForProcess) {
try {
process.waitFor();
} catch (InterruptedException e) {
try {
process.stop( 10, TimeUnit.SECONDS );
} catch (InterruptedException ie) {
// Do nothing
}
} finally {
process.destroyForcibly();
}
}
}
protected SwarmExecutor uberJarExecutor() throws MojoFailureException {
getLog().info("Starting -swarm.jar");
String finalName = this.project.getBuild().getFinalName();
if (finalName.endsWith(".war") || finalName.endsWith(".jar")) {
finalName = finalName.substring(0, finalName.length() - 4);
}
return new SwarmExecutor()
.withExecutableJar(Paths.get(this.projectBuildDir, finalName + "-swarm.jar"));
}
protected SwarmExecutor warExecutor() throws MojoFailureException {
getLog().info("Starting .war");
String finalName = this.project.getBuild().getFinalName();
if (!finalName.endsWith(".war")) {
finalName = finalName + ".war";
}
return executor(Paths.get(this.projectBuildDir, finalName), finalName, false);
}
protected SwarmExecutor jarExecutor() throws MojoFailureException {
getLog().info("Starting .jar");
final String finalName = this.project.getBuild().getFinalName();
return executor(Paths.get(this.project.getBuild().getOutputDirectory()),
finalName.endsWith(".jar") ? finalName : finalName + ".jar",
true);
}
protected SwarmExecutor executor(final Path appPath, final String name,
final boolean scanDependencies) throws MojoFailureException {
final SwarmExecutor executor = new SwarmExecutor()
.withModules(expandModules())
.withProperty(BootstrapProperties.APP_NAME, name)
.withClassPathEntries(dependencies(appPath, scanDependencies));
if (this.mainClass != null) {
executor.withMainClass(this.mainClass);
} else {
executor.withDefaultMainClass();
}
return executor;
}
List<Path> findNeededFractions(final Set<Artifact> existingDeps,
final Path source,
final boolean scanDeps) throws MojoFailureException {
getLog().info("Scanning for needed WildFly Swarm fractions with mode: " + fractionDetectMode);
final Set<String> existingDepGASet = existingDeps.stream()
.map(d -> String.format("%s:%s", d.getGroupId(), d.getArtifactId()))
.collect(Collectors.toSet());
final Set<FractionDescriptor> fractions;
final FractionUsageAnalyzer analyzer = new FractionUsageAnalyzer(FractionList.get()).source(source);
if (scanDeps) {
existingDeps.forEach(d -> analyzer.source(d.getFile()));
}
final Predicate<FractionDescriptor> notExistingDep =
d -> !existingDepGASet.contains(String.format("%s:%s", d.getGroupId(), d.getArtifactId()));
try {
fractions = analyzer.detectNeededFractions().stream()
.filter(notExistingDep)
.collect(Collectors.toSet());
} catch (IOException e) {
throw new MojoFailureException("failed to scan for fractions", e);
}
getLog().info("Detected fractions: " + String.join(", ", fractions.stream()
.map(FractionDescriptor::av)
.sorted()
.collect(Collectors.toList())));
fractions.addAll(this.additionalFractions.stream()
.map(f -> FractionDescriptor.fromGav(FractionList.get(), f))
.collect(Collectors.toSet()));
final Set<FractionDescriptor> allFractions = new HashSet<>(fractions);
allFractions.addAll(fractions.stream()
.flatMap(f -> f.getDependencies().stream())
.filter(notExistingDep)
.collect(Collectors.toSet()));
getLog().info("Using fractions: " +
String.join(", ", allFractions.stream()
.map(FractionDescriptor::gavOrAv)
.sorted()
.collect(Collectors.toList())));
final Set<ArtifactSpec> specs = new HashSet<>();
specs.addAll(existingDeps.stream()
.map(this::artifactToArtifactSpec)
.collect(Collectors.toList()));
specs.addAll(allFractions.stream()
.map(FractionDescriptor::toArtifactSpec)
.collect(Collectors.toList()));
try {
return mavenArtifactResolvingHelper().resolveAll(specs).stream()
.map(s -> s.file.toPath())
.collect(Collectors.toList());
} catch (Exception e) {
throw new MojoFailureException("failed to resolve fraction dependencies", e);
}
}
List<Path> dependencies(final Path archiveContent,
final boolean scanDependencies) throws MojoFailureException {
final List<Path> elements = new ArrayList<>();
final Set<Artifact> artifacts = this.project.getArtifacts();
boolean hasSwarmDeps = false;
for (Artifact each : artifacts) {
if (each.getGroupId().equals(DependencyManager.WILDFLY_SWARM_GROUP_ID)
&& each.getArtifactId().equals(DependencyManager.WILDFLY_SWARM_BOOTSTRAP_ARTIFACT_ID)) {
hasSwarmDeps = true;
}
if (each.getGroupId().equals("org.jboss.logmanager")
&& each.getArtifactId().equals("jboss-logmanager")) {
continue;
}
if (each.getScope().equals("provided")) {
continue;
}
elements.add(each.getFile().toPath());
}
elements.add(Paths.get(this.project.getBuild().getOutputDirectory()));
if (fractionDetectMode != BuildTool.FractionDetectionMode.never) {
if (fractionDetectMode == BuildTool.FractionDetectionMode.force ||
!hasSwarmDeps) {
List<Path> fractionDeps = findNeededFractions(artifacts, archiveContent, scanDependencies);
for(Path p : fractionDeps) {
if(!elements.contains(p))
elements.add(p);
}
}
} else if (!hasSwarmDeps) {
getLog().warn("No WildFly Swarm dependencies found and fraction detection disabled");
}
return elements;
}
List<Path> expandModules() {
return this.additionalModules.stream()
.map(m -> Paths.get(this.project.getBuild().getOutputDirectory(), m))
.collect(Collectors.toList());
}
} | bobmcwhirter/wildfly-swarm | plugin/src/main/java/org/wildfly/swarm/plugin/maven/StartMojo.java | Java | apache-2.0 | 13,234 |
/*
* Copyright (C) 2007-2015 Peter Monks.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file is part of an unsupported extension to Alfresco.
*
*/
package org.alfresco.extension.bulkimport.source.fs;
import java.io.File;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.util.Pair;
import org.alfresco.extension.bulkimport.source.BulkImportSourceStatus;
import static org.alfresco.extension.bulkimport.util.LogUtils.*;
import static org.alfresco.extension.bulkimport.source.fs.FilesystemSourceUtils.*;
/**
 * Analyses a source directory for a bulk import: groups the directory's
 * entries into a list of <code>FilesystemBulkImportItem</code>s, pairing
 * content files with their metadata files and version histories.
 *
 * @author Peter Monks (pmonks@gmail.com)
 */
public final class DirectoryAnalyser
{
    private final static Log log = LogFactory.getLog(DirectoryAnalyser.class);

    // Status counters
    private final static String COUNTER_NAME_FILES_SCANNED       = "Files scanned";
    private final static String COUNTER_NAME_DIRECTORIES_SCANNED = "Directories scanned";
    private final static String COUNTER_NAME_UNREADABLE_ENTRIES  = "Unreadable entries";

    private final static String[] COUNTER_NAMES = { COUNTER_NAME_FILES_SCANNED,
                                                    COUNTER_NAME_DIRECTORIES_SCANNED,
                                                    COUNTER_NAME_UNREADABLE_ENTRIES };

    private final ServiceRegistry serviceRegistry;
    private final ContentStore    configuredContentStore;
    private final MetadataLoader  metadataLoader;

    // Populated by init(), not the constructor.
    private BulkImportSourceStatus importStatus;

    public DirectoryAnalyser(final ServiceRegistry        serviceRegistry,
                             final ContentStore           configuredContentStore,
                             final MetadataLoader         metadataLoader)
    {
        // PRECONDITIONS
        assert serviceRegistry        != null : "serviceRegistry must not be null.";
        assert configuredContentStore != null : "configuredContentStore must not be null.";
        assert metadataLoader         != null : "metadataLoader must not be null.";
        // Bug fix: the original also asserted 'importStatus != null' here, but
        // that field is only populated later by init(), so the assertion could
        // never hold at construction time and always fired with -ea enabled.

        // Body
        this.serviceRegistry        = serviceRegistry;
        this.configuredContentStore = configuredContentStore;
        this.metadataLoader         = metadataLoader;
    }

    /**
     * Late initialisation with the import status, pre-registering this
     * analyser's counters. Must be called before analyseDirectory().
     */
    public void init(final BulkImportSourceStatus importStatus)
    {
        this.importStatus = importStatus;
        importStatus.preregisterSourceCounters(COUNTER_NAMES);
    }

    /**
     * Analyses the given directory.
     *
     * @param sourceDirectory The source directory for the entire import (note: <u>must</u> be a directory) <i>(must not be null)</i>.
     * @param directory The directory to analyse (note: <u>must</u> be a directory) <i>(must not be null)</i>.
     * @return A pair of (directory items, file items); null when the directory
     *         could not be listed (I/O error, or not a directory).
     * @throws InterruptedException If the thread executing the method is interrupted.
     */
    public Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> analyseDirectory(final File sourceDirectory, final File directory)
        throws InterruptedException
    {
        // PRECONDITIONS
        if (sourceDirectory == null) throw new IllegalArgumentException("sourceDirectory cannot be null.");
        if (directory       == null) throw new IllegalArgumentException("directory cannot be null.");

        // Body
        if (debug(log)) debug(log, "Analysing directory " + getFileName(directory) + "...");

        Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> result = null;
        File[] directoryListing = null;
        long   analysisStart    = 0L;
        long   analysisEnd      = 0L;
        long   start            = 0L;
        long   end              = 0L;
        String sourceRelativeParentDirectory = sourceDirectory.toPath().relativize(directory.toPath()).toString();  // Note: JDK 1.7 specific

        // List the directory
        start         = System.nanoTime();
        analysisStart = start;
        directoryListing = directory.listFiles();
        end = System.nanoTime();
        // Bug fix: File.listFiles() returns null on I/O error (or if the path
        // is not a directory); guard the dereference instead of throwing an NPE.
        if (trace(log)) trace(log, "List directory (" + (directoryListing == null ? 0 : directoryListing.length) + " entries) took: " + (float)(end - start) / (1000 * 1000 * 1000) + "s.");

        // Build up the list of items from the directory listing
        start = System.nanoTime();
        result = analyseDirectory(sourceRelativeParentDirectory, directoryListing);
        end = System.nanoTime();
        if (trace(log)) trace(log, "Convert directory listing to set of filesystem import items took: " + (float)(end - start) / (1000 * 1000 * 1000) + "s.");

        analysisEnd = end;
        if (debug(log)) debug(log, "Finished analysing directory " + getFileName(directory) + ", in " + (float)(analysisEnd - analysisStart) / (1000 * 1000 * 1000) + "s.");

        return(result);
    }

    /**
     * Converts a raw directory listing into (directory items, file items);
     * returns null when the listing itself was null.
     */
    private Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> analyseDirectory(final String sourceRelativeParentDirectory, final File[] directoryListing)
    {
        Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> result = null;

        if (directoryListing != null)
        {
            // This needs some Clojure, desperately...
            Map<String, SortedMap<BigDecimal, Pair<File, File>>> categorisedFiles = categoriseFiles(directoryListing);

            if (debug(log)) debug(log, "Categorised files: " + String.valueOf(categorisedFiles));

            result = constructImportItems(sourceRelativeParentDirectory, categorisedFiles);
        }

        return(result);
    }

    /**
     * Buckets every readable entry by parent (item) name, then by version
     * number, separating content files from metadata files.
     */
    private Map<String, SortedMap<BigDecimal, Pair<File, File>>> categoriseFiles(final File[] directoryListing)
    {
        Map<String, SortedMap<BigDecimal, Pair<File, File>>> result = null;

        if (directoryListing != null)
        {
            result = new HashMap<String, SortedMap<BigDecimal, Pair<File, File>>>();

            for (final File file : directoryListing)
            {
                categoriseFile(result, file);
            }
        }

        return(result);
    }

    /*
     * This method does the hard work of figuring out where the file belongs (which parent item, and where in that item's
     * version history).
     */
    private void categoriseFile(final Map<String, SortedMap<BigDecimal, Pair<File, File>>> categorisedFiles, final File file)
    {
        if (file != null)
        {
            if (file.canRead())
            {
                final String     fileName      = file.getName();
                final String     parentName    = getParentName(metadataLoader, fileName);
                final boolean    isMetadata    = isMetadataFile(metadataLoader, fileName);
                final BigDecimal versionNumber = getVersionNumber(fileName);

                SortedMap<BigDecimal, Pair<File, File>> versions = categorisedFiles.get(parentName);

                // Find the item
                if (versions == null)
                {
                    versions = new TreeMap<BigDecimal, Pair<File, File>>();
                    categorisedFiles.put(parentName, versions);
                }

                // Find the version within the item
                Pair<File, File> version = versions.get(versionNumber);

                if (version == null)
                {
                    version = new Pair<File, File>(null, null);
                }

                // Categorise the incoming file in that version of the item
                // (Pair = <content file, metadata file>).
                if (isMetadata)
                {
                    version = new Pair<File, File>(version.getFirst(), file);
                }
                else
                {
                    version = new Pair<File, File>(file, version.getSecond());
                }

                versions.put(versionNumber, version);

                if (file.isDirectory())
                {
                    importStatus.incrementSourceCounter(COUNTER_NAME_DIRECTORIES_SCANNED);
                }
                else
                {
                    importStatus.incrementSourceCounter(COUNTER_NAME_FILES_SCANNED);
                }
            }
            else
            {
                if (warn(log)) warn(log, "Skipping '" + getFileName(file) + "' as Alfresco does not have permission to read it.");
                importStatus.incrementSourceCounter(COUNTER_NAME_UNREADABLE_ENTRIES);
            }
        }
    }

    /**
     * Turns the categorised (item name -> versions) map into import items,
     * split into directory items and file items. An item counts as a directory
     * when its latest version is a directory.
     */
    private Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> constructImportItems(final String                                          sourceRelativeParentDirectory,
                                                                                                      final Map<String, SortedMap<BigDecimal,Pair<File,File>>> categorisedFiles)
    {
        Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> result = null;

        if (categorisedFiles != null)
        {
            final List<FilesystemBulkImportItem> directoryItems = new ArrayList<FilesystemBulkImportItem>();
            final List<FilesystemBulkImportItem> fileItems      = new ArrayList<FilesystemBulkImportItem>();

            result = new Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>>(directoryItems, fileItems);

            for (final String parentName : categorisedFiles.keySet())
            {
                final SortedMap<BigDecimal,Pair<File,File>>         itemVersions = categorisedFiles.get(parentName);
                final NavigableSet<FilesystemBulkImportItemVersion> versions     = constructImportItemVersions(itemVersions);
                final boolean                                       isDirectory  = versions.last().isDirectory();
                final FilesystemBulkImportItem item = new FilesystemBulkImportItem(parentName,
                                                                                   isDirectory,
                                                                                   sourceRelativeParentDirectory,
                                                                                   versions);
                if (isDirectory)
                {
                    directoryItems.add(item);
                }
                else
                {
                    fileItems.add(item);
                }
            }
        }

        return(result);
    }

    /**
     * Builds the sorted set of item versions from the (version number ->
     * <content file, metadata file>) map.
     *
     * @throws IllegalArgumentException if itemVersions is null or empty.
     */
    private final NavigableSet<FilesystemBulkImportItemVersion> constructImportItemVersions(final SortedMap<BigDecimal,Pair<File,File>> itemVersions)
    {
        // PRECONDITIONS
        if (itemVersions        == null) throw new IllegalArgumentException("itemVersions cannot be null.");
        if (itemVersions.size() <= 0)    throw new IllegalArgumentException("itemVersions cannot be empty.");

        // Body
        final NavigableSet<FilesystemBulkImportItemVersion> result = new TreeSet<FilesystemBulkImportItemVersion>();

        for (final BigDecimal versionNumber : itemVersions.keySet())
        {
            final Pair<File,File> contentAndMetadataFiles = itemVersions.get(versionNumber);
            final FilesystemBulkImportItemVersion version = new FilesystemBulkImportItemVersion(serviceRegistry,
                                                                                                configuredContentStore,
                                                                                                metadataLoader,
                                                                                                versionNumber,
                                                                                                contentAndMetadataFiles.getFirst(),
                                                                                                contentAndMetadataFiles.getSecond());
            result.add(version);
        }

        return(result);
    }
}
| aureg/alfresco-bulk-import | amp/src/main/java/org/alfresco/extension/bulkimport/source/fs/DirectoryAnalyser.java | Java | apache-2.0 | 13,939 |
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base model class."""
__author__ = 'Sean Lip'
import feconf
import utils
from core.platform import models
transaction_services = models.Registry.import_transaction_services()
from google.appengine.ext import ndb
class BaseModel(ndb.Model):
    """Base model for all persistent object storage classes."""

    # When this entity was first created.
    created_on = ndb.DateTimeProperty(auto_now_add=True)
    # When this entity was last updated (refreshed automatically on every put,
    # via auto_now=True).
    last_updated = ndb.DateTimeProperty(auto_now=True)
    # Whether the current version of the file is deleted. This is a soft-delete
    # flag: get() and get_all() filter on it, but the row stays in the datastore.
    deleted = ndb.BooleanProperty(indexed=True, default=False)

    @property
    def id(self):
        """A unique id for this model instance."""
        return self.key.id()

    def _pre_put_hook(self):
        """This is run before model instances are saved to the datastore.

        Subclasses of BaseModel should override this method.
        """
        pass

    class EntityNotFoundError(Exception):
        """Raised when no entity for a given id exists in the datastore."""
        pass

    @classmethod
    def get(cls, entity_id, strict=True):
        """Gets an entity by id. Fails noisily if strict == True.

        Args:
            entity_id: str. The id of the entity.
            strict: bool. Whether to fail noisily if no entity with the given id
                exists in the datastore.

        Returns:
            None, if strict == False and no undeleted entity with the given id
            exists in the datastore. Otherwise, the entity instance that
            corresponds to the given id.

        Raises:
        - base_models.BaseModel.EntityNotFoundError: if strict == True and
            no undeleted entity with the given id exists in the datastore.
        """
        entity = cls.get_by_id(entity_id)
        if entity and entity.deleted:
            # Soft-deleted entities are treated as absent.
            entity = None

        if strict and entity is None:
            raise cls.EntityNotFoundError(
                'Entity for class %s with id %s not found' %
                (cls.__name__, entity_id))
        return entity

    def put(self):
        """Stores this entity (refreshing last_updated via auto_now)."""
        super(BaseModel, self).put()

    @classmethod
    def get_multi(cls, entity_ids):
        """Fetches the entities with the given ids in one batch.

        Note: unlike get(), this applies no 'deleted' filtering and no
        strictness check.
        """
        entity_keys = [ndb.Key(cls, entity_id) for entity_id in entity_ids]
        return ndb.get_multi(entity_keys)

    @classmethod
    def put_multi(cls, entities):
        """Stores the given entities in one batch."""
        return ndb.put_multi(entities)

    def delete(self):
        """Deletes this entity from the datastore.

        Note: this is a hard delete, not a soft delete via the 'deleted' flag.
        """
        super(BaseModel, self).key.delete()

    @classmethod
    def get_all(cls, include_deleted_entities=False):
        """Returns a filterable iterable of all entities of this class.

        If include_deleted_entities is True then entities that have been marked
        deleted are returned as well.
        """
        query = cls.query()
        if not include_deleted_entities:
            # Note: ndb query filters require the '== False' comparison here;
            # 'not cls.deleted' would not build a datastore filter.
            query = query.filter(cls.deleted == False)
        return query

    @classmethod
    def get_new_id(cls, entity_name):
        """Gets a new id for an entity, based on its name.

        The returned id is guaranteed to be unique among all instances of this
        entity.

        Args:
            entity_name: the name of the entity. Coerced to a utf-8 encoded
                string. Defaults to ''.

        Returns:
            str: a new unique id for this entity class.

        Raises:
        - Exception: if an id cannot be generated within a reasonable number
            of attempts.
        """
        try:
            entity_name = unicode(entity_name).encode('utf-8')
        except Exception:
            # Any un-coercible name falls back to the empty string; uniqueness
            # then rests entirely on the random component below.
            entity_name = ''

        MAX_RETRIES = 10
        RAND_RANGE = 127 * 127
        ID_LENGTH = 12
        for i in range(MAX_RETRIES):
            # Hash of the name plus a random int, truncated to ID_LENGTH chars;
            # retry on the (unlikely) event of a collision.
            new_id = utils.convert_to_hash(
                '%s%s' % (entity_name, utils.get_random_int(RAND_RANGE)),
                ID_LENGTH)
            if not cls.get_by_id(new_id):
                return new_id

        raise Exception('New id generator is producing too many collisions.')
class VersionedModel(BaseModel):
    """Model that handles storage of the version history of model instances.

    To use this class, you must declare a SNAPSHOT_METADATA_CLASS and a
    SNAPSHOT_CONTENT_CLASS. The former must contain the String fields
    'committer_id', 'commit_type' and 'commit_message', and a JSON field for
    the Python list of dicts, 'commit_cmds'. The latter must contain the JSON
    field 'content'. The item that is being versioned must be serializable to
    a JSON blob.

    Note that commit() should be used for VersionedModels, as opposed to put()
    for direct subclasses of BaseModel.
    """
    # The class designated as the snapshot model. This should be a subclass of
    # BaseSnapshotMetadataModel.
    SNAPSHOT_METADATA_CLASS = None
    # The class designated as the snapshot content model. This should be a
    # subclass of BaseSnapshotContentModel.
    SNAPSHOT_CONTENT_CLASS = None
    # Whether reverting is allowed. Default is False.
    ALLOW_REVERT = False

    ### IMPORTANT: Subclasses should only overwrite things above this line. ###

    # The possible commit types.
    _COMMIT_TYPE_CREATE = 'create'
    _COMMIT_TYPE_REVERT = 'revert'
    _COMMIT_TYPE_EDIT = 'edit'
    _COMMIT_TYPE_DELETE = 'delete'
    # A list containing the possible commit types.
    COMMIT_TYPE_CHOICES = [
        _COMMIT_TYPE_CREATE, _COMMIT_TYPE_REVERT, _COMMIT_TYPE_EDIT,
        _COMMIT_TYPE_DELETE
    ]
    # The delimiter used to separate the version number from the model
    # instance id. To get the instance id from a snapshot id, use Python's
    # rfind() method to find the location of this delimiter.
    _VERSION_DELIMITER = '-'
    # The reserved prefix for keys that are automatically inserted into a
    # commit_cmd dict by this model.
    _AUTOGENERATED_PREFIX = 'AUTO'
    # The current version number of this instance. In each PUT operation,
    # this number is incremented and a snapshot of the modified instance is
    # stored in the snapshot metadata and content models. The snapshot
    # version number starts at 1 when the model instance is first created.
    # All data in this instance represents the version at HEAD; data about the
    # previous versions is stored in the snapshot models.
    version = ndb.IntegerProperty(default=0)

    def _require_not_marked_deleted(self):
        """Raises an Exception if this instance has been marked as deleted."""
        if self.deleted:
            raise Exception('This model instance has been deleted.')

    def _compute_snapshot(self):
        """Generates a snapshot (a Python dict) from the model fields."""
        return self.to_dict(exclude=['created_on', 'last_updated'])

    def _reconstitute(self, snapshot_dict):
        """Makes this instance into a reconstitution of the given snapshot."""
        self.populate(**snapshot_dict)
        return self

    def _reconstitute_from_snapshot_id(self, snapshot_id):
        """Makes this instance into a reconstitution of the given snapshot."""
        snapshot_model = self.SNAPSHOT_CONTENT_CLASS.get(snapshot_id)
        snapshot_dict = snapshot_model.content
        return self._reconstitute(snapshot_dict)

    @classmethod
    def _get_snapshot_id(cls, instance_id, version_number):
        """Returns the snapshot id for the given instance id and version."""
        return '%s%s%s' % (
            instance_id, cls._VERSION_DELIMITER, version_number)

    def _trusted_commit(
            self, committer_id, commit_type, commit_message, commit_cmds):
        """Validates commit_cmds, bumps the version, and writes the snapshot
        metadata/content models together with this instance in a single
        transaction (via transaction_services.run_in_transaction).

        Raises:
            - Exception: if the snapshot classes are undefined or commit_cmds
                is not a list of dicts.
        """
        if self.SNAPSHOT_METADATA_CLASS is None:
            raise Exception('No snapshot metadata class defined.')
        if self.SNAPSHOT_CONTENT_CLASS is None:
            raise Exception('No snapshot content class defined.')
        if not isinstance(commit_cmds, list):
            raise Exception(
                'Expected commit_cmds to be a list of dicts, received %s'
                % commit_cmds)
        for item in commit_cmds:
            if not isinstance(item, dict):
                raise Exception(
                    'Expected commit_cmds to be a list of dicts, received %s'
                    % commit_cmds)

        self.version += 1

        snapshot = self._compute_snapshot()
        snapshot_id = self._get_snapshot_id(self.id, self.version)

        snapshot_metadata_instance = self.SNAPSHOT_METADATA_CLASS(
            id=snapshot_id, committer_id=committer_id, commit_type=commit_type,
            commit_message=commit_message, commit_cmds=commit_cmds)
        snapshot_content_instance = self.SNAPSHOT_CONTENT_CLASS(
            id=snapshot_id, content=snapshot)

        transaction_services.run_in_transaction(
            ndb.put_multi,
            [snapshot_metadata_instance, snapshot_content_instance, self])

    def delete(self, committer_id, commit_message, force_deletion=False):
        """Deletes this instance.

        If force_deletion is True, the instance and all of its snapshots are
        removed from the datastore permanently. Otherwise, the instance is
        only marked as deleted and a 'delete' commit is recorded.
        """
        if force_deletion:
            current_version = self.version

            # Snapshot versions are 1-based.
            version_numbers = [str(num + 1) for num in range(current_version)]
            snapshot_ids = [
                self._get_snapshot_id(self.id, version_number)
                for version_number in version_numbers]

            metadata_keys = [
                ndb.Key(self.SNAPSHOT_METADATA_CLASS, snapshot_id)
                for snapshot_id in snapshot_ids]
            ndb.delete_multi(metadata_keys)

            content_keys = [
                ndb.Key(self.SNAPSHOT_CONTENT_CLASS, snapshot_id)
                for snapshot_id in snapshot_ids]
            ndb.delete_multi(content_keys)

            super(VersionedModel, self).delete()
        else:
            self._require_not_marked_deleted()
            self.deleted = True

            CMD_DELETE = '%s_mark_deleted' % self._AUTOGENERATED_PREFIX
            commit_cmds = [{
                'cmd': CMD_DELETE
            }]

            self._trusted_commit(
                committer_id, self._COMMIT_TYPE_DELETE, commit_message,
                commit_cmds)

    def put(self, *args, **kwargs):
        """For VersionedModels, this method is replaced with commit()."""
        raise NotImplementedError

    def commit(self, committer_id, commit_message, commit_cmds):
        """Saves a version snapshot and updates the model.

        commit_cmds should give sufficient information to reconstruct the
        commit.

        Raises:
            - Exception: if a commit_cmd lacks a 'cmd' key, or uses a command
                name reserved for autogenerated commands.
        """
        self._require_not_marked_deleted()

        for commit_cmd in commit_cmds:
            if 'cmd' not in commit_cmd:
                raise Exception(
                    'Invalid commit_cmd: %s. Expected a \'cmd\' key.'
                    % commit_cmd)
            if commit_cmd['cmd'].startswith(self._AUTOGENERATED_PREFIX):
                # Bug fix: the original format string had no '%s'
                # placeholder, so this raise itself failed with a TypeError
                # ('not all arguments converted during string formatting').
                raise Exception(
                    'Invalid change list command: %s' % commit_cmd['cmd'])

        commit_type = (
            self._COMMIT_TYPE_CREATE if self.version == 0 else
            self._COMMIT_TYPE_EDIT)

        self._trusted_commit(
            committer_id, commit_type, commit_message, commit_cmds)

    def revert(self, committer_id, commit_message, version_number):
        """Reverts this instance to an earlier version, as a new commit."""
        self._require_not_marked_deleted()

        if not self.ALLOW_REVERT:
            raise Exception(
                'Reverting of objects of type %s is not allowed.'
                % self.__class__.__name__)

        CMD_REVERT = '%s_revert_version_number' % self._AUTOGENERATED_PREFIX
        commit_cmds = [{
            'cmd': CMD_REVERT,
            'version_number': version_number
        }]

        # Do not overwrite the version number.
        current_version = self.version

        snapshot_id = self._get_snapshot_id(self.id, version_number)
        self._reconstitute_from_snapshot_id(snapshot_id)
        self.version = current_version

        self._trusted_commit(
            committer_id, self._COMMIT_TYPE_REVERT, commit_message,
            commit_cmds)

    @classmethod
    def get_version(cls, model_instance_id, version_number):
        """Returns a model instance representing the given version.

        The snapshot content is used to populate this model instance. The
        snapshot metadata is not used.
        """
        cls.get(model_instance_id)._require_not_marked_deleted()
        snapshot_id = cls._get_snapshot_id(model_instance_id, version_number)
        return cls(id=model_instance_id)._reconstitute_from_snapshot_id(
            snapshot_id)

    @classmethod
    def get(cls, entity_id, strict=True, version=None):
        """Gets an entity by id. Fails noisily if strict == True."""
        if version is None:
            return super(VersionedModel, cls).get(entity_id, strict=strict)
        else:
            return cls.get_version(entity_id, version)

    @classmethod
    def get_snapshots_metadata(cls, model_instance_id, version_numbers):
        """Returns a list of dicts, each representing a model snapshot.

        One dict is returned for each version number in the list of version
        numbers requested. If any of the version numbers does not exist, an
        error is raised.
        """
        cls.get(model_instance_id)._require_not_marked_deleted()

        snapshot_ids = [
            cls._get_snapshot_id(model_instance_id, version_number)
            for version_number in version_numbers]
        metadata_keys = [
            ndb.Key(cls.SNAPSHOT_METADATA_CLASS, snapshot_id)
            for snapshot_id in snapshot_ids]
        returned_models = ndb.get_multi(metadata_keys)

        for ind, model in enumerate(returned_models):
            if model is None:
                raise Exception(
                    'Invalid version number %s for model %s with id %s'
                    % (version_numbers[ind], cls.__name__, model_instance_id))

        return [{
            'committer_id': model.committer_id,
            'commit_message': model.commit_message,
            'commit_cmds': model.commit_cmds,
            'commit_type': model.commit_type,
            'version_number': version_numbers[ind],
            'created_on': model.created_on.strftime(
                feconf.HUMAN_READABLE_DATETIME_FORMAT),
        } for (ind, model) in enumerate(returned_models)]
class BaseSnapshotMetadataModel(BaseModel):
    """Base class for snapshot metadata classes.

    The id of this model is computed using
    VersionedModel._get_snapshot_id(), i.e. '<instance_id>-<version>'.
    """
    # The id of the user who committed this revision.
    committer_id = ndb.StringProperty(required=True)
    # The type of the commit associated with this snapshot. Restricted to
    # the values in VersionedModel.COMMIT_TYPE_CHOICES.
    commit_type = ndb.StringProperty(
        required=True, choices=VersionedModel.COMMIT_TYPE_CHOICES)
    # The commit message associated with this snapshot.
    commit_message = ndb.TextProperty(indexed=False)
    # A sequence of commands that can be used to describe this commit.
    # Represented as a list of dicts.
    commit_cmds = ndb.JsonProperty(indexed=False)
class BaseSnapshotContentModel(BaseModel):
    """Base class for snapshot content classes.

    The id of this model is computed using
    VersionedModel._get_snapshot_id(), i.e. '<instance_id>-<version>'.
    """
    # The snapshot content (the versioned instance's fields), as a JSON blob.
    content = ndb.JsonProperty(indexed=False)
| openhatch/oh-missions-oppia-beta | core/storage/base_model/gae_models.py | Python | apache-2.0 | 15,533 |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
namespace Google.Cloud.Tpu.V1.Snippets
{
// [START tpu_v1_generated_Tpu_CreateNode_async]
using Google.Api.Gax.ResourceNames;
using Google.Cloud.Tpu.V1;
using Google.LongRunning;
using System.Threading.Tasks;
    public sealed partial class GeneratedTpuClientSnippets
    {
        /// <summary>Snippet for CreateNodeAsync</summary>
        /// <remarks>
        /// This snippet has been automatically generated for illustrative purposes only.
        /// It may require modifications to work in your environment.
        /// </remarks>
        public async Task CreateNodeRequestObjectAsync()
        {
            // Create client
            TpuClient tpuClient = await TpuClient.CreateAsync();
            // Initialize request argument(s). The bracketed values are
            // placeholders that must be replaced with real resource ids.
            CreateNodeRequest request = new CreateNodeRequest
            {
                ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
                NodeId = "",
                Node = new Node(),
            };
            // Make the request (starts a long-running operation)
            Operation<Node, OperationMetadata> response = await tpuClient.CreateNodeAsync(request);
            // Poll until the returned long-running operation is complete
            Operation<Node, OperationMetadata> completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            Node result = completedResponse.Result;
            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            Operation<Node, OperationMetadata> retrievedResponse = await tpuClient.PollOnceCreateNodeAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Node retrievedResult = retrievedResponse.Result;
            }
        }
    }
// [END tpu_v1_generated_Tpu_CreateNode_async]
}
| googleapis/google-cloud-dotnet | apis/Google.Cloud.Tpu.V1/Google.Cloud.Tpu.V1.GeneratedSnippets/TpuClient.CreateNodeRequestObjectAsyncSnippet.g.cs | C# | apache-2.0 | 2,721 |
/*
* Copyright 2015-2018 Jeeva Kandasamy (jkandasa@gmail.com)
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mycontroller.standalone.api.jaxrs.mixins.deserializers;
import java.io.IOException;
import org.mycontroller.standalone.timer.TimerUtils.FREQUENCY_TYPE;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
/**
 * Jackson deserializer that maps a JSON string token onto a
 * {@link FREQUENCY_TYPE} enum constant.
 *
 * @author Jeeva Kandasamy (jkandasa)
 * @since 0.0.2
 */
public class FrequencyTypeDeserializer extends JsonDeserializer<FREQUENCY_TYPE> {

    @Override
    public FREQUENCY_TYPE deserialize(JsonParser parser, DeserializationContext context)
            throws IOException, JsonProcessingException {
        // A null token maps to null; otherwise delegate to the enum's own
        // string lookup.
        final String rawText = parser.getText();
        return (rawText == null) ? null : FREQUENCY_TYPE.fromString(rawText);
    }
}
| pgh70/mycontroller | modules/core/src/main/java/org/mycontroller/standalone/api/jaxrs/mixins/deserializers/FrequencyTypeDeserializer.java | Java | apache-2.0 | 1,580 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.mskcc.shenkers.data.interval;
import htsjdk.tribble.Feature;
import htsjdk.tribble.annotation.Strand;
import htsjdk.tribble.bed.FullBEDFeature;
import java.awt.Color;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * A genomic {@link Feature} that additionally exposes a strand and carries a
 * typed value.
 *
 * @param <T> the type of the value associated with this feature
 * @author sol
 */
public interface IntervalFeature<T> extends Feature {

    /** Returns the strand this feature lies on. */
    Strand getStrand();

    /** Returns the value associated with this feature. */
    T getValue();
}
| shenkers/CrossBrowse | src/main/java/org/mskcc/shenkers/data/interval/IntervalFeature.java | Java | apache-2.0 | 585 |
/*! @license
* Shaka Player
* Copyright 2016 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
goog.provide('shaka.ui.FastForwardButton');
goog.require('shaka.ui.Controls');
goog.require('shaka.ui.Element');
goog.require('shaka.ui.Enums');
goog.require('shaka.ui.Locales');
goog.require('shaka.ui.Localization');
goog.require('shaka.util.Dom');
/**
 * A UI button that cycles the player through the configured fast-forward
 * (trick-play) rates.
 *
 * @extends {shaka.ui.Element}
 * @final
 * @export
 */
shaka.ui.FastForwardButton = class extends shaka.ui.Element {
  /**
   * @param {!HTMLElement} parent
   * @param {!shaka.ui.Controls} controls
   */
  constructor(parent, controls) {
    super(parent, controls);

    /** @private {!HTMLButtonElement} */
    this.button_ = shaka.util.Dom.createButton();
    this.button_.classList.add(
        'material-icons-round',
        'shaka-fast-forward-button',
        'shaka-tooltip-status');
    this.button_.setAttribute('shaka-status', '1x');
    this.button_.textContent =
        shaka.ui.Enums.MaterialDesignIcons.FAST_FORWARD;
    this.parent.appendChild(this.button_);
    this.updateAriaLabel_();

    /** @private {!Array.<number>} */
    this.fastForwardRates_ = this.controls.getConfig().fastForwardRates;

    // Keep the accessibility label in sync with locale changes.
    this.eventManager.listen(
        this.localization, shaka.ui.Localization.LOCALE_UPDATED, () => {
          this.updateAriaLabel_();
        });
    this.eventManager.listen(
        this.localization, shaka.ui.Localization.LOCALE_CHANGED, () => {
          this.updateAriaLabel_();
        });
    this.eventManager.listen(this.button_, 'click', () => {
      this.fastForward_();
    });
  }

  /**
   * Refreshes the localized aria label on the button.
   * @private
   */
  updateAriaLabel_() {
    this.button_.ariaLabel =
        this.localization.resolve(shaka.ui.Locales.Ids.FAST_FORWARD);
  }

  /**
   * Cycles trick play rate between the selected fast forward rates.
   * @private
   */
  fastForward_() {
    if (!this.video.duration) {
      return;
    }

    const rates = this.fastForwardRates_;
    const currentIndex = rates.indexOf(this.player.getPlaybackRate());
    // Advance to the next configured rate, wrapping back to the first one
    // (also used when the current rate is not in the list at all).
    const newRate = rates[(currentIndex + 1) % rates.length];

    this.player.trickPlay(newRate);
    this.button_.setAttribute('shaka-status', newRate + 'x');
  }
};
/**
 * A factory that creates FastForwardButton instances for the UI layout.
 * @implements {shaka.extern.IUIElement.Factory}
 * @final
 */
shaka.ui.FastForwardButton.Factory = class {
  /** @override */
  create(rootElement, controls) {
    return new shaka.ui.FastForwardButton(rootElement, controls);
  }
};

// Register the element so UI configs can refer to it as 'fast_forward'.
shaka.ui.Controls.registerElement(
    'fast_forward', new shaka.ui.FastForwardButton.Factory());
| tvoli/shaka-player | ui/fast_forward_button.js | JavaScript | apache-2.0 | 2,812 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.eas.widgets.containers;
import com.eas.core.XElement;
import com.google.gwt.dom.client.Style;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.RequiresResize;
import com.google.gwt.user.client.ui.Widget;
/**
 * A {@link FlowPanel} that applies configurable horizontal and vertical
 * gaps (CSS margins) to the widgets added to it.
 *
 * @author mg
 */
public class FlowGapPanel extends FlowPanel implements RequiresResize {

    protected int hgap;
    protected int vgap;

    public FlowGapPanel() {
        super();
        getElement().<XElement>cast().addResizingTransitionEnd(this);
        getElement().getStyle().setLineHeight(0, Style.Unit.PX);
    }

    public int getHgap() {
        return hgap;
    }

    public void setHgap(int aValue) {
        hgap = aValue;
        // Re-apply the horizontal gap to every child already in the panel.
        int childCount = getWidgetCount();
        for (int childIndex = 0; childIndex < childCount; childIndex++) {
            getWidget(childIndex).getElement().getStyle()
                    .setMarginLeft(hgap, Style.Unit.PX);
        }
    }

    public int getVgap() {
        return vgap;
    }

    public void setVgap(int aValue) {
        vgap = aValue;
        // Re-apply the vertical gap to every child already in the panel.
        int childCount = getWidgetCount();
        for (int childIndex = 0; childIndex < childCount; childIndex++) {
            getWidget(childIndex).getElement().getStyle()
                    .setMarginTop(vgap, Style.Unit.PX);
        }
    }

    @Override
    public void add(Widget w) {
        // New children pick up the current gaps and are laid out inline.
        Style childStyle = w.getElement().getStyle();
        childStyle.setMarginLeft(hgap, Style.Unit.PX);
        childStyle.setMarginTop(vgap, Style.Unit.PX);
        childStyle.setDisplay(Style.Display.INLINE_BLOCK);
        childStyle.setVerticalAlign(Style.VerticalAlign.BOTTOM);
        super.add(w);
    }

    @Override
    public void onResize() {
        // Intentionally empty; reserved for future use.
    }
}
| jskonst/PlatypusJS | web-client/src/platypus/src/com/eas/widgets/containers/FlowGapPanel.java | Java | apache-2.0 | 1,696 |
package org.cohorte.herald.core.utils;
import java.util.Iterator;
import org.cohorte.herald.Message;
import org.cohorte.herald.MessageReceived;
import org.jabsorb.ng.JSONSerializer;
import org.jabsorb.ng.serializer.MarshallException;
import org.jabsorb.ng.serializer.UnmarshallException;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
/**
 * Utility methods that convert Herald {@link Message} objects to their JSON
 * wire format and rebuild {@link MessageReceived} objects from received JSON
 * payloads.
 */
public class MessageUtils {

    /** The Jabsorb serializer, used for non-String message contents */
    private static JSONSerializer pSerializer = new JSONSerializer();

    static {
        try {
            // Install Jabsorb's default (de)serializers once, at class load.
            pSerializer.registerDefaultSerializers();
        } catch (Exception e) {
            // NOTE(review): an initialization failure is only printed here;
            // later toJSON()/fromJSON() calls on the serializer would then
            // fail. Consider failing fast instead.
            e.printStackTrace();
        }
    }

    /**
     * Serializes a Herald message to its JSON wire representation.
     *
     * @param aMsg the message to serialize
     * @return the JSON string, or null if building the JSON document failed
     *         (the JSONException is only printed, not rethrown)
     * @throws MarshallException if the message content cannot be marshalled
     *         by the Jabsorb serializer
     */
    public static String toJSON(Message aMsg) throws MarshallException {
        JSONObject json = new JSONObject();
        try {
            // headers
            JSONObject headers = new JSONObject();
            for (String key : aMsg.getHeaders().keySet()) {
                headers.put(key, aMsg.getHeaders().get(key));
            }
            json.put(Message.MESSAGE_HEADERS, headers);
            // subject
            json.put(Message.MESSAGE_SUBJECT, aMsg.getSubject());
            // content: plain strings go in as-is; any other object is
            // marshalled by Jabsorb and embedded as a JSON object.
            if (aMsg.getContent() != null) {
                if (aMsg.getContent() instanceof String) {
                    json.put(Message.MESSAGE_CONTENT, aMsg.getContent());
                } else {
                    JSONObject content = new JSONObject(pSerializer.toJSON(aMsg.getContent()));
                    json.put(Message.MESSAGE_CONTENT, content);
                }
            }
            // metadata
            JSONObject metadata = new JSONObject();
            for (String key : aMsg.getMetadata().keySet()) {
                metadata.put(key, aMsg.getMetadata().get(key));
            }
            json.put(Message.MESSAGE_METADATA, metadata);
        } catch (JSONException e) {
            // NOTE(review): the error is swallowed and null is returned;
            // callers must null-check the result.
            e.printStackTrace();
            return null;
        }
        return json.toString();
    }

    /**
     * Parses a received JSON payload back into a {@link MessageReceived}.
     *
     * The payload must carry the supported Herald specification version in
     * its headers; otherwise parsing is aborted.
     *
     * @param json the raw JSON string received from a peer
     * @return the reconstructed message, or null if parsing failed or the
     *         Herald specification version is unsupported (errors are only
     *         printed, not rethrown)
     * @throws UnmarshallException if the content cannot be unmarshalled by
     *         the Jabsorb serializer
     */
    @SuppressWarnings("unchecked")
    public static MessageReceived fromJSON(String json) throws UnmarshallException {
        try {
            JSONObject wParsedMsg = new JSONObject(json);
            {
                try {
                    // check if valid herald message (respects herald specification version)
                    int heraldVersion = -1;
                    JSONObject jHeader = wParsedMsg.getJSONObject(Message.MESSAGE_HEADERS);
                    if (jHeader != null) {
                        if (jHeader.has(Message.MESSAGE_HERALD_VERSION)) {
                            heraldVersion = jHeader.getInt(Message.MESSAGE_HERALD_VERSION);
                        }
                    }
                    if (heraldVersion != Message.HERALD_SPECIFICATION_VERSION) {
                        throw new JSONException("Herald specification of the received message is not supported!");
                    }
                    // Build the message shell from uid + subject; the other
                    // fields are filled in below.
                    MessageReceived wMsg = new MessageReceived(
                            wParsedMsg.getJSONObject(Message.MESSAGE_HEADERS).getString(Message.MESSAGE_HEADER_UID),
                            wParsedMsg.getString(Message.MESSAGE_SUBJECT),
                            null,
                            null,
                            null,
                            null,
                            null,
                            null);
                    // content: JSON objects/arrays are unmarshalled with
                    // Jabsorb; primitive values are kept as-is.
                    Object cont = wParsedMsg.opt(Message.MESSAGE_CONTENT);
                    if (cont != null) {
                        if (cont instanceof JSONObject || cont instanceof JSONArray) {
                            wMsg.setContent(pSerializer.fromJSON(cont.toString()));
                        } else
                            wMsg.setContent(cont);
                    } else {
                        wMsg.setContent(null);
                    }
                    // headers
                    Iterator<String> wKeys;
                    if (wParsedMsg.getJSONObject(Message.MESSAGE_HEADERS) != null) {
                        wKeys = wParsedMsg.getJSONObject(Message.MESSAGE_HEADERS).keys();
                        while(wKeys.hasNext()) {
                            String key = wKeys.next();
                            wMsg.addHeader(key, wParsedMsg.getJSONObject(Message.MESSAGE_HEADERS).get(key));
                        }
                    }
                    // metadata
                    Iterator<String> wKeys2;
                    if (wParsedMsg.getJSONObject(Message.MESSAGE_METADATA) != null) {
                        wKeys2 = wParsedMsg.getJSONObject(Message.MESSAGE_METADATA).keys();
                        while(wKeys2.hasNext()) {
                            String key = wKeys2.next();
                            wMsg.addMetadata(key, wParsedMsg.getJSONObject(Message.MESSAGE_METADATA).get(key));
                        }
                    }
                    return wMsg;
                } catch (JSONException e) {
                    // NOTE(review): parse errors (including the unsupported
                    // version case above) are printed and null is returned.
                    e.printStackTrace();
                    return null;
                }
            }
        } catch (Exception e) {
            // NOTE(review): broad catch; any other failure also yields null.
            e.printStackTrace();
            return null;
        }
    }
}
| isandlaTech/cohorte-herald | java/org.cohorte.herald.core/src/org/cohorte/herald/core/utils/MessageUtils.java | Java | apache-2.0 | 4,164 |
package org.apache.cocoon.transformation;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Map;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipEntry;
import org.apache.avalon.framework.parameters.Parameters;
import org.apache.cocoon.ProcessingException;
import org.apache.cocoon.environment.SourceResolver;
import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;
/**
* This transformer downloads a new file to disk.
* <p>
* It triggers for elements in the namespace "http://apache.org/cocoon/download/1.0".
* Attributes:
* @src : the file that should be downloaded
* @target (optional): path where the file should be stored (includes filename)
* @target-dir (optional): directory where the file should be stored
* @unzip (optional): if "true" then unzip file after downloading.
* If there is no @target or @target-dir attribute a temporary file is created.
* <p>
* Example XML input:
* <pre>
* {@code
* <download:download src="http://some.server.com/zipfile.zip"
* target="/tmp/zipfile.zip" unzip="true"/>
* }
* </pre>
* The @src specifies the file that should be downloaded. The
* @target specifies where the file should be stored. @unzip is true, so the
* file will be unzipped immediately.
* <p>
* The result is
* <pre>
* {@code
* <download:result unzipped="/path/to/unzipped/file/on/disk">/path/to/file/on/disk</download:result>
* }
* </pre>
* (@unzipped is only present when @unzip="true") or
* <pre>
* {@code
* <download:error>The error message</download:file>
* }
* </pre>
* if an error (other than a HTTP error) occurs.
* HTTP errors are thrown.
* Define this transformer in the sitemap:
* <pre>
* {@code
* <map:components>
* <map:transformers>
* <map:transformer name="download" logger="sitemap.transformer.download"
* src="org.apache.cocoon.transformation.DownloadTransformer"/>
* ...
* }
* </pre>
* Use this transformer:
* <pre>
* {@code
* <map:transform type="download"/>
* }
* </pre>
*
*
* @author <a href="mailto:maarten.kroon@koop.overheid.nl">Maarten Kroon</a>
* @author <a href="mailto:hhv@x-scale.nl">Huib Verweij</a>
*/
public class DownloadTransformer extends AbstractSAXTransformer {

    public static final String DOWNLOAD_NS = "http://apache.org/cocoon/download/1.0";

    public static final String DOWNLOAD_ELEMENT = "download";
    private static final String DOWNLOAD_PREFIX = "download";
    public static final String RESULT_ELEMENT = "result";
    public static final String ERROR_ELEMENT = "error";
    public static final String SRC_ATTRIBUTE = "src";
    public static final String TARGET_ATTRIBUTE = "target";
    public static final String TARGETDIR_ATTRIBUTE = "target-dir";
    public static final String UNZIP_ATTRIBUTE = "unzip";
    public static final String RECURSIVE_UNZIP_ATTRIBUTE = "recursive-unzip";
    public static final String UNZIPPED_ATTRIBUTE = "unzipped";

    public DownloadTransformer() {
        this.defaultNamespaceURI = DOWNLOAD_NS;
    }

    @Override
    public void setup(SourceResolver resolver, Map objectModel, String src,
            Parameters params) throws ProcessingException, SAXException, IOException {
        super.setup(resolver, objectModel, src, params);
    }

    /**
     * Handles {@code <download:download>} elements: downloads the @src URL,
     * optionally unzips it, and emits a {@code <download:result>} element.
     * Non-HTTP failures are reported inline as {@code <download:error>};
     * HTTP errors are thrown from {@link #download}.
     */
    @Override
    public void startTransformingElement(String uri, String localName,
            String qName, Attributes attributes) throws SAXException, ProcessingException, IOException {
        if (DOWNLOAD_NS.equals(uri) && DOWNLOAD_ELEMENT.equals(localName)) {
            try {
                File[] downloadResult = download(
                        attributes.getValue(SRC_ATTRIBUTE),
                        attributes.getValue(TARGETDIR_ATTRIBUTE),
                        attributes.getValue(TARGET_ATTRIBUTE),
                        attributes.getValue(UNZIP_ATTRIBUTE),
                        attributes.getValue(RECURSIVE_UNZIP_ATTRIBUTE)
                );
                File downloadedFile = downloadResult[0];
                File unzipDir = downloadResult[1];

                String absPath = downloadedFile.getCanonicalPath();

                AttributesImpl attrsImpl = new AttributesImpl();
                if (unzipDir != null) {
                    attrsImpl.addAttribute("", UNZIPPED_ATTRIBUTE, UNZIPPED_ATTRIBUTE, "CDATA", unzipDir.getAbsolutePath());
                }
                xmlConsumer.startElement(uri, RESULT_ELEMENT, String.format("%s:%s", DOWNLOAD_PREFIX, RESULT_ELEMENT), attrsImpl);
                xmlConsumer.characters(absPath.toCharArray(), 0, absPath.length());
                xmlConsumer.endElement(uri, RESULT_ELEMENT, String.format("%s:%s", DOWNLOAD_PREFIX, RESULT_ELEMENT));
            } catch (Exception e) {
                xmlConsumer.startElement(uri, ERROR_ELEMENT, qName, attributes);
                // Bug fix: e.getMessage() may be null (e.g. NPEs), which
                // previously crashed message.toCharArray() below.
                String message = (e.getMessage() != null) ? e.getMessage() : e.toString();
                xmlConsumer.characters(message.toCharArray(), 0, message.length());
                xmlConsumer.endElement(uri, ERROR_ELEMENT, qName);
            }
        } else {
            super.startTransformingElement(uri, localName, qName, attributes);
        }
    }

    @Override
    public void endTransformingElement(String uri, String localName, String qName)
            throws SAXException, ProcessingException, IOException {
        if (DOWNLOAD_NS.equals(namespaceURI) && DOWNLOAD_ELEMENT.equals(localName)) {
            // The start handler already emitted the result; swallow the end tag.
            return;
        }
        super.endTransformingElement(uri, localName, qName);
    }

    /**
     * Downloads sourceUri to a file and optionally unzips it.
     *
     * @param sourceUri the URL to download
     * @param targetDir directory to store the download in (optional)
     * @param target full path (including filename) to store the download in
     *        (optional; takes precedence over targetDir). If neither is
     *        given, a temporary file is created.
     * @param unzip "true" to unzip the downloaded file
     * @param recursiveUnzip "true" to also unzip nested zip files
     * @return a two-element array: the downloaded file and the unzip
     *         directory (null if no unzipping was requested)
     * @throws ProcessingException on HTTP errors or proxy misconfiguration
     */
    private File[] download(String sourceUri, String targetDir, String target, String unzip, String recursiveUnzip)
            throws ProcessingException, IOException, SAXException {
        File targetFile;
        File unZipped = null;

        if (null != target && !target.equals("")) {
            targetFile = new File(target);
        } else if (null != targetDir && !targetDir.equals("")) {
            // NOTE(review): this uses the target-dir value as the file path
            // itself; verify whether the filename should be appended here.
            targetFile = new File(targetDir);
        } else {
            String baseName = FilenameUtils.getBaseName(sourceUri);
            String extension = FilenameUtils.getExtension(sourceUri);
            targetFile = File.createTempFile(baseName, "." + extension);
        }
        if (!targetFile.getParentFile().exists()) {
            targetFile.getParentFile().mkdirs();
        }

        boolean unzipFile = (null != unzip && unzip.equals("true")) ||
                (null != recursiveUnzip && recursiveUnzip.equals("true"));
        String absPath = targetFile.getAbsolutePath();
        String unzipDir = unzipFile ? FilenameUtils.removeExtension(absPath) : "";

        HttpClient httpClient = new HttpClient();
        httpClient.setConnectionTimeout(60000);
        httpClient.setTimeout(60000);

        // Honour the standard JVM proxy settings, except for hosts matched
        // by http.nonProxyHosts (translated to a regex below).
        if (System.getProperty("http.proxyHost") != null) {
            String nonProxyHostsRE = System.getProperty("http.nonProxyHosts", "");
            if (nonProxyHostsRE.length() > 0) {
                String[] pHosts = nonProxyHostsRE.replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*").split("\\|");
                nonProxyHostsRE = "";
                for (String pHost : pHosts) {
                    nonProxyHostsRE += "|(^https?://" + pHost + ".*$)";
                }
                nonProxyHostsRE = nonProxyHostsRE.substring(1);
            }
            if (nonProxyHostsRE.length() == 0 || !sourceUri.matches(nonProxyHostsRE)) {
                try {
                    HostConfiguration hostConfiguration = httpClient.getHostConfiguration();
                    hostConfiguration.setProxy(System.getProperty("http.proxyHost"), Integer.parseInt(System.getProperty("http.proxyPort", "80")));
                    httpClient.setHostConfiguration(hostConfiguration);
                } catch (Exception e) {
                    throw new ProcessingException("Cannot set proxy!", e);
                }
            }
        }

        HttpMethod httpMethod = new GetMethod(sourceUri);
        try {
            int responseCode = httpClient.executeMethod(httpMethod);
            if (responseCode < 200 || responseCode >= 300) {
                throw new ProcessingException(String.format("Received HTTP status code %d (%s)", responseCode, httpMethod.getStatusText()));
            }
            OutputStream os = new BufferedOutputStream(new FileOutputStream(targetFile));
            try {
                IOUtils.copyLarge(httpMethod.getResponseBodyAsStream(), os);
            } finally {
                os.close();
            }
        } finally {
            httpMethod.releaseConnection();
        }

        if (!"".equals(unzipDir)) {
            unZipped = unZipIt(targetFile, unzipDir, recursiveUnzip);
        }

        return new File[] {targetFile, unZipped};
    }

    /**
     * Unzips a file into the given output folder, optionally recursing into
     * nested zip files.
     *
     * @param zipFile input zip file
     * @param outputFolder zip file output folder
     * @param recursiveUnzip "true" to also unzip nested zip entries
     * @return the output folder (possibly partially populated if an I/O
     *         error occurred; errors are only printed, as before)
     */
    private File unZipIt(File zipFile, String outputFolder, String recursiveUnzip) {
        byte[] buffer = new byte[4096];
        File folder = null;
        try {
            // Create the output directory (including missing parents; the
            // original mkdir() failed for nested recursive-unzip folders).
            folder = new File(outputFolder);
            if (!folder.exists()) {
                folder.mkdirs();
            }
            String canonicalFolderPath = folder.getCanonicalPath();

            try (ZipInputStream zis = new ZipInputStream(new FileInputStream(zipFile))) {
                ZipEntry ze = zis.getNextEntry();
                while (ze != null) {
                    String fileName = ze.getName();
                    File newFile = new File(outputFolder + File.separator + fileName);

                    // SECURITY: guard against the "Zip Slip" path-traversal
                    // attack. A malicious entry name such as "../../evil.sh"
                    // must not escape the output folder.
                    String canonicalEntryPath = newFile.getCanonicalPath();
                    if (!canonicalEntryPath.equals(canonicalFolderPath)
                            && !canonicalEntryPath.startsWith(canonicalFolderPath + File.separator)) {
                        throw new IOException("Zip entry is outside of the target directory: " + fileName);
                    }

                    // Directory entries only need their directory created.
                    if (ze.isDirectory()) {
                        newFile.mkdirs();
                        ze = zis.getNextEntry();
                        continue;
                    }

                    // Create all missing parent folders, else the stream
                    // below would hit FileNotFoundException.
                    new File(newFile.getParent()).mkdirs();
                    try (FileOutputStream fos = new FileOutputStream(newFile)) {
                        int len;
                        while ((len = zis.read(buffer)) > 0) {
                            fos.write(buffer, 0, len);
                        }
                    }
                    if ((null != recursiveUnzip && "true".equals(recursiveUnzip)) && FilenameUtils.getExtension(fileName).equals("zip")) {
                        unZipIt(newFile, FilenameUtils.concat(outputFolder, FilenameUtils.getBaseName(fileName)), recursiveUnzip);
                    }
                    ze = zis.getNextEntry();
                }
                zis.closeEntry();
            }
        } catch (IOException ex) {
            // NOTE(review): errors are swallowed (stack trace only) and a
            // possibly partial folder is returned; kept for backward
            // compatibility with existing callers.
            ex.printStackTrace();
        }
        return folder;
    }
}
| nverwer/cocooncomponents | src/org/apache/cocoon/transformation/DownloadTransformer.java | Java | apache-2.0 | 11,508 |
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.hc.client5.testing.sync;
import java.io.IOException;
import org.apache.hc.client5.http.HttpRoute;
import org.apache.hc.client5.http.UserTokenHandler;
import org.apache.hc.client5.http.classic.methods.HttpGet;
import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
import org.apache.hc.client5.http.protocol.HttpClientContext;
import org.apache.hc.core5.http.ClassicHttpRequest;
import org.apache.hc.core5.http.ClassicHttpResponse;
import org.apache.hc.core5.http.EndpointDetails;
import org.apache.hc.core5.http.HttpException;
import org.apache.hc.core5.http.HttpHost;
import org.apache.hc.core5.http.HttpStatus;
import org.apache.hc.core5.http.io.HttpRequestHandler;
import org.apache.hc.core5.http.io.entity.EntityUtils;
import org.apache.hc.core5.http.io.entity.StringEntity;
import org.apache.hc.core5.http.protocol.BasicHttpContext;
import org.apache.hc.core5.http.protocol.HttpContext;
import org.junit.Assert;
import org.junit.Test;
/**
 * Test cases for state-ful connections: verifies that the connection manager,
 * driven by a {@link UserTokenHandler}, hands a given user's requests back to
 * the same pooled connection.
 */
public class TestStatefulConnManagement extends LocalServerTestBase {

    /** Trivial request handler that always answers 200 with a small body. */
    private static class SimpleService implements HttpRequestHandler {

        public SimpleService() {
            super();
        }

        @Override
        public void handle(
                final ClassicHttpRequest request,
                final ClassicHttpResponse response,
                final HttpContext context) throws HttpException, IOException {
            response.setCode(HttpStatus.SC_OK);
            final StringEntity entity = new StringEntity("Whatever");
            response.setEntity(entity);
        }
    }

    /**
     * Runs several workers concurrently, each with its own context/user token,
     * and asserts every worker saw the same endpoint instance for all of its
     * requests (i.e. connections are sticky per user token).
     */
    @Test
    public void testStatefulConnections() throws Exception {
        final int workerCount = 5;
        final int requestCount = 5;
        this.server.registerHandler("*", new SimpleService());
        // One pooled connection per worker, so stickiness is observable.
        this.connManager.setMaxTotal(workerCount);
        this.connManager.setDefaultMaxPerRoute(workerCount);
        // The user token is the "user" attribute set by each worker thread.
        final UserTokenHandler userTokenHandler = new UserTokenHandler() {

            @Override
            public Object getUserToken(final HttpRoute route, final HttpContext context) {
                final String id = (String) context.getAttribute("user");
                return id;
            }

        };
        this.clientBuilder.setUserTokenHandler(userTokenHandler);
        final HttpHost target = start();
        final HttpClientContext[] contexts = new HttpClientContext[workerCount];
        final HttpWorker[] workers = new HttpWorker[workerCount];
        for (int i = 0; i < contexts.length; i++) {
            final HttpClientContext context = HttpClientContext.create();
            contexts[i] = context;
            workers[i] = new HttpWorker(
                    "user" + i,
                    context, requestCount, target, this.httpclient);
        }
        for (final HttpWorker worker : workers) {
            worker.start();
        }
        for (final HttpWorker worker : workers) {
            worker.join(LONG_TIMEOUT.toMillis());
        }
        // Propagate any worker failure and check all requests completed.
        for (final HttpWorker worker : workers) {
            final Exception ex = worker.getException();
            if (ex != null) {
                throw ex;
            }
            Assert.assertEquals(requestCount, worker.getCount());
        }
        // Each request r stored the endpoint identity under attribute "r<r>";
        // all of them must match the first one within a single context.
        for (final HttpContext context : contexts) {
            final String state0 = (String) context.getAttribute("r0");
            Assert.assertNotNull(state0);
            for (int r = 1; r < requestCount; r++) {
                Assert.assertEquals(state0, context.getAttribute("r" + r));
            }
        }
    }

    /**
     * Worker thread that issues {@code requestCount} GET requests through a
     * shared client, recording the endpoint identity seen for each request.
     */
    static class HttpWorker extends Thread {

        private final String uid;
        private final HttpClientContext context;
        private final int requestCount;
        private final HttpHost target;
        private final CloseableHttpClient httpclient;

        // Written by the worker thread, read by the test thread after join().
        private volatile Exception exception;
        private volatile int count;

        public HttpWorker(
                final String uid,
                final HttpClientContext context,
                final int requestCount,
                final HttpHost target,
                final CloseableHttpClient httpclient) {
            super();
            this.uid = uid;
            this.context = context;
            this.requestCount = requestCount;
            this.target = target;
            this.httpclient = httpclient;
            this.count = 0;
        }

        public int getCount() {
            return this.count;
        }

        public Exception getException() {
            return this.exception;
        }

        @Override
        public void run() {
            try {
                // Identify this worker; the UserTokenHandler reads this attribute.
                this.context.setAttribute("user", this.uid);
                for (int r = 0; r < this.requestCount; r++) {
                    final HttpGet httpget = new HttpGet("/");
                    final ClassicHttpResponse response = this.httpclient.execute(
                            this.target,
                            httpget,
                            this.context);
                    this.count++;
                    // Record the identity of the endpoint used for request r.
                    final EndpointDetails endpointDetails = this.context.getEndpointDetails();
                    final String connuid = Integer.toHexString(System.identityHashCode(endpointDetails));
                    this.context.setAttribute("r" + r, connuid);
                    EntityUtils.consume(response.getEntity());
                }
            } catch (final Exception ex) {
                this.exception = ex;
            }
        }

    }

    @Test
    public void testRouteSpecificPoolRecylcing() throws Exception {
        // This tests what happens when a maxed connection pool needs
        // to kill the last idle connection to a route to build a new
        // one to the same route.
        final int maxConn = 2;
        this.server.registerHandler("*", new SimpleService());
        this.connManager.setMaxTotal(maxConn);
        this.connManager.setDefaultMaxPerRoute(maxConn);
        final UserTokenHandler userTokenHandler = new UserTokenHandler() {

            @Override
            public Object getUserToken(final HttpRoute route, final HttpContext context) {
                return context.getAttribute("user");
            }

        };
        this.clientBuilder.setUserTokenHandler(userTokenHandler);
        final HttpHost target = start();
        // Bottom of the pool : a *keep alive* connection to Route 1.
        final HttpContext context1 = new BasicHttpContext();
        context1.setAttribute("user", "stuff");
        final ClassicHttpResponse response1 = this.httpclient.execute(
                target, new HttpGet("/"), context1);
        EntityUtils.consume(response1.getEntity());
        // The ConnPoolByRoute now has 1 free connection, out of 2 max
        // The ConnPoolByRoute has one RouteSpecificPool, that has one free connection
        // for [localhost][stuff]
        Thread.sleep(100);
        // Send a very simple HTTP get (it MUST be simple, no auth, no proxy, no 302, no 401, ...)
        // Send it to another route. Must be a keepalive.
        final HttpContext context2 = new BasicHttpContext();
        final ClassicHttpResponse response2 = this.httpclient.execute(
                new HttpHost("127.0.0.1", this.server.getPort()), new HttpGet("/"), context2);
        EntityUtils.consume(response2.getEntity());
        // ConnPoolByRoute now has 2 free connections, out of its 2 max.
        // The [localhost][stuff] RouteSpecificPool is the same as earlier
        // And there is a [127.0.0.1][null] pool with 1 free connection
        Thread.sleep(100);
        // This will put the ConnPoolByRoute to the targeted state :
        // [localhost][stuff] will not get reused because this call is [localhost][null]
        // So the ConnPoolByRoute will need to kill one connection (it is maxed out globally).
        // The killed conn is the oldest, which means the first HTTPGet ([localhost][stuff]).
        // When this happens, the RouteSpecificPool becomes empty.
        final HttpContext context3 = new BasicHttpContext();
        final ClassicHttpResponse response3 = this.httpclient.execute(
                target, new HttpGet("/"), context3);
        // If the ConnPoolByRoute did not behave coherently with the RouteSpecificPool
        // this may fail. Ex : if the ConnPool discarded the route pool because it was empty,
        // but still used it to build the request3 connection.
        EntityUtils.consume(response3.getEntity());
    }

}
| UlrichColby/httpcomponents-client | httpclient5-testing/src/test/java/org/apache/hc/client5/testing/sync/TestStatefulConnManagement.java | Java | apache-2.0 | 9,740 |
package droidkit.app;
import android.content.Intent;
import android.net.Uri;
import android.support.annotation.NonNull;
import java.util.Locale;
/**
 * Factory of {@link Intent}s that open Google Maps for viewing a location,
 * routing, or searching.
 *
 * @author Daniel Serdyukov
 */
public final class MapsIntent {

    private static final String MAPS_URL = "https://maps.google.com/maps";

    /** Static utility class; not instantiable. */
    private MapsIntent() {
    }

    /**
     * Creates an intent that opens Google Maps at its default location.
     *
     * @return a {@code VIEW} intent for the Google Maps base URL
     */
    @NonNull
    public static Intent openMaps() {
        return new Intent(Intent.ACTION_VIEW, Uri.parse(MAPS_URL));
    }

    /**
     * Creates an intent that opens Google Maps centered on the given coordinates.
     *
     * @param lat latitude in degrees
     * @param lng longitude in degrees
     * @return a {@code VIEW} intent for the given location
     */
    @NonNull
    public static Intent openMaps(double lat, double lng) {
        return new Intent(Intent.ACTION_VIEW, Uri.parse(String.format(Locale.US, MAPS_URL + "?q=%f,%f", lat, lng)));
    }

    /**
     * Creates an intent that starts routing to the given destination.
     *
     * @param lat destination latitude in degrees
     * @param lng destination longitude in degrees
     * @return a {@code VIEW} intent requesting directions to the destination
     */
    @NonNull
    public static Intent route(double lat, double lng) {
        return new Intent(Intent.ACTION_VIEW, Uri.parse(String.format(Locale.US, MAPS_URL + "?daddr=%f,%f", lat, lng)));
    }

    /**
     * Creates an intent that requests directions between two coordinates.
     *
     * @param fromLat origin latitude in degrees
     * @param fromLng origin longitude in degrees
     * @param toLat destination latitude in degrees
     * @param toLng destination longitude in degrees
     * @return a {@code VIEW} intent requesting the route
     */
    @NonNull
    public static Intent route(double fromLat, double fromLng, double toLat, double toLng) {
        return new Intent(Intent.ACTION_VIEW, Uri.parse(String.format(Locale.US, MAPS_URL +
                "?saddr=%f,%f&daddr=%f,%f", fromLat, fromLng, toLat, toLng)));
    }

    /**
     * Creates an intent that searches Google Maps for the given free-text query.
     *
     * @param query free-text search query (raw, unencoded)
     * @return a {@code VIEW} intent for the search
     */
    @NonNull
    public static Intent search(@NonNull String query) {
        // Encode the query so spaces and reserved characters do not corrupt the URI.
        return new Intent(Intent.ACTION_VIEW, Uri.parse(MAPS_URL + "?q=" + Uri.encode(query)));
    }

}
| DanielSerdyukov/droidkit-4.x | library/src/main/java/droidkit/app/MapsIntent.java | Java | apache-2.0 | 1,295 |
'use strict';
var nconf = require('nconf');
var path = require('path');
/**
 * Handle the configuration management.
 *
 * Sources are layered by nconf in priority order: command-line arguments,
 * then environment variables, then the environment-specific JSON file,
 * then the default JSON file.
 *
 * @constructor
 */
function Config() {
  // env("_") loads environment variables using "_" as the hierarchy
  // separator, so NODE_ENV becomes reachable as the key "NODE:ENV".
  nconf.argv().env("_");
  var environment = nconf.get("NODE:ENV") || "development";
  // Environment-specific settings win over the defaults loaded afterwards.
  nconf.file(environment, {file: path.resolve(__dirname, '../config/' + environment + '.json')});
  nconf.file('default', {file: path.resolve(__dirname, '../config/default.json')});
}
/**
 * Return the value of the provided key from the configuration object.
 *
 * @param {string} key - Key from the configuration object.
 * @returns {*} The configured value, or undefined if the key is not set.
 */
Config.prototype.get = function (key) {
  return nconf.get(key);
};
// Export a shared singleton so every require() sees the same configuration.
module.exports = new Config();
| mxr576/webpage-content-extractor-api | lib/config.js | JavaScript | apache-2.0 | 676 |
/* -*- mode: C++; c-basic-offset: 2; indent-tabs-mode: nil -*- */
/*
* Main authors:
* Christian Schulte <schulte@gecode.org>
*
* Copyright:
* Christian Schulte, 2004
*
* Last modified:
* $Date: 2010-03-04 03:40:32 +1100 (Thu, 04 Mar 2010) $ by $Author: schulte $
* $Revision: 10365 $
*
* This file is part of Gecode, the generic constraint
* development environment:
* http://www.gecode.org
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
#include <gecode/int/element.hh>
namespace Gecode {
using namespace Int;
void
element(Home home, IntSharedArray c, IntVar x0, IntVar x1,
IntConLevel) {
if (c.size() == 0)
throw TooFewArguments("Int::element");
if (home.failed()) return;
for (int i = c.size(); i--; )
Limits::check(c[i],"Int::element");
GECODE_ES_FAIL((Element::post_int<IntView,IntView>(home,c,x0,x1)));
}
  // Post element constraint c[x0] == x1 for an integer array and a Boolean
  // result variable.
  void
  element(Home home, IntSharedArray c, IntVar x0, BoolVar x1,
          IntConLevel) {
    if (c.size() == 0)
      throw TooFewArguments("Int::element");
    if (home.failed()) return;
    // Validate every array value against the supported integer limits.
    for (int i = c.size(); i--; )
      Limits::check(c[i],"Int::element");
    GECODE_ES_FAIL((Element::post_int<IntView,BoolView>(home,c,x0,x1)));
  }
  // Post element constraint c[x0] == x1 where the result x1 is a fixed
  // integer, wrapped as a constant view.
  void
  element(Home home, IntSharedArray c, IntVar x0, int x1,
          IntConLevel) {
    if (c.size() == 0)
      throw TooFewArguments("Int::element");
    // The constant result must itself be within the supported limits.
    Limits::check(x1,"Int::element");
    if (home.failed()) return;
    for (int i = c.size(); i--; )
      Limits::check(c[i],"Int::element");
    ConstIntView cx1(x1);
    GECODE_ES_FAIL(
      (Element::post_int<IntView,ConstIntView>(home,c,x0,cx1)));
  }
  // Post element constraint c[x0] == x1 over an array of integer variables.
  // Consistency level selects domain (ViewDom) or bounds (ViewBnd) propagation.
  void
  element(Home home, const IntVarArgs& c, IntVar x0, IntVar x1,
          IntConLevel icl) {
    if (c.size() == 0)
      throw TooFewArguments("Int::element");
    if (home.failed()) return;
    Element::IdxViewArray<IntView> iv(home,c);
    if ((icl == ICL_DOM) || (icl == ICL_DEF)) {
      GECODE_ES_FAIL((Element::ViewDom<IntView,IntView,IntView>
                           ::post(home,iv,x0,x1)));
    } else {
      GECODE_ES_FAIL((Element::ViewBnd<IntView,IntView,IntView>
                           ::post(home,iv,x0,x1)));
    }
  }
  // Post element constraint c[x0] == x1 over integer variables with a fixed
  // integer result, using a constant view for x1.
  void
  element(Home home, const IntVarArgs& c, IntVar x0, int x1,
          IntConLevel icl) {
    if (c.size() == 0)
      throw TooFewArguments("Int::element");
    Limits::check(x1,"Int::element");
    if (home.failed()) return;
    Element::IdxViewArray<IntView> iv(home,c);
    ConstIntView v1(x1);
    // Domain consistency for ICL_DOM/ICL_DEF, bounds consistency otherwise.
    if ((icl == ICL_DOM) || (icl == ICL_DEF)) {
      GECODE_ES_FAIL((Element::ViewDom<IntView,IntView,ConstIntView>
                           ::post(home,iv,x0,v1)));
    } else {
      GECODE_ES_FAIL((Element::ViewBnd<IntView,IntView,ConstIntView>
                           ::post(home,iv,x0,v1)));
    }
  }
  // Post element constraint c[x0] == x1 over an array of Boolean variables
  // with a Boolean result (bounds propagation only).
  void
  element(Home home, const BoolVarArgs& c, IntVar x0, BoolVar x1,
          IntConLevel) {
    if (c.size() == 0)
      throw TooFewArguments("Int::element");
    if (home.failed()) return;
    Element::IdxViewArray<BoolView> iv(home,c);
    GECODE_ES_FAIL((Element::ViewBnd<BoolView,IntView,BoolView>
                         ::post(home,iv,x0,x1)));
  }
  // Post element constraint c[x0] == x1 over Boolean variables with a fixed
  // integer result, wrapped as a constant view.
  void
  element(Home home, const BoolVarArgs& c, IntVar x0, int x1,
          IntConLevel) {
    if (c.size() == 0)
      throw TooFewArguments("Int::element");
    Limits::check(x1,"Int::element");
    if (home.failed()) return;
    Element::IdxViewArray<BoolView> iv(home,c);
    ConstIntView v1(x1);
    GECODE_ES_FAIL((Element::ViewBnd<BoolView,IntView,ConstIntView>
                         ::post(home,iv,x0,v1)));
  }
  namespace {
    // Helper for the matrix ("two-dimensional") element overloads: creates a
    // fresh variable xy constrained to the linearized index x + w*y of the
    // (x,y) position in a w-by-h matrix. Fails the space if posting fails.
    IntVar
    pair(Home home, IntVar x, int w, IntVar y, int h) {
      IntVar xy(home,0,w*h-1);
      if (Element::Pair::post(home,x,y,xy,w,h) != ES_OK)
        home.fail();
      return xy;
    }
  }
  // Matrix element: a[x + w*y] == z for an integer array viewed as a
  // w-by-h matrix; delegates to the one-dimensional overload via pair().
  void
  element(Home home, IntSharedArray a,
          IntVar x, int w, IntVar y, int h, IntVar z,
          IntConLevel icl) {
    if (a.size() != w*h)
      throw Int::ArgumentSizeMismatch("Int::element");
    if (home.failed()) return;
    element(home, a, pair(home,x,w,y,h), z, icl);
  }
  // Matrix element with a Boolean result: a[x + w*y] == z.
  void
  element(Home home, IntSharedArray a,
          IntVar x, int w, IntVar y, int h, BoolVar z,
          IntConLevel icl) {
    if (a.size() != w*h)
      throw Int::ArgumentSizeMismatch("Int::element");
    if (home.failed()) return;
    element(home, a, pair(home,x,w,y,h), z, icl);
  }
  // Matrix element over integer variables: a[x + w*y] == z.
  void
  element(Home home, const IntVarArgs& a,
          IntVar x, int w, IntVar y, int h, IntVar z,
          IntConLevel icl) {
    if (a.size() != w*h)
      throw Int::ArgumentSizeMismatch("Int::element");
    if (home.failed()) return;
    element(home, a, pair(home,x,w,y,h), z, icl);
  }
  // Matrix element over Boolean variables: a[x + w*y] == z.
  void
  element(Home home, const BoolVarArgs& a,
          IntVar x, int w, IntVar y, int h, BoolVar z,
          IntConLevel icl) {
    if (a.size() != w*h)
      throw Int::ArgumentSizeMismatch("Int::element");
    if (home.failed()) return;
    element(home, a, pair(home,x,w,y,h), z, icl);
  }
}
// STATISTICS: int-post
| h4ck3rm1k3/dep-selector-libgecode | ext/libgecode3/vendor/gecode-3.7.3/gecode/int/element.cpp | C++ | apache-2.0 | 6,133 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.admin.jmx.internal;
import javax.management.ObjectName;
import javax.management.modelmbean.ModelMBean;
import org.apache.geode.admin.internal.SystemMemberCacheImpl;
import org.apache.geode.cache.Region;
import org.apache.geode.internal.admin.GemFireVM;
/**
 * MBean representation of {@link org.apache.geode.admin.SystemMemberRegion}.
 *
 * Wraps the internal region implementation and registers a ModelMBean under a
 * name derived from the region path, cache name/id and owning VM id.
 *
 * @since GemFire 3.5
 */
public class SystemMemberRegionJmxImpl
    extends org.apache.geode.admin.internal.SystemMemberRegionImpl
    implements org.apache.geode.admin.jmx.internal.ManagedResource {

  /** The object name of this managed resource */
  private ObjectName objectName;

  // -------------------------------------------------------------------------
  // Constructor(s)
  // -------------------------------------------------------------------------

  /**
   * Constructs an instance of SystemMemberRegionJmxImpl.
   *
   * @param cache the cache this region belongs to
   * @param region internal region to delegate real work to
   */
  public SystemMemberRegionJmxImpl(SystemMemberCacheImpl cache, Region region)
      throws org.apache.geode.admin.AdminException {
    super(cache, region);
    initializeMBean(cache);
  }

  /** Create and register the MBean to manage this resource */
  private void initializeMBean(SystemMemberCacheImpl cache)
      throws org.apache.geode.admin.AdminException {
    GemFireVM vm = cache.getVM();
    // Name properties are made JMX-compliant (quoting/escaping) before use.
    mbeanName = "GemFire.Cache:" + "path="
        + MBeanUtils.makeCompliantMBeanNameProperty(getFullPath()) + ",name="
        + MBeanUtils.makeCompliantMBeanNameProperty(cache.getName()) + ",id="
        + cache.getId() + ",owner="
        + MBeanUtils.makeCompliantMBeanNameProperty(vm.getId().toString())
        + ",type=Region";
    objectName = MBeanUtils.createMBean(this);
  }

  // -------------------------------------------------------------------------
  // ManagedResource implementation
  // -------------------------------------------------------------------------

  /** The name of the MBean that will manage this resource */
  private String mbeanName;

  /** The ModelMBean that is configured to manage this resource */
  private ModelMBean modelMBean;

  @Override
  public String getMBeanName() {
    return mbeanName;
  }

  @Override
  public ModelMBean getModelMBean() {
    return modelMBean;
  }

  @Override
  public void setModelMBean(ModelMBean modelMBean) {
    this.modelMBean = modelMBean;
  }

  @Override
  public ObjectName getObjectName() {
    return objectName;
  }

  @Override
  public ManagedResourceType getManagedResourceType() {
    return ManagedResourceType.SYSTEM_MEMBER_REGION;
  }

  /** No resources to release beyond MBean deregistration handled elsewhere. */
  @Override
  public void cleanupResource() {}

  /**
   * Checks equality of the given object with <code>this</code> based on the type (Class) and the
   * MBean Name returned by <code>getMBeanName()</code> methods.
   *
   * @param obj object to check equality with
   * @return true if the given object is if the same type and its MBean Name is same as
   *         <code>this</code> object's MBean Name, false otherwise
   */
  @Override
  public boolean equals(Object obj) {
    // instanceof is false for null, so no explicit null check is needed.
    if (!(obj instanceof SystemMemberRegionJmxImpl)) {
      return false;
    }

    SystemMemberRegionJmxImpl other = (SystemMemberRegionJmxImpl) obj;

    return getMBeanName().equals(other.getMBeanName());
  }

  /**
   * Returns hash code for <code>this</code> object which is based on the MBean Name generated.
   *
   * @return hash code for <code>this</code> object
   */
  @Override
  public int hashCode() {
    return getMBeanName().hashCode();
  }
}
| jdeppe-pivotal/geode | geode-core/src/main/java/org/apache/geode/admin/jmx/internal/SystemMemberRegionJmxImpl.java | Java | apache-2.0 | 4,385 |
// Lazily-populated map of hook style name -> tapable Hook class (see register).
let hookTypes;

// Maps a hook style to the legacy (webpack 3 era) Tapable apply* method used
// to invoke plugins registered under that style.
const callStyles = {
  sync: 'applyPlugins',
  syncWaterfall: 'applyPluginsWaterfall',
  syncBail: 'applyPluginsBailResult',
  sync_map: 'applyPlugins',
  asyncWaterfall: 'applyPluginsAsyncWaterfall',
  asyncParallel: 'applyPluginsParallel',
  asyncSerial: 'applyPluginsAsync',
};
// Converts a camelCase hook name to the dash-separated legacy plugin name;
// underscores become "--" and each capital becomes "-" plus its lowercase.
const camelToDash = camel =>
  camel
    .replace(/_/g, '--')
    .replace(/[A-Z]/g, letter => '-' + letter.toLowerCase());
// Known hooks per tapable class name, used to auto-register hooks on legacy
// (pre-webpack-4) objects. Each entry is [style, argumentNames].
const knownPluginRegistrations = {
  Compilation: {
    needAdditionalPass: ['sync', []],
    succeedModule: ['sync', ['module']],
    buildModule: ['sync', ['module']],
    seal: ['sync', []],
  },
  Compiler: {
    afterCompile: ['asyncSerial', ['compilation']],
    afterEnvironment: ['sync', []],
    afterPlugins: ['sync', []],
    afterResolvers: ['sync', []],
    compilation: ['sync', ['compilation', 'params']],
    emit: ['asyncSerial', ['compilation']],
    make: ['asyncParallel', ['compilation']],
    watchRun: ['asyncSerial', ['watcher']],
    run: ['asyncSerial', ['compiler']],
  },
  NormalModuleFactory: {
    createModule: ['syncBail', ['data']],
    parser: ['sync_map', ['parser', 'parserOptions']],
    resolver: ['syncWaterfall', ['nextResolver']],
  },
  ContextModuleFactory: {
    afterResolve: ['asyncWaterfall', ['data']],
  },
};
// Registers a hook on `tapable` under `name`. On webpack 4+ (tapable.hooks
// exists) a real Hook instance is created; on older webpack a metadata record
// plus a proxy object mimicking the Hook API is attached to the tapable.
exports.register = (tapable, name, style, args) => {
  if (tapable.hooks) {
    // Modern path: build the hook class table once, then create the hook.
    if (!hookTypes) {
      const Tapable = require('tapable');
      hookTypes = {
        sync: Tapable.SyncHook,
        syncWaterfall: Tapable.SyncWaterfallHook,
        syncBail: Tapable.SyncBailHook,
        asyncWaterfall: Tapable.AsyncWaterfallHook,
        asyncParallel: Tapable.AsyncParallelHook,
        asyncSerial: Tapable.AsyncSeriesHook,
        asyncSeries: Tapable.AsyncSeriesHook,
      };
    }
    if (!tapable.hooks[name]) {
      tapable.hooks[name] = new hookTypes[style](args);
    }
  } else {
    // Legacy path: record hook metadata (dash name, async/map flags).
    if (!tapable.__hardSource_hooks) {
      tapable.__hardSource_hooks = {};
    }
    if (!tapable.__hardSource_hooks[name]) {
      tapable.__hardSource_hooks[name] = {
        name,
        dashName: camelToDash(name),
        style,
        args,
        async: style.startsWith('async'),
        map: style.endsWith('_map'),
      };
    }
    if (!tapable.__hardSource_proxy) {
      tapable.__hardSource_proxy = {};
    }
    // Build a proxy exposing the tapable-1.x Hook surface (tap/tapPromise/
    // call/promise, plus for(key) on keyed "map" hooks) over the legacy API.
    if (!tapable.__hardSource_proxy[name]) {
      if (tapable.__hardSource_hooks[name].map) {
        const _forCache = {};
        tapable.__hardSource_proxy[name] = {
          _forCache,
          for: key => {
            let hook = _forCache[key];
            if (hook) {
              return hook;
            }
            _forCache[key] = {
              tap: (...args) => exports.tapFor(tapable, name, key, ...args),
              tapPromise: (...args) =>
                exports.tapPromiseFor(tapable, name, key, ...args),
              call: (...args) => exports.callFor(tapable, name, key, ...args),
              promise: (...args) =>
                exports.promiseFor(tapable, name, key, ...args),
            };
            return _forCache[key];
          },
          tap: (...args) => exports.tapFor(tapable, name, ...args),
          tapPromise: (...args) =>
            exports.tapPromiseFor(tapable, name, ...args),
          call: (...args) => exports.callFor(tapable, name, ...args),
          promise: (...args) => exports.promiseFor(tapable, name, ...args),
        };
      } else {
        tapable.__hardSource_proxy[name] = {
          tap: (...args) => exports.tap(tapable, name, ...args),
          tapPromise: (...args) => exports.tapPromise(tapable, name, ...args),
          call: (...args) => exports.call(tapable, name, args),
          promise: (...args) => exports.promise(tapable, name, args),
        };
      }
    }
  }
};
// Adds a synchronous listener to the named hook. On legacy tapables the hook
// is auto-registered from knownPluginRegistrations on first use; for async
// styles the sync callback is adapted to the (..., cb) plugin convention.
exports.tap = (tapable, name, reason, callback) => {
  if (tapable.hooks) {
    tapable.hooks[name].tap(reason, callback);
  } else {
    if (!tapable.__hardSource_hooks || !tapable.__hardSource_hooks[name]) {
      const registration =
        knownPluginRegistrations[tapable.constructor.name][name];
      exports.register(tapable, name, registration[0], registration[1]);
    }
    const dashName = tapable.__hardSource_hooks[name].dashName;
    if (tapable.__hardSource_hooks[name].async) {
      // Async hook, sync callback: invoke the node-style cb with the result.
      tapable.plugin(dashName, (...args) => {
        const cb = args.pop();
        cb(null, callback(...args));
      });
    } else {
      tapable.plugin(dashName, callback);
    }
  }
};
// Adds a promise-returning listener to the named hook. On legacy tapables the
// promise is bridged to the node-style callback expected by plugin().
exports.tapPromise = (tapable, name, reason, callback) => {
  if (tapable.hooks) {
    tapable.hooks[name].tapPromise(reason, callback);
  } else {
    if (!tapable.__hardSource_hooks || !tapable.__hardSource_hooks[name]) {
      const registration =
        knownPluginRegistrations[tapable.constructor.name][name];
      exports.register(tapable, name, registration[0], registration[1]);
    }
    const dashName = tapable.__hardSource_hooks[name].dashName;
    tapable.plugin(dashName, (...args) => {
      const cb = args.pop();
      // Resolve -> cb(null, value); reject -> cb(error).
      return callback(...args).then(value => cb(null, value), cb);
    });
  }
};
// Adds a callback-style async listener to the named hook; the legacy plugin()
// API already uses the same (..., cb) convention, so no adaptation is needed.
exports.tapAsync = (tapable, name, reason, callback) => {
  if (tapable.hooks) {
    tapable.hooks[name].tapAsync(reason, callback);
  } else {
    if (!tapable.__hardSource_hooks || !tapable.__hardSource_hooks[name]) {
      const registration =
        knownPluginRegistrations[tapable.constructor.name][name];
      exports.register(tapable, name, registration[0], registration[1]);
    }
    const dashName = tapable.__hardSource_hooks[name].dashName;
    tapable.plugin(dashName, callback);
  }
};
// Synchronously invokes the named hook with `args` and returns its result.
// On legacy tapables the apply* method matching the hook's style is used.
exports.call = (tapable, name, args) => {
  if (tapable.hooks) {
    const hook = tapable.hooks[name];
    return hook.call(...args);
  } else {
    const dashName = tapable.__hardSource_hooks[name].dashName;
    const style = tapable.__hardSource_hooks[name].style;
    return tapable[callStyles[style]](...[dashName].concat(args));
  }
};
// Invokes the named hook asynchronously and returns a promise for its result.
// On legacy tapables the node-style callback is wrapped in a new Promise.
exports.promise = (tapable, name, args) => {
  if (tapable.hooks) {
    const hook = tapable.hooks[name];
    return hook.promise(...args);
  } else {
    const dashName = tapable.__hardSource_hooks[name].dashName;
    const style = tapable.__hardSource_hooks[name].style;
    return new Promise((resolve, reject) => {
      tapable[callStyles[style]](
        ...[dashName].concat(args, (err, value) => {
          if (err) {
            reject(err);
          } else {
            resolve(value);
          }
        }),
      );
    });
  }
};
exports.tapFor = (tapable, name, key, reason, callback) => {
if (tapable.hooks) {
tapable.hooks[name].for(key).tap(reason, callback);
} else {
exports.tap(tapable, name, reason, callback);
}
};
// Adds a promise-returning listener to the keyed ("map") variant of the named
// hook; on legacy tapables the key is ignored and tapPromise is used directly.
exports.tapPromiseFor = (tapable, name, key, reason, callback) => {
  if (tapable.hooks) {
    tapable.hooks[name].for(key).tapPromise(reason, callback);
  } else {
    exports.tapPromise(tapable, name, reason, callback);
  }
};
exports.callFor = (tapable, name, key, args) => {
if (tapable.hooks) {
tapable.hooks[name].for(key).call(...args);
} else {
exports.call(tapable, name, args);
}
};
exports.promiseFor = (tapable, name, key, args) => {
if (tapable.hooks) {
tapable.hooks[name].for(key).promise(...args);
} else {
exports.promise(tapable, name, args);
}
};
// Returns the hook collection for a tapable: the native `hooks` object on
// webpack 4+, or the lazily-built proxy on legacy tapables (auto-registering
// all known hooks for the tapable's class first).
exports.hooks = tapable => {
  if (tapable.hooks) {
    return tapable.hooks;
  }
  if (!tapable.__hardSource_proxy) {
    tapable.__hardSource_proxy = {};
  }
  const registrations = knownPluginRegistrations[tapable.constructor.name];
  if (registrations) {
    for (const name in registrations) {
      const registration = registrations[name];
      exports.register(tapable, name, registration[0], registration[1]);
    }
  }
  return tapable.__hardSource_proxy;
};
| BigBoss424/portfolio | v6/node_modules/hard-source-webpack-plugin/lib/util/plugin-compat.js | JavaScript | apache-2.0 | 7,732 |
/*
* Copyright (c) 2013 Houbrechts IT
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.houbie.lesscss.engine;
import com.github.houbie.lesscss.LessParseException;
import com.github.houbie.lesscss.resourcereader.ResourceReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.script.Invocable;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.SequenceInputStream;
import java.util.Map;
import static com.github.houbie.lesscss.LessCompiler.CompilationDetails;
/**
* LessCompilationEngine implementation that uses a standard {@link javax.script.ScriptEngine} implementation.
*/
public class ScriptEngineLessCompilationEngine implements LessCompilationEngine {
private static Logger logger = LoggerFactory.getLogger(ScriptEngineLessCompilationEngine.class);
private static final String JS_ALL_MIN_JS = "js/all-min.js";
private static final String LESS_SCRIPT = "js/less-rhino-1.7.0-mod.js";
private static final String MINIFY_SCRIPT = "js/cssmin.js";
private static final String COMPILE_SCRIPT = "js/compile.js";
private static final boolean MINIFIED = true;
private ScriptEngine scriptEngine;
/**
* @param scriptEngineName the name of the underlying ScriptEngine (e.g. "nashorn", "rhino", ...)
*/
public ScriptEngineLessCompilationEngine(String scriptEngineName) {
logger.info("creating new NashornEngine");
ScriptEngineManager factory = new ScriptEngineManager();
scriptEngine = factory.getEngineByName(scriptEngineName);
if (scriptEngine == null) {
throw new RuntimeException("The ScriptEngine " + scriptEngineName + " could not be loaded");
}
}
/**
* @param scriptEngine the underlying ScriptEngine
*/
public ScriptEngineLessCompilationEngine(ScriptEngine scriptEngine) {
logger.info("creating new engine with {}", scriptEngine.getClass());
this.scriptEngine = scriptEngine;
}
@Override
public void initialize(Reader customJavaScriptReader) {
try {
if (customJavaScriptReader != null) {
scriptEngine.eval(customJavaScriptReader);
}
scriptEngine.eval(getLessScriptReader());
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private Reader getLessScriptReader() {
ClassLoader cl = getClass().getClassLoader();
InputStream concatenatedScripts;
if (MINIFIED) {
concatenatedScripts = cl.getResourceAsStream(JS_ALL_MIN_JS);
} else {
concatenatedScripts = new SequenceInputStream(cl.getResourceAsStream(LESS_SCRIPT), new SequenceInputStream(cl.getResourceAsStream(MINIFY_SCRIPT), cl.getResourceAsStream(COMPILE_SCRIPT)));
}
return new InputStreamReader(concatenatedScripts);
}
@Override
public CompilationDetails compile(String less, CompilationOptions compilationOptions, ResourceReader resourceReader) {
Map result;
try {
result = (Map) ((Invocable) scriptEngine).invokeFunction("compile", less, compilationOptions, resourceReader);
} catch (Exception e) {
throw new RuntimeException("Exception while compiling less", e);
}
if (result.get("parseException") != null) {
throw new LessParseException((String) result.get("parseException"));
}
return new CompilationDetails((String) result.get("css"), (String) result.get("sourceMapContent"));
}
public ScriptEngine getScriptEngine() {
return scriptEngine;
}
}
| houbie/lesscss | src/main/java/com/github/houbie/lesscss/engine/ScriptEngineLessCompilationEngine.java | Java | apache-2.0 | 4,246 |
package issues.issue130;
public class Impl_0 {
public int a = 0;
protected void printMe(String s) {
System.out.println(s);
}
}
| intrigus/jtransc | jtransc-main/test/issues/issue130/Impl_0.java | Java | apache-2.0 | 134 |
/**
 * Wraps each occurrence of a search term in `<span class="ui-match">` markup.
 *
 * @param text
 *          {string} haystack to search through
 * @param search
 *          {string} needle to search for
 * @param [caseSensitive]
 *          {boolean} optional boolean to use case-sensitive searching
 * @returns the highlighted text, or the input unchanged when there is
 *          nothing to search for or no text to search in
 */
// Note: the previous factory declared an unused injected `highlight`
// parameter; it was never referenced and has been removed.
angular.module('ui.highlight', []).filter('highlight', function() {
  // Escape regex metacharacters so the needle is matched literally.
  // Previously characters such as '(', '*' or '?' produced an invalid
  // or unintended RegExp.
  function escapeRegExp(str) {
    return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  }
  return function(text, search, caseSensitive) {
    // Guard `text` as well: calling toString() on null/undefined throws.
    if (text != null && (search || angular.isNumber(search))) {
      var ltext = text.toString();
      var lsearch = search.toString();
      if (caseSensitive) {
        return ltext.split(lsearch).join('<span class="ui-match">' + lsearch + '</span>');
      } else {
        return ltext.replace(new RegExp(escapeRegExp(lsearch), 'gi'), '<span class="ui-match">$&</span>');
      }
    } else {
      return text;
    }
  };
});
| WestCoastInformatics/OTF-Mapping-Service | webapp/src/main/webapp/js/highlight.js | JavaScript | apache-2.0 | 771 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.export;
import java.io.IOException;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.index.OrdinalMap;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongValues;
/**
 * {@link SortValue} implementation over a string ({@link SortedDocValues})
 * field, used by the export writer. Comparison happens on ordinals; per-leaf
 * ordinals are mapped to global ordinals via an {@link OrdinalMap} when the
 * reader has multiple segments.
 */
class StringValue implements SortValue {
// Top-level (possibly multi-segment) doc values for the field.
private final SortedDocValues globalDocValues;
// Maps per-segment ordinals to global ordinals; null for single-segment readers.
private final OrdinalMap ordinalMap;
private final String field;
// Ordinal comparator (ascending/descending behavior lives here).
private final IntComp comp;
protected LongValues toGlobal = LongValues.IDENTITY; // this segment to global ordinal. NN;
// Doc values of the current leaf (segment); starts as the global view.
protected SortedDocValues docValues;
// Ordinal of the current doc's value; -1 when the doc has no value.
public int currentOrd;
protected int lastDocID;
// Whether the current doc actually had a value.
private boolean present;
// Cache of the last ordinal looked up, to avoid repeated lookupOrd calls.
private BytesRef lastBytes;
private String lastString;
private int lastOrd = -1;
// Ordinal (index) of the current leaf context.
private int leafOrd = -1;
public StringValue(SortedDocValues globalDocValues, String field, IntComp comp) {
this.globalDocValues = globalDocValues;
this.docValues = globalDocValues;
if (globalDocValues instanceof MultiDocValues.MultiSortedDocValues) {
this.ordinalMap = ((MultiDocValues.MultiSortedDocValues) globalDocValues).mapping;
} else {
this.ordinalMap = null;
}
this.field = field;
this.comp = comp;
this.currentOrd = comp.resetValue();
this.present = false;
}
public String getLastString() {
return this.lastString;
}
public void setLastString(String lastString) {
this.lastString = lastString;
}
// Fresh instance with the same field/comparator; per-doc state is NOT copied
// (the new instance starts reset, as per the constructor).
public StringValue copy() {
StringValue copy = new StringValue(globalDocValues, field, comp);
return copy;
}
// Positions the doc-values iterator on docId and records its ordinal,
// or marks the value absent (-1) when the doc has no value.
public void setCurrentValue(int docId) throws IOException {
// System.out.println(docId +":"+lastDocID);
/*
if (docId < lastDocID) {
throw new AssertionError("docs were sent out-of-order: lastDocID=" + lastDocID + " vs doc=" + docId);
}
lastDocID = docId;
*/
if (docId > docValues.docID()) {
docValues.advance(docId);
}
if (docId == docValues.docID()) {
present = true;
currentOrd = docValues.ordValue();
} else {
present = false;
currentOrd = -1;
}
}
@Override
public boolean isPresent() {
return present;
}
// Copies the sortable state from another StringValue (same field assumed).
public void setCurrentValue(SortValue sv) {
StringValue v = (StringValue) sv;
this.currentOrd = v.currentOrd;
this.present = v.present;
this.leafOrd = v.leafOrd;
this.lastOrd = v.lastOrd;
this.toGlobal = v.toGlobal;
}
// Returns the BytesRef for the current ordinal, caching the last lookup.
public Object getCurrentValue() throws IOException {
assert present == true;
if (currentOrd != lastOrd) {
lastBytes = docValues.lookupOrd(currentOrd);
lastOrd = currentOrd;
lastString = null;
}
return lastBytes;
}
// Converts currentOrd from a leaf ordinal to a global ordinal, reusing the
// previous value's mapping when the leaf ordinal matches.
public void toGlobalValue(SortValue previousValue) {
lastOrd = currentOrd;
StringValue sv = (StringValue) previousValue;
if (sv.lastOrd == currentOrd) {
// Take the global ord from the previousValue unless we are a -1 which is the same in both
// global and leaf ordinal
if (this.currentOrd != -1) {
this.currentOrd = sv.currentOrd;
}
} else {
if (this.currentOrd > -1) {
this.currentOrd = (int) toGlobal.get(this.currentOrd);
}
}
}
public String getField() {
return field;
}
// Switches to a new segment: loads its leaf doc values and ord->global mapping.
public void setNextReader(LeafReaderContext context) throws IOException {
leafOrd = context.ord;
if (ordinalMap != null) {
toGlobal = ordinalMap.getGlobalOrds(context.ord);
}
docValues = DocValues.getSorted(context.reader(), field);
lastDocID = 0;
}
// Resets per-doc state so the instance can be reused for another pass.
public void reset() {
this.currentOrd = comp.resetValue();
this.present = false;
lastDocID = 0;
}
// Ordinal comparison only — valid when both sides share the same ordinal space.
public int compareTo(SortValue o) {
StringValue sv = (StringValue) o;
return comp.compare(currentOrd, sv.currentOrd);
}
public String toString() {
return Integer.toString(this.currentOrd);
}
}
| apache/solr | solr/core/src/java/org/apache/solr/handler/export/StringValue.java | Java | apache-2.0 | 4,760 |
package at.jku.sea.cloud.rest.pojo.stream.provider;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeInfo.Id;
import com.fasterxml.jackson.annotation.JsonTypeName;
import at.jku.sea.cloud.rest.pojo.PojoCollectionArtifact;
/**
 * Provider POJO wrapping a {@link PojoCollectionArtifact}. The Jackson type
 * annotations embed a {@code __type} discriminator so the concrete provider
 * subtype survives (de)serialization.
 */
@JsonTypeInfo(use = Id.NAME, property = "__type")
@JsonTypeName(value = "CollectionArtifactProvider")
public class PojoCollectionArtifactProvider extends PojoProvider {
// The collection artifact carried by this provider.
private PojoCollectionArtifact collectionArtifact;
// No-arg constructor — needed by Jackson for deserialization.
public PojoCollectionArtifactProvider() {
}
public PojoCollectionArtifactProvider(PojoCollectionArtifact collectionArtifact) {
this.collectionArtifact = collectionArtifact;
}
public PojoCollectionArtifact getCollectionArtifact() {
return collectionArtifact;
}
public void setCollectionArtifact(PojoCollectionArtifact collectionArtifact) {
this.collectionArtifact = collectionArtifact;
}
}
| OnurKirkizoglu/master_thesis | at.jku.sea.cloud.rest/src/main/java/at/jku/sea/cloud/rest/pojo/stream/provider/PojoCollectionArtifactProvider.java | Java | apache-2.0 | 932 |
package com.sebastian_daschner.scalable_coffee_shop.beans.boundary;
import javax.inject.Inject;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
/**
 * JAX-RS resource at {@code /beans}: read the stored beans and store new ones.
 * Reads go through the query service, writes through the command service
 * (CQRS-style split, judging by the injected service names).
 */
@Path("beans")
public class BeansResource {
@Inject
BeanCommandService commandService;
@Inject
BeanQueryService queryService;
/**
 * Returns the stored beans as a JSON object, one property per map entry
 * (presumably keyed by bean origin — see {@link #storeBeans}).
 */
@GET
public JsonObject getBeans() {
final JsonObjectBuilder builder = Json.createObjectBuilder();
queryService.getStoredBeans()
.entrySet().forEach(e -> builder.add(e.getKey(), e.getValue()));
return builder.build();
}
/**
 * Stores beans from a JSON body of the form
 * {@code {"beanOrigin": "...", "amount": n}}.
 *
 * @throws BadRequestException when beanOrigin is missing or amount is 0
 */
@POST
public void storeBeans(JsonObject object) {
final String beanOrigin = object.getString("beanOrigin", null);
final int amount = object.getInt("amount", 0);
if (beanOrigin == null || amount == 0)
throw new BadRequestException();
commandService.storeBeans(beanOrigin, amount);
}
}
| sdaschner/scalable-coffee-shop | beans/src/main/java/com/sebastian_daschner/scalable_coffee_shop/beans/boundary/BeansResource.java | Java | apache-2.0 | 1,063 |
package com.inmobi.messaging;
/*
* #%L
* messaging-client-core
* %%
* Copyright (C) 2012 - 2014 InMobi
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.nio.ByteBuffer;
/**
 * Message class holding the data.
 *
 * Payload is kept as a {@link ByteBuffer}; equality and hashing are
 * delegated to the buffer.
 */
public final class Message implements MessageBase {

  private ByteBuffer data;

  public Message() {
  }

  /**
   * Create new message with {@link ByteBuffer}
   *
   * @param data The {@link ByteBuffer}
   */
  public Message(ByteBuffer data) {
    this.data = data;
  }

  /**
   * Create new message with byte array
   *
   * @param data The byte array.
   */
  public Message(byte[] data) {
    this.data = ByteBuffer.wrap(data);
  }

  /**
   * Get the data associated with message.
   *
   * @return {@link ByteBuffer} holding the data.
   */
  public ByteBuffer getData() {
    return data;
  }

  /** Replaces the payload buffer. */
  public synchronized void set(ByteBuffer data) {
    this.data = data;
  }

  /** Clears the buffer (resets position/limit; bytes are not erased). */
  public synchronized void clear() {
    data.clear();
  }

  /** @return the buffer limit, i.e. the number of readable bytes. */
  public long getSize() {
    return data.limit();
  }

  @Override
  public int hashCode() {
    // Same value as 31 * 1 + hash(data); consistent with equals().
    return 31 + ((data == null) ? 0 : data.hashCode());
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    Message that = (Message) obj;
    return (data == null) ? (that.data == null) : data.equals(that.data);
  }

  @Override
  public Message clone() {
    // duplicate() shares the backing bytes but has independent position/limit.
    return new Message(data.duplicate());
  }
}
| sreedishps/pintail | messaging-client-core/src/main/java/com/inmobi/messaging/Message.java | Java | apache-2.0 | 2,267 |
// ESLint configuration: ES6/Node environment, babel-eslint parser, React plugin.
module.exports = {
"env": {
"es6": true,
"node": true
},
// Test-framework globals so spec files don't trip no-undef.
"globals": {
"expect": true,
"it": true,
"describe": true,
},
"extends": "eslint:recommended",
// babel-eslint parses experimental syntax (object rest/spread, JSX).
"parser": "babel-eslint",
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true,
"jsx": true
},
"sourceType": "module"
},
"plugins": [
"react"
],
"rules": {
"no-unused-vars": 2,
// Keep React/JSX identifiers from being reported as unused.
"react/jsx-uses-vars": 2,
"react/jsx-uses-react": 2,
"indent": [
"error",
2
],
"linebreak-style": [
"error",
"unix"
],
"quotes": [
"error",
"single"
],
"semi": [
"error",
"always"
]
}
};
| airingursb/two-life | src/components/react-native-calendars/.eslintrc.js | JavaScript | apache-2.0 | 859 |
// Decompiled by Jad v1.5.8e. Copyright 2001 Pavel Kouznetsov.
// Jad home page: http://www.geocities.com/kpdus/jad.html
// Decompiler options: braces fieldsfirst space lnc
package cn.com.smartdevices.bracelet.view;
import android.animation.Animator;
// Referenced classes of package cn.com.smartdevices.bracelet.view:
// RoundProgressBar
// Decompiled (Jad) animator listener used by RoundProgressBar.
// NOTE(review): decompiler artifact — the constructor assigns the field before
// calling super(); javac requires super() to be the first statement, so this
// source will not recompile as-is without reordering those two lines.
class s
implements android.animation.Animator.AnimatorListener
{
// Reference to the owning RoundProgressBar (captured outer instance).
final RoundProgressBar a;
s(RoundProgressBar roundprogressbar)
{
a = roundprogressbar;
super();
}
public void onAnimationCancel(Animator animator)
{
}
// On animation end, appears to re-drive the progress toward RoundProgressBar.b(a)
// when the current values fall short — the obfuscated static accessors
// (a/b/c) make the exact semantics unverifiable from this file alone.
public void onAnimationEnd(Animator animator)
{
if (RoundProgressBar.a(a) < RoundProgressBar.b(a) && RoundProgressBar.c(a) < RoundProgressBar.b(a))
{
RoundProgressBar.a(a, RoundProgressBar.b(a));
RoundProgressBar.a(a, RoundProgressBar.a(a) - RoundProgressBar.c(a), RoundProgressBar.c(a), RoundProgressBar.a(a));
}
}
public void onAnimationRepeat(Animator animator)
{
}
public void onAnimationStart(Animator animator)
{
}
}
| vishnudevk/MiBandDecompiled | Original Files/source/src/cn/com/smartdevices/bracelet/view/s.java | Java | apache-2.0 | 1,132 |
(function(window) {
// l10n key used when a specific error id has no translation.
var DEFAULT_ERROR_ID = 'error-default';
/**
 * Very simple base class for views.
 * Provides functionality for active/inactive.
 *
 * The first time the view is activated
 * the onactive function/event will fire.
 *
 * The .seen property is added to each object
 * with view in its prototype. .seen can be used
 * to detect if the view has ever been activated.
 *
 * @param {String|Object} options options or a selector for element.
 */
function View(options) {
if (typeof(options) === 'undefined') {
options = {};
}
if (typeof(options) === 'string') {
this.selectors = { element: options };
} else {
var key;
// NOTE(review): redundant — options was already defaulted above.
if (typeof(options) === 'undefined') {
options = {};
}
// Copy own properties of the options object onto the instance.
for (key in options) {
if (options.hasOwnProperty(key)) {
this[key] = options[key];
}
}
}
// Bind once so the handler can be removed in hideErrors().
this.hideErrors = this.hideErrors.bind(this);
}
// Any character that is not legal in a CSS identifier (replaced with '-').
const INVALID_CSS = /([^a-zA-Z\-\_0-9])/g;
// CSS class toggled on elements while the view is active.
View.ACTIVE = 'active';
View.prototype = {
seen: false,
activeClass: View.ACTIVE,
errorVisible: false,
// Root element, resolved lazily from selectors.element and cached.
get element() {
return this._findElement('element');
},
// Status element (flashes error messages), from selectors.status.
get status() {
return this._findElement('status');
},
// Container that receives error text, from selectors.errors.
get errors() {
return this._findElement('errors');
},
/**
 * Creates a string id for a given model.
 *
 * view.idForModel('foo-', { _id: 1 }); // => foo-1
 * view.idForModel('foo-', '2'); // => foo-2
 *
 * @param {String} prefix of string.
 * @param {Object|String|Numeric} objectOrString representation of model.
 */
idForModel: function(prefix, objectOrString) {
prefix += (typeof(objectOrString) === 'object') ?
objectOrString._id :
objectOrString;
return prefix;
},
// Builds a css-safe DOM id ('calendar-id-...') for a calendar model or raw id.
calendarId: function(input) {
if (typeof(input) !== 'string') {
input = input.calendarId;
}
input = this.cssClean(input);
return 'calendar-id-' + input;
},
/**
 * Delegate pattern event listener.
 *
 * @param {HTMLElement} element parent element.
 * @param {String} type type of dom event.
 * @param {String} selector css selector element should match
 * _note_ there is no magic here this
 * is determined from the root of the document.
 * @param {Function|Object} handler event handler.
 * first argument is the raw
 * event second is the element
 * matching the pattern.
 */
delegate: function(element, type, selector, handler) {
if (typeof(handler) === 'object') {
var context = handler;
handler = function() {
context.handleEvent.apply(context, arguments);
};
}
element.addEventListener(type, function(e) {
var target = e.target;
// Walk up from the event target until the listening element,
// firing the handler on the first node matching the selector.
while (target !== element) {
if ('mozMatchesSelector' in target &&
target.mozMatchesSelector(selector)) {
return handler(e, target);
}
target = target.parentNode;
}
});
},
/**
 * Clean a string for use with css.
 * Converts illegal chars to legal ones.
 */
cssClean: function(string) {
if (typeof(string) !== 'string')
return string;
//TODO: I am worried about the performance
//of using this all over the place =/
//consider sanitizing all keys to ensure
//they don't blow up when used as a selector?
return string.replace(INVALID_CSS, '-');
},
/**
 * Finds a caches a element defined
 * by selectors
 *
 * @param {String} selector name as defined in selectors.
 * @param {Boolean} all true when to find all elements. (default false).
 */
_findElement: function(name, all, element) {
// Support the (name, element) call form by shifting arguments.
if (typeof(all) === 'object') {
element = all;
all = false;
}
element = element || document;
var cacheName;
var selector;
if (typeof(all) === 'undefined') {
all = false;
}
if (name in this.selectors) {
// Cache under e.g. this._elementElement so lookups happen once.
cacheName = '_' + name + 'Element';
selector = this.selectors[name];
if (!this[cacheName]) {
if (all) {
this[cacheName] = element.querySelectorAll(selector);
} else {
this[cacheName] = element.querySelector(selector);
}
}
return this[cacheName];
}
return null;
},
/**
 * Displays a list of errors
 *
 * @param {Array} list error list
 * (see Event.validaitonErrors) or Error object.
 */
showErrors: function(list) {
var _ = navigator.mozL10n.get;
var errors = '';
// We can pass Error objects or
// Array of {name: foo} objects
if (!Array.isArray(list)) {
list = [list];
}
var i = 0;
var len = list.length;
for (; i < len; i++) {
var name = list[i].l10nID || list[i].name;
errors += _('error-' + name) || _(DEFAULT_ERROR_ID);
}
// populate error and display it.
this.errors.textContent = errors;
this.errorVisible = true;
this.status.classList.add(this.activeClass);
this.status.addEventListener('animationend', this.hideErrors);
},
// Hides the error status and detaches the animationend listener added
// by showErrors().
hideErrors: function() {
this.status.classList.remove(this.activeClass);
this.status.removeEventListener('animationend', this.hideErrors);
this.errorVisible = false;
},
// Activates the view: clears stale errors, fires onfirstseen() once,
// forwards extra args to dispatch() when defined, and marks the element active.
onactive: function() {
if (this.errorVisible) {
this.hideErrors();
}
//seen can be set to anything other
//then false to override this behaviour
if (this.seen === false) {
this.onfirstseen();
}
// intentionally using 'in'
if ('dispatch' in this) {
this.dispatch.apply(this, arguments);
}
this.seen = true;
if (this.element) {
this.element.classList.add(this.activeClass);
}
},
// Deactivates the view by removing the active class from its element.
oninactive: function() {
if (this.element) {
this.element.classList.remove(this.activeClass);
}
},
// Hook invoked exactly once, on the first activation; override in subclasses.
onfirstseen: function() {}
};
Calendar.View = View;
}(this));
| wilebeast/FireFox-OS | B2G/gaia/apps/calendar/js/view.js | JavaScript | apache-2.0 | 6,314 |
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.design.widget;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.DrawableContainer;
import android.util.Log;
import java.lang.reflect.Method;
/** Caution. Gross hacks ahead. */
class DrawableUtils {
private static final String LOG_TAG = "DrawableUtils";
// Cached reflective handle to DrawableContainer.setConstantState(); resolved lazily.
private static Method sSetConstantStateMethod;
// Set once the lookup has been attempted, so a failed lookup is not retried.
private static boolean sSetConstantStateMethodFetched;
private DrawableUtils() {}
/**
 * Installs the given constant state on a {@link DrawableContainer} by
 * reflectively calling its non-public {@code setConstantState} method.
 *
 * @return true if the reflective call succeeded
 */
static boolean setContainerConstantState(
DrawableContainer drawable, Drawable.ConstantState constantState) {
// We can use getDeclaredMethod() on v9+
return setContainerConstantStateV9(drawable, constantState);
}
// Performs the one-time reflective lookup, then invokes the method.
// Failures are logged and reported via the boolean return, never thrown.
private static boolean setContainerConstantStateV9(
DrawableContainer drawable, Drawable.ConstantState constantState) {
if (!sSetConstantStateMethodFetched) {
try {
sSetConstantStateMethod =
DrawableContainer.class.getDeclaredMethod(
"setConstantState", DrawableContainer.DrawableContainerState.class);
sSetConstantStateMethod.setAccessible(true);
} catch (NoSuchMethodException e) {
Log.e(LOG_TAG, "Could not fetch setConstantState(). Oh well.");
}
sSetConstantStateMethodFetched = true;
}
if (sSetConstantStateMethod != null) {
try {
sSetConstantStateMethod.invoke(drawable, constantState);
return true;
} catch (Exception e) {
Log.e(LOG_TAG, "Could not invoke setConstantState(). Oh well.");
}
}
return false;
}
}
| WeRockStar/iosched | third_party/material-components-android/lib/src/android/support/design/widget/DrawableUtils.java | Java | apache-2.0 | 2,166 |
package com.artemis;
import static org.junit.Assert.assertEquals;
import java.util.NoSuchElementException;
import com.artemis.systems.EntityProcessingSystem;
import com.artemis.utils.IntBag;
import org.junit.Test;
import com.artemis.utils.ImmutableBag;
/**
* Created by obartley on 6/9/14.
*/
/** Tests for EntitySystem iteration and aspect exclusion behavior. */
public class EntitySystemTest {
// Disabled entity should not be in the system's actives; the system's
// iterator.next() on an empty set then throws NoSuchElementException.
@SuppressWarnings("static-method")
@Test(expected = NoSuchElementException.class)
public void test_process_one_inactive() {
World w = new World(new WorldConfiguration()
.setSystem(new IteratorTestSystem(0)));
Entity e = w.createEntity();
e.edit().add(new C());
e.disable();
w.process();
}
// One enabled entity with component C should be visible to the system.
@SuppressWarnings("static-method")
@Test
public void test_process_one_active() {
World w = new World(new WorldConfiguration()
.setSystem(new IteratorTestSystem(1)));
Entity e = w.createEntity();
e.edit().add(new C());
w.process();
}
// An entity without C matches both an exclude(C) aspect and an empty aspect.
@Test
public void aspect_exclude_only() {
ExcludingSystem es1 = new ExcludingSystem();
EmptySystem es2 = new EmptySystem();
World w = new World(new WorldConfiguration()
.setSystem(es1)
.setSystem(es2));
Entity e = w.createEntity();
w.process();
assertEquals(1, es1.getActives().size());
assertEquals(1, es2.getActives().size());
}
// Marker components used by the tests above.
public static class C extends Component {}
public static class C2 extends Component {}
// System that asserts its subscription size and then touches the iterator,
// so an empty subscription surfaces as NoSuchElementException.
public static class IteratorTestSystem extends EntitySystem {
public int expectedSize;
@SuppressWarnings("unchecked")
public IteratorTestSystem(int expectedSize) {
super(Aspect.all(C.class));
this.expectedSize = expectedSize;
}
@Override
protected void processSystem() {
assertEquals(expectedSize, subscription.getEntities().size());
getActives().iterator().next();
}
@Override
protected boolean checkProcessing() {
return true;
}
}
// Matches entities that do NOT have component C.
public static class ExcludingSystem extends EntityProcessingSystem {
public ExcludingSystem() {
super(Aspect.exclude(C.class));
}
@Override
protected void process(Entity e) {}
}
// Matches all entities (empty aspect).
public static class EmptySystem extends EntityProcessingSystem {
public EmptySystem() {
super(Aspect.all());
}
@Override
protected void process(Entity e) {}
}
}
| antag99/artemis-odb | artemis/src/test/java/com/artemis/EntitySystemTest.java | Java | apache-2.0 | 2,187 |
package com.sqisland.gce2retrofit;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.stream.JsonReader;
import com.squareup.javawriter.JavaWriter;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.text.WordUtils;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import static javax.lang.model.element.Modifier.PUBLIC;
/**
 * Generates Retrofit service interfaces and Gson model classes from a
 * Google Cloud Endpoints discovery document (discovery.json).
 */
public class Generator {
private static final String OPTION_CLASS_MAP = "classmap";
private static final String OPTION_METHODS = "methods";
private static Gson gson = new Gson();
// Flavors of Retrofit method signatures that can be emitted.
public enum MethodType {
SYNC, ASYNC, REACTIVE
}
/**
 * CLI entry point: {@code java -jar gce2retrofit.jar discovery.json output_dir}.
 * Optional flags: {@code -classmap <file>} and {@code -methods <sync,async,reactive>}.
 * Exits with status 1 when the two positional arguments are missing.
 */
public static void main(String... args)
throws IOException, URISyntaxException {
Options options = getOptions();
CommandLine cmd = getCommandLine(options, args);
if (cmd == null) {
return;
}
String[] arguments = cmd.getArgs();
if (arguments.length != 2) {
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("java -jar gce2retrofit.jar discovery.json output_dir", options);
System.exit(1);
}
String discoveryFile = arguments[0];
String outputDir = arguments[1];
Map<String, String> classMap = cmd.hasOption(OPTION_CLASS_MAP)?
readClassMap(new FileReader(cmd.getOptionValue(OPTION_CLASS_MAP))) : null;
EnumSet<MethodType> methodTypes = getMethods(cmd.getOptionValue(OPTION_METHODS));
generate(new FileReader(discoveryFile), new FileWriterFactory(new File(outputDir)),
classMap, methodTypes);
}
// Declares the supported command-line options.
private static Options getOptions() {
Options options = new Options();
options.addOption(
OPTION_CLASS_MAP, true, "Map fields to classes. Format: field_name\\tclass_name");
options.addOption(
OPTION_METHODS, true,
"Methods to generate, either sync, async or reactive. Default is to generate sync & async.");
return options;
}
// Parses args; returns null (after printing) on a parse error.
private static CommandLine getCommandLine(Options options, String... args) {
CommandLineParser parser = new BasicParser();
try {
CommandLine cmd = parser.parse(options, args);
return cmd;
} catch (ParseException e) {
System.out.println("Unexpected exception:" + e.getMessage());
}
return null;
}
/**
 * Core generation: reads the discovery document and emits model classes
 * (one per schema) plus Retrofit interfaces (one per resource, and one for
 * top-level methods when present).
 *
 * @param discoveryReader reader over the discovery JSON
 * @param writerFactory supplies writers for the generated files
 * @param classMap optional field-name to class-name overrides (may be null)
 * @param methodTypes which signature flavors to emit
 */
public static void generate(
Reader discoveryReader, WriterFactory writerFactory,
Map<String, String> classMap, EnumSet<MethodType> methodTypes)
throws IOException, URISyntaxException {
JsonReader jsonReader = new JsonReader(discoveryReader);
Discovery discovery = gson.fromJson(jsonReader, Discovery.class);
// Package is derived from baseUrl, falling back to rootUrl.
String packageName = StringUtil.getPackageName(discovery.baseUrl);
if (packageName == null || packageName.isEmpty()) {
packageName = StringUtil.getPackageName(discovery.rootUrl);
}
String modelPackageName = packageName + ".model";
for (Entry<String, JsonElement> entry : discovery.schemas.entrySet()) {
generateModel(
writerFactory, modelPackageName, entry.getValue().getAsJsonObject(), classMap);
}
if (discovery.resources != null) {
generateInterfaceFromResources(
writerFactory, packageName, "", discovery.resources, methodTypes);
}
if (discovery.name != null && discovery.methods != null) {
generateInterface(
writerFactory, packageName, discovery.name, discovery.methods, methodTypes);
}
}
/**
 * Reads a field-to-class mapping, one tab-separated pair per line
 * ({@code field_name<TAB>class_name}); malformed lines are skipped.
 */
public static Map<String, String> readClassMap(Reader reader) throws IOException {
Map<String, String> classMap = new HashMap<String, String>();
String line;
BufferedReader bufferedReader = new BufferedReader(reader);
while ((line = bufferedReader.readLine()) != null) {
String[] fields = line.split("\t");
if (fields.length == 2) {
classMap.put(fields[0], fields[1]);
}
}
return classMap;
}
/**
 * Parses the -methods option (comma-separated: sync, async, reactive, both).
 * Defaults to SYNC+ASYNC when nothing valid is supplied.
 */
public static EnumSet<MethodType> getMethods(String input) {
EnumSet<MethodType> methodTypes = EnumSet.noneOf(MethodType.class);
if (input != null) {
String[] parts = input.split(",");
for (String part : parts) {
if ("sync".equals(part) || "both".equals(part)) {
methodTypes.add(MethodType.SYNC);
}
if ("async".equals(part) || "both".equals(part)) {
methodTypes.add(MethodType.ASYNC);
}
if ("reactive".equals(part)) {
methodTypes.add(MethodType.REACTIVE);
}
}
}
if (methodTypes.isEmpty()) {
methodTypes = EnumSet.of(Generator.MethodType.ASYNC, Generator.MethodType.SYNC);
}
return methodTypes;
}
// Emits one model file per schema: a class for "object" schemas,
// an enum for "string" schemas.
private static void generateModel(
WriterFactory writerFactory, String modelPackageName,
JsonObject schema, Map<String, String> classMap)
throws IOException {
String id = schema.get("id").getAsString();
String path = StringUtil.getPath(modelPackageName, id + ".java");
Writer writer = writerFactory.getWriter(path);
JavaWriter javaWriter = new JavaWriter(writer);
javaWriter.emitPackage(modelPackageName)
.emitImports("com.google.gson.annotations.SerializedName")
.emitEmptyLine()
.emitImports("java.util.List")
.emitEmptyLine();
String type = schema.get("type").getAsString();
if (type.equals("object")) {
javaWriter.beginType(modelPackageName + "." + id, "class", EnumSet.of(PUBLIC));
generateObject(javaWriter, schema, classMap);
javaWriter.endType();
} else if (type.equals("string")) {
javaWriter.beginType(modelPackageName + "." + id, "enum", EnumSet.of(PUBLIC));
generateEnum(javaWriter, schema);
javaWriter.endType();
}
writer.close();
}
// Emits one public field per schema property. Java reserved words get a
// trailing underscore plus @SerializedName with the original name; classMap
// entries override the inferred Java type.
private static void generateObject(
JavaWriter javaWriter, JsonObject schema, Map<String, String> classMap)
throws IOException {
JsonElement element = schema.get("properties");
if (element == null) {
return;
}
JsonObject properties = element.getAsJsonObject();
for (Entry<String, JsonElement> entry : properties.entrySet()) {
String key = entry.getKey();
String variableName = key;
if (StringUtil.isReservedWord(key)) {
javaWriter.emitAnnotation("SerializedName(\"" + key + "\")");
variableName += "_";
}
PropertyType propertyType = gson.fromJson(
entry.getValue(), PropertyType.class);
String javaType = propertyType.toJavaType();
if (classMap != null && classMap.containsKey(key)) {
javaType = classMap.get(key);
}
javaWriter.emitField(javaType, variableName, EnumSet.of(PUBLIC));
}
}
// Emits one enum constant per value in the schema's "enum" array.
private static void generateEnum(JavaWriter javaWriter, JsonObject schema) throws IOException {
JsonArray enums = schema.get("enum").getAsJsonArray();
for (int i = 0; i < enums.size(); ++i) {
javaWriter.emitEnumValue(enums.get(i).getAsString());
}
}
// Walks the (possibly nested) resource tree, emitting an interface for each
// node with methods; nested resource names are joined with underscores.
private static void generateInterfaceFromResources(
WriterFactory writerFactory, String packageName,
String resourceName, JsonObject resources,
EnumSet<MethodType> methodTypes)
throws IOException {
for (Entry<String, JsonElement> entry : resources.entrySet()) {
JsonObject entryValue = entry.getValue().getAsJsonObject();
if (entryValue.has("methods")) {
generateInterface(writerFactory, packageName,
resourceName + "_" + entry.getKey(),
entryValue.get("methods").getAsJsonObject(),
methodTypes);
}
if (entryValue.has("resources")) {
generateInterfaceFromResources(writerFactory, packageName,
resourceName + "_" + entry.getKey(),
entryValue.get("resources").getAsJsonObject(),
methodTypes);
}
}
}
/**
 * Emits a Retrofit interface for one resource: each discovery method becomes
 * one Java method per requested {@link MethodType}, annotated with its HTTP
 * verb and path. The interface name is the CamelCased resource name.
 */
private static void generateInterface(
WriterFactory writerFactory, String packageName,
String resourceName, JsonObject methods,
EnumSet<MethodType> methodTypes)
throws IOException {
String capitalizedName = WordUtils.capitalizeFully(resourceName, '_');
String className = capitalizedName.replaceAll("_", "");
String path = StringUtil.getPath(packageName, className + ".java");
Writer fileWriter = writerFactory.getWriter(path);
JavaWriter javaWriter = new JavaWriter(fileWriter);
javaWriter.emitPackage(packageName)
.emitImports(packageName + ".model.*")
.emitEmptyLine()
.emitImports(
"retrofit.Callback",
"retrofit.client.Response",
"retrofit.http.GET",
"retrofit.http.POST",
"retrofit.http.PATCH",
"retrofit.http.DELETE",
"retrofit.http.Body",
"retrofit.http.Path",
"retrofit.http.Query");
if (methodTypes.contains(MethodType.REACTIVE)) {
javaWriter.emitImports("rx.Observable");
}
javaWriter.emitEmptyLine();
javaWriter.beginType(
packageName + "." + className, "interface", EnumSet.of(PUBLIC));
for (Entry<String, JsonElement> entry : methods.entrySet()) {
String methodName = entry.getKey();
Method method = gson.fromJson(entry.getValue(), Method.class);
for (MethodType methodType : methodTypes) {
javaWriter.emitAnnotation(method.httpMethod, "\"/" + method.path + "\"");
emitMethodSignature(fileWriter, methodName, method, methodType);
}
}
javaWriter.endType();
fileWriter.close();
}
// TODO: Use JavaWriter to emit method signature
// Writes one method signature. SYNC returns the response type (or Response
// for POST without one); ASYNC appends a Callback parameter and returns void;
// REACTIVE returns Observable<T> and suffixes the method name with "Rx".
private static void emitMethodSignature(
Writer writer, String methodName, Method method, MethodType methodType) throws IOException {
ArrayList<String> params = new ArrayList<String>();
if (method.request != null) {
params.add("@Body " + method.request.$ref + " " +
(method.request.parameterName != null ? method.request.parameterName : "resource"));
}
for (Entry<String, JsonElement> param : getParams(method)) {
params.add(param2String(param));
}
String returnValue = "void";
if (methodType == MethodType.SYNC && "POST".equals(method.httpMethod)) {
returnValue = "Response";
}
if (method.response != null) {
if (methodType == MethodType.SYNC) {
returnValue = method.response.$ref;
} else if (methodType == MethodType.REACTIVE) {
returnValue = "Observable<" + method.response.$ref + ">";
}
}
if (methodType == MethodType.ASYNC) {
if (method.response == null) {
params.add("Callback<Void> cb");
} else {
params.add("Callback<" + method.response.$ref + "> cb");
}
}
writer.append(" " + returnValue + " " + methodName + (methodType == MethodType.REACTIVE ? "Rx" : "") + "(");
for (int i = 0; i < params.size(); ++i) {
if (i != 0) {
writer.append(", ");
}
writer.append(params.get(i));
}
writer.append(");\n");
}
/**
 * Assemble a list of parameters, with the first entries matching the ones
 * listed in parameterOrder
 *
 * @param method The method containing parameters and parameterOrder
 * @return Ordered parameters
 */
private static List<Entry<String, JsonElement>> getParams(Method method) {
List<Entry<String, JsonElement>> params
= new ArrayList<Entry<String, JsonElement>>();
if (method.parameters == null) {
return params;
}
// Convert the entry set into a map, and extract the keys not listed in
// parameterOrder
HashMap<String, Entry<String, JsonElement>> map
= new HashMap<String, Entry<String, JsonElement>>();
List<String> remaining = new ArrayList<String>();
for (Entry<String, JsonElement> entry : method.parameters.entrySet()) {
String key = entry.getKey();
map.put(key, entry);
if (method.parameterOrder == null ||
!method.parameterOrder.contains(key)) {
remaining.add(key);
}
}
// Add the keys in parameterOrder
if (method.parameterOrder != null) {
for (String key : method.parameterOrder) {
params.add(map.get(key));
}
}
// Then add the keys not in parameterOrder
for (String key : remaining) {
params.add(map.get(key));
}
return params;
}
// Renders one parameter: @Path/@Query annotation by location, then the Java
// type (boxed when the parameter is optional) and the parameter name.
private static String param2String(Entry<String, JsonElement> param) {
StringBuffer buf = new StringBuffer();
String paramName = param.getKey();
ParameterType paramType = gson.fromJson(
param.getValue(), ParameterType.class);
if ("path".equals(paramType.location)) {
buf.append("@Path(\"" + paramName + "\") ");
}
if ("query".equals(paramType.location)) {
buf.append("@Query(\"" + paramName + "\") ");
}
String type = paramType.toJavaType();
if (!paramType.required) {
type = StringUtil.primitiveToObject(type);
}
buf.append(type + " " + paramName);
return buf.toString();
}
}
| MaTriXy/gce2retrofit | gce2retrofit/src/main/java/com/sqisland/gce2retrofit/Generator.java | Java | apache-2.0 | 13,413 |
#!/usr/bin/env ruby
#--
# set.rb - defines the Set class
#++
# Copyright (c) 2002 Akinori MUSHA <knu@iDaemons.org>
#
# Documentation by Akinori MUSHA and Gavin Sinclair.
#
# All rights reserved. You can redistribute and/or modify it under the same
# terms as Ruby.
#
# $Id: set.rb 8696 2009-01-10 21:17:58Z headius $
#
# == Overview
#
# This library provides the Set class, which deals with a collection
# of unordered values with no duplicates. It is a hybrid of Array's
# intuitive inter-operation facilities and Hash's fast lookup. If you
# need to keep values ordered, use the SortedSet class.
#
# The method +to_set+ is added to Enumerable for convenience.
#
# See the Set class for an example of usage.
#
# Set implements a collection of unordered values with no duplicates.
# This is a hybrid of Array's intuitive inter-operation facilities and
# Hash's fast lookup.
#
# Several methods accept any Enumerable object (implementing +each+)
# for greater flexibility: new, replace, merge, subtract, |, &, -, ^.
#
# The equality of each couple of elements is determined according to
# Object#eql? and Object#hash, since Set uses Hash as storage.
#
# Finally, if you are using class Set, you can also use Enumerable#to_set
# for convenience.
#
# == Example
#
# require 'set'
# s1 = Set.new [1, 2] # -> #<Set: {1, 2}>
# s2 = [1, 2].to_set # -> #<Set: {1, 2}>
# s1 == s2 # -> true
# s1.add("foo") # -> #<Set: {1, 2, "foo"}>
# s1.merge([2, 6]) # -> #<Set: {6, 1, 2, "foo"}>
# s1.subset? s2 # -> false
# s2.subset? s1 # -> true
#
class Set
  include Enumerable

  # Creates a new set containing the given objects.
  def self.[](*ary)
    new(ary)
  end

  # Creates a new set containing the elements of the given enumerable
  # object.
  #
  # If a block is given, the elements of enum are preprocessed by the
  # given block.
  def initialize(enum = nil, &block) # :yields: o
    # ||= keeps any storage a subclass has already installed before
    # calling super (see SortedSet, which may swap in an RBTree).
    @hash ||= Hash.new
    enum.nil? and return
    if block
      enum.each { |o| add(block[o]) }
    else
      merge(enum)
    end
  end

  # Copy internal hash.
  def initialize_copy(orig)
    # instance_eval reaches into the source set's private storage.
    @hash = orig.instance_eval{@hash}.dup
  end

  # Returns the number of elements.
  def size
    @hash.size
  end
  alias length size

  # Returns true if the set contains no elements.
  def empty?
    @hash.empty?
  end

  # Removes all elements and returns self.
  def clear
    @hash.clear
    self
  end

  # Replaces the contents of the set with the contents of the given
  # enumerable object and returns self.
  def replace(enum)
    if enum.class == self.class
      # Same concrete class: copy the backing hash wholesale.
      @hash.replace(enum.instance_eval { @hash })
    else
      enum.is_a?(Enumerable) or raise ArgumentError, "value must be enumerable"
      clear
      enum.each { |o| add(o) }
    end
    self
  end

  # Converts the set to an array.  The order of elements is uncertain.
  def to_a
    @hash.keys
  end

  # Merges +set+ into self, recursing into nested sets.  +seen+ tracks
  # object ids of sets on the current recursion path so that a
  # self-referencing set raises instead of recursing forever.
  def flatten_merge(set, seen = Set.new)
    set.each { |e|
      if e.is_a?(Set)
        if seen.include?(e_id = e.object_id)
          raise ArgumentError, "tried to flatten recursive Set"
        end
        seen.add(e_id)
        flatten_merge(e, seen)
        # Unwind: the same subset may legitimately appear on two
        # different branches; only cycles are an error.
        seen.delete(e_id)
      else
        add(e)
      end
    }
    self
  end
  protected :flatten_merge

  # Returns a new set that is a copy of the set, flattening each
  # containing set recursively.
  def flatten
    self.class.new.flatten_merge(self)
  end

  # Equivalent to Set#flatten, but replaces the receiver with the
  # result in place.  Returns nil if no modifications were made.
  def flatten!
    if detect { |e| e.is_a?(Set) }
      replace(flatten())
    else
      nil
    end
  end

  # Returns true if the set contains the given object.
  def include?(o)
    @hash.include?(o)
  end
  alias member? include?

  # Returns true if the set is a superset of the given set.
  def superset?(set)
    set.is_a?(Set) or raise ArgumentError, "value must be a set"
    # Size check is a cheap early exit before element-wise comparison.
    return false if size < set.size
    set.all? { |o| include?(o) }
  end

  # Returns true if the set is a proper superset of the given set.
  def proper_superset?(set)
    set.is_a?(Set) or raise ArgumentError, "value must be a set"
    return false if size <= set.size
    set.all? { |o| include?(o) }
  end

  # Returns true if the set is a subset of the given set.
  def subset?(set)
    set.is_a?(Set) or raise ArgumentError, "value must be a set"
    return false if set.size < size
    all? { |o| set.include?(o) }
  end

  # Returns true if the set is a proper subset of the given set.
  def proper_subset?(set)
    set.is_a?(Set) or raise ArgumentError, "value must be a set"
    return false if set.size <= size
    all? { |o| set.include?(o) }
  end

  # Calls the given block once for each element in the set, passing
  # the element as parameter.
  def each
    @hash.each_key { |o| yield(o) }
    self
  end

  # Adds the given object to the set and returns self.  Use +merge+ to
  # add several elements at once.
  def add(o)
    @hash[o] = true
    self
  end
  alias << add

  # Adds the given object to the set and returns self.  If the
  # object is already in the set, returns nil.
  def add?(o)
    if include?(o)
      nil
    else
      add(o)
    end
  end

  # Deletes the given object from the set and returns self.  Use +subtract+ to
  # delete several items at once.
  def delete(o)
    @hash.delete(o)
    self
  end

  # Deletes the given object from the set and returns self.  If the
  # object is not in the set, returns nil.
  def delete?(o)
    if include?(o)
      delete(o)
    else
      nil
    end
  end

  # Deletes every element of the set for which block evaluates to
  # true, and returns self.
  def delete_if
    # Iterate over a snapshot (to_a) so that deleting from @hash while
    # traversing is safe.
    to_a.each { |o| @hash.delete(o) if yield(o) }
    self
  end

  # Do collect() destructively.
  def collect!
    set = self.class.new
    each { |o| set << yield(o) }
    replace(set)
  end
  alias map! collect!

  # Equivalent to Set#delete_if, but returns nil if no changes were
  # made.
  def reject!
    n = size
    delete_if { |o| yield(o) }
    size == n ? nil : self
  end

  # Merges the elements of the given enumerable object to the set and
  # returns self.
  def merge(enum)
    if enum.is_a?(Set)
      # Fast path: bulk-update straight from the other set's storage.
      @hash.update(enum.instance_eval { @hash })
    else
      enum.is_a?(Enumerable) or raise ArgumentError, "value must be enumerable"
      enum.each { |o| add(o) }
    end
    self
  end

  # Deletes every element that appears in the given enumerable object
  # and returns self.
  def subtract(enum)
    enum.is_a?(Enumerable) or raise ArgumentError, "value must be enumerable"
    enum.each { |o| delete(o) }
    self
  end

  # Returns a new set built by merging the set and the elements of the
  # given enumerable object.
  def |(enum)
    enum.is_a?(Enumerable) or raise ArgumentError, "value must be enumerable"
    dup.merge(enum)
  end
  alias + |
  alias union |

  # Returns a new set built by duplicating the set, removing every
  # element that appears in the given enumerable object.
  def -(enum)
    enum.is_a?(Enumerable) or raise ArgumentError, "value must be enumerable"
    dup.subtract(enum)
  end
  alias difference -

  # Returns a new set containing elements common to the set and the
  # given enumerable object.
  def &(enum)
    enum.is_a?(Enumerable) or raise ArgumentError, "value must be enumerable"
    n = self.class.new
    enum.each { |o| n.add(o) if include?(o) }
    n
  end
  alias intersection &

  # Returns a new set containing elements exclusive between the set
  # and the given enumerable object.  (set ^ enum) is equivalent to
  # ((set | enum) - (set & enum)).
  def ^(enum)
    enum.is_a?(Enumerable) or raise ArgumentError, "value must be enumerable"
    n = Set.new(enum)
    # Elements in both sets cancel out; elements in exactly one remain.
    each { |o| if n.include?(o) then n.delete(o) else n.add(o) end }
    n
  end

  # Returns true if two sets are equal.  The equality of each couple
  # of elements is defined according to Object#eql?.
  def ==(set)
    equal?(set) and return true
    set.is_a?(Set) && size == set.size or return false
    hash = @hash.dup
    set.all? { |o| hash.include?(o) }
  end

  # Hash of the backing storage, so eql? sets hash alike. # :nodoc:
  def hash # :nodoc:
    @hash.hash
  end

  def eql?(o) # :nodoc:
    return false unless o.is_a?(Set)
    @hash.eql?(o.instance_eval{@hash})
  end

  # Classifies the set by the return value of the given block and
  # returns a hash of {value => set of elements} pairs.  The block is
  # called once for each element of the set, passing the element as
  # parameter.
  #
  # e.g.:
  #
  #   require 'set'
  #   files = Set.new(Dir.glob("*.rb"))
  #   hash = files.classify { |f| File.mtime(f).year }
  #   p hash    # => {2000=>#<Set: {"a.rb", "b.rb"}>,
  #             #     2001=>#<Set: {"c.rb", "d.rb", "e.rb"}>,
  #             #     2002=>#<Set: {"f.rb"}>}
  def classify # :yields: o
    h = {}
    each { |i|
      x = yield(i)
      (h[x] ||= self.class.new).add(i)
    }
    h
  end

  # Divides the set into a set of subsets according to the commonality
  # defined by the given block.
  #
  # If the arity of the block is 2, elements o1 and o2 are in common
  # if block.call(o1, o2) is true.  Otherwise, elements o1 and o2 are
  # in common if block.call(o1) == block.call(o2).
  #
  # e.g.:
  #
  #   require 'set'
  #   numbers = Set[1, 3, 4, 6, 9, 10, 11]
  #   set = numbers.divide { |i,j| (i - j).abs == 1 }
  #   p set     # => #<Set: {#<Set: {1}>,
  #             #            #<Set: {11, 9, 10}>,
  #             #            #<Set: {3, 4}>,
  #             #            #<Set: {6}>}>
  def divide(&func)
    if func.arity == 2
      require 'tsort'
      # Build a digraph (element => related elements) on an anonymous
      # hash and group elements via TSort's strongly connected
      # components.
      class << dig = {} # :nodoc:
        include TSort
        alias tsort_each_node each_key
        def tsort_each_child(node, &block)
          fetch(node).each(&block)
        end
      end
      each { |u|
        dig[u] = a = []
        each{ |v| func.call(u, v) and a << v }
      }
      set = Set.new()
      dig.each_strongly_connected_component { |css|
        set.add(self.class.new(css))
      }
      set
    else
      # Unary block: elements with equal block values share a subset.
      Set.new(classify(&func).values)
    end
  end

  # Thread-local key used by #inspect to detect self-referencing sets.
  InspectKey = :__inspect_key__ # :nodoc:

  # Returns a string containing a human-readable representation of the
  # set. ("#<Set: {element1, element2, ...}>")
  def inspect
    ids = (Thread.current[InspectKey] ||= [])
    # Already being inspected further up the stack: cut the cycle.
    if ids.include?(object_id)
      return sprintf('#<%s: {...}>', self.class.name)
    end
    begin
      ids << object_id
      return sprintf('#<%s: {%s}>', self.class, to_a.inspect[1..-2])
    ensure
      ids.pop
    end
  end

  def pretty_print(pp)  # :nodoc:
    pp.text sprintf('#<%s: {', self.class.name)
    pp.nest(1) {
      pp.seplist(self) { |o|
        pp.pp o
      }
    }
    pp.text "}>"
  end

  def pretty_print_cycle(pp)    # :nodoc:
    pp.text sprintf('#<%s: {%s}>', self.class.name, empty? ? '' : '...')
  end
end
# SortedSet implements a set which elements are sorted in order.  See Set.
class SortedSet < Set
  @@setup = false

  class << self
    def [](*ary)        # :nodoc:
      new(ary)
    end

    # Lazily installs the real implementation the first time a
    # SortedSet is created: an RBTree-backed one when the rbtree gem is
    # available, otherwise a Hash-backed one that maintains a sorted
    # key cache (@keys).
    def setup   # :nodoc:
      @@setup and return

      module_eval {
        # a hack to shut up warning
        alias old_init initialize
        remove_method :old_init
      }
      begin
        require 'rbtree'

        module_eval %{
          def initialize(*args, &block)
            @hash = RBTree.new
            super
          end
        }
      rescue LoadError
        # No rbtree available: every mutator invalidates the @keys
        # cache, and #to_a rebuilds it (sorted) on demand.
        module_eval %{
          def initialize(*args, &block)
            @keys = nil
            super
          end

          def clear
            @keys = nil
            super
          end

          def replace(enum)
            @keys = nil
            super
          end

          def add(o)
            @keys = nil
            @hash[o] = true
            self
          end
          alias << add

          def delete(o)
            @keys = nil
            @hash.delete(o)
            self
          end

          def delete_if
            n = @hash.size
            super
            @keys = nil if @hash.size != n
            self
          end

          def merge(enum)
            @keys = nil
            super
          end

          def each
            to_a.each { |o| yield(o) }
            self
          end

          def to_a
            (@keys = @hash.keys).sort! unless @keys
            @keys
          end
        }
      end

      @@setup = true
    end
  end

  # NOTE: not infinite recursion.  SortedSet.setup redefines
  # #initialize (via module_eval above), so the inner call dispatches
  # to the freshly installed definition, not back to this one.
  def initialize(*args, &block) # :nodoc:
    SortedSet.setup
    initialize(*args, &block)
  end
end
module Enumerable
  # Makes a set from the enumerable object with given arguments.
  # Needs to +require "set"+ to use this method.
  #
  # +klass+ selects the concrete set class (e.g. SortedSet); extra
  # arguments and the block are forwarded to its constructor.
  def to_set(klass = Set, *args, &block)
    klass.new(self, *args, &block)
  end
end
# =begin
# == RestricedSet class
# RestricedSet implements a set with restrictions defined by a given
# block.
#
# === Super class
# Set
#
# === Class Methods
# --- RestricedSet::new(enum = nil) { |o| ... }
# --- RestricedSet::new(enum = nil) { |rset, o| ... }
# Creates a new restricted set containing the elements of the given
# enumerable object. Restrictions are defined by the given block.
#
# If the block's arity is 2, it is called with the RestrictedSet
# itself and an object to see if the object is allowed to be put in
# the set.
#
# Otherwise, the block is called with an object to see if the object
# is allowed to be put in the set.
#
# === Instance Methods
# --- restriction_proc
# Returns the restriction procedure of the set.
#
# =end
#
# class RestricedSet < Set
# def initialize(*args, &block)
# @proc = block or raise ArgumentError, "missing a block"
#
# if @proc.arity == 2
# instance_eval %{
# def add(o)
# @hash[o] = true if @proc.call(self, o)
# self
# end
# alias << add
#
# def add?(o)
# if include?(o) || !@proc.call(self, o)
# nil
# else
# @hash[o] = true
# self
# end
# end
#
# def replace(enum)
# enum.is_a?(Enumerable) or raise ArgumentError, "value must be enumerable"
# clear
# enum.each { |o| add(o) }
#
# self
# end
#
# def merge(enum)
# enum.is_a?(Enumerable) or raise ArgumentError, "value must be enumerable"
# enum.each { |o| add(o) }
#
# self
# end
# }
# else
# instance_eval %{
# def add(o)
# if @proc.call(o)
# @hash[o] = true
# end
# self
# end
# alias << add
#
# def add?(o)
# if include?(o) || !@proc.call(o)
# nil
# else
# @hash[o] = true
# self
# end
# end
# }
# end
#
# super(*args)
# end
#
# def restriction_proc
# @proc
# end
# end
# When this file is run directly, evaluate the unit tests stored in the
# DATA section below (everything after __END__).  The extra arguments to
# eval keep file/line info sane in backtraces.
if $0 == __FILE__
  eval DATA.read, nil, $0, __LINE__+4
end
__END__
require 'test/unit'
class TC_Set < Test::Unit::TestCase
def test_aref
assert_nothing_raised {
Set[]
Set[nil]
Set[1,2,3]
}
assert_equal(0, Set[].size)
assert_equal(1, Set[nil].size)
assert_equal(1, Set[[]].size)
assert_equal(1, Set[[nil]].size)
set = Set[2,4,6,4]
assert_equal(Set.new([2,4,6]), set)
end
def test_s_new
assert_nothing_raised {
Set.new()
Set.new(nil)
Set.new([])
Set.new([1,2])
Set.new('a'..'c')
Set.new('XYZ')
}
assert_raises(ArgumentError) {
Set.new(false)
}
assert_raises(ArgumentError) {
Set.new(1)
}
assert_raises(ArgumentError) {
Set.new(1,2)
}
assert_equal(0, Set.new().size)
assert_equal(0, Set.new(nil).size)
assert_equal(0, Set.new([]).size)
assert_equal(1, Set.new([nil]).size)
ary = [2,4,6,4]
set = Set.new(ary)
ary.clear
assert_equal(false, set.empty?)
assert_equal(3, set.size)
ary = [1,2,3]
s = Set.new(ary) { |o| o * 2 }
assert_equal([2,4,6], s.sort)
end
def test_clone
set1 = Set.new
set2 = set1.clone
set1 << 'abc'
assert_equal(Set.new, set2)
end
def test_dup
set1 = Set[1,2]
set2 = set1.dup
assert_not_same(set1, set2)
assert_equal(set1, set2)
set1.add(3)
assert_not_equal(set1, set2)
end
def test_size
assert_equal(0, Set[].size)
assert_equal(2, Set[1,2].size)
assert_equal(2, Set[1,2,1].size)
end
def test_empty?
assert_equal(true, Set[].empty?)
assert_equal(false, Set[1, 2].empty?)
end
def test_clear
set = Set[1,2]
ret = set.clear
assert_same(set, ret)
assert_equal(true, set.empty?)
end
def test_replace
set = Set[1,2]
ret = set.replace('a'..'c')
assert_same(set, ret)
assert_equal(Set['a','b','c'], set)
end
def test_to_a
set = Set[1,2,3,2]
ary = set.to_a
assert_equal([1,2,3], ary.sort)
end
def test_flatten
# test1
set1 = Set[
1,
Set[
5,
Set[7,
Set[0]
],
Set[6,2],
1
],
3,
Set[3,4]
]
set2 = set1.flatten
set3 = Set.new(0..7)
assert_not_same(set2, set1)
assert_equal(set3, set2)
# test2; destructive
orig_set1 = set1
set1.flatten!
assert_same(orig_set1, set1)
assert_equal(set3, set1)
# test3; multiple occurrences of a set in an set
set1 = Set[1, 2]
set2 = Set[set1, Set[set1, 4], 3]
assert_nothing_raised {
set2.flatten!
}
assert_equal(Set.new(1..4), set2)
# test4; recursion
set2 = Set[]
set1 = Set[1, set2]
set2.add(set1)
assert_raises(ArgumentError) {
set1.flatten!
}
# test5; miscellaneous
empty = Set[]
set = Set[Set[empty, "a"],Set[empty, "b"]]
assert_nothing_raised {
set.flatten
}
set1 = empty.merge(Set["no_more", set])
assert_nil(Set.new(0..31).flatten!)
x = Set[Set[],Set[1,2]].flatten!
y = Set[1,2]
assert_equal(x, y)
end
def test_include?
set = Set[1,2,3]
assert_equal(true, set.include?(1))
assert_equal(true, set.include?(2))
assert_equal(true, set.include?(3))
assert_equal(false, set.include?(0))
assert_equal(false, set.include?(nil))
set = Set["1",nil,"2",nil,"0","1",false]
assert_equal(true, set.include?(nil))
assert_equal(true, set.include?(false))
assert_equal(true, set.include?("1"))
assert_equal(false, set.include?(0))
assert_equal(false, set.include?(true))
end
def test_superset?
set = Set[1,2,3]
assert_raises(ArgumentError) {
set.superset?()
}
assert_raises(ArgumentError) {
set.superset?(2)
}
assert_raises(ArgumentError) {
set.superset?([2])
}
assert_equal(true, set.superset?(Set[]))
assert_equal(true, set.superset?(Set[1,2]))
assert_equal(true, set.superset?(Set[1,2,3]))
assert_equal(false, set.superset?(Set[1,2,3,4]))
assert_equal(false, set.superset?(Set[1,4]))
assert_equal(true, Set[].superset?(Set[]))
end
def test_proper_superset?
set = Set[1,2,3]
assert_raises(ArgumentError) {
set.proper_superset?()
}
assert_raises(ArgumentError) {
set.proper_superset?(2)
}
assert_raises(ArgumentError) {
set.proper_superset?([2])
}
assert_equal(true, set.proper_superset?(Set[]))
assert_equal(true, set.proper_superset?(Set[1,2]))
assert_equal(false, set.proper_superset?(Set[1,2,3]))
assert_equal(false, set.proper_superset?(Set[1,2,3,4]))
assert_equal(false, set.proper_superset?(Set[1,4]))
assert_equal(false, Set[].proper_superset?(Set[]))
end
def test_subset?
set = Set[1,2,3]
assert_raises(ArgumentError) {
set.subset?()
}
assert_raises(ArgumentError) {
set.subset?(2)
}
assert_raises(ArgumentError) {
set.subset?([2])
}
assert_equal(true, set.subset?(Set[1,2,3,4]))
assert_equal(true, set.subset?(Set[1,2,3]))
assert_equal(false, set.subset?(Set[1,2]))
assert_equal(false, set.subset?(Set[]))
assert_equal(true, Set[].subset?(Set[1]))
assert_equal(true, Set[].subset?(Set[]))
end
def test_proper_subset?
set = Set[1,2,3]
assert_raises(ArgumentError) {
set.proper_subset?()
}
assert_raises(ArgumentError) {
set.proper_subset?(2)
}
assert_raises(ArgumentError) {
set.proper_subset?([2])
}
assert_equal(true, set.proper_subset?(Set[1,2,3,4]))
assert_equal(false, set.proper_subset?(Set[1,2,3]))
assert_equal(false, set.proper_subset?(Set[1,2]))
assert_equal(false, set.proper_subset?(Set[]))
assert_equal(false, Set[].proper_subset?(Set[]))
end
def test_each
ary = [1,3,5,7,10,20]
set = Set.new(ary)
assert_raises(LocalJumpError) {
set.each
}
assert_nothing_raised {
set.each { |o|
ary.delete(o) or raise "unexpected element: #{o}"
}
ary.empty? or raise "forgotten elements: #{ary.join(', ')}"
}
end
def test_add
set = Set[1,2,3]
ret = set.add(2)
assert_same(set, ret)
assert_equal(Set[1,2,3], set)
ret = set.add?(2)
assert_nil(ret)
assert_equal(Set[1,2,3], set)
ret = set.add(4)
assert_same(set, ret)
assert_equal(Set[1,2,3,4], set)
ret = set.add?(5)
assert_same(set, ret)
assert_equal(Set[1,2,3,4,5], set)
end
def test_delete
set = Set[1,2,3]
ret = set.delete(4)
assert_same(set, ret)
assert_equal(Set[1,2,3], set)
ret = set.delete?(4)
assert_nil(ret)
assert_equal(Set[1,2,3], set)
ret = set.delete(2)
assert_equal(set, ret)
assert_equal(Set[1,3], set)
ret = set.delete?(1)
assert_equal(set, ret)
assert_equal(Set[3], set)
end
def test_delete_if
set = Set.new(1..10)
ret = set.delete_if { |i| i > 10 }
assert_same(set, ret)
assert_equal(Set.new(1..10), set)
set = Set.new(1..10)
ret = set.delete_if { |i| i % 3 == 0 }
assert_same(set, ret)
assert_equal(Set[1,2,4,5,7,8,10], set)
end
def test_collect!
set = Set[1,2,3,'a','b','c',-1..1,2..4]
ret = set.collect! { |i|
case i
when Numeric
i * 2
when String
i.upcase
else
nil
end
}
assert_same(set, ret)
assert_equal(Set[2,4,6,'A','B','C',nil], set)
end
def test_reject!
set = Set.new(1..10)
ret = set.reject! { |i| i > 10 }
assert_nil(ret)
assert_equal(Set.new(1..10), set)
ret = set.reject! { |i| i % 3 == 0 }
assert_same(set, ret)
assert_equal(Set[1,2,4,5,7,8,10], set)
end
def test_merge
set = Set[1,2,3]
ret = set.merge([2,4,6])
assert_same(set, ret)
assert_equal(Set[1,2,3,4,6], set)
end
def test_subtract
set = Set[1,2,3]
ret = set.subtract([2,4,6])
assert_same(set, ret)
assert_equal(Set[1,3], set)
end
def test_plus
set = Set[1,2,3]
ret = set + [2,4,6]
assert_not_same(set, ret)
assert_equal(Set[1,2,3,4,6], ret)
end
def test_minus
set = Set[1,2,3]
ret = set - [2,4,6]
assert_not_same(set, ret)
assert_equal(Set[1,3], ret)
end
def test_and
set = Set[1,2,3,4]
ret = set & [2,4,6]
assert_not_same(set, ret)
assert_equal(Set[2,4], ret)
end
def test_xor
set = Set[1,2,3,4]
ret = set ^ [2,4,5,5]
assert_not_same(set, ret)
assert_equal(Set[1,3,5], ret)
end
def test_eq
set1 = Set[2,3,1]
set2 = Set[1,2,3]
assert_equal(set1, set1)
assert_equal(set1, set2)
assert_not_equal(Set[1], [1])
set1 = Class.new(Set)["a", "b"]
set2 = Set["a", "b", set1]
set1 = set1.add(set1.clone)
# assert_equal(set1, set2)
# assert_equal(set2, set1)
assert_equal(set2, set2.clone)
assert_equal(set1.clone, set1)
assert_not_equal(Set[Exception.new,nil], Set[Exception.new,Exception.new], "[ruby-dev:26127]")
end
# def test_hash
# end
# def test_eql?
# end
def test_classify
set = Set.new(1..10)
ret = set.classify { |i| i % 3 }
assert_equal(3, ret.size)
assert_instance_of(Hash, ret)
ret.each_value { |value| assert_instance_of(Set, value) }
assert_equal(Set[3,6,9], ret[0])
assert_equal(Set[1,4,7,10], ret[1])
assert_equal(Set[2,5,8], ret[2])
end
def test_divide
set = Set.new(1..10)
ret = set.divide { |i| i % 3 }
assert_equal(3, ret.size)
n = 0
ret.each { |s| n += s.size }
assert_equal(set.size, n)
assert_equal(set, ret.flatten)
set = Set[7,10,5,11,1,3,4,9,0]
ret = set.divide { |a,b| (a - b).abs == 1 }
assert_equal(4, ret.size)
n = 0
ret.each { |s| n += s.size }
assert_equal(set.size, n)
assert_equal(set, ret.flatten)
ret.each { |s|
if s.include?(0)
assert_equal(Set[0,1], s)
elsif s.include?(3)
assert_equal(Set[3,4,5], s)
elsif s.include?(7)
assert_equal(Set[7], s)
elsif s.include?(9)
assert_equal(Set[9,10,11], s)
else
raise "unexpected group: #{s.inspect}"
end
}
end
def test_inspect
set1 = Set[1]
assert_equal('#<Set: {1}>', set1.inspect)
set2 = Set[Set[0], 1, 2, set1]
assert_equal(false, set2.inspect.include?('#<Set: {...}>'))
set1.add(set2)
assert_equal(true, set1.inspect.include?('#<Set: {...}>'))
end
# def test_pretty_print
# end
# def test_pretty_print_cycle
# end
end
class TC_SortedSet < Test::Unit::TestCase
def test_sortedset
s = SortedSet[4,5,3,1,2]
assert_equal([1,2,3,4,5], s.to_a)
prev = nil
s.each { |o| assert(prev < o) if prev; prev = o }
assert_not_nil(prev)
s.map! { |o| -2 * o }
assert_equal([-10,-8,-6,-4,-2], s.to_a)
prev = nil
ret = s.each { |o| assert(prev < o) if prev; prev = o }
assert_not_nil(prev)
assert_same(s, ret)
s = SortedSet.new([2,1,3]) { |o| o * -2 }
assert_equal([-6,-4,-2], s.to_a)
s = SortedSet.new(['one', 'two', 'three', 'four'])
a = []
ret = s.delete_if { |o| a << o; o[0] == ?t }
assert_same(s, ret)
assert_equal(['four', 'one'], s.to_a)
assert_equal(['four', 'one', 'three', 'two'], a)
s = SortedSet.new(['one', 'two', 'three', 'four'])
a = []
ret = s.reject! { |o| a << o; o[0] == ?t }
assert_same(s, ret)
assert_equal(['four', 'one'], s.to_a)
assert_equal(['four', 'one', 'three', 'two'], a)
s = SortedSet.new(['one', 'two', 'three', 'four'])
a = []
ret = s.reject! { |o| a << o; false }
assert_same(nil, ret)
assert_equal(['four', 'one', 'three', 'two'], s.to_a)
assert_equal(['four', 'one', 'three', 'two'], a)
end
end
class TC_Enumerable < Test::Unit::TestCase
def test_to_set
ary = [2,5,4,3,2,1,3]
set = ary.to_set
assert_instance_of(Set, set)
assert_equal([1,2,3,4,5], set.sort)
set = ary.to_set { |o| o * -2 }
assert_instance_of(Set, set)
assert_equal([-10,-8,-6,-4,-2], set.sort)
set = ary.to_set(SortedSet)
assert_instance_of(SortedSet, set)
assert_equal([1,2,3,4,5], set.to_a)
set = ary.to_set(SortedSet) { |o| o * -2 }
assert_instance_of(SortedSet, set)
assert_equal([-10,-8,-6,-4,-2], set.sort)
end
end
# class TC_RestricedSet < Test::Unit::TestCase
# def test_s_new
# assert_raises(ArgumentError) { RestricedSet.new }
#
# s = RestricedSet.new([-1,2,3]) { |o| o > 0 }
# assert_equal([2,3], s.sort)
# end
#
# def test_restriction_proc
# s = RestricedSet.new([-1,2,3]) { |o| o > 0 }
#
# f = s.restriction_proc
# assert_instance_of(Proc, f)
# assert(f[1])
# assert(!f[0])
# end
#
# def test_replace
# s = RestricedSet.new(-3..3) { |o| o > 0 }
# assert_equal([1,2,3], s.sort)
#
# s.replace([-2,0,3,4,5])
# assert_equal([3,4,5], s.sort)
# end
#
# def test_merge
# s = RestricedSet.new { |o| o > 0 }
# s.merge(-5..5)
# assert_equal([1,2,3,4,5], s.sort)
#
# s.merge([10,-10,-8,8])
# assert_equal([1,2,3,4,5,8,10], s.sort)
# end
# end
| google-code/android-scripting | jruby/src/lib/ruby/1.8/set.rb | Ruby | apache-2.0 | 27,421 |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.app.catalog.model;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import java.io.Serializable;
import java.sql.Timestamp;
/**
 * JPA entity mapped to one row of the APPLICATION_INTERFACE table in the
 * application catalog.
 *
 * <p>Pure data holder: all state lives in the annotated fields and is
 * exposed through conventional bean accessors, declared in field order.
 */
@Entity
@Table(name = "APPLICATION_INTERFACE")
public class ApplicationInterface implements Serializable {

    /** Primary key of the interface record. */
    @Id
    @Column(name = "INTERFACE_ID")
    private String interfaceID;

    @Column(name = "APPLICATION_NAME")
    private String appName;

    @Column(name = "APPLICATION_DESCRIPTION")
    private String appDescription;

    @Column(name = "CREATION_TIME")
    private Timestamp creationTime;

    @Column(name = "GATEWAY_ID")
    private String gatewayId;

    @Column(name = "ARCHIVE_WORKING_DIRECTORY")
    private boolean archiveWorkingDirectory;

    @Column(name = "HAS_OPTIONAL_FILE_INPUTS")
    private boolean hasOptionalFileInputs;

    @Column(name = "UPDATE_TIME")
    private Timestamp updateTime;

    public String getInterfaceID() {
        return interfaceID;
    }

    public void setInterfaceID(String id) {
        this.interfaceID = id;
    }

    public String getAppName() {
        return appName;
    }

    public void setAppName(String name) {
        this.appName = name;
    }

    public String getAppDescription() {
        return appDescription;
    }

    public void setAppDescription(String description) {
        this.appDescription = description;
    }

    public Timestamp getCreationTime() {
        return creationTime;
    }

    public void setCreationTime(Timestamp created) {
        this.creationTime = created;
    }

    public String getGatewayId() {
        return gatewayId;
    }

    public void setGatewayId(String gateway) {
        this.gatewayId = gateway;
    }

    public boolean isArchiveWorkingDirectory() {
        return archiveWorkingDirectory;
    }

    public void setArchiveWorkingDirectory(boolean archive) {
        this.archiveWorkingDirectory = archive;
    }

    public boolean isHasOptionalFileInputs() {
        return hasOptionalFileInputs;
    }

    public void setHasOptionalFileInputs(boolean optionalFileInputs) {
        this.hasOptionalFileInputs = optionalFileInputs;
    }

    public Timestamp getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Timestamp updated) {
        this.updateTime = updated;
    }
}
| machristie/airavata | modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/app/catalog/model/ApplicationInterface.java | Java | apache-2.0 | 3,284 |
from hazelcast.serialization.bits import *
from hazelcast.protocol.client_message import ClientMessage
from hazelcast.protocol.custom_codec import *
from hazelcast.util import ImmutableLazyDataList
from hazelcast.protocol.codec.map_message_type import *
# Wire-protocol identifiers for the map.removeEntryListener call.
REQUEST_TYPE = MAP_REMOVEENTRYLISTENER
RESPONSE_TYPE = 101
# The protocol marks this invocation as safe to retry on failure.
RETRYABLE = True
def calculate_size(name, registration_id):
    """Calculates the request payload size.

    The payload consists of exactly two serialized strings: the map name
    and the listener registration id.
    """
    return calculate_size_str(name) + calculate_size_str(registration_id)
def encode_request(name, registration_id):
    """ Encode request into client_message"""
    # Payload size is supplied up front so the message buffer can be
    # allocated once.
    client_message = ClientMessage(payload_size=calculate_size(name, registration_id))
    client_message.set_message_type(REQUEST_TYPE)
    client_message.set_retryable(RETRYABLE)
    # Field order must match the server-side decoder: name first, then
    # the registration id.
    client_message.append_str(name)
    client_message.append_str(registration_id)
    client_message.update_frame_length()
    return client_message
def decode_response(client_message, to_object=None):
    """Decode a remove-entry-listener response from ``client_message``.

    The response body is a single boolean indicating whether the
    listener was removed; ``to_object`` is unused for this codec but
    kept for interface parity with the other codecs.
    """
    return dict(response=client_message.read_bool())
| cangencer/hazelcast-python-client | hazelcast/protocol/codec/map_remove_entry_listener_codec.py | Python | apache-2.0 | 1,199 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for checks."""
from grr.lib import flags
from grr.lib import test_lib
from grr.lib.checks import hints
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import config_file as rdf_config_file
from grr.lib.rdfvalues import protodict as rdf_protodict
class HintsTests(test_lib.GRRBaseTest):
  """Test hint operations."""

  def testCheckOverlay(self):
    """Overlay(hint1, hint2) should populate hint2 with the values of hint1."""
    # Fully populated hint.
    full = {
        "problem": "Terminator needs trousers.\n",
        "fix": "Give me your clothes.\n",
        "format": "{mission}, {target}\n",
        "summary": "I'll be back."
    }
    # Partial hint
    partial = {
        "problem": "Terminator needs to go shopping.",
        "fix": "Phased plasma rifle in the 40-watt range.",
        "format": "",
        "summary": ""
    }
    # Partial overlaid with full.
    overlay = {
        "problem": "Terminator needs to go shopping.",
        "fix": "Phased plasma rifle in the 40-watt range.",
        "format": "{mission}, {target}",
        "summary": "I'll be back."
    }
    # Empty hint.
    empty = {"problem": "", "fix": "", "format": "", "summary": ""}

    # Empty hint should not clobber populated hint.
    starts_full = full.copy()
    starts_empty = empty.copy()
    hints.Overlay(starts_full, starts_empty)
    self.assertDictEqual(full, starts_full)
    self.assertDictEqual(empty, starts_empty)

    # Populate empty hint from partially populated hint.
    starts_partial = partial.copy()
    starts_empty = empty.copy()
    hints.Overlay(starts_empty, starts_partial)
    self.assertDictEqual(partial, starts_partial)
    self.assertDictEqual(partial, starts_empty)

    # Overlay the full and partial hints to get the hybrid: populated
    # fields of the child win, empty fields fall back to the parent.
    starts_full = full.copy()
    starts_partial = partial.copy()
    hints.Overlay(starts_partial, starts_full)
    self.assertDictEqual(full, starts_full)
    self.assertDictEqual(overlay, starts_partial)

  def testRdfFormatter(self):
    """Hints format RDF values with arbitrary values and attributes."""
    # Create a complex RDF value
    rdf = rdf_client.ClientSummary()
    rdf.system_info.system = "Linux"
    rdf.system_info.node = "coreai.skynet.com"
    # Users (repeated)
    rdf.users = [rdf_client.User(username=u) for u in ("root", "jconnor")]
    # Interface (nested, repeated)
    addresses = [
        rdf_client.NetworkAddress(human_readable=a)
        for a in ("1.1.1.1", "2.2.2.2", "3.3.3.3")
    ]
    eth0 = rdf_client.Interface(ifname="eth0", addresses=addresses[:2])
    ppp0 = rdf_client.Interface(ifname="ppp0", addresses=addresses[2])
    rdf.interfaces = [eth0, ppp0]

    # Repeated attributes are expected to render comma-joined.
    template = ("{system_info.system} {users.username} {interfaces.ifname} "
                "{interfaces.addresses.human_readable}\n")
    hinter = hints.Hinter(template=template)
    expected = "Linux root,jconnor eth0,ppp0 1.1.1.1,2.2.2.2,3.3.3.3"
    result = hinter.Render(rdf)
    self.assertEqual(expected, result)

  def testRdfFormatterHandlesKeyValuePair(self):
    """rdfvalue.KeyValue items need special handling to expand k and v."""
    key = rdf_protodict.DataBlob().SetValue("skynet")
    value = rdf_protodict.DataBlob().SetValue([1997])
    rdf = rdf_protodict.KeyValue(k=key, v=value)
    template = "{k}: {v}"
    hinter = hints.Hinter(template=template)
    expected = "skynet: 1997"
    result = hinter.Render(rdf)
    self.assertEqual(expected, result)

  def testRdfFormatterAttributedDict(self):
    """Templates can address attributes of an AttributedDict config."""
    sshd = rdf_config_file.SshdConfig()
    sshd.config = rdf_protodict.AttributedDict(skynet="operational")
    template = "{config.skynet}"
    hinter = hints.Hinter(template=template)
    expected = "operational"
    result = hinter.Render(sshd)
    self.assertEqual(expected, result)

  def testRdfFormatterFanOut(self):
    """Nested dicts/lists fan out into flat, comma-joined expansions."""
    rdf = rdf_protodict.Dict()
    user1 = rdf_client.User(username="drexler")
    user2 = rdf_client.User(username="joy")
    rdf["cataclysm"] = "GreyGoo"
    rdf["thinkers"] = [user1, user2]
    rdf["reference"] = {
        "ecophage": ["bots", ["nanobots", ["picobots"]]],
        "doomsday": {
            "books": ["cats cradle", "prey"]
        }
    }
    template = ("{cataclysm}; {thinkers.username}; {reference.ecophage}; "
                "{reference.doomsday}\n")
    hinter = hints.Hinter(template=template)
    expected = ("GreyGoo; drexler,joy; bots,nanobots,picobots; "
                "books:cats cradle,prey")
    result = hinter.Render(rdf)
    self.assertEqual(expected, result)

  def testStatModeFormat(self):
    """st_mode values render as ls-style permission strings."""
    # 33204 == 0o100664, i.e. a regular file with rw-rw-r-- permissions.
    rdf = rdf_client.StatEntry(st_mode=33204)
    expected = "-rw-rw-r--"
    template = "{st_mode}"
    hinter = hints.Hinter(template=template)
    result = hinter.Render(rdf)
    self.assertEqual(expected, result)
def main(argv):
  # Standard GRR test entry point; runs the test cases in this module.
  test_lib.main(argv)


if __name__ == "__main__":
  flags.StartMain(main)
| destijl/grr | grr/lib/checks/hints_test.py | Python | apache-2.0 | 4,932 |
// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
 * Extension ID of Files.app. All remote test-util messages below are
 * addressed to this extension.
 * @type {string}
 * @const
 */
var FILE_MANAGER_EXTENSIONS_ID = 'hhaomjibdihmijegdhdafkllkbggdgoj';
/**
 * Calls a remote test util in Files.app's extension. See: test_util.js.
 *
 * @param {string} func Function name.
 * @param {?string} appId Target window's App ID or null for functions
 *     not requiring a window.
 * @param {Array.<*>} args Array of arguments.
 * @param {function(*)} callback Callback handling the function's result.
 */
function callRemoteTestUtil(func, appId, args, callback) {
  // Package the call as a single message object and route it to Files.app.
  var request = {
    func: func,
    appId: appId,
    args: args
  };
  chrome.runtime.sendMessage(FILE_MANAGER_EXTENSIONS_ID, request, callback);
}
chrome.test.runTests([
  // Waits for the C++ code to send a string identifying a test, then runs that
  // test.
  function testRunner() {
    // Incognito context implies the guest-mode variant of the browser test.
    var command = chrome.extension.inIncognitoContext ? 'which test guest' :
                                                        'which test non-guest';
    chrome.test.sendMessage(command, function(testCaseName) {
      // Run one of the test cases defined in the testcase namespace, in
      // test_cases.js. The test case name is passed via StartTest call in
      // file_manager_browsertest.cc.
      if (testcase[testCaseName])
        testcase[testCaseName]();
      else
        chrome.test.fail('Bogus test name passed to testRunner()');
    });
  }
]);
| plxaye/chromium | src/chrome/test/data/extensions/api_test/file_manager_browsertest/background.js | JavaScript | apache-2.0 | 1,527 |
/*
Copyright 2007-2009 WebDriver committers
Copyright 2007-2009 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#include "stdafx.h"
#include "webdriver.h"
#include "finder.h"
#include "interactions.h"
#include "InternetExplorerDriver.h"
#include "logging.h"
#include "jsxpath.h"
#include "cookies.h"
#include "sizzle.h"
#include "utils.h"
#include "atoms.h"
#include "IEReturnTypes.h"
#include "windowHandling.h"
#include <stdio.h>
#include <iostream>
#include <string>
#include <vector>
// Shared catch-all used by the exported wd* entry points: converts a thrown
// std::wstring into ETIMEOUT/EEXPECTEDERROR and any other exception into
// EUNHANDLEDERROR, so failures cross the flat C ABI as status codes.
// NOTE: no comments may appear inside the macro body (line continuations).
#define END_TRY  catch(std::wstring& m) \
{ \
	if (m.find(L"TIME OUT") != std::wstring::npos) { return ETIMEOUT; } \
	wcerr << m.c_str() << endl; \
	LOG(WARN) << "Last error: " << GetLastError(); \
	return EEXPECTEDERROR; \
} \
catch (...) \
{ \
	safeIO::CoutA("CException caught in dll", true); \
	return EUNHANDLEDERROR; }

// Opaque handle types exposed through the C API. Each wraps a C++/COM object
// owned by this DLL and released by the matching wdFree* function.
struct WebDriver {
	InternetExplorerDriver *ie;
	long implicitWaitTimeout;  // milliseconds; used by the find-element retry loops
};

struct WebElement {
	ElementWrapper *element;
};

struct ScriptArgs {
	LONG currentIndex;
	int maxLength;
	SAFEARRAY* args;
};

struct ScriptResult {
	CComVariant result;
};

struct StringWrapper {
	wchar_t *text;
};

struct ElementCollection {
	std::vector<ElementWrapper*>* elements;
};

struct StringCollection {
	std::vector<std::wstring>* strings;
};

// Most recently created driver instance (single-instance model).
InternetExplorerDriver* openIeInstance = NULL;

// Computes the clock() deadline for implicit-wait polling loops.
// Sub-second positive timeouts are rounded up to a full second of polling.
clock_t endAt(WebDriver* driver) {
	clock_t end = clock() + (driver->implicitWaitTimeout / 1000 * CLOCKS_PER_SEC);
	if (driver->implicitWaitTimeout > 0 && driver->implicitWaitTimeout < 1000)
	{
		end += 1 * CLOCKS_PER_SEC;
	}
	return end;
}

// Closes every top-level IE window, then force-kills any remaining
// iexplore.exe processes via taskkill. Returns SUCCESS or EUNHANDLEDERROR.
int terminateIe()
{
	std::vector<HWND> allWindows;
	getTopLevelWindows(&allWindows);

	// Wait until all open windows are gone. Common case, no worries
	while (allWindows.size() > 0) {
		allWindows.clear();
		getTopLevelWindows(&allWindows);
		for (vector<HWND>::iterator curr = allWindows.begin();
			 curr != allWindows.end();
			 curr++) {
			SendMessage(*curr, WM_CLOSE, NULL, NULL);
		}

		// Pause to allow IE to process the message. If we don't do this and
		// we're using IE 8, and "Restore previous session" is enabled (an
		// increasingly common state) then a modal system dialog will be
		// displayed to the user. Not what we want.
		wait(500);
	}

	// If it's longer than this, we're on a very strange system
	wchar_t taskkillPath[256];
	if (!ExpandEnvironmentStrings(L"%SystemRoot%\\system32\\taskkill.exe", taskkillPath, 256))
	{
		cerr << "Unable to find taskkill application" << endl;
		return EUNHANDLEDERROR;
	}

	std::wstring args = L" /f /im iexplore.exe";
	STARTUPINFO startup_info;
	memset(&startup_info, 0, sizeof(startup_info));
	startup_info.cb = sizeof(startup_info);

	PROCESS_INFORMATION process_info;
	if (!CreateProcessW(taskkillPath, &args[0], NULL, NULL, false, DETACHED_PROCESS, NULL, NULL, &startup_info, &process_info))
	{
		cerr << "Could not execute taskkill. Bailing: " << GetLastError() << endl;
		return EUNHANDLEDERROR;
	}
	// Block until taskkill finishes, then release its process handles.
	WaitForSingleObject(process_info.hProcess, INFINITE);
	CloseHandle(process_info.hThread);
	CloseHandle(process_info.hProcess);

	return SUCCESS;
}
extern "C"
{

// String manipulation functions

// Reports the buffer size (in wchar_t, including the NUL terminator) needed
// to copy the wrapped string out via wdCopyString.
int wdStringLength(StringWrapper* string, int* length)
{
	if (!string) {
		cerr << "No string to get length of" << endl;
		*length = -1;
		return -1;
	}
	if (!string->text) {
		cerr << "No underlying string to get length of" << endl;
		*length = -1;
		return -2;
	}
	size_t len = wcslen(string->text);
	// +1 so callers allocate room for the terminating NUL.
	*length = (int) len + 1;

	return SUCCESS;
}

// Releases a StringWrapper and its owned character buffer.
int wdFreeString(StringWrapper* string)
{
	if (!string) {
		return  ENOSTRING;
	}

	if (string->text) delete[] string->text;
	delete string;

	return SUCCESS;
}

// Copies the wrapped string into a caller-supplied buffer of 'size' wchar_t.
int wdCopyString(StringWrapper* source, int size, wchar_t* dest)
{
	if (!source) {
		cerr << "No source wrapper" << endl;
		return ENOSTRING;
	}

	if (!source->text) {
		cerr << "No source text" << endl;
		return ENOSTRING;
	}

	wcscpy_s(dest, size, source->text);
	return SUCCESS;
}

// Collection manipulation functions

// Number of elements held by an ElementCollection.
int wdcGetElementCollectionLength(ElementCollection* collection, int* length)
{
	if (!collection || !collection->elements) return ENOCOLLECTION;

	*length = (int) collection->elements->size();

	return SUCCESS;
}

// Wraps the element at 'index' in a fresh WebElement handle owned by the
// caller. NOTE: the index is not bounds-checked here.
int wdcGetElementAtIndex(ElementCollection* collection, int index, WebElement** result)
{
	*result = NULL;
	if (!collection || !collection->elements) return ENOCOLLECTION;

	std::vector<ElementWrapper*>::const_iterator cur = collection->elements->begin();
	cur += index;

	WebElement* element = new WebElement();
	element->element = *cur;
	*result = element;

	return SUCCESS;
}

// Number of strings held by a StringCollection.
int wdcGetStringCollectionLength(StringCollection* collection, int* length)
{
	if (!collection) return ENOCOLLECTION;

	*length = (int) collection->strings->size();

	return SUCCESS;
}

// Copies the string at 'index' into a fresh StringWrapper owned by the
// caller. NOTE: the index is not bounds-checked here.
int wdcGetStringAtIndex(StringCollection* collection, int index, StringWrapper** result)
{
	*result = NULL;
	if (!collection) return ENOCOLLECTION;

	std::vector<std::wstring>::const_iterator cur = collection->strings->begin();
	cur += index;

	StringWrapper* wrapper = new StringWrapper();
	size_t size = (*cur).length() + 1;
	wrapper->text = new wchar_t[size];
	wcscpy_s(wrapper->text, size, (*cur).c_str());
	*result = wrapper;

	return SUCCESS;
}

// Element manipulation functions

// Releases a WebElement handle and the ElementWrapper it owns.
int wdeFreeElement(WebElement* element)
{
	if (!element)
		return  ENOSUCHDRIVER;

	if (element->element) delete element->element;
	delete element;

	return SUCCESS;
}

// Releases an ElementCollection; optionally also deletes the contained
// ElementWrappers (pass 0 if they were handed out via wdcGetElementAtIndex).
int wdFreeElementCollection(ElementCollection* collection, int alsoFreeElements)
{
	if (!collection || !collection->elements)
		return ENOSUCHCOLLECTION;

	if (alsoFreeElements) {
		std::vector<ElementWrapper*>::const_iterator cur = collection->elements->begin();
		std::vector<ElementWrapper*>::const_iterator end = collection->elements->end();

		while (cur != end) {
			delete *cur;
			cur++;
		}
	}

	delete collection->elements;
	delete collection;

	return SUCCESS;
}

// Releases a StringCollection and its owned vector.
int wdFreeStringCollection(StringCollection* collection)
{
	if (!collection || !collection->strings)
		return ENOSUCHCOLLECTION;

	delete collection->strings;
	delete collection;

	return SUCCESS;
}

// Releases a ScriptArgs handle and destroys its SAFEARRAY payload.
int wdFreeScriptArgs(ScriptArgs* scriptArgs)
{
	if (!scriptArgs || !scriptArgs->args)
		return ENOSUCHCOLLECTION;

	SafeArrayDestroy(scriptArgs->args);
	delete scriptArgs;

	return SUCCESS;
}

// Releases a ScriptResult, clearing the contained VARIANT first.
int wdFreeScriptResult(ScriptResult* scriptResult)
{
	if (!scriptResult)
		return ENOCOLLECTION;

	VariantClear(&scriptResult->result);

	delete scriptResult;

	return SUCCESS;
}
// Driver manipulation functions

// Tears down a driver: kills all IE instances, then frees the handle.
int wdFreeDriver(WebDriver* driver)
{
	if (!driver || !driver->ie) return ENOSUCHDRIVER;
	try {
		terminateIe();
	} catch (...) {
		// Fine. We're quitting anyway.
	}
	delete driver->ie;
	delete driver;
	driver = NULL;

	// Let the IE COM instance fade away
	wait(4000);

	return SUCCESS;
}

// Creates a fresh, visible IE driver instance after killing any leftovers.
// On success *result owns the new handle; implicit wait defaults to 0.
int wdNewDriverInstance(WebDriver** result)
{
	*result = NULL;
	TRY
	{
		terminateIe();

/*
		wchar_t iePath[256];
		if (!ExpandEnvironmentStrings(L"%ProgramFiles%\\Internet Explorer\\iexplore.exe", iePath, 256))
		{
			cerr << "Unable to find IE" << endl;
			return EUNHANDLEDERROR;
		}
		memset(&startup_info, 0, sizeof(startup_info));
		startup_info.cb = sizeof(startup_info);

		args = L"about:blank";
		if (!CreateProcessW(iePath, &args[0], NULL, NULL, false, 0, NULL, NULL, &startup_info, &process_info))
		{
			cerr << "Could not execute IE. Bailing: " << GetLastError() << endl;
			return EUNHANDLEDERROR;
		}
*/
		WebDriver *driver = new WebDriver();

		driver->ie = new InternetExplorerDriver();
		driver->ie->setVisible(true);
		driver->implicitWaitTimeout = 0;
		openIeInstance = driver->ie;

		*result = driver;

		return SUCCESS;
	}
	END_TRY

	return ENOSUCHDRIVER;
}

// Navigates to 'url' and blocks until navigation completes.
int wdGet(WebDriver* driver, const wchar_t* url)
{
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	try {
		driver->ie->get(url);
		driver->ie->waitForNavigateToFinish();

		return SUCCESS;
	} END_TRY;
}

// Browser history: back.
int wdGoBack(WebDriver* driver)
{
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	try {
		driver->ie->goBack();
		return SUCCESS;
	} END_TRY;
}

// Browser history: forward.
int wdGoForward(WebDriver* driver)
{
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	try {
		driver->ie->goForward();
		return SUCCESS;
	} END_TRY;
}

// Reloads the page by re-navigating to the current URL.
int wdRefresh(WebDriver* driver)
{
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	StringWrapper* wrapper;
	int result = wdGetCurrentUrl(driver, &wrapper);
	if (result != SUCCESS) {
		return result;
	}

	result = wdGet(driver, wrapper->text);
	wdFreeString(wrapper);
	return result;
}

// Closes the current browser window.
int wdClose(WebDriver* driver)
{
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	try {
		driver->ie->close();
		return SUCCESS;
	} END_TRY
}

// Reads browser visibility into *value (1 = visible, 0 = hidden).
int wdGetVisible(WebDriver* driver, int* value)
{
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	try {
		*value = driver->ie->getVisible() ? 1 : 0;
		return SUCCESS;
	} END_TRY;
}

// Shows (non-zero) or hides (0) the browser window.
int wdSetVisible(WebDriver* driver, int value)
{
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	try {
		driver->ie->setVisible(value != 0);
	} END_TRY;

	return SUCCESS;
}

// Returns the current URL as a caller-owned StringWrapper.
int wdGetCurrentUrl(WebDriver* driver, StringWrapper** result)
{
	*result = NULL;
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	try {
		const std::wstring originalString(driver->ie->getCurrentUrl());
		size_t length = originalString.length() + 1;
		wchar_t* toReturn = new wchar_t[length];

		wcscpy_s(toReturn, length, originalString.c_str());

		StringWrapper* res = new StringWrapper();
		res->text = toReturn;

		*result = res;

		return SUCCESS;
	} END_TRY;
}

// Returns the page title as a caller-owned StringWrapper.
int wdGetTitle(WebDriver* driver, StringWrapper** result)
{
	*result = NULL;
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	try {
		const std::wstring originalString(driver->ie->getTitle());
		size_t length = originalString.length() + 1;
		wchar_t* toReturn = new wchar_t[length];

		wcscpy_s(toReturn, length, originalString.c_str());

		StringWrapper* res = new StringWrapper();
		res->text = toReturn;

		*result = res;

		return SUCCESS;
	} END_TRY;
}

// Returns the page source as a caller-owned StringWrapper.
int wdGetPageSource(WebDriver* driver, StringWrapper** result)
{
	*result = NULL;
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	try {
		const std::wstring originalString(driver->ie->getPageSource());
		size_t length = originalString.length() + 1;
		wchar_t* toReturn = new wchar_t[length];

		wcscpy_s(toReturn, length, originalString.c_str());

		StringWrapper* res = new StringWrapper();
		res->text = toReturn;

		*result = res;

		return SUCCESS;
	} END_TRY;
}

// Returns the cookie string for the current page as a StringWrapper.
int wdGetCookies(WebDriver* driver, StringWrapper** result)
{
	*result = NULL;
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	try {
		const std::wstring originalString(driver->ie->getCookies());
		size_t length = originalString.length() + 1;
		wchar_t* toReturn = new wchar_t[length];

		wcscpy_s(toReturn, length, originalString.c_str());

		StringWrapper* res = new StringWrapper();
		res->text = toReturn;

		*result = res;

		return SUCCESS;
	} END_TRY;
}

// Adds a cookie (formatted as a "name=value; ..." cookie string).
int wdAddCookie(WebDriver* driver, const wchar_t* cookie)
{
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	try {
		return driver->ie->addCookie(cookie);
	} END_TRY;
}
// Deletes the named cookie by running the DELETECOOKIES script in the page.
// Returns the script-execution status code.
int wdDeleteCookie(WebDriver* driver, const wchar_t* cookieName)
{
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	// Assemble the cookie-deletion script from its atom fragments.
	std::wstring script;
	for (int i = 0; DELETECOOKIES[i]; i++) {
		script += DELETECOOKIES[i];
	}

	ScriptArgs* args;
	int result = wdNewScriptArgs(&args, 1);
	if (result != SUCCESS) {
		return result;
	}
	wdAddStringScriptArg(args, cookieName);

	ScriptResult* scriptResult = NULL;
	result = wdExecuteScript(driver, script.c_str(), args, &scriptResult);
	wdFreeScriptArgs(args);

	// CONSISTENCY FIX: release via wdFreeScriptResult (VariantClear + delete)
	// like every other call site, instead of a bare delete. Safe on NULL.
	wdFreeScriptResult(scriptResult);

	return result;
}
// Returns the element that currently has focus, wrapped for the caller.
int wdSwitchToActiveElement(WebDriver* driver, WebElement** result)
{
	*result = NULL;
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	try {
		ElementWrapper* element = driver->ie->getActiveElement();
		if (!element)
			return ENOSUCHELEMENT;

		WebElement* toReturn = new WebElement();
		toReturn->element = element;

		*result = toReturn;

		return SUCCESS;
	} END_TRY;
}

// Switches to the window named 'name', retrying because the window may not
// exist yet (up to 8 attempts, 500ms apart).
int wdSwitchToWindow(WebDriver* driver, const wchar_t* name)
{
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	try {
		int result;
		// It's entirely possible the window to switch to isn't here yet.
		// TODO(simon): Make this configurable
		for (int i = 0; i < 8; i++) {
			result = driver->ie->switchToWindow(name);
			if (result == SUCCESS) { break; }
			wait(500);
		}
		return result;
	} END_TRY;
}

// Switches to the frame addressed by 'path', with the same retry scheme.
int wdSwitchToFrame(WebDriver* driver, const wchar_t* path)
{
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	try {
		// TODO(simon): Make this configurable
		for (int i = 0; i < 8; i++) {
			bool result = driver->ie->switchToFrame(path);
			if (result) { return SUCCESS; }
			wait(500);
		}
		return ENOSUCHFRAME;
	} END_TRY;
}

// Blocks until the current navigation has finished.
int wdWaitForLoadToComplete(WebDriver* driver)
{
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	try {
		driver->ie->waitForNavigateToFinish();
		return SUCCESS;
	} END_TRY;
}

// Returns the current window handle as a caller-owned StringWrapper.
int wdGetCurrentWindowHandle(WebDriver* driver, StringWrapper** handle)
{
	if (!driver || !driver->ie) return ENOSUCHDRIVER;

	try {
		const std::wstring originalString(driver->ie->getHandle());

		// TODO(simon): Check that the handle is in the map of known driver instances

		size_t length = originalString.length() + 1;
		wchar_t* toReturn = new wchar_t[length];

		wcscpy_s(toReturn, length, originalString.c_str());

		StringWrapper* res = new StringWrapper();
		res->text = toReturn;

		*handle = res;

		return SUCCESS;
	} END_TRY;
}

// Returns every known window handle as a caller-owned StringCollection.
int wdGetAllWindowHandles(WebDriver* driver, StringCollection** handles)
{
	if (!driver || !driver->ie) return ENOSUCHDRIVER;
	*handles = NULL;

	try {
		std::vector<std::wstring> rawHandles = driver->ie->getAllHandles();
		StringCollection* collection = new StringCollection();
		collection->strings = new std::vector<std::wstring>();
		for (std::vector<std::wstring>::iterator curr = rawHandles.begin();
			curr != rawHandles.end();
			curr++) {
				collection->strings->push_back(std::wstring(*curr));
		}

		*handles = collection;

		return SUCCESS;
	} END_TRY;
}

// Guard used by the wde* element operations: rejects NULL handles and
// elements that are no longer attached to the DOM (EOBSOLETEELEMENT).
int verifyFresh(WebElement* element)
{
	if (!element || !element->element) { return ENOSUCHELEMENT; }

	try {
		if (!element->element->isFresh())
		{
			return EOBSOLETEELEMENT;
		}
	} END_TRY;
	return SUCCESS;
}

// Clicks the element (after freshness check).
int wdeClick(WebElement* element)
{
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		res = element->element->click();

		return res;
	} END_TRY;
}
// Reads an attribute of 'element' by running the shared getAttribute atom.
// On success *result is a caller-owned StringWrapper, or stays NULL when the
// attribute is absent/null. NOTE: the 'driver' parameter is unused; the
// script runs against the element's own parent driver.
int wdeGetAttribute(WebDriver* driver, WebElement* element, const wchar_t* name, StringWrapper** result)
{
	*result = NULL;

	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		// Build a self-contained script: the GET_ATTRIBUTE atom plus a
		// trampoline taking (element, attributeName).
		std::wstring script(L"(function() { return function(){ ");
		for (int j = 0; GET_ATTRIBUTE[j]; j++) {
			script += GET_ATTRIBUTE[j];
			script += L"\n";
		}
		script += L"var element = arguments[0];\n";
		script += L"var attributeName = arguments[1];\n";
		script += L"return getAttribute(element, attributeName);\n";
		script += L"};})();";

		ScriptArgs* args;
		res = wdNewScriptArgs(&args, 2);
		if (res != SUCCESS) {
			return res;
		}
		wdAddElementScriptArg(args, element);
		wdAddStringScriptArg(args, name);

		// Temporary driver handle bound to the element's own IE instance.
		WebDriver* tempDriver = new WebDriver();
		tempDriver->ie = element->element->getParent();

		ScriptResult* scriptResult = NULL;
		res = wdExecuteScript(tempDriver, script.c_str(), args, &scriptResult);
		wdFreeScriptArgs(args);

		if (res != SUCCESS)
		{
			tempDriver->ie = NULL;
			delete tempDriver;
			wdFreeScriptResult(scriptResult);
			return res;
		}

		// BUG FIX: query the result type BEFORE destroying the temporary
		// driver. Previously the driver was deleted first and then passed
		// (dangling) to wdGetScriptResultType — a use-after-free.
		int type;
		wdGetScriptResultType(tempDriver, scriptResult, &type);

		tempDriver->ie = NULL;
		delete tempDriver;

		if (type != TYPE_EMPTY && scriptResult->result.vt != VT_NULL) {
			const std::wstring originalString(comvariant2cw(scriptResult->result));
			size_t length = originalString.length() + 1;
			wchar_t* toReturn = new wchar_t[length];
			wcscpy_s(toReturn, length, originalString.c_str());
			*result = new StringWrapper();
			(*result)->text = toReturn;
		}

		wdFreeScriptResult(scriptResult);
		return SUCCESS;
	} END_TRY;
}
// Returns the computed value of a CSS property as a StringWrapper.
int wdeGetValueOfCssProperty(WebElement* element, const wchar_t* name, StringWrapper** result)
{
	*result = NULL;
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		const std::wstring originalString(element->element->getValueOfCssProperty(name));
		size_t length = originalString.length() + 1;
		wchar_t* toReturn = new wchar_t[length];

		wcscpy_s(toReturn, length, originalString.c_str());

		StringWrapper* res = new StringWrapper();
		res->text = toReturn;

		*result = res;

		return SUCCESS;
	} END_TRY;
}

// Returns the element's visible text as a StringWrapper.
int wdeGetText(WebElement* element, StringWrapper** result)
{
	*result = NULL;
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		const std::wstring originalString(element->element->getText());
		size_t length = originalString.length() + 1;
		wchar_t* toReturn = new wchar_t[length];

		wcscpy_s(toReturn, length, originalString.c_str());

		StringWrapper* res = new StringWrapper();
		res->text = toReturn;

		*result = res;

		return SUCCESS;
	} END_TRY;
}

// Returns the element's tag name as a StringWrapper.
int wdeGetTagName(WebElement* element, StringWrapper** result)
{
	*result = NULL;
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		const std::wstring originalString(element->element->getTagName());
		size_t length = originalString.length() + 1;
		wchar_t* toReturn = new wchar_t[length];

		wcscpy_s(toReturn, length, originalString.c_str());

		StringWrapper* res = new StringWrapper();
		res->text = toReturn;

		*result = res;

		return SUCCESS;
	} END_TRY;
}
// Reports whether the element is selected (*result: 1/0) by reading its
// "selected" attribute via wdeGetAttribute.
int wdeIsSelected(WebElement* element, int* result)
{
	*result = 0;

	// ROBUSTNESS FIX: validate the element like every sibling wde* function;
	// previously element->element was dereferenced with no NULL/freshness check.
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		StringWrapper* wrapper;
		// Temporary driver handle bound to the element's own IE instance,
		// matching what wdeGetAttribute expects.
		WebDriver* driver = new WebDriver();
		driver->ie = element->element->getParent();

		res = wdeGetAttribute(driver, element, L"selected", &wrapper);
		driver->ie = NULL;
		delete driver;

		if (res != SUCCESS)
		{
			return res;
		}

		*result = wrapper && wrapper->text && wcscmp(L"true", wrapper->text) == 0 ? 1 : 0;

		wdFreeString(wrapper);
		return SUCCESS;
	} END_TRY;
}
// Marks the element as selected (checkbox/option/radio).
int wdeSetSelected(WebElement* element)
{
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		return element->element->setSelected();
	} END_TRY;
}

// Toggles the element's selected state; on success *result is the new state.
int wdeToggle(WebElement* element, int* result)
{
	*result = 0;
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		int res = element->element->toggle();

		if (res == SUCCESS) {
			return wdeIsSelected(element, result);
		}

		return res;
	} END_TRY;
}

// Reports whether the element is enabled (*result: 1/0).
int wdeIsEnabled(WebElement* element, int* result)
{
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		*result = element->element->isEnabled() ? 1 : 0;

		return SUCCESS;
	} END_TRY;
}

// Reports whether the element is displayed (*result: 1/0).
int wdeIsDisplayed(WebElement* element, int* result)
{
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		*result = element->element->isDisplayed() ? 1 : 0;

		return SUCCESS;
	} END_TRY;
}

// Types the given text into the element.
int wdeSendKeys(WebElement* element, const wchar_t* text)
{
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		return element->element->sendKeys(text);
	} END_TRY;
}

// Sends a key-down only.
int wdeSendKeyPress(WebElement* element, const wchar_t* text)
{
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		return element->element->sendKeyPress(text);
	} END_TRY;
}

// Sends a key-up only.
int wdeSendKeyRelease(WebElement* element, const wchar_t* text)
{
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		return element->element->sendKeyRelease(text);
	} END_TRY;
}

// Clears the element's value (text inputs / textareas).
int wdeClear(WebElement* element)
{
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		element->element->clear();
		return SUCCESS;
	} END_TRY;
}

// Submits the form this element belongs to.
int wdeSubmit(WebElement* element)
{
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		element->element->submit();
		return SUCCESS;
	} END_TRY;
}

// Scrolls the element into view and reports its window, position and size.
int wdeGetDetailsOnceScrolledOnToScreen(WebElement* element, HWND* hwnd, long* x, long* y, long* width, long* height)
{
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		element->element->getLocationWhenScrolledIntoView(hwnd, x, y, width, height);
		return SUCCESS;
	} END_TRY;
}

// Reports the element's location without scrolling.
int wdeGetLocation(WebElement* element, long* x, long* y)
{
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		element->element->getLocation(x, y);
		return SUCCESS;
	} END_TRY;
}

// Reports the element's width and height.
int wdeGetSize(WebElement* element, long* width, long* height)
{
	int res = verifyFresh(element);	if (res != SUCCESS) { return res; }

	try {
		int result = element->element->getWidth(width);
		if (result != SUCCESS) {
			return result;
		}
		result = element->element->getHeight(height);

		return result;
	} END_TRY;
}
// Finds the first element with the given id, searching under 'element' when
// provided (else the whole document). Polls until the implicit-wait deadline.
// This is the reference shape the other wdFindElementBy* functions mirror.
int wdFindElementById(WebDriver* driver, WebElement* element, const wchar_t* id, WebElement** result)
{
	*result = NULL;
	if (!driver || !driver->ie) { return ENOSUCHDRIVER; }

	InternetExplorerDriver* ie = driver->ie;
	CComPtr<IHTMLElement> elem;
	if (element && element->element) {
		elem = element->element->getWrappedElement();
	}

	try {
		clock_t end = endAt(driver);
		int res = ENOSUCHELEMENT;
		do {
			ElementWrapper* wrapper;
			res = ie->selectElementById(elem, id, &wrapper);

			if (res != SUCCESS) {
				continue;
			}

			WebElement* toReturn = new WebElement();
			toReturn->element = wrapper;

			*result = toReturn;
			return SUCCESS;
		} while (clock() < end);

		// Timed out: report the last error from the selector.
		return res;
	} END_TRY;
}

// Finds all elements with the given id; always succeeds with a (possibly
// empty) collection once the implicit wait expires.
int wdFindElementsById(WebDriver* driver, WebElement* element, const wchar_t* id, ElementCollection** result)
{
	*result = NULL;
	if (!driver || !driver->ie) { return ENOSUCHDRIVER; }

	try {
		InternetExplorerDriver* ie = driver->ie;
		CComPtr<IHTMLElement> elem;
		if (element && element->element) {
			elem = element->element->getWrappedElement();
		}

		clock_t end = endAt(driver);
		ElementCollection* collection = new ElementCollection();
		*result = collection;

		do {
			collection->elements = driver->ie->selectElementsById(elem, id);
			if (collection->elements->size() > 0) {
				return SUCCESS;
			}
		} while (clock() < end);

		return SUCCESS;
	} END_TRY;
}
// Finds the first element with the given name attribute, searching under
// 'element' when provided (else the whole document). Polls until the
// implicit-wait deadline.
int wdFindElementByName(WebDriver* driver, WebElement* element, const wchar_t* name, WebElement** result)
{
	*result = NULL;
	if (!driver || !driver->ie) { return ENOSUCHDRIVER; }

	InternetExplorerDriver* ie = driver->ie;
	CComPtr<IHTMLElement> elem;
	if (element && element->element) {
		elem = element->element->getWrappedElement();
	}

	try {
		clock_t end = endAt(driver);
		int res = ENOSUCHELEMENT;
		do {
			ElementWrapper* wrapper;
			// BUG FIX: this used to be 'int res = ...', shadowing the outer
			// 'res' so the post-loop return always reported ENOSUCHELEMENT
			// and discarded the selector's real error code. Also removed an
			// unused CComPtr<IHTMLDOMNode> that shadowed the same name.
			res = ie->selectElementByName(elem, name, &wrapper);

			if (res != SUCCESS) {
				continue;
			}

			WebElement* toReturn = new WebElement();
			toReturn->element = wrapper;

			*result = toReturn;
			return SUCCESS;
		} while (clock() < end);

		return res;
	} END_TRY;
}
// Finds all elements with the given name attribute; always returns a
// (possibly empty) collection once the implicit wait expires.
int wdFindElementsByName(WebDriver* driver, WebElement* element, const wchar_t* name, ElementCollection** result)
{
	*result = NULL;
	// ROBUSTNESS FIX: added the ENOSUCHDRIVER guard present in every sibling
	// wdFindElements* function; previously a NULL driver was dereferenced.
	if (!driver || !driver->ie) { return ENOSUCHDRIVER; }

	try {
		InternetExplorerDriver* ie = driver->ie;
		CComPtr<IHTMLElement> elem;
		if (element && element->element) {
			elem = element->element->getWrappedElement();
		}

		ElementCollection* collection = new ElementCollection();
		*result = collection;

		clock_t end = endAt(driver);
		do {
			collection->elements = driver->ie->selectElementsByName(elem, name);
			if (collection->elements->size() > 0) {
				return SUCCESS;
			}
		} while (clock() < end);

		return SUCCESS;
	} END_TRY;
}
// Finds the first element with the given class name, polling until the
// implicit-wait deadline.
int wdFindElementByClassName(WebDriver* driver, WebElement* element, const wchar_t* className, WebElement** result)
{
	*result = NULL;
	if (!driver || !driver->ie) { return ENOSUCHDRIVER; }

	InternetExplorerDriver* ie = driver->ie;
	CComPtr<IHTMLElement> elem;
	if (element && element->element) {
		elem = element->element->getWrappedElement();
	}

	try {
		clock_t end = endAt(driver);
		int res = ENOSUCHELEMENT;
		do {
			ElementWrapper* wrapper;
			// BUG FIX: was 'int res = ...', shadowing the outer 'res' so the
			// post-loop return always reported ENOSUCHELEMENT instead of the
			// selector's last real error code.
			res = ie->selectElementByClassName(elem, className, &wrapper);

			if (res != SUCCESS) {
				continue;
			}

			WebElement* toReturn = new WebElement();
			toReturn->element = wrapper;

			*result = toReturn;
			return SUCCESS;
		} while (clock() < end);

		return res;
	} END_TRY;
}
// Finds all elements with the given class name; always returns a (possibly
// empty) collection once the implicit wait expires.
int wdFindElementsByClassName(WebDriver* driver, WebElement* element, const wchar_t* className, ElementCollection** result)
{
	*result = NULL;
	if (!driver || !driver->ie) { return ENOSUCHDRIVER; }

	try {
		InternetExplorerDriver* ie = driver->ie;
		CComPtr<IHTMLElement> elem;
		if (element && element->element) {
			elem = element->element->getWrappedElement();
		}

		clock_t end = endAt(driver);
		ElementCollection* collection = new ElementCollection();
		*result = collection;

		do {
			collection->elements = driver->ie->selectElementsByClassName(elem, className);
			if (collection->elements->size() > 0) {
				return SUCCESS;
			}
		} while (clock() < end);

		return SUCCESS;
	} END_TRY;
}
// Finds the first element matching a CSS selector. Uses the native
// querySelector when available, falling back to the bundled Sizzle engine.
// Polls until the implicit-wait deadline.
int wdFindElementByCss(WebDriver* driver, WebElement* element, const wchar_t* selector, WebElement** out)
{
	*out = NULL;
	if (!driver || !driver->ie) { return ENOSUCHDRIVER; }

	try {
		// PERF: build the locator script once — it is loop-invariant
		// (previously rebuilt, including all of Sizzle, on every retry).
		std::wstring script(L"(function() { return function(){");
		for (int i = 0; SIZZLE[i]; i++) {
			script += SIZZLE[i];
			script += L"\n";
		}
		script += L"var root = arguments[1] ? arguments[1] : document.documentElement;";
		script += L"if (root['querySelector']) { return root.querySelector(arguments[0]); } ";
		script += L"var results = []; Sizzle(arguments[0], root, results);";
		script += L"return results.length > 0 ? results[0] : null;";
		script += L"};})();";

		clock_t end = endAt(driver);
		int result = ENOSUCHELEMENT;
		do {
			// BUG FIX: initialise 'args'; on allocation failure the old code
			// passed this uninitialized pointer to wdFreeScriptArgs.
			ScriptArgs* args = NULL;
			result = wdNewScriptArgs(&args, 2);
			if (result != SUCCESS) {
				continue;
			}

			result = wdAddStringScriptArg(args, selector);
			if (result != SUCCESS) {
				wdFreeScriptArgs(args);
				continue;
			}

			if (element) {
				result = wdAddElementScriptArg(args, element);
			}
			if (result != SUCCESS) {
				wdFreeScriptArgs(args);
				continue;
			}

			// BUG FIX: initialise so a failed wdExecuteScript cannot lead to
			// freeing an indeterminate pointer below.
			ScriptResult* queryResult = NULL;
			result = wdExecuteScript(driver, script.c_str(), args, &queryResult);
			wdFreeScriptArgs(args);

			// And be done
			if (result == SUCCESS) {
				int type = 0;
				result = wdGetScriptResultType(driver, queryResult, &type);

				if (type != TYPE_EMPTY) {
					result = wdGetElementScriptResult(queryResult, driver, out);
				} else {
					// No match this round; free and retry until the deadline.
					result = ENOSUCHELEMENT;
					wdFreeScriptResult(queryResult);
					continue;
				}
			}
			wdFreeScriptResult(queryResult);
			return result;
		} while (clock() < end);

		return result;
	} END_TRY;
}
// Finds all elements matching a CSS selector (querySelectorAll or Sizzle
// fallback) and returns them as a caller-owned ElementCollection.
int wdFindElementsByCss(WebDriver* driver, WebElement* element, const wchar_t* selector, ElementCollection** out)
{
	*out = NULL;
	if (!driver || !driver->ie) { return ENOSUCHDRIVER; }

	try {
		// PERF: build the locator script once — it is loop-invariant.
		std::wstring script(L"(function() { return function(){");
		for (int i = 0; SIZZLE[i]; i++) {
			script += SIZZLE[i];
			script += L"\n";
		}
		script += L"var root = arguments[1] ? arguments[1] : document.documentElement;";
		script += L"if (root['querySelectorAll']) { return root.querySelectorAll(arguments[0]); } ";
		script += L"var results = []; Sizzle(arguments[0], root, results);";
		script += L"return results;";
		script += L"};})();";

		clock_t end = endAt(driver);
		int result = EUNHANDLEDERROR;
		do {
			// BUG FIX: initialise 'args'; on allocation failure the old code
			// passed this uninitialized pointer to wdFreeScriptArgs.
			ScriptArgs* args = NULL;
			result = wdNewScriptArgs(&args, 2);
			if (result != SUCCESS) {
				continue;
			}

			result = wdAddStringScriptArg(args, selector);
			if (result != SUCCESS) {
				wdFreeScriptArgs(args);
				continue;
			}

			result = wdAddElementScriptArg(args, element);
			if (result != SUCCESS) {
				wdFreeScriptArgs(args);
				continue;
			}

			// BUG FIX: initialise so a failed wdExecuteScript cannot lead to
			// freeing an indeterminate pointer below.
			ScriptResult* queryResult = NULL;
			result = wdExecuteScript(driver, script.c_str(), args, &queryResult);
			wdFreeScriptArgs(args);

			// And be done
			if (result != SUCCESS) {
				wdFreeScriptResult(queryResult);
				return result;
			}

			ElementCollection* elements = new ElementCollection();
			elements->elements = new std::vector<ElementWrapper*>();

			int length;
			result = wdGetArrayLengthScriptResult(driver, queryResult, &length);
			if (result != SUCCESS) {
				wdFreeScriptResult(queryResult);
				return result;
			}

			for (long i = 0; i < length; i++) {
				ScriptResult* getElemRes;
				wdGetArrayItemFromScriptResult(driver, queryResult, i, &getElemRes);
				WebElement* e;
				wdGetElementScriptResult(getElemRes, driver, &e);
				elements->elements->push_back(e->element);
				e->element = NULL;
				delete e;
				// LEAK FIX: each per-item ScriptResult was never released.
				wdFreeScriptResult(getElemRes);
			}
			wdFreeScriptResult(queryResult);

			*out = elements;
			return SUCCESS;
		} while (clock() < end);

		return result;
	} END_TRY;
}
// Finds the first link whose text matches exactly, polling until the
// implicit-wait deadline.
int wdFindElementByLinkText(WebDriver* driver, WebElement* element, const wchar_t* linkText, WebElement** result)
{
	*result = NULL;
	if (!driver || !driver->ie) { return ENOSUCHDRIVER; }

	InternetExplorerDriver* ie = driver->ie;
	CComPtr<IHTMLElement> elem;
	if (element && element->element) {
		elem = element->element->getWrappedElement();
	}

	try {
		clock_t end = endAt(driver);
		int res = ENOSUCHELEMENT;
		do {
			ElementWrapper* wrapper;
			// BUG FIX: was 'int res = ...', shadowing the outer 'res' so the
			// post-loop return always reported ENOSUCHELEMENT instead of the
			// selector's last real error code.
			res = ie->selectElementByLink(elem, linkText, &wrapper);

			if (res != SUCCESS) {
				continue;
			}

			WebElement* toReturn = new WebElement();
			toReturn->element = wrapper;

			*result = toReturn;
			return SUCCESS;
		} while (clock() < end);

		return res;
	} END_TRY;
}
// Finds all links whose text matches exactly; always returns a (possibly
// empty) collection once the implicit wait expires.
int wdFindElementsByLinkText(WebDriver* driver, WebElement* element, const wchar_t* linkText, ElementCollection** result)
{
	*result = NULL;
	if (!driver || !driver->ie) { return ENOSUCHDRIVER; }

	try {
		InternetExplorerDriver* ie = driver->ie;
		CComPtr<IHTMLElement> elem;
		if (element && element->element) {
			elem = element->element->getWrappedElement();
		}

		ElementCollection* collection = new ElementCollection();
		*result = collection;

		clock_t end = endAt(driver);
		do {
			collection->elements = driver->ie->selectElementsByLink(elem, linkText);
			if (collection->elements->size() > 0) {
				return SUCCESS;
			}
		} while (clock() < end);

		return SUCCESS;
	} END_TRY;
}
// Finds the first link whose text contains 'linkText', polling until the
// implicit-wait deadline.
int wdFindElementByPartialLinkText(WebDriver* driver, WebElement* element, const wchar_t* linkText, WebElement** result)
{
	*result = NULL;
	if (!driver || !driver->ie) { return ENOSUCHDRIVER; }

	InternetExplorerDriver* ie = driver->ie;
	CComPtr<IHTMLElement> elem;
	if (element && element->element) {
		elem = element->element->getWrappedElement();
	}

	try {
		clock_t end = endAt(driver);
		int res = ENOSUCHELEMENT;
		do {
			ElementWrapper* wrapper;
			// BUG FIX: was 'int res = ...', shadowing the outer 'res' so the
			// post-loop return always reported ENOSUCHELEMENT instead of the
			// selector's last real error code.
			res = ie->selectElementByPartialLink(elem, linkText, &wrapper);

			if (res != SUCCESS) {
				continue;
			}

			WebElement* toReturn = new WebElement();
			toReturn->element = wrapper;

			*result = toReturn;
			return SUCCESS;
		} while (clock() < end);

		return res;
	} END_TRY;
}
// Finds all elements whose link text contains linkText as a substring.
// Always returns SUCCESS once the driver is valid; an empty collection means
// nothing matched before the implicit-wait deadline.
int wdFindElementsByPartialLinkText(WebDriver* driver, WebElement* element, const wchar_t* linkText, ElementCollection** result)
{
*result = NULL;
if (!driver || !driver->ie) { return ENOSUCHDRIVER; }
try {
InternetExplorerDriver* ie = driver->ie;
CComPtr<IHTMLElement> elem;
// Scope the search to the given element when one is supplied.
if (element && element->element) {
elem = element->element->getWrappedElement();
}
ElementCollection* collection = new ElementCollection();
*result = collection;
clock_t end = endAt(driver);
do {
// Poll until at least one match appears or the deadline passes.
collection->elements = driver->ie->selectElementsByPartialLink(elem, linkText);
if (collection->elements->size() > 0) {
return SUCCESS;
}
} while (clock() < end);
return SUCCESS;
} END_TRY;
}
// Finds the first element with the given tag name, polling until the
// implicit-wait deadline. On success *result owns a new WebElement; otherwise
// *result stays NULL and the last error code is returned.
int wdFindElementByTagName(WebDriver* driver, WebElement* element, const wchar_t* name, WebElement** result)
{
    *result = NULL;
    if (!driver || !driver->ie) { return ENOSUCHDRIVER; }
    InternetExplorerDriver* ie = driver->ie;
    CComPtr<IHTMLElement> elem;
    // Scope the search to the given element when one is supplied.
    if (element && element->element) {
        elem = element->element->getWrappedElement();
    }
    try {
        clock_t end = endAt(driver);
        int res = ENOSUCHELEMENT;
        do {
            ElementWrapper* wrapper;
            // Bug fix: the original redeclared 'int res' here, shadowing the
            // outer variable, so the function always returned the initial
            // ENOSUCHELEMENT instead of the last real failure code.
            res = ie->selectElementByTagName(elem, name, &wrapper);
            if (res != SUCCESS) {
                continue;
            }
            WebElement* toReturn = new WebElement();
            toReturn->element = wrapper;
            *result = toReturn;
            return SUCCESS;
        } while (clock() < end);
        return res;
    } END_TRY;
}
// Finds all elements with the given tag name. Always returns SUCCESS once the
// driver is valid; an empty collection means nothing matched before the
// implicit-wait deadline. *result owns a new ElementCollection.
int wdFindElementsByTagName(WebDriver* driver, WebElement* element, const wchar_t* name, ElementCollection** result)
{
*result = NULL;
if (!driver || !driver->ie) { return ENOSUCHDRIVER; }
try {
InternetExplorerDriver* ie = driver->ie;
CComPtr<IHTMLElement> elem;
// Scope the search to the given element when one is supplied.
if (element && element->element) {
elem = element->element->getWrappedElement();
}
ElementCollection* collection = new ElementCollection();
*result = collection;
clock_t end = endAt(driver);
do {
// Poll until at least one match appears or the deadline passes.
collection->elements = driver->ie->selectElementsByTagName(elem, name);
if (collection->elements->size() > 0) {
return SUCCESS;
}
} while (clock() < end);
return SUCCESS;
} END_TRY;
}
// Injects the bundled JavaScript XPath engine (XPATHJS, a NULL-terminated
// array of string fragments) into the current page by executing it as a
// script with no arguments. Returns the script-execution result code.
int injectXPathEngine(WebDriver* driver)
{
// Inject the XPath engine
std::wstring script;
// XPATHJS is split into chunks; concatenate them into one script source.
for (int i = 0; XPATHJS[i]; i++) {
script += XPATHJS[i];
}
ScriptArgs* args;
int result = wdNewScriptArgs(&args, 0);
if (result != SUCCESS) {
return result;
}
ScriptResult* scriptResult = NULL;
result = wdExecuteScript(driver, script.c_str(), args, &scriptResult);
wdFreeScriptArgs(args);
// NOTE(review): raw 'delete' rather than wdFreeScriptResult — presumably
// equivalent here, but confirm against the ScriptResult free routine.
if (scriptResult) delete scriptResult;
return result;
}
// Finds the first element matching the XPath expression, polling until the
// implicit-wait deadline. The query runs through the injected JS XPath engine
// (document.__webdriver_evaluate) with snapshot mode 7. On success *out owns
// a new WebElement.
int wdFindElementByXPath(WebDriver* driver, WebElement* element, const wchar_t* xpath, WebElement** out)
{
*out = NULL;
if (!driver || !driver->ie) { return ENOSUCHDRIVER; }
try {
clock_t end = endAt(driver);
int result = ENOSUCHELEMENT;
do {
// Re-inject each iteration; navigation can wipe the engine.
result = injectXPathEngine(driver);
// TODO(simon): Why does the injecting sometimes fail?
/*
if (result != SUCCESS) {
return result;
}
*/
// Call it
std::wstring query;
// With a context element the search is rooted at arguments[1];
// otherwise it is rooted at the document.
if (element) {
query += L"(function() { return function(){var res = document.__webdriver_evaluate(arguments[0], arguments[1], null, 7, null); return res.snapshotItem(0) ;};})();";
} else {
query += L"(function() { return function(){var res = document.__webdriver_evaluate(arguments[0], document, null, 7, null); return res.snapshotLength != 0 ? res.snapshotItem(0) : undefined ;};})();";
}
ScriptArgs* queryArgs;
result = wdNewScriptArgs(&queryArgs, 2);
if (result != SUCCESS) {
wdFreeScriptArgs(queryArgs);
continue;
}
result = wdAddStringScriptArg(queryArgs, xpath);
if (result != SUCCESS) {
wdFreeScriptArgs(queryArgs);
continue;
}
if (element) {
result = wdAddElementScriptArg(queryArgs, element);
}
if (result != SUCCESS) {
wdFreeScriptArgs(queryArgs);
continue;
}
ScriptResult* queryResult;
result = wdExecuteScript(driver, query.c_str(), queryArgs, &queryResult);
wdFreeScriptArgs(queryArgs);
// And be done
if (result == SUCCESS) {
int type = 0;
result = wdGetScriptResultType(driver, queryResult, &type);
// TYPE_EMPTY means the XPath matched nothing; retry until deadline.
if (type != TYPE_EMPTY) {
result = wdGetElementScriptResult(queryResult, driver, out);
} else {
result = ENOSUCHELEMENT;
wdFreeScriptResult(queryResult);
continue;
}
}
wdFreeScriptResult(queryResult);
return result;
} while (clock() < end);
return result;
} END_TRY;
}
// Finds all elements matching the XPath expression via the injected JS XPath
// engine. Evaluates a snapshot, reads its snapshotLength, then pulls each
// element out with iterateNext(). On success *out owns a new ElementCollection.
int wdFindElementsByXPath(WebDriver* driver, WebElement* element, const wchar_t* xpath, ElementCollection** out)
{
    *out = NULL;
    if (!driver || !driver->ie) { return ENOSUCHDRIVER; }
    try {
        clock_t end = endAt(driver);
        int result = EUNHANDLEDERROR;
        do {
            result = injectXPathEngine(driver);
            if (result != SUCCESS) {
                continue;
            }
            // Build the snapshot query; rooted at arguments[1] when a context
            // element is given, otherwise at the document.
            std::wstring query;
            if (element)
                query += L"(function() { return function() {var res = document.__webdriver_evaluate(arguments[0], arguments[1], null, 7, null); return res;};})();";
            else
                query += L"(function() { return function() {var res = document.__webdriver_evaluate(arguments[0], document, null, 7, null); return res;};})();";
            // We need to use the raw functions because we don't allow random
            // objects to be returned from the executeScript method normally.
            SAFEARRAYBOUND bounds;
            bounds.cElements = 2;
            bounds.lLbound = 0;
            SAFEARRAY* queryArgs = SafeArrayCreate(VT_VARIANT, 1, &bounds);
            CComVariant queryArg(xpath);
            LONG index = 0;
            SafeArrayPutElement(queryArgs, &index, &queryArg);
            if (element) {
                CComVariant elementArg(element->element->getWrappedElement());
                LONG elementIndex = 1;
                SafeArrayPutElement(queryArgs, &elementIndex, &elementArg);
            }
            CComVariant snapshot;
            result = driver->ie->executeScript(query.c_str(), queryArgs, &snapshot);
            SafeArrayDestroy(queryArgs);
            if (result != SUCCESS) {
                continue;
            }
            // Ask the snapshot how many nodes it holds.
            bounds.cElements = 1;
            SAFEARRAY* lengthArgs = SafeArrayCreate(VT_VARIANT, 1, &bounds);
            index = 0;
            SafeArrayPutElement(lengthArgs, &index, &snapshot);
            CComVariant lengthVar;
            result = driver->ie->executeScript(L"(function(){return function() {return arguments[0].snapshotLength;}})();", lengthArgs, &lengthVar);
            SafeArrayDestroy(lengthArgs);
            if (result != SUCCESS) {
                continue;
            }
            if (lengthVar.vt != VT_I4) {
                result = EUNEXPECTEDJSERROR;
                continue;
            }
            long length = lengthVar.lVal;
            ElementCollection* elements = new ElementCollection();
            elements->elements = new std::vector<ElementWrapper*>();
            for (long i = 0; i < length; i++) {
                ScriptArgs* getElemArgs;
                wdNewScriptArgs(&getElemArgs, 2);
                // Cheat: put the snapshot variant straight into slot 0.
                LONG argIndex = 0;
                SafeArrayPutElement(getElemArgs->args, &argIndex, &snapshot);
                getElemArgs->currentIndex++;
                wdAddNumberScriptArg(getElemArgs, i);
                ScriptResult* getElemRes;
                wdExecuteScript(driver, L"(function(){return function() {return arguments[0].iterateNext();}})();", getElemArgs, &getElemRes);
                WebElement* e;
                wdGetElementScriptResult(getElemRes, driver, &e);
                elements->elements->push_back(e->element);
                wdFreeScriptArgs(getElemArgs);
                // NOTE(review): getElemRes and the WebElement shell 'e' are
                // still leaked, as in the original; freeing them safely needs
                // ownership rules not visible from this function.
            }
            // Bug fixes vs. original: (1) it called SafeArrayDestroy(queryArgs)
            // a second time here, double-destroying the array released above;
            // (2) it created an unused 'snapshotArgs' SAFEARRAY that leaked.
            *out = elements;
            return SUCCESS;
        } while (clock() < end);
        return result;
    } END_TRY;
}
// Allocates a ScriptArgs holder backed by a VT_VARIANT SAFEARRAY with room
// for maxLength arguments. The caller owns the result and must release it
// with wdFreeScriptArgs.
int wdNewScriptArgs(ScriptArgs** scriptArgs, int maxLength)
{
    *scriptArgs = NULL;
    SAFEARRAYBOUND bounds;
    bounds.cElements = maxLength;
    bounds.lLbound = 0;
    ScriptArgs* created = new ScriptArgs();
    created->currentIndex = 0;
    created->maxLength = maxLength;
    created->args = SafeArrayCreate(VT_VARIANT, 1, &bounds);
    *scriptArgs = created;
    return SUCCESS;
}
// Appends a string argument (as a BSTR variant) at the next free slot.
// SafeArrayPutElement copies the variant, so the local may go out of scope.
int wdAddStringScriptArg(ScriptArgs* scriptArgs, const wchar_t* arg)
{
    // Removed an unused local std::wstring copy of 'arg' from the original.
    CComVariant dest(arg);
    LONG index = scriptArgs->currentIndex;
    SafeArrayPutElement(scriptArgs->args, &index, &dest);
    scriptArgs->currentIndex++;
    return SUCCESS;
}
// Appends a boolean argument at the next free slot.
// Bug fix: VARIANT_BOOL truth is VARIANT_TRUE (-1), not 1. The original
// assigned the result of (trueOrFalse == 1), which stores 1 — a value that
// strict VT_BOOL consumers may not treat as true.
int wdAddBooleanScriptArg(ScriptArgs* scriptArgs, int trueOrFalse)
{
    VARIANT dest;
    VariantInit(&dest);
    dest.vt = VT_BOOL;
    dest.boolVal = (trueOrFalse == 1) ? VARIANT_TRUE : VARIANT_FALSE;
    LONG index = scriptArgs->currentIndex;
    SafeArrayPutElement(scriptArgs->args, &index, &dest);
    scriptArgs->currentIndex++;
    return SUCCESS;
}
// Appends a 32-bit integer argument (VT_I4) at the next free slot.
int wdAddNumberScriptArg(ScriptArgs* scriptArgs, long number)
{
    VARIANT numberVariant;
    numberVariant.vt = VT_I4;
    numberVariant.lVal = (LONG) number;
    LONG slot = scriptArgs->currentIndex;
    SafeArrayPutElement(scriptArgs->args, &slot, &numberVariant);
    scriptArgs->currentIndex++;
    return SUCCESS;
}
// Appends a double-precision argument (VT_R8) at the next free slot.
int wdAddDoubleScriptArg(ScriptArgs* scriptArgs, double number)
{
    VARIANT doubleVariant;
    doubleVariant.vt = VT_R8;
    doubleVariant.dblVal = (DOUBLE) number;
    LONG slot = scriptArgs->currentIndex;
    SafeArrayPutElement(scriptArgs->args, &slot, &doubleVariant);
    scriptArgs->currentIndex++;
    return SUCCESS;
}
// Appends an element argument (VT_DISPATCH), or VT_EMPTY for a null element.
int wdAddElementScriptArg(ScriptArgs* scriptArgs, WebElement* element)
{
    VARIANT dest;
    // Bug fix: the original called VariantClear on an *uninitialized* VARIANT,
    // which reads the garbage vt field — undefined behavior. VariantInit is
    // the correct way to establish a known-empty variant.
    VariantInit(&dest);
    if (!element || !element->element) {
        dest.vt = VT_EMPTY;
    } else {
        dest.vt = VT_DISPATCH;
        dest.pdispVal = element->element->getWrappedElement();
    }
    LONG index = scriptArgs->currentIndex;
    SafeArrayPutElement(scriptArgs->args, &index, &dest);
    scriptArgs->currentIndex++;
    return SUCCESS;
}
// Executes a script with the given arguments and wraps the resulting variant
// in a heap-allocated ScriptResult owned by the caller.
int wdExecuteScript(WebDriver* driver, const wchar_t* script, ScriptArgs* scriptArgs, ScriptResult** scriptResultRef)
{
try {
*scriptResultRef = NULL;
CComVariant result;
int res = driver->ie->executeScript(script, scriptArgs->args, &result);
if (res != SUCCESS) {
return res;
}
ScriptResult* toReturn = new ScriptResult();
HRESULT hr = VariantCopy(&(toReturn->result), &result);
// VT_USERDEFINED carries a script exception; VariantCopy rejects it,
// so copy the BSTR payload manually in that case.
if (!SUCCEEDED(hr) && result.vt == VT_USERDEFINED) {
// Special handling of the user defined path *sigh*
toReturn->result.vt = VT_USERDEFINED;
toReturn->result.bstrVal = CComBSTR(result.bstrVal);
}
*scriptResultRef = toReturn;
return SUCCESS;
} END_TRY;
}
// Maps a script result's VARIANT type onto the driver's TYPE_* constants so
// the caller knows which wdGet*ScriptResult accessor to use.
int wdGetScriptResultType(WebDriver* driver, ScriptResult* result, int* type)
{
if (!result) { return ENOSCRIPTRESULT; }
switch (result->result.vt) {
case VT_BSTR:
*type = TYPE_STRING;
break;
case VT_I4:
case VT_I8:
*type = TYPE_LONG;
break;
case VT_BOOL:
*type = TYPE_BOOLEAN;
break;
case VT_DISPATCH:
{
// A dispatch pointer may be a DOM element or a JS array/collection;
// ask the driver which, based on the object's script-side type name.
LPCWSTR itemType = driver->ie->getScriptResultType(&(result->result));
std::string itemTypeStr;
cw2string(itemType, itemTypeStr);
LOG(DEBUG) << "Got type: " << itemTypeStr;
// If it's a Javascript array or an HTML Collection - type 8 will
// indicate the driver that this is ultimately an array.
if ((itemTypeStr == "JavascriptArray") ||
(itemTypeStr == "HtmlCollection")) {
*type = TYPE_ARRAY;
} else {
*type = TYPE_ELEMENT;
}
}
break;
case VT_EMPTY:
*type = TYPE_EMPTY;
break;
case VT_USERDEFINED:
// Used by wdExecuteScript to smuggle a script exception message.
*type = TYPE_EXCEPTION;
break;
case VT_R4:
case VT_R8:
*type = TYPE_DOUBLE;
break;
default:
return EUNKNOWNSCRIPTRESULT;
}
return SUCCESS;
}
// Copies a BSTR script result into a caller-owned StringWrapper. A NULL BSTR
// (which COM treats as the empty string) becomes an empty wide string.
int wdGetStringScriptResult(ScriptResult* result, StringWrapper** wrapper)
{
*wrapper = NULL;
if (!result) { return ENOSCRIPTRESULT; }
StringWrapper* toReturn = new StringWrapper();
BSTR val = result->result.bstrVal;
if (!val) {
toReturn->text = new wchar_t[1];
wcscpy_s(toReturn->text, 1, L"");
} else {
// SysStringLen gives the character count excluding the terminator.
UINT length = SysStringLen(val);
toReturn->text = new wchar_t[length + 1];
wcscpy_s(toReturn->text, length + 1, val);
}
*wrapper = toReturn;
return SUCCESS;
}
// Extracts an integer (VT_I4) script result into *value.
int wdGetNumberScriptResult(ScriptResult* result, long* value)
{
    if (result == NULL) {
        return ENOSCRIPTRESULT;
    }
    *value = result->result.lVal;
    return SUCCESS;
}
// Extracts a floating-point (VT_R8) script result into *value.
int wdGetDoubleScriptResult(ScriptResult* result, double* value)
{
    if (result == NULL) {
        return ENOSCRIPTRESULT;
    }
    *value = result->result.dblVal;
    return SUCCESS;
}
// Extracts a boolean script result into *value as 1 (true) or 0 (false).
int wdGetBooleanScriptResult(ScriptResult* result, int* value)
{
    if (result == NULL) {
        return ENOSCRIPTRESULT;
    }
    if (result->result.boolVal == VARIANT_TRUE) {
        *value = 1;
    } else {
        *value = 0;
    }
    return SUCCESS;
}
// Wraps a VT_DISPATCH script result as a caller-owned WebElement.
// NOTE(review): the IDispatch pointer is cast without QueryInterface or an
// explicit AddRef here — presumably ElementWrapper takes its own reference;
// confirm against that constructor.
int wdGetElementScriptResult(ScriptResult* result, WebDriver* driver, WebElement** element)
{
*element = NULL;
if (!result) { return ENOSCRIPTRESULT; }
IHTMLElement *node = (IHTMLElement*) result->result.pdispVal;
WebElement* toReturn = new WebElement();
toReturn->element = new ElementWrapper(driver->ie, node);
*element = toReturn;
return SUCCESS;
}
// Reads the .length property of an array-typed script result by running a
// small helper script against it. Fails with EUNEXPECTEDJSERROR when the
// value is not an array (length comes back as a non-integer).
int wdGetArrayLengthScriptResult(WebDriver* driver, ScriptResult* result,
int* length)
{
// Prepare an array for the Javascript execution, containing only one
// element - the original returned array from a JS execution.
SAFEARRAYBOUND lengthQuery;
lengthQuery.cElements = 1;
lengthQuery.lLbound = 0;
SAFEARRAY* lengthArgs = SafeArrayCreate(VT_VARIANT, 1, &lengthQuery);
LONG index = 0;
SafeArrayPutElement(lengthArgs, &index, &(result->result));
CComVariant lengthVar;
int lengthResult = driver->ie->executeScript(
L"(function(){return function() {return arguments[0].length;}})();",
lengthArgs, &lengthVar);
SafeArrayDestroy(lengthArgs);
if (lengthResult != SUCCESS) {
return lengthResult;
}
// Expect the return type to be an integer. A non-integer means this was
// not an array after all.
if (lengthVar.vt != VT_I4) {
return EUNEXPECTEDJSERROR;
}
*length = lengthVar.lVal;
return SUCCESS;
}
// Extracts element [index] of an array-typed script result, returning it as a
// fresh caller-owned ScriptResult in *arrayItem.
int wdGetArrayItemFromScriptResult(WebDriver* driver, ScriptResult* result,
int index, ScriptResult** arrayItem)
{
// Prepare an array for Javascript execution. The array contains the original
// array returned from a previous execution and the index of the item required
// from that array.
ScriptArgs* getItemArgs;
wdNewScriptArgs(&getItemArgs, 2);
LONG argIndex = 0;
// Original array.
SafeArrayPutElement(getItemArgs->args, &argIndex, &(result->result));
getItemArgs->currentIndex++;
// Item index
wdAddNumberScriptArg(getItemArgs, index);
int execRes = wdExecuteScript(
driver,
L"(function(){return function() {return arguments[0][arguments[1]];}})();",
getItemArgs, arrayItem);
wdFreeScriptArgs(getItemArgs);
getItemArgs = NULL;
return execRes;
}
// Presses the left mouse button at window-relative coordinates.
// NOTE(review): MOUSEBUTTON_LFET reads like a typo for "LEFT", but the
// constant is declared elsewhere; any rename must happen at its definition.
int wdeMouseDownAt(HWND hwnd, long windowX, long windowY)
{
mouseDownAt(hwnd, windowX, windowY, MOUSEBUTTON_LFET);
return SUCCESS;
}
// Releases the left mouse button at window-relative coordinates.
int wdeMouseUpAt(HWND hwnd, long windowX, long windowY)
{
mouseUpAt(hwnd, windowX, windowY, MOUSEBUTTON_LFET);
return SUCCESS;
}
// Moves the mouse from (fromX, fromY) to (toX, toY) over 'duration', in
// window-relative coordinates.
int wdeMouseMoveTo(HWND hwnd, long duration, long fromX, long fromY, long toX, long toY)
{
mouseMoveTo(hwnd, duration, fromX, fromY, toX, toY);
return SUCCESS;
}
// Captures a screenshot and returns it base64-encoded in a caller-owned
// StringWrapper.
int wdCaptureScreenshotAsBase64(WebDriver* driver, StringWrapper** result) {
    *result = NULL;
    if (!driver || !driver->ie) return ENOSUCHDRIVER;
    try {
        const std::wstring base64(driver->ie->captureScreenshotAsBase64());
        const size_t bufferSize = base64.length() + 1;
        StringWrapper* wrapper = new StringWrapper();
        wrapper->text = new wchar_t[bufferSize];
        wcscpy_s(wrapper->text, bufferSize, base64.c_str());
        *result = wrapper;
        return SUCCESS;
    } END_TRY;
}
// Sets the implicit-wait timeout (milliseconds) used by the element-finder
// retry loops above.
int wdSetImplicitWaitTimeout(WebDriver* driver, long timeoutInMillis)
{
    if (driver == NULL || driver->ie == NULL) {
        return ENOSUCHDRIVER;
    }
    driver->implicitWaitTimeout = timeoutInMillis;
    return SUCCESS;
}
}
| zerodiv/CTM-Windows-Agent | Continuum_Windows_Testing_Agent/Vendor/jobbie/src/cpp/InternetExplorerDriver/webdriver.cpp | C++ | apache-2.0 | 47,123 |
/**
* Copyright 2014 Nortal AS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nortal.petit.orm.statement;
import java.util.List;
import org.springframework.util.CollectionUtils;
/**
* @author Lauri Lรคttemรคe (lauri.lattemae@nortal.com)
* @created 29.04.2013
*/
public abstract class ExecutableStatement<B> extends SimpleStatement<B> {
    /**
     * Returns the statement's SQL with parameter values substituted in.
     * When the statement targets multiple beans, one SQL line per bean is
     * emitted; otherwise a single line is produced.
     *
     * @return the rendered SQL, newline-terminated per statement
     */
    @Override
    public String getSqlWithParams() {
        prepare();
        // StringBuilder instead of StringBuffer: the buffer is method-local,
        // so StringBuffer's synchronization is pure overhead.
        StringBuilder sb = new StringBuilder();
        if (!CollectionUtils.isEmpty(getBeans())) {
            for (B bean : getBeans()) {
                prepare(bean);
                sb.append(super.getSqlWithParams()).append("\n");
            }
        } else {
            sb.append(super.getSqlWithParams()).append("\n");
        }
        return sb.toString();
    }

    /** Beans this statement operates on; may be null or empty. */
    protected abstract List<B> getBeans();

    /** Binds one bean's values into the statement before rendering. */
    protected abstract void prepare(B bean);

    /** Executes the statement against the database. */
    public abstract void exec();
}
| jimmytheneutrino/petit | modules/orm/src/main/java/com/nortal/petit/orm/statement/ExecutableStatement.java | Java | apache-2.0 | 1,555 |
/*
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.javascript.jscomp.ControlFlowGraph.Branch;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.jscomp.NodeTraversal.ScopedCallback;
import com.google.javascript.jscomp.graph.GraphReachability;
import com.google.javascript.jscomp.graph.DiGraph.DiGraphEdge;
import com.google.javascript.jscomp.graph.DiGraph.DiGraphNode;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.Deque;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Removes dead code from a parse tree. The kinds of dead code that this pass
* removes are:
* - Any code following a return statement, such as the <code>alert</code>
* call in: <code>if (x) { return; alert('unreachable'); }</code>.
* - Statements that have no side effects, such as:
* <code>a.b.MyClass.prototype.propertyName;</code> or <code>true;</code>.
* That first kind of statement sometimes appears intentionally, so that
* prototype properties can be annotated using JSDoc without actually
* being initialized.
*
*/
class UnreachableCodeElimination extends AbstractPostOrderCallback
implements CompilerPass, ScopedCallback {
private static final Logger logger =
Logger.getLogger(UnreachableCodeElimination.class.getName());
private final AbstractCompiler compiler;
// When true, reachable statements with no side effects are removed too.
private final boolean removeNoOpStatements;
// CFGs of enclosing scopes, pushed/popped as the traversal enters and
// exits function scopes.
Deque<ControlFlowGraph<Node>> cfgStack =
new LinkedList<ControlFlowGraph<Node>>();
// CFG of the scope currently being traversed.
ControlFlowGraph<Node> curCfg = null;
UnreachableCodeElimination(AbstractCompiler compiler,
boolean removeNoOpStatements) {
this.compiler = compiler;
this.removeNoOpStatements = removeNoOpStatements;
}
@Override
public void enterScope(NodeTraversal t) {
Scope scope = t.getScope();
// Computes the control flow graph.
ControlFlowAnalysis cfa = new ControlFlowAnalysis(compiler, false);
cfa.process(null, scope.getRootNode());
cfgStack.push(curCfg);
curCfg = cfa.getCfg();
// Mark every CFG node reachable from the scope's entry; nodes left
// unmarked are dead code candidates in visit().
new GraphReachability<Node, ControlFlowGraph.Branch>(curCfg)
.compute(curCfg.getEntry().getValue());
}
@Override
public void exitScope(NodeTraversal t) {
// Restore the CFG of the enclosing scope.
curCfg = cfgStack.pop();
}
@Override
public void process(Node externs, Node root) {
NodeTraversal.traverse(compiler, root, this);
}
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
if (parent == null) {
return;
}
// Function/script roots anchor their own scopes; never remove them here.
if (n.getType() == Token.FUNCTION || n.getType() == Token.SCRIPT) {
return;
}
// Removes TRYs that had its CATCH removed and/or empty FINALLY.
// TODO(dcc): Move the parts of this that don't require a control flow
// graph to PeepholeRemoveDeadCode
if (n.getType() == Token.TRY) {
Node body = n.getFirstChild();
Node catchOrFinallyBlock = body.getNext();
Node finallyBlock = catchOrFinallyBlock.getNext();
if (!catchOrFinallyBlock.hasChildren() &&
(finallyBlock == null || !finallyBlock.hasChildren())) {
n.removeChild(body);
parent.replaceChild(n, body);
compiler.reportCodeChange();
n = body;
}
}
DiGraphNode<Node, Branch> gNode = curCfg.getDirectedGraphNode(n);
if (gNode == null) { // Not in CFG.
return;
}
// Unreachable nodes (and, optionally, reachable no-ops) are removed.
if (gNode.getAnnotation() != GraphReachability.REACHABLE ||
(removeNoOpStatements && !NodeUtil.mayHaveSideEffects(n))) {
removeDeadExprStatementSafely(n);
return;
}
tryRemoveUnconditionalBranching(n);
}
/**
* Tries to remove n if an unconditional branch node (break, continue or
* return) if the target of n is the same as the the follow of n. That is, if
* we remove n, the control flow remains the same. Also if n targets to
* another unconditional branch, this function will recursively try to remove
* the target branch as well. The reason why we want to cascade this removal
* is because we only run this pass once. If we have code such as
*
* break -> break -> break
*
* where all 3 break's are useless. The order of removal matters. When we
* first look at the first break, we see that it branches to the 2nd break.
* However, if we remove the last break, the 2nd break becomes useless and
* finally the first break becomes useless as well.
*
* @return The target of this jump. If the target is also useless jump,
* the target of that useless jump recursively.
*/
@SuppressWarnings("fallthrough")
private Node tryRemoveUnconditionalBranching(Node n) {
/*
* For each of the unconditional branching control flow node, check to see
* if the ControlFlowAnalysis.computeFollowNode of that node is same as
* the branching target. If it is, the branch node is safe to be removed.
*
* This is not as clever as MinimizeExitPoints because it doesn't do any
* if-else conversion but it handles more complicated switch statements
* much nicer.
*/
// If n is null the target is the end of the function, nothing to do.
if (n == null) {
return n;
}
DiGraphNode<Node, Branch> gNode = curCfg.getDirectedGraphNode(n);
if (gNode == null) {
return n;
}
// If the parent is null, this mean whatever node it was there is now
// useless and it has been removed by other logics in this pass. That node
// while no longer exists in the AST, is still in the CFG because we
// never update the graph as nodes are removed.
if (n.getParent() == null) {
List<DiGraphEdge<Node,Branch>> outEdges = gNode.getOutEdges();
if (outEdges.size() == 1) {
return tryRemoveUnconditionalBranching(
outEdges.get(0).getDestination().getValue());
}
}
switch (n.getType()) {
case Token.BLOCK:
if (n.hasChildren()) {
Node first = n.getFirstChild();
return tryRemoveUnconditionalBranching(first);
} else {
return tryRemoveUnconditionalBranching(
ControlFlowAnalysis.computeFollowNode(n));
}
case Token.RETURN:
// A 'return expr' has a value and must be kept; a bare 'return'
// falls through to the BREAK/CONTINUE handling below.
if (n.hasChildren()) {
break;
}
case Token.BREAK:
case Token.CONTINUE:
// We are looking for a control flow changing statement that always
// branches to the same node. If removing it the control flow still
// branches to that same node. It is safe to remove it.
List<DiGraphEdge<Node,Branch>> outEdges = gNode.getOutEdges();
if (outEdges.size() == 1 &&
// If there is a next node, there is no chance this jump is useless.
(n.getNext() == null || n.getNext().getType() == Token.FUNCTION)) {
Preconditions.checkState(outEdges.get(0).getValue() == Branch.UNCOND);
Node fallThrough = tryRemoveUnconditionalBranching(
ControlFlowAnalysis.computeFollowNode(n));
Node nextCfgNode = outEdges.get(0).getDestination().getValue();
if (nextCfgNode == fallThrough) {
removeDeadExprStatementSafely(n);
return fallThrough;
}
}
}
return n;
}
private void removeDeadExprStatementSafely(Node n) {
if (n.getType() == Token.EMPTY ||
(n.getType() == Token.BLOCK && !n.hasChildren())) {
// Not always trivial to remove, let FoldContants work its magic later.
return;
}
// Removing an unreachable DO node is messy because it means we still have
// to execute one iteration. If the DO's body has breaks in the middle, it
// can get even more trickier and code size might actually increase.
switch (n.getType()) {
case Token.DO:
case Token.TRY:
case Token.CATCH:
case Token.FINALLY:
return;
}
// Preserve var declarations hoisted out of the removed branch.
NodeUtil.redeclareVarsInsideBranch(n);
compiler.reportCodeChange();
if (logger.isLoggable(Level.FINE)) {
logger.fine("Removing " + n.toString());
}
NodeUtil.removeChild(n.getParent(), n);
}
}
| antz29/closure-compiler | src/com/google/javascript/jscomp/UnreachableCodeElimination.java | Java | apache-2.0 | 8,731 |
package theinternet.pages;
import com.frameworkium.core.ui.annotations.Visible;
import com.frameworkium.core.ui.pages.BasePage;
import com.frameworkium.core.ui.pages.PageFactory;
import io.qameta.allure.Step;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindBy;
import ru.yandex.qatools.htmlelements.annotations.Name;
import ru.yandex.qatools.htmlelements.element.FileInput;
// Page object for the-internet's /upload page: a file chooser plus a submit
// button. @Visible fields are waited upon by the framework before use.
public class FileUploadPage extends BasePage<FileUploadPage> {
@Visible
@Name("Choose Files button")
@FindBy(css = "input#file-upload")
private FileInput chooseFileInput;
@Visible
@Name("Upload button")
@FindBy(css = "input#file-submit")
private WebElement uploadButton;
// Selects the file at filePath, submits the form, and returns the success
// page object the upload navigates to.
@Step("Upload a file by choosing file and then clicking upload")
public FileUploadSuccessPage uploadFile(String filePath) {
chooseFileInput.setFileToUpload(filePath);
uploadButton.click();
return PageFactory.newInstance(FileUploadSuccessPage.class);
}
}
| Frameworkium/frameworkium | src/test/java/theinternet/pages/FileUploadPage.java | Java | apache-2.0 | 1,003 |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "OpenGLRenderer"
#include "jni.h"
#include "GraphicsJNI.h"
#include <nativehelper/JNIHelp.h>
#include "core_jni_helpers.h"
#include <android_runtime/android_graphics_SurfaceTexture.h>
#include <gui/GLConsumer.h>
#include <Paint.h>
#include <SkBitmap.h>
#include <SkCanvas.h>
#include <SkMatrix.h>
#include <SkXfermode.h>
#include <DeferredLayerUpdater.h>
#include <LayerRenderer.h>
#include <SkiaShader.h>
#include <Rect.h>
#include <RenderNode.h>
namespace android {
using namespace uirenderer;
// Updates the layer's size and blending mode; returns true when either
// property actually changed (callers use this to trigger re-draws).
static jboolean android_view_HardwareLayer_prepare(JNIEnv* env, jobject clazz,
jlong layerUpdaterPtr, jint width, jint height, jboolean isOpaque) {
DeferredLayerUpdater* layer = reinterpret_cast<DeferredLayerUpdater*>(layerUpdaterPtr);
bool changed = false;
changed |= layer->setSize(width, height);
// Opaque layers do not need blending.
changed |= layer->setBlend(!isOpaque);
return changed;
}
// Applies the given Paint (alpha/xfermode/filter) to the layer; a null
// layer pointer is tolerated and ignored.
static void android_view_HardwareLayer_setLayerPaint(JNIEnv* env, jobject clazz,
jlong layerUpdaterPtr, jlong paintPtr) {
DeferredLayerUpdater* layer = reinterpret_cast<DeferredLayerUpdater*>(layerUpdaterPtr);
if (layer) {
Paint* paint = reinterpret_cast<Paint*>(paintPtr);
layer->setPaint(paint);
}
}
// Sets the layer's display transform from a native SkMatrix pointer.
static void android_view_HardwareLayer_setTransform(JNIEnv* env, jobject clazz,
jlong layerUpdaterPtr, jlong matrixPtr) {
DeferredLayerUpdater* layer = reinterpret_cast<DeferredLayerUpdater*>(layerUpdaterPtr);
SkMatrix* matrix = reinterpret_cast<SkMatrix*>(matrixPtr);
layer->setTransform(matrix);
}
// Binds a SurfaceTexture's GLConsumer to the layer; when not already
// attached, the layer attaches it to its own GL context.
static void android_view_HardwareLayer_setSurfaceTexture(JNIEnv* env, jobject clazz,
jlong layerUpdaterPtr, jobject surface, jboolean isAlreadyAttached) {
DeferredLayerUpdater* layer = reinterpret_cast<DeferredLayerUpdater*>(layerUpdaterPtr);
sp<GLConsumer> surfaceTexture(SurfaceTexture_getSurfaceTexture(env, surface));
layer->setSurfaceTexture(surfaceTexture, !isAlreadyAttached);
}
// Requests the layer to latch the most recent SurfaceTexture frame.
static void android_view_HardwareLayer_updateSurfaceTexture(JNIEnv* env, jobject clazz,
jlong layerUpdaterPtr) {
DeferredLayerUpdater* layer = reinterpret_cast<DeferredLayerUpdater*>(layerUpdaterPtr);
layer->updateTexImage();
}
// Returns the GL texture id backing the layer.
static jint android_view_HardwareLayer_getTexName(JNIEnv* env, jobject clazz,
jlong layerUpdaterPtr) {
DeferredLayerUpdater* layer = reinterpret_cast<DeferredLayerUpdater*>(layerUpdaterPtr);
return layer->backingLayer()->getTextureId();
}
// ----------------------------------------------------------------------------
// JNI Glue
// ----------------------------------------------------------------------------
const char* const kClassPathName = "android/view/HardwareLayer";
// Maps the Java nXxx native declarations onto the functions above; the JNI
// signature strings must match the Java-side method signatures exactly.
static JNINativeMethod gMethods[] = {
{ "nPrepare", "(JIIZ)Z", (void*) android_view_HardwareLayer_prepare },
{ "nSetLayerPaint", "(JJ)V", (void*) android_view_HardwareLayer_setLayerPaint },
{ "nSetTransform", "(JJ)V", (void*) android_view_HardwareLayer_setTransform },
{ "nSetSurfaceTexture", "(JLandroid/graphics/SurfaceTexture;Z)V",
(void*) android_view_HardwareLayer_setSurfaceTexture },
{ "nUpdateSurfaceTexture", "(J)V", (void*) android_view_HardwareLayer_updateSurfaceTexture },
{ "nGetTexName", "(J)I", (void*) android_view_HardwareLayer_getTexName },
};
// Registers the table above with the VM; aborts on failure.
int register_android_view_HardwareLayer(JNIEnv* env) {
return RegisterMethodsOrDie(env, kClassPathName, gMethods, NELEM(gMethods));
}
};
| Ant-Droid/android_frameworks_base_OLD | core/jni/android_view_HardwareLayer.cpp | C++ | apache-2.0 | 4,154 |
import { Type } from 'angular2/src/facade/lang';
import { CanActivate } from './lifecycle_annotations_impl';
import { reflector } from 'angular2/src/core/reflection/reflection';
// Reports whether the given type declares the lifecycle hook `e`: only
// component types (instances of Type) can, and the hook exists when its name
// is a property on the type's prototype.
export function hasLifecycleHook(e, type) {
    if (type instanceof Type) {
        return e.name in type.prototype;
    }
    return false;
}
// Returns the guard function of the type's CanActivate annotation, or null
// when the type carries no such annotation.
export function getCanActivateHook(type) {
    const annotations = reflector.annotations(type);
    for (const annotation of annotations) {
        if (annotation instanceof CanActivate) {
            return annotation.fn;
        }
    }
    return null;
}
//# sourceMappingURL=route_lifecycle_reflector.js.map
| aayushkapoor206/whatshot | node_modules/angular2/es6/prod/src/router/route_lifecycle_reflector.js | JavaScript | apache-2.0 | 677 |
/**
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.flowable;
import java.util.concurrent.atomic.AtomicReference;
import org.reactivestreams.*;
import io.reactivex.*;
import io.reactivex.annotations.Nullable;
import io.reactivex.disposables.*;
import io.reactivex.exceptions.Exceptions;
import io.reactivex.functions.Function;
import io.reactivex.internal.disposables.DisposableHelper;
import io.reactivex.internal.functions.ObjectHelper;
import io.reactivex.internal.subscriptions.*;
import io.reactivex.internal.util.AtomicThrowable;
import io.reactivex.plugins.RxJavaPlugins;
/**
* Maps a sequence of values into CompletableSources and awaits their termination.
* @param <T> the value type
*/
public final class FlowableFlatMapCompletable<T> extends AbstractFlowableWithUpstream<T, T> {
final Function<? super T, ? extends CompletableSource> mapper;
final int maxConcurrency;
final boolean delayErrors;
// Maps each upstream value to a CompletableSource; up to maxConcurrency
// sources run concurrently, and when delayErrors is true errors are held
// until all sources terminate.
public FlowableFlatMapCompletable(Flowable<T> source,
Function<? super T, ? extends CompletableSource> mapper, boolean delayErrors,
int maxConcurrency) {
super(source);
this.mapper = mapper;
this.delayErrors = delayErrors;
this.maxConcurrency = maxConcurrency;
}
@Override
protected void subscribeActual(Subscriber<? super T> subscriber) {
source.subscribe(new FlatMapCompletableMainSubscriber<T>(subscriber, mapper, delayErrors, maxConcurrency));
}
static final class FlatMapCompletableMainSubscriber<T> extends BasicIntQueueSubscription<T>
implements FlowableSubscriber<T> {
private static final long serialVersionUID = 8443155186132538303L;
final Subscriber<? super T> downstream;
final AtomicThrowable errors;
final Function<? super T, ? extends CompletableSource> mapper;
final boolean delayErrors;
final CompositeDisposable set;
final int maxConcurrency;
Subscription upstream;
volatile boolean cancelled;
FlatMapCompletableMainSubscriber(Subscriber<? super T> subscriber,
Function<? super T, ? extends CompletableSource> mapper, boolean delayErrors,
int maxConcurrency) {
this.downstream = subscriber;
this.mapper = mapper;
this.delayErrors = delayErrors;
this.errors = new AtomicThrowable();
this.set = new CompositeDisposable();
this.maxConcurrency = maxConcurrency;
// The inherited atomic counter starts at 1 for the upstream itself; each
// active inner CompletableSource adds 1, and termination is signalled
// downstream when it returns to zero.
this.lazySet(1);
}
@Override
public void onSubscribe(Subscription s) {
if (SubscriptionHelper.validate(this.upstream, s)) {
this.upstream = s;
downstream.onSubscribe(this);
int m = maxConcurrency;
if (m == Integer.MAX_VALUE) {
s.request(Long.MAX_VALUE);
} else {
s.request(m);
}
}
}
@Override
public void onNext(T value) {
CompletableSource cs;
try {
cs = ObjectHelper.requireNonNull(mapper.apply(value), "The mapper returned a null CompletableSource");
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
upstream.cancel();
onError(ex);
return;
}
getAndIncrement();
InnerConsumer inner = new InnerConsumer();
if (!cancelled && set.add(inner)) {
cs.subscribe(inner);
}
}
@Override
public void onError(Throwable e) {
if (errors.addThrowable(e)) {
if (delayErrors) {
if (decrementAndGet() == 0) {
Throwable ex = errors.terminate();
downstream.onError(ex);
} else {
if (maxConcurrency != Integer.MAX_VALUE) {
upstream.request(1);
}
}
} else {
cancel();
if (getAndSet(0) > 0) {
Throwable ex = errors.terminate();
downstream.onError(ex);
}
}
} else {
RxJavaPlugins.onError(e);
}
}
@Override
public void onComplete() {
if (decrementAndGet() == 0) {
Throwable ex = errors.terminate();
if (ex != null) {
downstream.onError(ex);
} else {
downstream.onComplete();
}
} else {
if (maxConcurrency != Integer.MAX_VALUE) {
upstream.request(1);
}
}
}
@Override
public void cancel() {
cancelled = true;
upstream.cancel();
set.dispose();
}
@Override
public void request(long n) {
// ignored, no values emitted
}
@Nullable
@Override
public T poll() throws Exception {
return null; // always empty
}
@Override
public boolean isEmpty() {
return true; // always empty
}
@Override
public void clear() {
// nothing to clear
}
@Override
public int requestFusion(int mode) {
return mode & ASYNC;
}
void innerComplete(InnerConsumer inner) {
set.delete(inner);
onComplete();
}
void innerError(InnerConsumer inner, Throwable e) {
set.delete(inner);
onError(e);
}
final class InnerConsumer extends AtomicReference<Disposable> implements CompletableObserver, Disposable {
private static final long serialVersionUID = 8606673141535671828L;
@Override
public void onSubscribe(Disposable d) {
DisposableHelper.setOnce(this, d);
}
@Override
public void onComplete() {
innerComplete(this);
}
@Override
public void onError(Throwable e) {
innerError(this, e);
}
@Override
public void dispose() {
DisposableHelper.dispose(this);
}
@Override
public boolean isDisposed() {
return DisposableHelper.isDisposed(get());
}
}
}
}
| NiteshKant/RxJava | src/main/java/io/reactivex/internal/operators/flowable/FlowableFlatMapCompletable.java | Java | apache-2.0 | 7,266 |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.dmn.core.compiler;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import org.kie.dmn.api.core.DMNType;
import org.kie.dmn.api.core.ast.BusinessKnowledgeModelNode;
import org.kie.dmn.api.core.ast.DMNNode;
import org.kie.dmn.api.core.ast.DecisionNode;
import org.kie.dmn.api.core.ast.DecisionServiceNode;
import org.kie.dmn.api.core.ast.InputDataNode;
import org.kie.dmn.core.api.DMNExpressionEvaluator;
import org.kie.dmn.core.ast.DecisionNodeImpl;
import org.kie.dmn.core.impl.CompositeTypeImpl;
import org.kie.dmn.core.impl.DMNModelImpl;
import org.kie.dmn.core.util.Msg;
import org.kie.dmn.model.api.DRGElement;
import org.kie.dmn.model.api.Decision;
public class DecisionCompiler implements DRGElementCompiler {
@Override
public boolean accept(DRGElement de) {
return de instanceof Decision;
}
@Override
public void compileNode(DRGElement de, DMNCompilerImpl compiler, DMNModelImpl model) {
Decision decision = (Decision) de;
DecisionNodeImpl dn = new DecisionNodeImpl( decision );
DMNType type = null;
if ( decision.getVariable() == null ) {
DMNCompilerHelper.reportMissingVariable( model, de, decision, Msg.MISSING_VARIABLE_FOR_DECISION );
return;
}
DMNCompilerHelper.checkVariableName( model, decision, decision.getName() );
if ( decision.getVariable() != null && decision.getVariable().getTypeRef() != null ) {
type = compiler.resolveTypeRef(model, decision, decision.getVariable(), decision.getVariable().getTypeRef());
} else {
type = compiler.resolveTypeRef(model, decision, decision, null);
}
dn.setResultType( type );
model.addDecision( dn );
}
@Override
public boolean accept(DMNNode node) {
return node instanceof DecisionNodeImpl;
}
@Override
public void compileEvaluator(DMNNode node, DMNCompilerImpl compiler, DMNCompilerContext ctx, DMNModelImpl model) {
DecisionNodeImpl di = (DecisionNodeImpl) node;
compiler.linkRequirements( model, di );
ctx.enterFrame();
try {
Map<String, DMNType> importedTypes = new HashMap<>();
for( DMNNode dep : di.getDependencies().values() ) {
if( dep instanceof DecisionNode ) {
if (dep.getModelNamespace().equals(model.getNamespace())) {
ctx.setVariable(dep.getName(), ((DecisionNode) dep).getResultType());
} else {
// then the Decision dependency is an imported Decision.
Optional<String> alias = model.getImportAliasFor(dep.getModelNamespace(), dep.getModelName());
if (alias.isPresent()) {
CompositeTypeImpl importedComposite = (CompositeTypeImpl) importedTypes.computeIfAbsent(alias.get(), a -> new CompositeTypeImpl());
importedComposite.addField(dep.getName(), ((DecisionNode) dep).getResultType());
}
}
} else if( dep instanceof InputDataNode ) {
if (dep.getModelNamespace().equals(model.getNamespace())) {
ctx.setVariable(dep.getName(), ((InputDataNode) dep).getType());
} else {
// then the InputData dependency is an imported InputData.
Optional<String> alias = model.getImportAliasFor(dep.getModelNamespace(), dep.getModelName());
if (alias.isPresent()) {
CompositeTypeImpl importedComposite = (CompositeTypeImpl) importedTypes.computeIfAbsent(alias.get(), a -> new CompositeTypeImpl());
importedComposite.addField(dep.getName(), ((InputDataNode) dep).getType());
}
}
} else if( dep instanceof BusinessKnowledgeModelNode ) {
if (dep.getModelNamespace().equals(model.getNamespace())) {
// might need to create a DMNType for "functions" and replace the type here by that
ctx.setVariable(dep.getName(), ((BusinessKnowledgeModelNode) dep).getResultType());
} else {
// then the BKM dependency is an imported BKM.
Optional<String> alias = model.getImportAliasFor(dep.getModelNamespace(), dep.getModelName());
if (alias.isPresent()) {
CompositeTypeImpl importedComposite = (CompositeTypeImpl) importedTypes.computeIfAbsent(alias.get(), a -> new CompositeTypeImpl());
importedComposite.addField(dep.getName(), ((BusinessKnowledgeModelNode) dep).getResultType());
}
}
} else if (dep instanceof DecisionServiceNode) {
if (dep.getModelNamespace().equals(model.getNamespace())) {
// might need to create a DMNType for "functions" and replace the type here by that
ctx.setVariable(dep.getName(), ((DecisionServiceNode) dep).getResultType());
} else {
// then the BKM dependency is an imported BKM.
Optional<String> alias = model.getImportAliasFor(dep.getModelNamespace(), dep.getModelName());
if (alias.isPresent()) {
CompositeTypeImpl importedComposite = (CompositeTypeImpl) importedTypes.computeIfAbsent(alias.get(), a -> new CompositeTypeImpl());
importedComposite.addField(dep.getName(), ((DecisionServiceNode) dep).getResultType());
}
}
}
}
for (Entry<String, DMNType> importedType : importedTypes.entrySet()) {
ctx.setVariable(importedType.getKey(), importedType.getValue());
}
DMNExpressionEvaluator evaluator = compiler.getEvaluatorCompiler().compileExpression( ctx, model, di, di.getName(), di.getDecision().getExpression() );
di.setEvaluator( evaluator );
} finally {
ctx.exitFrame();
}
}
} | romartin/drools | kie-dmn/kie-dmn-core/src/main/java/org/kie/dmn/core/compiler/DecisionCompiler.java | Java | apache-2.0 | 6,980 |
var msg = require('./locale');
var api = require('./apiJavascript.js');
var paramLists = require('./paramLists.js');

// Shared dropdown option lists. The character roster and the droid list were
// each copy-pasted into many block definitions below; defining them once keeps
// the choices consistent and makes future roster changes a one-line edit.
var DROIDS = ['"random"', '"R2-D2"', '"C-3PO"'];
var CHARACTERS = ['"random"', '"Stormtrooper"', '"RebelPilot"', '"PufferPig"', '"Mynock"', '"MouseDroid"', '"Tauntaun"', '"Probot"'];

module.exports.blocks = [
  {func: 'setDroid', parent: api, category: '', params: ['"R2-D2"'], dropdown: { 0: DROIDS } },
  {func: 'setDroidSpeed', parent: api, category: '', params: ['"fast"'], dropdown: { 0: ['"random"', '"slow"', '"normal"', '"fast"'] } },
  {func: 'setBackground', parent: api, category: '', params: ['"Hoth"'], dropdown: { 0: ['"random"', '"Endor"', '"Hoth"', '"Starship"'] } },
  {func: 'setMap', parent: api, category: '', params: ['"blank"'], dropdown: { 0: ['"random"', '"blank"', '"circle"', '"horizontal"', '"grid"', '"blobs"'] } },
  {func: 'moveRight', parent: api, category: '', },
  {func: 'moveLeft', parent: api, category: '', },
  {func: 'moveUp', parent: api, category: '', },
  {func: 'moveDown', parent: api, category: '', },
  {func: 'goRight', parent: api, category: '', },
  {func: 'goLeft', parent: api, category: '', },
  {func: 'goUp', parent: api, category: '', },
  {func: 'goDown', parent: api, category: '', },
  {func: 'playSound', parent: api, category: '', params: ['"R2-D2sound1"'], dropdown: { 0: paramLists.playSoundDropdown } },
  {func: 'endGame', parent: api, category: '', params: ['"win"'], dropdown: { 0: ['"win"', '"lose"'] } },
  {func: 'addPoints', parent: api, category: '', params: ["100"] },
  {func: 'removePoints', parent: api, category: '', params: ["100"] },
  {func: 'addCharacter', parent: api, category: '', params: ['"PufferPig"'], dropdown: { 0: CHARACTERS } },
  {func: 'moveFast', parent: api, category: '', params: ['"PufferPig"'], dropdown: { 0: CHARACTERS } },
  {func: 'moveNormal', parent: api, category: '', params: ['"PufferPig"'], dropdown: { 0: CHARACTERS } },
  {func: 'moveSlow', parent: api, category: '', params: ['"PufferPig"'], dropdown: { 0: CHARACTERS } },
  {func: 'whenLeft', block: 'function whenLeft() {}', expansion: 'function whenLeft() {\n  __;\n}', category: '' },
  {func: 'whenRight', block: 'function whenRight() {}', expansion: 'function whenRight() {\n  __;\n}', category: '' },
  {func: 'whenUp', block: 'function whenUp() {}', expansion: 'function whenUp() {\n  __;\n}', category: '' },
  {func: 'whenDown', block: 'function whenDown() {}', expansion: 'function whenDown() {\n  __;\n}', category: '' },
  {func: 'whenTouchObstacle', block: 'function whenTouchObstacle() {}', expansion: 'function whenTouchObstacle() {\n  __;\n}', category: '' },
  {func: 'whenGetCharacter', block: 'function whenGetCharacter() {}', expansion: 'function whenGetCharacter() {\n  __;\n}', category: '' },
  {func: 'whenGetStormtrooper', block: 'function whenGetStormtrooper() {}', expansion: 'function whenGetStormtrooper() {\n  __;\n}', category: '' },
  {func: 'whenGetRebelPilot', block: 'function whenGetRebelPilot() {}', expansion: 'function whenGetRebelPilot() {\n  __;\n}', category: '' },
  {func: 'whenGetPufferPig', block: 'function whenGetPufferPig() {}', expansion: 'function whenGetPufferPig() {\n  __;\n}', category: '' },
  {func: 'whenGetMynock', block: 'function whenGetMynock() {}', expansion: 'function whenGetMynock() {\n  __;\n}', category: '' },
  {func: 'whenGetMouseDroid', block: 'function whenGetMouseDroid() {}', expansion: 'function whenGetMouseDroid() {\n  __;\n}', category: '' },
  {func: 'whenGetTauntaun', block: 'function whenGetTauntaun() {}', expansion: 'function whenGetTauntaun() {\n  __;\n}', category: '' },
  {func: 'whenGetProbot', block: 'function whenGetProbot() {}', expansion: 'function whenGetProbot() {\n  __;\n}', category: '' },
  {func: 'whenGetAllCharacters', block: 'function whenGetAllCharacters() {}', expansion: 'function whenGetAllCharacters() {\n  __;\n}', category: '' },
  {func: 'whenGetAllStormtroopers', block: 'function whenGetAllStormtroopers() {}', expansion: 'function whenGetAllStormtroopers() {\n  __;\n}', category: '' },
  {func: 'whenGetAllRebelPilots', block: 'function whenGetAllRebelPilots() {}', expansion: 'function whenGetAllRebelPilots() {\n  __;\n}', category: '' },
  {func: 'whenGetAllPufferPigs', block: 'function whenGetAllPufferPigs() {}', expansion: 'function whenGetAllPufferPigs() {\n  __;\n}', category: '' },
  {func: 'whenGetAllMynocks', block: 'function whenGetAllMynocks() {}', expansion: 'function whenGetAllMynocks() {\n  __;\n}', category: '' },
  {func: 'whenGetAllMouseDroids', block: 'function whenGetAllMouseDroids() {}', expansion: 'function whenGetAllMouseDroids() {\n  __;\n}', category: '' },
  {func: 'whenGetAllTauntauns', block: 'function whenGetAllTauntauns() {}', expansion: 'function whenGetAllTauntauns() {\n  __;\n}', category: '' },
  {func: 'whenGetAllProbots', block: 'function whenGetAllProbots() {}', expansion: 'function whenGetAllProbots() {\n  __;\n}', category: '' },

  // Functions hidden from autocomplete - not used in hoc2015:
  {func: 'whenTouchStormtrooper', block: 'function whenTouchStormtrooper() {}', expansion: 'function whenTouchStormtrooper() {\n  __;\n}', category: '', noAutocomplete: true },
  {func: 'whenTouchRebelPilot', block: 'function whenTouchRebelPilot() {}', expansion: 'function whenTouchRebelPilot() {\n  __;\n}', category: '', noAutocomplete: true },
  {func: 'whenTouchPufferPig', block: 'function whenTouchPufferPig() {}', expansion: 'function whenTouchPufferPig() {\n  __;\n}', category: '', noAutocomplete: true },
  {func: 'whenTouchMynock', block: 'function whenTouchMynock() {}', expansion: 'function whenTouchMynock() {\n  __;\n}', category: '', noAutocomplete: true },
  {func: 'whenTouchMouseDroid', block: 'function whenTouchMouseDroid() {}', expansion: 'function whenTouchMouseDroid() {\n  __;\n}', category: '', noAutocomplete: true },
  {func: 'whenTouchTauntaun', block: 'function whenTouchTauntaun() {}', expansion: 'function whenTouchTauntaun() {\n  __;\n}', category: '', noAutocomplete: true },
  {func: 'whenTouchProbot', block: 'function whenTouchProbot() {}', expansion: 'function whenTouchProbot() {\n  __;\n}', category: '', noAutocomplete: true },
  {func: 'whenTouchCharacter', block: 'function whenTouchCharacter() {}', expansion: 'function whenTouchCharacter() {\n  __;\n}', category: '', noAutocomplete: true },
  {func: 'changeScore', parent: api, category: '', params: ["1"], noAutocomplete: true },
  {func: 'whenTouchGoal', block: 'function whenTouchGoal() {}', expansion: 'function whenTouchGoal() {\n  __;\n}', category: '', noAutocomplete: true },
  {func: 'whenTouchAllGoals', block: 'function whenTouchAllGoals() {}', expansion: 'function whenTouchAllGoals() {\n  __;\n}', category: '', noAutocomplete: true },
  {func: 'whenScore1000', block: 'function whenScore1000() {}', expansion: 'function whenScore1000() {\n  __;\n}', category: '', noAutocomplete: true },
  {func: 'setToChase', parent: api, category: '', params: ['"PufferPig"'], dropdown: { 0: CHARACTERS }, noAutocomplete: true },
  {func: 'setToFlee', parent: api, category: '', params: ['"PufferPig"'], dropdown: { 0: CHARACTERS }, noAutocomplete: true },
  {func: 'setToRoam', parent: api, category: '', params: ['"PufferPig"'], dropdown: { 0: CHARACTERS }, noAutocomplete: true },
  {func: 'setToStop', parent: api, category: '', params: ['"PufferPig"'], dropdown: { 0: CHARACTERS }, noAutocomplete: true },
  {func: 'setSprite', parent: api, category: '', params: ['0', '"R2-D2"'], dropdown: { 1: DROIDS }, noAutocomplete: true },
  {func: 'setSpritePosition', parent: api, category: '', params: ["0", "7"], noAutocomplete: true },
  {func: 'setSpriteSpeed', parent: api, category: '', params: ["0", "8"], noAutocomplete: true },
  {func: 'setSpriteEmotion', parent: api, category: '', params: ["0", "1"], noAutocomplete: true },
  {func: 'setSpriteSize', parent: api, category: '', params: ["0", "1.0"], noAutocomplete: true },
  {func: 'throwProjectile', parent: api, category: '', params: ["0", "1", '"blue_fireball"'], noAutocomplete: true },
  {func: 'vanish', parent: api, category: '', params: ["0"], noAutocomplete: true },
  {func: 'move', parent: api, category: '', params: ["0", "1"], noAutocomplete: true },
  {func: 'showDebugInfo', parent: api, category: '', params: ["false"], noAutocomplete: true },
  {func: 'onEvent', parent: api, category: '', params: ["'when-left'", "function() {\n  \n}"], noAutocomplete: true },
];

module.exports.categories = {
  '': {
    color: 'red',
    blocks: []
  },
  'Play Lab': {
    color: 'red',
    blocks: []
  },
  Commands: {
    color: 'red',
    blocks: []
  },
  Events: {
    color: 'green',
    blocks: []
  },
};

module.exports.autocompleteFunctionsWithParens = true;
module.exports.showParamDropdowns = true;
| pickettd/code-dot-org | apps/src/studio/dropletConfig.js | JavaScript | apache-2.0 | 9,539 |
<?php
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
namespace Google\Service\Classroom;
/**
 * Model for a Cloud Pub/Sub topic reference used by the Classroom API
 * (generated code — do not hand-edit the accessors).
 */
class CloudPubsubTopic extends \Google\Model
{
  /**
   * Name of the Cloud Pub/Sub topic — presumably the fully-qualified
   * `projects/{project}/topics/{topic}` form; confirm against the
   * Classroom API reference.
   *
   * @var string
   */
  public $topicName;

  /**
   * @param string
   */
  public function setTopicName($topicName)
  {
    $this->topicName = $topicName;
  }
  /**
   * @return string
   */
  public function getTopicName()
  {
    return $this->topicName;
  }
}

// Adding a class alias for backwards compatibility with the previous class name.
class_alias(CloudPubsubTopic::class, 'Google_Service_Classroom_CloudPubsubTopic');
| googleapis/google-api-php-client-services | src/Classroom/CloudPubsubTopic.php | PHP | apache-2.0 | 1,120 |
/**
* libjass
*
* https://github.com/Arnavion/libjass
*
* Copyright 2013 Arnav Singh
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
define(["intern!tdd", "require", "tests/support/test-page"], function (tdd, require, TestPage) {
	tdd.suite("Outlines", function () {
		tdd.test("Basic", function () {
			// Seek points (in seconds) paired with the reference screenshot
			// each frame must match.
			var steps = [
				[0.5, "./outlines-1.png"],
				[1.5, "./outlines-2.png"],
				[2.5, "./outlines-3.png"],
				[3.5, "./outlines-4.png"]
			];

			var testPage = new TestPage(this.remote, require.toUrl("tests/support/browser-test-page.html"), "/tests/functional/outlines/outlines.ass", 1280, 720);

			// Chain one seek-and-compare step per entry, then finish the page.
			var chain = testPage.prepare();
			steps.forEach(function (step) {
				chain = chain.then(function (page) {
					return page.seekAndCompareScreenshot(step[0], require.toUrl(step[1]));
				});
			});
			return chain.then(function (page) { return page.done(); });
		});
	});
});
| joshuabrown-ellation/libjass | tests/functional/outlines/outlines.js | JavaScript | apache-2.0 | 1,551 |
<?php
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
namespace Google\Service\ToolResults;
/**
 * Model for a non-SDK API usage violation reported by a Tool Results test run
 * (generated code — do not hand-edit the accessors).
 */
class NonSdkApiUsageViolation extends \Google\Collection
{
  // Tells \Google\Collection which property holds the repeated element.
  protected $collection_key = 'apiSignatures';
  /**
   * Signatures of the non-SDK APIs involved — presumably the APIs the app
   * accessed; confirm against the Tool Results API reference.
   *
   * @var string[]
   */
  public $apiSignatures;
  /**
   * Count of distinct non-SDK APIs in this violation.
   *
   * @var int
   */
  public $uniqueApis;

  /**
   * @param string[]
   */
  public function setApiSignatures($apiSignatures)
  {
    $this->apiSignatures = $apiSignatures;
  }
  /**
   * @return string[]
   */
  public function getApiSignatures()
  {
    return $this->apiSignatures;
  }
  /**
   * @param int
   */
  public function setUniqueApis($uniqueApis)
  {
    $this->uniqueApis = $uniqueApis;
  }
  /**
   * @return int
   */
  public function getUniqueApis()
  {
    return $this->uniqueApis;
  }
}

// Adding a class alias for backwards compatibility with the previous class name.
class_alias(NonSdkApiUsageViolation::class, 'Google_Service_ToolResults_NonSdkApiUsageViolation');
| googleapis/google-api-php-client-services | src/ToolResults/NonSdkApiUsageViolation.php | PHP | apache-2.0 | 1,498 |
/*
* Copyright (c) 2001-2007, TIBCO Software Inc.
* Use, modification, and distribution subject to terms of license.
*/
// Minified/obfuscated build artifact for the jsx3.chart.CategoryAxis class
// (TIBCO General Interface charting add-in). Do not edit by hand: the
// single-letter identifiers and the "ub" string table below are produced by
// the build-time obfuscator, and statements span the physical line breaks.
// Public surface visible here: tick placement ("aligned"/"between" via
// TICKS_ALIGNED/TICKS_BETWEEN), a category field, low/high padding, and
// category<->pixel mapping (getRangeForCategory / getPointForCategory).
// NOTE(review): internal method/property meanings (Ho, pj, Hf, se, Xj, etc.)
// are obfuscator-assigned — consult the unminified source tree to change them.
jsx3.require("jsx3.chart.Axis");jsx3.Class.defineClass("jsx3.chart.CategoryAxis",jsx3.chart.Axis,null,function(c,p){var
ub={d:"h6",a:"aligned",c:"av",f:"gn",b:"between",e:"tickAlignment"};c.TICKS_ALIGNED=ub.a;c.TICKS_BETWEEN=ub.b;c.MAX_TICKS=200;c.BG={aligned:1,between:1};p.init=function(i,r,q){this.jsxsuper(i,r,q);this.tickAlignment=ub.b;this.categoryField=null;this.paddingLow=null;this.paddingHigh=null;this.Ho(ub.c,0);this.Ho(ub.d,0);};p.getTickAlignment=function(){return this.tickAlignment;};p.setTickAlignment=function(l){if(c.BG[l]){this.tickAlignment=l;}else throw new
jsx3.IllegalArgumentException(ub.e,l);};p.getCategoryField=function(){return this.categoryField;};p.setCategoryField=function(m){this.categoryField=m;};p.getPaddingLow=function(){return this.paddingLow!=null?this.paddingLow:0;};p.setPaddingLow=function(h){this.paddingLow=h;};p.getPaddingHigh=function(){return this.paddingHigh!=null?this.paddingHigh:0;};p.setPaddingHigh=function(r){this.paddingHigh=r;};p.fl=function(){this.Ll(ub.f);var
da=this.getChart();if(da==null){this.Ho(ub.c,0);this.Ho(ub.d,0);}else{var
tb=da.pe(this,true);var
ib=da.bh();this.Ho(ub.d,tb.length);this.Ho(ub.c,ib!=null?ib.length:0);}};p.Hf=function(){var
nb=this.pj(ub.f);if(nb!=null)return nb;var
B=this.pj(ub.c);nb=[];if(B<1)return nb;var
ga=this.getPaddingLow();var
Va=this.getPaddingHigh();var
x=this.tickAlignment==ub.b?B+1:B;var
La=x-1;var
fb=La+ga+Va;var
pb=this.length/fb;var
C=ga*pb;for(var
Qa=0;Qa<x&&Qa<c.MAX_TICKS;Qa++)nb.push(Math.round(C+Qa*pb));this.Ho(ub.f,nb);return nb;};p.se=function(){var
B=this.pj(ub.c);if(this.tickAlignment==ub.b){var
Ab=this.Hf();var
Xa=[];for(var
va=0;va<B;va++)Xa[va]=Math.round((Ab[va]+Ab[va+1])/2);return Xa;}else return this.Hf();};p.Xj=function(b){var
Pa=b;var
z=this.getChart();if(this.categoryField&&z!=null){var
ab=z.bh();if(ab!=null){var
ga=ab[b];if(ga!=null)Pa=ga.getAttribute([this.categoryField]);}}return Pa;};p.mo=function(){return false;};p.getRangeForCategory=function(j){var
_=this.Hf();if(this.tickAlignment==ub.b){if(j<0||j>=_.length-1)return null;else return [_[j],_[j+1]];}else{if(j<0||j>=_.length||_.length<2)return null;var
Xa=j==0?_[1]-_[0]:_[j]-_[j-1];return [Math.round(_[j]-Xa/2),Math.round(_[j]+Xa/2)];}};p.getPointForCategory=function(n){var
Aa=this.Hf();if(this.tickAlignment==ub.b){if(n<0||n>=Aa.length-1)return null;else return Math.round((Aa[n]+Aa[n+1])/2);}else return Aa[n];};c.getVersion=function(){return jsx3.chart.si;};});
| burris/dwr | ui/gi/demo/web/gi/JSX/addins/charting/classes/jsx3/chart/CategoryAxis.js | JavaScript | apache-2.0 | 2,582 |
/*
* Copyright 2012-2013 eBay Software Foundation and ios-driver committers
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.uiautomation.ios;
import com.google.common.collect.ImmutableList;
import org.libimobiledevice.ios.driver.binding.exceptions.SDKException;
import org.libimobiledevice.ios.driver.binding.model.ApplicationInfo;
import org.libimobiledevice.ios.driver.binding.model.DeviceInfo;
import org.libimobiledevice.ios.driver.binding.services.DeviceCallBack;
import org.libimobiledevice.ios.driver.binding.services.DeviceService;
import org.libimobiledevice.ios.driver.binding.services.IOSDevice;
import org.libimobiledevice.ios.driver.binding.services.ImageMountingService;
import org.libimobiledevice.ios.driver.binding.services.InformationService;
import org.libimobiledevice.ios.driver.binding.services.InstallerService;
import org.openqa.selenium.WebDriverException;
import org.uiautomation.ios.application.IPAShellApplication;
import org.uiautomation.ios.utils.DDILocator;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.logging.Logger;
/**
 * Keeps track of the iOS devices (real and simulated) available to the
 * server, reacting to plug/unplug notifications delivered by
 * {@link DeviceCallBack}.
 * <p>
 * The device lists are {@link CopyOnWriteArrayList}s because they are mutated
 * from the device-notification callback while being read by other threads.
 */
public class DeviceStore extends DeviceCallBack {

  private static final Logger log = Logger.getLogger(DeviceStore.class.getName());

  private final List<RealDevice> reals = new CopyOnWriteArrayList<>();
  private final List<SimulatorDevice> sims = new CopyOnWriteArrayList<>();
  private final ApplicationStore apps;
  // When non-empty, only devices whose UUID appears here are managed.
  private final Set<String> uuidWhitelist;

  public DeviceStore(ApplicationStore apps, Set<String> uuidWhitelist) {
    super();
    this.apps = apps;
    this.uuidWhitelist = uuidWhitelist;
  }

  /**
   * @return immutable copy of the currently available devices.
   */
  public List<Device> getDevices() {
    List<Device> all = new ArrayList<>();
    all.addAll(reals);
    all.addAll(sims);
    return ImmutableList.copyOf(all);
  }

  /** @return the live backing list of real devices; callers must not mutate it. */
  public List<RealDevice> getRealDevices() {
    return reals;
  }

  /** @return the live backing list of simulators; callers must not mutate it. */
  public List<SimulatorDevice> getSimulatorDevices() {
    return sims;
  }

  public void add(SimulatorDevice simulatorDevice) {
    sims.add(simulatorDevice);
  }

  /**
   * Called when usbmuxd reports a new device: registers it in the pool,
   * records its Mobile Safari version in the application store, and mounts the
   * developer disk image if the device is not yet in dev mode.
   */
  @Override
  protected void onDeviceAdded(String uuid) {
    if (!uuidWhitelist.isEmpty() && !uuidWhitelist.contains(uuid)) {
      log.info("device detected but not whitelisted");
      return;
    }
    RealDevice d = null;
    try {
      IOSDevice device = DeviceService.get(uuid);
      DeviceInfo info = new DeviceInfo(uuid);
      d = new RealDevice(info);
      log.info("new device detected (" + uuid + ") " + info.getDeviceName());
      reals.add(d);

      InstallerService s = new InstallerService(device);
      String id = "com.apple.mobilesafari";
      ApplicationInfo safari = s.getApplication(id);
      String v = (String) safari.getProperty("CFBundleVersion");
      log.info("device " + info.getDeviceName() + " = safari " + v);

      IPAShellApplication ipa = new IPAShellApplication(id, v, safari);
      apps.add(ipa);

      InformationService i = new InformationService(device);
      if (!i.isDevModeEnabled()) {
        log.warning(
            "The device " + uuid + " is not set to dev mode. It can't be used for testing.");
        File ddi = DDILocator.locateDDI(device);
        mount(device, ddi);
        log.info("DDI mounted.Device now in dev mode.");
      }
    } catch (SDKException | WebDriverException e) {
      // The device could not be fully initialized: remove the half-configured
      // entry from the pool and leave a trace of why it was rejected instead
      // of swallowing the failure silently.
      log.warning("device " + uuid + " could not be added to the pool: " + e);
      if (d != null) {
        reals.remove(d);
      }
    }
  }

  /** Mounts the developer disk image, always freeing the mounting service. */
  private void mount(IOSDevice device, File ddi) throws SDKException {
    ImageMountingService service = null;
    try {
      service = new ImageMountingService(device);
      service.mount(ddi);
    } finally {
      if (service != null) {
        service.free();
      }
    }
  }

  /** Called when usbmuxd reports a device was unplugged: drops it from the pool. */
  @Override
  protected void onDeviceRemoved(String uuid) {
    if (!uuidWhitelist.isEmpty() && !uuidWhitelist.contains(uuid)) {
      log.info("device removed but not whitelisted");
      return;
    }
    for (RealDevice d : reals) {
      if (d.getUuid().equals(uuid)) {
        log.info("Removing " + uuid + " for the devices pool");
        boolean ok = reals.remove(d);
        if (!ok) {
          log.warning("device " + uuid + " has been unplugged, but was never there ?");
        }
      }
    }
  }
}
| darraghgrace/ios-driver | server/src/main/java/org/uiautomation/ios/DeviceStore.java | Java | apache-2.0 | 4,836 |
/*
* Kendo UI Web v2014.1.318 (http://kendoui.com)
* Copyright 2014 Telerik AD. All rights reserved.
*
* Kendo UI Web commercial licenses may be obtained at
* http://www.telerik.com/purchase/license-agreement/kendo-ui-web
* If you do not own a commercial license, this file shall be governed by the
* GNU General Public License (GPL) version 3.
* For GPL requirements, please review: http://www.gnu.org/copyleft/gpl.html
*/
(function(f, define){
    define([], f);
})(function(){
(function( window, undefined ) {
    var kendo = window.kendo || (window.kendo = { cultures: {} });

    // Culture definition for Norwegian Nynorsk ("nn").
    // FIX: this chunk contained mojibake from a charset mix-up — the group
    // ("thousands") separator showed as "ย " instead of the no-break space
    // U+00A0, and day names showed as e.g. "sรธndag" instead of "søndag".
    // The separator is written as the \u00a0 escape so the file survives any
    // future re-encoding.
    kendo.cultures["nn"] = {
        name: "nn",
        numberFormat: {
            pattern: ["-n"],
            decimals: 2,
            ",": "\u00a0",
            ".": ",",
            groupSize: [3],
            percent: {
                pattern: ["-n %","n %"],
                decimals: 2,
                ",": "\u00a0",
                ".": ",",
                groupSize: [3],
                symbol: "%"
            },
            currency: {
                pattern: ["$ -n","$ n"],
                decimals: 2,
                ",": "\u00a0",
                ".": ",",
                groupSize: [3],
                symbol: "kr"
            }
        },
        calendars: {
            standard: {
                days: {
                    names: ["s\u00f8ndag","m\u00e5ndag","tysdag","onsdag","torsdag","fredag","laurdag"],
                    namesAbbr: ["s\u00f8","m\u00e5","ty","on","to","fr","la"],
                    namesShort: ["s\u00f8","m\u00e5","ty","on","to","fr","la"]
                },
                months: {
                    names: ["januar","februar","mars","april","mai","juni","juli","august","september","oktober","november","desember",""],
                    namesAbbr: ["jan","feb","mar","apr","mai","jun","jul","aug","sep","okt","nov","des",""]
                },
                AM: [""],
                PM: [""],
                patterns: {
                    d: "dd.MM.yyyy",
                    D: "d. MMMM yyyy",
                    F: "d. MMMM yyyy HH:mm:ss",
                    g: "dd.MM.yyyy HH:mm",
                    G: "dd.MM.yyyy HH:mm:ss",
                    m: "d. MMMM",
                    M: "d. MMMM",
                    s: "yyyy'-'MM'-'dd'T'HH':'mm':'ss",
                    t: "HH:mm",
                    T: "HH:mm:ss",
                    u: "yyyy'-'MM'-'dd HH':'mm':'ss'Z'",
                    y: "MMMM yyyy",
                    Y: "MMMM yyyy"
                },
                "/": ".",
                ":": ":",
                firstDay: 1
            }
        }
    }
})(this);
return window.kendo;
}, typeof define == 'function' && define.amd ? define : function(_, f){ f(); });
๏ปฟusing System;
using System.Text;
using System.Threading.Tasks;
using Foundatio.Extensions;
namespace Foundatio.Serializer {
    /// <summary>
    /// Abstraction over an asynchronous, byte-oriented object serializer.
    /// </summary>
    public interface ISerializer {
        Task<object> DeserializeAsync(byte[] data, Type objectType);
        Task<byte[]> SerializeAsync(object value);
    }

    /// <summary>
    /// Convenience overloads (string input/output, generic typing) layered on
    /// top of <see cref="ISerializer"/>. All string conversions use UTF-8, and
    /// a null input string is treated as empty.
    /// </summary>
    public static class SerializerExtensions {
        public static Task<object> DeserializeAsync(this ISerializer serializer, string data, Type objectType) {
            var bytes = Encoding.UTF8.GetBytes(data ?? String.Empty);
            return serializer.DeserializeAsync(bytes, objectType);
        }

        public static async Task<T> DeserializeAsync<T>(this ISerializer serializer, byte[] data) {
            var result = await serializer.DeserializeAsync(data, typeof(T)).AnyContext();
            return (T)result;
        }

        public static Task<T> DeserializeAsync<T>(this ISerializer serializer, string data) {
            var bytes = Encoding.UTF8.GetBytes(data ?? String.Empty);
            return DeserializeAsync<T>(serializer, bytes);
        }

        public static async Task<string> SerializeToStringAsync(this ISerializer serializer, object value) {
            if (value == null)
                return null;
            var bytes = await serializer.SerializeAsync(value).AnyContext();
            return Encoding.UTF8.GetString(bytes);
        }
    }
}
| wgraham17/Foundatio | src/Foundatio/Serializer/ISerializer.cs | C# | apache-2.0 | 1,253 |
package ec2
import (
"github.com/crowdmob/goamz/aws"
"time"
)
// Sign exposes the package-private sign function so that external test
// packages can exercise request signing directly.
func Sign(auth aws.Auth, method, path string, params map[string]string, host string) {
	sign(auth, method, path, params, host)
}
func fixedTime() time.Time {
return time.Date(2012, 1, 1, 0, 0, 0, 0, time.UTC)
}
// FakeTime switches the package clock: when fakeIt is true the package uses
// the deterministic fixedTime stub, otherwise it is restored to time.Now.
func FakeTime(fakeIt bool) {
	timeNow = time.Now
	if fakeIt {
		timeNow = fixedTime
	}
}
| coreos/aws-auth-proxy | vendor/github.com/crowdmob/goamz/ec2/export_test.go | GO | apache-2.0 | 380 |
// Copyright 2017, Dell EMC, Inc.
/* jshint node:true */
'use strict';
describe(require('path').basename(__filename), function () {
    // Shared spec scaffolding used by all base task-definition tests.
    var baseSpec = require('./base-task-data-spec');

    baseSpec.before(function (context) {
        // Load the task definition under test into the shared context.
        context.taskdefinition = helper.require(
            '/lib/task-data/base-tasks/dell-wsman-reset-components.js'
        );
    });

    describe('task-data', function () {
        baseSpec.examples();
    });
});
| AlaricChan/on-tasks | spec/lib/task-data/base-tasks/dell-wsman-reset-components-spec.js | JavaScript | apache-2.0 | 439 |
๏ปฟusing System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using RestSharp.Deserializers;
namespace Twilio
{
    /// <summary>
    /// Represents an IP address resource returned by the Twilio REST API.
    /// </summary>
    public class IpAddress : TwilioBase
    {
        /// <summary>Unique identifier (Sid) of this resource.</summary>
        public string Sid { get; set; }
        /// <summary>Human-readable name for this IP address.</summary>
        public string FriendlyName { get; set; }
        // Deserialized from the API field "IpAddress"; the property is named
        // Address to avoid clashing with the class name.
        [DeserializeAs(Name="IpAddress")]
        public string Address { get; set; }
    }
}
| IRlyDontKnow/twilio-csharp | src/Twilio.Api/Model/IpAddress.cs | C# | apache-2.0 | 378 |
package provision
import (
"bytes"
"fmt"
"io/ioutil"
"net/url"
"path"
"path/filepath"
"strconv"
"strings"
log "github.com/Sirupsen/logrus"
"github.com/docker/machine/libmachine/auth"
"github.com/docker/machine/libmachine/provision/pkgaction"
"github.com/docker/machine/libmachine/swarm"
"github.com/docker/machine/utils"
)
type DockerOptions struct {
EngineOptions string
EngineOptionsPath string
}
// installDockerGeneric installs Docker on the target host via the
// get.docker.com convenience script over SSH, skipping the install when a
// docker binary is already on the PATH.
func installDockerGeneric(p Provisioner) error {
	// install docker - until cloudinit we use ubuntu everywhere so we
	// just install it using the docker repos
	output, err := p.SSHCommand("if ! type docker; then curl -sSL https://get.docker.com | sh -; fi")
	if err == nil {
		return nil
	}

	// Surface the remote stderr so the failure is actionable.
	var buf bytes.Buffer
	if _, readErr := buf.ReadFrom(output.Stderr); readErr != nil {
		return readErr
	}
	return fmt.Errorf("error installing docker: %s\n", buf.String())
}
// ConfigureAuth sets up TLS authentication for the Docker daemon on the
// machine: it copies the CA/client certs into the machine's local config
// directory, generates a server certificate for the machine's IP, uploads
// the cert material over SSH, rewrites the daemon options to enable TLS,
// and restarts the Docker service.
//
// All failures are returned as errors (previously cert-copy failures called
// log.Fatalf, which terminated the whole process from library code).
func ConfigureAuth(p Provisioner, authOptions auth.AuthOptions) error {
	var (
		err error
	)

	machineName := p.GetDriver().GetMachineName()
	org := machineName
	bits := 2048

	ip, err := p.GetDriver().GetIP()
	if err != nil {
		return err
	}

	// copy certs to client dir for docker client
	machineDir := filepath.Join(utils.GetMachineDir(), machineName)
	if err := utils.CopyFile(authOptions.CaCertPath, filepath.Join(machineDir, "ca.pem")); err != nil {
		return fmt.Errorf("error copying ca.pem to machine dir: %s", err)
	}

	if err := utils.CopyFile(authOptions.ClientCertPath, filepath.Join(machineDir, "cert.pem")); err != nil {
		return fmt.Errorf("error copying cert.pem to machine dir: %s", err)
	}

	if err := utils.CopyFile(authOptions.ClientKeyPath, filepath.Join(machineDir, "key.pem")); err != nil {
		return fmt.Errorf("error copying key.pem to machine dir: %s", err)
	}

	log.Debugf("generating server cert: %s ca-key=%s private-key=%s org=%s",
		authOptions.ServerCertPath,
		authOptions.CaCertPath,
		authOptions.PrivateKeyPath,
		org,
	)

	// TODO: Switch to passing just authOptions to this func
	// instead of all these individual fields
	err = utils.GenerateCert(
		[]string{ip},
		authOptions.ServerCertPath,
		authOptions.ServerKeyPath,
		authOptions.CaCertPath,
		authOptions.PrivateKeyPath,
		org,
		bits,
	)
	if err != nil {
		return fmt.Errorf("error generating server cert: %s", err)
	}

	// Stop the daemon before rewriting its TLS configuration.
	if err := p.Service("docker", pkgaction.Stop); err != nil {
		return err
	}

	dockerDir := p.GetDockerOptionsDir()
	if _, err := p.SSHCommand(fmt.Sprintf("sudo mkdir -p %s", dockerDir)); err != nil {
		return err
	}

	// upload certs and configure TLS auth
	caCert, err := ioutil.ReadFile(authOptions.CaCertPath)
	if err != nil {
		return err
	}

	// due to windows clients, we cannot use filepath.Join as the paths
	// will be mucked on the linux hosts
	machineCaCertPath := path.Join(dockerDir, "ca.pem")
	authOptions.CaCertRemotePath = machineCaCertPath

	serverCert, err := ioutil.ReadFile(authOptions.ServerCertPath)
	if err != nil {
		return err
	}
	machineServerCertPath := path.Join(dockerDir, "server.pem")
	authOptions.ServerCertRemotePath = machineServerCertPath

	serverKey, err := ioutil.ReadFile(authOptions.ServerKeyPath)
	if err != nil {
		return err
	}
	machineServerKeyPath := path.Join(dockerDir, "server-key.pem")
	authOptions.ServerKeyRemotePath = machineServerKeyPath

	// Write each PEM file on the host via `tee`.
	if _, err = p.SSHCommand(fmt.Sprintf("echo \"%s\" | sudo tee %s", string(caCert), machineCaCertPath)); err != nil {
		return err
	}

	if _, err = p.SSHCommand(fmt.Sprintf("echo \"%s\" | sudo tee %s", string(serverKey), machineServerKeyPath)); err != nil {
		return err
	}

	if _, err = p.SSHCommand(fmt.Sprintf("echo \"%s\" | sudo tee %s", string(serverCert), machineServerCertPath)); err != nil {
		return err
	}

	dockerUrl, err := p.GetDriver().GetURL()
	if err != nil {
		return err
	}
	u, err := url.Parse(dockerUrl)
	if err != nil {
		return err
	}

	// Default to the standard TLS port unless the driver URL specifies one.
	dockerPort := 2376
	parts := strings.Split(u.Host, ":")
	if len(parts) == 2 {
		dPort, err := strconv.Atoi(parts[1])
		if err != nil {
			return err
		}
		dockerPort = dPort
	}

	dkrcfg, err := p.GenerateDockerOptions(dockerPort, authOptions)
	if err != nil {
		return err
	}

	if _, err = p.SSHCommand(fmt.Sprintf("echo \"%s\" | sudo tee -a %s", dkrcfg.EngineOptions, dkrcfg.EngineOptionsPath)); err != nil {
		return err
	}

	if err := p.Service("docker", pkgaction.Start); err != nil {
		return err
	}

	return nil
}
// getDefaultDaemonOpts builds the default daemon flag string enabling TLS
// verification against the uploaded cert material, plus a provider label.
func getDefaultDaemonOpts(driverName string, authOptions auth.AuthOptions) string {
	label := fmt.Sprintf("--label=provider=%s", driverName)
	return fmt.Sprintf(`--tlsverify --tlscacert=%s --tlskey=%s --tlscert=%s %s`,
		authOptions.CaCertRemotePath,
		authOptions.ServerKeyRemotePath,
		authOptions.ServerCertRemotePath,
		label,
	)
}
// configureSwarm launches the Swarm agent container (and, for masters, the
// manage container) on the machine. It is a no-op when the machine is not
// part of a swarm.
//
// Fix: the swarm host is now validated to contain a port before indexing,
// instead of panicking with index-out-of-range on a host without ":port".
func configureSwarm(p Provisioner, swarmOptions swarm.SwarmOptions) error {
	if !swarmOptions.IsSwarm {
		return nil
	}

	basePath := p.GetDockerOptionsDir()
	ip, err := p.GetDriver().GetIP()
	if err != nil {
		return err
	}

	tlsCaCert := path.Join(basePath, "ca.pem")
	tlsCert := path.Join(basePath, "server.pem")
	tlsKey := path.Join(basePath, "server-key.pem")
	masterArgs := fmt.Sprintf("--tlsverify --tlscacert=%s --tlscert=%s --tlskey=%s -H %s %s",
		tlsCaCert, tlsCert, tlsKey, swarmOptions.Host, swarmOptions.Discovery)
	nodeArgs := fmt.Sprintf("--addr %s:2376 %s", ip, swarmOptions.Discovery)

	u, err := url.Parse(swarmOptions.Host)
	if err != nil {
		return err
	}

	parts := strings.Split(u.Host, ":")
	if len(parts) != 2 {
		return fmt.Errorf("swarm host %s must specify a port (host:port)", swarmOptions.Host)
	}
	port := parts[1]

	// TODO: Do not hardcode daemon port, ask the driver
	if err := utils.WaitForDocker(ip, 2376); err != nil {
		return err
	}

	if _, err := p.SSHCommand(fmt.Sprintf("sudo docker pull %s", swarm.DockerImage)); err != nil {
		return err
	}

	dockerDir := p.GetDockerOptionsDir()

	// if master start master agent
	if swarmOptions.Master {
		log.Debug("launching swarm master")
		log.Debugf("master args: %s", masterArgs)
		if _, err = p.SSHCommand(fmt.Sprintf("sudo docker run -d -p %s:%s --restart=always --name swarm-agent-master -v %s:%s %s manage %s",
			port, port, dockerDir, dockerDir, swarm.DockerImage, masterArgs)); err != nil {
			return err
		}
	}

	// start node agent
	log.Debug("launching swarm node")
	log.Debugf("node args: %s", nodeArgs)
	if _, err = p.SSHCommand(fmt.Sprintf("sudo docker run -d --restart=always --name swarm-agent -v %s:%s %s join %s",
		dockerDir, dockerDir, swarm.DockerImage, nodeArgs)); err != nil {
		return err
	}

	return nil
}
| zehicle/machine | libmachine/provision/utils.go | GO | apache-2.0 | 6,377 |
/* Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/operators/inverse_op.h"
#include <string>
#include <unordered_map>
namespace paddle {
namespace operators {
// Forward op: validates that Input is a (possibly batched) square matrix
// and propagates its shape and LoD to Output.
// Fix: corrected "recieved" -> "received" in the user-facing error messages.
class InverseOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    OP_INOUT_CHECK(ctx->HasInput("Input"), "Input", "Input", "Inverse");
    OP_INOUT_CHECK(ctx->HasOutput("Output"), "Output", "Output", "Inverse");

    auto input_dims = ctx->GetInputDim("Input");
    int64_t input_rank = input_dims.size();
    // A matrix inverse needs at least a 2-D tensor.
    PADDLE_ENFORCE_GE(
        input_rank, 2,
        platform::errors::InvalidArgument(
            "The dimension of Input(Input) is expected to be no less than 2. "
            "But received: Input(Input)'s dimension = %d, shape = [%s].",
            input_rank, input_dims));
    // Each dimension must be positive or -1 (unknown at compile time).
    for (int64_t i = 0; i < input_rank; ++i) {
      PADDLE_ENFORCE_EQ(
          (input_dims[i] == -1) || (input_dims[i] > 0), true,
          platform::errors::InvalidArgument(
              "Each dimension of input tensor is expected to be -1 or a "
              "positive number, but received %d. Input's shape is [%s].",
              input_dims[i], input_dims));
    }
    // The trailing two dimensions form the matrix; they must match (square)
    // whenever both are statically known.
    if (input_dims[input_rank - 2] > 0 && input_dims[input_rank - 1] > 0) {
      PADDLE_ENFORCE_EQ(input_dims[input_rank - 2], input_dims[input_rank - 1],
                        platform::errors::InvalidArgument(
                            "The last two dimensions are expected to be equal. "
                            "But received: %d and %d; "
                            "Input(Input)'s shape = [%s].",
                            input_dims[input_rank - 2],
                            input_dims[input_rank - 1], input_dims));
    }

    ctx->SetOutputDim("Output", input_dims);
    ctx->ShareLoD("Input", /*->*/ "Output");
  }
};
// Propagates Input's dtype and variable type straight through to Output.
class InverseOpInferVarType : public framework::PassInDtypeAndVarTypeToOutput {
 protected:
  std::unordered_map<std::string, std::string>& GetInputOutputWithSameType()
      const override {
    // Map of input name -> output name that must share dtype/var type.
    static std::unordered_map<std::string, std::string> m{
        {"Input", /*->*/ "Output"}};
    return m;
  }
};
// Backward op: the gradient w.r.t. Input has the same shape as the gradient
// w.r.t. Output.
class InverseGradOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    auto input_grad = framework::GradVarName("Input");
    auto output_grad = framework::GradVarName("Output");
    OP_INOUT_CHECK(ctx->HasInput("Output"), "Input", "Output", "InverseGrad");
    OP_INOUT_CHECK(ctx->HasInput(output_grad), "Input", output_grad,
                   "InverseGrad");
    // Input's gradient is only set when somebody actually requested it.
    if (ctx->HasOutput(input_grad)) {
      ctx->SetOutputDim(input_grad, ctx->GetInputDim(output_grad));
    }
  }
};
// Declares the op's inputs, outputs, and user-facing documentation string.
class InverseOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput(
        "Input",
        "(Tensor) A square matrix (2-D Tensor) or batches of square matrices"
        " to inverse.");
    AddOutput("Output", "(Tensor) The inverse of input matrix.");
    AddComment(R"DOC(
Inverse Operator
Takes the inverse of the square matrix.
)DOC");
  }
};
// Builds the inverse_grad op description from the forward op. Templated so
// it serves both static graph (OpDesc) and dygraph (OpBase) modes.
template <typename T>
class InverseGradOpMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

 protected:
  void Apply(GradOpPtr<T> grad) const override {
    grad->SetType(this->ForwardOpType() + "_grad");
    // The grad op consumes the forward Output (the computed inverse).
    grad->SetInput("Output", this->Output("Output"));
    grad->SetInput(framework::GradVarName("Output"),
                   this->OutputGrad("Output"));
    grad->SetOutput(framework::GradVarName("Input"), this->InputGrad("Input"));
  }
};
} // namespace operators
} // namespace paddle
namespace ops = paddle::operators;
// Register the forward op with its var-type inference and the grad-op makers
// for both static graph (OpDesc) and dygraph (OpBase) modes.
REGISTER_OPERATOR(inverse, ops::InverseOp, ops::InverseOpMaker,
                  ops::InverseOpInferVarType,
                  ops::InverseGradOpMaker<paddle::framework::OpDesc>,
                  ops::InverseGradOpMaker<paddle::imperative::OpBase>);

REGISTER_OPERATOR(inverse_grad, ops::InverseGradOp);

// CPU kernels for float and double.
REGISTER_OP_CPU_KERNEL(
    inverse, ops::InverseKernel<paddle::platform::CPUDeviceContext, float>,
    ops::InverseKernel<paddle::platform::CPUDeviceContext, double>);
REGISTER_OP_CPU_KERNEL(
    inverse_grad,
    ops::InverseGradKernel<paddle::platform::CPUDeviceContext, float>,
    ops::InverseGradKernel<paddle::platform::CPUDeviceContext, double>);
| luotao1/Paddle | paddle/fluid/operators/inverse_op.cc | C++ | apache-2.0 | 5,052 |
<?php
/**
* @package php-font-lib
* @link https://github.com/PhenX/php-font-lib
* @author Fabien Mรฉnager <fabien.menager@gmail.com>
* @license http://www.gnu.org/copyleft/lesser.html GNU Lesser General Public License
*/
namespace FontLib\WOFF;
use FontLib\Table\DirectoryEntry;
/**
* WOFF font file.
*
* @package php-font-lib
*
* @property TableDirectoryEntry[] $directory
*/
class File extends \FontLib\TrueType\File {
  /**
   * Lazily parses the WOFF header; subsequent calls are no-ops.
   */
  function parseHeader() {
    if (!empty($this->header)) {
      return;
    }

    $this->header = new Header($this);
    $this->header->parse();
  }

  /**
   * Loads a WOFF file and rewrites it into a temporary file as an
   * uncompressed SFNT-style layout: each table is read (and gunzipped when
   * its stored length is shorter than its original length), then written
   * back with recomputed offsets, lengths and checksums. The object's file
   * handle is finally pointed at the temp file and re-parsed.
   *
   * @param string $file Path to the WOFF font file.
   */
  public function load($file) {
    parent::load($file);

    $this->parseTableEntries();
    // Table data begins right after the directory (20 bytes per entry).
    $dataOffset = $this->pos() + count($this->directory) * 20;

    // $fr reads the original WOFF; $fw receives the inflated copy.
    $fw = $this->getTempFile(false);
    $fr = $this->f;

    $this->f = $fw;
    $offset = $this->header->encode();

    foreach ($this->directory as $entry) {
      // Read ...
      $this->f = $fr;
      $this->seek($entry->offset);
      $data = $this->read($entry->length);

      // A stored length shorter than origLength marks a zlib-compressed table.
      if ($entry->length < $entry->origLength) {
        $data = gzuncompress($data);
      }

      // Prepare data ...
      $length = strlen($data);
      $entry->length = $entry->origLength = $length;
      $entry->offset = $dataOffset;

      // Write ...
      $this->f = $fw;

      // Woff Entry
      $this->seek($offset);
      $offset += $this->write($entry->tag, 4); // tag
      $offset += $this->writeUInt32($dataOffset); // offset
      $offset += $this->writeUInt32($length); // length
      $offset += $this->writeUInt32($length); // origLength
      $offset += $this->writeUInt32(DirectoryEntry::computeChecksum($data)); // checksum

      // Data
      $this->seek($dataOffset);
      $dataOffset += $this->write($data, $length);
    }

    // Continue working from the inflated temp file.
    $this->f = $fw;
    $this->seek(0);

    // Need to re-parse this, don't know why
    $this->header = null;
    $this->directory = array();
    $this->parseTableEntries();
  }
}
| marc0l92/AmministratoreDiCondominio_webapp | application/libraries/dompdf/lib/php-font-lib/classes/WOFF/File.php | PHP | apache-2.0 | 2,056 |
๏ปฟusing Newtonsoft.Json;
namespace Nest
{
	/// <summary>
	/// A watcher action of type "logging": writes a message to the logs.
	/// </summary>
	[JsonObject]
	public interface ILoggingAction : IAction
	{
		/// <summary>The text of the log entry.</summary>
		[JsonProperty("text")]
		string Text { get; set; }

		/// <summary>The category under which the entry is logged.</summary>
		[JsonProperty("category")]
		string Category { get; set; }

		/// <summary>The level at which the entry is logged.</summary>
		[JsonProperty("level")]
		LogLevel? Level { get; set; }
	}
	/// <summary>
	/// Concrete (object-initializer style) implementation of <see cref="ILoggingAction"/>.
	/// </summary>
	public class LoggingAction : ActionBase, ILoggingAction
	{
		// Identifies this action as a "logging" action to the watcher API.
		public override ActionType ActionType => ActionType.Logging;

		public string Text { get; set; }

		public string Category { get; set; }

		public LogLevel? Level { get; set; }

		/// <param name="name">The name of the action.</param>
		public LoggingAction(string name) : base(name) {}
	}
	/// <summary>
	/// Fluent descriptor for building an <see cref="ILoggingAction"/>.
	/// </summary>
	public class LoggingActionDescriptor : ActionsDescriptorBase<LoggingActionDescriptor, ILoggingAction>, ILoggingAction
	{
		protected override ActionType ActionType => ActionType.Logging;

		// Explicit interface state; mutated through the fluent methods below.
		LogLevel? ILoggingAction.Level { get; set; }
		string ILoggingAction.Text { get; set; }
		string ILoggingAction.Category { get; set; }

		/// <param name="name">The name of the action.</param>
		public LoggingActionDescriptor(string name) : base(name) {}

		/// <summary>Sets the level at which the entry is logged.</summary>
		public LoggingActionDescriptor Level(LogLevel level) => Assign(a => a.Level = level);

		/// <summary>Sets the text of the log entry.</summary>
		public LoggingActionDescriptor Text(string text) => Assign(a => a.Text = text);

		/// <summary>Sets the category under which the entry is logged.</summary>
		public LoggingActionDescriptor Category(string category) => Assign(a => a.Category = category);
	}
}
| CSGOpenSource/elasticsearch-net | src/Nest/XPack/Watcher/Action/Logging/LoggingAction.cs | C# | apache-2.0 | 1,240 |
/* (c) British Telecommunications plc, 2009, All Rights Reserved */
package com.bt.pi.sss;
import com.bt.pi.app.common.entities.User;
/**
 * Looks up {@link User} records by their access key.
 */
public interface UserManager {
    /**
     * Checks whether a user with the given access key exists.
     *
     * @param accessKey the access key to look up
     * @return {@code true} if a matching user exists
     */
    boolean userExists(String accessKey);

    /**
     * Returns the user identified by the given access key.
     *
     * @param accessKey the access key to look up
     * @return the matching user
     */
    User getUserByAccessKey(String accessKey);
}
| barnyard/pi-sss | src/main/java/com/bt/pi/sss/UserManager.java | Java | apache-2.0 | 259 |
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.enums import DBMS
from lib.core.settings import MAXDB_SYSTEM_DBS
from lib.core.unescaper import unescaper
from plugins.dbms.maxdb.enumeration import Enumeration
from plugins.dbms.maxdb.filesystem import Filesystem
from plugins.dbms.maxdb.fingerprint import Fingerprint
from plugins.dbms.maxdb.syntax import Syntax
from plugins.dbms.maxdb.takeover import Takeover
from plugins.generic.misc import Miscellaneous
class MaxDBMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover):
    """
    This class defines SAP MaxDB methods
    """

    def __init__(self):
        # System schemas excluded from database enumeration.
        self.excludeDbsList = MAXDB_SYSTEM_DBS

        # Initialize each capability mixin explicitly; the bases are invoked
        # by hand rather than via cooperative super() calls.
        Syntax.__init__(self)
        Fingerprint.__init__(self)
        Enumeration.__init__(self)
        Filesystem.__init__(self)
        Miscellaneous.__init__(self)
        Takeover.__init__(self)
| glaudsonml/kurgan-ai | tools/sqlmap/plugins/dbms/maxdb/__init__.py | Python | apache-2.0 | 1,033 |
/*
* Copyright 2015 Alexey Andreev.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teavm.jso.impl;
import java.io.IOException;
/**
 * Callback that emits a name into the generated output for a given
 * operator-precedence context.
 *
 * @author Alexey Andreev
 */
@FunctionalInterface
interface NameEmitter {
    /**
     * Emits the name.
     *
     * @param precedence the precedence context the name is emitted in
     * @throws IOException if writing to the underlying output fails
     */
    void emit(int precedence) throws IOException;
}
| jtulach/teavm | jso/impl/src/main/java/org/teavm/jso/impl/NameEmitter.java | Java | apache-2.0 | 781 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <assert.h>
#include <stdarg.h>
#include <stdio.h>
#include <boost/lexical_cast.hpp>
#include "errmsgs.hpp"
namespace Drill{
static Drill::ErrorMessages errorMessages[]={
{ERR_NONE, 0, 0, "No Error."},
{ERR_CONN_FAILURE, ERR_CATEGORY_CONN, 0, "Connection failure. Host:%s port:%s. Error: %s."},
{ERR_CONN_EXCEPT, ERR_CATEGORY_CONN, 0, "Socket connection failure with the following exception: %s."},
{ERR_CONN_UNKPROTO, ERR_CATEGORY_CONN, 0, "Unknown protocol: %s."},
{ERR_CONN_RDFAIL, ERR_CATEGORY_CONN, 0, "Connection failed with error: %s."},
{ERR_CONN_WFAIL, ERR_CATEGORY_CONN, 0, "Synchronous socket write failed with error: %s."},
{ERR_CONN_ZOOKEEPER, ERR_CATEGORY_CONN, 0, "Zookeeper error. %s"},
{ERR_CONN_NOHSHAKE, ERR_CATEGORY_CONN, 0, "Handshake failed because the server killed the connection. "
"Expected RPC version %d."},
{ERR_CONN_ZKFAIL, ERR_CATEGORY_CONN, 0, "Failed to connect to Zookeeper."},
{ERR_CONN_ZKTIMOUT, ERR_CATEGORY_CONN, 0, "Timed out while waiting to connect."},
{ERR_CONN_ZKERR, ERR_CATEGORY_CONN, 0, "Error in reading from Zookeeper (error code: %d)."},
{ERR_CONN_ZKDBITERR, ERR_CATEGORY_CONN, 0, "Error in reading drillbit endpoint from Zookeeper (error code: %d)."},
{ERR_CONN_ZKNODBIT, ERR_CATEGORY_CONN, 0, "No drillbit found with this Zookeeper."},
{ERR_CONN_ZKNOAUTH, ERR_CATEGORY_CONN, 0, "Authentication failed."},
{ERR_CONN_ZKEXP, ERR_CATEGORY_CONN, 0, "Session expired."},
{ERR_CONN_HSHAKETIMOUT, ERR_CATEGORY_CONN, 0, "Handshake Timeout."},
{ERR_CONN_BAD_RPC_VER, ERR_CATEGORY_CONN, 0, "Handshake failed because of a RPC version mismatch. "
"Expected RPC version %d, got %d. [Server message was: (%s) %s]"},
{ERR_CONN_AUTHFAIL, ERR_CATEGORY_CONN, 0, "User authentication failed (please check the username and password)."
"[Server message was: (%s) %s]"},
{ERR_CONN_UNKNOWN_ERR, ERR_CATEGORY_CONN, 0, "Handshake Failed due to an error on the server. [Server message was: (%s) %s]"},
{ERR_CONN_NOCONN, ERR_CATEGORY_CONN, 0, "There is no connection to the server."},
{ERR_CONN_ALREADYCONN, ERR_CATEGORY_CONN, 0, "This client is already connected to a server."},
{ERR_CONN_NOCONNSTR, ERR_CATEGORY_CONN, 0, "Cannot connect if either host name or port number are empty."},
{ERR_CONN_SSLCERTFAIL, ERR_CATEGORY_CONN, 0, "SSL certificate file %s could not be loaded (exception message: %s)."},
{ERR_CONN_NOSOCKET, ERR_CATEGORY_CONN, 0, "Failed to open socket connection."},
{ERR_CONN_NOSERVERAUTH, ERR_CATEGORY_CONN, 0, "Client needs a secure connection but server does not"
" support any security mechanisms. Please contact an administrator. [Warn: This"
" could be due to a bad configuration or a security attack is in progress.]"},
{ERR_CONN_NOSERVERENC, ERR_CATEGORY_CONN, 0, "Client needs encryption but encryption is disabled on the server."
" Please check connection parameters or contact administrator. [Warn: This"
" could be due to a bad configuration or a security attack is in progress.]"},
{ERR_QRY_OUTOFMEM, ERR_CATEGORY_QRY, 0, "Out of memory."},
{ERR_QRY_COMMERR, ERR_CATEGORY_QRY, 0, "Communication error. %s"},
{ERR_QRY_INVREADLEN, ERR_CATEGORY_QRY, 0, "Internal Error: Received a message with an invalid read length."},
{ERR_QRY_INVQUERYID, ERR_CATEGORY_QRY, 0, "Internal Error: Cannot find query Id in internal structure."},
{ERR_QRY_INVRPCTYPE, ERR_CATEGORY_QRY, 0, "Unknown rpc type received from server:%d."},
{ERR_QRY_OUTOFORDER, ERR_CATEGORY_QRY, 0, "Internal Error: Query result received before query id. Aborting ..."},
{ERR_QRY_INVRPC, ERR_CATEGORY_QRY, 0, "Rpc Error: %s."},
{ERR_QRY_TIMOUT, ERR_CATEGORY_QRY, 0, "Timed out waiting for server to respond."},
{ERR_QRY_FAILURE, ERR_CATEGORY_QRY, 0, "Query execution error. Details:[ \n%s\n]"},
{ERR_QRY_SELVEC2, ERR_CATEGORY_QRY, 0, "Receiving a selection_vector_2 from the server came as a complete surprise at this point"},
{ERR_QRY_RESPFAIL, ERR_CATEGORY_QRY, 0, "Received a RESPONSE_FAILURE from the server."},
{ERR_QRY_UNKQRYSTATE, ERR_CATEGORY_QRY, 0, "Got an unknown query state message from the server."},
{ERR_QRY_UNKQRY, ERR_CATEGORY_QRY, 0, "Query not found on server. It might have been terminated already."},
{ERR_QRY_CANCELED, ERR_CATEGORY_QRY, 0, "Query has been cancelled"},
{ERR_QRY_COMPLETED, ERR_CATEGORY_QRY, 0, "Query completed."},
{ERR_QRY_16, ERR_CATEGORY_QRY, 0, "Query Failed."},
{ERR_QRY_17, ERR_CATEGORY_QRY, 0, "Query Failed."},
{ERR_QRY_18, ERR_CATEGORY_QRY, 0, "Query Failed."},
{ERR_QRY_19, ERR_CATEGORY_QRY, 0, "Query Failed."},
{ERR_QRY_20, ERR_CATEGORY_QRY, 0, "Query Failed."},
};
std::string getMessage(uint32_t msgId, ...){
char str[10240];
std::string s;
assert((ERR_NONE <= msgId) && (msgId < ERR_QRY_MAX));
va_list args;
va_start (args, msgId);
vsnprintf (str, sizeof(str), errorMessages[msgId-DRILL_ERR_START].msgFormatStr, args);
va_end (args);
s=std::string("[")+boost::lexical_cast<std::string>(msgId)+std::string("]")+str;
return s;
}
}// namespace Drill
| KulykRoman/drill | contrib/native/client/src/clientlib/errmsgs.cpp | C++ | apache-2.0 | 6,003 |
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package adwords.axis.v201502.advancedoperations;
import com.google.api.ads.adwords.axis.factory.AdWordsServices;
import com.google.api.ads.adwords.axis.v201502.cm.AdvertisingChannelType;
import com.google.api.ads.adwords.axis.v201502.cm.ApiException;
import com.google.api.ads.adwords.axis.v201502.cm.BiddingStrategyConfiguration;
import com.google.api.ads.adwords.axis.v201502.cm.BiddingStrategyOperation;
import com.google.api.ads.adwords.axis.v201502.cm.BiddingStrategyReturnValue;
import com.google.api.ads.adwords.axis.v201502.cm.BiddingStrategyServiceInterface;
import com.google.api.ads.adwords.axis.v201502.cm.Budget;
import com.google.api.ads.adwords.axis.v201502.cm.BudgetBudgetDeliveryMethod;
import com.google.api.ads.adwords.axis.v201502.cm.BudgetBudgetPeriod;
import com.google.api.ads.adwords.axis.v201502.cm.BudgetOperation;
import com.google.api.ads.adwords.axis.v201502.cm.BudgetReturnValue;
import com.google.api.ads.adwords.axis.v201502.cm.BudgetServiceInterface;
import com.google.api.ads.adwords.axis.v201502.cm.Campaign;
import com.google.api.ads.adwords.axis.v201502.cm.CampaignOperation;
import com.google.api.ads.adwords.axis.v201502.cm.CampaignReturnValue;
import com.google.api.ads.adwords.axis.v201502.cm.CampaignServiceInterface;
import com.google.api.ads.adwords.axis.v201502.cm.Money;
import com.google.api.ads.adwords.axis.v201502.cm.NetworkSetting;
import com.google.api.ads.adwords.axis.v201502.cm.Operator;
import com.google.api.ads.adwords.axis.v201502.cm.SharedBiddingStrategy;
import com.google.api.ads.adwords.axis.v201502.cm.TargetSpendBiddingScheme;
import com.google.api.ads.adwords.lib.client.AdWordsSession;
import com.google.api.ads.common.lib.auth.OfflineCredentials;
import com.google.api.ads.common.lib.auth.OfflineCredentials.Api;
import com.google.api.client.auth.oauth2.Credential;
import java.rmi.RemoteException;
import javax.xml.rpc.ServiceException;
/**
* This example adds a Shared Bidding Strategy and uses it to construct a campaign.
*/
public class UseSharedBiddingStrategy {
// Optional: If you'd like to use an existing shared budget, assign a
// shared budget ID here.
private static final Long SHARED_BUDGET_ID = null;
public static void main(String[] args) throws Exception {
Credential oAuth2Credential = new OfflineCredentials.Builder()
.forApi(Api.ADWORDS)
.fromFile()
.build()
.generateCredential();
// Construct an AdWordsSession.
AdWordsSession session = new AdWordsSession.Builder()
.fromFile()
.withOAuth2Credential(oAuth2Credential)
.build();
AdWordsServices adWordsServices = new AdWordsServices();
runExample(adWordsServices, session, SHARED_BUDGET_ID);
}
public static void runExample(AdWordsServices adWordsServices, AdWordsSession session,
Long sharedBudgetId) throws Exception {
SharedBiddingStrategy sharedBiddingStrategy = createBiddingStrategy(adWordsServices, session);
if (sharedBudgetId == null) {
Budget budget = createSharedBudget(adWordsServices, session);
sharedBudgetId = budget.getBudgetId();
}
createCampaignWithBiddingStrategy(adWordsServices, session, sharedBiddingStrategy.getId(),
sharedBudgetId);
}
/**
* Creates the bidding strategy object.
*
* @param adWordsServices the user to run the example with
* @param session the AdWordsSession
* @throws RemoteException
* @throws ApiException
* @throws ServiceException
*/
private static SharedBiddingStrategy createBiddingStrategy(AdWordsServices adWordsServices,
AdWordsSession session)
throws ApiException, RemoteException, ServiceException {
// Get the BiddingStrategyService, which loads the required classes.
BiddingStrategyServiceInterface biddingStrategyService =
adWordsServices.get(session, BiddingStrategyServiceInterface.class);
// Create a shared bidding strategy.
SharedBiddingStrategy sharedBiddingStrategy = new SharedBiddingStrategy();
sharedBiddingStrategy.setName("Maximize Clicks" + System.currentTimeMillis());
TargetSpendBiddingScheme biddingScheme = new TargetSpendBiddingScheme();
// Optionally set additional bidding scheme parameters.
biddingScheme.setBidCeiling(new Money(null, 2000000L));
biddingScheme.setSpendTarget(new Money(null, 20000000L));
sharedBiddingStrategy.setBiddingScheme(biddingScheme);
// Create operation.
BiddingStrategyOperation operation = new BiddingStrategyOperation();
operation.setOperand(sharedBiddingStrategy);
operation.setOperator(Operator.ADD);
BiddingStrategyOperation[] operations = new BiddingStrategyOperation[] {operation};
BiddingStrategyReturnValue result = biddingStrategyService.mutate(operations);
SharedBiddingStrategy newBiddingStrategy = result.getValue(0);
System.out.printf(
"Shared bidding strategy with name '%s' and ID %d of type %s was created.\n",
newBiddingStrategy.getName(), newBiddingStrategy.getId(),
newBiddingStrategy.getBiddingScheme().getBiddingSchemeType());
return newBiddingStrategy;
}
/**
* Creates an explicit budget to be used only to create the Campaign.
*
* @param adWordsServices the user to run the example with
* @param session the AdWordsSession
* @throws ServiceException
* @throws RemoteException
* @throws ApiException
*/
private static Budget createSharedBudget(AdWordsServices adWordsServices,
AdWordsSession session)
throws ServiceException, ApiException, RemoteException {
// Get the BudgetService, which loads the required classes.
BudgetServiceInterface budgetService =
adWordsServices.get(session, BudgetServiceInterface.class);
// Create a shared budget.
Budget budget = new Budget();
budget.setName("Shared Interplanetary Budget #" + System.currentTimeMillis());
budget.setPeriod(BudgetBudgetPeriod.DAILY);
budget.setAmount(new Money(null, 50000000L));
budget.setDeliveryMethod(BudgetBudgetDeliveryMethod.STANDARD);
budget.setIsExplicitlyShared(true);
BudgetOperation operation = new BudgetOperation();
operation.setOperand(budget);
operation.setOperator(Operator.ADD);
BudgetOperation[] operations = new BudgetOperation[] {operation};
// Make the mutate request.
BudgetReturnValue result = budgetService.mutate(operations);
Budget newBudget = result.getValue(0);
System.out.printf("Budget with name '%s', ID %d was created.\n", newBudget.getName(),
newBudget.getBudgetId());
return newBudget;
}
/**
* Create a Campaign with a Shared Bidding Strategy.
*
* @param adWordsServices the user to run the example with
* @param session the AdWordsSession
* @param biddingStrategyId the bidding strategy id to use
* @param sharedBudgetId the shared budget id to use
* @throws RemoteException
* @throws ApiException
* @throws ServiceException
*/
private static Campaign createCampaignWithBiddingStrategy(
AdWordsServices adWordsServices, AdWordsSession session, Long biddingStrategyId,
Long sharedBudgetId) throws ApiException, RemoteException, ServiceException {
// Get the CampaignService, which loads the required classes.
CampaignServiceInterface campaignService =
adWordsServices.get(session, CampaignServiceInterface.class);
// Create campaign.
Campaign campaign = new Campaign();
campaign.setName("Interplanetary Cruise #" + System.currentTimeMillis());
// Set the budget.
Budget budget = new Budget();
budget.setBudgetId(sharedBudgetId);
campaign.setBudget(budget);
// Set bidding strategy (required).
BiddingStrategyConfiguration biddingStrategyConfiguration = new BiddingStrategyConfiguration();
biddingStrategyConfiguration.setBiddingStrategyId(biddingStrategyId);
campaign.setBiddingStrategyConfiguration(biddingStrategyConfiguration);
// Set advertising channel type (required).
campaign.setAdvertisingChannelType(AdvertisingChannelType.SEARCH);
// Set network targeting (recommended).
NetworkSetting networkSetting = new NetworkSetting();
networkSetting.setTargetGoogleSearch(true);
networkSetting.setTargetSearchNetwork(true);
networkSetting.setTargetContentNetwork(true);
campaign.setNetworkSetting(networkSetting);
// Create operation.
CampaignOperation operation = new CampaignOperation();
operation.setOperand(campaign);
operation.setOperator(Operator.ADD);
CampaignReturnValue result = campaignService.mutate(new CampaignOperation[] {operation});
Campaign newCampaign = result.getValue(0);
System.out.printf("Campaign with name '%s', ID %d and bidding scheme ID %d was created.\n",
newCampaign.getName(), newCampaign.getId(),
newCampaign.getBiddingStrategyConfiguration().getBiddingStrategyId());
return newCampaign;
}
}
| raja15792/googleads-java-lib | examples/adwords_axis/src/main/java/adwords/axis/v201502/advancedoperations/UseSharedBiddingStrategy.java | Java | apache-2.0 | 9,544 |
<?php
/**
* Copyright 2012-2015 ContactLab, Italy
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * SOAP response wrapper for the createCampaign operation.
 */
class createCampaignResponse {
  /** @var Campaign $return the campaign returned by the service, if any */
  public $return = null;
}
| contactlab/soap-api-php-client | src/createCampaignResponse.php | PHP | apache-2.0 | 742 |
/*
* Copyright 2014 Space Dynamics Laboratory - Utah State University Research Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.usu.sdl.openstorefront.web.init;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.servlet.annotation.WebListener;
/**
 * Servlet-context lifecycle hook registered with the container via
 * {@code @WebListener}. Both callbacks are currently empty; this class is a
 * placeholder for application startup/shutdown work.
 *
 * @author dshurtleff
 */
@WebListener
public class ApplicationInit
		implements ServletContextListener
{
	// Called by the container once the servlet context is ready. Intentionally a no-op.
	@Override
	public void contextInitialized(ServletContextEvent sce)
	{
	}
	// Called by the container when the servlet context shuts down. Intentionally a no-op.
	@Override
	public void contextDestroyed(ServletContextEvent sce)
	{
	}
}
| skycow/Open-Storefront | server/openstorefront/openstorefront-web/src/main/java/edu/usu/sdl/openstorefront/web/init/ApplicationInit.java | Java | apache-2.0 | 1,105 |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.scanner;
import java.util.Collection;
import java.util.HashMap;
import org.drools.compiler.kie.builder.impl.MessageImpl;
import org.junit.Test;
import org.kie.api.KieServices;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.ReleaseId;
import org.kie.api.builder.model.KieModuleModel;
import org.kie.api.conf.EqualityBehaviorOption;
import org.kie.api.conf.EventProcessingOption;
import org.kie.internal.builder.IncrementalResults;
import org.kie.internal.builder.InternalKieBuilder;
import static org.junit.Assert.*;
/**
 * Tests incremental (per-file-set) compilation of KIE modules: adding, replacing
 * and deleting DRL resources after an initial {@code buildAll()}, and checking the
 * resulting metadata and error messages.
 */
public class KieModuleIncrementalCompilationTest extends AbstractKieCiTest {
    /**
     * Deleting a DRL file and rebuilding its file set must remove that file's
     * rules from the module metadata (2 rules before the delete, 1 after).
     */
    @Test
    public void testCheckMetaDataAfterIncrementalDelete() throws Exception {
        String drl1 = "package org.kie.scanner\n" +
                "rule R1 when\n" +
                "   String()\n" +
                "then\n" +
                "end\n";
        String drl2 = "package org.kie.scanner\n" +
                "rule R2_2 when\n" +
                "   String( )\n" +
                "then\n" +
                "end\n";
        KieServices ks = KieServices.Factory.get();
        KieFileSystem kfs = ks.newKieFileSystem()
                .write( "src/main/resources/r1.drl", drl1 )
                .write( "src/main/resources/r2.drl", drl2 );
        KieBuilder kieBuilder = ks.newKieBuilder( kfs ).buildAll();
        assertEquals( 2, getRuleNames( kieBuilder ).get( "org.kie.scanner" ).size() );
        kfs.delete( "src/main/resources/r2.drl" );
        // NOTE(review): the returned IncrementalResults is unused here — the build()
        // call is made for its side effect on the builder's module.
        IncrementalResults addResults = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/r2.drl" ).build();
        assertEquals( 1, getRuleNames( kieBuilder ).get( "org.kie.scanner" ).size() );
    }
    // Maps each package in the built module's metadata to its rule names.
    private HashMap<String, Collection<String>> getRuleNames( KieBuilder kieBuilder ) {
        KieModuleMetaData kieModuleMetaData = getKieModuleMetaData( kieBuilder );
        HashMap<String, Collection<String>> ruleNames = new HashMap<String, Collection<String>>();
        for ( String packageName : kieModuleMetaData.getPackages() ) {
            ruleNames.put( packageName, kieModuleMetaData.getRuleNamesInPackage( packageName ) );
        }
        return ruleNames;
    }
    // Extracts metadata from the builder's module, ignoring build errors.
    private KieModuleMetaData getKieModuleMetaData( KieBuilder kieBuilder ) {
        return KieModuleMetaData.Factory.newKieModuleMetaData( ( (InternalKieBuilder) kieBuilder ).getKieModuleIgnoringErrors() );
    }
    /**
     * An initial build that fails (malformed POM, no version) must still support
     * incremental builds: adding a broken DRL afterwards reports exactly one new
     * error and removes none.
     */
    @Test
    public void testIncrementalCompilationFirstBuildHasErrors() throws Exception {
        KieServices ks = KieServices.Factory.get();
        //Malformed POM - No Version information
        ReleaseId releaseId = ks.newReleaseId( "org.kie", "incremental-test-with-invalid pom", "" );
        KieFileSystem kfs = createKieFileSystemWithKProject( ks );
        kfs.writePomXML( getPom( releaseId ) );
        //Valid
        String drl1 =
                "rule R1 when\n" +
                "   $s : String()\n" +
                "then\n" +
                "end\n";
        //Invalid
        String drl2 =
                "rule R2 when\n" +
                "   $s : Strin( )\n" +
                "then\n" +
                "end\n";
        //Write Rule 1 - No DRL errors, but POM is in error
        kfs.write( "src/main/resources/KBase1/r1.drl", drl1 );
        KieBuilder kieBuilder = ks.newKieBuilder( kfs ).buildAll();
        assertEquals( 1,
                      kieBuilder.getResults().getMessages( org.kie.api.builder.Message.Level.ERROR ).size() );
        //Add file with error - expect 1 "added" error message
        kfs.write( "src/main/resources/KBase1/r2.drl", drl2 );
        IncrementalResults addResults = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/KBase1/r2.drl" ).build();
        assertEquals( 1, addResults.getAddedMessages().size() );
        assertEquals( 0, addResults.getRemovedMessages().size() );
    }
    /**
     * Incrementally adding a function and then replacing a rule that calls it
     * must produce no new or removed messages at any step.
     */
    @Test
    public void checkIncrementalCompilationWithRuleFunctionRule() throws Exception {
        String rule_1 = "package org.kie.scanner\n" +
                "rule R1 when\n" +
                "   String()\n" +
                "then\n" +
                "end\n";
        String rule_2 = "package org.kie.scanner\n" +
                "rule R1 when\n" +
                "   String()\n" +
                "then\n" +
                "   System.out.println(MyFunction());\n" +
                "end\n";
        String function = "package org.kie.scanner\n" +
                "function int MyFunction() {\n" +
                "   return 1;\n" +
                "}\n";
        KieServices ks = KieServices.Factory.get();
        KieFileSystem kfs = ks.newKieFileSystem();
        kfs.write( "src/main/resources/org/kie/scanner/rule.drl", rule_1 );
        KieBuilder kieBuilder = ks.newKieBuilder( kfs ).buildAll();
        assertEquals( 0,
                      kieBuilder.getResults().getMessages( org.kie.api.builder.Message.Level.ERROR ).size() );
        kfs.write( "src/main/resources/org/kie/scanner/function.drl", function );
        IncrementalResults addResults1 = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/org/kie/scanner/function.drl" ).build();
        assertEquals( 0, addResults1.getAddedMessages().size() );
        assertEquals( 0, addResults1.getRemovedMessages().size() );
        kfs.write( "src/main/resources/org/kie/scanner/rule.drl", rule_2 );
        IncrementalResults addResults2 = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/org/kie/scanner/rule.drl" ).build();
        assertEquals( 0, addResults2.getAddedMessages().size() );
        assertEquals( 0, addResults2.getRemovedMessages().size() );
    }
    /**
     * A rule referencing a missing function fails the initial build (1 error);
     * incrementally adding the function must remove that error (1 removed, 0 added).
     */
    @Test
    public void checkIncrementalCompilationWithRuleThenFunction() throws Exception {
        String rule = "package org.kie.scanner\n" +
                "rule R1 when\n" +
                "   String()\n" +
                "then\n" +
                "   System.out.println(MyFunction());\n" +
                "end\n";
        String function = "package org.kie.scanner\n" +
                "function int MyFunction() {\n" +
                "   return 1;\n" +
                "}\n";
        KieServices ks = KieServices.Factory.get();
        KieFileSystem kfs = ks.newKieFileSystem();
        kfs.write( "src/main/resources/org/kie/scanner/rule.drl", rule );
        KieBuilder kieBuilder = ks.newKieBuilder( kfs ).buildAll();
        assertEquals( 1,
                      kieBuilder.getResults().getMessages( org.kie.api.builder.Message.Level.ERROR ).size() );
        kfs.write( "src/main/resources/org/kie/scanner/function.drl", function );
        IncrementalResults addResults1 = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/org/kie/scanner/function.drl" ).build();
        assertEquals( 0, addResults1.getAddedMessages().size() );
        assertEquals( 1, addResults1.getRemovedMessages().size() );
    }
    /**
     * When the function is built first, incrementally adding a rule that calls
     * it must compile cleanly (no added or removed messages).
     */
    @Test
    public void checkIncrementalCompilationWithFunctionThenRule() throws Exception {
        String rule = "package org.kie.scanner\n" +
                "rule R1 when\n" +
                "   String()\n" +
                "then\n" +
                "   System.out.println(MyFunction());\n" +
                "end\n";
        String function = "package org.kie.scanner\n" +
                "function int MyFunction() {\n" +
                "   return 1;\n" +
                "}\n";
        KieServices ks = KieServices.Factory.get();
        KieFileSystem kfs = ks.newKieFileSystem();
        kfs.write( "src/main/resources/org/kie/scanner/function.drl", function );
        KieBuilder kieBuilder = ks.newKieBuilder( kfs ).buildAll();
        assertEquals( 0,
                      kieBuilder.getResults().getMessages( org.kie.api.builder.Message.Level.ERROR ).size() );
        kfs.write( "src/main/resources/org/kie/scanner/rule.drl", rule );
        IncrementalResults addResults = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/org/kie/scanner/rule.drl" ).build();
        assertEquals( 0, addResults.getAddedMessages().size() );
        assertEquals( 0, addResults.getRemovedMessages().size() );
    }
    /**
     * With two KIE bases defined, an invalid incremental addition must report one
     * error per KIE base (2 total), each message carrying its KIE base name.
     */
    @Test
    public void checkIncrementalCompilationWithMultipleKieBases() throws Exception {
        String rule = "package org.kie.scanner\n" +
                "rule R1 when\n" +
                "then\n" +
                "end\n";
        String invalidRule = "package org.kie.scanner\n" +
                "rule R2 when\n" +
                "   Cheese()\n" + // missing import
                "then\n" +
                "end\n";
        KieServices ks = KieServices.Factory.get();
        KieFileSystem kfs = createKieFileSystemWithTwoKBases(ks);
        kfs.write("src/main/resources/org/kie/scanner/rule.drl",
                  rule);
        KieBuilder kieBuilder = ks.newKieBuilder(kfs).buildAll();
        assertEquals(0,
                     kieBuilder.getResults().getMessages().size());
        kfs.write("src/main/resources/org/kie/scanner/invalidRule.drl",
                  invalidRule);
        IncrementalResults addResults = ((InternalKieBuilder) kieBuilder).createFileSet("src/main/resources/org/kie/scanner/invalidRule.drl").build();
        assertEquals(2, addResults.getAddedMessages().size());
        addResults
                .getAddedMessages()
                .stream()
                .map(m -> (MessageImpl) m )
                .forEach(m -> assertNotNull(m.getKieBaseName()));
    }
    // Builds a file system whose kmodule declares two KIE bases: "default" (the
    // default base) and "kbase1", both using EQUALITY + STREAM mode.
    private KieFileSystem createKieFileSystemWithTwoKBases(final KieServices ks) {
        final KieModuleModel kproj = ks.newKieModuleModel();
        kproj.newKieBaseModel("default").setDefault(true)
                .setEqualsBehavior( EqualityBehaviorOption.EQUALITY )
                .setEventProcessingMode( EventProcessingOption.STREAM );
        kproj.newKieBaseModel("kbase1").setDefault(false)
                .setEqualsBehavior(EqualityBehaviorOption.EQUALITY)
                .setEventProcessingMode(EventProcessingOption.STREAM);
        final KieFileSystem kfs = ks.newKieFileSystem();
        kfs.writeKModuleXML(kproj.toXML());
        return kfs;
    }
}
| reynoldsm88/drools | kie-ci/src/test/java/org/kie/scanner/KieModuleIncrementalCompilationTest.java | Java | apache-2.0 | 10,918 |
๏ปฟusing System;
using BEPUphysics.BroadPhaseEntries;
using BEPUphysics.BroadPhaseSystems;
using BEPUphysics.BroadPhaseEntries.MobileCollidables;
using BEPUphysics.CollisionTests;
using BEPUphysics.CollisionTests.CollisionAlgorithms.GJK;
using BEPUphysics.CollisionTests.Manifolds;
using BEPUphysics.Constraints.Collision;
using BEPUphysics.PositionUpdating;
using BEPUphysics.Settings;
namespace BEPUphysics.NarrowPhaseSystems.Pairs
{
    ///<summary>
    /// Handles a convex-convex collision pair.
    ///</summary>
    public class GeneralConvexPairHandler : ConvexConstraintPairHandler
    {
        // The two convex collidables making up the pair; assigned in Initialize,
        // cleared in CleanUp.
        ConvexCollidable convexA;
        ConvexCollidable convexB;
        // Contact manifold shared across the pair's lifetime; reused between updates.
        GeneralConvexContactManifold contactManifold = new GeneralConvexContactManifold();
        ///<summary>
        /// Gets the first collidable of the pair.
        ///</summary>
        public override Collidable CollidableA
        {
            get { return convexA; }
        }
        ///<summary>
        /// Gets the second collidable of the pair.
        ///</summary>
        public override Collidable CollidableB
        {
            get { return convexB; }
        }
        /// <summary>
        /// Gets the contact manifold used by the pair handler.
        /// </summary>
        public override ContactManifold ContactManifold
        {
            get { return contactManifold; }
        }
        ///<summary>
        /// Gets the entity owning the first collidable.
        ///</summary>
        public override Entities.Entity EntityA
        {
            get { return convexA.entity; }
        }
        ///<summary>
        /// Gets the entity owning the second collidable.
        ///</summary>
        public override Entities.Entity EntityB
        {
            get { return convexB.entity; }
        }
        ///<summary>
        /// Initializes the pair handler.
        ///</summary>
        ///<param name="entryA">First entry in the pair.</param>
        ///<param name="entryB">Second entry in the pair.</param>
        ///<exception cref="ArgumentException">Thrown if either entry is not a ConvexCollidable.</exception>
        public override void Initialize(BroadPhaseEntry entryA, BroadPhaseEntry entryB)
        {
            convexA = entryA as ConvexCollidable;
            convexB = entryB as ConvexCollidable;
            if (convexA == null || convexB == null)
            {
                throw new ArgumentException("Inappropriate types used to initialize pair.");
            }
            base.Initialize(entryA, entryB);
        }
        ///<summary>
        /// Cleans up the pair handler.
        ///</summary>
        public override void CleanUp()
        {
            base.CleanUp();
            // Drop references so the pooled handler does not pin the collidables.
            convexA = null;
            convexB = null;
        }
    }
}
| Anomalous-Software/BEPUPhysics | BEPUphysics/NarrowPhaseSystems/Pairs/GeneralConvexPairHandler.cs | C# | apache-2.0 | 2,405 |
<?php
/**
 * Application definition for Differential, Phabricator's pre-commit code
 * review tool. Declares the application's routes, policies, mail integration,
 * remarkup rules and launcher metadata.
 */
final class PhabricatorDifferentialApplication extends PhabricatorApplication {

  public function getBaseURI() {
    return '/differential/';
  }

  public function getName() {
    return pht('Differential');
  }

  public function getMenuName() {
    return pht('Code Review');
  }

  public function getShortDescription() {
    return pht('Pre-Commit Review');
  }

  // FontAwesome icon identifier used in the application launcher.
  public function getIcon() {
    return 'fa-cog';
  }

  public function isPinnedByDefault(PhabricatorUser $viewer) {
    return true;
  }

  public function getHelpDocumentationArticles(PhabricatorUser $viewer) {
    return array(
      array(
        'name' => pht('Differential User Guide'),
        'href' => PhabricatorEnv::getDoclink('Differential User Guide'),
      ),
    );
  }

  public function getFactObjectsForAnalysis() {
    return array(
      new DifferentialRevision(),
    );
  }

  // UTF-8 bytes for U+2699 (gear symbol), shown as the page title glyph.
  public function getTitleGlyph() {
    return "\xE2\x9A\x99";
  }

  public function getOverview() {
    return pht(
      'Differential is a **code review application** which allows '.
      'engineers to review, discuss and approve changes to software.');
  }

  // URI patterns: /D123 is the short revision view; everything else lives
  // under /differential/. Named captures become request parameters.
  public function getRoutes() {
    return array(
      '/D(?P<id>[1-9]\d*)' => 'DifferentialRevisionViewController',
      '/differential/' => array(
        '(?:query/(?P<queryKey>[^/]+)/)?'
          => 'DifferentialRevisionListController',
        'diff/' => array(
          '(?P<id>[1-9]\d*)/' => 'DifferentialDiffViewController',
          'create/' => 'DifferentialDiffCreateController',
        ),
        'changeset/' => 'DifferentialChangesetViewController',
        'revision/' => array(
          $this->getEditRoutePattern('edit/')
            => 'DifferentialRevisionEditController',
          $this->getEditRoutePattern('attach/(?P<diffID>[^/]+)/to/')
            => 'DifferentialRevisionEditController',
          'closedetails/(?P<phid>[^/]+)/'
            => 'DifferentialRevisionCloseDetailsController',
          'update/(?P<revisionID>[1-9]\d*)/'
            => 'DifferentialDiffCreateController',
          'operation/(?P<id>[1-9]\d*)/'
            => 'DifferentialRevisionOperationController',
          'inlines/(?P<id>[1-9]\d*)/'
            => 'DifferentialRevisionInlinesController',
        ),
        'comment/' => array(
          'preview/(?P<id>[1-9]\d*)/' => 'DifferentialCommentPreviewController',
          'save/(?P<id>[1-9]\d*)/' => 'DifferentialCommentSaveController',
          'inline/' => array(
            'preview/(?P<id>[1-9]\d*)/'
              => 'DifferentialInlineCommentPreviewController',
            'edit/(?P<id>[1-9]\d*)/'
              => 'DifferentialInlineCommentEditController',
          ),
        ),
        'preview/' => 'PhabricatorMarkupPreviewController',
      ),
    );
  }

  // Ordering weight used when sorting applications.
  public function getApplicationOrder() {
    return 0.100;
  }

  public function getRemarkupRules() {
    return array(
      new DifferentialRemarkupRule(),
    );
  }

  public function supportsEmailIntegration() {
    return true;
  }

  public function getAppEmailBlurb() {
    return pht(
      'Send email to these addresses to create revisions. The body of the '.
      'message and / or one or more attachments should be the output of a '.
      '"diff" command. %s',
      phutil_tag(
        'a',
        array(
          'href' => $this->getInboundEmailSupportLink(),
        ),
        pht('Learn More')));
  }

  // Adds a Differential-specific policy capability: the default view policy
  // applied to newly created revisions.
  protected function getCustomCapabilities() {
    return array(
      DifferentialDefaultViewCapability::CAPABILITY => array(
        'caption' => pht('Default view policy for newly created revisions.'),
        'template' => DifferentialRevisionPHIDType::TYPECONST,
        'capability' => PhabricatorPolicyCapability::CAN_VIEW,
      ),
    );
  }

  public function getMailCommandObjects() {
    return array(
      'revision' => array(
        'name' => pht('Email Commands: Revisions'),
        'header' => pht('Interacting with Differential Revisions'),
        'object' => new DifferentialRevision(),
        'summary' => pht(
          'This page documents the commands you can use to interact with '.
          'revisions in Differential.'),
      ),
    );
  }

  public function getApplicationSearchDocumentTypes() {
    return array(
      DifferentialRevisionPHIDType::TYPECONST,
    );
  }

}
| r4nt/phabricator | src/applications/differential/application/PhabricatorDifferentialApplication.php | PHP | apache-2.0 | 4,319 |
/*
* Copyright 2019 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.profiler.sender.grpc;
import com.navercorp.pinpoint.common.util.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Schedules {@link ReconnectJob}s on a {@link ScheduledExecutorService}, delaying
 * each attempt by the job's own backoff interval.
 *
 * @author Woonduk Kang(emeroad)
 */
public class ReconnectExecutor {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    private volatile boolean shutdown;

    private final ScheduledExecutorService scheduledExecutorService;

    // Counts schedule attempts rejected by the underlying executor.
    private final AtomicLong rejectedCounter = new AtomicLong();

    public ReconnectExecutor(ScheduledExecutorService scheduledExecutorService) {
        this.scheduledExecutorService = Assert.requireNonNull(scheduledExecutorService, "scheduledExecutorService");
    }

    /**
     * Schedules the given command, which must be a {@link ReconnectJob}, after its
     * next backoff delay. Silently drops the command once {@link #close()} was called;
     * rejected submissions are counted and logged.
     */
    private void execute0(Runnable command) {
        Assert.requireNonNull(command, "command");
        if (shutdown) {
            logger.debug("already shutdown");
            return;
        }
        if (!(command instanceof ReconnectJob)) {
            throw new IllegalArgumentException("unknown command type " + command);
        }
        final ReconnectJob job = (ReconnectJob) command;
        try {
            scheduledExecutorService.schedule(job, job.nextBackoffNanos(), TimeUnit.NANOSECONDS);
        } catch (RejectedExecutionException e) {
            final long failCount = rejectedCounter.incrementAndGet();
            logger.info("{} reconnectJob scheduled fail {}", command, failCount);
        }
    }

    /** Stops accepting new work; already-scheduled jobs are not cancelled here. */
    public void close() {
        shutdown = true;
    }

    /**
     * Wraps the given job with exponential backoff and returns a {@link Reconnector}
     * that dispatches it through this executor.
     */
    public Reconnector newReconnector(Runnable reconnectJob) {
        Assert.requireNonNull(reconnectJob, "reconnectJob");
        if (logger.isInfoEnabled()) {
            logger.info("newReconnector(reconnectJob = [{}])", reconnectJob);
        }
        final Executor dispatch = this::execute0;
        return new ReconnectAdaptor(dispatch, wrapReconnectJob(reconnectJob));
    }

    // Adds exponential backoff behavior around a plain Runnable.
    private ReconnectJob wrapReconnectJob(Runnable runnable) {
        return new ExponentialBackoffReconnectJob(runnable);
    }
}
| suraj-raturi/pinpoint | profiler/src/main/java/com/navercorp/pinpoint/profiler/sender/grpc/ReconnectExecutor.java | Java | apache-2.0 | 3,111 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.internal.csv;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
/** Utility methods for dealing with CSV files */
public class CSVUtils {
private static final String[] EMPTY_STRING_ARRAY = new String[0];
private static final String[][] EMPTY_DOUBLE_STRING_ARRAY = new String[0][0];
/**
* <code>CSVUtils</code> instances should NOT be constructed in standard programming.
*
* <p>This constructor is public to permit tools that require a JavaBean instance to operate.
*/
public CSVUtils() {}
/**
* Converts an array of string values into a single CSV line. All <code>null</code> values are
* converted to the string <code>"null"</code>, all strings equal to <code>"null"</code> will
* additionally get quotes around.
*
* @param values the value array
* @return the CSV string, will be an empty string if the length of the value array is 0
*/
public static String printLine(String[] values, CSVStrategy strategy) {
// set up a CSVUtils
StringWriter stringWriter = new StringWriter();
CSVPrinter csvPrinter = new CSVPrinter(stringWriter, strategy);
// check for null values an "null" as strings and convert them
// into the strings "null" and "\"null\""
for (int i = 0; i < values.length; i++) {
if (values[i] == null) {
values[i] = "null";
} else if (values[i].equals("null")) {
values[i] = "\"null\"";
}
}
// convert to CSV
try {
csvPrinter.println(values);
} catch (IOException e) {
// should not happen with StringWriter
}
// as the resulting string has \r\n at the end, we will trim that away
return stringWriter.toString().trim();
}
// ======================================================
// static parsers
// ======================================================
/**
* Parses the given String according to the default {@link CSVStrategy}.
*
* @param s CSV String to be parsed.
* @return parsed String matrix (which is never null)
* @throws IOException in case of error
*/
public static String[][] parse(String s) throws IOException {
if (s == null) {
throw new IllegalArgumentException("Null argument not allowed.");
}
String[][] result = (new CSVParser(new StringReader(s))).getAllValues();
if (result == null) {
// since CSVStrategy ignores empty lines an empty array is returned
// (i.e. not "result = new String[][] {{""}};")
result = EMPTY_DOUBLE_STRING_ARRAY;
}
return result;
}
/**
* Parses the first line only according to the default {@link CSVStrategy}.
*
* <p>Parsing empty string will be handled as valid records containing zero elements, so the
* following property holds: parseLine("").length == 0.
*
* @param s CSV String to be parsed.
* @return parsed String vector (which is never null)
* @throws IOException in case of error
*/
public static String[] parseLine(String s) throws IOException {
if (s == null) {
throw new IllegalArgumentException("Null argument not allowed.");
}
// uh,jh: make sure that parseLine("").length == 0
if (s.length() == 0) {
return EMPTY_STRING_ARRAY;
}
return (new CSVParser(new StringReader(s))).getLine();
}
}
| apache/solr | solr/core/src/java/org/apache/solr/internal/csv/CSVUtils.java | Java | apache-2.0 | 4,128 |
/*
* Autosleep
* Copyright (C) 2016 Orange
* Authors: Benjamin Einaudi benjamin.einaudi@orange.com
* Arnaud Ruffin arnaud.ruffin@orange.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudfoundry.autosleep.ui.proxy;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.web.client.RestTemplate;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
/**
 * Provides the {@link RestTemplate} used for outbound HTTP calls.
 *
 * <p>When the {@code autowakeup.skip.ssl.validation} property is {@code true}, the
 * returned template accepts any server certificate and any hostname. This is meant
 * for environments with self-signed certificates only — never production traffic
 * that requires authenticated peers.
 */
@Configuration
public class HttpClientConfiguration {

    @Value("${autowakeup.skip.ssl.validation:false}")
    private boolean skipSslValidation;

    /**
     * Builds an SSLContext initialized with the given trust manager and no
     * explicit key managers or secure random source.
     *
     * @throws RuntimeException wrapping any initialization failure
     */
    private SSLContext buildSslContext(TrustManager trustManager) {
        try {
            // NOTE(review): "SSL" is the legacy protocol name; "TLS" is generally
            // preferred — confirm nothing depends on SSLv3 before changing.
            SSLContext sslContext = SSLContext.getInstance("SSL");
            sslContext.init(null, new TrustManager[]{trustManager}, null);
            return sslContext;
        } catch (KeyManagementException | NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
    }

    /** Returns a trust manager that accepts every client and server certificate chain. */
    private TrustManager buildTrustAllCerts() {
        return new X509TrustManager() {
            @Override
            public void checkClientTrusted(X509Certificate[] certificates, String client) throws CertificateException {
                // Intentionally empty: trust everything.
            }

            @Override
            public void checkServerTrusted(X509Certificate[] certificates, String client) throws CertificateException {
                // Intentionally empty: trust everything.
            }

            @Override
            public X509Certificate[] getAcceptedIssuers() {
                // The X509TrustManager contract requires a non-null array here;
                // returning null can cause NullPointerExceptions inside SSL
                // implementations that iterate the accepted issuers.
                return new X509Certificate[0];
            }
        };
    }

    /** Returns a hostname verifier that accepts any hostname. */
    private HostnameVerifier buildVerifyNoHostname() {
        return (hostname, session) -> true;
    }

    /**
     * Exposes the application RestTemplate. With SSL validation enabled (the default)
     * a plain template is returned; otherwise every HTTPS connection is patched to
     * skip both certificate and hostname checks.
     */
    @Bean
    public RestTemplate restTemplate() {
        if (!skipSslValidation) {
            return new RestTemplate();
        } else {
            final HostnameVerifier hostnameVerifier = buildVerifyNoHostname();
            final SSLContext sslContext = buildSslContext(buildTrustAllCerts());
            return new RestTemplate(new SimpleClientHttpRequestFactory() {
                @Override
                protected void prepareConnection(HttpURLConnection connection, String httpMethod) throws IOException {
                    if (connection instanceof HttpsURLConnection) {
                        HttpsURLConnection secureConnection = (HttpsURLConnection) connection;
                        secureConnection.setHostnameVerifier(hostnameVerifier);
                        secureConnection.setSSLSocketFactory(sslContext.getSocketFactory());
                    }
                    super.prepareConnection(connection, httpMethod);
                }
            });
        }
    }
}
| pradyutsarma/autosleep | spring-apps/autowakeup-proxy/src/main/java/org/cloudfoundry/autosleep/ui/proxy/HttpClientConfiguration.java | Java | apache-2.0 | 3,689 |
๏ปฟ// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. See License.txt in the project root for license information.
namespace System.Web.Mvc
{
    /// <summary>
    /// Base implementation of <see cref="IMvcFilter"/> that carries the filter's
    /// ordering metadata. Values are fixed at construction time.
    /// </summary>
    public abstract class MvcFilter : IMvcFilter
    {
        /// <summary>
        /// Creates a filter with default metadata (AllowMultiple false, Order 0).
        /// </summary>
        protected MvcFilter()
        {
        }
        /// <summary>
        /// Creates a filter with the given metadata.
        /// </summary>
        /// <param name="allowMultiple">Value exposed through <see cref="AllowMultiple"/>.</param>
        /// <param name="order">Value exposed through <see cref="Order"/>.</param>
        protected MvcFilter(bool allowMultiple, int order)
        {
            AllowMultiple = allowMultiple;
            Order = order;
        }
        // Set only from the constructors; immutable afterwards.
        public bool AllowMultiple { get; private set; }
        public int Order { get; private set; }
    }
}
| Terminator-Aaron/Katana | aspnetwebsrc/System.Web.Mvc/MvcFilter.cs | C# | apache-2.0 | 532 |
/*
* JBoss, Home of Professional Open Source
* Copyright 2010, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.tests.event.observer.transactional;
import static javax.ejb.TransactionManagementType.BEAN;
import static javax.enterprise.event.TransactionPhase.AFTER_COMPLETION;
import static javax.enterprise.event.TransactionPhase.AFTER_FAILURE;
import static javax.enterprise.event.TransactionPhase.AFTER_SUCCESS;
import static javax.enterprise.event.TransactionPhase.BEFORE_COMPLETION;
import static javax.enterprise.event.TransactionPhase.IN_PROGRESS;
import java.io.Serializable;
import javax.annotation.Priority;
import javax.ejb.Stateful;
import javax.ejb.TransactionManagement;
import javax.enterprise.context.SessionScoped;
import javax.enterprise.event.Observes;
/**
 * Stateful, session-scoped bean with bean-managed transactions whose methods
 * observe {@link Bark} events in the various {@code TransactionPhase}s and
 * record each delivery into the static {@link Actions} log for test assertions.
 */
@Stateful
@TransactionManagement(BEAN)
@Tame
@SessionScoped
@SuppressWarnings("serial")
public class Pomeranian implements PomeranianInterface, Serializable {
    // Records "IN_PROGRESS" when notified during the transaction.
    @Override
    public void observeInProgress(@Observes(during = IN_PROGRESS) Bark event) {
        Actions.add(IN_PROGRESS);
    }
    // Records "AFTER_COMPLETION" when notified after the transaction completes.
    @Override
    public void observeAfterCompletion(@Observes(during = AFTER_COMPLETION) Bark someEvent) {
        Actions.add(AFTER_COMPLETION);
    }
    // Records "AFTER_SUCCESS" when notified after a successful transaction.
    @Override
    public void observeAfterSuccess(@Observes(during = AFTER_SUCCESS) Bark event) {
        Actions.add(AFTER_SUCCESS);
    }
    // Same phase as above, with @Priority(1); records a distinct marker so tests
    // can assert observer ordering.
    @Override
    public void observeAfterSuccessWithHighPriority(@Priority(1) @Observes(during = AFTER_SUCCESS) Bark event) {
        Actions.add(AFTER_SUCCESS + "1");
    }
    // Same phase, with @Priority(100); records its own marker for ordering checks.
    @Override
    public void observeAfterSuccessWithLowPriority(@Priority(100) @Observes(during = AFTER_SUCCESS) Bark event) {
        Actions.add(AFTER_SUCCESS + "100");
    }
    // Records "AFTER_FAILURE" when notified after a failed transaction.
    @Override
    public void observeAfterFailure(@Observes(during = AFTER_FAILURE) Bark event) {
        Actions.add(AFTER_FAILURE);
    }
    // Records "BEFORE_COMPLETION" when notified before transaction completion.
    @Override
    public void observeBeforeCompletion(@Observes(during = BEFORE_COMPLETION) Bark event) {
        Actions.add(BEFORE_COMPLETION);
    }
    // Records the phase and then deliberately throws, letting tests verify how
    // observer exceptions in BEFORE_COMPLETION are handled. Only @Gnarly events.
    @Override
    public void observeAndFail(@Observes(during=BEFORE_COMPLETION) @Gnarly Bark event) throws FooException {
        Actions.add(BEFORE_COMPLETION);
        throw new FooException();
    }
}
| antoinesd/weld-core | tests-arquillian/src/test/java/org/jboss/weld/tests/event/observer/transactional/Pomeranian.java | Java | apache-2.0 | 2,906 |
// Licensed to the Apache Software Foundation (ASF) under one *
// or more contributor license agreements. See the NOTICE file *
// distributed with this work for additional information *
// regarding copyright ownership. The ASF licenses this file *
// to you under the Apache License, Version 2.0 (the *
// "License"); you may not use this file except in compliance *
// with the License. You may obtain a copy of the License at *
// *
// http://www.apache.org/licenses/LICENSE-2.0 *
// *
// Unless required by applicable law or agreed to in writing, *
// software distributed under the License is distributed on an *
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
// KIND, either express or implied. See the License for the *
// specific language governing permissions and limitations *
// under the License. *
using System.Reflection;
using System.Runtime.CompilerServices;

//
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
//
[assembly: AssemblyTitle("")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Apache Software Foundation")]
[assembly: AssemblyProduct("Apache Etch")]
// Fixed: the copyright sign was mojibake ("ยฉ" — the UTF-8 bytes of '©'
// decoded with the wrong code page); restored the intended '©'.
[assembly: AssemblyCopyright("Copyright © Apache Software Foundation 2013")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

//
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Revision and Build Numbers
// by using the '*' as shown below (the compiler fills them in at build time):
[assembly: AssemblyVersion("1.0.*")]

//
// In order to sign your assembly you must specify a key to use. Refer to the
// Microsoft .NET Framework documentation for more information on assembly signing.
//
// Use the attributes below to control which key is used for signing.
//
// Notes:
//   (*) If no key is specified, the assembly is not signed.
//   (*) KeyName refers to a key that has been installed in the Crypto Service
//       Provider (CSP) on your machine. KeyFile refers to a file which contains
//       a key.
//   (*) If the KeyFile and the KeyName values are both specified, the
//       following processing occurs:
//       (1) If the KeyName can be found in the CSP, that key is used.
//       (2) If the KeyName does not exist and the KeyFile does exist, the key
//           in the KeyFile is installed into the CSP and used.
//   (*) In order to create a KeyFile, you can use the sn.exe (Strong Name) utility.
//       When specifying the KeyFile, the location of the KeyFile should be
//       relative to the project output directory which is
//       %Project Directory%\obj\<configuration>. For example, if your KeyFile is
//       located in the project directory, you would specify the AssemblyKeyFile
//       attribute as [assembly: AssemblyKeyFile("..\\..\\mykey.snk")]
//   (*) Delay Signing is an advanced option - see the Microsoft .NET Framework
//       documentation for more information on this.
//
[assembly: AssemblyDelaySign(false)]
[assembly: AssemblyKeyFile("")]
[assembly: AssemblyKeyName("")]
| OBIGOGIT/etch | plugins/visualstudio-addin/postbuild/AssemblyInfo.cs | C# | apache-2.0 | 3,504 |
/*
* Kendo UI Web v2014.1.318 (http://kendoui.com)
* Copyright 2014 Telerik AD. All rights reserved.
*
* Kendo UI Web commercial licenses may be obtained at
* http://www.telerik.com/purchase/license-agreement/kendo-ui-web
* If you do not own a commercial license, this file shall be governed by the
* GNU General Public License (GPL) version 3.
* For GPL requirements, please review: http://www.gnu.org/copyleft/gpl.html
*/
(function(f, define){
define([], f);
})(function(){
(function( window, undefined ) {
var kendo = window.kendo || (window.kendo = { cultures: {} });
kendo.cultures["he"] = {
name: "he",
numberFormat: {
pattern: ["-n"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3],
percent: {
pattern: ["-n%","n%"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3],
symbol: "%"
},
currency: {
pattern: ["$-n","$ n"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3],
symbol: "โช"
}
},
calendars: {
standard: {
days: {
names: ["ืืืย ืจืืฉืื","ืืืย ืฉื ื","ืืืย ืฉืืืฉื","ืืืย ืจืืืขื","ืืืย ืืืืฉื","ืืืย ืฉืืฉื","ืฉืืช"],
namesAbbr: ["ืืืย ื","ืืืย ื","ืืืย ื","ืืืย ื","ืืืย ื","ืืืย ื","ืฉืืช"],
namesShort: ["ื","ื","ื","ื","ื","ื","ืฉ"]
},
months: {
names: ["ืื ืืืจ","ืคืืจืืืจ","ืืจืฅ","ืืคืจืื","ืืื","ืืื ื","ืืืื","ืืืืืกื","ืกืคืืืืจ","ืืืงืืืืจ","ื ืืืืืจ","ืืฆืืืจ",""],
namesAbbr: ["ืื ื","ืคืืจ","ืืจืฅ","ืืคืจ","ืืื","ืืื ","ืืื","ืืื","ืกืคื","ืืืง","ื ืื","ืืฆื",""]
},
AM: ["AM","am","AM"],
PM: ["PM","pm","PM"],
patterns: {
d: "dd/MM/yyyy",
D: "dddd dd MMMM yyyy",
F: "dddd dd MMMM yyyy HH:mm:ss",
g: "dd/MM/yyyy HH:mm",
G: "dd/MM/yyyy HH:mm:ss",
m: "dd MMMM",
M: "dd MMMM",
s: "yyyy'-'MM'-'dd'T'HH':'mm':'ss",
t: "HH:mm",
T: "HH:mm:ss",
u: "yyyy'-'MM'-'dd HH':'mm':'ss'Z'",
y: "MMMM yyyy",
Y: "MMMM yyyy"
},
"/": "/",
":": ":",
firstDay: 0
}
}
}
})(this);
return window.kendo;
}, typeof define == 'function' && define.amd ? define : function(_, f){ f(); }); | facundolucas/eCuentas | src/main/webapp/resources/kendoui/src/js/cultures/kendo.culture.he.js | JavaScript | apache-2.0 | 2,934 |
/**
* Copyright (c) Microsoft Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0.
*
* THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
* OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
* ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
* MERCHANTABLITY OR NON-INFRINGEMENT.
*
* See the Apache Version 2.0 License for specific language governing
* permissions and limitations under the License.
*/
package com.interopbridges.scx.webservices;
/**
 * <p>
 * Concrete representation of an Endpoint to match what is described in the web
 * service's WSDL.
 * </p>
 *
 * <p>
 *
 * <pre>
 * &lt;service name="CalculatorService"&gt;
 *   &lt;port name="CalculatorPort" binding="tns:CalculatorPortBinding"&gt;
 *     &lt;soap:address location="http://scxom64-ws7-02:9080/WebServiceProject/CalculatorService" /&gt;
 *   &lt;/port&gt;
 * &lt;/service&gt;
 * </pre>
 *
 * </p>
 *
 * <p>
 * Typically this might look like:
 * <ol>
 * <li><b>http://scxom64-ws7-02:9080/WebServiceProject/CalculatorService</b></li>
 * <li><b>http://scxom-ws7-02:8080/axis2/services/DinnerFinderService
 * DinnerFinderServiceHttpSoap11Endpoint/</b></li>
 * </ol>
 * </p>
 *
 * @author Christopher Crammond
 */
public class Endpoint implements EndpointMBean {

    /**
     * Key describing the (interopbridges) JMX type of this MBean. The value is
     * the same for every instance, so it is held as a class-level constant
     * rather than a mutable per-instance field.
     */
    private static final String JMX_TYPE = "endpoint";

    /**
     * String representing the full URL of the endpoint address. This should
     * match the soap:address's location attribute from the WSDL. Never null:
     * the no-argument constructor initializes it to the empty string.
     */
    private String _url;

    /**
     * Empty Constructor. It is considered to be a best practice to create this
     * default constructor rather than relying on the compiler to auto-generate
     * it. Delegates to the preferred constructor with an empty URL so the
     * initialization logic lives in one place.
     */
    public Endpoint() {
        this("");
    }

    /**
     * Preferred Constructor
     *
     * @param url
     *            String representing the full URL of the endpoint address.
     */
    public Endpoint(String url) {
        this._url = url;
    }

    /*
     * (non-Javadoc)
     *
     * @see com.interopbridges.scx.webservices.EndpointMBean#getUrl()
     */
    public String getUrl() {
        return this._url;
    }

    /*
     * (non-Javadoc)
     *
     * @see com.interopbridges.scx.webservices.IMBean#getJmxType()
     */
    public String getJmxType() {
        return JMX_TYPE;
    }
}
| Microsoft/BeanSpy | test/code/JEE/Common/src/com/interopbridges/scx/webservices/Endpoint.java | Java | apache-2.0 | 2,617 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.snapshot;
import java.io.Serializable;
import org.apache.geode.internal.cache.snapshot.SnapshotFileMapper;
/**
* Provides a way to configure the behavior of snapshot operations. The default options are:
* <dl>
* <dt>filter</dt>
* <dd>null</dd>
* </dl>
*
* @param <K> the cache entry key type
* @param <V> the cache entry value type
*
* @since GemFire 7.0
*/
public interface SnapshotOptions<K, V> extends Serializable {
/**
 * Defines the available snapshot file formats.
 *
 * @since GemFire 7.0
 */
enum SnapshotFormat {
/** an optimized binary format specific to GemFire */
GEMFIRE
}
/**
 * Sets a filter to apply to snapshot entries. Entries that are accepted by the filter will be
 * included in import and export operations.
 *
 * @param filter the filter to apply, or null to remove the filter
 * @return the snapshot options
 */
SnapshotOptions<K, V> setFilter(SnapshotFilter<K, V> filter);
/**
 * Returns the filter to be applied to snapshot entries. Entries that are accepted by the filter
 * will be included in import and export operations.
 *
 * @return the filter, or null if the filter is not set
 */
SnapshotFilter<K, V> getFilter();
/**
 * Sets whether to invoke callbacks when loading a snapshot. The default is false.
 *
 * @param invokeCallbacks true if callbacks should be invoked when loading a snapshot
 *
 * @return the snapshot options
 */
SnapshotOptions<K, V> invokeCallbacks(boolean invokeCallbacks);
/**
 * Returns whether loading a snapshot causes callbacks to be invoked.
 *
 * @return whether loading a snapshot causes callbacks to be invoked
 */
boolean shouldInvokeCallbacks();
/**
 * Returns true if the snapshot operation will proceed in parallel.
 *
 * @return true if the parallel mode has been enabled
 *
 * @since Geode 1.3
 */
boolean isParallelMode();
/**
 * Enables parallel mode for snapshot export, which will cause each member of a partitioned region
 * to save its local data set (ignoring redundant copies) to a separate snapshot file.
 *
 * <p>
 * Parallelizing snapshot operations may yield significant performance improvements for large data
 * sets. This is particularly true when each member is writing to separate physical disks.
 * <p>
 * This flag is ignored for replicated regions.
 *
 * @param parallel true if the snapshot operations will be performed in parallel
 * @return the snapshot options
 *
 * @see SnapshotFileMapper
 *
 * @since Geode 1.3
 */
SnapshotOptions<K, V> setParallelMode(boolean parallel);
}
| charliemblack/geode | geode-core/src/main/java/org/apache/geode/cache/snapshot/SnapshotOptions.java | Java | apache-2.0 | 3,388 |