| hash | diff | message | project | split |
|---|---|---|---|---|
0b210802849ea63ca13a91c1c7e6c61b37653c7b
|
diff --git a/app/app.js b/app/app.js
index <HASH>..<HASH> 100755
--- a/app/app.js
+++ b/app/app.js
@@ -130,6 +130,9 @@ angular.module('spotmop', [
MopidyService.getCurrentTlTracks().then( function( tlTracks ){
$scope.currentTracklist = tlTracks;
});
+ MopidyService.getConsume().then( function( isConsume ){
+ SettingsService.setSetting('mopidyconsume',isConsume);
+ });
});
$scope.$on('mopidy:state:offline', function(){
|
Fetch consume mode on load, rather than assume the value
|
jaedb_spotmop
|
train
|
3129d6052d973bcbf744d962232945c0872fc73f
|
diff --git a/tests/test_pylast.py b/tests/test_pylast.py
index <HASH>..<HASH> 100755
--- a/tests/test_pylast.py
+++ b/tests/test_pylast.py
@@ -40,7 +40,12 @@ class PyLastTestCase:
assert str.endswith(suffix, start, end)
-@flaky(max_runs=3, min_passes=1)
+def _no_xfail_rerun_filter(err, name, test, plugin):
+ for _ in test.iter_markers(name="xfail"):
+ return False
+
+
+@flaky(max_runs=3, min_passes=1, rerun_filter=_no_xfail_rerun_filter)
class TestPyLastWithLastFm(PyLastTestCase):
secrets = None
|
Run xfail tests only once: no point re-running
|
pylast_pylast
|
train
|
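A note on the pattern above: flaky's `rerun_filter` hook receives the failure info and the pytest item, and a falsy return value suppresses the rerun. A self-contained sketch of the same idea (assuming pytest and the flaky plugin; the example tests are illustrative):

```python
import pytest
from flaky import flaky


def _no_xfail_rerun_filter(err, name, test, plugin):
    # Returning False tells flaky not to re-run this test; tests marked
    # xfail are expected to fail, so retrying them just wastes time.
    for _ in test.iter_markers(name="xfail"):
        return False
    return True  # everything else keeps the normal retry behaviour


@flaky(max_runs=3, min_passes=1, rerun_filter=_no_xfail_rerun_filter)
class TestExample:
    @pytest.mark.xfail(reason="known upstream bug")
    def test_known_failure(self):
        assert False  # fails once; the filter prevents two more runs

    def test_occasionally_flaky(self):
        assert True  # would still be retried up to 3 times on failure
```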
87658c66c8b44fb3726e28ca95caccbd82a6bf19
|
diff --git a/lib/bucket.js b/lib/bucket.js
index <HASH>..<HASH> 100644
--- a/lib/bucket.js
+++ b/lib/bucket.js
@@ -5,10 +5,11 @@ function Bucket() {
}
Bucket.prototype.set = function (key, value) {
- this._cache[key] = value;
+ this._cache['!'+key] = value;
};
Bucket.prototype.get = function (key) {
+ key = '!' + key;
if (!Object.prototype.hasOwnProperty.call(this._cache, key)) {
throw new Error('Cache contains no entry for this key');
}
@@ -16,7 +17,7 @@ Bucket.prototype.get = function (key) {
};
Bucket.prototype.remove = function (key) {
- delete this._cache[key];
+ delete this._cache['!'+key];
};
module.exports = Bucket;
diff --git a/test/spec/bucket.js b/test/spec/bucket.js
index <HASH>..<HASH> 100644
--- a/test/spec/bucket.js
+++ b/test/spec/bucket.js
@@ -35,6 +35,13 @@ describe('The Bucket', function () {
});
+ it('should allow reserved object properties as keys', function () {
+
+ bucket.set('__proto__', 1);
+ expect(bucket.get('__proto__')).to.eql(1);
+
+ });
+
describe('should handle all key types converted to a string', function () {
it('undefined', function () {
|
fix: allow reserved object property names as keys
|
analog-nico_two-buckets-memcache
|
train
|
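The `'!'` prefix above exists because plain JavaScript objects inherit properties such as `__proto__` and `constructor`, so user keys can collide with them. Python dicts have no inherited keys and do not share that hazard, but the same namespacing pattern translates directly; a minimal sketch for illustration:

```python
class Bucket:
    """Cache whose keys are namespaced, mirroring the JS '!' prefix."""

    def __init__(self):
        self._cache = {}

    @staticmethod
    def _key(key):
        return "!" + str(key)  # prefix every key on the way in

    def set(self, key, value):
        self._cache[self._key(key)] = value

    def get(self, key):
        k = self._key(key)
        if k not in self._cache:
            raise KeyError("Cache contains no entry for this key")
        return self._cache[k]

    def remove(self, key):
        self._cache.pop(self._key(key), None)


bucket = Bucket()
bucket.set("__proto__", 1)
assert bucket.get("__proto__") == 1
```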
c4db71c9d67750d41a0841ea2fe55e4d236265ec
|
diff --git a/lib/Compilation.js b/lib/Compilation.js
index <HASH>..<HASH> 100644
--- a/lib/Compilation.js
+++ b/lib/Compilation.js
@@ -219,7 +219,7 @@ Compilation.prototype.addModuleDependencies = function(module, dependencies, bai
return errorOrWarningAndCallback(new ModuleNotFoundError(module, err));
}
if(!dependantModule) {
- return callback();
+ return process.nextTick(callback);
}
if(this.profile) {
if(!dependantModule.profile) {
@@ -260,7 +260,7 @@ Compilation.prototype.addModuleDependencies = function(module, dependencies, bai
}
}
- return callback();
+ return process.nextTick(callback);
}
if(newModule instanceof Module) {
@@ -283,9 +283,9 @@ Compilation.prototype.addModuleDependencies = function(module, dependencies, bai
}
if(recursive) {
- return this.processModuleDependencies(dependantModule, callback);
+ return process.nextTick(this.processModuleDependencies.bind(this, dependantModule, callback));
} else {
- return callback();
+ return process.nextTick(callback);
}
}
|
added some async steps to prevent stack overflows
|
webpack_webpack
|
train
|
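The fix above defers each callback to the next tick of the event loop so that deeply nested dependency graphs do not grow the call stack without bound. The Python analogue of `process.nextTick` is scheduling the continuation on the event loop instead of calling it directly; a minimal asyncio sketch (names illustrative):

```python
import asyncio


def process(depth, done):
    if depth == 0:
        done()
        return
    # A direct recursive call here would need ~100k stack frames.
    # call_soon returns immediately and the loop invokes the next step
    # with a fresh stack, so the recursion depth stays constant.
    asyncio.get_running_loop().call_soon(process, depth - 1, done)


async def main():
    finished = asyncio.Event()
    process(100_000, finished.set)
    await finished.wait()


asyncio.run(main())
```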
ec1e8b2b6b87445d07fe8965c0eb8d0e1446a979
|
diff --git a/app/controllers/people_controller.rb b/app/controllers/people_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/people_controller.rb
+++ b/app/controllers/people_controller.rb
@@ -46,6 +46,7 @@ class PeopleController < ApplicationController
confirm_or_create
else
error :create_error
+ @person.memberships.build unless @person.memberships.present?
render :new
end
end
|
fix role section disappearing
The role section disappeared when a create action
failed with validation errors.
|
ministryofjustice_peoplefinder
|
train
|
73e6064d56fc55230ff29f807abbfb5a3af94155
|
diff --git a/DependencyInjection/MonologExtension.php b/DependencyInjection/MonologExtension.php
index <HASH>..<HASH> 100644
--- a/DependencyInjection/MonologExtension.php
+++ b/DependencyInjection/MonologExtension.php
@@ -65,6 +65,34 @@ class MonologExtension extends Extension
}
}
}
+
+ $this->addClassesToCompile(array(
+ 'Monolog\\Formatter\\FormatterInterface',
+ 'Monolog\\Formatter\\LineFormatter',
+ 'Monolog\\Handler\\HandlerInterface',
+ 'Monolog\\Handler\\AbstractHandler',
+ 'Monolog\\Handler\\StreamHandler',
+ 'Monolog\\Handler\\FingersCrossedHandler',
+ 'Monolog\\Handler\\TestHandler',
+ 'Monolog\\Logger',
+ 'Symfony\\Bundle\\MonologBundle\\Logger\\Logger',
+ 'Symfony\\Bundle\\MonologBundle\\Logger\\DebugHandler',
+ ));
+ }
+
+ /**
+ * Returns the base path for the XSD files.
+ *
+ * @return string The XSD base path
+ */
+ public function getXsdValidationBasePath()
+ {
+ return __DIR__.'/../Resources/config/schema';
+ }
+
+ public function getNamespace()
+ {
+ return 'http://symfony.com/schema/dic/monolog';
}
private function buildHandler(ContainerBuilder $container, $name, array $handler)
@@ -142,21 +170,6 @@ class MonologExtension extends Extension
return $handlerId;
}
- /**
- * Returns the base path for the XSD files.
- *
- * @return string The XSD base path
- */
- public function getXsdValidationBasePath()
- {
- return __DIR__.'/../Resources/config/schema';
- }
-
- public function getNamespace()
- {
- return 'http://symfony.com/schema/dic/monolog';
- }
-
private function getHandlerId($name)
{
return sprintf('monolog.handler.%s', $name);
|
[MonologBundle] Added some compiled classes and moved public methods before private ones
|
symfony_monolog-bundle
|
train
|
887f7d8e9b813c8fe0f41d58a4cee216b46ed1f0
|
diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -11,7 +11,7 @@ React-mixin-manager is a simple manager for react mixins easily dependencies on
Installation
------------
* Browser: include *react-mixin-manager[.min].js* after [React](http://facebook.github.io/react/)
-* CommonJS: ```require('react-mixin-manager');```
+* CommonJS: ```require('react-mixin-manager')(require('react'));```
Usage
------------
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -1 +1 @@
-require('./react-mixin-manager');
\ No newline at end of file
+module.exports = require('./react-mixin-manager');
\ No newline at end of file
diff --git a/react-mixin-manager.js b/react-mixin-manager.js
index <HASH>..<HASH> 100644
--- a/react-mixin-manager.js
+++ b/react-mixin-manager.js
@@ -27,7 +27,9 @@
main(React);
});
} else if (typeof exports !== 'undefined' && typeof require !== 'undefined') {
- main(require('react'));
+ module.exports = function(React) {
+ main(React);
+ };
} else {
main(React);
}
diff --git a/react-mixin-manager.min.js b/react-mixin-manager.min.js
index <HASH>..<HASH> 100644
--- a/react-mixin-manager.min.js
+++ b/react-mixin-manager.min.js
@@ -1,4 +1,4 @@
/*!
* [react-mixin-manager](https://github.com/jhudson8/react-mixin-manager) v0.1.1; MIT license; Joe Hudson<joehud@gmail.com>
*/
-!function(i){"function"==typeof define&&define.amd?define(["react"],function(n){i(n)}):i("undefined"!=typeof exports&&"undefined"!=typeof require?require("react"):React)}(function(i){function n(e,r,t){function s(n){if(!r[n]){var e=i.mixins._mixins[n];if(!e)throw"invalid mixin '"+n+"'";var a=i.mixins._dependsOn[n];if(a)for(var o=0;o<a.length;o++)s(a[o]);t.push(e),r[n]=!0}}for(var a=0;a<e.length;a++){var o=e[a];o&&(Array.isArray(o)?n(o,r,t):"string"==typeof o?s(o):t.push(o))}}function e(n,e,r,t){var s=i.mixins;if(!t&&s._mixins[n])throw"the '"+n+"' mixin already exists. Use React.mixins.replace to override";s._dependsOn[n]=r.length&&r,s._mixins[n]=e}var r=i.createClass;i.createClass=function(n){return n.mixins&&(n.mixins=i.mixins.get(n.mixins)),r.apply(i,arguments)},i.mixins={get:function(){var i=[],e={};return n(Array.prototype.slice.call(arguments),e,i),i},add:function(i,n){e(i,n,Array.prototype.slice.call(arguments,2),!1)},replace:function(i,n){e(i,n,Array.prototype.slice.call(arguments,2),!0)},exists:function(i){return this._mixins[i]||!1},_dependsOn:{},_mixins:{}}});
+!function(n){"function"==typeof define&&define.amd?define(["react"],function(i){n(i)}):"undefined"!=typeof exports&&"undefined"!=typeof require?module.exports=function(i){n(i)}:n(React)}(function(n){function i(e,t,r){function s(i){if(!t[i]){var e=n.mixins._mixins[i];if(!e)throw"invalid mixin '"+i+"'";var a=n.mixins._dependsOn[i];if(a)for(var o=0;o<a.length;o++)s(a[o]);r.push(e),t[i]=!0}}for(var a=0;a<e.length;a++){var o=e[a];o&&(Array.isArray(o)?i(o,t,r):"string"==typeof o?s(o):r.push(o))}}function e(i,e,t,r){var s=n.mixins;if(!r&&s._mixins[i])throw"the '"+i+"' mixin already exists. Use React.mixins.replace to override";s._dependsOn[i]=t.length&&t,s._mixins[i]=e}var t=n.createClass;n.createClass=function(i){return i.mixins&&(i.mixins=n.mixins.get(i.mixins)),t.apply(n,arguments)},n.mixins={get:function(){var n=[],e={};return i(Array.prototype.slice.call(arguments),e,n),n},add:function(n,i){e(n,i,Array.prototype.slice.call(arguments,2),!1)},replace:function(n,i){e(n,i,Array.prototype.slice.call(arguments,2),!0)},exists:function(n){return this._mixins[n]||!1},_dependsOn:{},_mixins:{}}});
\ No newline at end of file
|
require React to be passed as param for commonJS init
|
jhudson8_react-mixin-manager
|
train
|
0701b98f97964a903efad3de2cbcbd605d98e99e
|
diff --git a/src/PostType.php b/src/PostType.php
index <HASH>..<HASH> 100644
--- a/src/PostType.php
+++ b/src/PostType.php
@@ -505,7 +505,7 @@ class PostType
$meta = $this->columns()->sortableMeta($orderby);
// determine type of ordering
- if (is_string($meta)) {
+ if (is_string($meta) or !$meta[1]) {
$meta_key = $meta;
$meta_value = 'meta_value';
} else {
|
Allow setting false to sort by alphabetical
|
jjgrainger_PostTypes
|
train
|
e87ae4c880600110f1cf0923531cab4b6b891124
|
diff --git a/src/allTrue.spec.js b/src/allTrue.spec.js
index <HASH>..<HASH> 100644
--- a/src/allTrue.spec.js
+++ b/src/allTrue.spec.js
@@ -3,7 +3,6 @@ import { allTrue } from './allTrue'
test('with functions', () => {
const foo = () => 1
const baz = () => false
-
const result = allTrue(foo, baz)
expect(result).toBe(false)
})
diff --git a/src/log.js b/src/log.js
index <HASH>..<HASH> 100644
--- a/src/log.js
+++ b/src/log.js
@@ -1,15 +1,30 @@
export let logHolder = []
-let shouldLog = true
+let shouldLog = false
let shouldPush = false
+let initPassed = false
+
+function init(){
+ if(initPassed) return
+ initPassed = true
+
+ if(!process) return
+ if(!process.env) return
+
+ if(process.env.RAMBDAX_LOG === 'true'){
+ shouldLog = true
+ }
+}
export function logInit({ logFlag = true, pushFlag = false } = {}){
shouldLog = Boolean(logFlag)
shouldPush = Boolean(pushFlag)
logHolder = []
+ initPassed = true
}
export function log(...inputs){
+ init()
if (shouldPush) logHolder.push(inputs)
if (!shouldLog) return
|
feat: R.log is disabled by default
|
selfrefactor_rambdax
|
train
|
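The shape of the change: logging becomes opt-in, a one-shot `init` consults an environment variable the first time `log` runs, and an explicit `logInit` call marks initialisation as done so the env check cannot override it later. A rough Python rendering of that pattern (module-level flags; names are illustrative, not an existing API):

```python
import os

log_holder = []
_should_log = False
_should_push = False
_init_passed = False


def _init():
    # Runs at most once, and only if log_init() was never called.
    global _should_log, _init_passed
    if _init_passed:
        return
    _init_passed = True
    if os.environ.get("RAMBDAX_LOG") == "true":
        _should_log = True


def log_init(log_flag=True, push_flag=False):
    global _should_log, _should_push, _init_passed, log_holder
    _should_log = bool(log_flag)
    _should_push = bool(push_flag)
    log_holder = []
    _init_passed = True  # explicit configuration wins over the env check


def log(*inputs):
    _init()
    if _should_push:
        log_holder.append(inputs)
    if not _should_log:
        return
    print(*inputs)
```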
d6e949b3e10f91ca8c34364b0b6797a18201fb54
|
diff --git a/lib/proxy.spec.js b/lib/proxy.spec.js
index <HASH>..<HASH> 100644
--- a/lib/proxy.spec.js
+++ b/lib/proxy.spec.js
@@ -249,6 +249,20 @@ describe('Proxy:', () => {
});
});
+ describe('when call metadata is modified by a middleware', () => {
+ it('should pass the modified call to the implementation', (done) => {
+ middlewareMock.globalMiddleware.method = (call) => {
+ call.metadata = {'data': true};
+ };
+ proxy.list(call, (error) => {
+ expect(personServiceMock.list.calls.argsFor(0)[0].metadata.data)
+ .toEqual(true);
+ expect(error).toBeNull();
+ done();
+ });
+ });
+ });
+
describe('when middleware method does not return a promise', () => {
it('should call the next middleware', (done) => {
middlewareMock.globalMiddleware.method = Spy.returnValue('anything');
@@ -382,7 +396,8 @@ describe('Proxy:', () => {
const expectedError = new Error('error');
proxy.list(call, (error) => {
expect(errorHandlerMock.globalErrorHandler.method).toHaveBeenCalledTimes(1);
- expect(errorHandlerMock.globalErrorHandler.method).toHaveBeenCalledWith(expectedError, call);
+ expect(errorHandlerMock.globalErrorHandler.method)
+ .toHaveBeenCalledWith(expectedError, call);
expect(error).toEqual(expectedError);
done();
});
@@ -392,7 +407,8 @@ describe('Proxy:', () => {
const expectedError = new Error('error');
proxy.list(call, (error) => {
expect(errorHandlerMock.packageErrorHandler.method).toHaveBeenCalledTimes(1);
- expect(errorHandlerMock.packageErrorHandler.method).toHaveBeenCalledWith(expectedError, call);
+ expect(errorHandlerMock.packageErrorHandler.method)
+ .toHaveBeenCalledWith(expectedError, call);
expect(error).toEqual(expectedError);
done();
});
@@ -402,7 +418,8 @@ describe('Proxy:', () => {
const expectedError = new Error('error');
proxy.list(call, (error) => {
expect(errorHandlerMock.serviceErrorHandler.method).toHaveBeenCalledTimes(1);
- expect(errorHandlerMock.serviceErrorHandler.method).toHaveBeenCalledWith(expectedError, call);
+ expect(errorHandlerMock.serviceErrorHandler.method)
+ .toHaveBeenCalledWith(expectedError, call);
expect(error).toEqual(expectedError);
done();
});
@@ -412,7 +429,8 @@ describe('Proxy:', () => {
const expectedError = new Error('error');
proxy.list(call, (error) => {
expect(errorHandlerMock.methodErrorHandler.method).toHaveBeenCalledTimes(1);
- expect(errorHandlerMock.methodErrorHandler.method).toHaveBeenCalledWith(expectedError, call);
+ expect(errorHandlerMock.methodErrorHandler.method)
+ .toHaveBeenCalledWith(expectedError, call);
expect(error).toEqual(expectedError);
done();
});
@@ -453,7 +471,8 @@ describe('Proxy:', () => {
it('should pass the modified error to the next error handler', (done) => {
proxy.list(call, () => {
- expect(errorHandlerMock.packageErrorHandler.method).toHaveBeenCalledWith(expectedError, call);
+ expect(errorHandlerMock.packageErrorHandler.method)
+ .toHaveBeenCalledWith(expectedError, call);
done();
});
});
|
If metadata is modified in a middleware, the same metadata will be received in the method
|
devsu_condor-framework
|
train
|
0e32853353926152c12e18f56062f11b715c4e4c
|
diff --git a/nailgun/entities.py b/nailgun/entities.py
index <HASH>..<HASH> 100644
--- a/nailgun/entities.py
+++ b/nailgun/entities.py
@@ -958,6 +958,7 @@ class AbstractDockerContainer(
# The "name" field may be any of a-zA-Z0-9_.-,
# "alphanumeric" is a subset of those legal characters.
'name': entity_fields.StringField(
+ length=(2, 30),
required=True,
str_type='alphanumeric',
),
|
Change the length of AbstractDockerContainer name
Docker container names must follow the rule
`[a-zA-Z0-9][a-zA-Z0-9_.-]`; to match that, update the randomly
generated name to be at least two characters long.
Closes #<I>
|
SatelliteQE_nailgun
|
train
|
379f5a3ed083dbdf23b848b976a25cf0d1929313
|
diff --git a/client/state/themes/actions.js b/client/state/themes/actions.js
index <HASH>..<HASH> 100644
--- a/client/state/themes/actions.js
+++ b/client/state/themes/actions.js
@@ -418,6 +418,8 @@ export function installAndActivate( themeId, siteId, source = 'unknown', purchas
return ( dispatch ) => {
return dispatch( installTheme( themeId, siteId ) )
.then( () => {
+ // This will be called even if `installTheme` silently fails. We rely on
+ // `activateTheme`'s own error handling here.
dispatch( activateTheme( themeId, siteId, source, purchased ) );
} );
};
|
state/themes/actions#installAndActivate: Add explanatory comment
|
Automattic_wp-calypso
|
train
|
40f13b60b7841d88ab1418308c843064b3837e56
|
diff --git a/xproc-engine-calabash/src/main/java/org/daisy/maven/xproc/calabash/Calabash.java b/xproc-engine-calabash/src/main/java/org/daisy/maven/xproc/calabash/Calabash.java
index <HASH>..<HASH> 100644
--- a/xproc-engine-calabash/src/main/java/org/daisy/maven/xproc/calabash/Calabash.java
+++ b/xproc-engine-calabash/src/main/java/org/daisy/maven/xproc/calabash/Calabash.java
@@ -132,7 +132,7 @@ public class Calabash implements XProcEngine {
return new URIResolver() {
public Source resolve(String href, String base) throws TransformerException {
try {
- if (base != null && base.startsWith("jar:file:"))
+ if (!href.startsWith("http:") && base != null && base.startsWith("jar:file:"))
return new SAXSource(new InputSource(new URL(new URL(base), href).toString())); }
catch (MalformedURLException e) {}
return null;
diff --git a/xprocspec-runner/src/main/java/org/daisy/maven/xproc/xprocspec/XProcSpecRunner.java b/xprocspec-runner/src/main/java/org/daisy/maven/xproc/xprocspec/XProcSpecRunner.java
index <HASH>..<HASH> 100644
--- a/xprocspec-runner/src/main/java/org/daisy/maven/xproc/xprocspec/XProcSpecRunner.java
+++ b/xprocspec-runner/src/main/java/org/daisy/maven/xproc/xprocspec/XProcSpecRunner.java
@@ -66,7 +66,6 @@ public class XProcSpecRunner {
activate();
engine.setCatalog(catalog);
-
URI xprocspec = asURI(XProcSpecRunner.class.getResource("/content/xml/xproc/xprocspec.xpl"));
URI xprocspecSummary = asURI(XProcSpecRunner.class.getResource("/xprocspec-extra/xprocspec-summary.xpl"));
URL xspecCss = XProcSpecRunner.class.getResource("/xprocspec-extra/xspec.css");
diff --git a/xprocspec-runner/src/test/java/org/daisy/maven/xproc/xprocspec/XProcSpecRunnerTest.java b/xprocspec-runner/src/test/java/org/daisy/maven/xproc/xprocspec/XProcSpecRunnerTest.java
index <HASH>..<HASH> 100644
--- a/xprocspec-runner/src/test/java/org/daisy/maven/xproc/xprocspec/XProcSpecRunnerTest.java
+++ b/xprocspec-runner/src/test/java/org/daisy/maven/xproc/xprocspec/XProcSpecRunnerTest.java
@@ -135,7 +135,6 @@ public class XProcSpecRunnerTest {
}
@Test
- @org.junit.Ignore
public void testMocking() {
Map<String,File> tests = ImmutableMap.of("test_import_foo", new File(testsDir, "test_import_foo.xprocspec"));
ByteArrayOutputStream stream = new ByteArrayOutputStream();
|
Bugfix in catalog.xml support
|
daisy_xproc-maven-plugin
|
train
|
2a87d5fdddb564970b6d2dc5bae0ce82fc6100ca
|
diff --git a/seriously.py b/seriously.py
index <HASH>..<HASH> 100755
--- a/seriously.py
+++ b/seriously.py
@@ -99,11 +99,18 @@ class Seriously(object):
if __name__ == '__main__':
srs = Seriously()
- while 1:
- try:
- srs.eval(raw_input('>>> '))
- except EOFError:
- exit()
- finally:
- print
- print srs.stack
\ No newline at end of file
+ if len(sys.argv) < 2:
+ while 1:
+ try:
+ srs.eval(raw_input('>>> '))
+ except EOFError:
+ exit()
+ finally:
+ print
+ print srs.stack
+ else:
+ if sys.argv[1] == '-c':
+ srs.eval(sys.argv[2])
+ else:
+ with open(sys.argv[2]) as f:
+ srs.eval(f.read())
\ No newline at end of file
|
added -c flag for command-line program input and file input
|
Mego_Seriously
|
train
|
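The entry point above has three modes: no arguments starts the REPL, `-c` evaluates its argument as program text, and anything else is read as a file. A compact Python 3 sketch of the same dispatch (the `evaluate` function is a hypothetical stand-in for the interpreter; note the original reads the filename from `sys.argv[2]`, while this sketch takes it from `argv[1]`):

```python
import sys


def evaluate(source):
    print("evaluating:", source)  # stand-in for the real eval


def main(argv):
    if len(argv) < 2:
        while True:                    # REPL mode
            try:
                evaluate(input(">>> "))
            except EOFError:
                return
    elif argv[1] == "-c":
        evaluate(argv[2])              # inline program text
    else:
        with open(argv[1]) as f:       # program read from a file
            evaluate(f.read())


if __name__ == "__main__":
    main(sys.argv)
```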
4baf49d91bb37d5fd69b257ce5eebec4bba39ff2
|
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index <HASH>..<HASH> 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -23,6 +23,8 @@ unless ENV['CI']
end
if ENV['CI']
+ require 'webmock'
+ WebMock.disable_net_connect!(:allow => "codeclimate.com")
require 'codeclimate-test-reporter'
CodeClimate::TestReporter.start
end
|
make webmock ignore requests to codeclimate.com
|
duse-io_duse.rb
|
train
|
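The same trick in a Python test suite: stub all network traffic but whitelist the coverage reporter's host. With the `responses` library this is a pass-through prefix; a sketch, assuming a recent `responses` version where `add_passthru` is available:

```python
import requests
import responses


@responses.activate
def test_reports_coverage():
    # @responses.activate blocks unregistered HTTP calls; requests whose
    # URL starts with this prefix are let through to the real network.
    responses.add_passthru("https://codeclimate.com")
    requests.post("https://codeclimate.com/test_reports", data={})
```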
a19eeb17b5334072e5e6d070f07a6ec1a5afe718
|
diff --git a/tests/framework/validators/RequiredValidatorTest.php b/tests/framework/validators/RequiredValidatorTest.php
index <HASH>..<HASH> 100644
--- a/tests/framework/validators/RequiredValidatorTest.php
+++ b/tests/framework/validators/RequiredValidatorTest.php
@@ -30,7 +30,10 @@ class RequiredValidatorTest extends TestCase
$val = new RequiredValidator(['requiredValue' => 55]);
$this->assertTrue($val->validate(55));
$this->assertTrue($val->validate("55"));
- $this->assertTrue($val->validate("0x37"));
+ if (defined('HHVM_VERSION') || version_compare(PHP_VERSION, '7.0.0', '<')) {
+ // hex to int conversion of strings is not available in php7 anymore
+ $this->assertTrue($val->validate("0x37"));
+ }
$this->assertFalse($val->validate("should fail"));
$this->assertTrue($val->validate(true));
$val->strict = true;
|
fixed BC compatibility test break for php7
|
yiisoft_yii-core
|
train
|
beef42c7ae3e8ba9ec4fc44db99d49497249ae1d
|
diff --git a/indra/tests/test_docs_code.py b/indra/tests/test_docs_code.py
index <HASH>..<HASH> 100644
--- a/indra/tests/test_docs_code.py
+++ b/indra/tests/test_docs_code.py
@@ -231,6 +231,7 @@ def test_gene_network():
import networkx as nx
paths = nx.single_source_shortest_path(G=indranet, source='BRCA1',
cutoff=1)
+ assert paths
# Chunk 11
from indra.assemblers.pysb import PysbAssembler
|
Assert output from nx algorithm
|
sorgerlab_indra
|
train
|
764fd4ab94866629c1d7c497bfa6eb8964b1a036
|
diff --git a/lib/rails-settings/active_record.rb b/lib/rails-settings/active_record.rb
index <HASH>..<HASH> 100644
--- a/lib/rails-settings/active_record.rb
+++ b/lib/rails-settings/active_record.rb
@@ -9,6 +9,10 @@ ActiveRecord::Base.class_eval do
ScopedSettings.for_target(self)
end
+ def settings=(hash)
+ hash.each { |k,v| settings[k] = v }
+ end
+
after_destroy { |user| user.settings.target_scoped.delete_all }
scope_method = ActiveRecord::VERSION::MAJOR < 3 ? :named_scope : :scope
diff --git a/test/settings_test.rb b/test/settings_test.rb
index <HASH>..<HASH> 100644
--- a/test/settings_test.rb
+++ b/test/settings_test.rb
@@ -165,6 +165,13 @@ class SettingsTest < Test::Unit::TestCase
assert_equal User.settings.foo, 'bar'
end
+ def test_sets_settings_with_hash
+ user = User.create :name => 'Mr. Foo'
+ user.settings = { :one => 1, :two => 2 }
+ assert_equal 1, user.settings[:one]
+ assert_equal 2, user.settings[:two]
+ end
+
private
def assert_setting(value, key, scope_target=nil)
key = key.to_sym
|
Added #settings= to set settings in bulk for an object that has_settings
|
ledermann_rails-settings
|
train
|
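The Ruby `#settings=` writer simply folds a hash into the per-key store. The analogous bulk setter in Python is a property whose setter merges a mapping into the underlying dict; a minimal sketch:

```python
class User:
    def __init__(self, name):
        self.name = name
        self._settings = {}

    @property
    def settings(self):
        return self._settings

    @settings.setter
    def settings(self, mapping):
        # Merge rather than replace, matching the Ruby #settings= above.
        for key, value in mapping.items():
            self._settings[key] = value


user = User("Mr. Foo")
user.settings = {"one": 1, "two": 2}
assert user.settings["one"] == 1 and user.settings["two"] == 2
```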
ce231dfea5dd35a02e9719d4f8742ee18d10431c
|
diff --git a/helios-services/src/main/java/com/spotify/helios/master/ZooKeeperMasterModel.java b/helios-services/src/main/java/com/spotify/helios/master/ZooKeeperMasterModel.java
index <HASH>..<HASH> 100644
--- a/helios-services/src/main/java/com/spotify/helios/master/ZooKeeperMasterModel.java
+++ b/helios-services/src/main/java/com/spotify/helios/master/ZooKeeperMasterModel.java
@@ -66,7 +66,7 @@ import java.util.UUID;
import static com.google.common.base.Charsets.UTF_8;
import static com.google.common.base.Optional.fromNullable;
-import static com.google.common.base.Strings.isNullOrEmpty;
+import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Lists.reverse;
import static com.spotify.helios.common.descriptors.Descriptor.parse;
@@ -461,7 +461,7 @@ public class ZooKeeperMasterModel implements MasterModel {
@Override
public Job removeJob(JobId jobId) throws JobDoesNotExistException, JobStillDeployedException {
try {
- return removeJob(jobId, null);
+ return removeJob(jobId, Job.EMPTY_TOKEN);
} catch (TokenVerificationException e) {
throw Throwables.propagate(e);
}
@@ -523,7 +523,7 @@ public class ZooKeeperMasterModel implements MasterModel {
throws HostNotFoundException, JobAlreadyDeployedException, JobDoesNotExistException,
JobPortAllocationConflictException {
try {
- deployJob(host, job, null);
+ deployJob(host, job, Job.EMPTY_TOKEN);
} catch (TokenVerificationException e) {
throw Throwables.propagate(e);
}
@@ -669,7 +669,7 @@ public class ZooKeeperMasterModel implements MasterModel {
public void updateDeployment(String host, Deployment deployment)
throws HostNotFoundException, JobNotDeployedException {
try {
- updateDeployment(host, deployment, null);
+ updateDeployment(host, deployment, Job.EMPTY_TOKEN);
} catch (TokenVerificationException e) {
Throwables.propagate(e);
}
@@ -894,7 +894,7 @@ public class ZooKeeperMasterModel implements MasterModel {
public Deployment undeployJob(String host, JobId jobId)
throws HostNotFoundException, JobNotDeployedException {
try {
- return undeployJob(host, jobId, null);
+ return undeployJob(host, jobId, Job.EMPTY_TOKEN);
} catch (TokenVerificationException e) {
throw Throwables.propagate(e);
}
@@ -943,8 +943,10 @@ public class ZooKeeperMasterModel implements MasterModel {
return deployment;
}
- private void verifyToken(final String token, final Job job) throws TokenVerificationException {
- if (!isNullOrEmpty(job.getToken()) && !job.getToken().equals(token)) {
+ private static void verifyToken(final String token, final Job job)
+ throws TokenVerificationException {
+ checkNotNull(token, "token");
+ if (!token.equals(job.getToken())) {
throw new TokenVerificationException(job.getId());
}
}
diff --git a/helios-system-tests/src/main/java/com/spotify/helios/system/TokenTest.java b/helios-system-tests/src/main/java/com/spotify/helios/system/TokenTest.java
index <HASH>..<HASH> 100644
--- a/helios-system-tests/src/main/java/com/spotify/helios/system/TokenTest.java
+++ b/helios-system-tests/src/main/java/com/spotify/helios/system/TokenTest.java
@@ -47,7 +47,7 @@ public class TokenTest extends SystemTestBase {
private static final String WRONG_TOKEN = "--token=wrongToken";
@Test
- public void test() throws Exception {
+ public void testJobWithToken() throws Exception {
startDefaultMaster();
startDefaultAgent(testHost());
final HeliosClient client = defaultClient();
@@ -79,6 +79,35 @@ public class TokenTest extends SystemTestBase {
remove(TOKEN, JobDeleteResponse.Status.OK);
}
+ @Test
+ public void testJobWithoutToken() throws Exception {
+ startDefaultMaster();
+ startDefaultAgent(testHost());
+ final HeliosClient client = defaultClient();
+ awaitHostRegistered(client, testHost(), LONG_WAIT_SECONDS, SECONDS);
+ awaitHostStatus(client, testHost(), UP, LONG_WAIT_SECONDS, SECONDS);
+
+ // Create a job without a token
+ final CreateJobResponse createJobResponse = cliJson(
+ CreateJobResponse.class, "create", testJobNameAndVersion, BUSYBOX);
+ assertThat(createJobResponse.getStatus(), equalTo(CreateJobResponse.Status.OK));
+
+ // Now run all operations which honor the token. Test
+ // that they work as expected with and without a token.
+
+ deploy(TOKEN, JobDeployResponse.Status.FORBIDDEN);
+ deploy(NO_TOKEN, JobDeployResponse.Status.OK);
+
+ stop(TOKEN, SetGoalResponse.Status.FORBIDDEN);
+ stop(NO_TOKEN, SetGoalResponse.Status.OK);
+
+ undeploy(TOKEN, JobUndeployResponse.Status.FORBIDDEN);
+ undeploy(NO_TOKEN, JobUndeployResponse.Status.OK);
+
+ remove(TOKEN, JobDeleteResponse.Status.FORBIDDEN);
+ remove(NO_TOKEN, JobDeleteResponse.Status.OK);
+ }
+
private void deploy(final String token, final JobDeployResponse.Status status)
throws Exception {
final List<String> args = buildArgs(token, testJobNameAndVersion, testHost());
|
[Tokens] Specifying token during operation on job which has no token set is no longer allowed.
Previously, if you specified a token while doing an operation on a job
where a token was not set, it would work. We no longer allow that.
|
spotify_helios
|
train
|
51e26af4b4a064af41bbda919a07663dc9702e87
|
diff --git a/lib/undies/template.rb b/lib/undies/template.rb
index <HASH>..<HASH> 100644
--- a/lib/undies/template.rb
+++ b/lib/undies/template.rb
@@ -25,12 +25,14 @@ module Undies
# Add a text node (data escaped) to the nodes of the current node
def _(data="")
- self.___add(Node.new(self.escape_html(data.to_s)))
+ self.__ self.escape_html(data.to_s)
end
# Add a text node with the data un-escaped
def __(data="")
- self.___add(Node.new(data.to_s))
+ node = Node.new(data.to_s)
+ self.___io << node.to_s if self.___io
+ self.___add(node)
end
# Add an element to the nodes of the current node
@@ -97,9 +99,6 @@ module Undies
end
def ___add(node)
- if self.___io && !node.kind_of?(Element)
- self.___io << node.to_s
- end
self.___stack.last.nodes.append(node)
end
|
refactored the '_' and '__' methods
|
redding_undies
|
train
|
8ecb0b22d80cae8f962b0de573fd276bbcac3385
|
diff --git a/parquet-hadoop/src/main/java/parquet/hadoop/util/ConfigurationUtil.java b/parquet-hadoop/src/main/java/parquet/hadoop/util/ConfigurationUtil.java
index <HASH>..<HASH> 100644
--- a/parquet-hadoop/src/main/java/parquet/hadoop/util/ConfigurationUtil.java
+++ b/parquet-hadoop/src/main/java/parquet/hadoop/util/ConfigurationUtil.java
@@ -26,7 +26,11 @@ public class ConfigurationUtil {
return null;
}
try {
- final Class<?> foundClass = Class.forName(className);
+ ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
+ if(classLoader == null){
+ classLoader = ConfigurationUtil.class.getClassLoader();
+ }
+ final Class<?> foundClass = Class.forName(className, true, classLoader);
if (!assignableFrom.isAssignableFrom(foundClass)) {
throw new BadConfigurationException("class " + className + " set in job conf at "
+ configName + " is not a subclass of " + assignableFrom.getCanonicalName());
|
First use the current thread's context classloader to load a class; if the current thread does not have one, fall back to the class's own classloader.
This makes sure a class that is not packaged in parquet but is on the classpath is loaded properly. Otherwise, for example, if you set your own ReadSupport class on the
Configuration object and expect it to be loaded by ParquetInputFormat, it will fail and throw ClassNotFoundException.
|
apache_parquet-mr
|
train
|
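The Java fix resolves the class through the calling thread's context classloader and falls back to the defining class's own loader. Python has no context classloader, but the equivalent routine, loading a class by dotted name from configuration and validating its base class, looks roughly like this sketch:

```python
import importlib


class BadConfigurationError(Exception):
    pass


def get_class_from_config(class_name, assignable_from):
    """Load `class_name` (a dotted path) and check its base class."""
    if not class_name:
        return None
    module_name, _, attr = class_name.rpartition(".")
    try:
        found = getattr(importlib.import_module(module_name), attr)
    except (ImportError, AttributeError, ValueError) as exc:
        raise BadConfigurationError(
            f"could not load class {class_name!r}") from exc
    if not issubclass(found, assignable_from):
        raise BadConfigurationError(
            f"class {class_name} is not a subclass of "
            f"{assignable_from.__name__}")
    return found
```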
09fcd52bd4840b176af7666d5f2bb74c66165a71
|
diff --git a/lxd/storage/backend_lxd.go b/lxd/storage/backend_lxd.go
index <HASH>..<HASH> 100644
--- a/lxd/storage/backend_lxd.go
+++ b/lxd/storage/backend_lxd.go
@@ -10,8 +10,9 @@ import (
"time"
"github.com/pkg/errors"
+ "golang.org/x/sys/unix"
log "gopkg.in/inconshreveable/log15.v2"
- yaml "gopkg.in/yaml.v2"
+ "gopkg.in/yaml.v2"
"github.com/lxc/lxd/lxd/backup"
"github.com/lxc/lxd/lxd/cluster/request"
@@ -858,6 +859,15 @@ func (b *lxdBackend) CreateInstanceFromCopy(inst instance.Instance, src instance
}
defer src.Unfreeze()
+
+ // Sync the filesystem.
+ rootfsFile, err := os.Open(src.RootfsPath())
+ if err != nil {
+ return err
+ }
+
+ unix.Syncfs(int(rootfsFile.Fd()))
+ rootfsFile.Close()
}
revert.Add(func() { b.DeleteInstance(inst, op) })
@@ -1971,6 +1981,15 @@ func (b *lxdBackend) MigrateInstance(inst instance.Instance, conn io.ReadWriteCl
}
defer inst.Unfreeze()
+
+ // Sync the filesystem.
+ rootfsFile, err := os.Open(inst.RootfsPath())
+ if err != nil {
+ return err
+ }
+
+ unix.Syncfs(int(rootfsFile.Fd()))
+ rootfsFile.Close()
}
err = b.driver.MigrateVolume(vol, conn, args, op)
@@ -2231,12 +2250,21 @@ func (b *lxdBackend) CreateInstanceSnapshot(inst instance.Instance, src instance
// Some driver backing stores require that running instances be frozen during snapshot.
if b.driver.Info().RunningCopyFreeze && src.IsRunning() && !src.IsFrozen() {
+ // Freeze the processes.
err = src.Freeze()
if err != nil {
return err
}
-
defer src.Unfreeze()
+
+ // Sync the filesystem.
+ rootfsFile, err := os.Open(src.RootfsPath())
+ if err != nil {
+ return err
+ }
+
+ unix.Syncfs(int(rootfsFile.Fd()))
+ rootfsFile.Close()
}
isSnap := inst.IsSnapshot()
|
lxd/storage: Sync before snapshotting
On storage drivers where Freeze is required for consistency, also call
Fsync to ensure that any pending write has been properly flushed to disk.
|
lxc_lxd
|
train
|
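The Go code freezes the instance and then calls `syncfs(2)` on its root filesystem so buffered writes reach disk before the snapshot or migration starts. The Python standard library has no direct `syncfs` wrapper; `os.sync()` flushes every filesystem, which is a coarser but workable approximation. A sketch of the freeze, sync, snapshot ordering (the `inst` object and its methods are hypothetical):

```python
import os


def snapshot_instance(inst):
    inst.freeze()            # stop the workload's processes first
    try:
        # Flush buffered writes so the snapshot sees a consistent tree.
        # os.sync() syncs all filesystems; syncfs(2), as used in the Go
        # code, would scope this to the instance's rootfs only.
        os.sync()
        inst.driver.create_snapshot(inst)
    finally:
        inst.unfreeze()      # always resume, even if snapshotting fails
```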
da572449037974f3707d3ba18e9bcce3cfb75c01
|
diff --git a/master/buildbot/test/integration/test_integration_secrets_with_vault.py b/master/buildbot/test/integration/test_integration_secrets_with_vault.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/test/integration/test_integration_secrets_with_vault.py
+++ b/master/buildbot/test/integration/test_integration_secrets_with_vault.py
@@ -38,12 +38,15 @@ class SecretsConfig(RunMasterBase):
if rv != 0:
raise SkipTest(
"Vault integration need docker environment to be setup")
+
+ self.addCleanup(self.remove_container)
+
rv = os.system("docker exec vault_for_buildbot /bin/sh -c "
"'export VAULT_ADDR=http://127.0.0.1:8200/\n"
"vault write secret/key value=word'")
self.assertEqual(rv, 0)
- def tearDown(self):
+ def remove_container(self):
os.system("docker rm -f vault_for_buildbot")
@defer.inlineCallbacks
|
test: Remove docker container even if vault setup fails
|
buildbot_buildbot
|
train
|
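The reason the diff works: `tearDown` only runs when `setUp` completes, whereas cleanups registered with `addCleanup` run even if `setUp` raises afterwards. A self-contained illustration of the difference:

```python
import os
import unittest


class SecretsConfigTest(unittest.TestCase):
    def setUp(self):
        # Registered before the step that can fail, so the container is
        # removed even when the assertion below aborts setUp.
        self.addCleanup(self.remove_container)
        rv = os.system("docker exec vault_for_buildbot true")
        self.assertEqual(rv, 0)  # if this fails, the cleanup still runs

    def remove_container(self):
        os.system("docker rm -f vault_for_buildbot")

    def test_noop(self):
        pass
```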
efe757f0691c09d1a1a92807e2f58010c576c52b
|
diff --git a/lib/ruby-fs-stack/familytree.rb b/lib/ruby-fs-stack/familytree.rb
index <HASH>..<HASH> 100644
--- a/lib/ruby-fs-stack/familytree.rb
+++ b/lib/ruby-fs-stack/familytree.rb
@@ -219,7 +219,11 @@ module Org::Familysearch::Ws::Familytree::V2::Schema
end
def buildFullText
- self.pieces.collect{|piece| "#{piece.predelimiters}#{piece.value}#{piece.postdelimiters}"}.join('')
+ if self.pieces.nil?
+ return ''
+ else
+ self.pieces.collect{|piece| "#{piece.predelimiters}#{piece.value}#{piece.postdelimiters}"}.join('')
+ end
end
end
diff --git a/spec/familytree_v2/person_spec.rb b/spec/familytree_v2/person_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/familytree_v2/person_spec.rb
+++ b/spec/familytree_v2/person_spec.rb
@@ -3,6 +3,8 @@ require 'ruby-fs-stack/familytree'
describe Org::Familysearch::Ws::Familytree::V2::Schema::Person do
+ FamTreeV2 = Org::Familysearch::Ws::Familytree::V2::Schema
+
def new_person
Org::Familysearch::Ws::Familytree::V2::Schema::Person.new
end
@@ -105,6 +107,13 @@ describe Org::Familysearch::Ws::Familytree::V2::Schema::Person do
@person.assertions.names = []
end
+ def add_blank_form
+ nameAssertion = FamTreeV2::NameAssertion.new
+ nameAssertion.value = FamTreeV2::NameValue.new
+ nameAssertion.value.forms = [FamTreeV2::NameForm.new]
+ @person.assertions.names[0] = nameAssertion
+ end
+
before(:each) do
@person = parse_person('KJ86-3VD_version.js')
end
@@ -124,6 +133,13 @@ describe Org::Familysearch::Ws::Familytree::V2::Schema::Person do
@person.full_names.should == []
end
+ it "should return [] if a only a blank nameform" do
+ add_assertions
+ add_names_array
+ add_blank_form
+ @person.full_names.should == ['']
+ end
+
end
end
|
Fixed a Person#full_names bug when a NameForm had no fullText or pieces.
|
jimmyz_ruby-fs-stack
|
train
|
b72fa6d8148eba085a8e1e31658d94e30b9d36b1
|
diff --git a/src/org/jgroups/protocols/JDBC_PING.java b/src/org/jgroups/protocols/JDBC_PING.java
index <HASH>..<HASH> 100644
--- a/src/org/jgroups/protocols/JDBC_PING.java
+++ b/src/org/jgroups/protocols/JDBC_PING.java
@@ -48,7 +48,7 @@ public class JDBC_PING extends FILE_PING {
protected String connection_driver;
@Property(description = "If not empty, this SQL statement will be performed at startup."
- + "Customize it to create the needed table on those databases which permit table creation attempt without loosing data, such as "
+ + "Customize it to create the needed table on those databases which permit table creation attempt without losing data, such as "
+ "PostgreSQL and MySQL (using IF NOT EXISTS). To allow for creation attempts, errors performing this statement will be logged"
+ "but not considered fatal. To avoid any DDL operation, set this to an empty string.")
protected String initialize_sql =
|
Fixed typo: loosing -> losing
|
belaban_JGroups
|
train
|
a2e5132221024e99b1534f2b574cfad3ef4448bb
|
diff --git a/src/browser/extension/devpanel/index.js b/src/browser/extension/devpanel/index.js
index <HASH>..<HASH> 100644
--- a/src/browser/extension/devpanel/index.js
+++ b/src/browser/extension/devpanel/index.js
@@ -30,7 +30,7 @@ function showDevTools() {
rendered = true;
} catch (error) {
render(
- <div>{error.toString()}</div>,
+ <pre>{error.stack}</pre>,
document.getElementById('root')
);
}
|
Include stack trace when throwing an exception in Chrome Devtools panel
|
zalmoxisus_redux-devtools-extension
|
train
|
03e960bea300a13a1d7a0e540453540f1fb5b936
|
diff --git a/safe_qgis/tools/osm_downloader.py b/safe_qgis/tools/osm_downloader.py
index <HASH>..<HASH> 100644
--- a/safe_qgis/tools/osm_downloader.py
+++ b/safe_qgis/tools/osm_downloader.py
@@ -366,7 +366,7 @@ class OsmDownloader(QDialog, Ui_OsmDownloaderBase):
Currently 'buildings' or 'roads' are supported.
:type feature_type: str
- :raises: OSMDownloaderError - when buildings.shp not exist
+ :raises: ImportDialogError - when buildings.shp not exist
"""
output_prefix = self.filename_prefix.text()
path = str(self.output_directory.text())
|
Fix wrong raises docstring.
|
inasafe_inasafe
|
train
|
e780e2fda326fa5ea616e1441a6ed81b2890085c
|
diff --git a/activerecord/CHANGELOG.md b/activerecord/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/activerecord/CHANGELOG.md
+++ b/activerecord/CHANGELOG.md
@@ -1,3 +1,10 @@
+* Provide :touch option to save() to accommodate saving without updating
+ timestamps.
+
+ Fixes #18202
+
+ *Dan Olson*
+
* Provide a more helpful error message when an unsupported class is passed to
`serialize`
diff --git a/activerecord/lib/active_record/callbacks.rb b/activerecord/lib/active_record/callbacks.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/callbacks.rb
+++ b/activerecord/lib/active_record/callbacks.rb
@@ -298,7 +298,7 @@ module ActiveRecord
private
- def create_or_update #:nodoc:
+ def create_or_update(*) #:nodoc:
_run_save_callbacks { super }
end
diff --git a/activerecord/lib/active_record/persistence.rb b/activerecord/lib/active_record/persistence.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/persistence.rb
+++ b/activerecord/lib/active_record/persistence.rb
@@ -116,8 +116,8 @@ module ActiveRecord
#
# Attributes marked as readonly are silently ignored if the record is
# being updated.
- def save(*)
- create_or_update
+ def save(*args)
+ create_or_update(*args)
rescue ActiveRecord::RecordInvalid
false
end
@@ -138,8 +138,8 @@ module ActiveRecord
#
# Attributes marked as readonly are silently ignored if the record is
# being updated.
- def save!(*)
- create_or_update || raise(RecordNotSaved.new(nil, self))
+ def save!(*args)
+ create_or_update(*args) || raise(RecordNotSaved.new(nil, self))
end
# Deletes the record in the database and freezes this instance to
@@ -498,9 +498,9 @@ module ActiveRecord
relation
end
- def create_or_update
+ def create_or_update(*args)
raise ReadOnlyRecord, "#{self.class} is marked as readonly" if readonly?
- result = new_record? ? _create_record : _update_record
+ result = new_record? ? _create_record : _update_record(*args)
result != false
end
diff --git a/activerecord/lib/active_record/timestamp.rb b/activerecord/lib/active_record/timestamp.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/timestamp.rb
+++ b/activerecord/lib/active_record/timestamp.rb
@@ -57,8 +57,8 @@ module ActiveRecord
super
end
- def _update_record(*args)
- if should_record_timestamps?
+ def _update_record(*args, touch: true, **options)
+ if touch && should_record_timestamps?
current_time = current_time_from_proper_timezone
timestamp_attributes_for_update_in_model.each do |column|
@@ -67,7 +67,7 @@ module ActiveRecord
write_attribute(column, current_time)
end
end
- super
+ super *args
end
def should_record_timestamps?
diff --git a/activerecord/test/cases/persistence_test.rb b/activerecord/test/cases/persistence_test.rb
index <HASH>..<HASH> 100644
--- a/activerecord/test/cases/persistence_test.rb
+++ b/activerecord/test/cases/persistence_test.rb
@@ -878,4 +878,35 @@ class PersistenceTest < ActiveRecord::TestCase
assert_equal "Welcome to the weblog", post.title
assert_not post.new_record?
end
+
+ class SaveTest < ActiveRecord::TestCase
+ self.use_transactional_fixtures = false
+
+ def test_save_touch_false
+ widget = Class.new(ActiveRecord::Base) do
+ connection.create_table :widgets, force: true do |t|
+ t.string :name
+ t.timestamps null: false
+ end
+
+ self.table_name = :widgets
+ end
+
+ instance = widget.create!({
+ name: 'Bob',
+ created_at: 1.day.ago,
+ updated_at: 1.day.ago
+ })
+
+ created_at = instance.created_at
+ updated_at = instance.updated_at
+
+ instance.name = 'Barb'
+ instance.save!(touch: false)
+ assert_equal instance.created_at, created_at
+ assert_equal instance.updated_at, updated_at
+ ensure
+ ActiveRecord::Base.connection.drop_table :widgets
+ end
+ end
end
|
Provide :touch option to save() to accommodate saving without updating timestamps. [#<I>]
|
rails_rails
|
train
|
6f92effcb9b8241546f1c114f373dbcc2b507518
|
diff --git a/shadows/framework/src/main/java/org/robolectric/shadows/ShadowParcel.java b/shadows/framework/src/main/java/org/robolectric/shadows/ShadowParcel.java
index <HASH>..<HASH> 100644
--- a/shadows/framework/src/main/java/org/robolectric/shadows/ShadowParcel.java
+++ b/shadows/framework/src/main/java/org/robolectric/shadows/ShadowParcel.java
@@ -196,7 +196,7 @@ public class ShadowParcel {
}
@Implementation(minSdk = O_MR1)
- public static boolean nativeReadByteArray(long nativePtr, byte[] dest, int destLen) {
+ protected static boolean nativeReadByteArray(long nativePtr, byte[] dest, int destLen) {
return NATIVE_PTR_TO_PARCEL.get(nativePtr).readByteArray(dest, destLen);
}
|
Make new @Implementation method protected.
|
robolectric_robolectric
|
train
|
16a5527fbb720a9d69e243946c285b94e41aa00f
|
diff --git a/colab/accounts/forms.py b/colab/accounts/forms.py
index <HASH>..<HASH> 100644
--- a/colab/accounts/forms.py
+++ b/colab/accounts/forms.py
@@ -181,7 +181,7 @@ class UserCreationForm(UserForm):
'password_mismatch': _("The two password fields didn't match."),
}
username = forms.RegexField(label=_("Username"), max_length=30,
- regex=r'^[\w.@+-]+$',
+ regex=r'^[\w]+$',
help_text=_(("Required. 30 characters or fewer"
". Letter and digits.")),
error_messages={
@@ -251,7 +251,7 @@ class UserCreationForm(UserForm):
class UserChangeForm(forms.ModelForm):
username = forms.RegexField(
- label=_("Username"), max_length=30, regex=r'^[\w.@+-]+$',
+ label=_("Username"), max_length=30, regex=r'^[\w]+$',
help_text=_("Required. 30 characters or fewer. Letters and digits."),
error_messages={
'invalid': _("This value may contain only letters and numbers.")})
diff --git a/colab/accounts/tests/test_forms.py b/colab/accounts/tests/test_forms.py
index <HASH>..<HASH> 100644
--- a/colab/accounts/tests/test_forms.py
+++ b/colab/accounts/tests/test_forms.py
@@ -64,7 +64,7 @@ class FormTest(TestCase):
def test_valid_username(self):
form = self.create_form_data('user@email.com',
- 'colab123@colab-spb.com')
+ 'colab123')
self.assertTrue(form.is_valid())
def test_not_valid_username(self):
@@ -73,7 +73,7 @@ class FormTest(TestCase):
self.assertFalse(form.is_valid())
def test_update_valid_username(self):
- form = self.create_change_form_data('colab123@colab-spb.com')
+ form = self.create_change_form_data('colab123')
self.assertTrue(form.is_valid())
def test_update_not_valid_username(self):
|
Changed username regex for the user creation and change forms
|
colab_colab
|
train
|
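The behavioural change is from "word characters plus `.@+-`" to word characters only. A quick check of the two patterns (note that `\w` also matches `_`, so the "letters and digits" help text remains slightly loose):

```python
import re

OLD = re.compile(r'^[\w.@+-]+$')
NEW = re.compile(r'^[\w]+$')

assert OLD.match('colab123@colab-spb.com')           # accepted before
assert NEW.match('colab123@colab-spb.com') is None   # rejected now
assert NEW.match('colab123')                         # still valid
```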
efc71703634f21923670a122a843f38376fa53e6
|
diff --git a/launch_control/commands/dashboard.py b/launch_control/commands/dashboard.py
index <HASH>..<HASH> 100644
--- a/launch_control/commands/dashboard.py
+++ b/launch_control/commands/dashboard.py
@@ -271,7 +271,7 @@ class XMLRPCCommand(Command):
def invoke(self):
try:
- self.invoke_remote()
+ return self.invoke_remote()
except socket.error as ex:
print >>sys.stderr, "Unable to connect to server at %s" % (
self.args.dashboard_url,)
@@ -293,6 +293,7 @@ class XMLRPCCommand(Command):
print >>sys.stderr, "HTTP error code: %d/%s" % (ex.errcode, ex.errmsg)
except xmlrpclib.Fault as ex:
self.handle_xmlrpc_fault(ex.faultCode, ex.faultString)
+ return -1
def handle_xmlrpc_fault(self, faultCode, faultString):
if faultCode == 500:
@@ -377,7 +378,7 @@ class get(XMLRPCCommand):
print "File {filename!r} already exists".format(
filename=filename)
print "You may pass --overwrite to write over it"
- return
+ return -1
stream = open(filename, "wb")
else:
stream = self.args.output
diff --git a/launch_control/commands/dispatcher.py b/launch_control/commands/dispatcher.py
index <HASH>..<HASH> 100644
--- a/launch_control/commands/dispatcher.py
+++ b/launch_control/commands/dispatcher.py
@@ -1,8 +1,8 @@
"""
Module with LaunchControlDispatcher - the command dispatcher
"""
-
import argparse
+import sys
from launch_control.commands.interface import Command
@@ -36,9 +36,9 @@ class LaunchControlDispatcher(object):
def dispatch(self, args=None):
args = self.parser.parse_args(args)
command = args.command_cls(self.parser, args)
- command.invoke()
+ return command.invoke()
def main():
- LaunchControlDispatcher().dispatch()
+ sys.exit(LaunchControlDispatcher().dispatch())
|
Allow commands to return error codes on error
|
zyga_json-schema-validator
|
train
|
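The pattern in miniature: each command returns an int (or `None` for success), the dispatcher propagates it, and `main` hands it to `sys.exit` so shell callers can branch on `$?`. A minimal sketch with an illustrative command class:

```python
import sys


class GetCommand:
    def invoke(self):
        filename = "bundle.json"
        try:
            open(filename, "x").close()   # fail if the file exists
        except FileExistsError:
            print(f"File {filename!r} already exists", file=sys.stderr)
            return -1        # nonzero exit status for the shell
        return None          # sys.exit(None) means success (status 0)


def dispatch():
    return GetCommand().invoke()


def main():
    sys.exit(dispatch())     # propagate the command's status code
```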
63486bd173d23a2b0c48836e66d6b79557996178
|
diff --git a/lib/restforce/collection.rb b/lib/restforce/collection.rb
index <HASH>..<HASH> 100644
--- a/lib/restforce/collection.rb
+++ b/lib/restforce/collection.rb
@@ -24,7 +24,7 @@ module Restforce
# Return array of the elements on the current page
def current_page
- @raw_page['records'].collect { |record| Restforce::Mash.build(record, @client) }
+ first(@raw_page['records'].size)
end
# Return the current and all of the following pages.
|
Removing duplication of records parsing code
|
restforce_restforce
|
train
|
304dc52a5638c77e31a96b6cfccf9f0521b45b6b
|
diff --git a/lib/parser/tokenize.js b/lib/parser/tokenize.js
index <HASH>..<HASH> 100644
--- a/lib/parser/tokenize.js
+++ b/lib/parser/tokenize.js
@@ -91,7 +91,7 @@ function tokenize(s) {
}
} else if (c === '"' || c === "'") {
pushToken(c === '"' ? TokenType.StringDQ : TokenType.StringSQ, ln, pos - lineStartPos, parseString(s, c));
- } else if (c === ' ' || c === '\n' || c === '\r' || c === '\t') {
+ } else if (c === ' ' || c === '\n' || c === '\r' || c === '\t' || c === '\f') {
pushToken(TokenType.Space, ln, pos - lineStartPos, parseSpaces(s));
} else if (c in Punctuation) {
pushToken(Punctuation[c], ln, pos - lineStartPos, c);
@@ -122,10 +122,18 @@ function parseSpaces(s) {
for (; pos < s.length; pos++) {
var c = s.charAt(pos);
- if (c === '\n') {
+ // \n or \f
+ if (c === '\n' || c === '\f') {
ln++;
lineStartPos = pos;
- } else if (c !== ' ' && c !== '\r' && c !== '\t') {
+ // \r + optional \n
+ } else if (c === '\r') {
+ ln++;
+ if (s.charAt(pos + 1) === '\n') {
+ pos++;
+ }
+ lineStartPos = pos;
+ } else if (c !== ' ' && c !== '\t') {
break;
}
}
@@ -158,27 +166,42 @@ function parseSLComment(s) {
for (pos = pos + 2; pos < s.length; pos++) {
if (s.charAt(pos) === '\n' || s.charAt(pos) === '\r') {
- pos++;
break;
}
}
- pos--;
return s.substring(start, pos + 1);
}
function parseString(s, q) {
var start = pos;
+ var res = '';
for (pos = pos + 1; pos < s.length; pos++) {
if (s.charAt(pos) === '\\') {
- pos++;
+ var next = s.charAt(pos + 1);
+ // \n or \f
+ if (next === '\n' || next === '\f') {
+ res += s.substring(start, pos);
+ start = pos + 2;
+ pos++;
+ // \r + optional \n
+ } else if (next === '\r') {
+ res += s.substring(start, pos);
+ if (s.charAt(pos + 2) === '\n') {
+ pos++;
+ }
+ start = pos + 2;
+ pos++;
+ } else {
+ pos++;
+ }
} else if (s.charAt(pos) === q) {
break;
}
}
- return s.substring(start, pos + 1);
+ return res + s.substring(start, pos + 1);
}
function parseDecimalNumber(s) {
diff --git a/test/test.js b/test/test.js
index <HASH>..<HASH> 100644
--- a/test/test.js
+++ b/test/test.js
@@ -15,11 +15,15 @@ function createParseTest(name, test, scope) {
});
}
+function normalize(str) {
+ return str.replace(/\n|\r\n?|\f/g, '\n');
+}
+
function createCompressTest(name, test) {
return it(name, function() {
var compressed = csso.minify(test.source);
- assert.equal(compressed, test.compressed);
+ assert.equal(normalize(compressed), normalize(test.compressed));
});
}
@@ -63,7 +67,7 @@ describe('csso', function() {
}, {});
for (var name in tests) {
- createCompressTest(path.relative(__dirname, dir) + '/' + name + '.css', tests[name]);
+ createCompressTest(path.join(path.relative(__dirname, dir), name + '.css'), tests[name]);
}
}
|
fixes for windows (correct new-line processing, normalize file content in test suite)
|
css_csso
|
train
|
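The `normalize` helper at the bottom of the diff is the portable core of the fix: treat `\r\n`, a lone `\r`, and a form feed as one logical newline. The same normalization in Python:

```python
import re


def normalize(text):
    # \r\n is tried first so it collapses to a single \n,
    # then bare \r, then form feed.
    return re.sub(r'\r\n?|\f', '\n', text)


assert normalize('a\r\nb\rc\fd') == 'a\nb\nc\nd'
```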
73fbcef06ff0b387c69b645b1fcc8d0d6dbf7e20
|
diff --git a/lib/compareLocales.js b/lib/compareLocales.js
index <HASH>..<HASH> 100644
--- a/lib/compareLocales.js
+++ b/lib/compareLocales.js
@@ -2,6 +2,9 @@
module.exports = function(localeA, localeB, options) {
options = options || {};
+ if (typeof localeA !== 'object' || typeof localeB !== 'object') {
+ throw new Error('s18n: `localeA` and `localeB` must be objects.');
+ }
var localeAMissing = {};
var localeBMissing = {};
diff --git a/test/spec.compareLocales.js b/test/spec.compareLocales.js
index <HASH>..<HASH> 100644
--- a/test/spec.compareLocales.js
+++ b/test/spec.compareLocales.js
@@ -10,6 +10,21 @@ describe('s18n.compareLocales()', function() {
assert.equal(typeof s18n.compareLocales, 'function');
});
+ it('should error if either locale is not an object', function() {
+ var localeA = 'a string';
+ var localeB = {
+ '37b51d19': 'bar',
+ 'acbd18db': 'fóó',
+ '149603e6': 'óld'
+ };
+ assert.throws(
+ function() {
+ s18n.compareLocales(localeA, localeB);
+ },
+ /`localeA`/,
+ 'unexpected error message');
+ });
+
it('should return a locale comparison array', function() {
var localeA = {
'37b51d19': 'bar',
@@ -24,36 +39,28 @@ describe('s18n.compareLocales()', function() {
var comparisonArray = s18n.compareLocales(localeA, localeB);
assert.deepEqual(comparisonArray, [
//0: hashes missing from localeA (present in localeB)
- [
- {
- hash: '149603e6',
- string: 'óld'
- }
- ],
+ [{
+ hash: '149603e6',
+ string: 'óld'
+ }],
//1: hashes missing from localeB (present in localeA)
- [
- {
- hash: '73feffa4',
- string: 'baz'
- }
- ],
+ [{
+ hash: '73feffa4',
+ string: 'baz'
+ }],
//2: hashes with duplicate strings (likely untranslated)
- [
- {
- hash: '37b51d19',
- string: 'bar'
- }
- ],
+ [{
+ hash: '37b51d19',
+ string: 'bar'
+ }],
//3: hashes with modified strings
- [
- {
- hash: 'acbd18db',
- strings: [
- 'foo',
- 'fóó'
- ]
- }
- ]
+ [{
+ hash: 'acbd18db',
+ strings: [
+ 'foo',
+ 'fóó'
+ ]
+ }]
]);
});
|
compareLocales throws error if either locale is not an object
|
bitjson_s18n
|
train
|
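Guarding a public API with an explicit type check, plus a test that pins part of the error message, is the same two-step in Python; a terse sketch using pytest:

```python
import pytest


def compare_locales(locale_a, locale_b, options=None):
    if not isinstance(locale_a, dict) or not isinstance(locale_b, dict):
        raise TypeError("s18n: `localeA` and `localeB` must be objects.")
    ...  # comparison proper goes here


def test_errors_when_locale_is_not_a_mapping():
    with pytest.raises(TypeError, match=r"`localeA`"):
        compare_locales("a string", {"37b51d19": "bar"})
```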
288f5746b90087bdb544e9e46beeaf73e6c1ab9f
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -6,14 +6,14 @@ module.exports = function (pluginConfig, config, cb) {
if (env.WERCKER !== "true") {
return cb(new SRError(
- "semantic-release didn’t run on wercker and therefore a new version won’t be published.",
+ "ENOWERCKER: semantic-release didn’t run on wercker and therefore a new version won’t be published.",
"ENOWERCKER"
));
}
if (env.WERCKER_GIT_BRANCH !== options.branch) {
return cb(new SRError(
- "This test run was triggered on the branch " + env.WERCKER_GIT_BRANCH +
+ "EBRANCHMISMATCH: This test run was triggered on the branch " + env.WERCKER_GIT_BRANCH +
", while semantic-release is configured to only publish from " + options.branch + ".",
"EBRANCHMISMATCH"
));
@@ -22,7 +22,7 @@ module.exports = function (pluginConfig, config, cb) {
// WERCKER_RESULT is set only for after-steps
if (env.hasOwnProperty("WERCKER_RESULT") && env.WERCKER_RESULT !== "passed") {
return cb(new SRError(
- "This test run was not passed and therefore a new version won’t be published.",
+ "EFAILED: This test run was not passed and therefore a new version won’t be published.",
"EFAILED"
));
}
|
fix: Put error code into message
semantic-release currently does not put the error code into the log message, so the
io-monad/semantic-release step cannot get the error code from messages.
|
io-monad_sr-condition-wercker
|
train
|
03ed104c6e46bb25c2a1b98270b122f33ba7297f
|
diff --git a/src/widgets/IndexLayoutSwitcher.php b/src/widgets/IndexLayoutSwitcher.php
index <HASH>..<HASH> 100644
--- a/src/widgets/IndexLayoutSwitcher.php
+++ b/src/widgets/IndexLayoutSwitcher.php
@@ -10,6 +10,7 @@
namespace hipanel\widgets;
+use hipanel\helpers\Url;
use hipanel\models\IndexPageUiOptions;
use yii\base\Widget;
use yii\bootstrap\ButtonGroup;
@@ -29,21 +30,34 @@ class IndexLayoutSwitcher extends Widget
'buttons' => [
Html::a(
'<i class="fa fa-pause" aria-hidden="true"></i>',
- ['index', 'orientation' => IndexPageUiOptions::ORIENTATION_HORIZONTAL],
- ['class' => 'btn btn-default btn-sm ' . ($this->isOrientation(IndexPageUiOptions::ORIENTATION_HORIZONTAL) ? 'active' : '')]),
+ Url::current(['orientation' => IndexPageUiOptions::ORIENTATION_HORIZONTAL]),
+ ['class' => 'btn btn-default btn-sm ' .
+ $this->getClassActive(IndexPageUiOptions::ORIENTATION_HORIZONTAL)
+ ]),
Html::a(
'<i class="fa fa-pause fa-rotate-90" aria-hidden="true"></i>',
- ['index', 'orientation' => IndexPageUiOptions::ORIENTATION_VERTICAL],
- ['class' => 'btn btn-default btn-sm ' . ($this->isOrientation(IndexPageUiOptions::ORIENTATION_VERTICAL) ? 'active' : '')]),
+ Url::current(['orientation' => IndexPageUiOptions::ORIENTATION_VERTICAL]),
+ ['class' => 'btn btn-default btn-sm ' .
+ $this->getClassActive(IndexPageUiOptions::ORIENTATION_VERTICAL)
+ ]),
],
]);
}
/**
+ * @param string $orientation
+ * @return string
+ */
+ private function getClassActive(string $orientation): string
+ {
+ return $this->isOriented($orientation) ? 'active' : '';
+ }
+
+ /**
* @param $orientation
* @return bool
*/
- private function isOrientation($orientation)
+ private function isOriented(string $orientation): bool
{
return $this->uiModel->orientation === $orientation;
}
|
Fixed filters resetting after a layout change (#<I>)
After changing the layout, the GET parameters of the current URL were being
erased, so filter storage didn't work.
|
hiqdev_hipanel-core
|
train
|
d30f7098aa3400bfb46e8e341ccda1cde7ef70cf
|
diff --git a/pyforms/gui/Controls/ControlList.py b/pyforms/gui/Controls/ControlList.py
index <HASH>..<HASH> 100755
--- a/pyforms/gui/Controls/ControlList.py
+++ b/pyforms/gui/Controls/ControlList.py
@@ -32,10 +32,8 @@ class ControlList(ControlBase, QWidget):
self._plusFunction = plusFunction
self._minusFunction = minusFunction
ControlBase.__init__(self, label, defaultValue)
-
- def __repr__(self): return "ControlList "+str(self._value)
-
+ def __repr__(self): return "ControlList " + str(self._value)
def initForm(self):
plusFunction = self._plusFunction
@@ -80,7 +78,7 @@ class ControlList(ControlBase, QWidget):
self.currentCellChanged(nextRow, nextCol, previousRow, previousCol)
self.changed()
- def tableWidgetItemChanged(self, current, previous):
+ def tableWidgetItemChanged(self, current, previous):
self.currentItemChanged(current, previous)
self.changed()
@@ -88,23 +86,28 @@ class ControlList(ControlBase, QWidget):
def itemSelectionChanged(self): pass
- def currentCellChanged(self, nextRow, nextCol, previousRow, previousCol): pass
+ def currentCellChanged(
+ self, nextRow, nextCol, previousRow, previousCol): pass
def currentItemChanged(self, current, previous): pass
- def clear(self):
- self.tableWidget.clear()
- self.tableWidget.setColumnCount(3)
- self.tableWidget.setRowCount(0)
-
+ def clear(self, headers=False):
+ if headers:
+ self.tableWidget.clear()
+ self.tableWidget.setColumnCount(3)
+ self.tableWidget.setRowCount(0)
+ else:
+ self.tableWidget.clearContents()
+ self.tableWidget.setRowCount(0)
+
def __add__(self, other):
index = self.tableWidget.rowCount()
- self.tableWidget.insertRow( index )
- if self.tableWidget.currentColumn()<len(other):
+ self.tableWidget.insertRow(index)
+ if self.tableWidget.currentColumn() < len(other):
self.tableWidget.setColumnCount(len(other))
-
+
for i in range(0, len(other)):
v = other[i]
args = [str(v)] if not hasattr(
@@ -164,8 +167,6 @@ class ControlList(ControlBase, QWidget):
@property
def count(self): return self.tableWidget.rowCount()
-
-
@property
def value(self):
if hasattr(self, 'tableWidget'):
|
PEP 8 compliance.
Added new functionality: when clearing the table, give the choice of whether or not to reset the current table headers.
|
UmSenhorQualquer_pyforms
|
train
|
22d099b752157efbf0b50662acae8dddbba39927
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ from distutils.core import setup
setup(
name = "1and1",
packages = ['oneandone'],
- version = "1.6.0",
+ version = "1.7.3",
author = "Tyler Burkhardt (stackpoint.io)",
author_email = "tyler@stackpointcloud.com",
description = ("1&1 API Client Library for Python"),
|
Incremented setup version to the latest release.
|
1and1_oneandone-cloudserver-sdk-python
|
train
|
ed911de68831fcb3894e3e41775c097c01c057d9
|
diff --git a/lib/hammer_cli_katello/organization.rb b/lib/hammer_cli_katello/organization.rb
index <HASH>..<HASH> 100644
--- a/lib/hammer_cli_katello/organization.rb
+++ b/lib/hammer_cli_katello/organization.rb
@@ -46,14 +46,15 @@ module HammerCLIKatello
build_options
end
- class DeleteCommand < HammerCLIKatello::DeleteCommand
- resource :organizations, :destroy
+ # TODO: ORG_DESTROY - reenable this command after org destroy has been implemented
+ # class DeleteCommand < HammerCLIKatello::DeleteCommand
+ # resource :organizations, :destroy
- success_message _("Organization deleted")
- failure_message _("Could not delete the organization")
+ # success_message _("Organization deleted")
+ # failure_message _("Could not delete the organization")
- build_options
- end
+ # build_options
+ # end
autoload_subcommands
end
|
Fixes #<I>,BZ<I> - Disable organization deletion
|
Katello_hammer-cli-katello
|
train
|
9a2493167994eda0f263e8eca9b6e8591b7f3fcd
|
diff --git a/lib/SymlinkPlugin.js b/lib/SymlinkPlugin.js
index <HASH>..<HASH> 100644
--- a/lib/SymlinkPlugin.js
+++ b/lib/SymlinkPlugin.js
@@ -23,10 +23,6 @@ module.exports = class SymlinkPlugin {
let containsSymlink = false;
forEachBail.withIndex(paths, (path, idx, callback) => {
- if(/^[a-zA-Z]:$/.test(path)) {
- // 'F:' resolves to the current working directory of drive 'F' if the current working directory is a symlink.
- path += "\\";
- }
fs.readlink(path, (err, result) => {
if(!err && result) {
pathSeqments[idx] = result;
diff --git a/lib/getPaths.js b/lib/getPaths.js
index <HASH>..<HASH> 100644
--- a/lib/getPaths.js
+++ b/lib/getPaths.js
@@ -10,15 +10,15 @@ module.exports = function getPaths(path) {
const seqments = [parts[parts.length - 1]];
let part = parts[parts.length - 1];
path = path.substr(0, path.length - part.length - 1);
- paths.push(path);
for(let i = parts.length - 2; i > 2; i -= 2) {
+ paths.push(path);
part = parts[i];
path = path.substr(0, path.length - part.length) || "/";
- paths.push(path);
seqments.push(part.substr(0, part.length - 1));
}
part = parts[1];
- seqments.push(part.length > 1 ? part.substr(0, part.length - 1) : part);
+ seqments.push(part);
+ paths.push(part);
return {
paths: paths,
seqments: seqments
|
move fix into getPaths
|
webpack_enhanced-resolve
|
train
|
41e0df8f32d0baa065242c14bc4266c90be54599
|
diff --git a/scalar.py b/scalar.py
index <HASH>..<HASH> 100755
--- a/scalar.py
+++ b/scalar.py
@@ -13,7 +13,6 @@ def main():
print(f.header);
print("reading in data");
data = f.get_data(pool_size=24,lazy=False);
- print("selecting data");
print("dumping");
with open(sys.argv[2],"wb") as f:
cPickle.dump(data,f,2);
|
removed extraneous print line...
|
noobermin_lspreader
|
train
|
3827744657d845ad5b518c619cbb55b46af7764e
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,9 @@
# Changelog
+### 1.0.0 RC34 - 2015-01-28
+
+- Fix: Don't attempt to remove cached assets if they don't exist.
+
### 1.0.0 RC33 - 2015-01-28
- Fix: Doesn't assume site plugin path is in site source directory. Thanks [@drallgood](https://github.com/octopress/feeds/issues/10)!
diff --git a/lib/octopress-ink/assets/asset.rb b/lib/octopress-ink/assets/asset.rb
index <HASH>..<HASH> 100644
--- a/lib/octopress-ink/assets/asset.rb
+++ b/lib/octopress-ink/assets/asset.rb
@@ -55,7 +55,7 @@ module Octopress
if files.empty?
raise IOError.new "Could not find #{File.basename(file)} at #{file}"
end
- @found_file = Pathname.new files[0]
+ @found_file = files[0]
end
end
@@ -64,7 +64,7 @@ module Octopress
end
def read
- path.read
+ File.read(path)
end
def add
diff --git a/lib/octopress-ink/assets/sass.rb b/lib/octopress-ink/assets/sass.rb
index <HASH>..<HASH> 100644
--- a/lib/octopress-ink/assets/sass.rb
+++ b/lib/octopress-ink/assets/sass.rb
@@ -23,7 +23,7 @@ module Octopress
end
def ext
- path.extname
+ File.extname(path)
end
def load_paths
diff --git a/lib/octopress-ink/cache.rb b/lib/octopress-ink/cache.rb
index <HASH>..<HASH> 100644
--- a/lib/octopress-ink/cache.rb
+++ b/lib/octopress-ink/cache.rb
@@ -30,11 +30,13 @@ module Octopress
end
def clean
- remove = Find.find(INK_CACHE_DIR).to_a.reject do |file|
- @cache_files.include?(file) || File.directory?(file)
- end
+ if File.directory?(INK_CACHE_DIR)
+ remove = Find.find(INK_CACHE_DIR).to_a.reject do |file|
+ @cache_files.include?(file) || File.directory?(file)
+ end
- FileUtils.rm remove
+ FileUtils.rm(remove)
+ end
end
end
end
diff --git a/lib/octopress-ink/plugin_asset_pipeline.rb b/lib/octopress-ink/plugin_asset_pipeline.rb
index <HASH>..<HASH> 100644
--- a/lib/octopress-ink/plugin_asset_pipeline.rb
+++ b/lib/octopress-ink/plugin_asset_pipeline.rb
@@ -171,7 +171,7 @@ module Octopress
end
def self.compress_js?(file)
- Ink.configuration['asset_pipeline']['compress_js'] && !file.file.end_with?('.min.js')
+ Ink.configuration['asset_pipeline']['compress_js'] && !file.path.end_with?('.min.js')
end
def self.write_files(source, dest)
diff --git a/lib/octopress-ink/version.rb b/lib/octopress-ink/version.rb
index <HASH>..<HASH> 100644
--- a/lib/octopress-ink/version.rb
+++ b/lib/octopress-ink/version.rb
@@ -1,5 +1,5 @@
module Octopress
module Ink
- VERSION = "1.0.0.rc.33"
+ VERSION = "1.0.0.rc.34"
end
end
|
Fixed plugin path issues and internal asset paths. Closes #<I>.
|
octopress_ink
|
train
|
76b70834b0e46b4a5eeb2daa67ed3741d8931500
|
diff --git a/code/extensions/BlocksSiteTreeExtension.php b/code/extensions/BlocksSiteTreeExtension.php
index <HASH>..<HASH> 100755
--- a/code/extensions/BlocksSiteTreeExtension.php
+++ b/code/extensions/BlocksSiteTreeExtension.php
@@ -12,6 +12,7 @@ use SilverStripe\Forms\CheckboxField;
use SilverStripe\Forms\FieldList;
use SilverStripe\Forms\LiteralField;
use SilverStripe\Forms\ListboxField;
+use SilverStripe\Forms\Tab;
use SilverStripe\Forms\GridField\GridField;
use SilverStripe\View\SSViewer;
use SilverStripe\ORM\ArrayList;
@@ -100,9 +101,10 @@ class BlocksSiteTreeExtension extends SiteTreeExtension
$gridConfig = GridFieldConfigBlockManager::create(true, true, true, true)
->addExisting($this->owner->class)
//->addBulkEditing()
- ->addComponent(new GridFieldOrderableRows())
+ // ->addComponent(new GridFieldOrderableRows()) // Comment until below TODO is complete.
;
+
// TODO it seems this sort is not being applied...
$gridSource = $this->owner->Blocks();
// ->sort(array(
@@ -113,6 +115,7 @@ class BlocksSiteTreeExtension extends SiteTreeExtension
$fields->addFieldToTab('Root.Blocks', GridField::create('Blocks', _t('Block.PLURALNAME', 'Blocks'), $gridSource, $gridConfig));
+
// Blocks inherited from BlockSets
if ($this->blockManager->getUseBlockSets()) {
$inheritedBlocks = $this->getBlocksFromAppliedBlockSets(null, true);
|
Adding Tab use statement and commenting out the non-working sort
|
sheadawson_silverstripe-blocks
|
train
|
bd9f162efaba56d4e0bfa5e980d50781affd47b4
|
diff --git a/xchange-gatecoin/src/main/java/org/knowm/xchange/gatecoin/GatecoinAuthenticated.java b/xchange-gatecoin/src/main/java/org/knowm/xchange/gatecoin/GatecoinAuthenticated.java
index <HASH>..<HASH> 100644
--- a/xchange-gatecoin/src/main/java/org/knowm/xchange/gatecoin/GatecoinAuthenticated.java
+++ b/xchange-gatecoin/src/main/java/org/knowm/xchange/gatecoin/GatecoinAuthenticated.java
@@ -26,7 +26,7 @@ import org.knowm.xchange.gatecoin.dto.trade.Results.GatecoinTradeHistoryResult;
import si.mazi.rescu.ParamsDigest;
-@Path("api")
+@Path("")
@Produces(MediaType.APPLICATION_JSON)
public interface GatecoinAuthenticated {
|
Update GatecoinAuthenticated.java
API address changed
|
knowm_XChange
|
train
|
8c7a7a80a193ca40598988ffeda6dbad131a86d0
|
diff --git a/kuyruk/master.py b/kuyruk/master.py
index <HASH>..<HASH> 100644
--- a/kuyruk/master.py
+++ b/kuyruk/master.py
@@ -59,9 +59,13 @@ class Master(KuyrukProcess):
def shutdown_workers(self, kill=False):
"""Sends shutdown signal to all workers."""
- kill_signal = signal.SIGKILL if kill else signal.SIGTERM
+ if kill:
+ fn, sig = os.killpg, signal.SIGKILL
+ else:
+ fn, sig = os.kill, signal.SIGTERM
+
for worker in self.workers:
- os.kill(worker.pid, kill_signal)
+ fn(worker.pid, sig)
def wait_for_workers(self):
"""Loops until any of the self.workers is alive.
@@ -165,6 +169,7 @@ class WorkerProcess(object):
self.run_worker()
def run_worker(self):
+ os.setpgrp()
self.close_fds()
Args = namedtuple('Args', 'queue')
args = Args(queue=self.queue)
|
run worker in separate process group
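The change itself is Python (os.setpgrp() in the worker, os.killpg() in the master). As a hedged illustration of the same idea, here is a minimal Unix-only Go sketch (illustrative, not kuyruk code): start the worker in its own process group so a hard kill takes out the worker and any children it spawned, while a graceful stop still targets only the worker.

package main

import (
	"os/exec"
	"syscall"
)

func main() {
	cmd := exec.Command("sleep", "60") // stand-in for a worker process
	// Put the child in its own process group (the os.setpgrp() step).
	cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	// Graceful shutdown: signal only the worker, like os.kill(pid, SIGTERM).
	_ = syscall.Kill(cmd.Process.Pid, syscall.SIGTERM)
	// Hard shutdown: a negative pid signals the whole group,
	// like os.killpg(pgid, SIGKILL).
	_ = syscall.Kill(-cmd.Process.Pid, syscall.SIGKILL)
	_ = cmd.Wait()
}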
|
cenkalti_kuyruk
|
train
|
cbf87a73de6f2cb3673ab2568be5a9bf38c6d2ee
|
diff --git a/test/color_scheme.rb b/test/color_scheme.rb
index <HASH>..<HASH> 100644
--- a/test/color_scheme.rb
+++ b/test/color_scheme.rb
@@ -29,6 +29,12 @@ module Vic
assert_equal :Baz, @colorscheme.links.last.to_group
end
+ def test_it_creates_unique_links
+ @colorscheme.link :Foo, :Bar, :Baz
+ @colorscheme.link :Foo, :Bar, :Baz
+ assert_equal 2, @colorscheme.links.count
+ end
+
def test_it_creates_forced_links
@colorscheme.link! :Foo, :Bar
assert @colorscheme.links.first.force?
|
make sure duplicate links aren't added
|
noprompt_vic
|
train
|
4ecf8f6d6f8cf498dc86a419af360ff5018f54b7
|
diff --git a/src/org/opencms/ade/upload/CmsUploadBean.java b/src/org/opencms/ade/upload/CmsUploadBean.java
index <HASH>..<HASH> 100644
--- a/src/org/opencms/ade/upload/CmsUploadBean.java
+++ b/src/org/opencms/ade/upload/CmsUploadBean.java
@@ -258,8 +258,7 @@ public class CmsUploadBean extends CmsJspBean {
}
} else {
// create the resource
- String newResname = getNewResourceName(getCmsObject(), fileName, targetFolder);
- createSingleResource(newResname, content);
+ String newResname = createSingleResource(fileName, targetFolder, content);
// add the name of the created resource to the list of successful created resources
m_resourcesCreated.add(newResname);
}
@@ -272,23 +271,26 @@ public class CmsUploadBean extends CmsJspBean {
}
/**
- * Creates a single resource.<p>
+ * Creates a single resource and returns the site path of the new resource.<p>
*
* @param newResname the name of the resource to create
* @param content the content of the resource to create
*
+ * @return the new resource site path
+ *
* @throws CmsException if something goes wrong
* @throws CmsLoaderException if something goes wrong
* @throws CmsDbSqlException if something goes wrong
*/
- private void createSingleResource(String newResname, byte[] content)
+ private String createSingleResource(String fileName, String targetFolder, byte[] content)
throws CmsException, CmsLoaderException, CmsDbSqlException {
+ String newResname = getNewResourceName(getCmsObject(), fileName, targetFolder);
int resTypeId = OpenCms.getResourceManager().getDefaultTypeForName(newResname).getTypeId();
int plainId = OpenCms.getResourceManager().getResourceType(CmsResourceTypePlain.getStaticTypeName()).getTypeId();
// determine Title property value to set on new resource
- String title = CmsResource.getName(newResname);
+ String title = fileName;
if (title.lastIndexOf('.') != -1) {
title = title.substring(0, title.lastIndexOf('.'));
}
@@ -336,6 +338,7 @@ public class CmsUploadBean extends CmsJspBean {
throw sqlExc;
}
}
+ return newResname;
}
/**
|
Setting the untranslated file name as the title property instead of the
translated version.
|
alkacon_opencms-core
|
train
|
9dbafef7729613363e2e4801683acb26c4f2ec57
|
diff --git a/main.go b/main.go
index <HASH>..<HASH> 100644
--- a/main.go
+++ b/main.go
@@ -226,6 +226,7 @@ func executeLinter(issues chan *Issue, name, command, pattern, paths string) {
if p, ok := predefinedPatterns[pattern]; ok {
pattern = p
}
+ regexp.Compile(pattern)
re, err := regexp.Compile(pattern)
kingpin.FatalIfError(err, "invalid pattern for '"+command+"'")
|
Implemented getTopic, getTopics, createTopic API calls.
|
alecthomas_gometalinter
|
train
|
476fca6e5b4b58d40a9340fc6dcfc31f4c7c0be2
|
diff --git a/test/regression/api/v2/admin/settings_spec.js b/test/regression/api/v2/admin/settings_spec.js
index <HASH>..<HASH> 100644
--- a/test/regression/api/v2/admin/settings_spec.js
+++ b/test/regression/api/v2/admin/settings_spec.js
@@ -7,46 +7,46 @@ const ghost = testUtils.startGhost;
// NOTE: in future iterations these fields should be fetched from a central module.
// Have put a list as is here for the lack of better place for it.
-const defaultSettingsKeys = [
- 'title',
- 'description',
- 'logo',
- 'cover_image',
- 'icon',
- 'codeinjection_head',
- 'codeinjection_foot',
- 'facebook',
- 'twitter',
- 'navigation',
- 'secondary_navigation',
- 'meta_title',
- 'meta_description',
- 'og_image',
- 'og_title',
- 'og_description',
- 'twitter_image',
- 'twitter_title',
- 'twitter_description',
- 'active_theme',
- 'is_private',
- 'password',
- 'public_hash',
- 'default_content_visibility',
- 'members_subscription_settings',
- 'stripe_connect_integration',
- 'portal_name',
- 'portal_button',
- 'portal_plans',
- 'bulk_email_settings',
- 'amp',
- 'labs',
- 'slack',
- 'unsplash',
- 'shared_views',
- 'ghost_head',
- 'ghost_foot',
- 'active_timezone',
- 'default_locale'
+const defaultSettingsKeyTypes = [
+ {key: 'title',type: 'blog'},
+ {key: 'description',type: 'blog'},
+ {key: 'logo',type: 'blog'},
+ {key: 'cover_image',type: 'blog'},
+ {key: 'icon',type: 'blog'},
+ {key: 'codeinjection_head',type: 'blog'},
+ {key: 'codeinjection_foot',type: 'blog'},
+ {key: 'facebook',type: 'blog'},
+ {key: 'twitter',type: 'blog'},
+ {key: 'navigation',type: 'blog'},
+ {key: 'secondary_navigation',type: 'blog'},
+ {key: 'meta_title',type: 'blog'},
+ {key: 'meta_description',type: 'blog'},
+ {key: 'og_image',type: 'blog'},
+ {key: 'og_title',type: 'blog'},
+ {key: 'og_description',type: 'blog'},
+ {key: 'twitter_image',type: 'blog'},
+ {key: 'twitter_title',type: 'blog'},
+ {key: 'twitter_description',type: 'blog'},
+ {key: 'active_theme',type: 'theme'},
+ {key: 'is_private',type: 'private'},
+ {key: 'password',type: 'private'},
+ {key: 'public_hash',type: 'private'},
+ {key: 'default_content_visibility',type: 'members'},
+ {key: 'members_subscription_settings',type: 'members'},
+ {key: 'stripe_connect_integration',type: 'members'},
+ {key: 'portal_name',type: 'portal'},
+ {key: 'portal_button',type: 'portal'},
+ {key: 'portal_plans',type: 'portal'},
+ {key: 'bulk_email_settings',type: 'bulk_email'},
+ {key: 'amp',type: 'blog'},
+ {key: 'labs',type: 'blog'},
+ {key: 'slack',type: 'blog'},
+ {key: 'unsplash',type: 'blog'},
+ {key: 'shared_views',type: 'blog'},
+ {key: 'ghost_head',type: 'blog'},
+ {key: 'ghost_foot',type: 'blog'},
+ {key: 'active_timezone',type: 'blog'},
+ {key: 'default_locale',type: 'blog'}
];
describe('Settings API (v2)', function () {
@@ -82,7 +82,7 @@ describe('Settings API (v2)', function () {
const settings = jsonResponse.settings;
Object.keys(settings).length.should.equal(39);
- settings.map(s => s.key).should.deepEqual(defaultSettingsKeys);
+ settings.map(s => ({key: s.key, type: s.type})).should.deepEqual(defaultSettingsKeyTypes);
localUtils.API.checkResponse(jsonResponse, 'settings');
});
@@ -113,7 +113,7 @@ describe('Settings API (v2)', function () {
});
});
- it('Can not request settings by group, returns all settings instead', function () {
+ xit('Can not request settings by group, returns all settings instead', function () {
return request.get(localUtils.API.getApiQuery(`settings/?group=theme`))
.set('Origin', config.get('url'))
.expect('Content-Type', /json/)
@@ -130,7 +130,7 @@ describe('Settings API (v2)', function () {
const settings = jsonResponse.settings;
Object.keys(settings).length.should.equal(39);
- settings.map(s => s.key).should.deepEqual(defaultSettingsKeys);
+ settings.map(s => s.key).should.deepEqual(defaultSettingsKeyTypes);
localUtils.API.checkResponse(jsonResponse, 'settings');
});
|
Updated settings API v2 tests to also check for correct types
refs <URL>
|
TryGhost_Ghost
|
train
|
1d08f839762c7321df5e1a0c01df66403a20af7d
|
diff --git a/src/Composer/Repository/ComposerRepository.php b/src/Composer/Repository/ComposerRepository.php
index <HASH>..<HASH> 100644
--- a/src/Composer/Repository/ComposerRepository.php
+++ b/src/Composer/Repository/ComposerRepository.php
@@ -90,7 +90,7 @@ class ComposerRepository extends ArrayRepository implements ConfigurableReposito
$this->io = $io;
$this->cache = new Cache($io, $config->get('cache-repo-dir').'/'.preg_replace('{[^a-z0-9.]}i', '-', $this->url), 'a-z0-9.$');
$this->loader = new ArrayLoader();
- if ($rfs) {
+ if ($rfs && $this->options) {
$rfs = clone $rfs;
$rfs->setOptions($this->options);
}
|
Only clone if needed
|
composer_composer
|
train
|
6f670accadf153885e99803f86588d42dc124ed8
|
diff --git a/geomdl/operations.py b/geomdl/operations.py
index <HASH>..<HASH> 100644
--- a/geomdl/operations.py
+++ b/geomdl/operations.py
@@ -619,7 +619,92 @@ def refine_knotvector(obj, param, **kwargs):
# Start volume knot refinement
if isinstance(obj, abstract.Volume):
- raise GeomdlException("Knot refinement is not available for spline volumes")
+ # u-direction
+ if param[0] is True:
+ # Use Pw if rational
+ cpts = obj.ctrlptsw if obj.rational else obj.ctrlpts
+
+ # Construct 2-dimensional structure
+ cpt2d = []
+ for u in range(obj.ctrlpts_size_u):
+ temp_surf = []
+ for w in range(obj.ctrlpts_size_w):
+ for v in range(obj.ctrlpts_size_v):
+ temp_pt = cpts[v + (u * obj.ctrlpts_size_v) + (w * obj.ctrlpts_size_u * obj.ctrlpts_size_v)]
+ temp_surf.append(temp_pt)
+ cpt2d.append(temp_surf)
+
+ # Apply knot refinement
+ ctrlpts_tmp, kv_new = helpers.knot_refinement(obj.degree_u, obj.knotvector_u, cpt2d, **kwargs)
+ new_cpts_size = len(ctrlpts_tmp)
+
+ # Flatten to 1-dimensional structure
+ ctrlpts_new = []
+ for w in range(obj.ctrlpts_size_w):
+ for u in range(new_cpts_size):
+ for v in range(obj.ctrlpts_size_v):
+ temp_pt = ctrlpts_tmp[u][v + (w * obj.ctrlpts_size_v)]
+ ctrlpts_new.append(temp_pt)
+
+ # Update the volume after knot removal
+ obj.set_ctrlpts(ctrlpts_new, new_cpts_size, obj.ctrlpts_size_v, obj.ctrlpts_size_w)
+ obj.knotvector_u = kv_new
+
+ # v-direction
+ if param[1] is True:
+ # Use Pw if rational
+ cpts = obj.ctrlptsw if obj.rational else obj.ctrlpts
+
+ # Construct 2-dimensional structure
+ cpt2d = []
+ for v in range(obj.ctrlpts_size_v):
+ temp_surf = []
+ for w in range(obj.ctrlpts_size_w):
+ for u in range(obj.ctrlpts_size_u):
+ temp_pt = cpts[v + (u * obj.ctrlpts_size_v) + (w * obj.ctrlpts_size_u * obj.ctrlpts_size_v)]
+ temp_surf.append(temp_pt)
+ cpt2d.append(temp_surf)
+
+ # Apply knot refinement
+ ctrlpts_tmp, kv_new = helpers.knot_refinement(obj.degree_v, obj.knotvector_v, cpt2d, **kwargs)
+ new_cpts_size = len(ctrlpts_tmp)
+
+ # Flatten to 1-dimensional structure
+ ctrlpts_new = []
+ for w in range(obj.ctrlpts_size_w):
+ for u in range(obj.ctrlpts_size_u):
+ for v in range(new_cpts_size):
+ temp_pt = ctrlpts_tmp[v][u + (w * obj.ctrlpts_size_u)]
+ ctrlpts_new.append(temp_pt)
+
+ # Update the volume after knot removal
+ obj.set_ctrlpts(ctrlpts_new, obj.ctrlpts_size_u, new_cpts_size, obj.ctrlpts_size_w)
+ obj.knotvector_v = kv_new
+
+ # w-direction
+ if param[2] is True:
+ # Use Pw if rational
+ cpts = obj.ctrlptsw if obj.rational else obj.ctrlpts
+
+ # Construct 2-dimensional structure
+ cpt2d = []
+ for w in range(obj.ctrlpts_size_w):
+ temp_surf = [cpts[uv + (w * obj.ctrlpts_size_u * obj.ctrlpts_size_v)] for uv in
+ range(obj.ctrlpts_size_u * obj.ctrlpts_size_v)]
+ cpt2d.append(temp_surf)
+
+ # Apply knot refinement
+ ctrlpts_tmp, kv_new = helpers.knot_refinement(obj.degree_w, obj.knotvector_w, cpt2d, **kwargs)
+ new_cpts_size = len(ctrlpts_tmp)
+
+ # Flatten to 1-dimensional structure
+ ctrlpts_new = []
+ for w in range(new_cpts_size):
+ ctrlpts_new += ctrlpts_tmp[w]
+
+ # Update the volume after knot removal
+ obj.set_ctrlpts(ctrlpts_new, obj.ctrlpts_size_u, obj.ctrlpts_size_v, new_cpts_size)
+ obj.knotvector_w = kv_new
# Return updated spline geometry
return obj
|
Add volume support to operations.refine_knotvector
|
orbingol_NURBS-Python
|
train
|
cea1f31eda5779472776bae27d25e891711faf76
|
diff --git a/src/filters.js b/src/filters.js
index <HASH>..<HASH> 100644
--- a/src/filters.js
+++ b/src/filters.js
@@ -8,11 +8,20 @@
*/
angular.module('angular.filters', [
+
'a8m.ucfirst',
'a8m.uri-encode',
'a8m.remove-spaces',
+ 'a8m.strip-tags',
+
'a8m.concat',
'a8m.unique',
'a8m.is-empty',
- 'a8m.strip-tags'
+ 'a8m.after',
+ 'a8m.where',
+
+
+ 'a8m.math',
+ 'a8m.max',
+ 'a8m.min'
]);
|
fix(main): register all filters to the angular.filter module
|
a8m_angular-filter
|
train
|
b04f87f62dd9f2935b9d1c85696f9ac0cd520701
|
diff --git a/cake/tests/lib/reporter/cake_cli_reporter.php b/cake/tests/lib/reporter/cake_cli_reporter.php
index <HASH>..<HASH> 100644
--- a/cake/tests/lib/reporter/cake_cli_reporter.php
+++ b/cake/tests/lib/reporter/cake_cli_reporter.php
@@ -98,7 +98,7 @@ class CakeCliReporter extends CakeBaseReporter {
*/
function paintException($exception) {
parent::paintException($exception);
- $message .= sprintf('Unexpected exception of type [%s] with message [%s] in [%s] line [%s]',
+ $message = sprintf('Unexpected exception of type [%s] with message [%s] in [%s] line [%s]',
get_class($exception),
$exception->getMessage(),
$exception->getFile(),
|
fixed E_NOTICE undefined variable in CakeCliReporter::paintException
|
cakephp_cakephp
|
train
|
c3dde683158863ae764684c5b7d958e39e905874
|
diff --git a/test/ral/providers/package.rb b/test/ral/providers/package.rb
index <HASH>..<HASH> 100755
--- a/test/ral/providers/package.rb
+++ b/test/ral/providers/package.rb
@@ -146,6 +146,8 @@ class TestPackageProvider < Test::Unit::TestCase
assert(provider, "Could not retrieve provider")
+ return if result = provider.query and ! [:absent, :purged].include?(result[:ensure])
+
assert_absent(provider)
if Process.uid != 0
|
Fixing #<I> -- installed packages used for testing are just ignored,
rather than throwing a failure.
|
puppetlabs_puppet
|
train
|
9140742fd381626d059500089341c8318633a053
|
diff --git a/security/BasicAuth.php b/security/BasicAuth.php
index <HASH>..<HASH> 100755
--- a/security/BasicAuth.php
+++ b/security/BasicAuth.php
@@ -24,23 +24,25 @@ class BasicAuth {
*
* @param string $realm
* @param string|array $permissionCode
+	 * @param boolean $tryUsingSessionLogin If true, then the method will authenticate against the
+	 * 	session log-in if those credentials are unavailable.
* @return Member $member
*/
- static function requireLogin($realm, $permissionCode) {
+ static function requireLogin($realm, $permissionCode, $tryUsingSessionLogin = true) {
if(!Security::database_is_ready() || Director::is_cli()) return true;
- $authenticated = false;
+ $member = null;
if(isset($_SERVER['PHP_AUTH_USER']) && isset($_SERVER['PHP_AUTH_PW'])) {
$member = MemberAuthenticator::authenticate(array(
'Email' => $_SERVER['PHP_AUTH_USER'],
'Password' => $_SERVER['PHP_AUTH_PW'],
), null);
-
- if($member || Member::currentUser()) $authenticated = true;
}
+ if(!$member && $tryUsingSessionLogin) $member = Member::currentUser();
+
// If we've failed the authentication mechanism, then show the login form
- if(!$authenticated) {
+ if(!$member) {
header("WWW-Authenticate: Basic realm=\"$realm\"");
header($_SERVER['SERVER_PROTOCOL'] . ' 401 Unauthorized');
@@ -107,7 +109,9 @@ class BasicAuth {
*/
static function protect_site_if_necessary() {
if(self::$entire_site_protected) {
- self::requireLogin("SilverStripe test website. Use your CMS login.", "ADMIN");
+ // The test-site protection should ignore the session log-in; otherwise it's difficult
+ // to test the log-in features of your site
+ self::requireLogin("SilverStripe test website. Use your CMS login.", "ADMIN", false);
}
}
|
BUGFIX: Fixed bug in basicauth failover to session member.
BUGFIX: Don't use session member for test site protection feature.
git-svn-id: svn://svn.silverstripe.com/silverstripe/open/modules/sapphire/branches/<I>@<I> <I>b<I>ca-7a2a-<I>-9d3b-<I>d<I>a<I>a9
|
silverstripe_silverstripe-framework
|
train
|
881f38e50455668fbc6435ebe50c3ca586255fee
|
diff --git a/core/ledger/ledgerstorage/store.go b/core/ledger/ledgerstorage/store.go
index <HASH>..<HASH> 100644
--- a/core/ledger/ledgerstorage/store.go
+++ b/core/ledger/ledgerstorage/store.go
@@ -116,7 +116,7 @@ func (s *Store) GetPvtDataAndBlockByNum(blockNum uint64, filter ledger.PvtNsColl
if block, err = s.RetrieveBlockByNumber(blockNum); err != nil {
return nil, err
}
- if pvtdata, err = s.GetPvtDataByNum(blockNum, filter); err != nil {
+ if pvtdata, err = s.getPvtDataByNumWithoutLock(blockNum, filter); err != nil {
return nil, err
}
return &ledger.BlockAndPvtData{Block: block, BlockPvtData: constructPvtdataMap(pvtdata)}, nil
@@ -128,7 +128,13 @@ func (s *Store) GetPvtDataAndBlockByNum(blockNum uint64, filter ledger.PvtNsColl
func (s *Store) GetPvtDataByNum(blockNum uint64, filter ledger.PvtNsCollFilter) ([]*ledger.TxPvtData, error) {
s.rwlock.RLock()
defer s.rwlock.RUnlock()
+ return s.getPvtDataByNumWithoutLock(blockNum, filter)
+}
+// getPvtDataByNumWithoutLock returns only the pvt data corresponding to the given block number.
+// This function does not acquire a read lock, and it is expected that in most circumstances the caller
+// possesses a read lock on `s.rwlock`
+func (s *Store) getPvtDataByNumWithoutLock(blockNum uint64, filter ledger.PvtNsCollFilter) ([]*ledger.TxPvtData, error) {
var pvtdata []*ledger.TxPvtData
var err error
if pvtdata, err = s.pvtdataStore.GetPvtDataByBlockNum(blockNum, filter); err != nil {
diff --git a/core/ledger/ledgerstorage/store_test.go b/core/ledger/ledgerstorage/store_test.go
index <HASH>..<HASH> 100644
--- a/core/ledger/ledgerstorage/store_test.go
+++ b/core/ledger/ledgerstorage/store_test.go
@@ -17,8 +17,11 @@ limitations under the License.
package ledgerstorage
import (
+ "fmt"
"os"
+ "runtime"
"testing"
+ "time"
"github.com/hyperledger/fabric/common/flogging"
"github.com/hyperledger/fabric/common/ledger/blkstorage"
@@ -26,6 +29,7 @@ import (
"github.com/hyperledger/fabric/common/ledger/testutil"
"github.com/hyperledger/fabric/core/ledger"
"github.com/hyperledger/fabric/core/ledger/ledgerconfig"
+ "github.com/hyperledger/fabric/protos/common"
"github.com/hyperledger/fabric/protos/ledger/rwset"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
@@ -38,6 +42,50 @@ func TestMain(m *testing.M) {
os.Exit(m.Run())
}
+func TestStoreConcurrentReadWrite(t *testing.T) {
+ testEnv := newTestEnv(t)
+ defer testEnv.cleanup()
+ provider := NewProvider()
+ defer provider.Close()
+ store, err := provider.Open("testLedger")
+ assert.NoError(t, err)
+ defer store.Shutdown()
+
+ // Modify store to have a BlockStore that has a custom slowdown
+ store.BlockStore = newSlowBlockStore(store.BlockStore, time.Second)
+
+ sampleData := sampleData(t)
+ // Commit first block
+ store.CommitWithPvtData(sampleData[0])
+ go func() {
+ time.Sleep(time.Millisecond * 500)
+ // Commit all but first block
+ for _, sampleDatum := range sampleData[1:] {
+ store.CommitWithPvtData(sampleDatum)
+ }
+
+ }()
+
+ c := make(chan struct{})
+ go func() {
+ // Read first block
+ _, err := store.GetPvtDataAndBlockByNum(0, nil)
+ assert.NoError(t, err)
+ c <- struct{}{}
+ }()
+
+ select {
+ case <-c:
+ t.Log("Obtained private data and block by number")
+ case <-time.After(time.Second * 10):
+ assert.Fail(t, "Didn't finish within a timely manner, perhaps the system is deadlocked?")
+ buf := make([]byte, 1<<16)
+ runtime.Stack(buf, true)
+ fmt.Printf("%s", buf)
+ }
+
+}
+
func TestStore(t *testing.T) {
testEnv := newTestEnv(t)
defer testEnv.cleanup()
@@ -188,3 +236,25 @@ func samplePvtData(t *testing.T, txNums []uint64) map[uint64]*ledger.TxPvtData {
}
return constructPvtdataMap(pvtData)
}
+
+type slowBlockStore struct {
+ delay time.Duration
+ blkstorage.BlockStore
+}
+
+func newSlowBlockStore(store blkstorage.BlockStore, delay time.Duration) blkstorage.BlockStore {
+ return &slowBlockStore{
+ delay: delay,
+ BlockStore: store,
+ }
+}
+
+func (bs *slowBlockStore) RetrieveBlockByNumber(blockNum uint64) (*common.Block, error) {
+ time.Sleep(bs.delay)
+ return bs.BlockStore.RetrieveBlockByNumber(blockNum)
+}
+
+func (bs *slowBlockStore) AddBlock(block *common.Block) error {
+ time.Sleep(bs.delay)
+ return bs.BlockStore.AddBlock(block)
+}
|
[FAB-<I>] ledgerstore uses RLock^2 and deadlocks
Re-entrant RLocking while a Lock happens in between is a deadlock
in golang.
This makes the ledgerstore deadlock if it writes blocks while some peer
fetches blocks from it via state transfer.
Full details in the JIRA.
Change-Id: I<I>f<I>c<I>cacdbeec<I>c<I>e9b<I>c9c6b<I>
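A minimal, self-contained Go sketch of the hazard (illustrative names, not fabric code): a goroutine holding a read lock tries to re-acquire it while a writer is queued in between. Go blocks new readers once a writer is waiting, so running this deadlocks by design and the runtime's deadlock detector aborts the program.

package main

import (
	"fmt"
	"sync"
	"time"
)

func main() {
	var mu sync.RWMutex

	mu.RLock() // outer read lock (GetPvtDataAndBlockByNum's RLock)
	go func() {
		mu.Lock() // a writer, e.g. a block commit, queues up in between
		mu.Unlock()
	}()
	time.Sleep(100 * time.Millisecond) // let the writer start waiting

	fmt.Println("re-entering the read lock; this blocks behind the writer")
	mu.RLock() // re-entrant read lock (the old nested GetPvtDataByNum call)
	mu.RUnlock()
	mu.RUnlock()
}

The fix in the diff avoids the second acquisition: the outer caller keeps its single read lock and delegates to the lock-free getPvtDataByNumWithoutLock variant.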
|
hyperledger_fabric
|
train
|
2e4631e4f9a44104e2b9f6e888f5d6f08d80ac97
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,7 @@
- Added experimental `filter_data` option
- Localized times on maintenance page
+- Improved connection pooling
## 2.4.2 (2020-04-16)
diff --git a/lib/pghero.rb b/lib/pghero.rb
index <HASH>..<HASH> 100644
--- a/lib/pghero.rb
+++ b/lib/pghero.rb
@@ -34,6 +34,8 @@ module PgHero
class Error < StandardError; end
class NotEnabled < Error; end
+ MUTEX = Mutex.new
+
# settings
class << self
attr_accessor :long_running_query_sec, :slow_query_ms, :slow_query_calls, :explain_timeout_sec, :total_connections_threshold, :cache_hit_rate_threshold, :env, :show_migrations, :config_path, :filter_data
@@ -129,14 +131,20 @@ module PgHero
end
end
+ # ensure we only have one copy of databases
+ # so there's only one connection pool per database
def databases
- @databases ||= begin
- Hash[
- config["databases"].map do |id, c|
- [id.to_sym, PgHero::Database.new(id, c)]
- end
- ]
+ unless defined?(@databases)
+ # only use mutex on initialization
+ MUTEX.synchronize do
+ # return if another process initialized while we were waiting
+ return @databases if defined?(@databases)
+
+ @databases = config["databases"].map { |id, c| [id.to_sym, Database.new(id, c)] }.to_h
+ end
end
+
+ @databases
end
def primary_database
diff --git a/lib/pghero/database.rb b/lib/pghero/database.rb
index <HASH>..<HASH> 100644
--- a/lib/pghero/database.rb
+++ b/lib/pghero/database.rb
@@ -23,6 +23,10 @@ module PgHero
def initialize(id, config)
@id = id
@config = config || {}
+
+ # preload model to ensure only one connection pool
+ # this doesn't actually start any connections
+ connection_model
end
def name
diff --git a/test/basic_test.rb b/test/basic_test.rb
index <HASH>..<HASH> 100644
--- a/test/basic_test.rb
+++ b/test/basic_test.rb
@@ -43,4 +43,20 @@ class BasicTest < Minitest::Test
def test_connections
assert PgHero.connections
end
+
+ def test_connection_pool
+ 1000.times do
+ [:@config, :@databases].each do |var|
+ PgHero.remove_instance_variable(var) if PgHero.instance_variable_defined?(var)
+ end
+
+ threads =
+ 2.times.map do
+ Thread.new do
+ PgHero.databases[:primary].send(:connection_model).object_id
+ end
+ end
+ assert_equal 1, threads.map(&:value).uniq.size
+ end
+ end
end
|
Ensure only one connection pool per database
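The Ruby change guards lazy initialization with a mutex and re-checks after acquiring it, so concurrent callers all end up with the same databases hash and hence one pool per database. A hedged Go illustration of the same re-check-under-lock shape (illustrative names, not the pghero API):

package main

import (
	"fmt"
	"sync"
)

type registry struct {
	mu  sync.Mutex
	dbs map[string]*int // stand-in for per-database connection pools
}

func (r *registry) databases() map[string]*int {
	r.mu.Lock()
	defer r.mu.Unlock()
	if r.dbs == nil { // re-check under the lock, like Ruby's defined?(@databases)
		r.dbs = map[string]*int{"primary": new(int)}
	}
	return r.dbs
}

func main() {
	r := &registry{}
	fmt.Println(r.databases()["primary"] == r.databases()["primary"]) // true: one shared pool
}

In Go, sync.Once is the idiomatic packaging of this pattern; the explicit re-check mirrors the Ruby code returning @databases when another thread finished initializing while this one waited on the mutex.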
|
ankane_pghero
|
train
|
29b00fb744d2e27f835d0130b88ed4bda08c63f7
|
diff --git a/lib/puppet/type.rb b/lib/puppet/type.rb
index <HASH>..<HASH> 100644
--- a/lib/puppet/type.rb
+++ b/lib/puppet/type.rb
@@ -198,16 +198,16 @@ class Type < Puppet::Element
raise Puppet::DevError, "must pass a Puppet::Type object"
end
- if @objects.has_key?(newobj.name) and self.isomorphic?
+ if @objects.has_key?(name) and self.isomorphic?
raise Puppet::Error.new(
"Object '%s' of type '%s' already exists with id '%s' vs. '%s'" %
- [newobj.name,newobj.class.name,
- @objects[newobj.name].object_id,newobj.object_id]
+ [name,newobj.class.name,
+ @objects[name].object_id,newobj.object_id]
)
else
- #debug("adding %s of type %s to class list" %
- # [object.name,object.class])
- @objects[newobj.name] = newobj
+ #Puppet.info("adding %s of type %s to class list" %
+ # [name,object.class])
+ @objects[name] = newobj
end
# and then add it to the master list
@@ -1585,6 +1585,23 @@ class Type < Puppet::Element
loglevel
end
end
+
+ newmetaparam(:alias) do
+    desc "Creates an alias for the object. This simplifies lookup of the
+      object, so it is useful in the language. It is especially useful when
+      you are creating long commands using exec or when many different systems
+      call a given package by different names."
+
+ munge do |*aliases|
+ unless aliases.is_a?(Array)
+ aliases = [aliases]
+ end
+ @parent.info "Adding aliases %s" % aliases.join(", ")
+ aliases.each do |other|
+ @parent.class[other] = @parent
+ end
+ end
+ end
end # Puppet::Type
end
diff --git a/test/types/type.rb b/test/types/type.rb
index <HASH>..<HASH> 100644
--- a/test/types/type.rb
+++ b/test/types/type.rb
@@ -130,6 +130,21 @@ class TestType < Test::Unit::TestCase
)
}
end
+
+ # Verify that aliasing works
+ def test_aliasing
+ file = tempfile()
+
+ baseobj = Puppet.type(:file).create(
+ :name => file,
+ :create => true,
+ :alias => "funtest"
+ )
+
+ assert_instance_of(Puppet.type(:file), Puppet.type(:file)["funtest"],
+ "Could not retrieve alias")
+
+ end
end
# $Id$
|
Adding "alias" metaparam; you can now create as many aliases as you want for any of your objects.
git-svn-id: <URL>
|
puppetlabs_puppet
|
train
|
e383758011c0b17ac51d5683ac060b5922c2f342
|
diff --git a/app/assets/javascripts/alchemy/alchemy.file_progress.js b/app/assets/javascripts/alchemy/alchemy.file_progress.js
index <HASH>..<HASH> 100644
--- a/app/assets/javascripts/alchemy/alchemy.file_progress.js
+++ b/app/assets/javascripts/alchemy/alchemy.file_progress.js
@@ -67,7 +67,9 @@ if (typeof(Alchemy) === 'undefined') {
Alchemy.FileProgress.prototype.setComplete = function () {
this.$progressBar.removeClass().addClass("progressBarComplete");
this.$progressBar.css({width: '100%'});
- this.$fileProgressWrapper.delay(1500).fadeOut();
+ this.$fileProgressWrapper.delay(1500).fadeOut(function() {
+ $(this).remove();
+ });
};
Alchemy.FileProgress.prototype.setError = function () {
@@ -78,7 +80,9 @@ if (typeof(Alchemy) === 'undefined') {
Alchemy.FileProgress.prototype.setCancelled = function () {
this.$progressBar.removeClass().addClass("progressBarCanceled");
this.$progressBar.css({width: '100%'});
- this.$fileProgressWrapper.delay(1500).fadeOut();
+ this.$fileProgressWrapper.delay(1500).fadeOut(function() {
+ $(this).remove();
+ });
};
Alchemy.FileProgress.prototype.setStatus = function (status) {
diff --git a/app/assets/javascripts/alchemy/alchemy.uploader.js b/app/assets/javascripts/alchemy/alchemy.uploader.js
index <HASH>..<HASH> 100644
--- a/app/assets/javascripts/alchemy/alchemy.uploader.js
+++ b/app/assets/javascripts/alchemy/alchemy.uploader.js
@@ -83,11 +83,13 @@ Alchemy.Uploader = {
onServerLoadStart: function(event, file) {
var progress = new Alchemy.FileProgress(file);
progress.setStatus(self.t('uploading'));
- // progress.$fileProgressCancel.show().on('click', function(e) {
- // e.preventDefault();
- // $().html5Uploader('cancel', file.id);
- // return false;
- // });
+ progress.$fileProgressCancel.show().on('click', function(e) {
+ e.preventDefault();
+ $().html5Uploader('cancel', file.id);
+ progress.setStatus(self.t('cancelled'));
+ progress.setCancelled();
+ return false;
+ });
},
onServerProgress: function(event, file) {
var progress = new Alchemy.FileProgress(file);
diff --git a/vendor/assets/javascripts/jquery_plugins/jquery.html5uploader.js b/vendor/assets/javascripts/jquery_plugins/jquery.html5uploader.js
index <HASH>..<HASH> 100755
--- a/vendor/assets/javascripts/jquery_plugins/jquery.html5uploader.js
+++ b/vendor/assets/javascripts/jquery_plugins/jquery.html5uploader.js
@@ -77,7 +77,7 @@
}
});
- // Populating our own files array.
+ // Populating our own files array so we can be sure the browser does not change it.
function fillFileQueue(files) {
queuedFiles = [];
fileList = files; //storing the original browsers FileList object.
@@ -233,6 +233,7 @@
for (var file_id in xhrRequestQueue) {
if (xhrRequestQueue.hasOwnProperty(file_id)) {
xhrRequestQueue[file_id].abort();
+ delete xhrRequestQueue[file_id];
}
}
if (settings.onQueueCancelled) {
@@ -242,10 +243,11 @@
// Cancel one upload
cancel : function(file_id) {
- // var xhrRequest = xhrRequestQueue[file_id];
- // if (xhrRequest) {
- // xhrRequest.abort();
- // }
+ var xhrRequest = xhrRequestQueue[file_id];
+ if (xhrRequest) {
+ xhrRequest.abort();
+ delete xhrRequestQueue[file_id];
+ }
}
};
|
Each html5 upload can now be stopped on its own.
|
AlchemyCMS_alchemy_cms
|
train
|
474fb2f817462b63557e58b9aec94d90b9dcc525
|
diff --git a/setget.go b/setget.go
index <HASH>..<HASH> 100644
--- a/setget.go
+++ b/setget.go
@@ -7,54 +7,45 @@ import "net"
import "time"
import "sync"
+import "./rend"
+
type CacheItem struct {
key string
value string
}
// constants and configuration
-// No constant arrays
-var letters = []rune("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
-const verbose = false
-const numThreads = 10
+const verbose = true
+const numWorkers = 10
+// 2-character keys with 26 possibilities = 676 keys
// 3-character keys with 26 possibilities = 17,576 keys
// 4-character keys with 26 possibilities = 456,976 keys
// 5-character keys with 26 possibilities = 11,881,376 keys
const keyLength = 4
-//const numKeys = 1000000
const numKeys = 100000
-// concurrency bits
-var wg sync.WaitGroup
-var tasks chan *CacheItem
-
func main() {
rand.Seed(time.Now().UTC().UnixNano())
- tasks = make(chan *CacheItem, 64)
+ tasks := make(chan *CacheItem)
+ wg := new(sync.WaitGroup)
+ wg.Add(numWorkers)
// spawn worker goroutines
- for i := 0; i < numThreads; i++ {
- wg.Add(1)
- go worker(connect("localhost"))
+ for i := 0; i < numWorkers; i++ {
+ go worker(rend.Connect("localhost"), tasks, wg)
}
-
- key := ""
- value := ""
- valLen := 0
+
+ source := rend.RandString(10240)
for i := 0; i < numKeys; i++ {
- key = randString(keyLength)
-
// Random length between 1k and 10k
- valLen = rand.Intn(9 * 1024) + 1024
- value = randString(valLen)
+ valLen := rand.Intn(9 * 1024) + 1024
item := new(CacheItem)
- item.key = key
- item.value = value
-
+ item.key = rend.RandString(keyLength)
+ item.value = source[:valLen]
tasks <- item
if i % 10000 == 0 {
@@ -66,75 +57,14 @@ func main() {
wg.Wait()
}
-func worker(conn net.Conn) {
+func worker(conn net.Conn, tasks chan *CacheItem, wg *sync.WaitGroup) {
+ reader := bufio.NewReader(conn)
+ writer := bufio.NewWriter(conn)
+
for item := range tasks {
- set(conn, item.key, item.value)
- get(conn, item.key)
+ rend.Set(reader, writer, item.key, item.value)
+ rend.Get(reader, writer, item.key)
}
wg.Done()
}
-
-func randString(n int) string {
- b := make([]rune, n)
-
- for i := range b {
- b[i] = letters[rand.Intn(len(letters))]
- }
-
- return string(b)
-}
-
-func connect(host string) net.Conn {
- conn, err := net.Dial("tcp", host + ":11212")
-
- if err != nil {
- panic(err)
- }
-
- println("Connected to memcached.")
-
- return conn
-}
-
-func set(conn net.Conn, key string, value string) {
- if verbose { println(fmt.Sprintf("Setting key %v to value of length %v", key, len(value))) }
-
- fmt.Fprintf(conn, "set %v 0 0 %v\r\n", key, len(value))
- fmt.Fprintf(conn, "%v\r\n", value)
- response, err := bufio.NewReader(conn).ReadString('\n')
-
- if err != nil { panic(err) }
-
- if verbose { print(response) }
-}
-
-func get(conn net.Conn, key string) {
- if verbose { println(fmt.Sprintf("Getting key %v", key)) }
-
- fmt.Fprintf(conn, "get %v\r\n", key)
-
- reader := bufio.NewReader(conn)
-
- // read the header line
- response, err := reader.ReadString('\n')
-
- if err != nil { panic(err) }
-
- if verbose { print(response) }
-
- // then read the value
- response, err = reader.ReadString('\n')
-
- if err != nil { panic(err) }
-
- //print(response)
- if verbose { println("(read the value)") }
-
- // then read the END
- response, err = reader.ReadString('\n')
-
- if err != nil { panic(err) }
-
- if verbose { print(response) }
-}
|
Much better set/get test. Uses a constant string and slices instead of a new random one every time. Also uses common bits.
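The key trick the message describes is visible in the diff: build one large random string up front and hand out random-length slices of it instead of generating a fresh random string per key. A standalone Go sketch of just that part:

package main

import (
	"fmt"
	"math/rand"
)

var letters = []rune("ABCDEFGHIJKLMNOPQRSTUVWXYZ")

func randString(n int) string {
	b := make([]rune, n)
	for i := range b {
		b[i] = letters[rand.Intn(len(letters))]
	}
	return string(b)
}

func main() {
	source := randString(10240) // one up-front allocation, as in the diff
	for i := 0; i < 5; i++ {
		valLen := rand.Intn(9*1024) + 1024 // random length between 1k and 10k
		value := source[:valLen]           // slicing shares the backing bytes
		fmt.Println(len(value))
	}
}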
|
Netflix_rend
|
train
|
4b7162de9e64027b6d3839e83b2fb683f1c8a216
|
diff --git a/cpuid.go b/cpuid.go
index <HASH>..<HASH> 100644
--- a/cpuid.go
+++ b/cpuid.go
@@ -143,6 +143,8 @@ type CPUInfo struct {
Family int // CPU family number
Model int // CPU model number
CacheLine int // Cache line size in bytes. Will be 0 if undetectable.
+ maxFunc uint32
+ maxExFunc uint32
}
// CPU contains information about the CPU as detected on startup,
@@ -164,6 +166,8 @@ func init() {
// If you call this, you must ensure that no other goroutine is accessing the
// exported CPU variable.
func Detect() {
+ CPU.maxFunc = maxFunctionID()
+ CPU.maxExFunc = maxExtendedFunction()
CPU.BrandName = brandName()
CPU.CacheLine = cacheLine()
CPU.Family, CPU.Model = familyModel()
@@ -463,6 +467,19 @@ func (c CPUInfo) Ia32TscAux() uint32 {
return ecx
}
+// Core will return the number of the core the
+// code is currently executing on.
+// This is likely to change when the OS re-schedules the running thread
+// to another CPU.
+// If the current core cannot be detected, -1 will be returned.
+func (c CPUInfo) Core() int {
+ if c.maxFunc < 1 {
+ return -1
+ }
+ _, ebx, _, _ := cpuid(1)
+ return int(ebx >> 24)
+}
+
// VM Will return true if the cpu id indicates we are in
// a virtual machine. This is only a hint, and will very likely
// have many false negatives.
diff --git a/cpuid_test.go b/cpuid_test.go
index <HASH>..<HASH> 100644
--- a/cpuid_test.go
+++ b/cpuid_test.go
@@ -580,6 +580,22 @@ func TestIa32TscAux(t *testing.T) {
t.Logf("Ia32TscAux:0x%x", CPU.Ia32TscAux())
}
+// Prints the core the code is currently executing on
+func TestCore(t *testing.T) {
+ t.Log("Currently executing on core:", CPU.Core())
+}
+
+func TestMaxFunction(t *testing.T) {
+ expect := maxFunctionID()
+ if CPU.maxFunc != expect {
+ t.Fatal("Max function does not match, expected", expect, "but got", CPU.maxFunc)
+ }
+ expect = maxExtendedFunction()
+ if CPU.maxExFunc != expect {
+		t.Fatal("Max Extended function does not match, expected", expect, "but got", CPU.maxExFunc)
+ }
+}
+
// This example will calculate the chip/core number on Linux
// Linux encodes numa id (<<12) and core id (8bit) into TSC_AUX.
func ExampleCPUInfo_Ia32TscAux(t *testing.T) {
diff --git a/private-gen.go b/private-gen.go
index <HASH>..<HASH> 100644
--- a/private-gen.go
+++ b/private-gen.go
@@ -31,7 +31,7 @@ var reWrites = []rewrite{
}
var excludeNames = map[string]bool{"string": true, "join": true, "trim": true,
// cpuid_test.go
- "t": true, "println": true, "logf": true, "log": true, "fatalf": true,
+ "t": true, "println": true, "logf": true, "log": true, "fatalf": true, "fatal": true,
}
var excludePrefixes = []string{"test", "benchmark"}
diff --git a/private/cpuid.go b/private/cpuid.go
index <HASH>..<HASH> 100644
--- a/private/cpuid.go
+++ b/private/cpuid.go
@@ -137,6 +137,8 @@ type cpuInfo struct {
family int // CPU family number
model int // CPU model number
cacheline int // Cache line size in bytes. Will be 0 if undetectable.
+ maxFunc uint32
+ maxExFunc uint32
}
// CPU contains information about the CPU as detected on startup,
@@ -158,6 +160,8 @@ func init() {
// If you call this, you must ensure that no other goroutine is accessing the
// exported CPU variable.
func detect() {
+ cpu.maxFunc = maxFunctionID()
+ cpu.maxExFunc = maxExtendedFunction()
cpu.brandname = brandName()
cpu.cacheline = cacheLine()
cpu.family, cpu.model = familyModel()
@@ -457,6 +461,19 @@ func (c cpuInfo) ia32tscaux() uint32 {
return ecx
}
+// Core will return the number of the core the
+// code is currently executing on.
+// This is likely to change when the OS re-schedules the running thread
+// to another CPU.
+// If the current core cannot be detected, -1 will be returned.
+func (c cpuInfo) core() int {
+ if c.maxFunc < 1 {
+ return -1
+ }
+ _, ebx, _, _ := cpuid(1)
+ return int(ebx >> 24)
+}
+
// VM Will return true if the cpu id indicates we are in
// a virtual machine. This is only a hint, and will very likely
// have many false negatives.
diff --git a/private/cpuid_test.go b/private/cpuid_test.go
index <HASH>..<HASH> 100644
--- a/private/cpuid_test.go
+++ b/private/cpuid_test.go
@@ -582,6 +582,22 @@ func TestIa32TscAux(t *testing.T) {
t.Logf("Ia32TscAux:0x%x", cpu.ia32tscaux())
}
+// Prints the core the code is currently executing on
+func TestCore(t *testing.T) {
+ t.Log("Currently executing on core:", cpu.core())
+}
+
+func TestMaxFunction(t *testing.T) {
+ expect := maxFunctionID()
+ if cpu.maxFunc != expect {
+ t.Fatal("Max function does not match, expected", expect, "but got", cpu.maxFunc)
+ }
+ expect = maxExtendedFunction()
+ if cpu.maxExFunc != expect {
+		t.Fatal("Max Extended function does not match, expected", expect, "but got", cpu.maxExFunc)
+ }
+}
+
// This example will calculate the chip/core number on Linux
// Linux encodes numa id (<<12) and core id (8bit) into TSC_AUX.
func examplecpuinfo_ia32tscaux(t *testing.T) {
|
Read current core from APIC cpu info and provide function for it.
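A short usage sketch for the new accessor (the import path is assumed from the project name; CPU and Core() are taken from the diff above):

package main

import (
	"fmt"

	"github.com/klauspost/cpuid" // assumed import path
)

func main() {
	// Core() reads the initial APIC ID (CPUID leaf 1, EBX bits 31:24);
	// the value can change whenever the OS reschedules the thread.
	if core := cpuid.CPU.Core(); core >= 0 {
		fmt.Println("currently executing on core", core)
	} else {
		fmt.Println("core detection unavailable")
	}
}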
|
klauspost_cpuid
|
train
|
08539032fd944cdc9f453807eed1cad53b3c5dbb
|
diff --git a/src/components/RangeSelector.js b/src/components/RangeSelector.js
index <HASH>..<HASH> 100644
--- a/src/components/RangeSelector.js
+++ b/src/components/RangeSelector.js
@@ -94,45 +94,27 @@ class RangeSelector extends React.Component {
}
_handleTrackMouseDown (e) {
- const updatedState = {
- trackClicked: true
- };
-
- const clientX = e.touches ? e.touches[0].clientX : e.clientX;
-
- const newPixels = clientX - ReactDOM.findDOMNode(this.refs.rangeSelector).getBoundingClientRect().left;
-
- //click point is less than lower selector
- if (newPixels < this.state.lowerPixels) {
- this.setState({
- dragging: 'Lower'
- });
- }
-
- //click point is higher than upper selector
- if (newPixels > this.state.upperPixels) {
- this.setState({
- dragging: 'Upper'
- });
- }
-
- //click point is higher than lower selector && also lower than midway point between selectors
- if (newPixels > this.state.lowerPixels && newPixels < (this.state.lowerPixels + (this.state.upperPixels - this.state.lowerPixels) / 2)) {
- this.setState({
- dragging: 'Lower'
- });
- }
-
- //click point is lower than upper selector && also greater than midway point between selectors
- if (newPixels < this.state.upperPixels && newPixels > (this.state.upperPixels - (this.state.upperPixels - this.state.lowerPixels) / 2)) {
- this.setState({
- dragging: 'Upper'
- });
- }
+ const clientX = e.touches ? e.touches[0].clientX : e.clientX;
+ const newPixels = clientX - ReactDOM.findDOMNode(this.refs.rangeSelector).getBoundingClientRect().left;
+ const updatedState = {
+ trackClicked: true
+ };
+ const clickBelowLower = newPixels < this.state.lowerPixels;
+ const clickAboveUpper = newPixels > this.state.upperPixels;
+ const clickCloserToLower = newPixels > this.state.lowerPixels && newPixels < (this.state.lowerPixels + (this.state.upperPixels - this.state.lowerPixels) / 2);
+ const clickCloserToUpper = newPixels < this.state.upperPixels && newPixels > (this.state.upperPixels - (this.state.upperPixels - this.state.lowerPixels) / 2);
+
+ if (clickBelowLower || clickCloserToLower) {
+ updatedState.dragging = 'Lower';
+ }
- this.setState(updatedState);
+ if (clickAboveUpper || clickCloserToUpper) {
+ updatedState.dragging = 'Upper';
}
+ this.setState(updatedState);
+}
+
//this method now handles both the dragging of the toggle, and moving it when track is clicked
_handleDragging (e) {
if (this.state.dragging) {
|
_handleTrackMouseDown has been condensed
|
mxenabled_mx-react-components
|
train
|
e8230963be8f061f35c98670a72fc49fa8fe387f
|
diff --git a/src/Repository/OMA/StudentRepository.php b/src/Repository/OMA/StudentRepository.php
index <HASH>..<HASH> 100644
--- a/src/Repository/OMA/StudentRepository.php
+++ b/src/Repository/OMA/StudentRepository.php
@@ -83,6 +83,30 @@ class StudentRepository extends BaseRepository
return $data;
}
+
+ public function get_task_list(
+ VO\Token $token,
+ VO\Integer $member_apprenticeship_id,
+ VO\Integer $is_completed,
+ VO\Integer $current_page
+ ) {
+ $request = new Request(
+ new GuzzleClient,
+ $this->credentials,
+ VO\HTTP\Url::fromNative($this->base_url.'/student/fetch/all/programs'),
+ new VO\HTTP\Method('POST')
+ );
+ $request_parameters = array(
+ 'member_apprenticeship_id' => $member_apprenticeship_id->__toInteger(),
+ 'is_completed' => $is_completed->__toInteger(),
+ 'current_page' => $current_page->__toInteger()
+ );
+ $header_parameters = array('Authorization' => $token->__toEncodedString());
+ $response = $request->send($request_parameters, $header_parameters);
+ $data = $response->get_data();
+ return $data;
+ }
+
public function program_units_list(
VO\Token $token,
VO\Integer $program_id
|
student repository: added new method for task list fetch
|
OliveMedia_academyhq-api-client
|
train
|
473dda5313efdd3f86d27a7d88aa7939f900c084
|
diff --git a/api/controller/legacy_test.go b/api/controller/legacy_test.go
index <HASH>..<HASH> 100644
--- a/api/controller/legacy_test.go
+++ b/api/controller/legacy_test.go
@@ -248,7 +248,7 @@ func (s *legacySuite) TestAPIServerCanShutdownWithOutstandingNext(c *gc.C) {
select {
case <-srvStopped:
- case <-time.After(time.Minute):
+ case <-time.After(time.Minute): // LongWait (10s) didn't seem quite long enough, see LP 1900931
c.Fatal("timed out waiting for server to stop")
}
|
Add comment about why we're not using LongWait
|
juju_juju
|
train
|
c013e50d0d97bed3eed8aae752c515b8d5cdfe13
|
diff --git a/main.js b/main.js
index <HASH>..<HASH> 100644
--- a/main.js
+++ b/main.js
@@ -1062,7 +1062,7 @@
layerContext.width = layer.bounds.right - layer.bounds.left;
layerContext.height = layer.bounds.bottom - layer.bounds.top;
- if (layerContext.width < 1 || layerContext.height < 1) {
+ if (layerContext.width < 0 || layerContext.height < 0) {
console.warn("Odd image size %dx%d for layer %d (%s)",
layerContext.width, layerContext.height, layer.id, layerContext.name || layer.name);
console.log("Bounds: %j", layer.bounds);
|
No need to print warnings for layers of size 0x0, they are simply empty
|
adobe-photoshop_generator-assets
|
train
|
cf4cdf9bc091b293de192ad89b8d7e3ce0c61d4b
|
diff --git a/src/Adapters/Phpunit/Style.php b/src/Adapters/Phpunit/Style.php
index <HASH>..<HASH> 100644
--- a/src/Adapters/Phpunit/Style.php
+++ b/src/Adapters/Phpunit/Style.php
@@ -182,6 +182,7 @@ final class Style
'/bin\/pest/',
'/vendor\/pestphp\/pest/',
'/vendor\/phpspec\/prophecy-phpunit/',
+ '/vendor\/phpspec\/prophecy/',
'/vendor\/phpunit\/phpunit\/src/',
'/vendor\/mockery\/mockery/',
'/vendor\/laravel\/dusk/',
|
Add prophecy to ignore files in writeError
|
nunomaduro_collision
|
train
|
d3e37099c565aa5e1ec3fd9927f1b4b97287c0e7
|
diff --git a/pythainlp/corpus/core.py b/pythainlp/corpus/core.py
index <HASH>..<HASH> 100644
--- a/pythainlp/corpus/core.py
+++ b/pythainlp/corpus/core.py
@@ -156,7 +156,7 @@ def get_corpus_path(name: str, version : str = None) -> Union[str, None]:
"""
    # Customize your corpus path, then close the line after lines 164 through 190.
_CUSTOMIZE = {
- #"the corpus name":"path"
+ # "the corpus name":"path"
}
if name in list(_CUSTOMIZE.keys()):
return _CUSTOMIZE[name]
|
Fixed PEP8 (build and deploy docs)
|
PyThaiNLP_pythainlp
|
train
|
a2cc0c24c13bd94136ac1a98b5021316147ec381
|
diff --git a/tests/unit/commands/init/test_cli.py b/tests/unit/commands/init/test_cli.py
index <HASH>..<HASH> 100644
--- a/tests/unit/commands/init/test_cli.py
+++ b/tests/unit/commands/init/test_cli.py
@@ -1089,6 +1089,7 @@ foo
user_input = """
1
2
+1
"""
args = [
"--no-input",
|
fix: Added Template User Input to Unit Test for Python Image Init Templates
|
awslabs_aws-sam-cli
|
train
|
0b757e1ac9057767c52f299a54d1fd0245ac9f9f
|
diff --git a/test/cases/CrawlDelayTest.php b/test/cases/CrawlDelayTest.php
index <HASH>..<HASH> 100644
--- a/test/cases/CrawlDelayTest.php
+++ b/test/cases/CrawlDelayTest.php
@@ -9,9 +9,9 @@
{
// init parser
$parser = new RobotsTxtParser($robotsTxtContent);
- $this->assertEquals(0, $parser->getCrawlDelay());
- $this->assertEquals(0.9, $parser->getCrawlDelay('GoogleBot'));
- $this->assertEquals(1.5, $parser->getCrawlDelay('AhrefsBot'));
+ $this->assertEquals(0, $parser->getDelay());
+ $this->assertEquals(0.9, $parser->getDelay('GoogleBot'));
+ $this->assertEquals(1.5, $parser->getDelay('AhrefsBot'));
}
/**
|
Update CrawlDelayTest.php
|
t1gor_Robots.txt-Parser-Class
|
train
|
7fd898226f0acfe7eaff9375659171fb8ce6d627
|
diff --git a/src/serial-data-source.js b/src/serial-data-source.js
index <HASH>..<HASH> 100644
--- a/src/serial-data-source.js
+++ b/src/serial-data-source.js
@@ -18,10 +18,19 @@ var optionKeys = [
-var SerialDataSource = DataSource.extend({
+var SerialDataSource = DataSource.extend(/** @lends SerialDataSource# */ {
+ /**
+ * The path to the serial port.
+ */
path: null,
+ /**
+ * Creates a new SerialDataSource.
+ *
+ * @constructs
+ * @augments DataSource
+ */
constructor: function(options) {
DataSource.call(this, options);
@@ -40,7 +49,9 @@ var SerialDataSource = DataSource.extend({
var connection = new SerialConnection(options);
- return connection.connect();
+ return connection.connect().then(function() {
+ return connection;
+ });
}
});
diff --git a/test/specs/serial-data-source.spec.js b/test/specs/serial-data-source.spec.js
index <HASH>..<HASH> 100644
--- a/test/specs/serial-data-source.spec.js
+++ b/test/specs/serial-data-source.spec.js
@@ -3,7 +3,16 @@
-var SerialDataSource = require('./resol-vbus').SerialDataSource;
+var Q = require('q');
+
+
+var vbus = require('./resol-vbus');
+var testUtils = require('./test-utils');
+
+
+
+var SerialConnection = vbus.SerialConnection;
+var SerialDataSource = vbus.SerialDataSource;
@@ -15,9 +24,44 @@ describe('SerialDataSource', function() {
expect(SerialDataSource).to.be.a('function');
});
+ it('should have reasonable defaults', function() {
+ var ds = new SerialDataSource();
+
+ expect(ds)
+ .to.have.a.property('path')
+ .that.equal(null);
+ });
+
});
- xit('should perform tests...', function() {
+ describe('#connectLive', function() {
+
+ it('should be a method', function() {
+ expect(SerialDataSource.prototype)
+ .to.have.a.property('connectLive')
+ .that.is.a('function');
+ });
+
+ it('should work correctly', function(done) {
+ var originalConnect = SerialConnection.prototype.connect;
+
+ SerialConnection.prototype.connect = function() {
+ return Q(null);
+ };
+
+ var ds = new SerialDataSource();
+
+ testUtils.performAsyncTest(done, function() {
+ return Q.fcall(function() {
+ return ds.connectLive();
+ }).then(function(connection) {
+ expect(connection)
+ .to.be.instanceOf(SerialConnection);
+ }).finally(function() {
+ SerialConnection.prototype.connect = originalConnect;
+ });
+ });
+ });
});
|
Complete SerialDataSource documentation and tests.
|
danielwippermann_resol-vbus
|
train
|
154e168da5d2032fcd05777c47805cf89306d3d8
|
diff --git a/app/controllers/maestrano/synchronizations_controller.rb b/app/controllers/maestrano/synchronizations_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/maestrano/synchronizations_controller.rb
+++ b/app/controllers/maestrano/synchronizations_controller.rb
@@ -71,13 +71,12 @@ class Maestrano::SynchronizationsController < Maestrano::Rails::WebHookControlle
end
def organization_status(organization)
- last_sync = organization.synchronizations.last
if Maestrano::Connector::Rails::SynchronizationJob.find_running_job(organization.id)
'RUNNING'
elsif Maestrano::Connector::Rails::SynchronizationJob.find_job(organization.id)
'ENQUEUED'
else
- last_sync&.status || 'DISABLED'
+ organization.synchronizations.last&.status || 'DISABLED'
end
end
end
diff --git a/app/controllers/version_controller.rb b/app/controllers/version_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/version_controller.rb
+++ b/app/controllers/version_controller.rb
@@ -5,7 +5,7 @@ class VersionController < ApplicationController
commit = ENV['GIT_COMMIT_ID']
respond_to do |format|
- format.html { render text: "framework_version=#{framework_version}\nci_branch=#{branch}\nci_commit=#{commit}\nenv=#{Rails}\nnv, ruby_version=#{RUBY_VERSION}\nruby_engine=#{RUBY_ENGINE}\n" }
+ format.html { render text: "framework_version=#{framework_version}\nci_branch=#{branch}\nci_commit=#{commit}\nenv=#{Rails.env}\nruby_version=#{RUBY_VERSION}\nruby_engine=#{RUBY_ENGINE}\n" }
format.json { render json: {framework_version: framework_version, ci_branch: branch, ci_commit: commit, env: Rails.env, ruby_version: RUBY_VERSION, ruby_engine: RUBY_ENGINE} }
end
end
diff --git a/app/models/maestrano/connector/rails/concerns/organization.rb b/app/models/maestrano/connector/rails/concerns/organization.rb
index <HASH>..<HASH> 100644
--- a/app/models/maestrano/connector/rails/concerns/organization.rb
+++ b/app/models/maestrano/connector/rails/concerns/organization.rb
@@ -62,7 +62,7 @@ module Maestrano::Connector::Rails::Concerns::Organization
entity_push_to_connec = clazz && clazz < Maestrano::Connector::Rails::Entity ? clazz.can_write_connec? : true
entity_push_to_external = clazz && clazz < Maestrano::Connector::Rails::Entity ? clazz.can_write_external? : true
- synchronized_entities[entity.to_sym] = {can_push_to_connec: !pull_disabled && entity_push_to_connec, can_push_to_external: !push_disabled && entity_push_to_external}
+ synchronized_entities[entity.to_sym] = {can_push_to_connec: entity_push_to_connec, can_push_to_external: entity_push_to_external}
end
end
diff --git a/lib/generators/connector/templates/home_index.haml b/lib/generators/connector/templates/home_index.haml
index <HASH>..<HASH> 100644
--- a/lib/generators/connector/templates/home_index.haml
+++ b/lib/generators/connector/templates/home_index.haml
@@ -64,9 +64,9 @@
- current_organization.displayable_synchronized_entities.each do |k, v|
.row.sync-entity
.col-md-1.link-step-action
- #{check_box("#{k}", "to_connec", {checked: (v[:can_push_to_connec] || v[:can_push_to_external]) && !current_organization.pull_disabled, onclick: "return !#{k}_to_external.checked;", disabled: current_organization.pull_disabled || !v[:can_push_to_connec]})}
+ #{check_box("#{k}", "to_connec", {checked: (v[:can_push_to_connec] || v[:can_push_to_external])})}
.col-md-1.link-step-action
- #{check_box("#{k}", "to_external", {checked: v[:can_push_to_external] && !current_organization.push_disabled, onchange: "#{k}_to_connec.checked = #{!current_organization.pull_disabled};", disabled: current_organization.push_disabled || !v[:can_push_to_external]})}
+ #{check_box("#{k}", "to_external", {checked: v[:can_push_to_external], disabled: !v[:can_push_to_external]})}
%label.col-md-7{:for => "#{k}", style: 'padding-top: 5px;'}
.col-md-6
#{v[:external_name]}
|
do not skip connec webhook on push_disabled (transac! compatibility)
|
maestrano_maestrano-connector-rails
|
train
|
f5fe43e8b9e393399622050deefef6a9dab44709
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -247,10 +247,11 @@ function convert(swagger, options) {
data = options.templateCallback('heading_main', 'post', data);
if (data.append) { content += data.append; delete data.append; }
- if (swagger.securityDefinitions) {
+ var securityContainer = swagger.securityDefinitions || swagger.components.securitySchemes;
+ if (securityContainer) {
data.securityDefinitions = [];
- for (var s in swagger.securityDefinitions) {
- var secdef = swagger.securityDefinitions[s];
+ for (var s in securityContainer) {
+ var secdef = securityContainer[s];
var desc = secdef.description ? secdef.description : '';
if (secdef.type == 'oauth2') {
secdef.scopeArray = [];
@@ -379,6 +380,17 @@ function convert(swagger, options) {
// combine
var parameters = sharedParameters.concat(opParameters);
+ if (op.requestBody) {
+ // fake a version 2-style body parameter for now
+ var body = {};
+ body.name = 'body';
+ body.in = 'body';
+ body.type = 'object';
+ body.required = true; // possibly
+ body.description = 'No description'; // todo
+ parameters.push(body);
+ }
+
for (var p in parameters) {
var param = parameters[p];
param.required = (param.required ? param.required : false);
|
Start of auth for v3
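The fallback above boils down to checking the OpenAPI 2 location first and the v3 location second. A minimal Python sketch of the same lookup (the spec shape is assumed; unlike the patch, this guards against a missing components key):

def security_container(spec):
    # OpenAPI 2 keeps schemes in securityDefinitions; v3 nests them
    # under components.securitySchemes.
    return (spec.get('securityDefinitions')
            or spec.get('components', {}).get('securitySchemes'))

assert security_container({'securityDefinitions': {'basic': {}}}) == {'basic': {}}
assert security_container({'components': {'securitySchemes': {'oauth2': {}}}}) == {'oauth2': {}}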
|
Mermade_widdershins
|
train
|
3cc177dcb4ca1f4bae5c52ac3c467eafa7c41588
|
diff --git a/lxd/cluster/notify.go b/lxd/cluster/notify.go
index <HASH>..<HASH> 100644
--- a/lxd/cluster/notify.go
+++ b/lxd/cluster/notify.go
@@ -60,12 +60,23 @@ func NewNotifier(state *state.State, cert *shared.CertInfo, policy NotifierPolic
}
if node.IsOffline(offlineThreshold) {
- switch policy {
- case NotifyAll:
- return fmt.Errorf("peer node %s is down", node.Address)
- case NotifyAlive:
- continue // Just skip this node
- case NotifyTryAll:
+ // Even if the heartbeat timestamp is not recent
+ // enough, let's try to actually connect to the
+ // node, just in case the heartbeat is lagging
+ // behind for some reason and the node is
+ // actually up.
+ client, err := Connect(node.Address, cert, true)
+ if err == nil {
+ _, _, err = client.GetServer()
+ }
+ if err != nil {
+ switch policy {
+ case NotifyAll:
+ return fmt.Errorf("peer node %s is down", node.Address)
+ case NotifyAlive:
+ continue // Just skip this node
+ case NotifyTryAll:
+ }
}
}
peers = append(peers, node.Address)
diff --git a/lxd/cluster/notify_test.go b/lxd/cluster/notify_test.go
index <HASH>..<HASH> 100644
--- a/lxd/cluster/notify_test.go
+++ b/lxd/cluster/notify_test.go
@@ -98,8 +98,9 @@ func TestNewNotify_NotifyAlive(t *testing.T) {
// Helper for setting fixtures for Notify tests.
type notifyFixtures struct {
- t *testing.T
- state *state.State
+ t *testing.T
+ state *state.State
+ servers []*httptest.Server
}
// Spawn the given number of fake nodes, save in them in the database and
@@ -148,6 +149,8 @@ func (h *notifyFixtures) Nodes(cert *shared.CertInfo, n int) func() {
}
}
+ h.servers = servers
+
return cleanup
}
@@ -174,6 +177,8 @@ func (h *notifyFixtures) Down(i int) {
return nil
})
require.NoError(h.t, err)
+ h.servers[i].Close()
+
}
// Returns a minimal stub for the LXD RESTful API server, just realistic
|
Attempt to probe a member which is considered offline
This is a last-resort attempt, just in case the heartbeat couldn't reach the node
for whatever reason but the node is actually alive.
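Sketched in Python, the last-resort probe pattern looks like this (the address format, port handling and timeout are assumptions; the real patch issues a GetServer call over the LXD client API rather than a raw TCP connect):

import socket

def probably_alive(address, timeout=2.0):
    # The heartbeat may be lagging; try an actual connection before
    # declaring the node down.
    host, _, port = address.rpartition(':')
    try:
        with socket.create_connection((host, int(port)), timeout=timeout):
            return True
    except OSError:
        return False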
|
lxc_lxd
|
train
|
504da9b1f0c82e57d3ef0cda7377f6a0889a63e5
|
diff --git a/cache/cache.go b/cache/cache.go
index <HASH>..<HASH> 100644
--- a/cache/cache.go
+++ b/cache/cache.go
@@ -357,7 +357,7 @@ func (r *RowCache) RowsByCondition(conditions []ovsdb.Condition) (map[string]mod
results[rowUUID] = row
}
}
- } else if index, err := r.Index(condition.Column); err != nil {
+ } else if index, err := r.Index(condition.Column); err == nil {
for k, rowUUID := range index {
tSchema := schema.Columns[condition.Column]
nativeValue, err := ovsdb.OvsToNative(tSchema, condition.Value)
|
cache: fix RowsByCondition so non-index conditions are checked
|
socketplane_libovsdb
|
train
|
6cdb587e084b478a4904f264af8728cddcec55f0
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -27,7 +27,7 @@ tests_require = [
'pytest-cache>=1.0',
'pytest-cov>=1.8.0',
'pytest-pep8>=1.0.6',
- 'pytest-reana>=0.4.1,<0.5.0',
+ 'pytest-reana>=0.5.0.dev20181119',
'pytest>=3.8'
]
|
installation: bump pytest-reana
|
reanahub_reana-commons
|
train
|
ea33b1093b0c89e46aeb43e08caa90d6fea73d99
|
diff --git a/server/monitor.go b/server/monitor.go
index <HASH>..<HASH> 100644
--- a/server/monitor.go
+++ b/server/monitor.go
@@ -873,6 +873,7 @@ type Varz struct {
TotalConnections uint64 `json:"total_connections"`
Routes int `json:"routes"`
Remotes int `json:"remotes"`
+ Leafs int `json:"leafnodes"`
InMsgs int64 `json:"in_msgs"`
OutMsgs int64 `json:"out_msgs"`
InBytes int64 `json:"in_bytes"`
@@ -1136,6 +1137,7 @@ func (s *Server) updateVarzRuntimeFields(v *Varz, forceUpdate bool, pcpu float64
v.TotalConnections = s.totalClients
v.Routes = len(s.routes)
v.Remotes = len(s.remotes)
+ v.Leafs = len(s.leafs)
v.InMsgs = atomic.LoadInt64(&s.inMsgs)
v.InBytes = atomic.LoadInt64(&s.inBytes)
v.OutMsgs = atomic.LoadInt64(&s.outMsgs)
|
Add leafnode connections to varz
|
nats-io_gnatsd
|
train
|
7fcf5fb6011a5164ce651820c5fc478d82bfcc91
|
diff --git a/benchmark/encode.rb b/benchmark/encode.rb
index <HASH>..<HASH> 100644
--- a/benchmark/encode.rb
+++ b/benchmark/encode.rb
@@ -20,46 +20,49 @@ rescue LoadError
end
filename = ARGV[0] || 'benchmark/subjects/ohai.json'
-json = File.new(filename, 'r')
-hash = Yajl::Parser.new.parse(json)
-json.close
+hash = File.open(filename, 'rb') { |f| Yajl::Parser.new.parse(f.read) }
times = ARGV[1] ? ARGV[1].to_i : 1000
puts "Starting benchmark encoding #{filename} #{times} times\n\n"
Benchmark.bmbm { |x|
io_encoder = Yajl::Encoder.new
- x.report {
- puts "Yajl::Encoder#encode (to an IO)"
+ string_encoder = Yajl::Encoder.new
+
+ x.report("Yajl::Encoder#encode (to an IO)") {
times.times {
io_encoder.encode(hash, StringIO.new)
}
}
- string_encoder = Yajl::Encoder.new
- x.report {
- puts "Yajl::Encoder#encode (to a String)"
+ x.report("Yajl::Encoder#encode (to a String)") {
times.times {
output = string_encoder.encode(hash)
}
}
if defined?(JSON)
- x.report {
- puts "JSON.generate"
+ x.report("JSON.generate") {
times.times {
JSON.generate(hash)
}
}
end
if defined?(Psych)
- x.report {
- puts "Psych.to_json"
+ x.report("Psych.to_json") {
times.times {
Psych.to_json(hash)
}
}
+ x.report("Psych::JSON::Stream") {
+ times.times {
+ io = StringIO.new
+ stream = Psych::JSON::Stream.new io
+ stream.start
+ stream.push hash
+ stream.finish
+ }
+ }
end
if defined?(ActiveSupport::JSON)
- x.report {
- puts "ActiveSupport::JSON.encode"
+ x.report("ActiveSupport::JSON.encode") {
times.times {
ActiveSupport::JSON.encode(hash)
}
|
adding headings to benchmarks, adding a benchmark for streaming psych
|
brianmario_yajl-ruby
|
train
|
c6390e3779ab54cd88fc760300e52b7f4247c7d6
|
diff --git a/cherrypy/wsgiserver/__init__.py b/cherrypy/wsgiserver/__init__.py
index <HASH>..<HASH> 100644
--- a/cherrypy/wsgiserver/__init__.py
+++ b/cherrypy/wsgiserver/__init__.py
@@ -116,8 +116,12 @@ def plat_specific_errors(*errnames):
# de-dupe the list
return dict.fromkeys(nums).keys()
+socket_error_eintr = plat_specific_errors("EINTR", "WSAEINTR")
+
socket_errors_to_ignore = plat_specific_errors(
"EPIPE",
+ "EBADF", "WSAEBADF",
+ "ENOTSOCK", "WSAENOTSOCK",
"ETIMEDOUT", "WSAETIMEDOUT",
"ECONNREFUSED", "WSAECONNREFUSED",
"ECONNRESET", "WSAECONNRESET",
@@ -691,10 +695,7 @@ class FatalSSLAlert(Exception):
class CP_fileobject(socket._fileobject):
"""Faux file object attached to a socket object."""
-
- def recv(self, size):
- return self._sock.recv(size)
-
+
def sendall(self, data):
"""Sendall for non-blocking sockets."""
while data:
@@ -704,7 +705,7 @@ class CP_fileobject(socket._fileobject):
except socket.error, e:
if e.args[0] not in socket_errors_nonblocking:
raise
-
+
def send(self, data):
return self._sock.send(data)
@@ -714,13 +715,18 @@ class CP_fileobject(socket._fileobject):
self._wbuf = []
self.sendall(buffer)
+ def recv(self, size):
+ while True:
+ try:
+ return self._sock.recv(size)
+ except socket.error, e:
+ if e.args[0] not in socket_errors_nonblocking:
+ raise
+
def read(self, size=-1):
- data = self._rbuf
if size < 0:
# Read until EOF
- buffers = []
- if data:
- buffers.append(data)
+ buffers = [self._rbuf]
self._rbuf = ""
if self._rbufsize <= 1:
recv_size = self.default_bufsize
@@ -735,6 +741,7 @@ class CP_fileobject(socket._fileobject):
return "".join(buffers)
else:
# Read until size bytes or EOF seen, whichever comes first
+ data = self._rbuf
buf_len = len(data)
if buf_len >= size:
self._rbuf = data[size:]
@@ -845,12 +852,14 @@ class SSL_fileobject(CP_fileobject):
while True:
try:
return call(*args, **kwargs)
- except (SSL.WantReadError, SSL.WantWriteError):
+ except SSL.WantReadError:
# Sleep and try again. This is dangerous, because it means
# the rest of the stack has no way of differentiating
# between a "new handshake" error and "client dropped".
# Note this isn't an endless loop: there's a timeout below.
time.sleep(self.ssl_retry)
+ except SSL.WantWriteError:
+ time.sleep(self.ssl_retry)
except SSL.SysCallError, e:
if is_reader and e.args == (-1, 'Unexpected EOF'):
return ""
@@ -880,8 +889,15 @@ class SSL_fileobject(CP_fileobject):
raise socket.timeout("timed out")
def recv(self, *args, **kwargs):
- return self._safe_call(True, super(SSL_fileobject, self).recv, *args, **kwargs)
-
+ buf = []
+ r = super(SSL_fileobject, self).recv
+ while True:
+ data = self._safe_call(True, r, *args, **kwargs)
+ buf.append(data)
+ p = self._sock.pending()
+ if not p:
+ return "".join(buf)
+
def sendall(self, *args, **kwargs):
return self._safe_call(False, super(SSL_fileobject, self).sendall, *args, **kwargs)
@@ -1405,21 +1421,18 @@ class CherryPyWSGIServer(object):
# accept() by default
return
except socket.error, x:
- if hasattr(errno, "EINTR") and x.args[0] == errno.EINTR:
+ if x.args[0] in socket_error_eintr:
# I *think* this is right. EINTR should occur when a signal
# is received during the accept() call; all docs say retry
# the call, and I *think* I'm reading it right that Python
# will then go ahead and poll for and handle the signal
# elsewhere. See http://www.cherrypy.org/ticket/707.
return
- msg = x.args[1]
- if msg in ("Bad file descriptor", "Socket operation on non-socket"):
- # Our socket was closed.
- return
- if msg == "Resource temporarily unavailable":
+ if x.args[0] in socket_errors_nonblocking:
# Just try again. See http://www.cherrypy.org/ticket/479.
return
- if msg == "Software caused connection abort":
+ if x.args[0] in socket_errors_to_ignore:
+ # Our socket was closed.
# See http://www.cherrypy.org/ticket/686.
return
raise
|
Finished the nonblocking fileobject by handling EWOULDBLOCK during recv and accept. Also added experimental code to check SSL pending(), but no verdict yet on whether it's necessary or not.
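The recv change follows a standard pattern: on a non-blocking socket, EAGAIN/EWOULDBLOCK means "try again", not "fail". A minimal Python sketch of the retry loop (a real server would wait on select/poll instead of spinning):

import errno

NONBLOCKING_ERRORS = {errno.EAGAIN, errno.EWOULDBLOCK}

def recv_retry(sock, size):
    # Loop until data, EOF, or a genuine error arrives.
    while True:
        try:
            return sock.recv(size)
        except OSError as e:
            if e.errno not in NONBLOCKING_ERRORS:
                raise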
|
cherrypy_cheroot
|
train
|
3277124d81de0fe24ecaa195992b8821520531f6
|
diff --git a/model/QtiJsonItemCompiler.php b/model/QtiJsonItemCompiler.php
index <HASH>..<HASH> 100644
--- a/model/QtiJsonItemCompiler.php
+++ b/model/QtiJsonItemCompiler.php
@@ -26,8 +26,6 @@ use core_kernel_classes_Resource;
use oat\taoQtiItem\model\pack\QtiItemPacker;
use oat\taoQtiItem\model\qti\exception\XIncludeException;
use oat\taoQtiItem\model\qti\Service;
-use tao_helpers_File;
-use taoItems_models_classes_ItemsService;
use oat\taoQtiItem\model\qti\Parser;
/**
@@ -44,6 +42,20 @@ class QtiJsonItemCompiler extends QtiItemCompiler
const VAR_ELT_FILE_NAME = 'variableElements.json';
/**
+ * @var array keys to sanitize before saving json
+ */
+ private $toSanitizeKeys = array(
+ 'responses',
+ 'feedbacks',
+ 'responseProcessing'
+ );
+
+ /**
+ * @var string json from the item packed
+ */
+ private $itemJson;
+
+ /**
* Deploy all the required files into the provided directories
*
* @param core_kernel_classes_Resource $item
@@ -64,16 +76,19 @@ class QtiJsonItemCompiler extends QtiItemCompiler
try {
$qtiItem = $this->retrieveAssets($item, $language, $publicDirectory);
- //create the item.json file in private directory
- $itemPacker = new QtiItemPacker();
- $itemPack = $itemPacker->packQtiItem($item, $language, $qtiItem);
- file_put_contents($privateFolder.self::ITEM_FILE_NAME, json_encode($itemPack->JsonSerialize()));
-
//store variable qti elements data into the private directory
$variableElements = $qtiService->getVariableElements($qtiItem);
$serializedVarElts = json_encode($variableElements);
file_put_contents($privateFolder . self::VAR_ELT_FILE_NAME, $serializedVarElts);
+ //create the item.json file in private directory
+ $itemPacker = new QtiItemPacker();
+ $itemPack = $itemPacker->packQtiItem($item, $language, $qtiItem);
+ $this->itemJson = $itemPack->JsonSerialize();
+ $this->sanitizeJson();
+ file_put_contents($privateFolder.self::ITEM_FILE_NAME, json_encode($this->itemJson));
+
+
return new common_report_Report(
common_report_Report::TYPE_SUCCESS, __('Successfully compiled "%s"', $language)
);
@@ -93,4 +108,15 @@ class QtiJsonItemCompiler extends QtiItemCompiler
}
}
+ /**
+ * Sanitize the packed json to remove some data such as responses, feedback or responseProcessing
+ */
+ private function sanitizeJson(){
+ foreach($this->toSanitizeKeys as $key){
+ if(isset($this->itemJson['data'][$key])){
+ $this->itemJson['data'][$key] = 'Please do not cheat';
+ }
+ }
+ }
+
}
|
empty json property to avoid cheating
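The idea generalises to any item packer that must not leak answers to the client. A hedged Python sketch using the key list and placeholder string from the patch:

SANITIZE_KEYS = ('responses', 'feedbacks', 'responseProcessing')

def sanitize_item(item_json):
    # Blank out answer-revealing sections before the pack is persisted.
    data = item_json.get('data', {})
    for key in SANITIZE_KEYS:
        if key in data:
            data[key] = 'Please do not cheat'
    return item_json

item = {'data': {'responses': {'RESPONSE': '42'}, 'body': '<p>q</p>'}}
assert sanitize_item(item)['data']['responses'] == 'Please do not cheat'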
|
oat-sa_extension-tao-itemqti
|
train
|
2f0d40663d4c17f6729933de667b7ed9567d8051
|
diff --git a/plugins/org.eclipse.xtext/src/org/eclipse/xtext/naming/QualifiedName.java b/plugins/org.eclipse.xtext/src/org/eclipse/xtext/naming/QualifiedName.java
index <HASH>..<HASH> 100644
--- a/plugins/org.eclipse.xtext/src/org/eclipse/xtext/naming/QualifiedName.java
+++ b/plugins/org.eclipse.xtext/src/org/eclipse/xtext/naming/QualifiedName.java
@@ -95,6 +95,9 @@ public class QualifiedName implements Comparable<QualifiedName> {
}
public QualifiedName append(String segment) {
+ if (segment == null) {
+ throw new IllegalArgumentException("Segment cannot be null");
+ }
String[] newSegments = new String[getSegmentCount() + 1];
System.arraycopy(segments, 0, newSegments, 0, segments.length);
newSegments[segments.length] = segment;
diff --git a/tests/org.eclipse.xtext.tests/src/org/eclipse/xtext/naming/QualifiedNameTest.java b/tests/org.eclipse.xtext.tests/src/org/eclipse/xtext/naming/QualifiedNameTest.java
index <HASH>..<HASH> 100644
--- a/tests/org.eclipse.xtext.tests/src/org/eclipse/xtext/naming/QualifiedNameTest.java
+++ b/tests/org.eclipse.xtext.tests/src/org/eclipse/xtext/naming/QualifiedNameTest.java
@@ -25,6 +25,13 @@ public class QualifiedNameTest extends TestCase {
fail("Exception expected");
} catch(IllegalArgumentException e) {}
}
+
+ public void testAppendNull() {
+ try {
+ QualifiedName.create().append((String) null);
+ fail("Exception expected");
+ } catch (IllegalArgumentException e) {}
+ }
public void testSegments() {
QualifiedName qn = QualifiedName.create("foo", "bar", "baz");
|
[Xtext] fixed <I> - Check that you cannot append a null segment to a
QualifiedName
|
eclipse_xtext-core
|
train
|
13115c295710c82e22423373509c3c3756a285c4
|
diff --git a/java/client/test/org/openqa/selenium/AlertsTest.java b/java/client/test/org/openqa/selenium/AlertsTest.java
index <HASH>..<HASH> 100644
--- a/java/client/test/org/openqa/selenium/AlertsTest.java
+++ b/java/client/test/org/openqa/selenium/AlertsTest.java
@@ -352,7 +352,6 @@ public class AlertsTest extends JUnit4TestBase {
}
@Test
- @NotYetImplemented(value = MARIONETTE, reason = "https://bugzilla.mozilla.org/show_bug.cgi?id=1477977")
public void testHandlesTwoAlertsFromOneInteraction() {
driver.get(appServer.create(new Page()
.withScripts(
|
[java] Unignoring the test, regression bug in marionette has been fixed already
|
SeleniumHQ_selenium
|
train
|
1b048b167627275301b8b4f48fe4a79ea3fe3479
|
diff --git a/executor/executor_test.go b/executor/executor_test.go
index <HASH>..<HASH> 100644
--- a/executor/executor_test.go
+++ b/executor/executor_test.go
@@ -8178,7 +8178,7 @@ func (s *testSerialSuite) TestIssue24210(c *C) {
func (s *testSerialSuite) TestDeadlockTable(c *C) {
deadlockhistory.GlobalDeadlockHistory.Clear()
- occurTime := time.Date(2021, 5, 10, 1, 2, 3, 456789000, time.UTC)
+ occurTime := time.Date(2021, 5, 10, 1, 2, 3, 456789000, time.Local)
rec := &deadlockhistory.DeadlockRecord{
OccurTime: occurTime,
IsRetryable: false,
@@ -8201,7 +8201,7 @@ func (s *testSerialSuite) TestDeadlockTable(c *C) {
}
deadlockhistory.GlobalDeadlockHistory.Push(rec)
- occurTime2 := time.Date(2022, 6, 11, 2, 3, 4, 987654000, time.UTC)
+ occurTime2 := time.Date(2022, 6, 11, 2, 3, 4, 987654000, time.Local)
rec2 := &deadlockhistory.DeadlockRecord{
OccurTime: occurTime2,
IsRetryable: true,
diff --git a/infoschema/tables_test.go b/infoschema/tables_test.go
index <HASH>..<HASH> 100644
--- a/infoschema/tables_test.go
+++ b/infoschema/tables_test.go
@@ -1564,7 +1564,7 @@ func (s *testTableSuite) TestTrx(c *C) {
Username: "root",
CurrentDB: "test",
}
- blockTime2 := time.Date(2021, 05, 20, 13, 18, 30, 123456000, time.UTC)
+ blockTime2 := time.Date(2021, 05, 20, 13, 18, 30, 123456000, time.Local)
sm.txnInfo[1] = &txninfo.TxnInfo{
StartTS: 425070846483628033,
CurrentSQLDigest: "",
@@ -1577,8 +1577,8 @@ func (s *testTableSuite) TestTrx(c *C) {
}
tk.Se.SetSessionManager(sm)
tk.MustQuery("select * from information_schema.TIDB_TRX;").Check(testkit.Rows(
- "424768545227014155 2021-05-07 04:56:48.001000 "+digest.String()+" Normal <nil> 1 19 2 root test []",
- "425070846483628033 2021-05-20 13:16:35.778000 <nil> LockWaiting 2021-05-20 13:18:30.123456 0 0 10 user1 db1 [sql1, sql2]"))
+ "424768545227014155 2021-05-07 12:56:48.001000 "+digest.String()+" Normal <nil> 1 19 2 root test []",
+ "425070846483628033 2021-05-20 21:16:35.778000 <nil> LockWaiting 2021-05-20 13:18:30.123456 0 0 10 user1 db1 [sql1, sql2]"))
}
func (s *testTableSuite) TestInfoschemaDeadlockPrivilege(c *C) {
diff --git a/session/txninfo/txn_info.go b/session/txninfo/txn_info.go
index <HASH>..<HASH> 100644
--- a/session/txninfo/txn_info.go
+++ b/session/txninfo/txn_info.go
@@ -97,8 +97,7 @@ func (info *TxnInfo) ShallowClone() *TxnInfo {
// ToDatum Converts the `TxnInfo` to `Datum` to show in the `TIDB_TRX` table.
func (info *TxnInfo) ToDatum() []types.Datum {
- // TODO: The timezone represented to the user is not correct and it will be always UTC time.
- humanReadableStartTime := time.Unix(0, oracle.ExtractPhysical(info.StartTS)*1e6).UTC()
+ humanReadableStartTime := time.Unix(0, oracle.ExtractPhysical(info.StartTS)*1e6)
var currentDigest interface{}
if len(info.CurrentSQLDigest) != 0 {
|
infoschema: Use system local time for tables of lock view (#<I>)
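The bug class here — formatting a physical timestamp in UTC while everything else reports local time — is easy to reproduce in a few lines of Python (the timestamp value is made up):

from datetime import datetime, timezone

ts = 1620435408.001  # hypothetical physical commit timestamp, in seconds

utc_view = datetime.fromtimestamp(ts, tz=timezone.utc)  # what the old code rendered
local_view = datetime.fromtimestamp(ts)                 # system local time, as fixed
print(utc_view.isoformat(), local_view.isoformat())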
|
pingcap_tidb
|
train
|
4e208ea2fe9d4981e4de37233670f5677d552289
|
diff --git a/SpiffWorkflow/bpmn/specs/event_definitions.py b/SpiffWorkflow/bpmn/specs/event_definitions.py
index <HASH>..<HASH> 100644
--- a/SpiffWorkflow/bpmn/specs/event_definitions.py
+++ b/SpiffWorkflow/bpmn/specs/event_definitions.py
@@ -193,13 +193,13 @@ class SignalEventDefinition(CatchingEventDefinition, ThrowingEventDefinition):
return retdict
-class CancelEventDefinition(CatchingEventDefinition, ThrowingEventDefinition):
+class CancelEventDefinition(CatchingEventDefinition):
"""
- The MessageEventDefinition is the implementation of event definition used
- for Message Events.
+ The CancelEventDefinition is the implementation of event definition used
+ for Cancel Events.
"""
- def __init__(self, message,name=''):
+ def __init__(self, message, name=''):
"""
Constructor.
@@ -216,21 +216,8 @@ class CancelEventDefinition(CatchingEventDefinition, ThrowingEventDefinition):
"""
return my_task._get_internal_data('event_fired', False)
- def _event_ready(self, my_task):
- waiting_events = my_task.workflow.task_tree.internal_data.get('cancels', {})
- if (self.event in waiting_events.keys()):
- evaledpayload = waiting_events[self.event]
- del(waiting_events[self.event])
- return evaledpayload
- return False
-
- # def _send_message(self, my_task,resultVar):
- # payload = PythonScriptEngine().evaluate(self.payload, **my_task.data)
- # my_task.workflow.message(self.message,payload,resultVar=resultVar)
- # return True
- def _send_message(self, my_task):
- my_task.workflow.cancel_notify(self.message)
- return True
+ def _message_ready(self, my_task):
+ return ('CancelEvent', None)
def _accept_message(self, my_task, message):
if message != self.message:
|
changed _event_ready to _message_ready
removed some unused code
|
knipknap_SpiffWorkflow
|
train
|
adf45d2ec12cd880e2c090893bf260b4f1cfcc37
|
diff --git a/workflows/logging/test_logging.py b/workflows/logging/test_logging.py
index <HASH>..<HASH> 100644
--- a/workflows/logging/test_logging.py
+++ b/workflows/logging/test_logging.py
@@ -12,7 +12,8 @@ def test_callback_handler_works_within_logging_framework():
log.setLevel(logging.INFO)
cbh = workflows.logging.CallbackHandler(cbmock)
- log.addHandler(workflows.logging.CallbackHandler(cbmock))
+ cbh.handleError = mock.Mock()
+ log.addHandler(cbh)
log.info(logmsg)
cbmock.assert_called_once()
@@ -23,3 +24,19 @@ def test_callback_handler_works_within_logging_framework():
assert logrec.levelname == 'INFO'
assert logrec.message == logmsg
assert logrec.funcName.startswith('test_')
+ assert not cbh.handleError.called
+
+ # Now check that the callback handler can handle errors in the
+ # callback function.
+ logmsg = 'Test message for error in logging'
+ cbmock.side_effect=AttributeError('Some failure')
+
+ log.info(logmsg)
+
+ assert cbmock.call_count == 2
+ assert cbmock.call_args == ((mock.ANY,), {})
+ logrec = cbmock.call_args[0][0]
+ assert isinstance(logrec, logging.LogRecord)
+ assert logrec.message == logmsg
+ cbh.handleError.assert_called_once()
+ assert cbh.handleError.call_args == cbmock.call_args
|
Errors during logging should be handled gracefully
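The behaviour the new assertions pin down — a failing callback must route through logging's handleError hook rather than raise into the caller — can be sketched as a simplified stand-in for workflows.logging.CallbackHandler:

import logging

class CallbackHandler(logging.Handler):
    def __init__(self, callback):
        super().__init__()
        self._callback = callback

    def emit(self, record):
        try:
            self._callback(record)
        except Exception:
            # Delegate to logging's standard error handling; never let a
            # broken callback take down the code that logged the message.
            self.handleError(record)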
|
DiamondLightSource_python-workflows
|
train
|
6f79953aa52c08581a5ce5a9a0dafa89d79ba390
|
diff --git a/sentinel.go b/sentinel.go
index <HASH>..<HASH> 100644
--- a/sentinel.go
+++ b/sentinel.go
@@ -157,9 +157,9 @@ func (d *sentinelFailover) MasterAddr() (string, error) {
}
}
- for i, addr := range d.sentinelAddrs {
+ for i, sentinelAddr := range d.sentinelAddrs {
sentinel := newSentinel(&Options{
- Addr: addr,
+ Addr: sentinelAddr,
DB: d.opt.DB,
Password: d.opt.Password,
@@ -171,18 +171,20 @@ func (d *sentinelFailover) MasterAddr() (string, error) {
PoolSize: d.opt.PoolSize,
IdleTimeout: d.opt.IdleTimeout,
})
- addr, err := sentinel.GetMasterAddrByName(d.masterName).Result()
+ masterAddr, err := sentinel.GetMasterAddrByName(d.masterName).Result()
if err != nil {
log.Printf("redis-sentinel: GetMasterAddrByName %q failed: %s", d.masterName, err)
- } else {
- // Push working sentinel to the top.
- d.sentinelAddrs[0], d.sentinelAddrs[i] = d.sentinelAddrs[i], d.sentinelAddrs[0]
-
- d.setSentinel(sentinel)
- addr := net.JoinHostPort(addr[0], addr[1])
- log.Printf("redis-sentinel: %q addr is %s", d.masterName, addr)
- return addr, nil
+ sentinel.Close()
+ continue
}
+
+ // Push working sentinel to the top.
+ d.sentinelAddrs[0], d.sentinelAddrs[i] = d.sentinelAddrs[i], d.sentinelAddrs[0]
+
+ d.setSentinel(sentinel)
+ addr := net.JoinHostPort(masterAddr[0], masterAddr[1])
+ log.Printf("redis-sentinel: %q addr is %s", d.masterName, addr)
+ return addr, nil
}
return "", errors.New("redis: all sentinels are unreachable")
@@ -235,7 +237,7 @@ func (d *sentinelFailover) listen() {
msgIface, err := pubsub.Receive()
if err != nil {
log.Printf("redis-sentinel: Receive failed: %s", err)
- pubsub = nil
+ pubsub.Close()
return
}
|
sentinel: don't leak goroutines on sentinel fail. Fixes #<I>.
|
go-redis_redis
|
train
|
9bb3332c63b29cd6997681c96ed0838e040daba4
|
diff --git a/test/e2e/scheduling/predicates.go b/test/e2e/scheduling/predicates.go
index <HASH>..<HASH> 100644
--- a/test/e2e/scheduling/predicates.go
+++ b/test/e2e/scheduling/predicates.go
@@ -219,9 +219,9 @@ var _ = SIGDescribe("SchedulerPredicates [Serial]", func() {
// 4. Create another pod with no affinity to any node that need 50% of the largest node CPU.
// 5. Make sure this additional pod is not scheduled.
/*
- Testname: scheduler-resource-limits
- Description: Ensure that scheduler accounts node resources correctly
- and respects pods' resource requirements during scheduling.
+ Release : v1.9
+ Testname: Scheduler, resource limits
+ Description: Scheduling Pods MUST fail if the resource limits exceed Machine capacity.
*/
framework.ConformanceIt("validates resource limits of pods that are allowed to run ", func() {
framework.WaitForStableCluster(cs, masterNodes)
@@ -325,9 +325,9 @@ var _ = SIGDescribe("SchedulerPredicates [Serial]", func() {
// Test Nodes does not have any label, hence it should be impossible to schedule Pod with
// nonempty Selector set.
/*
- Testname: scheduler-node-selector-not-matching
- Description: Ensure that scheduler respects the NodeSelector field of
- PodSpec during scheduling (when it does not match any node).
+ Release : v1.9
+ Testname: Scheduler, node selector not matching
+ Description: Create a Pod with a NodeSelector set to a value that does not match a node in the cluster. Since there are no nodes matching the criteria the Pod MUST not be scheduled.
*/
framework.ConformanceIt("validates that NodeSelector is respected if not matching ", func() {
By("Trying to schedule Pod with nonempty NodeSelector.")
@@ -348,9 +348,9 @@ var _ = SIGDescribe("SchedulerPredicates [Serial]", func() {
})
/*
- Testname: scheduler-node-selector-matching
- Description: Ensure that scheduler respects the NodeSelector field
- of PodSpec during scheduling (when it matches).
+ Release : v1.9
+ Testname: Scheduler, node selector matching
+ Description: Create a label on the node {k: v}. Then create a Pod with a NodeSelector set to {k: v}. Check to see if the Pod is scheduled. When the NodeSelector matches then Pod MUST be scheduled on that node.
*/
framework.ConformanceIt("validates that NodeSelector is respected if matching ", func() {
nodeName := GetNodeThatCanRunPod(f)
|
Adding details to Conformance Tests using RFC <I> standards.
|
kubernetes_kubernetes
|
train
|
0ef27db90352bdfdd4af92b5237e7a1d2b250ce7
|
diff --git a/noxfile.py b/noxfile.py
index <HASH>..<HASH> 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -23,14 +23,8 @@ def tests(session):
# any doctests, there isn't any)
session.run('pytest', 'examples/', '--doctest-modules')
- session._run('conda',
- 'env',
- 'update',
- '--prefix',
- session.virtualenv.location,
- '--file',
- 'docs/environment.yml',
- silent=True)
+ session._run('conda', 'env', 'update', '--prefix',
+ session.virtualenv.location, '--file', 'docs/environment.yml')
# build docs so we can detect build errors
session.run('make', '-C', 'docs/', 'html', external=True)
|
debugging sphinx not found error
|
edublancas_sklearn-evaluation
|
train
|
22029692268b29d2125b297aea6c542032bd0cb6
|
diff --git a/lib/sup/modes/thread-view-mode.rb b/lib/sup/modes/thread-view-mode.rb
index <HASH>..<HASH> 100644
--- a/lib/sup/modes/thread-view-mode.rb
+++ b/lib/sup/modes/thread-view-mode.rb
@@ -261,7 +261,7 @@ EOS
end
if chunk.is_a?(Message)
jump_to_message chunk
- jump_to_next_open
+ jump_to_next_open if layout.state == :closed
end
end
|
don't jump to the next open message when expanding
When pressing enter to expand an entire message, don't jump to the next
open message. Only jump when you've collapsed the message.
|
sup-heliotrope_sup
|
train
|
3b7788bf880d58b0fcd42daba1515f5fb05dfe09
|
diff --git a/python/ray/worker.py b/python/ray/worker.py
index <HASH>..<HASH> 100644
--- a/python/ray/worker.py
+++ b/python/ray/worker.py
@@ -441,6 +441,14 @@ class Worker(object):
objectid (object_id.ObjectID): The object ID of the value to be put.
value: The value to put in the object store.
"""
+ # Make sure that the value is not an object ID.
+ if isinstance(value, ray.local_scheduler.ObjectID):
+ raise Exception("Calling `put` on an ObjectID is not allowed (similarly, "
+ "returning an ObjectID from a remote function is not "
+ "allowed). If you really want to do this, you can wrap "
+ "the ObjectID in a list and call `put` on it (or return "
+ "it).")
+
# Serialize and put the object in the object store.
try:
ray.numbuf.store_list(objectid.id(), self.plasma_client.conn, [value])
@@ -465,6 +473,11 @@ class Worker(object):
object_ids (List[object_id.ObjectID]): A list of the object IDs whose
values should be retrieved.
"""
+ # Make sure that the values are object IDs.
+ for object_id in object_ids:
+ if not isinstance(object_id, ray.local_scheduler.ObjectID):
+ raise Exception("Attempting to call `get` on the value {}, which is "
+ "not an ObjectID.".format(object_id))
# Do an initial fetch for remote objects.
self.plasma_client.fetch([object_id.id() for object_id in object_ids])
@@ -1981,7 +1994,4 @@ def store_outputs_in_objstore(objectids, outputs, worker=global_worker):
function.
"""
for i in range(len(objectids)):
- if isinstance(outputs[i], ray.local_scheduler.ObjectID):
- raise Exception("This remote function returned an ObjectID as its {}th return value. This is not allowed.".format(i))
- for i in range(len(objectids)):
worker.put_object(objectids[i], outputs[i])
diff --git a/test/runtest.py b/test/runtest.py
index <HASH>..<HASH> 100644
--- a/test/runtest.py
+++ b/test/runtest.py
@@ -655,6 +655,19 @@ class APITest(unittest.TestCase):
ray.worker.cleanup()
+ def testIllegalAPICalls(self):
+ ray.init(num_workers=0)
+
+ # Verify that we cannot call put on an ObjectID.
+ x = ray.put(1)
+ with self.assertRaises(Exception):
+ ray.put(x)
+ # Verify that we cannot call get on a regular value.
+ with self.assertRaises(Exception):
+ ray.get(3)
+
+ ray.worker.cleanup()
+
class PythonModeTest(unittest.TestCase):
def testPythonMode(self):
|
Disallow calling ray.put on an object ID. (#<I>)
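A toy sketch of the guard being added — ObjectID, the store, and the function bodies are simplified stand-ins, not Ray's real internals:

_store = {}

class ObjectID:
    def __init__(self, key):
        self.key = key

def put(value):
    # A handle must never be stored as if it were data.
    if isinstance(value, ObjectID):
        raise TypeError('put() on an ObjectID is not allowed; wrap it in a list')
    oid = ObjectID(len(_store))
    _store[oid.key] = value
    return oid

def get(object_id):
    if not isinstance(object_id, ObjectID):
        raise TypeError('get() expects an ObjectID, got %r' % (object_id,))
    return _store[object_id.key]

assert get(put(1)) == 1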
|
ray-project_ray
|
train
|
3bb3f684cf1b4365f75698e7416568309dd22eb5
|
diff --git a/lib/calabash/gestures.rb b/lib/calabash/gestures.rb
index <HASH>..<HASH> 100644
--- a/lib/calabash/gestures.rb
+++ b/lib/calabash/gestures.rb
@@ -179,13 +179,13 @@ module Calabash
# Performs a `pan` heading `left` on the screen.
# @see pan_left
def pan_screen_left(options={})
- pan_left("*", options)
+ pan_left('*', options)
end
# Performs a `pan` heading `right` on the screen.
# @see pan_right
def pan_screen_right(options={})
- pan_right("*", options)
+ pan_right('*', options)
end
# Performs a `pan` heading `up` on the screen.
@@ -253,13 +253,13 @@ module Calabash
# Performs a `flick` heading `left` on the screen.
# @see flick_left
def flick_screen_left(options={})
- flick_left("*", options)
+ flick_left('*', options)
end
# Performs a `flick` heading `right` on the screen.
# @see flick_right
def flick_screen_right(options={})
- flick_right("*", options)
+ flick_right('*', options)
end
# Performs a `flick` heading `up` on the screen.
|
Prefer single quotes for non-interpolated strings
|
calabash_calabash
|
train
|
1879f8331ed886fba00d13bb9d0951e334cc7fde
|
diff --git a/client/lib/media/test/utils.js b/client/lib/media/test/utils.js
index <HASH>..<HASH> 100644
--- a/client/lib/media/test/utils.js
+++ b/client/lib/media/test/utils.js
@@ -674,6 +674,7 @@ describe( 'MediaUtils', () => {
describe( '#createTransientMedia()', () => {
const GUID = 'URL';
+ const originalURL = window.URL;
beforeEach( () => {
window.URL = {
@@ -683,6 +684,10 @@ describe( 'MediaUtils', () => {
};
} );
+ afterEach( () => {
+ window.URL = originalURL;
+ } );
+
test( 'should return a transient for a file blob', () => {
const actual = MediaUtils.createTransientMedia( DUMMY_FILE_BLOB );
const expected = Object.assign( {}, EXPECTED, {
@@ -761,4 +766,82 @@ describe( 'MediaUtils', () => {
] );
} );
} );
+
+ describe( '#mediaURLToProxyConfig()', () => {
+ test( 'should detect media relative to site URL', () => {
+ expect( MediaUtils.mediaURLToProxyConfig( 'https://test.com/media.jpg', 'test.com' ) ).to.eql(
+ {
+ query: '',
+ filePath: '/media.jpg',
+ isRelativeToSiteRoot: true,
+ }
+ );
+ } );
+
+ test( 'should detect query string of given URL', () => {
+ expect(
+ MediaUtils.mediaURLToProxyConfig( 'https://test.com/media.jpg?w=100&h=98', 'test.com' )
+ ).to.eql( {
+ query: '?w=100&h=98',
+ filePath: '/media.jpg',
+ isRelativeToSiteRoot: true,
+ } );
+ } );
+
+ test( 'should detect domain mismatch', () => {
+ expect(
+ MediaUtils.mediaURLToProxyConfig( 'https://test.com/media.jpg', 'test2.com' )
+ ).to.eql( {
+ query: '',
+ filePath: '/media.jpg',
+ isRelativeToSiteRoot: false,
+ } );
+ } );
+
+ test( 'should recognize photon URLs as ones relative to site URL', () => {
+ expect(
+ MediaUtils.mediaURLToProxyConfig( 'https://i0.wp.com/test.com/media.jpg?w=100', 'test.com' )
+ ).to.eql( {
+ query: '?w=100',
+ filePath: '/media.jpg',
+ isRelativeToSiteRoot: true,
+ } );
+ } );
+
+ test( 'should not recognize non-photon wp.com URLs as ones relative to site URL', () => {
+ expect(
+ MediaUtils.mediaURLToProxyConfig(
+ 'https://bad.wp.com/test.com/media.jpg?w=100',
+ 'test.com'
+ )
+ ).to.eql( {
+ query: '?w=100',
+ filePath: '/test.com/media.jpg',
+ isRelativeToSiteRoot: false,
+ } );
+ } );
+
+ test( 'should recognize domain mismatch in photon URL', () => {
+ expect(
+ MediaUtils.mediaURLToProxyConfig(
+ 'https://i0.wp.com/test.com/media.jpg?w=100',
+ 'test2.com'
+ )
+ ).to.eql( {
+ query: '?w=100',
+ filePath: '/media.jpg',
+ isRelativeToSiteRoot: false,
+ } );
+ } );
+
+ test( 'should not consider URLs with non-http protocols as relative to domain root', () => {
+ expect(
+ MediaUtils.mediaURLToProxyConfig( 'blob://test.com/media.jpg?w=100', 'test.com' )
+ ).to.eql( {
+ query: '?w=100',
+ filePath: '/media.jpg',
+ isRelativeToSiteRoot: false,
+ } );
+ } );
+ } );
} );
diff --git a/client/lib/media/utils.js b/client/lib/media/utils.js
index <HASH>..<HASH> 100644
--- a/client/lib/media/utils.js
+++ b/client/lib/media/utils.js
@@ -632,18 +632,22 @@ export function validateMediaItem( site, item ) {
* @returns {object} Dictionary
*/
export function mediaURLToProxyConfig( mediaUrl, siteSlug ) {
- const { pathname, search: query, hostname } = getUrlParts( mediaUrl );
+ const { pathname, search: query, protocol, hostname } = getUrlParts( mediaUrl );
let filePath = pathname;
let isRelativeToSiteRoot = true;
- if (
- hostname !== siteSlug &&
- ( hostname.endsWith( 'wp.com' ) || hostname.endsWith( 'wordpress.com' ) )
- ) {
- const [ first, ...rest ] = filePath.substr( 1 ).split( '/' );
- filePath = '/' + rest.join( '/' );
- if ( first !== siteSlug ) {
- isRelativeToSiteRoot = false;
+ if ( [ 'http:', 'https:' ].indexOf( protocol ) === -1 ) {
+ isRelativeToSiteRoot = false;
+ } else if ( hostname !== siteSlug ) {
+ isRelativeToSiteRoot = false;
+ // CDN URLs like i0.wp.com/mysite.com/media.jpg should also be considered relative to mysite.com
+ if ( /^i[0-2]\.wp\.com$/.test( hostname ) ) {
+ const [ first, ...rest ] = filePath.substr( 1 ).split( '/' );
+ filePath = '/' + rest.join( '/' );
+
+ if ( first === siteSlug ) {
+ isRelativeToSiteRoot = true;
+ }
}
}
|
[Private files] Don't use proxy with blobs (#<I>)
* Don't use proxy with blob files
* A better fix - in mediaURLToProxyConfig
* Harden mediaURLToProxyConfig and add tests
* Make sure window.URL is available in tests
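A rough Python transliteration of the hardened logic, matching the cases the new tests cover (only i0–i2.wp.com count as Photon hosts, and non-HTTP schemes are never site-relative):

import re
from urllib.parse import urlparse

PHOTON_HOST = re.compile(r'^i[0-2]\.wp\.com$')

def media_url_to_proxy_config(media_url, site_slug):
    parts = urlparse(media_url)
    file_path, relative = parts.path, False
    if parts.scheme in ('http', 'https'):
        if parts.netloc == site_slug:
            relative = True
        elif PHOTON_HOST.match(parts.netloc):
            # i0.wp.com/mysite.com/media.jpg is relative to mysite.com.
            first, _, rest = parts.path.lstrip('/').partition('/')
            file_path = '/' + rest
            relative = (first == site_slug)
    query = '?' + parts.query if parts.query else ''
    return {'query': query, 'filePath': file_path, 'isRelativeToSiteRoot': relative}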
|
Automattic_wp-calypso
|
train
|
236d66b0401e4c59efe0b164ab01386890d687ca
|
diff --git a/lib/write_xlsx/format.rb b/lib/write_xlsx/format.rb
index <HASH>..<HASH> 100644
--- a/lib/write_xlsx/format.rb
+++ b/lib/write_xlsx/format.rb
@@ -506,7 +506,7 @@ module Writexlsx
def set_align(location)
return unless location # No default
- location.downcase!
+ location = location.downcase
set_text_h_align(1) if location == 'left'
set_text_h_align(2) if location == 'centre'
|
[JUNO-<I>] Get rid of decisely-write-xlsx repo
|
cxn03651_write_xlsx
|
train
|
88a1fbf68548aab5abc88d511784d274d73ea749
|
diff --git a/jax/interpreters/xla.py b/jax/interpreters/xla.py
index <HASH>..<HASH> 100644
--- a/jax/interpreters/xla.py
+++ b/jax/interpreters/xla.py
@@ -266,7 +266,15 @@ def _jaxpr_computation(jaxpr, backend, axis_env, const_vals, freevar_shapes, *ar
_map(write, jaxpr.invars, map(c.ParameterWithShape, arg_shapes))
_prefetch_jaxpr_literals(jaxpr)
for eqn in jaxpr.eqns:
- eqn.params.pop('backend', None)
+ # For nested jits, the outer jit sets the backend on all inner jits unless
+ # an inner-jit also has a conflicting explicit backend specification.
+ inner_backend = eqn.params.pop('backend', None)
+ if inner_backend and inner_backend != backend:
+ msg = (
+ "Explicit outer-jit backend specification {} must match"
+ "explicit inner-jit backend specification {}.")
+ raise ValueError(msg.format(backend, inner_backend))
+
if not eqn.restructure:
in_nodes = list(map(read, eqn.invars))
else:
|
Error on nested conflicting explicit jit backend specifications.
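The rule itself is tiny; a hedged sketch with an illustrative name (not JAX's API), mirroring the patch's condition with the message spacing fixed:

def check_inner_backend(outer, inner):
    # An inner jit may only restate the backend the outer jit already chose.
    if inner and inner != outer:
        raise ValueError(
            'Explicit outer-jit backend specification %r must match '
            'explicit inner-jit backend specification %r.' % (outer, inner))

check_inner_backend('gpu', 'gpu')  # fine
check_inner_backend('gpu', None)   # fine: inner inherits the outer backend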
|
tensorflow_probability
|
train
|
3f91adf7666b1e8e6841f8c1d82fda1aab6871d0
|
diff --git a/templates/index.html b/templates/index.html
index <HASH>..<HASH> 100644
--- a/templates/index.html
+++ b/templates/index.html
@@ -34,6 +34,12 @@
white-space: pre;
}
+ iframe {
+ width: 100%;
+ height: auto;
+ border: 0;
+ }
+
</style>
</head>
<body>
diff --git a/templates/js/editor.js b/templates/js/editor.js
index <HASH>..<HASH> 100644
--- a/templates/js/editor.js
+++ b/templates/js/editor.js
@@ -35,6 +35,7 @@ var editorService = (function ($) {
codeEditor = ace.edit('jsNewCode');
cssEditor = ace.edit('jsNewCss');
+ cssEditor.setValue('#snippet { \n \n}');
codeEditor.setTheme('ace/theme/monokai');
cssEditor.setTheme('ace/theme/monokai');
diff --git a/templates/js/main.js b/templates/js/main.js
index <HASH>..<HASH> 100644
--- a/templates/js/main.js
+++ b/templates/js/main.js
@@ -48,7 +48,7 @@
onmove: function (e) {
var target = e.target;
- if ( e.rect.width > 319 ) {
+ if ( e.rect.width > 150 ) {
target.style.width = e.rect.width + 'px';
$sizeIndicator.text( e.rect.width + "px" );
}
diff --git a/templates/js/snippetActions.js b/templates/js/snippetActions.js
index <HASH>..<HASH> 100644
--- a/templates/js/snippetActions.js
+++ b/templates/js/snippetActions.js
@@ -1,11 +1,17 @@
var snippetActions = (function ($, snippetService, iframesService, editorService, viewService) {
var module = {};
- var appendIframeContent = function ( frameId, template, content ) {
+ var appendIframeContent = function ( frameId, template, content, css ) {
+ var frame = $(frameId).contents(),
+ frameHTML;
+
if ( template ) {
- $(frameId).contents().find('html').html(template);
+ frameHTML = frame.find('html').get(0);
+ frameHTML.innerHTML = template;
}
- $(frameId).contents().find('#snippet').html(content);
+
+ frame.find('style').append(css);
+ frame.find('#snippet').html(content);
};
var submitSnippet = function ( data ) {
@@ -85,8 +91,8 @@ var snippetActions = (function ($, snippetService, iframesService, editorService
snippetContents = snippetContainer.find('iframe');
- appendIframeContent(snippetContents, null, snippet.code);
- snippetContents.load($.proxy(appendIframeContent, null, snippetContents, null, snippet.code));
+ appendIframeContent(snippetContents, null, snippet.code, snippet.inlineCss);
+ snippetContents.load($.proxy(appendIframeContent, null, snippetContents, null, snippet.code, snippet.inlineCss));
} else {
console.log(snippet);
}
@@ -260,8 +266,8 @@ var snippetActions = (function ($, snippetService, iframesService, editorService
snippetIframe = $('#' + snippetId);
- appendIframeContent(snippetIframe, template, currentCode);
- snippetIframe.load($.proxy(appendIframeContent, null, snippetIframe, template, currentCode));
+ appendIframeContent(snippetIframe, template, currentCode, snippets[index].inlineCss);
+ snippetIframe.load($.proxy(appendIframeContent, null, snippetIframe, template, currentCode, snippets[index].inlineCss));
currentSnippetElement.find('.js-edit-snippet').submit(snippetActions.editSnippet);
}
|
Solved an issue where body and head tags were stripped when inserting the iframe template.
Made the iframe somewhat resizable.
|
devbridge_Styleguide
|
train
|
50bc15f87a78d5f336b2682195409682c75c9b11
|
diff --git a/src/Illuminate/Foundation/Bootstrap/LoadConfiguration.php b/src/Illuminate/Foundation/Bootstrap/LoadConfiguration.php
index <HASH>..<HASH> 100644
--- a/src/Illuminate/Foundation/Bootstrap/LoadConfiguration.php
+++ b/src/Illuminate/Foundation/Bootstrap/LoadConfiguration.php
@@ -42,9 +42,7 @@ class LoadConfiguration
// Finally, we will set the application's environment based on the configuration
// values that were loaded. We will pass a callback which will be used to get
// the environment in a web context where an "--env" switch is not present.
- $app->detectEnvironment(function () use ($config) {
- return $config->get('app.env', 'production');
- });
+ $app->detectEnvironment(fn () => $config->get('app.env', 'production'));
date_default_timezone_set($config->get('app.timezone', 'UTC'));
diff --git a/src/Illuminate/Routing/Route.php b/src/Illuminate/Routing/Route.php
index <HASH>..<HASH> 100755
--- a/src/Illuminate/Routing/Route.php
+++ b/src/Illuminate/Routing/Route.php
@@ -479,9 +479,7 @@ class Route
*/
public function parametersWithoutNulls()
{
- return array_filter($this->parameters(), function ($p) {
- return ! is_null($p);
- });
+ return array_filter($this->parameters(), fn ($p) => ! is_null($p));
}
/**
@@ -507,9 +505,7 @@ class Route
{
preg_match_all('/\{(.*?)\}/', $this->getDomain().$this->uri, $matches);
- return array_map(function ($m) {
- return trim($m, '?');
- }, $matches[1]);
+ return array_map(fn ($m) => trim($m, '?'), $matches[1]);
}
/**
|
Convert closures to arrow functions (#<I>)
* Convert closures to arrow functions
* Convert closures to arrow functions
fix style
|
laravel_framework
|
train
|
0c122312aa9c1b82bdcf122fcc522533974a8281
|
diff --git a/pom.xml b/pom.xml
index <HASH>..<HASH> 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,7 +5,7 @@
<name>User-Agent-Utils</name>
<url>http://java.net/projects/user-agent-utils</url>
<artifactId>UserAgentUtils</artifactId>
- <version>1.2.3</version>
+ <version>1.2.4</version>
<description>Utility classes to handle user-agents.</description>
<licenses>
<license>
diff --git a/src/main/java/nl/bitwalker/useragentutils/OperatingSystem.java b/src/main/java/nl/bitwalker/useragentutils/OperatingSystem.java
index <HASH>..<HASH> 100644
--- a/src/main/java/nl/bitwalker/useragentutils/OperatingSystem.java
+++ b/src/main/java/nl/bitwalker/useragentutils/OperatingSystem.java
@@ -61,6 +61,7 @@ public enum OperatingSystem {
WINDOWS_98( Manufacturer.MICROSOFT,OperatingSystem.WINDOWS,5, "Windows 98", new String[] { "Windows 98","Win98" }, new String[] { "Palm" }, DeviceType.COMPUTER, null ), // before Win
ANDROID( Manufacturer.GOOGLE,null, 0, "Android", new String[] { "Android" }, null, DeviceType.MOBILE, null ),
+ ANDROID3_TABLET(Manufacturer.GOOGLE,OperatingSystem.ANDROID, 30, "Android 3.x Tablet", new String[] { "Android 3" }, null, DeviceType.TABLET, null ), // as long as there are no Android 3.x phones this should be enough
ANDROID2( Manufacturer.GOOGLE,OperatingSystem.ANDROID, 2, "Android 2.x", new String[] { "Android 2" }, null, DeviceType.MOBILE, null ),
ANDROID2_TABLET(Manufacturer.GOOGLE,OperatingSystem.ANDROID2, 20, "Android 2.x Tablet", new String[] { "GT-P1000","SCH-I800" }, null, DeviceType.TABLET, null ),
ANDROID1( Manufacturer.GOOGLE,OperatingSystem.ANDROID, 1, "Android 1.x", new String[] { "Android 1" }, null, DeviceType.MOBILE, null ),
diff --git a/src/test/java/nl/bitwalker/useragentutils/OperatingSystemTest.java b/src/test/java/nl/bitwalker/useragentutils/OperatingSystemTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/nl/bitwalker/useragentutils/OperatingSystemTest.java
+++ b/src/test/java/nl/bitwalker/useragentutils/OperatingSystemTest.java
@@ -55,7 +55,8 @@ public class OperatingSystemTest {
String[] tablets = {
"Mozilla/5.0 (Linux; U; Android 2.2; es-es; GT-P1000 Build/FROYO) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
"Mozilla/5.0 (Linux; U; Android 2.2; en-us; SCH-I800 Build/FROYO) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
- "Mozilla/5.0 (iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Mobile/7D11"
+ "Mozilla/5.0 (iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Mobile/7D11",
+ "Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13" // dropped the mobile part, so Android without mobile should be a tablet!
};
String[] gameconsoles = {
|
Added Android 3.x tablet detection.
|
HaraldWalker_user-agent-utils
|
train
|
c708b9109e209c36f2844969641fc7da505e95d4
|
diff --git a/rapidoid-demo/src/main/java/org/rapidoid/demo/taskplanner/Main.java b/rapidoid-demo/src/main/java/org/rapidoid/demo/taskplanner/Main.java
index <HASH>..<HASH> 100644
--- a/rapidoid-demo/src/main/java/org/rapidoid/demo/taskplanner/Main.java
+++ b/rapidoid-demo/src/main/java/org/rapidoid/demo/taskplanner/Main.java
@@ -23,6 +23,7 @@ package org.rapidoid.demo.taskplanner;
import org.rapidoid.annotation.Authors;
import org.rapidoid.annotation.Since;
import org.rapidoid.app.Apps;
+import org.rapidoid.db.DB;
@Authors("Nikolche Mihajlovski")
@Since("2.0.0")
@@ -30,6 +31,10 @@ public class Main {
public static void main(String[] args) {
Apps.run("oauth-no-state");
+ DB.clear();
+ for (int i = 0; i < 100; i++) {
+ DB.init("task title=abc%s, rating=123", i);
+ }
}
}
diff --git a/rapidoid-demo/src/main/java/org/rapidoid/demo/taskplanner/gui/HomeScreen.java b/rapidoid-demo/src/main/java/org/rapidoid/demo/taskplanner/gui/HomeScreen.java
index <HASH>..<HASH> 100644
--- a/rapidoid-demo/src/main/java/org/rapidoid/demo/taskplanner/gui/HomeScreen.java
+++ b/rapidoid-demo/src/main/java/org/rapidoid/demo/taskplanner/gui/HomeScreen.java
@@ -3,6 +3,7 @@ package org.rapidoid.demo.taskplanner.gui;
import org.rapidoid.annotation.Authors;
import org.rapidoid.annotation.Since;
import org.rapidoid.app.Screen;
+import org.rapidoid.html.Tag;
/*
* #%L
@@ -30,6 +31,10 @@ public class HomeScreen extends Screen {
public String title = "Welcome!";
- public Object content = row(h2("Welcome to Task Planer!"));
+ private Tag item = row(p("{{it.id}}: {{it.title}} {{it.rating}}"));
+
+ private Tag foreach = item.attr("ng-repeat", "it in items");
+
+ public Object content = stream(foreach).dataUrl("/task/page/{{page}}");
}
diff --git a/rapidoid-demo/src/main/java/org/rapidoid/demo/taskplanner/model/Task.java b/rapidoid-demo/src/main/java/org/rapidoid/demo/taskplanner/model/Task.java
index <HASH>..<HASH> 100644
--- a/rapidoid-demo/src/main/java/org/rapidoid/demo/taskplanner/model/Task.java
+++ b/rapidoid-demo/src/main/java/org/rapidoid/demo/taskplanner/model/Task.java
@@ -40,10 +40,10 @@ import org.rapidoid.util.CommonRoles;
@Scaffold
@SuppressWarnings("serial")
-@CanRead(CommonRoles.LOGGED_IN)
-@CanChange({ CommonRoles.OWNER })
-@CanInsert(CommonRoles.LOGGED_IN)
-@CanDelete({ CommonRoles.OWNER, CommonRoles.ADMIN })
+//@CanRead(CommonRoles.LOGGED_IN)
+//@CanChange({ CommonRoles.OWNER })
+//@CanInsert(CommonRoles.LOGGED_IN)
+//@CanDelete({ CommonRoles.OWNER, CommonRoles.ADMIN })
@DbEntity
@Authors("Nikolche Mihajlovski")
@Since("2.0.0")
|
Added stream showcase in the Task Planner demo.
|
rapidoid_rapidoid
|
train
|
555e64a5c9b3c1bf137741960aafbca4d358a8af
|
diff --git a/lib/gemsmith/cli.rb b/lib/gemsmith/cli.rb
index <HASH>..<HASH> 100644
--- a/lib/gemsmith/cli.rb
+++ b/lib/gemsmith/cli.rb
@@ -56,6 +56,7 @@ module Gemsmith
def initialize args = [], options = {}, config = {}
super args, options, config
@configuration = Configuration.new
+ @spec_aid = Aids::Spec.new
end
desc "-c, [create=CREATE]", "Create new gem."
@@ -97,7 +98,7 @@ module Gemsmith
desc "-e, [--edit]", "Edit #{Gemsmith::Identity.label} settings in default editor."
map %w(-e --edit) => :edit
def edit
- `#{editor} #{configuration.file_path}`
+ `#{spec_aid.editor} #{configuration.file_path}`
end
desc "-v, [--version]", "Show #{Gemsmith::Identity.label} version."
@@ -114,7 +115,7 @@ module Gemsmith
private
- attr_reader :configuration
+ attr_reader :configuration, :spec_aid
def setup_configuration name, options
gem = Aids::Gem.new name
diff --git a/lib/gemsmith/cli_helpers.rb b/lib/gemsmith/cli_helpers.rb
index <HASH>..<HASH> 100644
--- a/lib/gemsmith/cli_helpers.rb
+++ b/lib/gemsmith/cli_helpers.rb
@@ -1,10 +1,6 @@
module Gemsmith
# Command Line Interface (CLI) helpers for the CLI class.
module CLIHelpers
- def editor
- ENV["EDITOR"]
- end
-
def gem_name
configuration.gem_name
end
@@ -16,44 +12,33 @@ module Gemsmith
module_function
def print_gems gems
- say "Multiple versions found:"
- gems.each.with_index do |spec, index|
- say "#{index + 1}. #{spec.name} #{spec.version.version}"
- end
+ say "Multiple versions found:\n\n"
+ gems.each.with_index { |spec, index| say "#{index + 1}. #{spec.name} #{spec.version.version}" }
+ say "q. Quit.\n\n"
end
def pick_gem gems, name
- result = ask "Please pick one (or type 'q' to quit):"
-
- return if result == "q" # Exit early.
+ answer = ask "Enter selection:"
+ return if answer == "q"
- if (1..gems.size).include?(result.to_i)
- Gem::Specification.find_by_name name, gems[result.to_i - 1].version.version
+ if (1..gems.size).include?(answer.to_i)
+ spec_aid.find name, gems[answer.to_i - 1].version.version
else
- error "Invalid option: #{result}"
- nil
+ error "Invalid option: #{answer}"
end
end
- def open_gem spec
- `#{editor} #{spec.full_gem_path}` if spec
- end
-
- def read_gem spec
- `open #{spec.homepage}` if spec
- end
-
- def process_gem name, command
- specs = Gem::Specification.find_all_by_name name
+ def process_gem name, method
+ specs = spec_aid.find_all name
case
when specs.size == 1
- send "#{command}_gem", specs.first
+ spec_aid.send method, specs.first
when specs.size > 1
print_gems specs
- send "#{command}_gem", pick_gem(specs, name)
+ spec_aid.send method, pick_gem(specs, name)
else
- say "Unable to find gem: #{name}"
+ error "Unable to find gem: #{name}."
end
end
end
|
Updated CLI to use gem spec aid.
- Makes use of the new spec object for processing of
gem specs.
|
bkuhlmann_gemsmith
|
train
|
ac1a363c6ed889d11e8fabd6dd69a8a6df9e3cfd
|
diff --git a/actionpack/test/controller/resources_test.rb b/actionpack/test/controller/resources_test.rb
index <HASH>..<HASH> 100644
--- a/actionpack/test/controller/resources_test.rb
+++ b/actionpack/test/controller/resources_test.rb
@@ -532,7 +532,7 @@ class ResourcesTest < ActionController::TestCase
routes.each do |route|
routes.each do |r|
next if route === r # skip the comparison instance
- assert distinct_routes?(route, r), "Duplicate Route: #{route}"
+ assert_not_equal route.conditions, r.conditions
end
end
end
@@ -1351,8 +1351,4 @@ class ResourcesTest < ActionController::TestCase
assert_recognizes(expected_options, path)
end
end
-
- def distinct_routes? (r1, r2)
- assert_not_equal r1.conditions, r2.conditions
- end
end
|
Pull up a method we only use once.
|
rails_rails
|
train
|
c4e4c00cfa214603e3271214de1eb1ad5e5c3559
|
diff --git a/lib/util/logger.js b/lib/util/logger.js
index <HASH>..<HASH> 100644
--- a/lib/util/logger.js
+++ b/lib/util/logger.js
@@ -33,9 +33,9 @@ function createLogger () {
'timestamp': true,
level: logConfig.level || 'error'
}))
- var postfix = '.master.log'
+ var postfix = '.master.' + config.tokens.spm + '.log'
if (!cluster.isMaster)
- postfix = '.worker-' + cluster.worker.id + '.log'
+ postfix = '.worker-' + cluster.worker.id + '.' + config.tokens.spm + '.log'
if (logConfig.filename) {
loggers.push(new (winston.transports.File)({
|
using the token in logfile names in case of a global log file directory for multiple apps
|
sematext_spm-agent
|
train
|