hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
466ae7e500a7d9f5ea593a817a4b3379f38c7d66
|
diff --git a/rocketchat_API/APISections/im.py b/rocketchat_API/APISections/im.py
index <HASH>..<HASH> 100644
--- a/rocketchat_API/APISections/im.py
+++ b/rocketchat_API/APISections/im.py
@@ -19,6 +19,10 @@ class RocketChatIM(RocketChatBase):
"""Create a direct message session with another user."""
return self.call_api_post("im.create", username=username, kwargs=kwargs)
+ def im_create_multiple(self, usernames, **kwargs):
+ """Create a direct message session with one or more users."""
+ return self.call_api_post("im.create", usernames=usernames, kwargs=kwargs)
+
def im_open(self, room_id, **kwargs):
"""Adds the direct message back to the user’s list of direct messages."""
return self.call_api_post("im.open", roomId=room_id, kwargs=kwargs)
diff --git a/tests/test_ims.py b/tests/test_ims.py
index <HASH>..<HASH> 100644
--- a/tests/test_ims.py
+++ b/tests/test_ims.py
@@ -1,3 +1,4 @@
+import email
import pytest
from rocketchat_API.APIExceptions.RocketExceptions import RocketMissingParamException
@@ -9,6 +10,25 @@ def recipient_user(create_user):
return _recipient.name
+@pytest.fixture(scope="module")
+def recipient_user3(create_user):
+ _recipient = create_user(name="user3", email="anotherone@domain.com")
+
+ return _recipient.name
+
+@pytest.fixture(scope="module")
+def recipient_user2(create_user):
+ _recipient = create_user(name="user2", email="another@domain.com")
+
+ return _recipient.name
+
+def test_im_create_multiple(logged_rocket, recipient_user3, recipient_user2):
+
+ im_create = logged_rocket.im_create_multiple(recipient_user3 + "," + recipient_user2).json()
+ assert im_create.get("success")
+ room_id = im_create.get("room").get("_id")
+ im_send = logged_rocket.chat_post_message(room_id=room_id, text="Das ist eine Testnachricht").json()
+ assert im_send.get("success")
def test_im_create(logged_rocket, recipient_user):
im_create = logged_rocket.im_create(recipient_user).json()
|
Added rocketChat endpoint to support multiple instant messaging
|
jadolg_rocketchat_API
|
train
|
e096f4348763cdf6bc31ef84db238ff3d58a42b3
|
diff --git a/src/io/deviceManager.js b/src/io/deviceManager.js
index <HASH>..<HASH> 100644
--- a/src/io/deviceManager.js
+++ b/src/io/deviceManager.js
@@ -20,9 +20,40 @@ class DeviceOpener {
* @param {function} reject - callback to be called if an error or timeout is encountered.
*/
constructor (deviceManager, resolve, reject) {
+ /**
+ * The DeviceManager client which wants to open a device.
+ * @type {DeviceManager}
+ * @private
+ */
this._deviceManager = deviceManager;
+
+ /**
+ * Callback to be called if the device is successfully found, connected, and opened.
+ * @type {Function}
+ * @private
+ */
this._resolve = resolve;
+
+ /**
+ * Callback to be called if an error or timeout is encountered.
+ * @type {Function}
+ * @private
+ */
this._reject = reject;
+
+ /**
+ * The socket for the device being opened.
+ * @type {Socket}
+ * @private
+ */
+ this._socket = null;
+
+ /**
+ * If this timeout expires before a successful connection, the connection attempt will be canceled.
+ * @type {Object}
+ * @private
+ */
+ this._connectionTimeout = null;
}
/**
@@ -34,8 +65,7 @@ class DeviceOpener {
* @param {string} deviceId - the ID of the particular device to open, usually from list results
*/
open (extensionName, deviceType, deviceId) {
- this._socket = /** @type {Socket} */ io(`${this._deviceManager._serverURL}/${deviceType}`);
- this._deviceManager._sockets.push(this._socket);
+ this._socket = io(`${this._deviceManager._serverURL}/${deviceType}`);
this._socket.on('deviceWasOpened', () => this.onDeviceWasOpened());
this._socket.on('disconnect', () => this.onDisconnect());
@@ -86,16 +116,6 @@ class DeviceOpener {
this._connectionTimeout = null;
}
}
-
- /**
- * Remove the socket we were using for a now-failed connection attempt.
- */
- removeSocket () {
- const socketIndex = this._deviceManager._sockets.indexOf(this._socket);
- if (socketIndex >= 0) {
- this._deviceManager._sockets.splice(socketIndex, 1);
- }
- }
}
/**
@@ -115,16 +135,57 @@ class DeviceFinder {
* Construct a DeviceFinder to help find and connect to a device satisfying specific conditions.
* @param {DeviceManager} deviceManager - the Device Manager client which instigated this action.
* @param {string} extensionName - human-readable name of the extension requesting the search
- * @param {string} deviceType - the type of device to list, such as 'wedo2'
+ * @param {string} deviceType - the type of device to find, such as 'wedo2'.
* @param {object} [deviceSpec] - optional additional information about the specific devices to list
*/
constructor (deviceManager, extensionName, deviceType, deviceSpec) {
+ /**
+ * The Device Manager client which wants to find a device.
+ * @type {DeviceManager}
+ * @private
+ */
this._deviceManager = deviceManager;
+
+ /**
+ * The human-readable name of the extension requesting the search.
+ * @type {string}
+ * @private
+ */
this._extensionName = extensionName;
+
+ /**
+ * The type of device to find, such as 'wedo2'.
+ * @type {string}
+ * @private
+ */
this._deviceType = deviceType;
+
+ /**
+ * Optional additional information about the specific devices to list.
+ * @type {Object}
+ * @private
+ */
this._deviceSpec = deviceSpec;
+
+ /**
+ * Flag indicating that the search should be canceled.
+ * @type {boolean}
+ * @private
+ */
this._cancel = false;
+
+ /**
+ * The promise representing this search's results.
+ * @type {Promise}
+ * @private
+ */
this._promise = null;
+
+ /**
+ * The fulfillment function for `this._promise`.
+ * @type {Function}
+ * @private
+ */
this._fulfill = null;
}
@@ -202,9 +263,19 @@ class DeviceManager {
}
constructor () {
+ /**
+ * The URL this client will use for Device Manager communication both HTTP(S) and WS(S).
+ * @type {string}
+ * @private
+ */
this._serverURL = DeviceManager.DEFAULT_SERVER_URL;
+
+ /**
+ * True if there is no known problem connecting to the Scratch Device Manager, false otherwise.
+ * @type {boolean}
+ * @private
+ */
this._isConnected = true;
- this._sockets = [];
}
/**
|
DeviceManager: add more jsdoc, remove `_sockets`
It turns out we don't need the Device Manager to centrally track each
socket, so this change removes the Device Manager's `_sockets` property
and related code.
|
LLK_scratch-vm
|
train
|
41ab05781bfc4362e92b463ad250aab6684b37b4
|
diff --git a/pyemma/msm/estimators/implied_timescales.py b/pyemma/msm/estimators/implied_timescales.py
index <HASH>..<HASH> 100644
--- a/pyemma/msm/estimators/implied_timescales.py
+++ b/pyemma/msm/estimators/implied_timescales.py
@@ -32,7 +32,7 @@ from pyemma.util.statistics import confidence_interval
from pyemma.util import types as _types
from pyemma._base.estimator import Estimator, get_estimator, param_grid, estimate_param_scan
from pyemma._base.progress import ProgressReporter
-from pyemma._base.model import SampledModel, Model
+from pyemma._base.model import SampledModel
__docformat__ = "restructuredtext en"
@@ -206,8 +206,6 @@ class ImpliedTimescales(Estimator, ProgressReporter):
self.logger.warning('Some timescales could not be computed. Timescales array is smaller than '
'expected or contains NaNs')
- return self
-
@property
def lagtimes(self):
r"""Return the list of lag times for which timescales were computed.
diff --git a/pyemma/msm/estimators/lagged_model_validators.py b/pyemma/msm/estimators/lagged_model_validators.py
index <HASH>..<HASH> 100644
--- a/pyemma/msm/estimators/lagged_model_validators.py
+++ b/pyemma/msm/estimators/lagged_model_validators.py
@@ -175,8 +175,6 @@ class LaggedModelValidator(Estimator, ProgressReporter):
self._est_L = None
self._est_R = None
- return self
-
@property
def lagtimes(self):
return self._lags
|
[msm/estimators] do not return self as model
|
markovmodel_PyEMMA
|
train
|
e729eb2f042543b264c19d60451566940488ba5c
|
diff --git a/Gruntfile.js b/Gruntfile.js
index <HASH>..<HASH> 100644
--- a/Gruntfile.js
+++ b/Gruntfile.js
@@ -211,10 +211,15 @@ module.exports = function (grunt) {
return 'http://localhost:<%= connect.server.options.port %>/test/fuelux.html?jquery=' + ver;
}),
uglify: {
+ options: {
+ report: 'min'
+ },
fuelux: {
- files: {
- 'dist/js/fuelux.min.js': ['dist/js/fuelux.js']
- }
+ options: {
+ banner: '<%= banner %>'
+ },
+ src: 'dist/js/<%= pkg.name %>.js',
+ dest: 'dist/js/<%= pkg.name %>.min.js'
}
},
usebanner: {
@@ -250,8 +255,8 @@ module.exports = function (grunt) {
grunt.registerTask('saucelabs', ['connect', 'jshint', 'saucelabs-qunit']);
//Style tasks
- grunt.registerTask('quickcss', ['less:dist', 'usebanner']);
- grunt.registerTask('fullcss', ['quickcss', 'less:minify']);
+ grunt.registerTask('quickcss', ['less', 'usebanner']);
+ grunt.registerTask('fullcss', ['quickcss']); /* Remove */
//Serve task
grunt.registerTask('serve', ['quicktest', 'quickcss', 'copy:fonts', 'concat', 'uglify', 'jsbeautifier', 'connect', 'watch']);
|
Cleaning up Less CSS minify and JS uglify
We should probably remove fullcss task. Usebanner task must be after less:minify subtask
|
ExactTarget_fuelux
|
train
|
48d5f902331f4883b8ade984628bc1f7cf55fe28
|
diff --git a/BimServer/src/org/bimserver/database/DatabaseSession.java b/BimServer/src/org/bimserver/database/DatabaseSession.java
index <HASH>..<HASH> 100644
--- a/BimServer/src/org/bimserver/database/DatabaseSession.java
+++ b/BimServer/src/org/bimserver/database/DatabaseSession.java
@@ -1496,7 +1496,7 @@ public class DatabaseSession implements LazyLoader, OidProvider {
buffer.putShort(cid);
IdEObject idEObject = (IdEObject) value;
if (idEObject.getOid() == -1) {
- ((IdEObjectImpl)idEObject).setOid(newOid());
+ ((IdEObjectImpl)idEObject).setOid(newOid(object.eClass()));
((IdEObjectImpl)idEObject).setPid(object.getPid());
((IdEObjectImpl)idEObject).setRid(object.getRid());
}
|
Merge conflict with myself ?!?
|
opensourceBIM_BIMserver
|
train
|
dce35f31d8f1f29b00b468c0c6df27bd122d214f
|
diff --git a/core/src/main/java/hudson/model/DownloadService.java b/core/src/main/java/hudson/model/DownloadService.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/hudson/model/DownloadService.java
+++ b/core/src/main/java/hudson/model/DownloadService.java
@@ -403,7 +403,7 @@ public class DownloadService extends PageDecorator {
jsonString = loadJSONHTML(new URL(site + ".html?id=" + URLEncoder.encode(getId(), "UTF-8") + "&version=" + URLEncoder.encode(Jenkins.VERSION, "UTF-8")));
toolInstallerMetadataExists = true;
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Could not load json from " + site, e );
+ LOGGER.log(Level.FINE, "Could not load json from " + site, e );
continue;
}
JSONObject o = JSONObject.fromObject(jsonString);
|
[FIXED JENKINS-<I>] do not show warnings if tool installer is present in at least 1 update site (#<I>)
|
jenkinsci_jenkins
|
train
|
5f2bdab8a1f312715b0cf3f78d1634210c087146
|
diff --git a/bel_lang/ast.py b/bel_lang/ast.py
index <HASH>..<HASH> 100644
--- a/bel_lang/ast.py
+++ b/bel_lang/ast.py
@@ -55,10 +55,16 @@ class BELAst(object):
else:
bel_relation = self.spec['relation_to_long'].get(self.bel_relation, None)
+ bel_subject = self.bel_subject.to_string(fmt)
+
+ if isinstance(self.bel_object, (BELAst)):
+ bel_object = f'({self.bel_object.to_string(fmt)})'
+ else:
+ bel_object = self.bel_object.to_string(fmt)
return {
- 'subject': self.bel_subject.to_string(fmt),
+ 'subject': bel_subject,
'relation': bel_relation,
- 'object': self.bel_object.to_string(fmt),
+ 'object': bel_object,
}
elif self.bel_subject:
|
fixed to_components for nested statement
|
belbio_bel
|
train
|
98e8b006ada72ed915da41919d695e16a1db571a
|
diff --git a/cipd/appengine/backend/main.go b/cipd/appengine/backend/main.go
index <HASH>..<HASH> 100644
--- a/cipd/appengine/backend/main.go
+++ b/cipd/appengine/backend/main.go
@@ -28,6 +28,9 @@ import (
"go.chromium.org/luci/cipd/appengine/impl"
"go.chromium.org/luci/cipd/appengine/impl/monitoring"
+
+ // Using transactional datastore TQ tasks.
+ _ "go.chromium.org/luci/server/tq/txn/datastore"
)
func main() {
diff --git a/cipd/appengine/frontend/main.go b/cipd/appengine/frontend/main.go
index <HASH>..<HASH> 100644
--- a/cipd/appengine/frontend/main.go
+++ b/cipd/appengine/frontend/main.go
@@ -32,6 +32,8 @@ import (
// Using datastore for user sessions.
_ "go.chromium.org/luci/server/encryptedcookies/session/datastore"
+ // Using transactional datastore TQ tasks.
+ _ "go.chromium.org/luci/server/tq/txn/datastore"
)
func main() {
diff --git a/cipd/appengine/impl/cas/cas.go b/cipd/appengine/impl/cas/cas.go
index <HASH>..<HASH> 100644
--- a/cipd/appengine/impl/cas/cas.go
+++ b/cipd/appengine/impl/cas/cas.go
@@ -42,9 +42,6 @@ import (
"go.chromium.org/luci/cipd/appengine/impl/monitoring"
"go.chromium.org/luci/cipd/appengine/impl/settings"
"go.chromium.org/luci/cipd/common"
-
- // Using transactional datastore TQ tasks.
- _ "go.chromium.org/luci/server/tq/txn/datastore"
)
// readBufferSize is size of a buffer used to read Google Storage files.
diff --git a/cipd/appengine/impl/cas/cas_test.go b/cipd/appengine/impl/cas/cas_test.go
index <HASH>..<HASH> 100644
--- a/cipd/appengine/impl/cas/cas_test.go
+++ b/cipd/appengine/impl/cas/cas_test.go
@@ -44,6 +44,9 @@ import (
. "github.com/smartystreets/goconvey/convey"
. "go.chromium.org/luci/common/testing/assertions"
+
+ // Using transactional datastore TQ tasks.
+ _ "go.chromium.org/luci/server/tq/txn/datastore"
)
func TestGetReader(t *testing.T) {
diff --git a/cipd/appengine/impl/repo/repo_test.go b/cipd/appengine/impl/repo/repo_test.go
index <HASH>..<HASH> 100644
--- a/cipd/appengine/impl/repo/repo_test.go
+++ b/cipd/appengine/impl/repo/repo_test.go
@@ -47,8 +47,10 @@ import (
"go.chromium.org/luci/cipd/common"
. "github.com/smartystreets/goconvey/convey"
-
. "go.chromium.org/luci/common/testing/assertions"
+
+ // Using transactional datastore TQ tasks.
+ _ "go.chromium.org/luci/server/tq/txn/datastore"
)
////////////////////////////////////////////////////////////////////////////////
|
[cipd] import the server/tq/txn/datastore in main.go only.
This is for consistency with other apps in luci-go and google Go style guide.
R=vadimsh
Bug: <I>
Change-Id: If<I>c<I>c8c1a3bc<I>e<I>b<I>dded8d<I>e<I>e
Reviewed-on: <URL>
|
luci_luci-go
|
train
|
943b6816a95e6345c33d9202b8b5f485e7b3be0a
|
diff --git a/lib/transforms/bundleSystemJs.js b/lib/transforms/bundleSystemJs.js
index <HASH>..<HASH> 100644
--- a/lib/transforms/bundleSystemJs.js
+++ b/lib/transforms/bundleSystemJs.js
@@ -314,6 +314,39 @@ function bundleStrategy(builder, entryPoints, options) {
delete conditionalEnv[condition];
});
+ // generate conditional combinations recursively (for general case of arbitrarily many conditional combinations)
+ function generateVariationsOverConditions(conditionList) {
+ var curCondition = conditionList[0];
+
+ if (!curCondition)
+ return [];
+
+ // get combinations from the n - 1th dimension
+ var nextVariations = generateVariationsOverConditions(conditionList.slice(1));
+
+ var variations = [];
+
+ if (!nextVariations.length)
+ conditionalEnv[curCondition].forEach(function(curConditionValue) {
+ var variationConditions = {};
+ variationConditions[curCondition] = curConditionValue;
+ variations.push(variationConditions);
+ });
+
+ // multiply the combinations of the n - 1 dimention by the cominations of this nth dimension
+ nextVariations.forEach(function(nextVariation) {
+ conditionalEnv[curCondition].forEach(function(curConditionValue) {
+ var variationConditions = Object.assign({}, nextVariation);
+ variationConditions[curCondition] = curConditionValue;
+ variations.push(variationConditions);
+ });
+ });
+
+ return variations;
+ }
+
+ var conditionsCombinations = generateVariationsOverConditions(Object.keys(conditionalEnv));
+
var conditionsCombinations = [];
Object.keys(conditionalEnv).forEach(function (condition) {
conditionalEnv[condition].forEach(function (conditionValue) {
|
ensure all conditional combinations are computed in general
|
assetgraph_assetgraph
|
train
|
0e87688ba96f784f6f7ce54f080cbecc1e5e7b94
|
diff --git a/rest_framework_json_api/utils.py b/rest_framework_json_api/utils.py
index <HASH>..<HASH> 100644
--- a/rest_framework_json_api/utils.py
+++ b/rest_framework_json_api/utils.py
@@ -2,6 +2,7 @@
Utils.
"""
import copy
+
import inflection
from django.conf import settings
from django.utils import six, encoding
@@ -421,7 +422,7 @@ def extract_included(fields, resource, resource_instance, included_resources):
continue
try:
- included_resources.remove(field_name)
+ included_resources.copy().remove(field_name)
except ValueError:
# Skip fields not in requested included resources
continue
|
Copy included_resources before removing items from it
Fixes #<I>
|
django-json-api_django-rest-framework-json-api
|
train
|
6335eafebeb9771a900adc3d8494d8ffcbe81b35
|
diff --git a/botstory/ast/callable.py b/botstory/ast/callable.py
index <HASH>..<HASH> 100644
--- a/botstory/ast/callable.py
+++ b/botstory/ast/callable.py
@@ -32,7 +32,7 @@ class CallableNodeWrapper:
self.processor_instance.process_story(session,
# we don't have message yet
message=None,
- compiled_story=self.ast_node.compiled_story,
+ compiled_story=self.ast_node,
idx=0,
story_args=args,
story_kwargs=kwargs)
@@ -53,7 +53,7 @@ class CallableStoriesAPI:
)
self.library.add_callable(compiled_story)
return CallableNodeWrapper(
- compiled_story['parts'],
+ compiled_story,
self.processor_instance
).startpoint
diff --git a/botstory/ast/common.py b/botstory/ast/common.py
index <HASH>..<HASH> 100644
--- a/botstory/ast/common.py
+++ b/botstory/ast/common.py
@@ -11,7 +11,7 @@ class CommonStoriesAPI:
compiled_story = self.parser_instance.compile(
one_story,
)
- compiled_story['validator'] = matchers.get_validator(receive)
+ compiled_story.extensions['validator'] = matchers.get_validator(receive)
self.library.add_message_handler(compiled_story)
return one_story
diff --git a/botstory/ast/library.py b/botstory/ast/library.py
index <HASH>..<HASH> 100644
--- a/botstory/ast/library.py
+++ b/botstory/ast/library.py
@@ -17,11 +17,12 @@ class StoriesLibrary:
self.callable_stories.append(story)
def get_callable_by_topic(self, topic):
- return [s for s in self.callable_stories if s['topic'] == topic][0]
+ return [s for s in self.callable_stories if s.topic == topic][0]
def get_right_story(self, message):
- matched_stories = [task for task in self.message_handling_stories if task['validator'].validate(message)]
+ matched_stories = [story for story in self.message_handling_stories
+ if story.extensions['validator'].validate(message)]
return matched_stories[0] if len(matched_stories) > 0 else None
def get_story_by_topic(self, topic):
- return [s for s in [*self.callable_stories, *self.message_handling_stories] if s['topic'] == topic][0]
+ return [s for s in [*self.callable_stories, *self.message_handling_stories] if s.topic == topic][0]
diff --git a/botstory/ast/parser.py b/botstory/ast/parser.py
index <HASH>..<HASH> 100644
--- a/botstory/ast/parser.py
+++ b/botstory/ast/parser.py
@@ -15,15 +15,9 @@ class Parser:
one_story()
- compiled_story = {
- 'topic': topic,
- 'parts': self.node,
- }
-
- self.node.compiled_story = compiled_story
-
+ res = self.node
self.node = None
- return compiled_story
+ return res
def part(self, story_part):
for m in self.middlewares:
@@ -39,15 +33,9 @@ class Parser:
class ASTNode:
- # TODO: merge compiled_story with ASTNode
- # because compiled_story has ASTNode, topic instances
- # and validator (optionally)
- #
- # so maybe we should add endpoint for message handler endpoints
- # with extra parameter (validator)
- # and the result just ASTNode
def __init__(self, topic):
self.compiled_story = None
+ self.extensions = {}
self.story_line = []
self.story_names = set()
self.topic = topic
diff --git a/botstory/ast/processor.py b/botstory/ast/processor.py
index <HASH>..<HASH> 100644
--- a/botstory/ast/processor.py
+++ b/botstory/ast/processor.py
@@ -44,7 +44,7 @@ class StoryProcessor:
logger.debug('story {}'.format(compiled_story))
logger.debug('idx {}'.format(idx))
- story_line = compiled_story['parts'].story_line
+ story_line = compiled_story.story_line
current_stack_level = len(session.stack) - 1
@@ -68,7 +68,7 @@ class StoryProcessor:
'type': validator.type,
'data': matchers.serialize(validator),
'step': idx,
- 'topic': compiled_story['topic'],
+ 'topic': compiled_story.topic,
}
return
|
merge compiled story an ast node in one entity
|
botstory_botstory
|
train
|
dd5a842cb15f4d10401ffec42ab6a8b98212c1b5
|
diff --git a/pylint/reporters/text.py b/pylint/reporters/text.py
index <HASH>..<HASH> 100644
--- a/pylint/reporters/text.py
+++ b/pylint/reporters/text.py
@@ -119,7 +119,7 @@ def colorize_ansi(msg, color=None, style=None):
class TextReporter(BaseReporter):
- """reports messages and layouts in plain text"""
+ """Reports messages and layouts in plain text"""
__implements__ = IReporter
name = "text"
@@ -135,7 +135,7 @@ class TextReporter(BaseReporter):
self._template = str(self.linter.config.msg_template or self.line_format)
def write_message(self, msg):
- """Convenience method to write a formated message with class default template"""
+ """Convenience method to write a formatted message with class default template"""
self.writeln(msg.format(self._template))
def handle_message(self, msg):
|
Fix a typo in TextReporter
|
PyCQA_pylint
|
train
|
dc2b60dda58ca125a1fcd053950be507d0c1a68a
|
diff --git a/src/FieldHandlers/BaseFieldHandler.php b/src/FieldHandlers/BaseFieldHandler.php
index <HASH>..<HASH> 100644
--- a/src/FieldHandlers/BaseFieldHandler.php
+++ b/src/FieldHandlers/BaseFieldHandler.php
@@ -117,6 +117,14 @@ abstract class BaseFieldHandler implements FieldHandlerInterface
return $options['embedded'] . '.' . $field;
}
- return $table->alias() . '.' . $field;
+ if (empty($table)) {
+ return $field;
+ }
+
+ if (is_object($table)) {
+ return $table->alias() . '.' . $field;
+ }
+
+ return $table . '.' . $field;
}
}
|
More stability to BaseFieldHandler
`BaseFieldHandler` always assumed that the `$table` parameter
is always an instance of the Table class, when generating field
names. This might not always be true and makes things somewhat
more difficult to test.
This fix adds a bit more stability to the method, checking if the
`$table` is in fact an object. Also, adds support for strings
and empty values.
|
QoboLtd_cakephp-csv-migrations
|
train
|
33a357cb6ff4dda6768c725a0d1a00df274676d5
|
diff --git a/lib/ec2spec/cli.rb b/lib/ec2spec/cli.rb
index <HASH>..<HASH> 100644
--- a/lib/ec2spec/cli.rb
+++ b/lib/ec2spec/cli.rb
@@ -11,7 +11,7 @@ module Ec2spec
days = options['days']
format = options['format'] || :plain_text
client = Ec2spec::Client.new(hosts, days, format)
- client.run
+ puts client.run
end
end
end
diff --git a/lib/ec2spec/json_formatter.rb b/lib/ec2spec/json_formatter.rb
index <HASH>..<HASH> 100644
--- a/lib/ec2spec/json_formatter.rb
+++ b/lib/ec2spec/json_formatter.rb
@@ -9,7 +9,7 @@ module Ec2spec
values = host_values(result)
hash[host] = values
end
- puts result_hash.to_json
+ result_hash.to_json
end
def host_values(result)
diff --git a/lib/ec2spec/plain_text_formatter.rb b/lib/ec2spec/plain_text_formatter.rb
index <HASH>..<HASH> 100644
--- a/lib/ec2spec/plain_text_formatter.rb
+++ b/lib/ec2spec/plain_text_formatter.rb
@@ -7,7 +7,7 @@ module Ec2spec
table.rows = table_rows(results)
column_count = hosts.size + 1
column_count.times { |i| table.align_column(i, :right) }
- puts table
+ table
end
def table_header(results)
|
Change returns value of .output to used as library
|
kyoshidajp_ec2spec
|
train
|
8c343a263d8f649d43caaaaf04d24099f9b043f3
|
diff --git a/win32_event_log/datadog_checks/win32_event_log/data/conf.yaml.example b/win32_event_log/datadog_checks/win32_event_log/data/conf.yaml.example
index <HASH>..<HASH> 100644
--- a/win32_event_log/datadog_checks/win32_event_log/data/conf.yaml.example
+++ b/win32_event_log/datadog_checks/win32_event_log/data/conf.yaml.example
@@ -39,6 +39,10 @@ instances:
#
# event_priority: normal
+ ## FILTERS
+ ## At least one filter is required:
+ ## `log_file`, `source_name`, `type`, `event_id`, `message_filters`
+
## @param log_file - list of strings - optional
## The `log_file` filter instructs the check to only capture events
## that belong to one of the specified LogFiles (Application, System, Setup, Security,
diff --git a/win32_event_log/datadog_checks/win32_event_log/win32_event_log.py b/win32_event_log/datadog_checks/win32_event_log/win32_event_log.py
index <HASH>..<HASH> 100644
--- a/win32_event_log/datadog_checks/win32_event_log/win32_event_log.py
+++ b/win32_event_log/datadog_checks/win32_event_log/win32_event_log.py
@@ -55,10 +55,10 @@ class Win32EventLogWMI(WinWMICheck):
event_format = instance.get('event_format')
message_filters = instance.get('message_filters', [])
- if not (source_names or event_ids or message_filters or log_files or user or ltypes):
+ if not (source_names or event_ids or message_filters or log_files or ltypes):
raise ConfigurationError(
'At least one of the following filters must be set: '
- 'source_name, event_id, message_filters, log_file, user, type'
+ 'source_name, event_id, message_filters, log_file, type'
)
instance_hash = hash_mutable(instance)
diff --git a/win32_event_log/tests/test_check.py b/win32_event_log/tests/test_check.py
index <HASH>..<HASH> 100644
--- a/win32_event_log/tests/test_check.py
+++ b/win32_event_log/tests/test_check.py
@@ -135,12 +135,6 @@ def test_filter_log_file(mock_from_time, mock_to_time, check, mock_get_wmi_sampl
check.check(instance)
-def test_filter_user(mock_from_time, mock_to_time, check, mock_get_wmi_sampler):
- instance = {'user': 'user'}
-
- check.check(instance)
-
-
def test_filter_type(mock_from_time, mock_to_time, check, mock_get_wmi_sampler):
instance = {'type': ['type']}
|
Remove check for user filter (#<I>)
* Win<I> Event Log yaml example
* remove check for user filter
|
DataDog_integrations-core
|
train
|
5a3394ce6743b0bc79b3783b4231a42596bbe04b
|
diff --git a/clevertap-android-sdk/src/main/java/com/clevertap/android/sdk/CTInAppBasePartialFragment.java b/clevertap-android-sdk/src/main/java/com/clevertap/android/sdk/CTInAppBasePartialFragment.java
index <HASH>..<HASH> 100644
--- a/clevertap-android-sdk/src/main/java/com/clevertap/android/sdk/CTInAppBasePartialFragment.java
+++ b/clevertap-android-sdk/src/main/java/com/clevertap/android/sdk/CTInAppBasePartialFragment.java
@@ -28,7 +28,7 @@ public abstract class CTInAppBasePartialFragment extends CTInAppBaseFragment {
@Override
void cleanup() {
- if (!isCleanedUp.get()) {
+ if (!Utils.isActivityDead(getActivity()) && !isCleanedUp.get()) {
final FragmentManager fragmentManager = parent.getFragmentManager();
FragmentTransaction transaction = fragmentManager.beginTransaction();
try {
diff --git a/clevertap-android-sdk/src/main/java/com/clevertap/android/sdk/Utils.java b/clevertap-android-sdk/src/main/java/com/clevertap/android/sdk/Utils.java
index <HASH>..<HASH> 100644
--- a/clevertap-android-sdk/src/main/java/com/clevertap/android/sdk/Utils.java
+++ b/clevertap-android-sdk/src/main/java/com/clevertap/android/sdk/Utils.java
@@ -1,6 +1,7 @@
package com.clevertap.android.sdk;
import android.annotation.SuppressLint;
+import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ResolveInfo;
@@ -11,6 +12,7 @@ import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
+import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
@@ -318,4 +320,14 @@ public final class Utils {
}
}
+ public static boolean isActivityDead(Activity activity) {
+ if (activity == null)
+ return true;
+ boolean isActivityDead = activity.isFinishing();
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
+ isActivityDead = isActivityDead || activity.isDestroyed();
+ }
+ return isActivityDead;
+ }
+
}
|
fix(crash): Added check to avoid null pointer exception
|
CleverTap_clevertap-android-sdk
|
train
|
e06827e4614e09215f7255adf4e8eea466694d6f
|
diff --git a/tests/InvalidConfigTest.php b/tests/InvalidConfigTest.php
index <HASH>..<HASH> 100644
--- a/tests/InvalidConfigTest.php
+++ b/tests/InvalidConfigTest.php
@@ -14,7 +14,7 @@ class InvalidConfigTest extends \PHPUnit_Framework_TestCase
{
/**
* @covers ::__construct
- * @covers ::getConfig
+ * @covers ::getConfigName
* @covers ::getErrmsg
*/
public function testCreate()
diff --git a/tests/helpers/MessageBuilderTest.php b/tests/helpers/MessageBuilderTest.php
index <HASH>..<HASH> 100644
--- a/tests/helpers/MessageBuilderTest.php
+++ b/tests/helpers/MessageBuilderTest.php
@@ -13,7 +13,6 @@ use axy\errors\tests\nstst\CustomError;
class MessageBuilderTest extends \PHPUnit_Framework_TestCase
{
/**
- * @covers ::__construct
* @covers ::createMessage
*/
public function testOriginalMessage()
@@ -23,7 +22,6 @@ class MessageBuilderTest extends \PHPUnit_Framework_TestCase
}
/**
- * @covers ::__construct
* @covers ::createMessage
*/
public function testDefaultMessage()
@@ -33,7 +31,6 @@ class MessageBuilderTest extends \PHPUnit_Framework_TestCase
}
/**
- * @covers ::__construct
* @covers ::createMessage
*/
public function testTemplateMessage()
@@ -43,7 +40,6 @@ class MessageBuilderTest extends \PHPUnit_Framework_TestCase
}
/**
- * @covers ::__construct
* @covers ::createMessage
*/
public function testTemplateMessageEmptyVar()
@@ -53,7 +49,6 @@ class MessageBuilderTest extends \PHPUnit_Framework_TestCase
}
/**
- * @covers ::__construct
* @covers ::createMessage
*/
public function testTemplateMessageReplaceCode()
|
phpdoc: fix covers annotation for tests
|
axypro_errors
|
train
|
2c1020a6e668cd2a6620d3a5e4ba865567b10301
|
diff --git a/system/Validation/FormatRules.php b/system/Validation/FormatRules.php
index <HASH>..<HASH> 100644
--- a/system/Validation/FormatRules.php
+++ b/system/Validation/FormatRules.php
@@ -317,7 +317,8 @@ class FormatRules
return false;
}
- $scheme = strtolower(parse_url($str, PHP_URL_SCHEME) ?? ''); // absent scheme gives null
+ // parse_url() may return null and false
+ $scheme = strtolower((string) parse_url($str, PHP_URL_SCHEME));
$validSchemes = explode(
',',
strtolower($validSchemes ?? 'http,https')
|
refactor: strtolower() should take only string
|
codeigniter4_CodeIgniter4
|
train
|
e46cff62a665ead02702d6ef3d5261cc3e522adb
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -60,11 +60,18 @@ function RTCChannelStream(channel) {
// set the channel binaryType to arraybuffer
channel.binaryType = 'arraybuffer';
+
+ // initialise the message handlers
+ this._handlers = {
+ message: this._handleMessage.bind(this),
+ close: this._handleClose.bind(this),
+ open: this._handleOpen.bind(this)
+ };
// attach channel listeners
- channel.addEventListener('message', this._handleMessage.bind(this));
- channel.addEventListener('close', this._handleClose.bind(this));
- channel.addEventListener('open', this._handleOpen.bind(this));
+ channel.addEventListener('message', this._handlers.message);
+ channel.addEventListener('close', this._handlers.close);
+ channel.addEventListener('open', this._handlers.open);
// send an ENDOFSTREAM marker on finish
this.once('finish', this._dcsend.bind(this, ENDOFSTREAM));
@@ -86,9 +93,9 @@ prot._debindChannel = function() {
var channel = this.channel;
// remove the message listener
- channel.removeEventListener('message', this._handleMessage);
- channel.removeEventListener('close', this._handleClose);
- channel.removeEventListener('open', this._handleOpen);
+ channel.removeEventListener('message', this._handlers.message);
+ channel.removeEventListener('close', this._handlers.close);
+ channel.removeEventListener('open', this._handlers.message);
};
prot._read = function(n) {
|
Change message handler initialization to ensure that we correctly debind event handlers
|
rtc-io_rtc-dcstream
|
train
|
309bbc9cbc0df45d2bf273bc35f1234e9e855172
|
diff --git a/vyked/bus.py b/vyked/bus.py
index <HASH>..<HASH> 100644
--- a/vyked/bus.py
+++ b/vyked/bus.py
@@ -106,6 +106,12 @@ class TCPBus:
f.add_done_callback(fun)
+ def new_instance(self, service, version, host, port, node_id, type):
+ sc = next(sc for sc in self._service_clients if sc.name == service and sc.version == version)
+ if type == 'tcp':
+ self._node_clients[node_id] = sc
+ asyncio.async(self._connect_to_client(host, node_id, port, type, sc))
+
def send(self, packet: dict):
packet['from'] = self._host_id
func = getattr(self, '_' + packet['type'] + '_sender')
diff --git a/vyked/packet.py b/vyked/packet.py
index <HASH>..<HASH> 100644
--- a/vyked/packet.py
+++ b/vyked/packet.py
@@ -133,6 +133,14 @@ class ControlPacket(_Packet):
'params': dict(uptimes)}
return packet
+ @classmethod
+ def new_instance(cls, service_name, version, host, port, node_id, type):
+ params = {'service': service_name, 'version': version, 'host': host, 'port': port, 'node': node_id,
+ 'type': type}
+ return {'pid': cls._next_pid(),
+ 'type': 'new_instance',
+ 'params': params}
+
class MessagePacket(_Packet):
@classmethod
diff --git a/vyked/registry.py b/vyked/registry.py
index <HASH>..<HASH> 100644
--- a/vyked/registry.py
+++ b/vyked/registry.py
@@ -36,6 +36,9 @@ class Repository:
if self._service_dependencies.get(service_name) is None:
self._service_dependencies[service_name] = service.dependencies
+ def is_pending(self, service, version):
+ return self._get_full_service_name(service, version) in self._pending_services
+
def add_pending_service(self, service, version, node_id):
self._pending_services[self._get_full_service_name(service, version)].append(node_id)
@@ -190,6 +193,16 @@ class Registry:
self._client_protocols[params['node_id']] = registry_protocol
self._connect_to_service(params['host'], params['port'], params['node_id'], params['type'])
self._handle_pending_registrations()
+ self._inform_consumers(service)
+
+ def _inform_consumers(self, service: Service):
+ consumers = self._repository.get_consumers(service.name, service.version)
+ for service_name, service_version in consumers:
+ if not self._repository.is_pending(service_name, service_version):
+ instances = self._repository.get_instances(service_name, service_version)
+ for host, port, node, type in instances:
+ protocol = self._client_protocols[node]
+ protocol.send(ControlPacket.new_instance(service.name, service.version, service.host, service.port, service.node_id, service.type))
def _send_activated_packet(self, service, version, node):
protocol = self._client_protocols.get(node, None)
diff --git a/vyked/registry_client.py b/vyked/registry_client.py
index <HASH>..<HASH> 100644
--- a/vyked/registry_client.py
+++ b/vyked/registry_client.py
@@ -87,6 +87,10 @@ class RegistryClient:
if packet['type'] == 'registered':
self.cache_vendors(packet['params']['vendors'])
self.bus.registration_complete()
+ elif packet['type'] == 'new_instance':
+ #TODO : once method for both vendors and new instance
+ self.cache_instance(**packet['params'])
+ self._handle_new_instance(**packet['params'])
elif packet['type'] == 'deregister':
self._handle_deregistration(packet)
elif packet['type'] == 'subscribers':
@@ -143,6 +147,10 @@ class RegistryClient:
self._available_services[vendor_name].append(
(address['host'], address['port'], address['node_id'], address['type']))
+ def cache_instance(self, service, version, host, port, node, type):
+ vendor = self._get_full_service_name(service, version)
+ self._available_services[vendor].append((host, port, node, type))
+
def _handle_deregistration(self, packet):
params = packet['params']
vendor = self._get_full_service_name(params['service'], params['version'])
@@ -167,3 +175,6 @@ class RegistryClient:
def _handle_get_instances(self, packet):
future = self._pending_requests[packet['request_id']]
future.set_result(packet['params']['instances'])
+
+ def _handle_new_instance(self, service, version, host, port, node, type):
+ self.bus.new_instance(service, version, host, port, node, type)
|
Inform services of new instance of a service
|
kashifrazzaqui_vyked
|
train
|
69f1defa2990b4f1b5246367bad27a6400bdc26e
|
diff --git a/src/Repositories/FileRepository.php b/src/Repositories/FileRepository.php
index <HASH>..<HASH> 100644
--- a/src/Repositories/FileRepository.php
+++ b/src/Repositories/FileRepository.php
@@ -97,8 +97,10 @@ class FileRepository implements Repository
return null;
}
+ $id = preg_replace("/{$this->directory}\/([^.]+).md/", '\1', $file['path']);
+
$entity = $this->factory->make([
- 'id' => $file['filename'],
+ 'id' => $id,
'data' => $this->filesystem->read($file['path']),
]);
diff --git a/tests/FilesystemTest.php b/tests/FilesystemTest.php
index <HASH>..<HASH> 100644
--- a/tests/FilesystemTest.php
+++ b/tests/FilesystemTest.php
@@ -11,11 +11,6 @@ class FilesystemTest extends TestCase
$this->filesystem = new PlainFilesystem($this->fixtures . '/entities/');
}
- public function tearDown()
- {
- $this->filesystem = new PlainFilesystem($this->fixtures . '/entities/');
- }
-
/** @test */
public function it_is_initializable()
{
|
Better filename to id conversion
|
gibboncms_gibbon
|
train
|
e22cd228ebd04c236c642b70f1caa48c483a2968
|
diff --git a/Command/SetupRunCommand.php b/Command/SetupRunCommand.php
index <HASH>..<HASH> 100644
--- a/Command/SetupRunCommand.php
+++ b/Command/SetupRunCommand.php
@@ -151,6 +151,7 @@ class SetupRunCommand extends Command
/** @var \FireGento\MageSetup\Service\SetupServiceInterface $service */
$service = $this->setupService->create(['config' => $config, 'subProcessorCodes' => $subProcessorCodes]);
+ $service->setOutput($output);
$service->execute();
$output->writeln('<info>Setup finished</info>');
diff --git a/Service/SetupService.php b/Service/SetupService.php
index <HASH>..<HASH> 100644
--- a/Service/SetupService.php
+++ b/Service/SetupService.php
@@ -8,6 +8,7 @@ namespace FireGento\MageSetup\Service;
use FireGento\MageSetup\Model\Setup\SubProcessor\SubProcessorPool;
use FireGento\MageSetup\Model\Config;
use Magento\Framework\App\Cache\Manager as CacheManager;
+use Symfony\Component\Console\Output\OutputInterface;
/**
* Class SetupService
@@ -37,6 +38,11 @@ class SetupService implements SetupServiceInterface
private $cacheManager;
/**
+ * @var OutputInterface
+ */
+ private $output = null;
+
+ /**
* @param Config $config
* @param CacheManager $cacheManager
* @param SubProcessorPool $subProcessorPool
@@ -64,10 +70,22 @@ class SetupService implements SetupServiceInterface
public function execute()
{
foreach ($this->subProcessorCodes as $subProcessorCode) {
+ if (null !== $this->output) {
+ $this->output->writeln('<comment>Start processor:</comment> ' . $subProcessorCode);
+ }
+
$subProcessor = $this->subProcessorPool->get($subProcessorCode);
$subProcessor->process($this->config);
}
$this->cacheManager->clean(['config', 'full_page']);
}
+
+ /**
+ * @param OutputInterface $output
+ */
+ public function setOutput(OutputInterface $output)
+ {
+ $this->output = $output;
+ }
}
diff --git a/Service/SetupServiceInterface.php b/Service/SetupServiceInterface.php
index <HASH>..<HASH> 100644
--- a/Service/SetupServiceInterface.php
+++ b/Service/SetupServiceInterface.php
@@ -5,6 +5,8 @@
*/
namespace FireGento\MageSetup\Service;
+use Symfony\Component\Console\Output\OutputInterface;
+
/**
* Interface SetupServiceInterface
*
@@ -16,4 +18,9 @@ interface SetupServiceInterface
* @return void
*/
public function execute();
+
+ /**
+ * @param OutputInterface $output
+ */
+ public function setOutput(OutputInterface $output);
}
|
[TASK] Show which processors are executed during command run
|
firegento_firegento-magesetup2
|
train
|
6357aa8e0b3edd2c56c2e13382cd037dee97316f
|
diff --git a/fleece/cli/build/build.py b/fleece/cli/build/build.py
index <HASH>..<HASH> 100755
--- a/fleece/cli/build/build.py
+++ b/fleece/cli/build/build.py
@@ -85,8 +85,10 @@ def clean_up_container(container, clean_up_volumes=True):
def retrieve_archive(container, dist_dir):
stream, stat = container.get_archive('/dist/lambda_function.zip')
- raw_data = stream.read()
- f = BytesIO(raw_data)
+ f = BytesIO()
+ for chunk in stream:
+ f.write(chunk)
+ f.seek(0)
with tarfile.open(fileobj=f, mode='r') as t:
t.extractall(path=dist_dir)
|
Fix stream reading in new Docker version
|
rackerlabs_fleece
|
train
|
f1686dbf889932625ba254dbd1c8ad9a1da7c863
|
diff --git a/bin/server.js b/bin/server.js
index <HASH>..<HASH> 100755
--- a/bin/server.js
+++ b/bin/server.js
@@ -53,7 +53,7 @@ var topicNum = -1;
var UInt32Max = 4294967296;
var topicMax = UInt32Max - minMulticast;
var topicIndex = {};
-var memory = opt.options.memory || 1024;
+var memory = opt.options.memory || process.env.SKALE_MEMORY || 1024;
//var name = opt.options.name || 'localhost'; // Unused until FT comes back
var port = Number(opt.options.port) || 12346;
var wss;
diff --git a/bin/worker.js b/bin/worker.js
index <HASH>..<HASH> 100755
--- a/bin/worker.js
+++ b/bin/worker.js
@@ -40,7 +40,7 @@ if (opt.options.version) {
var debug = opt.options.debug || false;
var ncpu = Number(opt.options.nworker) || (process.env.SKALE_WORKER_PER_HOST ? process.env.SKALE_WORKER_PER_HOST : os.cpus().length);
var hostname = opt.options.MyHost || os.hostname();
-var memory = Number(opt.options.memory || 1024);
+var memory = Number(opt.options.memory || process.env.SKALE_MEMORY || 1024);
var tmp = opt.options.tmp || process.env.SKALE_TMP || '/tmp';
var cgrid;
var mm = new MemoryManager(memory);
diff --git a/lib/context-local.js b/lib/context-local.js
index <HASH>..<HASH> 100644
--- a/lib/context-local.js
+++ b/lib/context-local.js
@@ -9,7 +9,7 @@ var rimraf = require('rimraf');
var uuid = require('node-uuid');
var dataset = require('./dataset.js');
-var memory = 4096;
+var memory = Number(process.env.SKALE_MEMORY) || 4096;
module.exports = Context;
@@ -17,7 +17,7 @@ function Context(args) {
if (!(this instanceof Context))
return new Context(args);
this.contextId = uuid.v4();
- var nworker = process.env.SKALE_WORKERS || (os.cpus().length - 1) || 1;
+ var nworker = Number(process.env.SKALE_WORKERS) || (os.cpus().length - 1) || 1;
var tmp = process.env.SKALE_TMP || '/tmp';
var self = this;
this.worker = new Array(nworker);
|
Add SKALE_MEMORY env variable to set worker max memory
|
skale-me_skale
|
train
|
f04396797b730823a38433d748f159e710dfaaca
|
diff --git a/lib/active_mocker/generate.rb b/lib/active_mocker/generate.rb
index <HASH>..<HASH> 100644
--- a/lib/active_mocker/generate.rb
+++ b/lib/active_mocker/generate.rb
@@ -12,12 +12,14 @@ module ActiveMocker
def call
progress_init
models_paths.each do |file|
- model_name = model_name(file)
- model = get_model_const(model_name)
- mock_file_name = "#{model_name.underscore}_#{config.mock_append_name.underscore}.rb"
- mock_file_path = File.join(Config.mock_dir, mock_file_name)
+ model_name = model_name(file)
+ model = get_model_const(model_name)
+ mock_file_name = "#{model_name.underscore}_#{config.mock_append_name.underscore}.rb"
+ mock_file_path = File.join(Config.mock_dir, mock_file_name)
assure_dir_path_exists(mock_file_path)
schema_scrapper = ActiveRecordSchemaScrapper.new(model: model)
+ mock_dir = File.dirname(mock_file_path)
+ FileUtils::mkdir_p(mock_dir) unless Dir.exists?(mock_dir)
File.open(mock_file_path, 'w') do |file_out|
begin
result = create_mock(file, file_out, schema_scrapper)
@@ -53,6 +55,7 @@ module ActiveMocker
end
OtherErrors = Struct.new(:successful?)
+
def collect_errors(mock_file_path, create_mock_errors, schema_scrapper, model_name)
display_errors.wrap_errors(schema_scrapper.associations.errors, model_name, type: :associations)
display_errors.wrap_errors(schema_scrapper.attributes.errors, model_name, type: :attributes)
|
Create mock dir for specific mock before file open
|
zeisler_active_mocker
|
train
|
d954ad0b05bac7f6754cf0165d59a969baf5d69f
|
diff --git a/hazelcast/src/test/java/com/hazelcast/osgi/HazelcastOSGiIntegrationTest.java b/hazelcast/src/test/java/com/hazelcast/osgi/HazelcastOSGiIntegrationTest.java
index <HASH>..<HASH> 100644
--- a/hazelcast/src/test/java/com/hazelcast/osgi/HazelcastOSGiIntegrationTest.java
+++ b/hazelcast/src/test/java/com/hazelcast/osgi/HazelcastOSGiIntegrationTest.java
@@ -61,6 +61,8 @@ public class HazelcastOSGiIntegrationTest {
System.setProperty(MAVEN_REPOSITORIES_PROP, MAVEN_REPOSITORIES);
String url = "reference:file:" + PathUtils.getBaseDir() + "/target/classes";
+ // modify url for Windows environment
+ url = url.replace("\\", "/");
UrlProvisionOption hzBundle = bundle(url);
CompositeOption junitBundles = junitBundles();
return options(hzBundle, junitBundles);
|
made test url suitable for windows env
|
hazelcast_hazelcast
|
train
|
1368181f312ab59772324cd71ae106f21aa2b414
|
diff --git a/upload/admin/controller/user/user_permission.php b/upload/admin/controller/user/user_permission.php
index <HASH>..<HASH> 100644
--- a/upload/admin/controller/user/user_permission.php
+++ b/upload/admin/controller/user/user_permission.php
@@ -125,7 +125,7 @@ class UserPermission extends \Opencart\System\Engine\Controller {
}
if (isset($this->request->get['page'])) {
- $page = $this->request->get['page'];
+ $page = (int)$this->request->get['page'];
} else {
$page = 1;
}
@@ -418,4 +418,4 @@ class UserPermission extends \Opencart\System\Engine\Controller {
return !$this->error;
}
-}
\ No newline at end of file
+}
|
Added integer on $page get request.
|
opencart_opencart
|
train
|
6ae6dcc7a8382da5e94e480316851f9aa4aaf615
|
diff --git a/backend/amp-live-list.go b/backend/amp-live-list.go
index <HASH>..<HASH> 100644
--- a/backend/amp-live-list.go
+++ b/backend/amp-live-list.go
@@ -142,11 +142,8 @@ func initBlogPosts() {
func handleLiveList(w http.ResponseWriter, r *http.Request, page Page) {
newStatus := updateStatus(w, r)
firstBlogID := strings.TrimPrefix(r.URL.Query().Get("from"), BLOG_ID_PREFIX)
- if origin := r.Header.Get("Origin"); origin != "" {
- page.Render(w, createLiveBlogSample(newStatus, time.Now(), firstBlogID, origin, page))
- } else {
- w.WriteHeader(http.StatusBadRequest)
- }
+ origin := GetOrigin(r)
+ page.Render(w, createLiveBlogSample(newStatus, time.Now(), firstBlogID, origin, page))
}
func updateStatus(w http.ResponseWriter, r *http.Request) int {
diff --git a/backend/request.go b/backend/request.go
index <HASH>..<HASH> 100644
--- a/backend/request.go
+++ b/backend/request.go
@@ -40,14 +40,24 @@ func isFormPostRequest(method string, w http.ResponseWriter) bool {
}
func EnableCors(w http.ResponseWriter, r *http.Request) {
- if origin := r.Header.Get("Origin"); origin != "" {
- w.Header().Set("Access-Control-Allow-Origin", origin)
- w.Header().Set("Access-Control-Allow-Methods", "POST, GET, OPTIONS")
- w.Header().Set("Access-Control-Allow-Headers", "Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token")
- w.Header().Set("Access-Control-Expose-Headers", "AMP-Access-Control-Allow-Source-Origin")
- w.Header().Set("AMP-Access-Control-Allow-Source-Origin", origin)
- w.Header().Set("Access-Control-Allow-Credentials", "true")
+ origin := GetOrigin(r)
+ w.Header().Set("Access-Control-Allow-Origin", origin)
+ w.Header().Set("Access-Control-Allow-Methods", "POST, GET, OPTIONS")
+ w.Header().Set("Access-Control-Allow-Headers", "Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token")
+ w.Header().Set("Access-Control-Expose-Headers", "AMP-Access-Control-Allow-Source-Origin")
+ w.Header().Set("AMP-Access-Control-Allow-Source-Origin", origin)
+ w.Header().Set("Access-Control-Allow-Credentials", "true")
+}
+
+func GetOrigin(r *http.Request) string {
+ origin := r.Header.Get("Origin")
+ if origin != "" {
+ return origin
+ }
+ if r.TLS == nil {
+ return "http://" + r.Host
}
+ return "https://" + r.Host
}
func SetContentTypeJson(w http.ResponseWriter) {
|
fix cors header for requests without origin header (#<I>)
|
ampproject_amp-by-example
|
train
|
7f0b0246fbdda11d5aeb548d71cda030521ab952
|
diff --git a/lib/backup.rb b/lib/backup.rb
index <HASH>..<HASH> 100644
--- a/lib/backup.rb
+++ b/lib/backup.rb
@@ -21,7 +21,7 @@ module Backup
COMPRESSORS = ['Gzip']
ENCRYPTORS = ['OpenSSL', 'GPG']
NOTIFIERS = ['Mail']
- SYNCERS = ['RSpec']
+ SYNCERS = ['RSync']
##
# Backup's internal paths
@@ -154,7 +154,9 @@ module Backup
# Dynamically defines all the available database, storage, compressor, encryptor and notifier
# classes inside Backup::Finder to improve the DSL for the configuration file
(DATABASES + STORAGES + COMPRESSORS + ENCRYPTORS + NOTIFIERS + SYNCERS).each do |constant|
- Backup::Finder.const_set(constant, Class.new)
+ unless Backup::Finder.const_defined?(constant)
+ Backup::Finder.const_set(constant, Class.new)
+ end
end
end
|
Fixed a typo and fixed how constants are dynamically generated in to the Backup::Finder to avoid double-definition.
|
backup_backup
|
train
|
7b5df365d87534d14da6ad33c8acd84c87147911
|
diff --git a/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/mongo/MongoProperties.java b/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/mongo/MongoProperties.java
index <HASH>..<HASH> 100644
--- a/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/mongo/MongoProperties.java
+++ b/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/mongo/MongoProperties.java
@@ -39,6 +39,7 @@ import org.springframework.core.env.Environment;
* @author Josh Long
* @author Andy Wilkinson
* @author Eddú Meléndez
+ * @author Stephane Nicoll
*/
@ConfigurationProperties(prefix = "spring.data.mongodb")
public class MongoProperties {
@@ -257,6 +258,7 @@ public class MongoProperties {
builder.description(options.getDescription());
builder.maxWaitTime(options.getMaxWaitTime());
builder.readPreference(options.getReadPreference());
+ builder.sslEnabled(options.isSslEnabled());
builder.socketFactory(options.getSocketFactory());
builder.socketKeepAlive(options.isSocketKeepAlive());
builder.socketTimeout(options.getSocketTimeout());
diff --git a/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/mongo/MongoAutoConfigurationTests.java b/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/mongo/MongoAutoConfigurationTests.java
index <HASH>..<HASH> 100644
--- a/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/mongo/MongoAutoConfigurationTests.java
+++ b/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/mongo/MongoAutoConfigurationTests.java
@@ -16,7 +16,10 @@
package org.springframework.boot.autoconfigure.mongo;
+import javax.net.SocketFactory;
+
import com.mongodb.Mongo;
+import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import org.junit.After;
import org.junit.Test;
@@ -28,11 +31,13 @@ import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
/**
* Tests for {@link MongoAutoConfiguration}.
*
* @author Dave Syer
+ * @author Stephane Nicoll
*/
public class MongoAutoConfigurationTests {
@@ -78,6 +83,20 @@ public class MongoAutoConfigurationTests {
.isEqualTo(300);
}
+ @Test
+ public void optionsSslConfig() {
+ this.context = new AnnotationConfigApplicationContext();
+ EnvironmentTestUtils.addEnvironment(this.context,
+ "spring.data.mongodb.uri:mongodb://localhost/test");
+ this.context.register(SslOptionsConfig.class,
+ PropertyPlaceholderAutoConfiguration.class, MongoAutoConfiguration.class);
+ this.context.refresh();
+ MongoClient mongo = this.context.getBean(MongoClient.class);
+ MongoClientOptions options = mongo.getMongoClientOptions();
+ assertThat(options.isSslEnabled()).isTrue();
+ assertThat(options.getSocketFactory()).isSameAs(this.context.getBean("mySocketFactory"));
+ }
+
@Configuration
protected static class OptionsConfig {
@@ -88,4 +107,19 @@ public class MongoAutoConfigurationTests {
}
+ @Configuration
+ protected static class SslOptionsConfig {
+
+ @Bean
+ public MongoClientOptions mongoClientOptions() {
+ return MongoClientOptions.builder().sslEnabled(true).socketFactory(mySocketFactory()).build();
+ }
+
+ @Bean
+ public SocketFactory mySocketFactory() {
+ return mock(SocketFactory.class);
+ }
+
+ }
+
}
|
Enable SSL from MongoClientOptions
Closes gh-<I>
|
spring-projects_spring-boot
|
train
|
bf081e6b48112d8341196e07e0b018c6287e9f0d
|
diff --git a/src/main/java/strman/Strman.java b/src/main/java/strman/Strman.java
index <HASH>..<HASH> 100644
--- a/src/main/java/strman/Strman.java
+++ b/src/main/java/strman/Strman.java
@@ -528,7 +528,12 @@ public abstract class Strman {
*/
public static boolean isUpperCase(final String value) {
validate(value, NULL_STRING_PREDICATE, NULL_STRING_MSG_SUPPLIER);
- return Objects.equals(value, value.toUpperCase());
+ for (int i = 0; i < value.length(); i++) {
+ if (Character.isLowerCase(value.charAt(i))) {
+ return false;
+ }
+ }
+ return true;
}
/**
@@ -539,7 +544,12 @@ public abstract class Strman {
*/
public static boolean isLowerCase(final String value) {
validate(value, NULL_STRING_PREDICATE, NULL_STRING_MSG_SUPPLIER);
- return Objects.equals(value, value.toLowerCase());
+ for (int i = 0; i < value.length(); i++) {
+ if (Character.isUpperCase(value.charAt(i))) {
+ return false;
+ }
+ }
+ return true;
}
/**
|
Resolved #<I>. Incorporated suggestions of johnsdouglass
|
shekhargulati_strman-java
|
train
|
39af2f4171cbf79dd52347c4cf2d03fedc13a14a
|
diff --git a/mod/scorm/mod_form.php b/mod/scorm/mod_form.php
index <HASH>..<HASH> 100644
--- a/mod/scorm/mod_form.php
+++ b/mod/scorm/mod_form.php
@@ -81,12 +81,6 @@ class mod_scorm_mod_form extends moodleform_mod {
$mform->setType('scormtype', PARAM_ALPHA);
}
- // Update packages timing.
- $mform->addElement('select', 'updatefreq', get_string('updatefreq', 'scorm'), scorm_get_updatefreq_array());
- $mform->setType('updatefreq', PARAM_INT);
- $mform->setDefault('updatefreq', $cfgscorm->updatefreq);
- $mform->addHelpButton('updatefreq', 'updatefreq', 'scorm');
-
// New local package upload.
$filemanageroptions = array();
$filemanageroptions['accepted_types'] = array('.zip', '.xml');
@@ -98,6 +92,12 @@ class mod_scorm_mod_form extends moodleform_mod {
$mform->addHelpButton('packagefile', 'package', 'scorm');
$mform->disabledIf('packagefile', 'scormtype', 'noteq', SCORM_TYPE_LOCAL);
+ // Update packages timing.
+ $mform->addElement('select', 'updatefreq', get_string('updatefreq', 'scorm'), scorm_get_updatefreq_array());
+ $mform->setType('updatefreq', PARAM_INT);
+ $mform->setDefault('updatefreq', $cfgscorm->updatefreq);
+ $mform->addHelpButton('updatefreq', 'updatefreq', 'scorm');
+
// Display Settings.
$mform->addElement('header', 'displaysettings', get_string('appearance'));
|
MDL-<I> SCORM form : Auto update frequency option under File manager
Placing the Auto update frequency option underneath the File manager in /mod/scorm/mod_form.php
This changes the postion of the auto update freq option both while creating a new scorm object and while editing the scorm object.
|
moodle_moodle
|
train
|
5437b43f7f1d680198919f7efa693ea03bd48454
|
diff --git a/lib/xmpp/client.js b/lib/xmpp/client.js
index <HASH>..<HASH> 100644
--- a/lib/xmpp/client.js
+++ b/lib/xmpp/client.js
@@ -39,6 +39,8 @@ function Client(params) {
this.xmppVersion = "1.0";
this.streamTo = this.jid.domain;
this.state = STATE_PREAUTH;
+ // Immediately start stream
+ this.addListener('connect', this.startStream);
this.addListener('rawStanza', this.onRawStanza);
if (params.host) {
@@ -74,6 +76,18 @@ function Client(params) {
sys.inherits(Client, Connection.Connection);
exports.Client = Client;
+Client.prototype.startStream = function() {
+ Connection.prototype.startStream.call(this);
+
+ var tag = "<stream:stream xmlns='" + this.xmlns +
+ "' xmlns:stream='" + Connection.NS_STREAM + "'" +
+ " to='" + this.streamTo + "'";
+ if (this.xmppVersion)
+ tag += " version='" + this.xmppVersion + "'";
+ tag += ">";
+ this.send(tag);
+};
+
Client.prototype.onRawStanza = function(stanza) {
/* Actually, we shouldn't wait for <stream:features/> if
this.streamAttrs.version is missing, but who uses pre-XMPP-1.0
diff --git a/lib/xmpp/component.js b/lib/xmpp/component.js
index <HASH>..<HASH> 100644
--- a/lib/xmpp/component.js
+++ b/lib/xmpp/component.js
@@ -23,6 +23,8 @@ function Component(params) {
this.password = params.password;
this.xmlns = NS_COMPONENT;
this.streamTo = this.jid.domain;
+ // Immediately start stream
+ this.addListener('connect', this.startStream);
this.addListener('streamStart', this.onStreamStart);
this.addListener('rawStanza', this.onRawStanza);
this.addListener('end', this.onEnd);
@@ -42,6 +44,18 @@ Component.prototype.onStreamStart = function(streamAttrs) {
this.send(new xml.Element('handshake').t(digest));
};
+Component.prototype.startStream = function() {
+ Connection.prototype.startStream.call(this);
+
+ var tag = "<stream:stream xmlns='" + this.xmlns +
+ "' xmlns:stream='" + Connection.NS_STREAM + "'" +
+ " to='" + this.streamTo + "'";
+ if (this.xmppVersion)
+ tag += " version='" + this.xmppVersion + "'";
+ tag += ">";
+ this.send(tag);
+};
+
Component.prototype.onRawStanza = function(stanza) {
if (!this.authenticated &&
stanza.is('handshake', NS_COMPONENT)) {
diff --git a/lib/xmpp/connection.js b/lib/xmpp/connection.js
index <HASH>..<HASH> 100644
--- a/lib/xmpp/connection.js
+++ b/lib/xmpp/connection.js
@@ -13,17 +13,40 @@ var NS_STREAM = exports.NS_STREAM = 'http://etherx.jabber.org/streams';
function Connection() {
net.Stream.call(this);
- this.charset = 'UTF-8';
- this.allowTLS = true; /* can be set by user */
- this.addListener('connect', this.startStream);
- this.addListener('data', this.onData);
-// this.addListener('end', this.onEnd);
- this.addListener('error', this.onError);
+ initConnection(this);
}
sys.inherits(Connection, net.Stream);
exports.Connection = Connection;
+// Defaults
+Connection.prototype.charset = 'UTF-8';
+Connection.prototype.allowTLS = true;
+
+
+/** Constructor code, usable for existing streams
+ */
+function makeConnection(conn) {
+ for(var k in Connection.prototype)
+ if (Connection.prototype.hasOwnProperty(k))
+ conn[k] = Connection.prototype[k];
+
+ initConnection(conn);
+}
+exports.makeConnection = makeConnection;
+
+/** Actual constructor code
+ */
+function initConnection(conn) {
+ conn.charset = 'UTF-8';
+
+ conn.addListener('data', conn.onData);
+ conn.addListener('close', conn.onClose);
+}
+
+/** Climbs the stanza up if a child was passed,
+ but you can send strings and buffers too.
+*/
Connection.prototype.send = function(stanza) {
if (!this.writable) {
this.end();
@@ -44,6 +67,7 @@ Connection.prototype.send = function(stanza) {
Connection.prototype.startParser = function() {
var self = this;
+ self.setEncoding('utf8');
self.element = null;
self.parser = new expat.Parser(self.charset);
@@ -96,14 +120,6 @@ Connection.prototype.startParser = function() {
Connection.prototype.startStream = function() {
this.startParser();
-
- var tag = "<stream:stream xmlns='" + this.xmlns +
- "' xmlns:stream='" + NS_STREAM + "'" +
- " to='" + this.streamTo + "'";
- if (this.xmppVersion)
- tag += " version='" + this.xmppVersion + "'";
- tag += ">";
- this.send(tag);
};
Connection.prototype.onData = function(data) {
@@ -164,8 +180,9 @@ Connection.prototype.rmStreamNs = function(stanza) {
/**
- * All errors are terminal for the connection.
+ *
*/
-Connection.prototype.onError = function(error) {
- this.end();
+Connection.prototype.onClose = function() {
+ delete this.element;
+ delete this.parser;
};
|
connection: move stream starting to client & component
|
xmppjs_xmpp.js
|
train
|
e4238aa8fd682bdd84f4bdd6e9c9ddadd7d4b647
|
diff --git a/packages/ember-htmlbars/lib/helpers/yield.js b/packages/ember-htmlbars/lib/helpers/yield.js
index <HASH>..<HASH> 100644
--- a/packages/ember-htmlbars/lib/helpers/yield.js
+++ b/packages/ember-htmlbars/lib/helpers/yield.js
@@ -97,7 +97,7 @@ export function yieldHelper(params, hash, options, env) {
if (view._contextView) {
view = view._contextView;
} else {
- view = get(view, '_parentView');
+ view = view._parentView;
}
}
diff --git a/packages/ember-routing-htmlbars/lib/helpers/outlet.js b/packages/ember-routing-htmlbars/lib/helpers/outlet.js
index <HASH>..<HASH> 100644
--- a/packages/ember-routing-htmlbars/lib/helpers/outlet.js
+++ b/packages/ember-routing-htmlbars/lib/helpers/outlet.js
@@ -85,7 +85,7 @@ export function outletHelper(params, hash, options, env) {
outletSource = this;
while (!outletSource.get('template.isTop')) {
- outletSource = outletSource.get('_parentView');
+ outletSource = outletSource._parentView;
}
set(this, 'outletSource', outletSource);
diff --git a/packages/ember-routing-htmlbars/tests/helpers/outlet_test.js b/packages/ember-routing-htmlbars/tests/helpers/outlet_test.js
index <HASH>..<HASH> 100644
--- a/packages/ember-routing-htmlbars/tests/helpers/outlet_test.js
+++ b/packages/ember-routing-htmlbars/tests/helpers/outlet_test.js
@@ -125,7 +125,7 @@ test("outlet should correctly lookup a view", function() {
view.connectOutlet('main', childView);
});
- ok(ContainerView.detectInstance(childView.get('_parentView')), "The custom view class should be used for the outlet");
+ ok(ContainerView.detectInstance(childView._parentView), "The custom view class should be used for the outlet");
// Replace whitespace for older IE
equal(trim(view.$().text()), 'HIBYE');
@@ -185,7 +185,7 @@ test("outlet should support an optional view class", function() {
view.connectOutlet('main', childView);
});
- ok(view.outletView.detectInstance(childView.get('_parentView')), "The custom view class should be used for the outlet");
+ ok(view.outletView.detectInstance(childView._parentView), "The custom view class should be used for the outlet");
// Replace whitespace for older IE
equal(trim(view.$().text()), 'HIBYE');
diff --git a/packages/ember-views/lib/views/component.js b/packages/ember-views/lib/views/component.js
index <HASH>..<HASH> 100644
--- a/packages/ember-views/lib/views/component.js
+++ b/packages/ember-views/lib/views/component.js
@@ -192,7 +192,7 @@ var Component = View.extend(TargetActionSupport, ComponentTemplateDeprecation, {
@default null
*/
targetObject: computed(function(key) {
- var parentView = get(this, '_parentView');
+ var parentView = this._parentView;
return parentView ? get(parentView, 'controller') : null;
}).property('_parentView'),
diff --git a/packages/ember-views/lib/views/container_view.js b/packages/ember-views/lib/views/container_view.js
index <HASH>..<HASH> 100644
--- a/packages/ember-views/lib/views/container_view.js
+++ b/packages/ember-views/lib/views/container_view.js
@@ -229,7 +229,7 @@ var ContainerView = View.extend(MutableArray, {
replace: function(idx, removedCount, addedViews) {
var addedCount = addedViews ? get(addedViews, 'length') : 0;
var self = this;
- Ember.assert("You can't add a child to a container - the child is already a child of another view", emberA(addedViews).every(function(item) { return !get(item, '_parentView') || get(item, '_parentView') === self; }));
+ Ember.assert("You can't add a child to a container - the child is already a child of another view", emberA(addedViews).every(function(item) { return !item._parentView || item._parentView === self; }));
this.arrayContentWillChange(idx, removedCount, addedCount);
this.childViewsWillChange(this._childViews, idx, removedCount);
@@ -356,7 +356,7 @@ var ContainerView = View.extend(MutableArray, {
_currentViewDidChange: observer('currentView', function() {
var currentView = get(this, 'currentView');
if (currentView) {
- Ember.assert("You tried to set a current view that already has a parent. Make sure you don't have multiple outlets in the same view.", !get(currentView, '_parentView'));
+ Ember.assert("You tried to set a current view that already has a parent. Make sure you don't have multiple outlets in the same view.", !currentView._parentView);
this.pushObject(currentView);
}
}),
diff --git a/packages/ember-views/lib/views/view.js b/packages/ember-views/lib/views/view.js
index <HASH>..<HASH> 100644
--- a/packages/ember-views/lib/views/view.js
+++ b/packages/ember-views/lib/views/view.js
@@ -792,7 +792,7 @@ var View = CoreView.extend({
return this._controller;
}
- var parentView = get(this, '_parentView');
+ var parentView = this._parentView;
return parentView ? get(parentView, 'controller') : null;
}),
|
Don't get _parentView
|
emberjs_ember.js
|
train
|
ae06dc60a335fdfce456bb61e393c5968eafdf72
|
diff --git a/src/core.js b/src/core.js
index <HASH>..<HASH> 100644
--- a/src/core.js
+++ b/src/core.js
@@ -16,7 +16,6 @@
stack.push({
level: match[1].length,
title: match[2],
- children: []
});
}
dataBlock.push(data.substring(index, data.length));
@@ -28,29 +27,36 @@
dataBlock.pop();
stack.reverse();
- return (function parse(parent, level) {
+ return (function parse(parent) {
while (stack.length) {
var node = stack[stack.length - 1];
- if (node.level > level) {
- parent.push($.extend(stack.pop(), {
- data: dataBlock.pop()
- }));
- parse(node.children, node.level);
+ if (node.level > parent.level) {
+ if (parent.children) {
+ parent.children.push($.extend(stack.pop(), {
+ data: dataBlock.pop()
+ }));
+ } else {
+ parent.children = [];
+ }
+ parse(node, node.level);
} else {
return parent;
}
}
return parent;
- }([], 0));
+ }({
+ level: 0,
+ title: ""
+ }));
},
_renderer = function (data, style) {
var method = {
fold: function (data) {
var $markdown = $('<article>').addClass("markdown-body");
(function build($parent, data) {
- if (data.length !== 0) {
- data.forEach(function (element) {
- var $newDiv = build($('<div>').append($(marked(element.data))), element.children).toggle();
+ if (data.children) {
+ data.children.forEach(function (element) {
+ var $newDiv = build($('<div>').append($(marked(element.data))), element).toggle();
$parent.append(
$('<h' + element.level + '>').text(" " + element.title)
.prepend(
@@ -71,7 +77,10 @@
return $markdown;
},
tree: function (data) {
- var width = 500,
+ if (data.children && data.children.length === 1) {
+ data = data.children[0];
+ }
+ var width = 700,
height = 500;
var g = d3.select("body").append("svg")
.attr("width", width)
@@ -79,12 +88,9 @@
.append("g")
.attr("transform", "translate(40,0)");
var tree = d3.tree()
- .size([height, width]);
- var root = d3.hierarchy({
- children: data
- });
+ .size([height, width - 200]);
+ var root = d3.hierarchy(data);
tree(root);
- console.log(root);
g.selectAll(".link")
.data(root.descendants().slice(1))
.enter().append("path")
|
Core: Improve the implementation of parser
|
NiX-Team_jquery-markview
|
train
|
5db8c4ad48645fa6df1654819842808d114b5db3
|
diff --git a/src/android/nl/xservices/plugins/SocialSharing.java b/src/android/nl/xservices/plugins/SocialSharing.java
index <HASH>..<HASH> 100644
--- a/src/android/nl/xservices/plugins/SocialSharing.java
+++ b/src/android/nl/xservices/plugins/SocialSharing.java
@@ -44,6 +44,13 @@ public class SocialSharing extends CordovaPlugin {
private CallbackContext callbackContext;
+ private abstract class SocialSharingRunnable implements Runnable {
+ public CallbackContext callbackContext;
+ SocialSharingRunnable(CallbackContext cb) {
+ this.callbackContext = cb;
+ }
+ }
+
@Override
public boolean execute(String action, JSONArray args, CallbackContext pCallbackContext) throws JSONException {
this.callbackContext = pCallbackContext;
@@ -88,7 +95,7 @@ public class SocialSharing extends CordovaPlugin {
private boolean invokeEmailIntent(final String message, final String subject, final JSONArray to, final JSONArray cc, final JSONArray bcc, final JSONArray files) throws JSONException {
final SocialSharing plugin = this;
- cordova.getThreadPool().execute(new Runnable() {
+ cordova.getThreadPool().execute(new SocialSharingRunnable(this.callbackContext) {
public void run() {
final Intent draft = new Intent(Intent.ACTION_SEND_MULTIPLE);
if (notEmpty(message)) {
@@ -149,7 +156,7 @@ public class SocialSharing extends CordovaPlugin {
final CordovaInterface mycordova = cordova;
final CordovaPlugin plugin = this;
- cordova.getThreadPool().execute(new Runnable() {
+ cordova.getThreadPool().execute(new SocialSharingRunnable(this.callbackContext) {
public void run() {
String message = msg;
final boolean hasMultipleAttachments = files.length() > 1;
|
Fix: race condition, when calling to fast from JS
|
EddyVerbruggen_SocialSharing-PhoneGap-Plugin
|
train
|
1ee4e9719ed058a10dbdaa0c393dcb9f0dc26048
|
diff --git a/intervals.py b/intervals.py
index <HASH>..<HASH> 100644
--- a/intervals.py
+++ b/intervals.py
@@ -73,10 +73,10 @@ class AtomicInterval:
def __init__(self, left, lower, upper, right):
if lower > upper:
raise ValueError('Bounds are not ordered correctly: lower bound {} must be smaller than upper bound {}'.format(lower, upper))
- self.left = left
+ self.left = left if lower != -inf else OPEN
self.lower = lower
self.upper = upper
- self.right = right
+ self.right = right if upper != inf else OPEN
def is_empty(self):
return self.lower == self.upper and (self.left == OPEN or self.right == OPEN)
diff --git a/test_intervals.py b/test_intervals.py
index <HASH>..<HASH> 100644
--- a/test_intervals.py
+++ b/test_intervals.py
@@ -28,6 +28,7 @@ def test_creation():
assert I.open(0, 1) == I.AtomicInterval(I.OPEN, 0, 1, I.OPEN)
assert I.openclosed(0, 1) == I.AtomicInterval(I.OPEN, 0, 1, I.CLOSED)
assert I.closedopen(0, 1) == I.AtomicInterval(I.CLOSED, 0, 1, I.OPEN)
+ assert I.closed(-I.inf, I.inf) == I.open(-I.inf, I.inf)
def test_to_interval_to_atomic():
@@ -107,6 +108,8 @@ def test_complement():
intervals = [I.closed(0, 1), I.open(0, 1), I.openclosed(0, 1), I.closedopen(0, 1)]
for interval in intervals:
assert ~(~interval) == interval
+ assert ~I.open(1, 1) == I.open(-I.inf, I.inf)
+ assert (~I.closed(-I.inf, I.inf)).is_empty()
def test_example():
|
Intervals with infinity are open, not closed
|
AlexandreDecan_python-intervals
|
train
|
2ee2450341042b3b3ce43e84a6e62949b697bca2
|
diff --git a/test/json_hal_renderer_test.rb b/test/json_hal_renderer_test.rb
index <HASH>..<HASH> 100644
--- a/test/json_hal_renderer_test.rb
+++ b/test/json_hal_renderer_test.rb
@@ -1,6 +1,13 @@
require 'test_helper'
Mime::Type.register 'application/json+hal', :hal
+if Roar::Rails.rails4_1?
+ ActionController.add_renderer :hal do |js, options|
+ self.content_type ||= Mime::HAL
+ js.to_json
+ end
+end
+
class HalRendererTest < ActionController::TestCase
include Roar::Rails::TestCase
|
manually add hal renderer for <I> test. sorry for that inconvenience but i don't wanna add that to the core until i understand/rewrote the respective core "components".
|
apotonick_roar-rails
|
train
|
7c83aa6a648885a36307e590a345babfdc7e33a2
|
diff --git a/jet/templatetags/jet_tags.py b/jet/templatetags/jet_tags.py
index <HASH>..<HASH> 100644
--- a/jet/templatetags/jet_tags.py
+++ b/jet/templatetags/jet_tags.py
@@ -221,9 +221,16 @@ def jet_sibling_object_url(context, next):
preserved_filters_plain = context.get('preserved_filters', '')
preserved_filters = dict(parse_qsl(preserved_filters_plain))
admin_site = get_admin_site(context)
+
+ if admin_site is None:
+ return
+
request = context.get('request')
queryset = get_model_queryset(admin_site, model, request, preserved_filters=preserved_filters)
+ if queryset is None:
+ return
+
sibling_object = None
object_pks = list(queryset.values_list('pk', flat=True))
diff --git a/jet/utils.py b/jet/utils.py
index <HASH>..<HASH> 100644
--- a/jet/utils.py
+++ b/jet/utils.py
@@ -166,11 +166,15 @@ class SuccessMessageMixin(object):
def get_model_queryset(admin_site, model, request, preserved_filters=None):
model_admin = admin_site._registry.get(model)
- changelist_url = urlresolvers.reverse('%s:%s_%s_changelist' % (
- admin_site.name,
- model._meta.app_label,
- model._meta.model_name
- ))
+ try:
+ changelist_url = urlresolvers.reverse('%s:%s_%s_changelist' % (
+ admin_site.name,
+ model._meta.app_label,
+ model._meta.model_name
+ ))
+ except NoReverseMatch:
+ return
+
changelist_filters = None
if preserved_filters:
|
Add singlings urls resolving checks
|
geex-arts_django-jet
|
train
|
e4d1953a7f7d5d63c4068e92f447a42ba53f1922
|
diff --git a/modules/gatsby/src/gatsby-node/process-nodes/process-nodes-markdown.js b/modules/gatsby/src/gatsby-node/process-nodes/process-nodes-markdown.js
index <HASH>..<HASH> 100644
--- a/modules/gatsby/src/gatsby-node/process-nodes/process-nodes-markdown.js
+++ b/modules/gatsby/src/gatsby-node/process-nodes/process-nodes-markdown.js
@@ -29,7 +29,6 @@ module.exports.processNewMarkdownNode = function processNewMarkdownNode(
tocNode
) {
const {createNodeField} = actions;
-
const fileNode = getNode(node.parent);
const parsedFilePath = path.parse(fileNode.relativePath);
const hasTitle =
@@ -49,6 +48,7 @@ module.exports.processNewMarkdownNode = function processNewMarkdownNode(
// Update path
let relPath = node.fields.slug;
+ let entry = relPath;
if (node.fileAbsolutePath) {
const {ocularConfig} = global;
@@ -69,7 +69,8 @@ module.exports.processNewMarkdownNode = function processNewMarkdownNode(
const basename = path.basename(relPath, '.md');
const dirname = path.dirname(relPath);
- relPath = basename === 'README' ? dirname : `${dirname}/${basename}`;
+ entry = `${dirname}/${basename}`;
+ relPath = basename === 'README' ? dirname : entry;
createNodeField({node, name: 'path', value: relPath});
createNodeField({node, name: 'slug', value: relPath});
@@ -84,7 +85,7 @@ module.exports.processNewMarkdownNode = function processNewMarkdownNode(
// we don't need as much. The app will only use the title and slug of the corresponding markdown
// node for each toc entry.
- const nodeToEdit = parseToc([tocNode], relPath);
+ const nodeToEdit = parseToc([tocNode], entry);
if (nodeToEdit) {
nodeToEdit.childMarkdownRemark = {
fields: {
|
better treatment of readme.md files (#<I>)
* better treatment of readme.md files
* remove debugger
|
uber-web_ocular
|
train
|
9c1ebb8d64270af9f16ae823635572139a58344c
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -25,6 +25,7 @@ setup(
license='Fusionbox',
test_suite='nose.collector',
setup_requires=[
+ 'nose>=1.2.1',
],
tests_require=[
'nose>=1.2.1',
|
Need nose in setup requirements
- Otherwise, `python setup.py nosetests` will not work out-of-the-box
|
fusionbox_django-backupdb
|
train
|
086f4c8fc57975a66845507dcfa203aac95960d9
|
diff --git a/tests/test_sip_server.py b/tests/test_sip_server.py
index <HASH>..<HASH> 100644
--- a/tests/test_sip_server.py
+++ b/tests/test_sip_server.py
@@ -48,3 +48,67 @@ def test_subscribe(test_server, protocol, loop):
assert received_request.method == 'SUBSCRIBE'
server_app.close()
+
+
+@asyncio.coroutine
+def test_response_501(test_server, protocol, loop):
+ app = aiosip.Application(loop=loop)
+ server_app = aiosip.Application(loop=loop)
+ server = yield from test_server(server_app)
+ connection = yield from app.connect(
+ protocol=protocol,
+ local_addr=(server.sip_config['client_host'], server.sip_config['client_port']),
+ remote_addr=(server.sip_config['server_host'], server.sip_config['server_port'])
+ )
+
+ subscribe_dialog = connection.create_dialog(
+ from_uri='sip:{}@{}:{}'.format(server.sip_config['user'], server.sip_config['client_host'], server.sip_config['client_port']),
+ to_uri='sip:666@{}:{}'.format(server.sip_config['server_host'], server.sip_config['server_port']),
+ )
+
+ response = yield from subscribe_dialog.send(
+ method='SUBSCRIBE',
+ headers={'Expires': '1800',
+ 'Event': 'dialog',
+ 'Accept': 'application/dialog-info+xml'}
+ )
+
+ assert response.status_code == 501
+ assert response.status_message == 'Not Implemented'
+ server_app.close()
+
+
+@asyncio.coroutine
+def test_exception_in_handler(test_server, protocol, loop):
+
+ @asyncio.coroutine
+ def handler(dialog, request):
+ raise RuntimeError('TestError')
+
+ app = aiosip.Application(loop=loop)
+
+ server_app = aiosip.Application(loop=loop)
+ server_app.dialplan.add_user('pytest', {'SUBSCRIBE': handler})
+ server = yield from test_server(server_app)
+
+ connection = yield from app.connect(
+ protocol=protocol,
+ local_addr=(server.sip_config['client_host'], server.sip_config['client_port']),
+ remote_addr=(server.sip_config['server_host'], server.sip_config['server_port'])
+ )
+
+ subscribe_dialog = connection.create_dialog(
+ from_uri='sip:{}@{}:{}'.format(server.sip_config['user'], server.sip_config['client_host'], server.sip_config['client_port']),
+ to_uri='sip:666@{}:{}'.format(server.sip_config['server_host'], server.sip_config['server_port']),
+ )
+
+ response = yield from subscribe_dialog.send(
+ method='SUBSCRIBE',
+ headers={'Expires': '1800',
+ 'Event': 'dialog',
+ 'Accept': 'application/dialog-info+xml'}
+ )
+
+ assert response.status_code == 500
+ assert response.status_message == 'Server Internal Error'
+ server_app.close()
|
Add test for response <I> & <I>
|
Eyepea_aiosip
|
train
|
347073a248ecd844f259c6ef821c5cc27d1b07d1
|
diff --git a/networking_cisco/plugins/ml2/drivers/cisco/nexus/mech_cisco_nexus.py b/networking_cisco/plugins/ml2/drivers/cisco/nexus/mech_cisco_nexus.py
index <HASH>..<HASH> 100644
--- a/networking_cisco/plugins/ml2/drivers/cisco/nexus/mech_cisco_nexus.py
+++ b/networking_cisco/plugins/ml2/drivers/cisco/nexus/mech_cisco_nexus.py
@@ -17,6 +17,7 @@
ML2 Mechanism Driver for Cisco Nexus platforms.
"""
+import eventlet
import threading
from oslo_concurrency import lockutils
@@ -47,6 +48,12 @@ LOG = logging.getLogger(__name__)
HOST_NOT_FOUND = _LW("Host %s not defined in switch configuration section.")
+# Delay the start of the monitor thread to avoid problems with Neutron server
+# process forking. One problem observed was ncclient RPC sync close_session
+# call hanging during initial _monitor_thread() processing to replay existing
+# database.
+DELAY_MONITOR_THREAD = 30
+
class CiscoNexusCfgMonitor(object):
"""Replay config on communication failure between Openstack to Nexus."""
@@ -172,7 +179,7 @@ class CiscoNexusMechanismDriver(api.MechanismDriver):
self.monitor_lock = threading.Lock()
# Start the monitor thread
if self.monitor_timeout > 0:
- self._monitor_thread()
+ eventlet.spawn_after(DELAY_MONITOR_THREAD, self._monitor_thread)
def set_switch_ip_and_active_state(self, switch_ip, state):
self._switch_state[switch_ip, '_connect_active'] = state
|
ML2 cisco_nexus MD: Config hangs when replay enabled
When multiple neutron-server processes are created (rpc_workers > 0)
the cisco_nexus monitor_thread was hanging.
Similar issues seen with other MDs. More info:
<URL>
|
openstack_networking-cisco
|
train
|
77a02cb23fa223801225d59b5fe30f5f4cd8b627
|
diff --git a/src/JumpGate/Users/Http/Routes/Registration.php b/src/JumpGate/Users/Http/Routes/Registration.php
index <HASH>..<HASH> 100644
--- a/src/JumpGate/Users/Http/Routes/Registration.php
+++ b/src/JumpGate/Users/Http/Routes/Registration.php
@@ -19,11 +19,8 @@ class Registration extends BaseRoute implements Routes
public function routes(Router $router)
{
- // Social auth doesn't allow for registration.
- // Also, if the site has disabled registration, don't register these routes.
- if (config('jumpgate.users.social_auth_only')
- || ! config('jumpgate.users.settings.allow_registration')
- ) {
+ // If the site has disabled registration, don't register these routes.
+ if (! config('jumpgate.users.settings.allow_registration')) {
return true;
}
diff --git a/src/config/users.php b/src/config/users.php
index <HASH>..<HASH> 100755
--- a/src/config/users.php
+++ b/src/config/users.php
@@ -25,8 +25,18 @@ return [
*/
'settings' => [
+ // This will send out an email for the user to activate.
'require_email_activation' => false,
+
+ // If this is disabled, users will not be able to register on the site at all.
+ // The route and menu link will be removed. The LoggingIn and Registering
+ // events will also check this and force it to not work. To add a user
+ // when registration is disabled, you will have to add them through
+ // the admin panel. They can only log in if the user exists.
'allow_registration' => false,
+
+ // This will give you the option in the admin dashboard to send
+ // user an invitation to the site through email.
'allow_invitations' => false,
],
@@ -65,8 +75,9 @@ return [
| Routing
|--------------------------------------------------------------------------
|
- | when redirecting a user to login, we don't know which route to send
- | them to. This lets us know what route to aim for.
+ | When redirecting a user to login, we don't know which route to send
+ | them to. This lets us know what route to aim for. If using
+ | social auth, set this to auth.social.login.
|
*/
@@ -110,6 +121,8 @@ return [
| Once you set this to true, make sure to remove any routes pointing to
| the non-social versions (ie auth.login and auth.register)
|
+ | NOTE: Setting this to true does not disable registration.
+ |
*/
'social_auth_only' => false,
diff --git a/src/publish/Http/Composers/Menu.php b/src/publish/Http/Composers/Menu.php
index <HASH>..<HASH> 100644
--- a/src/publish/Http/Composers/Menu.php
+++ b/src/publish/Http/Composers/Menu.php
@@ -46,10 +46,14 @@ class Menu
$link->name = 'Login';
$link->url = route('auth.login');
});
- $rightMenu->link('register', function (Link $link) {
- $link->name = 'Register';
- $link->url = route('auth.register');
- });
+
+ // Don't show a link if we don't allow registration.
+ if (config('jumpgate.users.settings.allow_registration')) {
+ $rightMenu->link('register', function (Link $link) {
+ $link->name = 'Register';
+ $link->url = route('auth.register');
+ });
+ }
}
if (auth()->check()) {
|
Removing registration menu item when its disabled
|
JumpGateio_Users
|
train
|
43dc4da7e2b449cedcfc76d2ddf36757e3714b80
|
diff --git a/core/client/mobile-interactions.js b/core/client/mobile-interactions.js
index <HASH>..<HASH> 100644
--- a/core/client/mobile-interactions.js
+++ b/core/client/mobile-interactions.js
@@ -7,54 +7,56 @@
FastClick.attach(document.body);
+ // ### general wrapper to handle conditional screen size actions
+ function responsiveAction(event, mediaCondition, cb) {
+ if (!window.matchMedia(mediaCondition).matches) {
+ return;
+ }
+
+ event.preventDefault();
+ event.stopPropagation();
+ cb();
+ }
+
// ### Show content preview when swiping left on content list
$('.manage').on('click', '.content-list ol li', function (event) {
- if (window.matchMedia('(max-width: 800px)').matches) {
- event.preventDefault();
- event.stopPropagation();
+ responsiveAction(event, '(max-width: 800px)', function () {
$('.content-list').animate({right: '100%', left: '-100%', 'margin-right': '15px'}, 300);
$('.content-preview').animate({right: '0', left: '0', 'margin-left': '0'}, 300);
- }
+ });
});
// ### Hide content preview
$('.manage').on('click', '.content-preview .button-back', function (event) {
- if (window.matchMedia('(max-width: 800px)').matches) {
- event.preventDefault();
- event.stopPropagation();
+ responsiveAction(event, '(max-width: 800px)', function () {
$('.content-list').animate({right: '0', left: '0', 'margin-right': '0'}, 300);
$('.content-preview').animate({right: '-100%', left: '100%', 'margin-left': '15px'}, 300);
- }
+ });
});
// ### Show settings options page when swiping left on settings menu link
$('.settings').on('click', '.settings-menu li', function (event) {
- if (window.matchMedia('(max-width: 800px)').matches) {
- event.preventDefault();
- event.stopPropagation();
+ responsiveAction(event, '(max-width: 800px)', function () {
$('.settings-sidebar').animate({right: '100%', left: '-102%', 'margin-right': '15px'}, 300);
$('.settings-content').animate({right: '0', left: '0', 'margin-left': '0'}, 300);
$('.settings-content .button-back, .settings-content .button-save').css('display', 'inline-block');
- }
+ });
});
// ### Hide settings options page
$('.settings').on('click', '.settings-content .button-back', function (event) {
- if (window.matchMedia('(max-width: 800px)').matches) {
- event.preventDefault();
- event.stopPropagation();
+ responsiveAction(event, '(max-width: 800px)', function () {
$('.settings-sidebar').animate({right: '0', left: '0', 'margin-right': '0'}, 300);
$('.settings-content').animate({right: '-100%', left: '100%', 'margin-left': '15'}, 300);
$('.settings-content .button-back, .settings-content .button-save').css('display', 'none');
- }
+ });
});
// ### Toggle the sidebar menu
$('[data-off-canvas]').on('click', function (event) {
- if (window.matchMedia('(max-width: 650px)').matches) {
- event.preventDefault();
+ responsiveAction(event, '(max-width: 650px)', function () {
$('body').toggleClass('off-canvas');
- }
+ });
});
-}());
\ No newline at end of file
+}());
|
abstract mobile interactions js
DRY up repeated code and simplify logic.
|
TryGhost_Ghost
|
train
|
5f5527c726841cdefb82965a645d554767c5a6a9
|
diff --git a/activerecord/lib/active_record/relation/finder_methods.rb b/activerecord/lib/active_record/relation/finder_methods.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/relation/finder_methods.rb
+++ b/activerecord/lib/active_record/relation/finder_methods.rb
@@ -134,7 +134,11 @@ module ActiveRecord
def last(*args)
if args.any?
if args.first.kind_of?(Integer) || (loaded? && !args.first.kind_of?(Hash))
- to_a.last(*args)
+ if order_values.empty? && reorder_value.nil?
+ order("#{primary_key} DESC").limit(*args).reverse
+ else
+ to_a.last(*args)
+ end
else
apply_finder_options(args.first).last
end
diff --git a/activerecord/test/cases/finder_test.rb b/activerecord/test/cases/finder_test.rb
index <HASH>..<HASH> 100644
--- a/activerecord/test/cases/finder_test.rb
+++ b/activerecord/test/cases/finder_test.rb
@@ -243,8 +243,25 @@ class FinderTest < ActiveRecord::TestCase
end
end
- def test_first_with_integer_should_use_sql_limit
+ def test_first_and_last_with_integer_should_use_sql_limit
assert_sql(/LIMIT 2/) { Topic.first(2).entries }
+ assert_sql(/LIMIT 5/) { Topic.last(5).entries }
+ end
+
+ def test_last_with_integer_and_order_should_keep_the_order
+ assert_equal Topic.order("title").to_a.last(2), Topic.order("title").last(2)
+ end
+
+ def test_last_with_integer_and_order_should_not_use_sql_limit
+ query = assert_sql { Topic.order("title").last(5).entries }
+ assert_equal 1, query.length
+ assert_no_match(/LIMIT/, query.first)
+ end
+
+ def test_last_with_integer_and_reorder_should_not_use_sql_limit
+ query = assert_sql { Topic.reorder("title").last(5).entries }
+ assert_equal 1, query.length
+ assert_no_match(/LIMIT/, query.first)
end
def test_first_and_last_with_integer_should_return_an_array
|
Use LIMIT sql word in last when it's possible
|
rails_rails
|
train
|
5586c15241a0517ed70f50e44f1225d5e0847870
|
diff --git a/src/Blob/BlobRestProxy.php b/src/Blob/BlobRestProxy.php
index <HASH>..<HASH> 100644
--- a/src/Blob/BlobRestProxy.php
+++ b/src/Blob/BlobRestProxy.php
@@ -1772,8 +1772,13 @@ class BlobRestProxy extends ServiceRestProxy implements IBlob
};
//add number of concurrency if specified int options.
- $requestOptions = $options->getNumberOfConcurrency() == null?
- array() : array($options->getNumberOfConcurrency);
+ if ($options->getNumberOfConcurrency() == null) {
+ $requestOptions = array();
+ } else {
+ $requestOptions = array(
+ 'number_of_concurrency' => $options->getNumberOfConcurrency(),
+ );
+ }
//Send the request concurrently.
//Does not need to evaluate the results. If operation not successful,
|
Fix usage of number of concurrency in chunked uploads
|
Azure_azure-storage-php
|
train
|
b57d5db12934a1d88a3f9e4fcd7e473bb728e790
|
diff --git a/.buildkite/nightly.py b/.buildkite/nightly.py
index <HASH>..<HASH> 100644
--- a/.buildkite/nightly.py
+++ b/.buildkite/nightly.py
@@ -33,7 +33,8 @@ if __name__ == "__main__":
'.buildkite/scripts/pypi.sh',
# Publish
'export PYTHONDONTWRITEBYTECODE=1',
- 'python bin/publish.py publish --nightly --autoclean',
+ 'pip install -e python_modules/automation',
+ 'dagster-release publish --nightly --autoclean',
)
.build(),
]
|
Fix nightly build
Test Plan: buildkite
Reviewers: dgibson, alangenfeld
Reviewed By: dgibson
Differential Revision: <URL>
|
dagster-io_dagster
|
train
|
c3badacbe26a07bea87bbe7e6da943855fb2ff61
|
diff --git a/src/Model/Table/CalendarsTable.php b/src/Model/Table/CalendarsTable.php
index <HASH>..<HASH> 100644
--- a/src/Model/Table/CalendarsTable.php
+++ b/src/Model/Table/CalendarsTable.php
@@ -187,12 +187,10 @@ class CalendarsTable extends Table
continue;
}
+ // we don't pass period as it doesn't have time limits.
$diffCalendar = $this->_getItemDifferences(
$this,
- $calendar,
- [
- 'range' => (!empty($options['period']) ? $options['period'] : []),
- ]
+ $calendar
);
$result['modified'][] = $this->saveItemDifferences($this, $diffCalendar);
@@ -243,9 +241,7 @@ class CalendarsTable extends Table
$diff = $this->_getItemDifferences(
$table,
$item,
- [
- 'range' => (!empty($options['period']) ? $options['period'] : []),
- ]
+ $options
);
$result['modified'][] = $this->saveItemDifferences($table, $diff, [
@@ -255,7 +251,11 @@ class CalendarsTable extends Table
]);
}
- $ignored = $this->_itemsToDelete($table, $result['modified']);
+ $ignored = $this->_itemsToDelete($table, $result['modified'], [
+ 'extra_fields' => [
+ 'calendar_id' => $calendarInfo['calendar']->id
+ ],
+ ]);
$result['removed'] = $this->saveItemDifferences($table, ['delete' => $ignored]);
}
@@ -351,10 +351,12 @@ class CalendarsTable extends Table
$conditions[$source] = $item[$source];
}
+ $conditions[$sourceId] = $item[$sourceId];
+
$query = $table->find()
- ->where($conditions)
- ->all();
+ ->where($conditions);
+ $query->all();
$dbItems = $query->toArray();
$toAdd = $this->_itemsToAdd($item, $dbItems, $sourceId);
@@ -449,20 +451,24 @@ class CalendarsTable extends Table
$source = empty($options['source']) ? 'source' : $options['source'];
$sourceId = empty($options['source_id']) ? 'source_id' : $options['source_id'];
- if (!empty($options['range'])) {
- if (!empty($options['range']['start_date'])) {
- $conditions['start_date >='] = $options['range']['start_date'];
+ if (!empty($options['period'])) {
+ if (!empty($options['period']['start_date'])) {
+ $conditions['start_date >='] = $options['period']['start_date'];
}
- if (!empty($options['range']['end_date'])) {
- $conditions['end_date <='] = $options['range']['end_date'];
+ if (!empty($options['period']['end_date'])) {
+ $conditions['end_date <='] = $options['period']['end_date'];
}
}
+ if (!empty($options['extra_fields']['calendar_id'])) {
+ $conditions['calendar_id'] = $options['extra_fields']['calendar_id'];
+ }
+
$query = $table->find()
- ->where($conditions)
- ->all();
+ ->where($conditions);
+ $query->all();
$dbItems = $query->toArray();
if (empty($dbItems) || empty($items)) {
|
Making sure events removed correctly (task #<I>)
|
QoboLtd_cakephp-calendar
|
train
|
fcbd2acdde370132e95653ca12bf0696f78421cd
|
diff --git a/registry/consul_registry.go b/registry/consul_registry.go
index <HASH>..<HASH> 100644
--- a/registry/consul_registry.go
+++ b/registry/consul_registry.go
@@ -16,13 +16,38 @@ type consulRegistry struct {
services map[string]*Service
}
+func encodeEndpoints(en []*Endpoint) []string {
+ var tags []string
+ for _, e := range en {
+ if b, err := json.Marshal(e); err == nil {
+ tags = append(tags, "e="+string(b))
+ }
+ }
+ return tags
+}
+
+func decodeEndpoints(tags []string) []*Endpoint {
+ var en []*Endpoint
+ for _, tag := range tags {
+ if len(tag) == 0 || tag[0] != 'e' {
+ continue
+ }
+
+ var e *Endpoint
+ if err := json.Unmarshal([]byte(tag[2:]), &e); err == nil {
+ en = append(en, e)
+ }
+ }
+ return en
+}
+
func encodeMetadata(md map[string]string) []string {
var tags []string
for k, v := range md {
if b, err := json.Marshal(map[string]string{
k: v,
}); err == nil {
- tags = append(tags, string(b))
+ tags = append(tags, "t="+string(b))
}
}
return tags
@@ -31,8 +56,12 @@ func encodeMetadata(md map[string]string) []string {
func decodeMetadata(tags []string) map[string]string {
md := make(map[string]string)
for _, tag := range tags {
+ if len(tag) == 0 || tag[0] != 't' {
+ continue
+ }
+
var kv map[string]string
- if err := json.Unmarshal([]byte(tag), &kv); err == nil {
+ if err := json.Unmarshal([]byte(tag[2:]), &kv); err == nil {
for k, v := range kv {
md[k] = v
}
@@ -80,6 +109,7 @@ func (c *consulRegistry) Register(s *Service) error {
node := s.Nodes[0]
tags := encodeMetadata(node.Metadata)
+ tags = append(tags, encodeEndpoints(s.Endpoints)...)
_, err := c.Client.Catalog().Register(&consul.CatalogRegistration{
Node: node.Id,
@@ -116,6 +146,7 @@ func (c *consulRegistry) GetService(name string) (*Service, error) {
continue
}
+ cs.Endpoints = decodeEndpoints(s.ServiceTags)
cs.Name = s.ServiceName
cs.Nodes = append(cs.Nodes, &Node{
Id: s.ServiceID,
diff --git a/registry/consul_watcher.go b/registry/consul_watcher.go
index <HASH>..<HASH> 100644
--- a/registry/consul_watcher.go
+++ b/registry/consul_watcher.go
@@ -42,6 +42,7 @@ func (cw *consulWatcher) serviceHandler(idx uint64, data interface{}) {
cs := &Service{}
for _, e := range entries {
+ cs.Endpoints = decodeEndpoints(e.Service.Tags)
cs.Name = e.Service.Service
cs.Nodes = append(cs.Nodes, &Node{
Id: e.Service.ID,
diff --git a/server/rpc_handler.go b/server/rpc_handler.go
index <HASH>..<HASH> 100644
--- a/server/rpc_handler.go
+++ b/server/rpc_handler.go
@@ -21,6 +21,7 @@ func newRpcHandler(handler interface{}) Handler {
for m := 0; m < typ.NumMethod(); m++ {
if e := extractEndpoint(typ.Method(m)); e != nil {
+ e.Name = name + "." + e.Name
endpoints = append(endpoints, e)
}
}
|
add endpoint data for consul registry
|
micro_go-micro
|
train
|
99b477776608dbd30282b27753d6ec057d7752f0
|
diff --git a/bigchaindb/backend/mongodb/changefeed.py b/bigchaindb/backend/mongodb/changefeed.py
index <HASH>..<HASH> 100644
--- a/bigchaindb/backend/mongodb/changefeed.py
+++ b/bigchaindb/backend/mongodb/changefeed.py
@@ -89,13 +89,14 @@ def run_changefeed(conn, table, last_ts):
try:
# XXX: hack to force reconnection, in case the connection
# is lost while waiting on the cursor. See #1154.
- conn.connection = 1
+ conn._conn = None
namespace = conn.dbname + '.' + table
- cursor = conn.conn.local.oplog.rs.find(
+ query = conn.query().local.oplog.rs.find(
{'ns': namespace, 'ts': {'$gt': last_ts}},
{'o._id': False},
cursor_type=pymongo.CursorType.TAILABLE_AWAIT
)
+ cursor = conn.run(query)
logging.debug('Tailing oplog at %s/%s', namespace, last_ts)
while cursor.alive:
try:
diff --git a/bigchaindb/backend/mongodb/query.py b/bigchaindb/backend/mongodb/query.py
index <HASH>..<HASH> 100644
--- a/bigchaindb/backend/mongodb/query.py
+++ b/bigchaindb/backend/mongodb/query.py
@@ -286,8 +286,9 @@ def get_last_voted_block(conn, node_pubkey):
@register_query(MongoDBConnection)
def get_new_blocks_feed(conn, start_block_id):
namespace = conn.dbname + '.bigchain'
- query = {'o.id': start_block_id, 'op': 'i', 'ns': namespace}
+ match = {'o.id': start_block_id, 'op': 'i', 'ns': namespace}
# Neccesary to find in descending order since tests may write same block id several times
- last_ts = conn.conn.local.oplog.rs.find(query).sort('$natural', -1).next()['ts']
+ query = conn.query().local.oplog.rs.find(match).sort('$natural', -1).next()['ts']
+ last_ts = conn.run(query)
feed = run_changefeed(conn, 'bigchain', last_ts)
return (evt['o'] for evt in feed if evt['op'] == 'i')
diff --git a/tests/backend/mongodb/test_changefeed.py b/tests/backend/mongodb/test_changefeed.py
index <HASH>..<HASH> 100644
--- a/tests/backend/mongodb/test_changefeed.py
+++ b/tests/backend/mongodb/test_changefeed.py
@@ -142,8 +142,7 @@ def test_connection_failure():
from bigchaindb.backend.mongodb.changefeed import run_changefeed
conn = mock.MagicMock()
- find = conn.conn.local.oplog.rs.find
- find.side_effect = [ConnectionError(), RuntimeError()]
+ conn.run.side_effect = [ConnectionError(), RuntimeError()]
changefeed = run_changefeed(conn, 'backlog', -1)
with pytest.raises(RuntimeError):
for record in changefeed:
|
use MongoDBConnection.run() and fix changefeed reconnect
|
bigchaindb_bigchaindb
|
train
|
d7554d8cc8a7c2d9227b1b76a32867a4673a82b9
|
diff --git a/airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py b/airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py
index <HASH>..<HASH> 100644
--- a/airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py
+++ b/airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py
@@ -14,6 +14,8 @@ depends_on = None
from alembic import op
import sqlalchemy as sa
+from sqlalchemy.engine.reflection import Inspector
+from airflow import settings
connectionhelper = sa.Table(
@@ -24,15 +26,21 @@ connectionhelper = sa.Table(
)
+
+
def upgrade():
- op.add_column('connection',
- sa.Column('is_encrypted', sa.Boolean,
- unique=False, default=False))
-
- conn = op.get_bind()
- conn.execute(
- connectionhelper.update().values(is_encrypted=False)
- )
+ inspector = Inspector.from_engine(settings.engine)
+ col_names = [col['name'] for col in inspector.get_columns('connection')]
+
+ if 'is_encrypted' not in col_names:
+ op.add_column(
+ 'connection',
+ sa.Column('is_encrypted', sa.Boolean, unique=False, default=False))
+
+ conn = op.get_bind()
+ conn.execute(
+ connectionhelper.update().values(is_encrypted=False)
+ )
def downgrade():
|
Improving is_encrypted migration to only add column if not exists
|
apache_airflow
|
train
|
c93234926dc0c3cad53dfe782979082f02a63dc3
|
diff --git a/core/src/main/java/io/undertow/server/protocol/framed/AbstractFramedStreamSourceChannel.java b/core/src/main/java/io/undertow/server/protocol/framed/AbstractFramedStreamSourceChannel.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/io/undertow/server/protocol/framed/AbstractFramedStreamSourceChannel.java
+++ b/core/src/main/java/io/undertow/server/protocol/framed/AbstractFramedStreamSourceChannel.java
@@ -532,8 +532,9 @@ public abstract class AbstractFramedStreamSourceChannel<C extends AbstractFramed
}
}
} finally {
- framedChannel.notifyFrameReadComplete(this);
-
+ if(pendingFrameData.isEmpty()) {
+ framedChannel.notifyFrameReadComplete(this);
+ }
}
}
}
|
Don't notify frame read complete till all pending data has been read
|
undertow-io_undertow
|
train
|
254c81af1b53758307f426af2319375f5f7966c4
|
diff --git a/app/Http/Controllers/Dashboard/SettingsController.php b/app/Http/Controllers/Dashboard/SettingsController.php
index <HASH>..<HASH> 100644
--- a/app/Http/Controllers/Dashboard/SettingsController.php
+++ b/app/Http/Controllers/Dashboard/SettingsController.php
@@ -168,7 +168,9 @@ class SettingsController extends Controller
return Redirect::route('dashboard.settings.setup')->withErrors(trans('dashboard.settings.edit.failure'));
}
- Lang::setLocale(Binput::get('app_locale'));
+ if (Binput::has('app_locale')) {
+ Lang::setLocale(Binput::get('app_locale'));
+ }
return Redirect::route('dashboard.settings.setup')
->withSuccess(trans('dashboard.settings.edit.success'));
|
Fixed wrong Locale on Settings form submit
Except Setup section, when you submit a form, the Success message doesn't use your current locale but always English (that is the default language).
|
CachetHQ_Cachet
|
train
|
74713f9680522043f92cebdcf8e0419f0dfb9c45
|
diff --git a/pymomo/scripts/momo.py b/pymomo/scripts/momo.py
index <HASH>..<HASH> 100644
--- a/pymomo/scripts/momo.py
+++ b/pymomo/scripts/momo.py
@@ -5,7 +5,6 @@ import shlex
from pymomo.utilities.typedargs.shell import HierarchicalShell, posix_lex
from pymomo.exceptions import *
from pymomo.utilities.typedargs import annotate, type_system
-from pymomo.commander.meta import initialization
from pymomo.utilities import build
from pymomo.utilities.rcfile import RCFile
from multiprocessing import freeze_support
@@ -26,9 +25,7 @@ def main():
return 0
shell = HierarchicalShell('momo', no_rc=norc)
-
- shell.root_update(annotate.find_all(initialization))
-
+
shell.root_add("build", "pymomo.utilities.build,build")
shell.root_add("SystemLog", "pymomo.syslog")
shell.root_add("pcb", "pymomo.pcb")
|
Fixed momo to have new hwmanager
|
iotile_coretools
|
train
|
34e73a0508baae415acd2fdfb746aa2ee3233254
|
diff --git a/lib/i18n/translation.rb b/lib/i18n/translation.rb
index <HASH>..<HASH> 100755
--- a/lib/i18n/translation.rb
+++ b/lib/i18n/translation.rb
@@ -1,17 +1,25 @@
module I18n
# This module should be mixed into every object that can be translated (where
# the localization results into a string with pluralization and interpolation)
+ class << self
+ def keyify(scope, key)
+ keys = []
+ keys += scope.to_s.split(/\./) if scope
+ keys += key.to_s.split(/\./)
+ keys.map &:to_sym
+ end
+ end
+
module Translation
# Main translation method
def translate(*args)
args = typify_localization_args(args)
options = args.last.is_a?(Hash) ? args.pop : {}
- options[:keys] = Array(options.delete(:scope)).dup << args.shift
+ options[:keys] = I18n.keyify options.delete(:scope), args.shift
options[:locale] ||= args.shift
I18n.backend.translate options
- end
-
+ end
alias :t :translate
protected
diff --git a/lib/locale.rb b/lib/locale.rb
index <HASH>..<HASH> 100644
--- a/lib/locale.rb
+++ b/lib/locale.rb
@@ -24,8 +24,9 @@ class Locale < String
protected
- def typify_localization_args(args)
- args.insert(1, self) if args[1].nil? || args[1].is_a?(Hash)
+ def typify_localization_args(args)
+ # TODO raise if no key in args[0] given?
+ args[1].is_a?(Hash) ? args.insert(1, self) : args[1] = self
args
end
end
\ No newline at end of file
diff --git a/test/locale_test.rb b/test/locale_test.rb
index <HASH>..<HASH> 100644
--- a/test/locale_test.rb
+++ b/test/locale_test.rb
@@ -1,35 +1,45 @@
-$:.unshift "lib/i18n/lib"
+$:.unshift 'lib/i18n/lib'
require 'rubygems'
require 'mocha'
require 'test/unit'
+require 'active_support'
+
require 'i18n'
require 'i18n/backend/simple'
I18n.backend = I18n::Backend::Simple
require 'i18n/backend/translations'
-gem 'mocha', ">=0.5"
+gem 'mocha', '>=0.5'
class LocaleTest < Test::Unit::TestCase
def setup
+ @locale = Locale.current
end
def test_translate_given_a_translation_key_it_inserts_itself_to_localization_args
I18n.backend = mock
I18n.backend.expects(:translate).with(:locale => Locale['en-US'], :keys => [:currency])
- Locale.current.translate(:currency)
+ @locale.t :currency
end
def test_translate_given_a_translation_key_and_options_it_inserts_itself_to_localization_args
I18n.backend = mock
I18n.backend.expects(:translate).with(:locale => Locale['en-US'], :keys => [:currency, :format, :precision])
- Locale.current.translate(:precision, :scope => [:currency, :format])
+ @locale.t :precision, :scope => 'currency.format'
end
def test_translate_given_a_translation_key_locale_and_options_it_replaces_the_locale_in_localization_args
I18n.backend = mock
I18n.backend.expects(:translate).with(:locale => Locale['en-US'], :keys => [:currency, :format, :precision])
- Locale.current.translate(:precision, Locale['de-DE'], :scope => [:currency, :format])
+ @locale.t :precision, Locale['de-DE'], :scope => 'currency.format'
+ end
+
+ def test_locale_with_options_using_scope_works
+ I18n.backend.expects(:translate).with(:locale => Locale['en-US'], :keys => [:currency, :format, :precision])
+ @locale.with_options :scope => 'currency.format' do |locale|
+ locale.t :precision
+ end
end
end
\ No newline at end of file
|
moving to dot-separated strings as keys/scopes
|
ruby-i18n_i18n
|
train
|
c98517accee2081b73fffb1f453eb67a71198bb7
|
diff --git a/pyrfc3339/tests/tests.py b/pyrfc3339/tests/tests.py
index <HASH>..<HASH> 100644
--- a/pyrfc3339/tests/tests.py
+++ b/pyrfc3339/tests/tests.py
@@ -53,7 +53,7 @@ class TestCore():
Test generating timestamps with microseconds.
'''
- dt = datetime(2009, 01, 01, 10, 02, 03, 500000, pytz.utc)
+ dt = datetime(2009, 1, 1, 10, 2, 3, 500000, pytz.utc)
timestamp = generate(dt, microseconds = True)
eq_(timestamp, '2009-01-01T10:02:03.500000Z')
|
Remove leading zeros from integers (they confuse 2to3).
|
kurtraschke_pyRFC3339
|
train
|
d036d99d4f4d86e43ca7dc9c133bf5a011c63a91
|
diff --git a/search_queries_boosting.go b/search_queries_boosting.go
index <HASH>..<HASH> 100644
--- a/search_queries_boosting.go
+++ b/search_queries_boosting.go
@@ -10,27 +10,27 @@ package elastic
// http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl-boosting-query.html
type BoostingQuery struct {
Query
- positiveClauses []Query
- negativeClauses []Query
+ positiveClause []Query
+ negativeClause []Query
negativeBoost *float32
}
// Creates a new boosting query.
func NewBoostingQuery() BoostingQuery {
q := BoostingQuery{
- positiveClauses: make([]Query, 0),
- negativeClauses: make([]Query, 0),
+ positiveClause: make([]Query, 0),
+ negativeClause: make([]Query, 0),
}
return q
}
-func (q BoostingQuery) Positive(queries ...Query) BoostingQuery {
- q.positiveClauses = append(q.positiveClauses, queries...)
+func (q BoostingQuery) Positive(positive ...Query) BoostingQuery {
+ q.positiveClause = positive
return q
}
-func (q BoostingQuery) Negative(queries ...Query) BoostingQuery {
- q.negativeClauses = append(q.negativeClauses, queries...)
+func (q BoostingQuery) Negative(negative ...Query) BoostingQuery {
+ q.negativeClause = negative
return q
}
@@ -63,22 +63,22 @@ func (q BoostingQuery) Source() interface{} {
query["boosting"] = boostingClause
// positive
- if len(q.positiveClauses) == 1 {
- boostingClause["positive"] = q.positiveClauses[0].Source()
- } else if len(q.positiveClauses) > 1 {
+ if len(q.positiveClause) == 1 {
+ boostingClause["positive"] = q.positiveClause[0].Source()
+ } else if len(q.positiveClause) > 1 {
clauses := make([]interface{}, 0)
- for _, subQuery := range q.positiveClauses {
+ for _, subQuery := range q.positiveClause {
clauses = append(clauses, subQuery.Source())
}
boostingClause["positive"] = clauses
}
// negative
- if len(q.negativeClauses) == 1 {
- boostingClause["negative"] = q.negativeClauses[0].Source()
- } else if len(q.negativeClauses) > 1 {
+ if len(q.negativeClause) == 1 {
+ boostingClause["negative"] = q.negativeClause[0].Source()
+ } else if len(q.negativeClause) > 1 {
clauses := make([]interface{}, 0)
- for _, subQuery := range q.negativeClauses {
+ for _, subQuery := range q.negativeClause {
clauses = append(clauses, subQuery.Source())
}
boostingClause["negative"] = clauses
|
Removing ability to stack positive or negative query.
|
olivere_elastic
|
train
|
7793b54479808c371715ac3369f2f0713070add4
|
diff --git a/editconfig_gedcom.php b/editconfig_gedcom.php
index <HASH>..<HASH> 100644
--- a/editconfig_gedcom.php
+++ b/editconfig_gedcom.php
@@ -646,6 +646,13 @@ print_header(i18n::translate('GEDCOM configuration'));
<td class="optionbox width60"><input type="text" dir="ltr" name="NEW_HOME_SITE_TEXT" value="<?php print htmlspecialchars($HOME_SITE_TEXT, ENT_COMPAT, 'UTF-8'); ?>" size="50" tabindex="<?php echo ++$i; ?>" /></td>
</tr>
<tr>
+ <tr>
+ <td class="descriptionbox nowrap">
+ <?php echo i18n::translate('Add to TITLE header tag'), help_link('META_TITLE'); ?>
+ </td>
+ <td class="optionbox width60"><input type="text" dir="ltr" name="NEW_META_TITLE" value="<?php print $META_TITLE; ?>" tabindex="<?php echo ++$i; ?>" /><br />
+ </td>
+ </tr>
<td class="descriptionbox nowrap">
<?php echo i18n::translate('Description META tag'), help_link('META_DESCRIPTION'); ?>
</td>
|
Put back META_TITLE option removed from GEDCOM configuration
|
fisharebest_webtrees
|
train
|
32c06668183ecdf18164a5983bf1fec765eb9914
|
diff --git a/src/Application/Command/InitHandler.php b/src/Application/Command/InitHandler.php
index <HASH>..<HASH> 100644
--- a/src/Application/Command/InitHandler.php
+++ b/src/Application/Command/InitHandler.php
@@ -149,14 +149,14 @@ class InitHandler implements CommandHandlerInterface
'functions' => array('4', '5', '7', '73'),
'iniEntries' => array('4', '5', '7', '71', '73'),
'interfaces' => array('5', '7', '72'),
- 'releases' => array('4', '5', '70', '71', '72'),
+ 'releases' => array('4', '5', '70', '71', '72', '73'),
),
'standard' => array(
'classes' => array('4', '5', '7'),
'constants' => array('4', '5', '7', '71'),
'functions' => array('4', '5', '7', '71', '72', '73'),
'iniEntries' => array('4', '5', '7', '71'),
- 'releases' => array('4', '5', '7', '72'),
+ 'releases' => array('4', '5', '7', '72', '73'),
'methods' => array('4', '5', '7', '71'),
),
'apcu' => array(
|
be able to show releases of <I>.x versions
|
llaville_php-compatinfo-db
|
train
|
864ce5f946e1923900ba6fdb01921987451e7982
|
diff --git a/proxy/reverse.go b/proxy/reverse.go
index <HASH>..<HASH> 100644
--- a/proxy/reverse.go
+++ b/proxy/reverse.go
@@ -90,6 +90,7 @@ func (p *reverseProxy) ServeHTTP(rw http.ResponseWriter, clientreq *http.Request
return
}
+ requestCanceled := false
completeCh := make(chan bool, 1)
closeNotifier, ok := rw.(http.CloseNotifier)
if ok {
@@ -98,6 +99,8 @@ func (p *reverseProxy) ServeHTTP(rw http.ResponseWriter, clientreq *http.Request
case <-closeNotifier.CloseNotify():
tp, ok := p.transport.(*http.Transport)
if ok {
+ requestCanceled = true
+ log.Printf("proxy: request from %v canceled", clientreq.RemoteAddr)
tp.CancelRequest(proxyreq)
}
case <-completeCh:
@@ -118,6 +121,9 @@ func (p *reverseProxy) ServeHTTP(rw http.ResponseWriter, clientreq *http.Request
redirectRequest(proxyreq, ep.URL)
res, err = p.transport.RoundTrip(proxyreq)
+ if requestCanceled {
+ return
+ }
if err != nil {
log.Printf("proxy: failed to direct request to %s: %v", ep.URL.String(), err)
ep.Failed()
|
proxy: handle canceled proxy request gracefully
when a client of the proxy server cancels a request the proxy should not
set the endpoint state to unavailable
|
etcd-io_etcd
|
train
|
c8057810d8257095159ef7d0a5cf005a797808b0
|
diff --git a/voltron/gdbcmd.py b/voltron/gdbcmd.py
index <HASH>..<HASH> 100644
--- a/voltron/gdbcmd.py
+++ b/voltron/gdbcmd.py
@@ -58,7 +58,7 @@ class GDBHelper (DebuggerHelper):
try:
return gdb.selected_frame().architecture().name()
except:
- return re.search('\(currently (.*)\)', gdb.execute('show architecture', to_string=True)).groups(0)
+ return re.search('\(currently (.*)\)', gdb.execute('show architecture', to_string=True)).group(1)
@staticmethod
def helper():
|
Correctly extract arch on platforms where architecure().name() fails
|
snare_voltron
|
train
|
2c4209435af7c5b5426b22d79be5479d77ef82cc
|
diff --git a/dev/com.ibm.ws.security.fat.common/src/com/ibm/ws/security/fat/common/exceptions/TestActionException.java b/dev/com.ibm.ws.security.fat.common/src/com/ibm/ws/security/fat/common/exceptions/TestActionException.java
index <HASH>..<HASH> 100644
--- a/dev/com.ibm.ws.security.fat.common/src/com/ibm/ws/security/fat/common/exceptions/TestActionException.java
+++ b/dev/com.ibm.ws.security.fat.common/src/com/ibm/ws/security/fat/common/exceptions/TestActionException.java
@@ -30,11 +30,24 @@ public class TestActionException extends Exception {
if (cause == null) {
return "";
}
- String causeMsg = cause.getMessage();
- if (causeMsg == null || causeMsg.isEmpty()) {
- return "";
+ Throwable originalCause = getOriginalCause(cause);
+ if (originalCause == null) {
+ originalCause = cause;
+ }
+ return " " + originalCause.toString();
+ }
+
+ private static Throwable getOriginalCause(Throwable cause) {
+ if (cause == null) {
+ return null;
+ }
+ Throwable originalCause = cause;
+ Throwable currentCause = originalCause.getCause();
+ while (currentCause != null) {
+ originalCause = currentCause;
+ currentCause = currentCause.getCause();
}
- return " " + causeMsg;
+ return originalCause;
}
}
\ No newline at end of file
diff --git a/dev/com.ibm.ws.security.fat.common/test/com/ibm/ws/security/fat/common/exceptions/TestActionExceptionTest.java b/dev/com.ibm.ws.security.fat.common/test/com/ibm/ws/security/fat/common/exceptions/TestActionExceptionTest.java
index <HASH>..<HASH> 100644
--- a/dev/com.ibm.ws.security.fat.common/test/com/ibm/ws/security/fat/common/exceptions/TestActionExceptionTest.java
+++ b/dev/com.ibm.ws.security.fat.common/test/com/ibm/ws/security/fat/common/exceptions/TestActionExceptionTest.java
@@ -80,7 +80,7 @@ public class TestActionExceptionTest extends CommonTestClass {
* Tests:
* - Provided cause has no message
* Expects:
- * - Exception message should match the provided failure message (no sub-message from the cause)
+ * - Exception message should match the provided failure message, plus the toString() of the cause
* - "Exception occurred" message logged
*/
@Test
@@ -92,7 +92,7 @@ public class TestActionExceptionTest extends CommonTestClass {
try {
throw new TestActionException(method, defaultExceptionMsg, cause);
} catch (TestActionException e) {
- verifyPattern(e.getMessage(), "^" + Pattern.quote(defaultExceptionMsg) + "$");
+ verifyPattern(e.getMessage(), "^" + Pattern.quote(defaultExceptionMsg) + " " + Pattern.quote(cause.toString()) + "$");
assertStringInTrace(outputMgr, "Exception occurred in " + method);
}
} catch (Throwable t) {
@@ -104,7 +104,7 @@ public class TestActionExceptionTest extends CommonTestClass {
* Tests:
* - Provided cause includes its own message
* Expects:
- * - Exception message should match the provided failure message, plus the sub-message from the cause
+ * - Exception message should match the provided failure message, plus the toString() of the cause
* - "Exception occurred" message logged
*/
@Test
@@ -117,7 +117,36 @@ public class TestActionExceptionTest extends CommonTestClass {
try {
throw new TestActionException(method, defaultExceptionMsg, cause);
} catch (TestActionException e) {
- verifyPattern(e.getMessage(), "^" + Pattern.quote(defaultExceptionMsg) + " " + Pattern.quote(subMessage) + "$");
+ verifyPattern(e.getMessage(), "^" + Pattern.quote(defaultExceptionMsg) + " " + Pattern.quote(cause.toString()) + "$");
+ assertStringInTrace(outputMgr, "Exception occurred in " + method);
+ }
+ } catch (Throwable t) {
+ outputMgr.failWithThrowable(testName.getMethodName(), t);
+ }
+ }
+
+ /**
+ * Tests:
+ * - Provided cause includes its own message
+ * Expects:
+ * - Exception message should match the provided failure message, plus the toString() of the original cause
+ * - "Exception occurred" message logged
+ */
+ @Test
+ public void test_constructor_nestedCause() {
+ try {
+ String method = "some method";
+ String cause1Message = "Sub-message for the first cause";
+ Throwable cause1 = new Exception(cause1Message);
+ String cause2Message = "Sub-message for the second cause";
+ Throwable cause2 = new Exception(cause2Message, cause1);
+ String cause3Message = "Sub-message for the third cause";
+ Throwable cause3 = new Exception(cause3Message, cause2);
+
+ try {
+ throw new TestActionException(method, defaultExceptionMsg, cause3);
+ } catch (TestActionException e) {
+ verifyPattern(e.getMessage(), "^" + Pattern.quote(defaultExceptionMsg) + " " + Pattern.quote(cause1.toString()) + "$");
assertStringInTrace(outputMgr, "Exception occurred in " + method);
}
} catch (Throwable t) {
|
Security FAT infra update for TestActionExceptions
Updates the `TestActionException` class to unwrap a provided `Throwable` to get down to the original cause. The original cause is then included it in the `TestActionException`'s cause string.
|
OpenLiberty_open-liberty
|
train
|
52b0c3a0ebfe334ccf4cbca3ce3001cca1c088f0
|
diff --git a/tests/GeometryTest.php b/tests/GeometryTest.php
index <HASH>..<HASH> 100644
--- a/tests/GeometryTest.php
+++ b/tests/GeometryTest.php
@@ -422,11 +422,9 @@ class GeometryTest extends AbstractTestCase
['LINESTRING (1 2, 3 4)', true],
['LINESTRING (0 0, 0 1, 1 1, 1 0)', true],
['LINESTRING (1 0, 1 2, 2 1, 0 1)', false],
- ['MULTIPOINT EMPTY', true],
['MULTIPOINT (1 2)', true],
['MULTIPOINT (1 3)', true],
['MULTIPOINT (1 2, 1 3)', true],
- ['MULTIPOINT (1 2, 1 3, 1 2)', false],
['MULTIPOINT Z (1 2 3, 2 3 4)', true],
['MULTIPOINT Z (1 2 3, 1 2 4)', false],
['MULTIPOINT M (1 2 3, 2 3 4)', true],
|
Removed problematic geometry engine tests on MySQL
|
brick_geo
|
train
|
7d4a7ffbbcd472d039e981be83c263ab0020a788
|
diff --git a/src/Hprose/Async.php b/src/Hprose/Async.php
index <HASH>..<HASH> 100644
--- a/src/Hprose/Async.php
+++ b/src/Hprose/Async.php
@@ -46,12 +46,12 @@ namespace Hprose {
case 'swoole':
if (extension_loaded("swoole")) {
if (php_sapi_name() != "cli") {
- throw new Exception("swoole extension only can be used in cli.");
+ throw new \Exception("swoole extension only can be used in cli.");
}
self::initSwoole();
}
else {
- throw new Exception("You need to install swoole extension first.");
+ throw new \Exception("You need to install swoole extension first.");
}
break;
case 'event':
@@ -59,7 +59,7 @@ namespace Hprose {
self::initEvent();
}
else {
- throw new Exception("You need to install event extension first.");
+ throw new \Exception("You need to install event extension first.");
}
break;
case 'libevent':
@@ -67,11 +67,11 @@ namespace Hprose {
self::initLibEvent();
}
else {
- throw new Exception("You need to install libevent extension first.");
+ throw new \Exception("You need to install libevent extension first.");
}
break;
default:
- throw new Exception("You can only specify swoole, event or libevent.");
+ throw new \Exception("You can only specify swoole, event or libevent.");
break;
}
}
|
Changed Exception to \Exception
|
hprose_hprose-php
|
train
|
6332505f77b72e036fc377fa042f68ffd4fccd25
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.2:
+
+* Rename WebsocketEvent to WebSocketEvent to maintain consistent casing with the rest of the library.
+
## 0.1:
* Initial release
diff --git a/src/main/java/org/realityforge/gwt/websockets/client/event/CloseEvent.java b/src/main/java/org/realityforge/gwt/websockets/client/event/CloseEvent.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/realityforge/gwt/websockets/client/event/CloseEvent.java
+++ b/src/main/java/org/realityforge/gwt/websockets/client/event/CloseEvent.java
@@ -10,7 +10,7 @@ import org.realityforge.gwt.websockets.client.event.CloseEvent.Handler;
* Event fired when the web socket is closed.
*/
public class CloseEvent
- extends WebsocketEvent<Handler>
+ extends WebSocketEvent<Handler>
{
public static final int CLOSE_NORMAL = 1000;
public static final int CLOSE_GOING_AWAY = 1001;
diff --git a/src/main/java/org/realityforge/gwt/websockets/client/event/ErrorEvent.java b/src/main/java/org/realityforge/gwt/websockets/client/event/ErrorEvent.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/realityforge/gwt/websockets/client/event/ErrorEvent.java
+++ b/src/main/java/org/realityforge/gwt/websockets/client/event/ErrorEvent.java
@@ -9,7 +9,7 @@ import org.realityforge.gwt.websockets.client.event.ErrorEvent.Handler;
* Event fired when there is an error with the web socket.
*/
public class ErrorEvent
- extends WebsocketEvent<Handler>
+ extends WebSocketEvent<Handler>
{
public interface Handler
extends EventHandler
diff --git a/src/main/java/org/realityforge/gwt/websockets/client/event/MessageEvent.java b/src/main/java/org/realityforge/gwt/websockets/client/event/MessageEvent.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/realityforge/gwt/websockets/client/event/MessageEvent.java
+++ b/src/main/java/org/realityforge/gwt/websockets/client/event/MessageEvent.java
@@ -6,7 +6,7 @@ import org.realityforge.gwt.websockets.client.WebSocket;
import org.realityforge.gwt.websockets.client.event.MessageEvent.Handler;
public class MessageEvent
- extends WebsocketEvent<Handler>
+ extends WebSocketEvent<Handler>
{
public interface Handler
extends EventHandler
diff --git a/src/main/java/org/realityforge/gwt/websockets/client/event/OpenEvent.java b/src/main/java/org/realityforge/gwt/websockets/client/event/OpenEvent.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/realityforge/gwt/websockets/client/event/OpenEvent.java
+++ b/src/main/java/org/realityforge/gwt/websockets/client/event/OpenEvent.java
@@ -10,7 +10,7 @@ import org.realityforge.gwt.websockets.client.event.OpenEvent.Handler;
* Event fired when web socket successfully connects.
*/
public class OpenEvent
- extends WebsocketEvent<Handler>
+ extends WebSocketEvent<Handler>
{
public interface Handler
extends EventHandler
diff --git a/src/main/java/org/realityforge/gwt/websockets/client/event/WebsocketEvent.java b/src/main/java/org/realityforge/gwt/websockets/client/event/WebsocketEvent.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/realityforge/gwt/websockets/client/event/WebsocketEvent.java
+++ b/src/main/java/org/realityforge/gwt/websockets/client/event/WebsocketEvent.java
@@ -8,12 +8,12 @@ import org.realityforge.gwt.websockets.client.WebSocket;
/**
* Base class of all events originating from web socket.
*/
-public abstract class WebsocketEvent<H extends EventHandler>
+public abstract class WebSocketEvent<H extends EventHandler>
extends GwtEvent<H>
{
private final WebSocket _webSocket;
- protected WebsocketEvent( @Nonnull final WebSocket webSocket )
+ protected WebSocketEvent( @Nonnull final WebSocket webSocket )
{
_webSocket = webSocket;
}
|
Rename WebsocketEvent to WebSocketEvent to maintain consistent casing with the rest of the library.
|
realityforge_gwt-websockets
|
train
|
eb5ccf7d1c14a989f92c2057f9a35f272ecfbe9e
|
diff --git a/tcex/batch/batch.py b/tcex/batch/batch.py
index <HASH>..<HASH> 100644
--- a/tcex/batch/batch.py
+++ b/tcex/batch/batch.py
@@ -569,7 +569,10 @@ class Batch:
"""
data = []
# process group objects
- for xid in groups.keys():
+ # we are converting groups.keys() to a list because the data_group_association function
+ # will be deleting items the groups dictionary which would raise a
+ # "dictionary changed size during iteration" error
+ for xid in list(groups.keys()):
# get association from group data
assoc_group_data = self.data_group_association(xid)
data += assoc_group_data
|
Fixing "dictionary changed size during iteration" error
|
ThreatConnect-Inc_tcex
|
train
|
16c6de6498cb646e0b036c55ae76a3405d9d45a4
|
diff --git a/lib/Gitlab/Api/MergeRequests.php b/lib/Gitlab/Api/MergeRequests.php
index <HASH>..<HASH> 100644
--- a/lib/Gitlab/Api/MergeRequests.php
+++ b/lib/Gitlab/Api/MergeRequests.php
@@ -218,4 +218,37 @@ class MergeRequests extends AbstractApi
{
return $this->get($this->getProjectPath($project_id, 'merge_request/'.$this->encodePath($mr_id).'/commits'));
}
+
+ /**
+ * @param int $project_id
+ * @param int $mr_id
+ *
+ * @return mixed
+ */
+ public function approvals($project_id, $mr_id)
+ {
+ return $this->get($this->getProjectPath($project_id, 'merge_requests/'.$this->encodePath($mr_id).'/approvals'));
+ }
+
+ /**
+ * @param int $project_id
+ * @param int $mr_id
+ *
+ * @return mixed
+ */
+ public function approve($project_id, $mr_id)
+ {
+ return $this->post($this->getProjectPath($project_id, 'merge_requests/'.$this->encodePath($mr_id).'/approve'));
+ }
+
+ /**
+ * @param int $project_id
+ * @param int $mr_id
+ *
+ * @return mixed
+ */
+ public function unApprove($project_id, $mr_id)
+ {
+ return $this->post($this->getProjectPath($project_id, 'merge_requests/'.$this->encodePath($mr_id).'/unapprove'));
+ }
}
diff --git a/test/Gitlab/Tests/Api/MergeRequestsTest.php b/test/Gitlab/Tests/Api/MergeRequestsTest.php
index <HASH>..<HASH> 100644
--- a/test/Gitlab/Tests/Api/MergeRequestsTest.php
+++ b/test/Gitlab/Tests/Api/MergeRequestsTest.php
@@ -353,6 +353,58 @@ class MergeRequestsTest extends ApiTestCase
$this->assertEquals($expectedArray, $api->getByIid(1, 2));
}
+ /**
+ * @test
+ */
+ public function shouldApproveMergeRequest()
+ {
+ $expectedArray = array('id' => 1, 'title' => 'Approvals API');
+
+ $api = $this->getApiMock();
+ $api->expects($this->once())
+ ->method('post')
+ ->with('projects/1/merge_requests/2/approve')
+ ->will($this->returnValue($expectedArray))
+ ;
+
+ $this->assertEquals($expectedArray, $api->approve(1, 2));
+ }
+
+ /**
+ * @test
+ */
+ public function shouldUnApproveMergeRequest()
+ {
+ $expectedArray = array('id' => 1, 'title' => 'Approvals API');
+
+ $api = $this->getApiMock();
+ $api->expects($this->once())
+ ->method('post')
+ ->with('projects/1/merge_requests/2/unapprove')
+ ->will($this->returnValue($expectedArray))
+ ;
+
+ $this->assertEquals($expectedArray, $api->unapprove(1, 2));
+ }
+
+ /**
+ * @test
+ */
+ public function shouldGetMergeRequestApprovals()
+ {
+ $expectedArray = array('id' => 1, 'title' => 'Approvals API');
+
+ $api = $this->getApiMock();
+ $api->expects($this->once())
+ ->method('get')
+ ->with('projects/1/merge_requests', array('iid' => 2))
+ ->will($this->returnValue($expectedArray))
+ ;
+
+ $this->assertEquals($expectedArray, $api->getByIid(1, 2));
+ }
+
+
protected function getMultipleMergeRequestsData()
{
return array(
|
Add approvals, approve & unapprove API's
|
m4tthumphrey_php-gitlab-api
|
train
|
a65bed8c3831d3d37c0c8cf2548bfe1d4edd0387
|
diff --git a/v2client/src/test/java/com/yubico/client/v2/YubicoClientTest.java b/v2client/src/test/java/com/yubico/client/v2/YubicoClientTest.java
index <HASH>..<HASH> 100644
--- a/v2client/src/test/java/com/yubico/client/v2/YubicoClientTest.java
+++ b/v2client/src/test/java/com/yubico/client/v2/YubicoClientTest.java
@@ -132,7 +132,7 @@ public class YubicoClientTest {
"http://api2.example.com/wsapi/2.0/verify"
});
VerificationResponse response = client.verify(otp);
- assertEquals(ResponseStatus.REPLAYED_OTP, response.getStatus());
+ fail("Expected exception to be thrown.");
}
@Test
|
Remove assertion from test expected to throw exception
|
Yubico_yubico-java-client
|
train
|
ec0126564352b4e7258164a151b8a4aca1de0154
|
diff --git a/pkg/kubelet/dockertools/manager.go b/pkg/kubelet/dockertools/manager.go
index <HASH>..<HASH> 100644
--- a/pkg/kubelet/dockertools/manager.go
+++ b/pkg/kubelet/dockertools/manager.go
@@ -17,7 +17,6 @@ limitations under the License.
package dockertools
import (
- "bufio"
"bytes"
"errors"
"fmt"
@@ -898,20 +897,16 @@ func (dm *DockerManager) RunInContainer(containerID string, cmd []string) ([]byt
return nil, fmt.Errorf("failed to run in container - Exec setup failed - %v", err)
}
var buf bytes.Buffer
- wrBuf := bufio.NewWriter(&buf)
startOpts := docker.StartExecOptions{
Detach: false,
Tty: false,
- OutputStream: wrBuf,
- ErrorStream: wrBuf,
+ OutputStream: &buf,
+ ErrorStream: &buf,
RawTerminal: false,
}
- errChan := make(chan error, 1)
- go func() {
- errChan <- dm.client.StartExec(execObj.ID, startOpts)
- }()
- wrBuf.Flush()
- return buf.Bytes(), <-errChan
+ err = dm.client.StartExec(execObj.ID, startOpts)
+
+ return buf.Bytes(), err
}
// ExecInContainer uses nsenter to run the command inside the container identified by containerID.
diff --git a/pkg/probe/exec/exec.go b/pkg/probe/exec/exec.go
index <HASH>..<HASH> 100644
--- a/pkg/probe/exec/exec.go
+++ b/pkg/probe/exec/exec.go
@@ -43,7 +43,7 @@ func (pr execProber) Probe(e uexec.Cmd) (probe.Result, error) {
if err != nil {
return probe.Unknown, err
}
- if strings.ToLower(string(data)) != defaultHealthyOutput {
+ if !strings.HasPrefix(strings.ToLower(string(data)), defaultHealthyOutput) {
return probe.Failure, nil
}
return probe.Success, nil
|
Fix docker exec logic. Without this patch, kubelet was not receiving any output from docker exec
and was incorrectly handling the output.
|
kubernetes_kubernetes
|
train
|
dcb1b53535459af0f53914446a8afaff5321a364
|
diff --git a/tests/thrift/thrift_test.py b/tests/thrift/thrift_test.py
index <HASH>..<HASH> 100644
--- a/tests/thrift/thrift_test.py
+++ b/tests/thrift/thrift_test.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
import mock
import pytest
import socket
@@ -146,10 +147,14 @@ def test_annotation_list_builder(ann_mock):
assert ann_mock.call_count == 2
-def test_create_binary_annotation():
+@pytest.mark.parametrize(
+ 'value',
+ [(b'binary', u'unicøde')],
+)
+def test_create_binary_annotation(value):
bann = thrift.create_binary_annotation(
- 'foo', 'bar', 'baz', 'bla')
- assert ('foo', 'bar', 'baz', 'bla') == (
+ 'foo', value, 'baz', 'bla')
+ assert ('foo', value, 'baz', 'bla') == (
bann.key, bann.value, bann.annotation_type, bann.host)
|
Add thrift test for unicode binary annotation
|
Yelp_py_zipkin
|
train
|
f6d2ef1370b1de9edcc92b4f4a33201bfaadb945
|
diff --git a/src/components/link.js b/src/components/link.js
index <HASH>..<HASH> 100644
--- a/src/components/link.js
+++ b/src/components/link.js
@@ -4,6 +4,7 @@ import type { Href, Location } from '../types';
import React, { Component } from 'react';
import { connect } from 'react-redux';
import PropTypes from 'prop-types';
+import qs from 'query-string';
import {
push as pushAction,
@@ -22,8 +23,8 @@ type Props = {
onClick: EventHandler,
style: Object,
location: Location,
- push: Function,
- replace: Function
+ push: typeof pushAction,
+ replace: typeof replaceAction
};
const LEFT_MOUSE_BUTTON = 0;
@@ -64,10 +65,29 @@ const handleClick = ({
navigate(href, { persistQuery });
};
+// When persisting queries, we need to merge the persisted
+// query with the link's new query.
+const contextifyHref = (href, location, persistQuery) => {
+ if (!persistQuery) { return href; }
+
+ const query = {
+ ...location.query || {},
+ ...href.query || {}
+ };
+
+ const search = qs.stringify(query);
+
+ return {
+ ...href,
+ query,
+ search: search && `?${search}` || ''
+ };
+};
+
const Link = (props: Props) => {
const {
href: rawHref,
- location: { basename },
+ location,
children,
onClick,
target,
@@ -79,7 +99,12 @@ const Link = (props: Props) => {
} = props;
// Ensure the href has both a search and a query when needed
- const href = normalizeHref(rawHref);
+ const normalizedHref = normalizeHref(rawHref);
+ const href = contextifyHref(
+ normalizedHref,
+ location,
+ persistQuery
+ );
const clickHandler = e => handleClick({
e,
@@ -94,7 +119,7 @@ const Link = (props: Props) => {
return (
<a
- href={stringifyHref(href, basename)}
+ href={stringifyHref(href, location.basename)}
onClick={clickHandler}
target={target}
{...rest}
diff --git a/src/reducer.js b/src/reducer.js
index <HASH>..<HASH> 100644
--- a/src/reducer.js
+++ b/src/reducer.js
@@ -1,9 +1,11 @@
// @flow
import type { Location, LocationOptions, LocationAction } from './types';
+import qs from 'query-string';
+
import { LOCATION_CHANGED, isNavigationAction } from './types';
-const flow = (...funcs: Array<Function>) =>
+const flow = (...funcs: Array<*>) =>
funcs.reduce((prev, curr) => (...args) => curr(prev(...args)));
type ResolverArgs = {
@@ -17,22 +19,18 @@ const resolveQuery = ({
newLocation,
options
}: ResolverArgs): ResolverArgs => {
- const { query: oldQuery, search: oldSearch } = oldLocation;
-
- // Only use the query from state if it exists
- // and the href doesn't provide its own query
- if (
- options.persistQuery &&
- oldQuery &&
- !newLocation.search &&
- !newLocation.query
- ) {
+ // Merge the old and new queries if asked to persist
+ if (options.persistQuery) {
+ const mergedQuery = {
+ ...oldLocation.query,
+ ...newLocation.query
+ };
return {
oldLocation,
newLocation: {
...newLocation,
- query: oldQuery,
- search: oldSearch
+ query: mergedQuery,
+ search: `?${qs.stringify(mergedQuery)}`
},
options
};
diff --git a/test/components/link.spec.js b/test/components/link.spec.js
index <HASH>..<HASH> 100644
--- a/test/components/link.spec.js
+++ b/test/components/link.spec.js
@@ -281,6 +281,19 @@ describe('Link', () => {
expect(wrapper.find('a').prop('href')).to.equal(`/base${expected[index]}`);
});
});
+
+ it('renders the correct href when persisting queries', () => {
+ const onClick = sandbox.stub();
+ const wrapper = mount(
+ <Link persistQuery href='/home?what=do' onClick={onClick} />,
+ fakeContext({
+ query: { persist: 'pls' }
+ })
+ );
+
+ expect(wrapper.find('a').prop('href'))
+ .to.equal('/home?persist=pls&what=do');
+ });
});
describe('PersistentQueryLink', () => {
diff --git a/test/reducer.spec.js b/test/reducer.spec.js
index <HASH>..<HASH> 100644
--- a/test/reducer.spec.js
+++ b/test/reducer.spec.js
@@ -121,7 +121,7 @@ describe('Router reducer', () => {
});
});
- it('persists the previous query string if requested', () => {
+ it('persists the previous query if requested', () => {
const reducerInstance = reducer();
const navigationAction = {
@@ -180,7 +180,7 @@ describe('Router reducer', () => {
});
});
- it('allows new queries to override persistQuery', () => {
+ it('merges old and new queries when requesting persistence', () => {
const reducerInstance = reducer();
const navigationAction = {
@@ -222,9 +222,10 @@ describe('Router reducer', () => {
expect(result).to.deep.equal({
pathname: '/rofl',
query: {
- clap: 'please'
+ clap: 'please',
+ please: 'clap'
},
- search: '?clap=please',
+ search: '?clap=please&please=clap',
previous: {
pathname: '/waffle',
query: {
|
Merge old and new queries on persistQuery
|
FormidableLabs_redux-little-router
|
train
|
1c0142d491ef67040d15c67c80e0866b2d899143
|
diff --git a/src/Functional/Curry.php b/src/Functional/Curry.php
index <HASH>..<HASH> 100644
--- a/src/Functional/Curry.php
+++ b/src/Functional/Curry.php
@@ -53,6 +53,14 @@ class CurriedFunction
$this->arguments = $arguments;
}
+ public function getCallback()
+ {
+ if ($this->callback instanceof self) {
+ return $this->callback->getCallback();
+ }
+ return $this->callback;
+ }
+
public function __invoke()
{
$callArgs = func_get_args();
@@ -74,7 +82,7 @@ class CurriedFunction
throw new \InvalidArgumentException(
sprintf(
'Curried %s() requires parameter %d to be passed. None given',
- $this->callback,
+ $this->getCallback(),
$arg->position
)
);
diff --git a/tests/Functional/CurryTest.php b/tests/Functional/CurryTest.php
index <HASH>..<HASH> 100644
--- a/tests/Functional/CurryTest.php
+++ b/tests/Functional/CurryTest.php
@@ -45,6 +45,15 @@ class CurryTest extends AbstractTestCase
$this->assertSame(1, $func());
}
+ function testNestedCurryingWithVariableArguments()
+ {
+ $func1 = curry('sprintf', 'first: %d, second: %d', arg('...'));
+ $func2 = curry($func1, arg(1), 2);
+ $func3 = curry($func1, arg('...'), 2);
+ $this->assertSame('first: 1, second: 2', $func2(1));
+ $this->assertSame('first: 0, second: 2', $func3(0));
+ }
+
function testCurryingVariableArguments()
{
$func = curry('sprintf', 'first: %d, second: %d, third: %d', arg('...'));
@@ -64,4 +73,12 @@ class CurryTest extends AbstractTestCase
$this->setExpectedException('InvalidArgumentException', 'Curried strpos() requires parameter 2 to be passed. None given');
$func('foo');
}
+
+ function testExceptionIsThrownInNestedCurriedFunctionWhenRequiredParameterIsNotPassed()
+ {
+ $func = curry('strpos', arg(1), arg(2));
+ $func = curry($func, arg(1), "o");
+ $this->setExpectedException('InvalidArgumentException', 'Curried strpos() requires parameter 1 to be passed. None given');
+ $func();
+ }
}
|
Tests for recursive currying
|
lstrojny_functional-php
|
train
|
a56388e16725bbc14478712d16dcb024c588b268
|
diff --git a/primus.js b/primus.js
index <HASH>..<HASH> 100644
--- a/primus.js
+++ b/primus.js
@@ -137,14 +137,16 @@ function Primus(url, options) {
options = options || {};
var primus = this;
- this.buffer = []; // Stores premature send data.
- this.writable = true; // Silly stream compatibility.
- this.readable = true; // Silly stream compatibility.
- this.url = this.parse(url); // Parse the URL to a readable format.
- this.backoff = options.reconnect || {}; // Stores the back off configuration.
- this.attempt = null; // Current back off attempt.
- this.readyState = Primus.CLOSED; // The readyState of the connection.
- this.transformers = { // Message transformers.
+ this.buffer = []; // Stores premature send data.
+ this.writable = true; // Silly stream compatibility.
+ this.readable = true; // Silly stream compatibility.
+ this.url = this.parse(url); // Parse the URL to a readable format.
+ this.backoff = options.reconnect || {}; // Stores the back off configuration.
+ this.attempt = null; // Current back off attempt.
+ this.readyState = Primus.CLOSED; // The readyState of the connection.
+ this.connection = +options.timeout || 10e3; // Connection timeout duration.
+ this.timer = null; // The connection timeout timer.
+ this.transformers = { // Message transformers.
outgoing: [],
incoming: []
};
@@ -404,9 +406,51 @@ Primus.prototype.initialise = function initalise(options) {
* @api public
*/
Primus.prototype.open = function open() {
- this.emit('outgoing::open');
+ //
+ // Only start a `connection timeout` procedure if we're not reconnecting as
+ // that shouldn't count as an initial connection. This should be started
+ // before the connection is opened to capture failing connections and kill the
+ // timeout.
+ //
+ if (!this.attempt && this.connection) this.timeout();
- return this;
+ return this.emit('outgoing::open');
+};
+
+/**
+ * Start a connection timeout
+ *
+ * @api private
+ */
+Primus.prototype.timeout = function timeout() {
+ var primus = this;
+
+ /**
+ * Remove all references to the timeout listener as we've received an event
+ * that can be used to determine state.
+ *
+ * @api privatek
+ */
+ function remove() {
+ primus.removeListener('error', remove);
+ primus.removeListener('open', remove);
+ primus.removeListener('end', remove);
+
+ clearTimeout(primus.timer);
+ primus.timer = null;
+ }
+
+ this.timer = setTimeout(function setTimeout() {
+ remove(); // Clean up old references.
+
+ if (Primus.readyState === Primus.OPEN || primus.attempt) return;
+
+ primus.emit('timeout');
+ }, this.connection);
+
+ return this.on('error', remove)
+ .on('open', remove)
+ .on('end', remove);
};
/**
@@ -459,7 +503,7 @@ Primus.prototype.write = function write(data) {
* @api public
*/
Primus.prototype.end = function end(data) {
- if (this.readyState === Primus.CLOSED) return this;
+ if (this.readyState === Primus.CLOSED && !this.timer) return this;
if (data) this.write(data);
this.writable = false;
@@ -708,7 +752,7 @@ if (
// Normally this makes sense, when your page is still loading. But versions
// before FireFox 22 will close all connections including WebSocket connections
// after page load. One way to prevent this is to do a `preventDefault()` and
- // cancel the operation before it bubbles up to the browsers's default handler.
+ // cancel the operation before it bubbles up to the browsers default handler.
// It needs to be added as `keydown` event, if it's added keyup it will not be
// able to prevent the connection from being closed.
//
|
[major] Added connection timeout
|
primus_primus
|
train
|
9c865a5f0598f089abd1c7c282209e7465e2d3bf
|
diff --git a/expression/aggregation/aggregation.go b/expression/aggregation/aggregation.go
index <HASH>..<HASH> 100644
--- a/expression/aggregation/aggregation.go
+++ b/expression/aggregation/aggregation.go
@@ -15,15 +15,10 @@ package aggregation
import (
"bytes"
- "fmt"
"github.com/pingcap/errors"
"github.com/pingcap/parser/ast"
- "github.com/pingcap/parser/charset"
- "github.com/pingcap/parser/model"
- "github.com/pingcap/parser/mysql"
"github.com/pingcap/tidb/expression"
- "github.com/pingcap/tidb/sessionctx"
"github.com/pingcap/tidb/sessionctx/stmtctx"
"github.com/pingcap/tidb/types"
"github.com/pingcap/tidb/util/chunk"
@@ -47,15 +42,6 @@ type Aggregation interface {
// ResetContext resets the content of the evaluate context.
ResetContext(sc *stmtctx.StatementContext, evalCtx *AggEvaluateContext)
-
- // GetFinalAggFunc constructs the final agg functions, only used in parallel execution.
- GetFinalAggFunc(ctx sessionctx.Context, idx int) (int, Aggregation)
-
- // GetArgs gets the args of the aggregate function.
- GetArgs() []expression.Expression
-
- // Clone deep copy the Aggregation.
- Clone(ctx sessionctx.Context) Aggregation
}
// NewDistAggFunc creates new Aggregate function for mock tikv.
@@ -175,56 +161,6 @@ func (af *aggFunction) updateSum(sc *stmtctx.StatementContext, evalCtx *AggEvalu
return nil
}
-func (af *aggFunction) GetFinalAggFunc(ctx sessionctx.Context, idx int) (_ int, newAggFunc Aggregation) {
- switch af.Mode {
- case DedupMode:
- panic("DedupMode is not supported now.")
- case Partial1Mode:
- args := make([]expression.Expression, 0, 2)
- if NeedCount(af.Name) {
- args = append(args, &expression.Column{
- ColName: model.NewCIStr(fmt.Sprintf("col_%d", idx)),
- Index: idx,
- RetType: &types.FieldType{Tp: mysql.TypeLonglong, Flen: 21, Charset: charset.CharsetBin, Collate: charset.CollationBin},
- })
- idx++
- }
- if NeedValue(af.Name) {
- args = append(args, &expression.Column{
- ColName: model.NewCIStr(fmt.Sprintf("col_%d", idx)),
- Index: idx,
- RetType: af.RetTp,
- })
- idx++
- if af.Name == ast.AggFuncGroupConcat {
- separator := af.Args[len(af.Args)-1]
- args = append(args, separator.Clone())
- }
- }
- desc := af.AggFuncDesc.Clone()
- desc.Mode = FinalMode
- desc.Args = args
- newAggFunc = desc.GetAggFunc(ctx)
- case Partial2Mode:
- desc := af.AggFuncDesc.Clone()
- desc.Mode = FinalMode
- idx += len(desc.Args)
- newAggFunc = desc.GetAggFunc(ctx)
- case FinalMode, CompleteMode:
- panic("GetFinalAggFunc should not be called when aggMode is FinalMode/CompleteMode.")
- }
- return idx, newAggFunc
-}
-
-func (af *aggFunction) GetArgs() []expression.Expression {
- return af.Args
-}
-
-func (af *aggFunction) Clone(ctx sessionctx.Context) Aggregation {
- desc := af.AggFuncDesc.Clone()
- return desc.GetAggFunc(ctx)
-}
-
// NeedCount indicates whether the aggregate function should record count.
func NeedCount(name string) bool {
return name == ast.AggFuncCount || name == ast.AggFuncAvg
|
expression/aggregation: remove unused function in `aggregation.Aggregation` (#<I>)
|
pingcap_tidb
|
train
|
921457de8155ef15b0905a59e8a606bfe7e3b4fb
|
diff --git a/src/Console/Command/StartJsonRpcServerCommand.php b/src/Console/Command/StartJsonRpcServerCommand.php
index <HASH>..<HASH> 100644
--- a/src/Console/Command/StartJsonRpcServerCommand.php
+++ b/src/Console/Command/StartJsonRpcServerCommand.php
@@ -45,7 +45,7 @@ class StartJsonRpcServerCommand extends AbstractCommand
->setDefinition([
new InputOption(
'name',
- 'n',
+ 'c',
InputOption::VALUE_REQUIRED,
'name of the JSON-RPC server to start'
),
|
Changed command switch back to c from n, looks like this was switched back during a merge after the issue #<I> was fixed.
|
prolic_HumusAmqp
|
train
|
d541501870d04c9fe788b3e980d875a6d70c0030
|
diff --git a/salt/modules/win_file.py b/salt/modules/win_file.py
index <HASH>..<HASH> 100644
--- a/salt/modules/win_file.py
+++ b/salt/modules/win_file.py
@@ -296,7 +296,7 @@ def chgrp(path, group):
return None
-def stats(path, hash_type='md5', follow_symlink=False):
+def stats(path, hash_type='md5', follow_symlinks=False):
'''
Return a dict containing the stats for a given file
@@ -309,7 +309,7 @@ def stats(path, hash_type='md5', follow_symlink=False):
ret = {}
if not os.path.exists(path):
return ret
- if follow_symlink:
+ if follow_symlinks:
pstat = os.stat(path)
else:
pstat = os.lstat(path)
|
Addresses the issue: Unable to manage file: stats() got an unexpected keyword argument 'follow_symlinks'
|
saltstack_salt
|
train
|
b19b294845d08fb0f8bb9034437dd3e718fd63ab
|
diff --git a/src/ServiceContainer/WordpressBehatExtension.php b/src/ServiceContainer/WordpressBehatExtension.php
index <HASH>..<HASH> 100644
--- a/src/ServiceContainer/WordpressBehatExtension.php
+++ b/src/ServiceContainer/WordpressBehatExtension.php
@@ -42,7 +42,7 @@ class WordpressBehatExtension implements ExtensionInterface
// Optional - not fully supported, but implemented for possible future integration.
->scalarNode('site_url')
- ->defaultValue('127.0.0.1')
+ ->defaultValue('http://127.0.0.1')
->end()
->end()
->end();
|
site_url is assumed to have a protocol
|
paulgibbs_behat-wordpress-extension
|
train
|
3948742eab757bc3ef63789287e3eaaee3ff41d5
|
diff --git a/mdp.py b/mdp.py
index <HASH>..<HASH> 100644
--- a/mdp.py
+++ b/mdp.py
@@ -428,6 +428,7 @@ def getSpan(W):
"""
return (W.max() - W.min())
+
class MDP(object):
"""A Markov Decision Problem."""
|
Separate top-level function and class definitions with two blank lines
|
sawcordwell_pymdptoolbox
|
train
|
afda6b49408bad8f7a7a5faa9eeda016c8763400
|
diff --git a/pkg/repo/index.go b/pkg/repo/index.go
index <HASH>..<HASH> 100644
--- a/pkg/repo/index.go
+++ b/pkg/repo/index.go
@@ -169,6 +169,15 @@ func (i IndexFile) Get(name, version string) (*ChartVersion, error) {
}
}
+ // when customer input exact version, check whether have exact match one first
+ if len(version) != 0 {
+ for _, ver := range vs {
+ if version == ver.Version {
+ return ver, nil
+ }
+ }
+ }
+
for _, ver := range vs {
test, err := semver.NewVersion(ver.Version)
if err != nil {
diff --git a/pkg/repo/index_test.go b/pkg/repo/index_test.go
index <HASH>..<HASH> 100644
--- a/pkg/repo/index_test.go
+++ b/pkg/repo/index_test.go
@@ -20,6 +20,7 @@ import (
"io/ioutil"
"net/http"
"os"
+ "strings"
"testing"
"helm.sh/helm/v3/pkg/cli"
@@ -40,14 +41,17 @@ func TestIndexFile(t *testing.T) {
i.Add(&chart.Metadata{Name: "cutter", Version: "0.1.1"}, "cutter-0.1.1.tgz", "http://example.com/charts", "sha256:1234567890abc")
i.Add(&chart.Metadata{Name: "cutter", Version: "0.1.0"}, "cutter-0.1.0.tgz", "http://example.com/charts", "sha256:1234567890abc")
i.Add(&chart.Metadata{Name: "cutter", Version: "0.2.0"}, "cutter-0.2.0.tgz", "http://example.com/charts", "sha256:1234567890abc")
+ i.Add(&chart.Metadata{Name: "setter", Version: "0.1.9+alpha"}, "setter-0.1.9+alpha.tgz", "http://example.com/charts", "sha256:1234567890abc")
+ i.Add(&chart.Metadata{Name: "setter", Version: "0.1.9+beta"}, "setter-0.1.9+beta.tgz", "http://example.com/charts", "sha256:1234567890abc")
+
i.SortEntries()
if i.APIVersion != APIVersionV1 {
t.Error("Expected API version v1")
}
- if len(i.Entries) != 2 {
- t.Errorf("Expected 2 charts. Got %d", len(i.Entries))
+ if len(i.Entries) != 3 {
+ t.Errorf("Expected 3 charts. Got %d", len(i.Entries))
}
if i.Entries["clipper"][0].Name != "clipper" {
@@ -55,13 +59,23 @@ func TestIndexFile(t *testing.T) {
}
if len(i.Entries["cutter"]) != 3 {
- t.Error("Expected two cutters.")
+ t.Error("Expected three cutters.")
}
// Test that the sort worked. 0.2 should be at the first index for Cutter.
if v := i.Entries["cutter"][0].Version; v != "0.2.0" {
t.Errorf("Unexpected first version: %s", v)
}
+
+ cv, err := i.Get("setter", "0.1.9")
+ if err == nil && !strings.Contains(cv.Metadata.Version, "0.1.9") {
+ t.Errorf("Unexpected version: %s", cv.Metadata.Version)
+ }
+
+ cv, err = i.Get("setter", "0.1.9+alpha")
+ if err != nil || cv.Metadata.Version != "0.1.9+alpha" {
+ t.Errorf("Expected version: 0.1.9+alpha")
+ }
}
func TestLoadIndex(t *testing.T) {
|
Porting fix from commit f<I>db<I>cf6d<I>dcd<I>ac<I>a<I>fb<I>
This port fixes the bug #<I> for helm3
which was fixed in helm2 with the pull request <I>
<URL>
|
helm_helm
|
train
|
5ddc49b22ca8c3563271f95e1d294c8404b2b072
|
diff --git a/src/js/string.js b/src/js/string.js
index <HASH>..<HASH> 100644
--- a/src/js/string.js
+++ b/src/js/string.js
@@ -6,7 +6,7 @@
* @interface
* @augments ch.Watcher
* @memberOf ch
- * @param {Configuration Object} conf Object with configuration properties
+ * @param {String} msg Validation message
* @returns {Chico-UI Object}
* @see ch.Watcher
* @example
|
Corrected a param to the String validator constructor.
|
mercadolibre_chico
|
train
|
570d0e85c5dd75dd2b7d3b463c0578e98ce13da8
|
diff --git a/nodeconductor/structure/serializers.py b/nodeconductor/structure/serializers.py
index <HASH>..<HASH> 100644
--- a/nodeconductor/structure/serializers.py
+++ b/nodeconductor/structure/serializers.py
@@ -1114,11 +1114,10 @@ class BaseServiceSerializer(six.with_metaclass(ServiceSerializerMetaclass,
return service
def update(self, instance, attrs):
- if 'settings' in attrs:
- name = attrs.pop('settings', {}).get('name')
- if name:
- instance.settings.name = name
- instance.settings.save()
+ name = attrs.pop('settings', {}).get('name')
+ if name:
+ instance.settings.name = name
+ instance.settings.save()
return super(BaseServiceSerializer, self).update(instance, attrs)
|
Remove redundant condition [WAL-<I>]
|
opennode_waldur-core
|
train
|
4f362ee6dc952fa547ac1e18ce51a2f127009d38
|
diff --git a/lib/mongomapper/embedded_document.rb b/lib/mongomapper/embedded_document.rb
index <HASH>..<HASH> 100644
--- a/lib/mongomapper/embedded_document.rb
+++ b/lib/mongomapper/embedded_document.rb
@@ -55,6 +55,10 @@ module MongoMapper
if key.options[:required]
validates_presence_of(attribute)
end
+
+ if key.options[:unique]
+ validates_uniqueness_of(attribute)
+ end
if key.options[:numeric]
number_options = key.type == Integer ? {:only_integer => true} : {}
diff --git a/test/test_validations.rb b/test/test_validations.rb
index <HASH>..<HASH> 100644
--- a/test/test_validations.rb
+++ b/test/test_validations.rb
@@ -182,6 +182,18 @@ class ValidationsTest < Test::Unit::TestCase
doc2.should have_error_on(:name)
end
end
+
+ context "validates uniqueness of with :unique shortcut" do
+ should "work" do
+ @document.key :name, String, :unique => true
+
+ doc = @document.create(:name => 'John')
+ doc.should_not have_error_on(:name)
+ second_john = @document.create(:name => 'John')
+ second_john.should have_error_on(:name)
+ end
+ end
+
end # Validations
context "Saving a new document that is invalid" do
|
added :unique key shortcut to add validates_uniqueness_of automatically. Fixes #<I>.
|
mongomapper_mongomapper
|
train
|
8ac551be35d5cd3fdef60d1cf348a5a749b671ab
|
diff --git a/croppie.js b/croppie.js
index <HASH>..<HASH> 100755
--- a/croppie.js
+++ b/croppie.js
@@ -807,16 +807,15 @@
canvas.width = outWidth;
canvas.height = outHeight;
+ ctx.drawImage(img, left, top, width, height, 0, 0, outWidth, outHeight);
if (circle) {
- ctx.save();
+ ctx.fillStyle = '#fff';
+ ctx.globalCompositeOperation = 'destination-in';
ctx.beginPath();
ctx.arc(outWidth / 2, outHeight / 2, outWidth / 2, 0, Math.PI * 2, true);
ctx.closePath();
- ctx.clip();
+ ctx.fill();
}
-
- ctx.drawImage(img, left, top, width, height, 0, 0, outWidth, outHeight);
-
return canvas.toDataURL(data.format, data.quality);
}
diff --git a/demo/demo.js b/demo/demo.js
index <HASH>..<HASH> 100644
--- a/demo/demo.js
+++ b/demo/demo.js
@@ -48,7 +48,7 @@ var Demo = (function() {
$('.js-main-image').on('click', function (ev) {
mc.croppie('result', {
type: 'canvas',
- format: 'jpeg'
+ format: 'png'
}).then(function (resp) {
popupResult({
src: resp
|
antialiasing issue fixes #<I>
|
Foliotek_Croppie
|
train
|
3a874a24aed6ee93fbccf97efe0ecc999bafe87d
|
diff --git a/lib/mixlib/archive.rb b/lib/mixlib/archive.rb
index <HASH>..<HASH> 100644
--- a/lib/mixlib/archive.rb
+++ b/lib/mixlib/archive.rb
@@ -26,6 +26,8 @@ module Mixlib
end
def extract(destination, perms: true, ignore: [])
+ ignore = [/^\.$/, /\.{2}/] + ignore
+
create_and_empty(destination)
archiver.extract(destination, perms: perms, ignore: ignore)
diff --git a/lib/mixlib/archive/tar.rb b/lib/mixlib/archive/tar.rb
index <HASH>..<HASH> 100644
--- a/lib/mixlib/archive/tar.rb
+++ b/lib/mixlib/archive/tar.rb
@@ -29,7 +29,10 @@ module Mixlib
dest = File.join(destination, entry.read.strip)
next
end
- next if entry.full_name =~ ignore_re
+ if entry.full_name =~ ignore_re
+ Mixlib::Archive::Log.warn "ignoring entry #{entry.full_name}"
+ next
+ end
dest ||= File.join(destination, entry.full_name)
parent = File.dirname(dest)
FileUtils.mkdir_p(parent)
diff --git a/spec/mixlib/archive_spec.rb b/spec/mixlib/archive_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/mixlib/archive_spec.rb
+++ b/spec/mixlib/archive_spec.rb
@@ -21,6 +21,7 @@ describe Mixlib::Archive do
it "accepts a path" do
expect { described_class.new("../foo") }.not_to raise_error
end
+
it "allows the target to be emptied" do
expect { described_class.new("../foo", empty: true) }.not_to raise_error
end
@@ -57,14 +58,19 @@ describe Mixlib::Archive do
end
it "runs the extractor" do
- expect(archiver).to receive(:extract).with(destination, { perms: true, ignore: [] })
+ expect(archiver).to receive(:extract).with(destination, { perms: true, ignore: [/^\.$/, /\.{2}/] })
archive.extract(destination)
end
it "passes options to the extractor" do
- expect(archiver).to receive(:extract).with(destination, { perms: false, ignore: [] })
+ expect(archiver).to receive(:extract).with(destination, { perms: false, ignore: [/^\.$/, /\.{2}/] })
archive.extract(destination, perms: false)
end
+
+ it "allows the user to ignore more patterns" do
+ expect(archiver).to receive(:extract).with(destination, { perms: false, ignore: [/^\.$/, /\.{2}/, /^$/] })
+ archive.extract(destination, perms: false, ignore: [/^$/])
+ end
end
describe "#create_and_empty" do
|
Ignore directory-traversing file names in a tar
It's possible to construct tar archives with entries that contain path
names that traverse outside the target directory. We will now just
ignore those entries
|
chef_mixlib-archive
|
train
|
18ce3337e67d83c01e683d0fbf6fcd98706b1808
|
diff --git a/PySimpleGUIWeb/PySimpleGUIWeb.py b/PySimpleGUIWeb/PySimpleGUIWeb.py
index <HASH>..<HASH> 100644
--- a/PySimpleGUIWeb/PySimpleGUIWeb.py
+++ b/PySimpleGUIWeb/PySimpleGUIWeb.py
@@ -1443,39 +1443,39 @@ class Image(Element):
-class SuperImage(remi.gui.Image):
- def __init__(self, file_path_name=None, **kwargs):
- image = file_path_name
- super(SuperImage, self).__init__(image, **kwargs)
-
- self.imagedata = None
- self.mimetype = None
- self.encoding = None
- if image is None:
- return
- self.load(image)
-
- def load(self, file_path_name):
- if type(file_path_name) is bytes or len(file_path_name) > 200:
- try:
- self.imagedata = base64.b64decode(file_path_name, validate=True)
- except binascii.Error:
- self.imagedata = file_path_name
- else:
- self.mimetype, self.encoding = mimetypes.guess_type(file_path_name)
- with open(file_path_name, 'rb') as f:
- self.imagedata = f.read()
- self.refresh()
-
- def refresh(self):
- i = int(time.time() * 1e6)
- self.attributes['src'] = "/%s/get_image_data?update_index=%d" % (id(self), i)
-
- def get_image_data(self, update_index):
- headers = {'Content-type': self.mimetype if self.mimetype else 'application/octet-stream'}
- return [self.imagedata, headers]
+# class SuperImageOld(remi.gui.Image):
+# def __init__(self, file_path_name=None, **kwargs):
+# image = file_path_name
+# super(SuperImage, self).__init__(image, **kwargs)
+#
+# self.imagedata = None
+# self.mimetype = None
+# self.encoding = None
+# if image is None:
+# return
+# self.load(image)
+#
+# def load(self, file_path_name):
+# if type(file_path_name) is bytes or len(file_path_name) > 200:
+# try:
+# self.imagedata = base64.b64decode(file_path_name, validate=True)
+# except binascii.Error:
+# self.imagedata = file_path_name
+# else:
+# self.mimetype, self.encoding = mimetypes.guess_type(file_path_name)
+# with open(file_path_name, 'rb') as f:
+# self.imagedata = f.read()
+# self.refresh()
+#
+# def refresh(self):
+# i = int(time.time() * 1e6)
+# self.attributes['src'] = "/%s/get_image_data?update_index=%d" % (id(self), i)
+#
+# def get_image_data(self, update_index):
+# headers = {'Content-type': self.mimetype if self.mimetype else 'application/octet-stream'}
+# return [self.imagedata, headers]
-class SuperImagenew(remi.gui.Image):
+class SuperImage(remi.gui.Image):
def __init__(self, file_path_name=None, **kwargs):
"""
This new app_instance variable is causing lots of problems. I do not know the value of the App
@@ -1496,7 +1496,9 @@ class SuperImagenew(remi.gui.Image):
def load(self, file_path_name):
if type(file_path_name) is bytes or len(file_path_name) > 200:
- self.imagedata = base64.b64decode(file_path_name)
+ # print("image data")
+ self.mimetype = 'image/png'
+ self.imagedata = file_path_name #base64.b64decode(file_path_name)
else:
self.mimetype, self.encoding = mimetypes.guess_type(file_path_name)
with open(file_path_name, 'rb') as f:
@@ -1504,6 +1506,7 @@ class SuperImagenew(remi.gui.Image):
self.refresh()
def refresh(self):
+ # print("refresh")
i = int(time.time() * 1e6)
# self.app_instance.execute_javascript("""
if Window.App is not None:
@@ -1521,11 +1524,10 @@ class SuperImagenew(remi.gui.Image):
""" % {'id': id(self), 'frame_index':i})
def get_image_data(self, update_index):
+ # print("get image data")
headers = {'Content-type': self.mimetype if self.mimetype else 'application/octet-stream'}
return [self.imagedata, headers]
-
-
# ---------------------------------------------------------------------- #
# Graph #
# ---------------------------------------------------------------------- #
|
Fix for flickering image in OpenCV demos!!!!
|
PySimpleGUI_PySimpleGUI
|
train
|
b3649d2d8c66b4d7f13633b5c55cd653876dbf94
|
diff --git a/lib/mongodb/collection.js b/lib/mongodb/collection.js
index <HASH>..<HASH> 100644
--- a/lib/mongodb/collection.js
+++ b/lib/mongodb/collection.js
@@ -1210,7 +1210,7 @@ Collection.prototype.mapReduce = function mapReduce (map, reduce, options, callb
var readPreference = _getReadConcern(this, options);
// If we have a read preference and inline is not set as output fail hard
- if((readPreference != false && readPreference != 'primary') && options['out'] != 'inline') {
+ if((readPreference != false && readPreference != 'primary') && options['out'].inline != 1) {
throw new Error("a readPreference can only be provided when performing an inline mapReduce");
}
diff --git a/test/tests/functional/read_preferences_tests.js b/test/tests/functional/read_preferences_tests.js
index <HASH>..<HASH> 100644
--- a/test/tests/functional/read_preferences_tests.js
+++ b/test/tests/functional/read_preferences_tests.js
@@ -154,7 +154,7 @@ exports['Should correctly apply collection level read Preference to mapReduce']
var reduce = function(k,vals) { return 1; };
// Peform the map reduce
- collection.mapReduce(map, reduce, {out: 'inline'}, function(err, collection) {
+ collection.mapReduce(map, reduce, {out: {inline:1}}, function(err, collection) {
db.serverConfig.checkoutReader = checkout;
db.close();
|
Fixed use of inline in collection mapreduce to be object as per docs
|
mongodb_node-mongodb-native
|
train
|
061dcc6122cfa8a26fa6d5ae1bdb2fd15fd2cf25
|
diff --git a/scripts/syncDeps.js b/scripts/syncDeps.js
index <HASH>..<HASH> 100644
--- a/scripts/syncDeps.js
+++ b/scripts/syncDeps.js
@@ -9,6 +9,7 @@ const semver = require('semver')
const globby = require('globby')
const { execSync } = require('child_process')
const inquirer = require('inquirer')
+const readline = require('readline')
const externalVueScopedPackages = {
'@vue/test-utils': true,
@@ -47,16 +48,38 @@ const checkUpdate = (pkg, filePath, local, remote) => {
if (!isNewer) {
return false
}
- const isCompat = semver.intersects(`^${local}`, `^${remote}`)
- console.log(
- `${chalk.cyan(pkg)}: ${local} => ${remote} ` +
- (isCompat ? `` : chalk.red.bold(`maybe breaking `)) +
- chalk.gray(`(${path.relative(process.cwd(), filePath)})`)
- )
+ const maybeBreaking = !semver.intersects(`^${local}`, `^${remote}`)
+ console.log(genUpdateString(pkg, filePath, local, remote, maybeBreaking))
return true
}
}
+const checkUpdateAsync = async (pkg, filePath, local, remote) => {
+ if (remote !== local) {
+ const isNewer = semver.gt(remote, local)
+ if (!isNewer) {
+ return false
+ }
+ const maybeBreaking = !semver.intersects(`^${local}`, `^${remote}`)
+ if (!maybeBreaking) {
+ return true
+ }
+ const { shouldUpdate } = await inquirer.prompt([{
+ name: 'shouldUpdate',
+ type: 'confirm',
+ message: genUpdateString(pkg, filePath, local, remote, maybeBreaking) + `\n` +
+ `Update this dependency?`
+ }])
+ return shouldUpdate
+ }
+}
+
+function genUpdateString (pkg, filePath, local, remote, maybeBreaking) {
+ return `${chalk.cyan(pkg)}: ${local} => ${remote} ` +
+ (maybeBreaking ? chalk.red.bold(`maybe breaking `) : ``) +
+ chalk.gray(`(${path.relative(process.cwd(), filePath)})`)
+}
+
const writeCache = {}
const bufferWrite = (file, content) => {
writeCache[file] = content
@@ -74,15 +97,15 @@ async function syncDeps ({ local, version, skipPrompt }) {
if (!local) {
console.log('Syncing remote deps...')
const packages = await globby(['packages/@vue/*/package.json'])
- await Promise.all(packages.filter(filePath => {
+ const resolvedPackages = (await Promise.all(packages.filter(filePath => {
return filePath.match(/cli-service|cli-plugin|babel-preset|eslint-config/)
}).concat('package.json').map(async (filePath) => {
const pkg = require(path.resolve(__dirname, '../', filePath))
if (!pkg.dependencies) {
return
}
- let isUpdated = false
const deps = pkg.dependencies
+ const resolvedDeps = []
for (const dep in deps) {
if (dep.match(/^@vue/) && !externalVueScopedPackages[dep]) {
continue
@@ -92,9 +115,28 @@ async function syncDeps ({ local, version, skipPrompt }) {
continue
}
local = local.replace(/^\^/, '')
+ readline.clearLine(process.stdout)
+ readline.cursorTo(process.stdout, 0)
+ process.stdout.write(dep)
const remote = await getRemoteVersion(dep)
- if (remote && checkUpdate(dep, filePath, local, remote)) {
- deps[dep] = `^${remote}`
+ resolvedDeps.push({
+ dep,
+ local,
+ remote
+ })
+ }
+ return {
+ pkg,
+ filePath,
+ resolvedDeps
+ }
+ }))).filter(_ => _)
+
+ for (const { pkg, filePath, resolvedDeps } of resolvedPackages) {
+ let isUpdated = false
+ for (const { dep, local, remote } of resolvedDeps) {
+ if (remote && await checkUpdateAsync(dep, filePath, local, remote)) {
+ pkg.dependencies[dep] = `^${remote}`
updatedDeps.add(dep)
isUpdated = true
}
@@ -102,7 +144,7 @@ async function syncDeps ({ local, version, skipPrompt }) {
if (isUpdated) {
bufferWrite(filePath, JSON.stringify(pkg, null, 2) + '\n')
}
- }))
+ }
}
console.log('Syncing local deps...')
|
workflow: improve sync script
allow individual confirmation for each breaking update
|
vuejs_vue-cli
|
train
|
87741a186c41df78a007aa1733c169b7c79a590f
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -2,7 +2,18 @@ var sources = require("webpack-sources");
var fs = require("fs");
var path = require("path");
-exports.StyleUrlResolvePlugin = require('./resource-resolver-plugins/StyleUrlResolvePlugin');
+var projectDir = path.dirname(path.dirname(__dirname));
+var packageJsonPath = path.join(projectDir, "package.json");
+var packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf8"));
+
+var isAngular = Object.keys(packageJson.dependencies).filter(function (dependency) {
+ return /^@angular\b/.test(dependency);
+}).length > 0;
+
+
+if (isAngular) {
+ exports.StyleUrlResolvePlugin = require("./resource-resolver-plugins/StyleUrlResolvePlugin");
+}
//HACK: changes the JSONP chunk eval function to `global["nativescriptJsonp"]`
// applied to tns-java-classes.js only
@@ -10,7 +21,7 @@ exports.NativeScriptJsonpPlugin = function(options) {
};
exports.NativeScriptJsonpPlugin.prototype.apply = function (compiler) {
- compiler.plugin('compilation', function (compilation, params) {
+ compiler.plugin("compilation", function (compilation, params) {
compilation.plugin("optimize-chunk-assets", function (chunks, callback) {
chunks.forEach(function (chunk) {
chunk.files.forEach(function (file) {
@@ -38,7 +49,7 @@ exports.GenerateBundleStarterPlugin.prototype = {
var plugin = this;
plugin.webpackContext = compiler.options.context;
- compiler.plugin('emit', function (compilation, cb) {
+ compiler.plugin("emit", function (compilation, cb) {
console.log(" GenerateBundleStarterPlugin: " + plugin.webpackContext);
compilation.assets["package.json"] = plugin.generatePackageJson();
diff --git a/postinstall.js b/postinstall.js
index <HASH>..<HASH> 100644
--- a/postinstall.js
+++ b/postinstall.js
@@ -55,6 +55,7 @@ configureDevDependencies(packageJson, function (add) {
if (isAngular) {
add("@angular/compiler-cli", "2.3.1");
add("@ngtools/webpack", "1.2.1");
+ add("typescript", "~2.0.10");
} else {
add("awesome-typescript-loader", "~3.0.0-beta.9");
}
|
fix: add typescript@~<I> to ng projects (#<I>)
Required for the StyleUrlResolvePlugin
|
NativeScript_nativescript-dev-webpack
|
train
|
5289a270d5d09a729524f16891c9aed0681027d5
|
diff --git a/ignite-base/Tests/Components/FullButtonTest.js b/ignite-base/Tests/Components/FullButtonTest.js
index <HASH>..<HASH> 100644
--- a/ignite-base/Tests/Components/FullButtonTest.js
+++ b/ignite-base/Tests/Components/FullButtonTest.js
@@ -26,9 +26,3 @@ test('onPress', t => {
wrapper.simulate('press')
t.is(i, 1)
})
-
-test('renders children text when passed', t => {
- const wrapper = shallow(<FullButton onPress={() => {}}>Howdy</FullButton>)
- t.is(wrapper.children().length, 1) // has 1 child
- t.is(wrapper.children().first().name(), 'Text') // that child is Text
-})
|
Rm faulty test for FullButton
|
infinitered_ignite
|
train
|
da2c5c67033dacb95430587358b3c3746fe99fd6
|
diff --git a/pythonwhat/checks/check_wrappers.py b/pythonwhat/checks/check_wrappers.py
index <HASH>..<HASH> 100644
--- a/pythonwhat/checks/check_wrappers.py
+++ b/pythonwhat/checks/check_wrappers.py
@@ -4,6 +4,7 @@ from pythonwhat.checks import check_object, check_logic, check_funcs, has_funcs
from pythonwhat.checks.check_function import check_function
from pythonwhat.checks.check_has_context import has_context
+from inspect import signature, Parameter
from functools import partial, wraps
from jinja2 import Template
@@ -648,25 +649,6 @@ scts = dict()
# make has_equal_part wrappers
-# todo: check @wraps implementation
-
-# def partial_with_state(func, *partial_args, **partial_kwargs):
-# @wraps(func)
-# def state_partial(state, *args, **kwargs):
-# func(state, *partial_args, *args, **{**partial_kwargs, **kwargs})
-# return state_partial
-
-
-# def right_args_partial(func, *last_args, **partial_kwargs):
-# kwargs_partial = partial(func, **partial_kwargs)
-#
-# @wraps(func)
-# def full_partial(*first_args, **kwargs):
-# kwargs_partial(*first_args, *last_args, **kwargs)
-#
-# return full_partial
-
-
def partial_with_offset(offset=1):
def bound_partial_with_offset(func, *partial_args, **partial_kwargs):
kwargs_partial = partial(func, **partial_kwargs)
@@ -676,7 +658,35 @@ def partial_with_offset(offset=1):
full_args = args[:offset] + partial_args + args[offset:]
return kwargs_partial(*full_args, **kwargs)
+ # set correct signature of returned partial
+ # todo: pass arguments as keywords to partial, instead of this decorator?
+ # (where args are always the same)
+ func_sig = signature(full_partial)
+ parameter_names = tuple(func_sig.parameters)
+
+ partialed_positional_indices = []
+ for kwarg in partial_kwargs:
+ param = func_sig.parameters[kwarg]
+ if param.default is param.empty:
+ partialed_positional_indices.append(parameter_names.index(kwarg))
+
+ partial_params = list(func_sig.parameters.values())
+ for index in sorted(partialed_positional_indices, reverse=True):
+ # appending isn't needed for functionality, but more similar to partial
+ # and it shows that these arguments can still be updated as kwargs
+ partial_params.append(
+ partial_params[index].replace(
+ kind=Parameter.KEYWORD_ONLY,
+ default=partial_kwargs[partial_params[index].name],
+ )
+ )
+ del partial_params[index]
+ del partial_params[offset : offset + len(partial_args)]
+
+ full_partial.__signature__ = func_sig.replace(parameters=partial_params)
+
return full_partial
+
return bound_partial_with_offset
|
Fix signature of partial_with_offset functions
|
datacamp_pythonwhat
|
train
|
ef77af3852460477ed1127e6b2a041184a9f0ee9
|
diff --git a/backup/moodle2/backup_stepslib.php b/backup/moodle2/backup_stepslib.php
index <HASH>..<HASH> 100644
--- a/backup/moodle2/backup_stepslib.php
+++ b/backup/moodle2/backup_stepslib.php
@@ -1114,14 +1114,12 @@ class backup_users_structure_step extends backup_structure_step {
$user->set_source_sql('SELECT u.*, c.id AS contextid, m.wwwroot AS mnethosturl
FROM {user} u
JOIN {backup_ids_temp} bi ON bi.itemid = u.id
- JOIN {context} c ON c.instanceid = u.id
+ LEFT JOIN {context} c ON c.instanceid = u.id AND c.contextlevel = ' . CONTEXT_USER . '
LEFT JOIN {mnet_host} m ON m.id = u.mnethostid
WHERE bi.backupid = ?
- AND bi.itemname = ?
- AND c.contextlevel = ?', array(
+ AND bi.itemname = ?', array(
backup_helper::is_sqlparam($this->get_backupid()),
- backup_helper::is_sqlparam('userfinal'),
- backup_helper::is_sqlparam(CONTEXT_USER)));
+ backup_helper::is_sqlparam('userfinal')));
// All the rest on information is only added if we arent
// in an anonymized backup
@@ -1772,12 +1770,15 @@ class backup_annotate_all_user_files extends backup_execution_step {
'backupid' => $this->get_backupid(), 'itemname' => 'userfinal'));
foreach ($rs as $record) {
$userid = $record->itemid;
- $userctxid = get_context_instance(CONTEXT_USER, $userid)->id;
+ $userctx = get_context_instance(CONTEXT_USER, $userid);
+ if (!$userctx) {
+ continue; // User has not context, sure it's a deleted user, so cannot have files
+ }
// Proceed with every user filearea
foreach ($fileareas as $filearea) {
// We don't need to specify itemid ($userid - 5th param) as far as by
// context we can get all the associated files. See MDL-22092
- backup_structure_dbops::annotate_files($this->get_backupid(), $userctxid, 'user', $filearea, null);
+ backup_structure_dbops::annotate_files($this->get_backupid(), $userctx->id, 'user', $filearea, null);
}
}
$rs->close();
|
MDL-<I> backup - avoid missing deleted users on backup
|
moodle_moodle
|
train
|
7df0f2fb296c6e17f2fd5074ec1f1cd80b494474
|
diff --git a/publish/index.js b/publish/index.js
index <HASH>..<HASH> 100644
--- a/publish/index.js
+++ b/publish/index.js
@@ -1,3 +1,5 @@
+require('../lib/shims');
+
var Vue = require('vue');
var clone = require('clone');
var bulk = require('bulk-require');
|
add shims to publish as well
|
mozilla_webmaker-android
|
train
|
84debb3aab428116334fddc00491f651b07739fb
|
diff --git a/animation/__init__.py b/animation/__init__.py
index <HASH>..<HASH> 100644
--- a/animation/__init__.py
+++ b/animation/__init__.py
@@ -2,7 +2,7 @@
__author__ = 'Blake Printy'
__email__ = 'bprinty@gmail.com'
-__version__ = '0.0.5'
+__version__ = '0.0.6'
-from .decorators import wait, simple_wait, Wait
\ No newline at end of file
+from .decorators import wait, simple_wait, Wait
|
Incremented patch version for bug fix
|
bprinty_animation
|
train
|
96610794a88c0f9d0d737b1084999dace6f22400
|
diff --git a/src/remotestorage.js b/src/remotestorage.js
index <HASH>..<HASH> 100644
--- a/src/remotestorage.js
+++ b/src/remotestorage.js
@@ -102,6 +102,7 @@
return;
}
this._emit('connecting');
+ this.remote.configure(userAddress);
RemoteStorage.Discover(userAddress,function(href, storageApi, authURL){
if(!href){
this._emit('error', new RemoteStorage.DiscoveryError('failed to contact storage server'));
|
configure WireClient's "userAddress" property before discovery, so it's set correctly to be displayed in the widget (refs #<I>)
|
remotestorage_remotestorage.js
|
train
|
c5cc9033366b5889e4fdf9f7ea6ff8415c613351
|
diff --git a/packages/cozy-konnector-libs/src/libs/linkBankOperations.js b/packages/cozy-konnector-libs/src/libs/linkBankOperations.js
index <HASH>..<HASH> 100644
--- a/packages/cozy-konnector-libs/src/libs/linkBankOperations.js
+++ b/packages/cozy-konnector-libs/src/libs/linkBankOperations.js
@@ -16,7 +16,7 @@ const groupBy = require('lodash/groupBy')
const flatten = require('lodash/flatten')
const sumBy = require('lodash/sumBy')
const geco = require('geco')
-const format = require('date-fns/format')
+const { format } = require('date-fns')
const cozyClient = require('./cozyclient')
const DOCTYPE_OPERATIONS = 'io.cozy.bank.operations'
diff --git a/packages/cozy-konnector-libs/src/libs/linker/billsToOperation/helpers.js b/packages/cozy-konnector-libs/src/libs/linker/billsToOperation/helpers.js
index <HASH>..<HASH> 100644
--- a/packages/cozy-konnector-libs/src/libs/linker/billsToOperation/helpers.js
+++ b/packages/cozy-konnector-libs/src/libs/linker/billsToOperation/helpers.js
@@ -1,7 +1,5 @@
const sortBy = require('lodash/sortBy')
-const addDays = require('date-fns/addDays')
-const subDays = require('date-fns/subDays')
-const differenceInDays = require('date-fns/differenceInDays')
+const { addDays, subDays, differenceInDays } = require('date-fns')
const getOperationAmountFromBill = (bill, options) => {
const searchingCredit = options && options.credit
diff --git a/packages/cozy-konnector-libs/src/libs/linker/billsToOperation/operationsFilters.js b/packages/cozy-konnector-libs/src/libs/linker/billsToOperation/operationsFilters.js
index <HASH>..<HASH> 100644
--- a/packages/cozy-konnector-libs/src/libs/linker/billsToOperation/operationsFilters.js
+++ b/packages/cozy-konnector-libs/src/libs/linker/billsToOperation/operationsFilters.js
@@ -1,7 +1,7 @@
const includes = require('lodash/includes')
const some = require('lodash/some')
const sumBy = require('lodash/sumBy')
-const isWithinRange = require('date-fns/isWithinInterval')
+const isWithinInterval = require('date-fns/isWithinInterval')
const {
getIdentifiers,
@@ -68,7 +68,7 @@ const filterByIdentifiers = identifiers => {
const filterByDates = ({ minDate, maxDate }) => {
const dateFilter = operation => {
- return isWithinRange(new Date(operation.date), {
+ return isWithinInterval(new Date(operation.date), {
start: new Date(minDate),
end: new Date(maxDate)
})
diff --git a/packages/cozy-konnector-libs/src/libs/utils.js b/packages/cozy-konnector-libs/src/libs/utils.js
index <HASH>..<HASH> 100644
--- a/packages/cozy-konnector-libs/src/libs/utils.js
+++ b/packages/cozy-konnector-libs/src/libs/utils.js
@@ -8,7 +8,7 @@ const groupBy = require('lodash/groupBy')
const keyBy = require('lodash/keyBy')
const sortBy = require('lodash/sortBy')
const range = require('lodash/range')
-const format = require('date-fns/format')
+const { format } = require('date-fns')
/**
* This function allows to fetch all documents for a given doctype. It is the fastest to get all
|
fix: regressions after upgrade of date-fns to version 2
|
konnectors_libs
|
train
|
7b881f12e398fc6bcfdf39a0a3fafde8b48bac7e
|
diff --git a/packages/react-dev-utils/printHostingInstructions.js b/packages/react-dev-utils/printHostingInstructions.js
index <HASH>..<HASH> 100644
--- a/packages/react-dev-utils/printHostingInstructions.js
+++ b/packages/react-dev-utils/printHostingInstructions.js
@@ -26,11 +26,9 @@ function printHostingInstructions(
printBaseMessage(buildFolder, publicPathname);
printDeployInstructions(publicUrl, hasDeployScript, useYarn);
-
} else if (publicPath !== '/') {
// "homepage": "http://mywebsite.com/project"
printBaseMessage(buildFolder, publicPath);
-
} else {
// "homepage": "http://mywebsite.com"
// or no homepage
@@ -39,40 +37,36 @@ function printHostingInstructions(
printStaticServerInstructions(buildFolder, useYarn);
}
console.log();
+ console.log('Find out more about deployment here:');
+ console.log();
+ console.log(` ${chalk.yellow('http://bit.ly/2vY88Kr')}`);
+ console.log();
}
function printBaseMessage(buildFolder, hostingLocation) {
- console.log(
- `The project was built assuming it is hosted at ${chalk.green(
- hostingLocation || 'the server root'
- )}.`
- );
- console.log(
- `You can control this with the ${chalk.green(
- 'homepage'
- )} field in your ${chalk.cyan('package.json')}.`
- );
-
- if (!hostingLocation) {
- console.log('For example, add this to build it for GitHub Pages:');
- console.log();
-
- console.log(
- ` ${chalk.green('"homepage"')} ${chalk.cyan(':')} ${chalk.green(
- '"http://myname.github.io/myapp"'
- )}${chalk.cyan(',')}`
- );
- }
+ console.log(
+ `The project was built assuming it is hosted at ${chalk.green(
+ hostingLocation || 'the server root'
+ )}.`
+ );
+ console.log(
+ `You can control this with the ${chalk.green(
+ 'homepage'
+ )} field in your ${chalk.cyan('package.json')}.`
+ );
+
+ if (!hostingLocation) {
+ console.log('For example, add this to build it for GitHub Pages:');
console.log();
console.log(
- `The ${chalk.cyan(buildFolder)} folder is ready to be deployed.`
+ ` ${chalk.green('"homepage"')} ${chalk.cyan(':')} ${chalk.green(
+ '"http://myname.github.io/myapp"'
+ )}${chalk.cyan(',')}`
);
- console.log()
- console.log('Find out more about deployment here:');
- console.log();
- console.log(` ${chalk.yellow('http://bit.ly/2vY88Kr')}`);
- console.log();
+ }
+ console.log();
+ console.log(`The ${chalk.cyan(buildFolder)} folder is ready to be deployed.`);
}
function printDeployInstructions(publicUrl, hasDeployScript, useYarn) {
@@ -88,20 +82,24 @@ function printDeployInstructions(publicUrl, hasDeployScript, useYarn) {
}
console.log();
- console.log(`Add the following script in your ${chalk.cyan(
- 'package.json'
- )}.`);
+ console.log(
+ `Add the following script in your ${chalk.cyan('package.json')}.`
+ );
console.log();
console.log(` ${chalk.dim('// ...')}`);
console.log(` ${chalk.yellow('"scripts"')}: {`);
console.log(` ${chalk.dim('// ...')}`);
- console.log(` ${chalk.yellow('"predeploy"')}: ${chalk.yellow(
- '"npm run build",'
- )}`);
- console.log(` ${chalk.yellow('"deploy"')}: ${chalk.yellow(
- '"gh-pages -d build"'
- )}`);
+ console.log(
+ ` ${chalk.yellow('"predeploy"')}: ${chalk.yellow(
+ '"npm run build",'
+ )}`
+ );
+ console.log(
+ ` ${chalk.yellow('"deploy"')}: ${chalk.yellow(
+ '"gh-pages -d build"'
+ )}`
+ );
console.log(' }');
console.log();
|
move the link for deployment to the bottom (#<I>)
|
facebook_create-react-app
|
train
|
4b6fb7ce487281c6ae638bbf72fcea92a1df7e1c
|
diff --git a/src/main/java/com/couchbase/lite/replicator/ReplicationInternal.java b/src/main/java/com/couchbase/lite/replicator/ReplicationInternal.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/couchbase/lite/replicator/ReplicationInternal.java
+++ b/src/main/java/com/couchbase/lite/replicator/ReplicationInternal.java
@@ -1300,11 +1300,6 @@ abstract class ReplicationInternal implements BlockingQueueListener {
@Override
public void doIt(Transition<ReplicationState, ReplicationTrigger> transition) {
Log.v(Log.TAG_SYNC, "[onEntry()] " + transition.getSource() + " => " + transition.getDestination());
- saveLastSequence(); // move from databaseClosing() method as databaseClosing() is not called if Rem
- clearDbRef();
-
- // close any active resources associated with this replicator
- close();
// NOTE: Based on StateMachine configuration, this should not happen.
// However, from Unit Test result, this could be happen.
@@ -1314,6 +1309,16 @@ abstract class ReplicationInternal implements BlockingQueueListener {
return;
}
+ saveLastSequence(); // move from databaseClosing() method as databaseClosing() is not called
+
+ // stop network reachablity check
+ stopNetworkReachabilityManager();
+
+ // close any active resources associated with this replicator
+ close();
+
+ clearDbRef();
+
notifyChangeListenersStateTransition(transition);
}
});
|
When Replicator enters STOPPED state, network reachability should be stopped.
|
couchbase_couchbase-lite-java-core
|
train
|
7888b0a4c3e633ece847cbfdfda26ad2fa50a157
|
diff --git a/safe/engine/test_engine.py b/safe/engine/test_engine.py
index <HASH>..<HASH> 100644
--- a/safe/engine/test_engine.py
+++ b/safe/engine/test_engine.py
@@ -1879,7 +1879,7 @@ class Test_Engine(unittest.TestCase):
assert not is_inside_polygon(midpoint, test_polygon)
# Possibly generate files for visual inspection with e.g. QGis
- if True:
+ if False:
P = Vector(geometry=[test_polygon])
P.write_to_file('test_polygon.shp')
|
Disabled storing of files as per issue #<I>
|
inasafe_inasafe
|
train
|
612ba278ebadbbf786da3417f3b825cfd89f05ed
|
diff --git a/lib/licensee/project.rb b/lib/licensee/project.rb
index <HASH>..<HASH> 100644
--- a/lib/licensee/project.rb
+++ b/lib/licensee/project.rb
@@ -26,26 +26,34 @@ class Licensee
@revision = revision
end
- # Detects the license file, if any
- # Returns a Licensee::LicenseFile instance
+ # Returns an instance of Licensee::LicenseFile if there's a license file detected
def license_file
- return @license_file if defined? @license_file
+ @license_file ||= LicenseFile.new(@repository.lookup(license_blob[:oid])) if license_blob
+ end
- commit = @revision ? @repository.lookup(@revision) : @repository.last_commit
- tree = commit.tree.select { |blob| blob[:type] == :blob }
+ # Returns the matching Licensee::License instance if a license can be detected
+ def license
+ @license ||= license_file.match if license_file
+ end
- # Prefer an exact match to one of our known file names
- license_blob = tree.find { |blob| LICENSE_FILENAMES.include? blob[:name].downcase }
+ private
- # Fall back to the first file in the project root that has the word license in it
- license_blob = tree.find { |blob| blob[:name] =~ /license/i } unless license_blob
+ def commit
+ @revision ? @repository.lookup(@revision) : @repository.last_commit
+ end
- @license_file = LicenseFile.new(@repository.lookup(license_blob[:oid])) if license_blob
+ def tree
+ commit.tree.select { |blob| blob[:type] == :blob }
end
- # Returns the matching Licensee::License instance if a license can be detected
- def license
- @license ||= license_file.match if license_file
+ # Detects the license file, if any
+ # Returns the blob hash as detected in the tree
+ def license_blob
+ # Prefer an exact match to one of our known file names
+ license_blob = tree.find { |blob| LICENSE_FILENAMES.include? blob[:name].downcase }
+
+ # Fall back to the first file in the project root that has the word license in it
+ license_blob || tree.find { |blob| blob[:name] =~ /license/i }
end
end
end
diff --git a/test/test_licensee_project.rb b/test/test_licensee_project.rb
index <HASH>..<HASH> 100644
--- a/test/test_licensee_project.rb
+++ b/test/test_licensee_project.rb
@@ -14,6 +14,22 @@ class TestLicenseeProject < Minitest::Test
assert_equal "mit", @project.license.key
end
+ should "know the last commit" do
+ commit = @project.send(:commit)
+ assert_equal Rugged::Commit, commit.class
+ assert_equal "b02cbad9d254c41d16d56ed9d6d2cf07c1d837fd", commit.oid
+ end
+
+ should "retrieve the tree" do
+ tree = @project.send(:tree)
+ assert_equal 1, tree.count
+ assert_equal "bcb552d06d9cf1cd4c048a6d3bf716849c2216cc", tree.first[:oid]
+ end
+
+ should "return the license blob" do
+ assert_equal "LICENSE", @project.send(:license_blob)[:name]
+ end
+
should "detect an atypically cased license file" do
project = Licensee::Project.new fixture_path("case-sensitive.git")
assert_equal Licensee::LicenseFile, project.license_file.class
|
break things up a bit for easier testing
|
licensee_licensee
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.