| hash | diff | message | project | split |
|---|---|---|---|---|
fad70427d56aece9ff597243dc79d1e210aeb27f
|
diff --git a/demo/app.js b/demo/app.js
index <HASH>..<HASH> 100644
--- a/demo/app.js
+++ b/demo/app.js
@@ -1,16 +1,14 @@
-const electron = require('electron');
+const electron = require("electron");
const app = electron.app;
-app.on('ready', function () {
+app.on("ready", function () {
const mainWindow = new electron.BrowserWindow({
webPreferences: {
- nodeIntegration: true,
- contextIsolation: false,
webviewTag: true
}
});
- mainWindow.loadURL('file://' + __dirname + '/electron-tabs.html');
- mainWindow.on('ready-to-show', function () {
+ mainWindow.loadURL("file://" + __dirname + "/electron-tabs.html");
+ mainWindow.on("ready-to-show", function () {
mainWindow.show();
mainWindow.focus();
});
|
Remove nodeIntegration requirement
|
brrd_electron-tabs
|
train
|
6ad9e2bb1d42d83b0cba266012db65e7f97b5220
|
diff --git a/expression/builtin_string_vec.go b/expression/builtin_string_vec.go
index <HASH>..<HASH> 100644
--- a/expression/builtin_string_vec.go
+++ b/expression/builtin_string_vec.go
@@ -298,3 +298,25 @@ func (b *builtinSpaceSig) vecEvalString(input *chunk.Chunk, result *chunk.Column
func (b *builtinSpaceSig) vectorized() bool {
return true
}
+
+// vecEvalString evals a REVERSE(str).
+// See https://dev.mysql.com/doc/refman/5.7/en/string-functions.html#function_reverse
+func (b *builtinReverseSig) vecEvalString(input *chunk.Chunk, result *chunk.Column) error {
+ if err := b.args[0].VecEvalString(b.ctx, input, result); err != nil {
+ return err
+ }
+
+ for i := 0; i < input.NumRows(); i++ {
+ if result.IsNull(i) {
+ continue
+ }
+ str := result.GetString(i)
+ reversed := reverseRunes([]rune(str))
+ result.SetRaw(i, []byte(string(reversed)))
+ }
+ return nil
+}
+
+func (b *builtinReverseSig) vectorized() bool {
+ return true
+}
diff --git a/expression/builtin_string_vec_test.go b/expression/builtin_string_vec_test.go
index <HASH>..<HASH> 100644
--- a/expression/builtin_string_vec_test.go
+++ b/expression/builtin_string_vec_test.go
@@ -45,6 +45,10 @@ var vecBuiltinStringCases = map[string][]vecExprBenchCase{
{retEvalType: types.ETString, childrenTypes: []types.EvalType{types.ETInt}, geners: []dataGenerator{&rangeInt64Gener{-10, 2000}}},
{retEvalType: types.ETString, childrenTypes: []types.EvalType{types.ETInt}, geners: []dataGenerator{&rangeInt64Gener{5, 10}}},
},
+ ast.Reverse: {
+ {retEvalType: types.ETString, childrenTypes: []types.EvalType{types.ETString}, geners: []dataGenerator{&randLenStrGener{10, 20}}},
+ {retEvalType: types.ETString, childrenTypes: []types.EvalType{types.ETString}, geners: []dataGenerator{&defaultGener{0.2, types.ETString}}},
+ },
}
func (s *testEvaluatorSuite) TestVectorizedBuiltinStringEvalOneVec(c *C) {
diff --git a/util/chunk/column.go b/util/chunk/column.go
index <HASH>..<HASH> 100644
--- a/util/chunk/column.go
+++ b/util/chunk/column.go
@@ -549,6 +549,14 @@ func (c *Column) GetRaw(rowID int) []byte {
return data
}
+// SetRaw sets the raw bytes for the rowIdx-th element.
+// NOTE: Two conditions must be satisfied before calling this function:
+// 1. The column should be stored with variable-length elements.
+// 2. The length of the new element should be exactly the same as the old one.
+func (c *Column) SetRaw(rowID int, bs []byte) {
+ copy(c.data[c.offsets[rowID]:c.offsets[rowID+1]], bs)
+}
+
// reconstruct reconstructs this Column by removing all filtered rows in it according to sel.
func (c *Column) reconstruct(sel []int) {
if sel == nil {
|
expression: implement vectorized evaluation for `builtinRevers… (#<I>)
|
pingcap_tidb
|
train
|
f8a2514f8b31ac91dd148a2bd30162df5efdb9b6
|
diff --git a/pyzotero/zotero.py b/pyzotero/zotero.py
index <HASH>..<HASH> 100644
--- a/pyzotero/zotero.py
+++ b/pyzotero/zotero.py
@@ -187,7 +187,8 @@ def retrieve(func):
# we need to dump as a zip!
self.snapshot = True
if fmt == 'bibtex':
- return bibtexparser.loads(retrieved.text, common_strings=True)
+ parser = bibtexparser.bparser.BibTexParser(common_strings=True)
+ return parser.parse(retrieved.text)
# it's binary, so return raw content
elif fmt != 'json':
return retrieved.content
|
Bibtexparser: Load common strings
|
urschrei_pyzotero
|
train
|
89a1a7e26d455a7db325328dd367f6b5e6464d3d
|
diff --git a/fitsio/test.py b/fitsio/test.py
index <HASH>..<HASH> 100644
--- a/fitsio/test.py
+++ b/fitsio/test.py
@@ -292,6 +292,28 @@ class TestReadWrite(unittest.TestCase):
self.vardata = data
+ #
+ # for bitcol columns
+ #
+ nvec = 2
+ ashape=(21,21)
+
+ dtype=[('b1vec','?',nvec),
+
+ ('b1arr','?',ashape)]
+
+ nrows=4
+ data=numpy.zeros(nrows, dtype=dtype)
+
+ for t in ['b1']:
+ data[t+'vec'] = (numpy.arange(nrows*nvec) % 2 == 0).astype('?').reshape(nrows,nvec)
+ arr = (numpy.arange(nrows*ashape[0]*ashape[1]) % 2 == 0).astype('?')
+ data[t+'arr'] = arr.reshape(nrows,ashape[0],ashape[1])
+
+ self.bdata = data
+
+
+
def testHeaderWriteRead(self):
"""
Test a basic header write and read
@@ -1668,6 +1690,49 @@ class TestReadWrite(unittest.TestCase):
traceback.print_exc()
self.assertTrue(False, 'Exception in testing read_raw')
+ def testTableBitcolReadWrite(self):
+ """
+ Test basic write/read with bitcols
+ """
+
+ fname=tempfile.mktemp(prefix='fitsio-TableWriteBitcol-',suffix='.fits')
+ try:
+ with fitsio.FITS(fname,'rw',clobber=True) as fits:
+ try:
+ fits.write_table(self.bdata, extname='mytable', write_bitcols=True)
+ write_success=True
+ except:
+ write_success=False
+
+ self.assertTrue(write_success,"testing write does not raise an error")
+ if not write_success:
+ self.skipTest("cannot test result if write failed")
+
+ d=fits[1].read()
+ self.compare_rec(self.bdata, d, "table read/write")
+
+ # now test read_column
+ with fitsio.FITS(fname) as fits:
+
+ for f in self.bdata.dtype.names:
+ d = fits[1].read_column(f)
+ self.compare_array(self.bdata[f], d, "table 1 single field read '%s'" % f)
+
+ # now list of columns
+ for cols in [['b1vec','b1arr']]:
+ d = fits[1].read(columns=cols)
+ for f in d.dtype.names:
+ self.compare_array(self.bdata[f][:], d[f], "test column list %s" % f)
+
+ rows = [1,3]
+ d = fits[1].read(columns=cols, rows=rows)
+ for f in d.dtype.names:
+ self.compare_array(self.bdata[f][rows], d[f], "test column list %s row subset" % f)
+
+ finally:
+ if os.path.exists(fname):
+ os.remove(fname)
+
def compare_names(self, read_names, true_names, lower=False, upper=False):
for nread,ntrue in zip(read_names,true_names):
if lower:
|
test write/read of bitcols
|
esheldon_fitsio
|
train
|
0c83be5f0991fc27a90b8b4d452bcea3312b41f1
|
diff --git a/manager_utils/manager_utils.py b/manager_utils/manager_utils.py
index <HASH>..<HASH> 100644
--- a/manager_utils/manager_utils.py
+++ b/manager_utils/manager_utils.py
@@ -32,7 +32,7 @@ class ManagerUtilsQuerySet(QuerySet):
Assumes that this model only has one element in the table and returns it. If the table has more
than one or no value, an exception is raised.
"""
- return self.get(id__gte=0)
+ return self.get()
def update(self, **kwargs):
"""
|
updated single function to just use get()
|
ambitioninc_django-manager-utils
|
train
|
67f76d10bf55e867004d125fbdc48475adb0229b
|
diff --git a/lib/window.js b/lib/window.js
index <HASH>..<HASH> 100644
--- a/lib/window.js
+++ b/lib/window.js
@@ -5,11 +5,19 @@ var NativeWindow = require('./bindings').Window,
_slice = Array.prototype.slice,
_apply = Function.prototype.apply,
_bind = Function.prototype.bind,
- handlers = new WeakMap;
+ screenHeight = App.prototype.screenHeight,
+ screenWidth = App.prototype.screenWidth,
+ frames = new WeakMap;
+
+
+function unwrap(o){
+ return frames.get(o);
+}
module.exports = Window;
+
function Window(win){
if (!(this instanceof Window))
return new Window(win);
@@ -17,8 +25,6 @@ function Window(win){
var self = this;
var handler = new WindowHandler(win, this);
var window = Proxy.create(handler, Window.prototype);
- handlers.set(window, handler);
-
this.frame = new Frame(win);
win.transition = function transition(newHandler, props){
@@ -36,7 +42,6 @@ function Window(win){
win.imbue = function imbue(){
var windowProto = window.__proto__;
- installMethods(windowProto);
windowProto._events = handler.target._events;
['on', 'off', 'emit', 'once'].forEach(function(key){
windowProto[key] = EventEmitter.prototype[key];
@@ -50,32 +55,14 @@ function Window(win){
return window;
}
-Object.keys(NativeWindow).forEach(function(key){
- if (typeof NativeWindow[key] === 'function') {
- Window[key] = NativeWindow[key];
+Window.prototype = Object.create(EventEmitter.prototype, {
+ constructor: {
+ configurable: true,
+ writable: true,
+ value: Window
}
});
-var installMethods = function(){
- var windowMethods = [
- 'openDevTools', 'closeDevTools', 'minimize', 'maximize',
- 'restore', 'drag', 'show', 'hide', 'move', 'resize'
- ].map(function(key){
- var method = NativeWindow.prototype[key];
- if (typeof method === 'function') {
- return new Function('h', 'm', 'return function '+key+'(){ m.apply(h.get(this).window, arguments); return this }')(handlers,method);
- }
- }).filter(Boolean);
-
- return function installMethods(o){
- windowMethods.forEach(function(method){
- o[method.name] = method;
- })
- }
-}();
-
-Window.prototype.__proto__ = EventEmitter.prototype;
-installMethods(Window.prototype);
function Reflector(target){
@@ -165,6 +152,14 @@ function RefType(properties, accessors){
};
}
+function WindowHandler(win, target){
+ this.window = win;
+ this.target = target;
+}
+
+WindowHandler.prototype = Object.create(Reflector.prototype);
+
+
RefType.prototype = {
keys: function keys(){
return this.names.concat(Object.keys(this.target));
@@ -244,23 +239,18 @@ RefType.prototype = {
}
};
-var screen = {
- height: App.prototype.screenHeight,
- width: App.prototype.screenWidth
-};
-
var FrameImpl = new RefType(['left', 'top', 'width', 'height', 'title', 'state'], {
get right(){
- return screen.width() - this.left - this.width;
+ return screenWidth() - this.left - this.width;
},
set right(v){
- this.width = Math.max(0, screen.width() - this.left - v);
+ this.width = Math.max(0, screenWidth() - this.left - v);
},
get bottom(){
- return screen.height() - this.top - this.height;
+ return screenHeight() - this.top - this.height;
},
set bottom(v){
- this.height = Math.max(0, screen.height() - this.top - v);
+ this.height = Math.max(0, screenHeight() - this.top - v);
}
});
@@ -270,51 +260,45 @@ function Frame(win){
return frame;
}
-var frames = new WeakMap;
-
Frame.prototype = FrameImpl.prototype = {
- constructor: Frame,
center: function center(){
- frames.get(this).move((screen.height() - this.height) / 2, (screen.width() - this.width) / 2);
+ unwrap(this).move((screenHeight() - this.height) / 2, (screenWidth() - this.width) / 2);
+ return this;
+ },
+ drag: function drag(){
+ unwrap(this).drag();
return this;
},
minimize: function minimize(){
- frames.get(this).minimize();
+ unwrap(this).minimize();
return this;
},
maximize: function maximize(){
- frames.get(this).maximize();
+ unwrap(this).maximize();
return this;
},
restore: function restore(){
- frames.get(this).restore();
+ unwrap(this).restore();
return this;
},
fullscreen: function fullscreen(){
- frames.get(this).fullscreen();
+ unwrap(this).fullscreen();
return this;
},
show: function show(){
- frames.get(this).show();
+ unwrap(this).show();
return this;
},
hide: function hide(){
- frames.get(this).hide();
+ unwrap(this).hide();
return this;
},
move: function move(top, left, width, height){
- frames.get(this).move(top, left, width, height);
+ unwrap(this).move(top, left, width, height);
return this;
},
resize: function resize(width, height){
- frames.get(this).resize(width, height)
+ unwrap(this).resize(width, height)
return this
}
};
-
-function WindowHandler(win, target){
- this.window = win;
- this.target = target;
-}
-
-WindowHandler.prototype = Object.create(Reflector.prototype);
|
cleans up window and frame, removes stuff from window that's now on frame
|
appjs_appjs
|
train
|
c9a7479ba57efcde79256641a673651ff1856531
|
diff --git a/phraseapp/lib.go b/phraseapp/lib.go
index <HASH>..<HASH> 100644
--- a/phraseapp/lib.go
+++ b/phraseapp/lib.go
@@ -2991,5 +2991,5 @@ func (client *Client) VersionsList(project_id, translation_id string, page, perP
}
func GetUserAgent() string {
- return "PhraseApp go (test)"
+ return "PhraseApp go (1.0.0.rc16)"
}
|
<I>.rc<I>
Partial updates when field is mandatory in create
|
phrase_phraseapp-go
|
train
|
db7262ae3ba3c345fb741136e4e079aaa250e19b
|
diff --git a/closure/goog/deps.js b/closure/goog/deps.js
index <HASH>..<HASH> 100644
--- a/closure/goog/deps.js
+++ b/closure/goog/deps.js
@@ -341,7 +341,7 @@ goog.addDependency('locale/timezonefingerprint.js', ['goog.locale.TimeZoneFinger
goog.addDependency('locale/timezonelist.js', ['goog.locale.TimeZoneList'], ['goog.locale']);
goog.addDependency('math/bezier.js', ['goog.math.Bezier'], ['goog.math', 'goog.math.Coordinate']);
goog.addDependency('math/box.js', ['goog.math.Box'], ['goog.math.Coordinate']);
-goog.addDependency('math/coordinate.js', ['goog.math.Coordinate'], []);
+goog.addDependency('math/coordinate.js', ['goog.math.Coordinate'], ['goog.math']);
goog.addDependency('math/coordinate3.js', ['goog.math.Coordinate3'], []);
goog.addDependency('math/exponentialbackoff.js', ['goog.math.ExponentialBackoff'], ['goog.asserts']);
goog.addDependency('math/integer.js', ['goog.math.Integer'], []);
diff --git a/closure/goog/math/coordinate.js b/closure/goog/math/coordinate.js
index <HASH>..<HASH> 100644
--- a/closure/goog/math/coordinate.js
+++ b/closure/goog/math/coordinate.js
@@ -19,6 +19,8 @@
goog.provide('goog.math.Coordinate');
+goog.require('goog.math');
+
/**
@@ -94,6 +96,27 @@ goog.math.Coordinate.distance = function(a, b) {
/**
+ * Returns the magnitude of a coordinate.
+ * @param {!goog.math.Coordinate} a A Coordinate.
+ * @return {number} The distance between the origin and {@code a}.
+ */
+goog.math.Coordinate.magnitude = function(a) {
+ return Math.sqrt(a.x * a.x + a.y * a.y);
+};
+
+
+/**
+ * Returns the angle from the origin to a coordinate.
+ * @param {!goog.math.Coordinate} a A Coordinate.
+ * @return {number} The angle, in degrees, clockwise from the positive X
+ * axis to {@code a}.
+ */
+goog.math.Coordinate.azimuth = function(a) {
+ return goog.math.angle(0, 0, a.x, a.y);
+};
+
+
+/**
* Returns the squared distance between two coordinates. Squared distances can
* be used for comparisons when the actual value is not required.
*
diff --git a/closure/goog/math/coordinate_test.html b/closure/goog/math/coordinate_test.html
index <HASH>..<HASH> 100644
--- a/closure/goog/math/coordinate_test.html
+++ b/closure/goog/math/coordinate_test.html
@@ -65,6 +65,16 @@ function testCoordinateDistance() {
assertEquals(5, goog.math.Coordinate.distance(a, b));
}
+function testCoordinateMagnitude() {
+ var a = new goog.math.Coordinate(5, 5);
+ assertEquals(Math.sqrt(50), goog.math.Coordinate.magnitude(a));
+}
+
+function testCoordinateAzimuth() {
+ var a = new goog.math.Coordinate(5, 5);
+ assertEquals(45, goog.math.Coordinate.azimuth(a));
+}
+
function testCoordinateClone() {
var c = new goog.math.Coordinate();
assertEquals(c.toString(), c.clone().toString());
|
Add magnitude and azimuth helper methods to goog.math.Coordinate.
R=nicksantos
DELTA=<I> (<I> added, 0 deleted, 2 changed)
Revision created by MOE tool push_codebase.
MOE_MIGRATION=<I>
git-svn-id: <URL>
|
google_closure-library
|
train
|
6ec2cbaa6cf236247fd1266226be0f60df6589a1
|
diff --git a/src/Symfony/Component/HttpFoundation/Request.php b/src/Symfony/Component/HttpFoundation/Request.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/HttpFoundation/Request.php
+++ b/src/Symfony/Component/HttpFoundation/Request.php
@@ -410,6 +410,10 @@ class Request
$dup->attributes->set('_format', $this->get('_format'));
}
+ if (!$dup->getRequestFormat(null)) {
+ $dup->setRequestFormat($format = $this->getRequestFormat(null));
+ }
+
return $dup;
}
diff --git a/src/Symfony/Component/HttpFoundation/Tests/RequestTest.php b/src/Symfony/Component/HttpFoundation/Tests/RequestTest.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/HttpFoundation/Tests/RequestTest.php
+++ b/src/Symfony/Component/HttpFoundation/Tests/RequestTest.php
@@ -284,6 +284,12 @@ class RequestTest extends \PHPUnit_Framework_TestCase
$this->assertEquals('json', $dup->getRequestFormat());
$this->assertEquals('json', $dup->attributes->get('_format'));
+
+ $request = new Request();
+ $request->setRequestFormat('xml');
+ $dup = $request->duplicate();
+
+ $this->assertEquals('xml', $dup->getRequestFormat());
}
/**
|
[HttpFoundation] fixed some unit tests
|
symfony_symfony
|
train
|
4c0a670908ac51876be7f5ecc83b6dcad662ce55
|
diff --git a/src/main/java/skadistats/clarity/processor/entities/Entities.java b/src/main/java/skadistats/clarity/processor/entities/Entities.java
index <HASH>..<HASH> 100644
--- a/src/main/java/skadistats/clarity/processor/entities/Entities.java
+++ b/src/main/java/skadistats/clarity/processor/entities/Entities.java
@@ -22,7 +22,7 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
-@Provides({ UsesEntities.class, OnEntityCreated.class, OnEntityUpdated.class, OnEntityDeleted.class })
+@Provides({ UsesEntities.class, OnEntityCreated.class, OnEntityUpdated.class, OnEntityDeleted.class, OnEntityEntered.class, OnEntityLeft.class })
@UsesDTClasses
public class Entities {
|
fix "no provider found" for @OnEntityEntered / Left
|
skadistats_clarity
|
train
|
fa4046787ee2162a585bd0db0344cbef7f0820ef
|
diff --git a/lib/Doctrine/ORM/Tools/ConvertDoctrine1Schema.php b/lib/Doctrine/ORM/Tools/ConvertDoctrine1Schema.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/ORM/Tools/ConvertDoctrine1Schema.php
+++ b/lib/Doctrine/ORM/Tools/ConvertDoctrine1Schema.php
@@ -48,7 +48,8 @@ class ConvertDoctrine1Schema
{
private $_legacyTypeMap = array(
// TODO: This list may need to be updated
- 'clob' => 'text'
+ 'clob' => 'text',
+ 'timestamp' => 'datetime'
);
/**
|
[<I>][DDC-<I>] Fixing mapping of D1 timestamp to D2 datetime
|
doctrine_orm
|
train
|
eddaf1843fa41b673d2d8e8791181ef0d7a40fdb
|
diff --git a/test/exec.js b/test/exec.js
index <HASH>..<HASH> 100644
--- a/test/exec.js
+++ b/test/exec.js
@@ -17,9 +17,10 @@ if (ostype === "windows") {
}
virtualbox.start(vm, function(){
- virtualbox.exec({ 'vm': vm, 'user': user, 'passwd': pass, 'path': path, 'params': [args[1] || 'http://google.com'] }, function(error){
+ virtualbox.exec({ 'vm': vm, 'user': user, 'passwd': pass, 'path': path, 'params': [args[1] || 'http://google.com'] }, function(error, stdout){
if(error) {
throw error;
}
+ console.log(stdout);
});
});
|
printing stdout in exec test
|
Node-Virtualization_node-virtualbox
|
train
|
277ea4bf35cb5f11db8eaf2ae13831cbab2d3bd3
|
diff --git a/doc/changelog.rst b/doc/changelog.rst
index <HASH>..<HASH> 100644
--- a/doc/changelog.rst
+++ b/doc/changelog.rst
@@ -2,6 +2,9 @@
What's New
==========
+* Fixed ``AttributeError`` in proxy when using PhantomJS and ``pre_response`` scripting hook.
+
+
0.1006 (2015-02-01)
===================
diff --git a/wpull/ftp/request.py b/wpull/ftp/request.py
index <HASH>..<HASH> 100644
--- a/wpull/ftp/request.py
+++ b/wpull/ftp/request.py
@@ -173,6 +173,7 @@ class Response(DictableMixin, ProtocolResponseMixin):
return 'ftp'
def to_dict(self):
+ # TODO: check if body.to_dict() exists
return {
'protocol': 'ftp',
'request': self.request.to_dict(),
diff --git a/wpull/http/request.py b/wpull/http/request.py
index <HASH>..<HASH> 100644
--- a/wpull/http/request.py
+++ b/wpull/http/request.py
@@ -34,6 +34,7 @@ class RawRequest(SerializableMixin, DictableMixin):
self.encoding = 'latin-1'
def to_dict(self):
+ # TODO: check if body.to_dict() exists
return {
'protocol': 'http',
'method': self.method,
diff --git a/wpull/proxy.py b/wpull/proxy.py
index <HASH>..<HASH> 100644
--- a/wpull/proxy.py
+++ b/wpull/proxy.py
@@ -139,6 +139,11 @@ class Session(object):
response = yield From(session.fetch(request))
+ # XXX: scripting hook tries to call to_dict() on body.
+ # we set it to None so it doesn't error
+ if request.body:
+ request.body = None
+
if self._pre_response_callback:
self._pre_response_callback(request, response)
|
proxy: Set request.body to None as it is not a Body instance
Re: chfoo/wpull#<I>
|
ArchiveTeam_wpull
|
train
|
8a2f592bd3619b3767f994cbbf631b388577d042
|
diff --git a/utilities/build.py b/utilities/build.py
index <HASH>..<HASH> 100644
--- a/utilities/build.py
+++ b/utilities/build.py
@@ -10,6 +10,7 @@ from pymomo.utilities.typedargs.annotate import *
from pymomo.utilities.typedargs.exceptions import *
from collections import namedtuple
from copy import deepcopy
+import itertools
@takes_cmdline
def build(args):
@@ -175,15 +176,26 @@ class TargetSettings:
paths = MomoPaths()
base = paths.modules
+
+ incs = [y for x,y in self.settings.iteritems() if x.endswith('includes')]
+ includes = itertools.chain(*incs)
- includes = []
+ fullpaths = [os.path.normpath(os.path.join(base, x)) for x in includes]
+ return fullpaths
+
+ def extra_sources(self):
+ """
+ If the architectures have specified that extra source files be included, return a list of paths to
+ those source files.
+ """
- if "includes" in self.settings:
- includes += self.settings['includes']
- if "extra_includes" in self.settings:
- includes += self.settings['extra_includes']
+ paths = MomoPaths()
+ base = paths.modules
+
+ srcs = [y for x,y in self.settings.iteritems() if x.endswith('sources')]
+ sources = itertools.chain(*srcs)
- fullpaths = [os.path.normpath(os.path.join(base, x)) for x in includes]
+ fullpaths = [os.path.normpath(os.path.join(base, x)) for x in sources]
return fullpaths
|
Working pic<I> unit test construction
TODO:
- implement main function
- write runner for simulator
- write log file parser
Finished:
- Unit test type is now autoselected based on the type specified in
the file
|
iotile_coretools
|
train
|
55351c1abe6b9703c787a29b06fab0900eb70c55
|
diff --git a/emma2/msm/estimation/api.py b/emma2/msm/estimation/api.py
index <HASH>..<HASH> 100644
--- a/emma2/msm/estimation/api.py
+++ b/emma2/msm/estimation/api.py
@@ -113,11 +113,10 @@ def connected_count_matrix(C):
"""
return sparse.connectivity.connected_count_matrix(C)
-# TODO: Implement in Python directly
def is_connected(C):
"""Return true if C is a countmatrix for a completely connected process.
"""
- raise NotImplementedError('Not implemented.')
+ return sparse.connectivity.is_connected(C)
# TODO: Implement in Python directly
def mapping(set):
@@ -172,7 +171,6 @@ def transition_matrix(C, reversible=False, mu=None, **kwargs):
"""
if reversible:
- """
from emma2.util.stallone import stallone_available
print stallone_available
if stallone_available == False:
@@ -185,7 +183,6 @@ def transition_matrix(C, reversible=False, mu=None, **kwargs):
return stallone.ArrayWrapper(stallone.API.msm.estimateTrev(C))
except stallone.JavaError as je:
raise RuntimeError(je.getJavaException())
- """
else:
if issparse(C):
return sparse.transition_matrix.transition_matrix(C, reversible, mu)
diff --git a/emma2/msm/estimation/sparse/connectivity.py b/emma2/msm/estimation/sparse/connectivity.py
index <HASH>..<HASH> 100644
--- a/emma2/msm/estimation/sparse/connectivity.py
+++ b/emma2/msm/estimation/sparse/connectivity.py
@@ -108,4 +108,22 @@ def connected_count_matrix(C):
return C_cc.tocoo()
+def is_connected(C):
+ r"""Return true, if the input count matrix is completely connected.
+ Effectively checking if the number of connected components equals one.
+
+ Parameters
+ ----------
+ C : scipy.sparse matrix
+ Count matrix specifying edge weights.
+
+ Returns
+ -------
+ connected : boolean, returning true only if C is connected.
+
+
+ """
+ nc, indices=csgraph.connected_components(C, directed=True, connection='strong')
+
+ return nc == 1
|
implemented is_connected
no unit test yet
|
markovmodel_PyEMMA
|
train
|
9b8b20b50243a674f4b6de5d665a490b592e55ab
|
diff --git a/lib/telegram/models.rb b/lib/telegram/models.rb
index <HASH>..<HASH> 100644
--- a/lib/telegram/models.rb
+++ b/lib/telegram/models.rb
@@ -163,7 +163,7 @@ module Telegram
@chat = chat
@id = chat['id']
- @title = chat.has_key?('title') ? chat['title'] : chat['print_name']
+ @name = @title = chat.has_key?('title') ? chat['title'] : chat['print_name']
@type = chat['type']
@members = []
|
Fix: attribute `name` is not assigned
|
ssut_telegram-rb
|
train
|
b1be2b477853f5ff2edd02c2812f51af51249b75
|
diff --git a/.gitignore b/.gitignore
index <HASH>..<HASH> 100644
--- a/.gitignore
+++ b/.gitignore
@@ -37,3 +37,9 @@ node_modules
# IDE
.idea
+
+# IDE: WebStorm
+.settings
+
+# Tern configure file
+.tern-project
diff --git a/lib/utils/job-utils/command-parser.js b/lib/utils/job-utils/command-parser.js
index <HASH>..<HASH> 100644
--- a/lib/utils/job-utils/command-parser.js
+++ b/lib/utils/job-utils/command-parser.js
@@ -36,7 +36,7 @@ function commandParserFactory(Logger, Promise, _) {
testEsesQ = 'sudo test_eses -q std --xml',
amiBios = 'cd /opt/ami; sudo ./afulnx_64 /S',
flashupdt = 'sudo /opt/intel/flashupdt -i',
- smart = 'sudo /opt/scripts/get_smart.sh';
+ smart = 'sudo /opt/scripts/get_smart.sh';
var matchParsers = {};
matchParsers.ipmiUserList = {
@@ -228,11 +228,11 @@ function commandParserFactory(Logger, Promise, _) {
}
};
- CommandParser.prototype[smart] = function(data) {
- if (data.error) {
- return Promise.resolve({ source: 'smart', error: data.error });
- }
- try {
+ CommandParser.prototype[smart] = function(data) {
+ if (data.error) {
+ return Promise.resolve({ source: 'smart', error: data.error });
+ }
+ try {
var parsed = [];
var splitData = data.stdout.split(/^#{4,}\s*([^\n\r]+)[\r\n]+/m);
@@ -257,11 +257,11 @@ function commandParserFactory(Logger, Promise, _) {
}
});
- return Promise.resolve({ data: parsed, source: 'smart', store: true });
- } catch (e) {
- return Promise.resolve({ source: 'smart', error: e });
- }
- };
+ return Promise.resolve({ data: parsed, source: 'smart', store: true });
+ } catch (e) {
+ return Promise.resolve({ source: 'smart', error: e });
+ }
+ };
CommandParser.prototype[dmi] = function(data) {
if (data.error) {
@@ -1154,6 +1154,9 @@ function commandParserFactory(Logger, Promise, _) {
strBuf += lines[i].trim();
}
}
+ if (strBuf !== '') { // if this is ignored, the last capabilities entry will be missed.
+ combineLines.push(strBuf);
+ }
//After combination, each line will become something like this:
//name: (0x84) annotation1.annotation2.annotation3
@@ -1377,9 +1380,13 @@ function commandParserFactory(Logger, Promise, _) {
}
var entry = {};
- for (var j = 0; j < colHeaders.length; j+=1) {
+ var j;
+ for (j = 0; j < colHeaders.length - 1; j += 1) {
entry[colHeaders[j]] = arr[j]; //Fill each column data
}
+ //There maybe more cells than the header, we append these cells to the last column.
+ //For example, the 'RAW_VALUE' in the attributes table may be like '31 (Min/Max 31/31)'
+ entry[colHeaders[j]] = arr.slice(j, arr.length).join(' ');
resultObj.push(entry);
}
diff --git a/spec/lib/utils/job-utils/command-parser-spec.js b/spec/lib/utils/job-utils/command-parser-spec.js
index <HASH>..<HASH> 100644
--- a/spec/lib/utils/job-utils/command-parser-spec.js
+++ b/spec/lib/utils/job-utils/command-parser-spec.js
@@ -910,6 +910,12 @@ describe("Task Parser", function () {
expect(elem).property('WHEN_FAILED').to.equal('-');
expect(elem).property('RAW_VALUE').to.match(/^\d+/);
});
+ // Add test case for Jira MAG-91 issue
+ expect(attr['Attributes Table'][15]).property('RAW_VALUE').equal('40 (Min/Max 25/47)'); //jshint ignore:line
+ expect(attr['Attributes Table'][9]).property('RAW_VALUE').equal('1275616690298');
+ expect(attr['Attributes Table'][23]).property('ID#').equal('242');
+ expect(attr['Attributes Table'][21]).property('UPDATED').equal('Offline');
+ expect(attr['Attributes Table'][17]).property('ATTRIBUTE_NAME').equal('Reallocated_Event_Count');//jshint ignore:line
var cap = smart.Capabilities;
expect(cap).that.is.an('array');
@@ -920,6 +926,11 @@ describe("Task Parser", function () {
expect(elem).property('Annotation').is.an('array');
expect(elem).property('Annotation').have.length.least(1);
});
+ // Add test case for Jira MAG-91 issue
+ expect(cap[cap.length-1]).property('Name').to.equal('SCT capabilities');
+ expect(cap[cap.length-1]).property('Value').to.equal('0x003d');
+ expect(cap[cap.length-1]).property('Annotation').have.length(4);
+ expect(cap[cap.length-1]).property('Annotation').property(3).to.equal('SCT Data Table supported');//jshint ignore:line
var errlog = smart['Error Log'];
expect(errlog).contain.all.keys('Error Log Table', 'Revision');
|
Fix some smart parser issues recorded in MAG-<I>
|
RackHD_on-tasks
|
train
|
2f1e14ac38402243718595eb7d104c1cbbe59020
|
diff --git a/media/boom/js/boom.page.js b/media/boom/js/boom.page.js
index <HASH>..<HASH> 100755
--- a/media/boom/js/boom.page.js
+++ b/media/boom/js/boom.page.js
@@ -54,7 +54,6 @@ $.extend($.boom, {
@returns {Promise} promise which notifies a page ID when a page is selected.
*/
picker : function( $element ){
- console.log( $element );
var self = this;
var complete = new $.Deferred();
@@ -225,11 +224,12 @@ $.extend($.boom, {
url: '/cms/page/add/' + self.config.id,
title: $(this).text(),
onLoad : function() {
+
self.picker( $( this ).find( '.boom-tree' ) )
.progress( function( page_id ){
- console.log( page_id );
$( 'input[name=parent_id]' ).val( page_id );
});
+
},
callback: function(){
diff --git a/media/boom/js/boom.plugins.js b/media/boom/js/boom.plugins.js
index <HASH>..<HASH> 100755
--- a/media/boom/js/boom.plugins.js
+++ b/media/boom/js/boom.plugins.js
@@ -690,19 +690,6 @@ boom.plugins.js
};
});
- this.elements.container =
- $('<div />')
- .addClass('boom-tree-container ui-widget')
- .width(this.options.width)
- .height(this.options.height);
-
- if (this.options.height != 'auto') {
- this.elements.container.css({ overflow: 'auto' });
- }
-
- if (this.options.border) this.elements.container.addClass('ui-state-active ui-corner-all');
-
- this.element.wrap(this.elements.container);
},
/**
@@ -719,6 +706,20 @@ boom.plugins.js
self._add_item( $( this ) );
});
+
+ var $container =
+ $('<div />')
+ .addClass('boom-tree-container ui-widget')
+ .width(this.options.width)
+ .height(this.options.height);
+
+ if (this.options.height != 'auto') {
+ $container.css({ overflow: 'auto' });
+ }
+
+ if (this.options.border) $container.addClass('ui-state-active ui-corner-all');
+
+ this.element.wrap($container);
},
/**
@@ -1075,6 +1076,8 @@ boom.plugins.js
destroy : function(){
$.Widget.prototype.destroy.apply(this, arguments);
+
+ console.log( this.element );
this.element
.find('li')
|
bugfix – tree.destroy() did not always add the tree-container element.
|
boomcms_boom-core
|
train
|
f094167befa724954887f1b41b4cb3fdee834913
|
diff --git a/admin/tool/behat/version.php b/admin/tool/behat/version.php
index <HASH>..<HASH> 100644
--- a/admin/tool/behat/version.php
+++ b/admin/tool/behat/version.php
@@ -24,6 +24,6 @@
defined('MOODLE_INTERNAL') || die();
-$plugin->version = 2014051200;
-$plugin->requires = 2014050800; // Requires Moodle 2.5.
-$plugin->component = 'tool_behat';
+$plugin->version = 2014051200; // The current plugin version (Date: YYYYMMDDXX)
+$plugin->requires = 2014050800; // Requires this Moodle version
+$plugin->component = 'tool_behat'; // Full name of the plugin (used for diagnostics)
diff --git a/course/format/singleactivity/version.php b/course/format/singleactivity/version.php
index <HASH>..<HASH> 100644
--- a/course/format/singleactivity/version.php
+++ b/course/format/singleactivity/version.php
@@ -24,6 +24,6 @@
defined('MOODLE_INTERNAL') || die();
-$plugin->version = 2014051200; // The current plugin version (Date: YYYYMMDDXX)
-$plugin->requires = 2014050800; // Requires this Moodle version (2.6)
-$plugin->component = 'format_singleactivity'; // Full name of the plugin (used for diagnostics).
+$plugin->version = 2014051200; // The current plugin version (Date: YYYYMMDDXX)
+$plugin->requires = 2014050800; // Requires this Moodle version
+$plugin->component = 'format_singleactivity'; // Full name of the plugin (used for diagnostics)
diff --git a/repository/areafiles/version.php b/repository/areafiles/version.php
index <HASH>..<HASH> 100644
--- a/repository/areafiles/version.php
+++ b/repository/areafiles/version.php
@@ -24,6 +24,6 @@
defined('MOODLE_INTERNAL') || die();
-$plugin->version = 2014051200; // The current plugin version (Date: YYYYMMDDXX)
-$plugin->requires = 2014050800; // Requires this Moodle version (Moodle 2.3 - 2.5)
+$plugin->version = 2014051200; // The current plugin version (Date: YYYYMMDDXX)
+$plugin->requires = 2014050800; // Requires this Moodle version
$plugin->component = 'repository_areafiles'; // Full name of the plugin (used for diagnostics)
|
MDL-<I> Bump all versions to planned release
|
moodle_moodle
|
train
|
a6eba587a4660e5730d46977b94f4c8c62239b6a
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -50,6 +50,8 @@ setup(
download_url = metainfo['download_url'],
classifiers = metainfo['classifiers'],
+ zip_safe=False,
+
# package installation
package_dir = {'':'src'},
packages = ["reports"],
diff --git a/test/test_htmltable.py b/test/test_htmltable.py
index <HASH>..<HASH> 100644
--- a/test/test_htmltable.py
+++ b/test/test_htmltable.py
@@ -1,13 +1,11 @@
from reports import HTMLTable
-
+import pandas as pd
def test_htmltable():
- import pandas as pd
df = pd.DataFrame({'A':[1,2,10], 'B':[1,10,2]})
- from reports import HTMLTable
table = HTMLTable(df)
diff --git a/test/test_report.py b/test/test_report.py
index <HASH>..<HASH> 100644
--- a/test/test_report.py
+++ b/test/test_report.py
@@ -1,4 +1,4 @@
-from reports import report
+from reports import Report
from easydev import TempFile
import os
@@ -13,7 +13,7 @@ class Test_report():
pass
def test(self):
- r = report.Report()
+ r = Report()
r.create_report(onweb=False)
# test setter
@@ -30,7 +30,7 @@ class Test_report():
with open(temp_filename, "w") as fin:
fin.write("{{ test }}")
try:
- r = report.Report(".", template_filename=temp_filename)
+ r = Report(".", template_filename=temp_filename)
r.jinja['test'] = 'youpi'
r.create_report(onweb=False)
with open("report/index.html", "r") as fin:
|
add zip_safe=False in setup and try to fix travis run
|
cokelaer_reports
|
train
|
a005ba12941b726b347b2b624618a3aca4345e3c
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,9 @@
+## 3.1.53 Feb 21, 2020
+
+Added `--showAmbiguous` to `compare-sequences.py`. Pass `--iupac-codes` to
+`bcftools consensus` in `make-consensus.py`. Add `--max-depth 5000` to
+`bcftools mpileup` call, also in `make-consensus.py`.
+
## 3.1.52 Feb 21, 2020
Make `sam-coverage-depth.py` not throw an error if there is no coverage at
diff --git a/bin/compare-sequences.py b/bin/compare-sequences.py
index <HASH>..<HASH> 100755
--- a/bin/compare-sequences.py
+++ b/bin/compare-sequences.py
@@ -167,6 +167,11 @@ parser.add_argument(
'--showDiffs', default=False, action='store_true',
help='Print (1-based) sites where the sequence nucleotides differ.')
+parser.add_argument(
+ '--showAmbiguous', default=False, action='store_true',
+ help=('Print (1-based) sites where either sequence has an ambiguous '
+ 'nucleotide code.'))
+
addFASTACommandLineOptions(parser)
args = parser.parse_args()
@@ -256,3 +261,17 @@ if args.showDiffs:
if not headerPrinted:
print('No sequence differences found.')
+
+if args.showAmbiguous:
+ width = int(log10(max(len1, len2))) + 1
+ headerPrinted = False
+ for site, (a, b) in enumerate(zip(read1.sequence, read2.sequence),
+ start=1):
+ if not (a in 'ACGT' and b in 'ACGT'):
+ if not headerPrinted:
+ print('Ambiguities (site, %s, %s):' % (read1.id, read2.id))
+ headerPrinted = True
+ print(' %*d %s %s' % (width, site, a, b))
+
+ if not headerPrinted:
+ print('No sequence ambiguities found.')
diff --git a/bin/make-consensus.py b/bin/make-consensus.py
index <HASH>..<HASH> 100755
--- a/bin/make-consensus.py
+++ b/bin/make-consensus.py
@@ -83,7 +83,7 @@ def main():
if args.bam:
# No VCF file provided, so make one.
vcfFile = join(tempdir, 'vcf.gz')
- e.execute("bcftools mpileup -Ou -f '%s' '%s' | "
+ e.execute("bcftools mpileup --max-depth 5000 -Ou -f '%s' '%s' | "
"bcftools call -mv -Oz -o '%s'" %
(args.reference, args.bam, vcfFile))
@@ -100,7 +100,8 @@ def main():
consensusFile = join(tempdir, 'consensus.fasta')
result = e.execute(
- "bcftools consensus --sample '%s' --fasta-ref '%s' '%s' > '%s'" %
+ "bcftools consensus --sample '%s' --iupac-codes --fasta-ref "
+ "'%s' '%s' > '%s'" %
(sample, args.reference, vcfFile, consensusFile))
consensus = list(FastaReads(consensusFile))[0]
diff --git a/bin/sam-coverage-depth.py b/bin/sam-coverage-depth.py
index <HASH>..<HASH> 100755
--- a/bin/sam-coverage-depth.py
+++ b/bin/sam-coverage-depth.py
@@ -80,7 +80,7 @@ with samfile(args.samfile) as sam:
if samFilter.referenceIds:
# No need to check if the given reference id is in referenceLengths
- # because the samFilter.referenceLengths call above catched that.
+ # because the samFilter.referenceLengths call above catches that.
referenceId = samFilter.referenceIds.pop()
else:
if len(referenceLengths) == 1:
diff --git a/dark/__init__.py b/dark/__init__.py
index <HASH>..<HASH> 100644
--- a/dark/__init__.py
+++ b/dark/__init__.py
@@ -7,4 +7,4 @@ if sys.version_info < (2, 7):
# will not be found by the version() function in ../setup.py
#
# Remember to update ../CHANGELOG.md describing what's new in each version.
-__version__ = '3.1.52'
+__version__ = '3.1.53'
|
Added --showAmbiguous to compare-sequences.py. Pass --iupac-codes to bcftools consensus in make-consensus.py. Add --max-depth <I> to bcftools mpileup call, also in make-consensus.py.
|
acorg_dark-matter
|
train
|
787a285fc7378eacb50ea84ab05c6641a01db188
|
diff --git a/cmd/admin/admin.go b/cmd/admin/admin.go
index <HASH>..<HASH> 100644
--- a/cmd/admin/admin.go
+++ b/cmd/admin/admin.go
@@ -33,6 +33,8 @@ func (t *cmdAdmin) Main(d map[string]interface{}) {
t.handleConfigConvert(d)
case d["--config-restore"] != nil:
t.handleConfigRestore(d)
+ case d["--list-dashboard"].(bool):
+ t.handleListDashboard(d)
}
}
@@ -409,3 +411,35 @@ func (t *cmdAdmin) handleConfigRestore(d map[string]interface{}) {
}
}
}
+
+func (t *cmdAdmin) handleListDashboard(d map[string]interface{}) {
+ client := t.newTopomClient(d)
+ defer client.Close()
+
+ list, err := client.List("/codis3")
+ if err != nil {
+ log.PanicErrorf(err, "list products failed")
+ }
+ for _, path := range list {
+ var elem = &struct {
+ Name string `json:"name"`
+ Dashboard string `json:"dashboard"`
+ }{filepath.Base(path), ""}
+
+ if b, err := client.Read(filepath.Join(path, "topom")); err != nil {
+ log.PanicErrorf(err, "read topom of product %s failed", elem.Name)
+ } else if b != nil {
+ var t = &models.Topom{}
+ if err := json.Unmarshal(b, t); err != nil {
+ log.PanicErrorf(err, "decode json failed")
+ }
+ elem.Dashboard = t.AdminAddr
+ }
+
+ if b, err := json.MarshalIndent(elem, "", " "); err != nil {
+ log.PanicErrorf(err, "json encode failed")
+ } else {
+ fmt.Printf("%s\n", b)
+ }
+ }
+}
diff --git a/cmd/admin/main.go b/cmd/admin/main.go
index <HASH>..<HASH> 100644
--- a/cmd/admin/main.go
+++ b/cmd/admin/main.go
@@ -41,10 +41,11 @@ Usage:
codis-admin [-v] --dashboard=ADDR --slot-action --create-range --beg=ID --end=ID --gid=ID
codis-admin [-v] --dashboard=ADDR --slot-action --interval=VALUE
codis-admin [-v] --dashboard=ADDR --slot-action --disabled=VALUE
- codis-admin [-v] --remove-lock --product=NAME (--zookeeper=ADDR|--etcd=ADDR)
- codis-admin [-v] --config-dump --product=NAME (--zookeeper=ADDR|--etcd=ADDR) [-1]
- codis-admin [-v] --config-convert=FILE
- codis-admin [-v] --config-restore=FILE --product=NAME (--zookeeper=ADDR|--etcd=ADDR) [--confirm]
+ codis-admin [-v] --remove-lock --product=NAME (--zookeeper=ADDR|--etcd=ADDR)
+ codis-admin [-v] --config-dump --product=NAME (--zookeeper=ADDR|--etcd=ADDR) [-1]
+ codis-admin [-v] --config-convert=FILE
+ codis-admin [-v] --config-restore=FILE --product=NAME (--zookeeper=ADDR|--etcd=ADDR) [--confirm]
+ codis-admin [-v] --list-dashboard (--zookeeper=ADDR|--etcd=ADDR)
Options:
-a AUTH, --auth=AUTH
diff --git a/pkg/proxy/config.go b/pkg/proxy/config.go
index <HASH>..<HASH> 100644
--- a/pkg/proxy/config.go
+++ b/pkg/proxy/config.go
@@ -75,7 +75,7 @@ type Config struct {
func NewDefaultConfig() *Config {
c := &Config{}
if _, err := toml.Decode(DefaultConfig, c); err != nil {
- log.PanicErrorf(err, "decode config failed")
+ log.PanicErrorf(err, "decode toml failed")
}
return c
}
diff --git a/pkg/topom/config.go b/pkg/topom/config.go
index <HASH>..<HASH> 100644
--- a/pkg/topom/config.go
+++ b/pkg/topom/config.go
@@ -46,7 +46,7 @@ type Config struct {
func NewDefaultConfig() *Config {
c := &Config{}
if _, err := toml.Decode(DefaultConfig, c); err != nil {
- log.PanicErrorf(err, "decode config failed")
+ log.PanicErrorf(err, "decode toml failed")
}
return c
}
|
Add: codis-admin can find all dashboard instances
|
CodisLabs_codis
|
train
|
a67bab66242517e6802a57d8f7ceaeac0797f188
|
diff --git a/search/query/conjunction.go b/search/query/conjunction.go
index <HASH>..<HASH> 100644
--- a/search/query/conjunction.go
+++ b/search/query/conjunction.go
@@ -70,9 +70,14 @@ func (q *ConjunctionQuery) Searcher(i index.IndexReader, m mapping.IndexMapping,
}
ss = append(ss, sr)
}
+
if len(ss) < 1 {
return searcher.NewMatchNoneSearcher(i)
+ } else if len(ss) == 1 {
+ // return single nested searcher as is
+ return ss[0], nil
}
+
return searcher.NewConjunctionSearcher(i, ss, options)
}
diff --git a/search/query/disjunction.go b/search/query/disjunction.go
index <HASH>..<HASH> 100644
--- a/search/query/disjunction.go
+++ b/search/query/disjunction.go
@@ -76,9 +76,14 @@ func (q *DisjunctionQuery) Searcher(i index.IndexReader, m mapping.IndexMapping,
}
ss = append(ss, sr)
}
+
if len(ss) < 1 {
return searcher.NewMatchNoneSearcher(i)
+ } else if len(ss) == 1 {
+ // return the single nested searcher as is
+ return ss[0], nil
}
+
return searcher.NewDisjunctionSearcher(i, ss, q.Min, options)
}
|
MB-<I>: Minimal optimization for conjunction/disjunction searchers
If the number of nested searchers within a conjunction/disjunction
searcher is one, just return the nested searcher unwrapped as is, to
avoid an unnecessary level of indirection.
|
blevesearch_bleve
|
train
|
8c36dd624ef1c4a66cc19576d1318881c225b4da
|
diff --git a/structr-core/src/test/java/org/structr/test/schema/SchemaTest.java b/structr-core/src/test/java/org/structr/test/schema/SchemaTest.java
index <HASH>..<HASH> 100644
--- a/structr-core/src/test/java/org/structr/test/schema/SchemaTest.java
+++ b/structr-core/src/test/java/org/structr/test/schema/SchemaTest.java
@@ -983,6 +983,40 @@ public class SchemaTest extends StructrTest {
}
}
+ @Test
+ public void testRelatedTypeOnNotionProperty() {
+
+ cleanDatabaseAndSchema();
+
+ try (final Tx tx = app.tx()) {
+
+ final JsonSchema schema = StructrSchema.createFromDatabase(app);
+
+ final JsonObjectType project = schema.addType("Project");
+ final JsonObjectType task = schema.addType("Task");
+ final JsonReferenceType rel = project.relate(task, "TASK", Cardinality.OneToMany, "project", "tasks");
+ final JsonReferenceProperty ref = rel.getSourceProperty();
+
+ project.addStringProperty("blah").setUnique(true);
+
+ task.addReferenceProperty("projectBlah", ref).setProperties("blah", "true");
+
+ Settings.LogSchemaOutput.setValue(true);
+
+ StructrSchema.extendDatabaseSchema(app, schema);
+
+ tx.success();
+
+ } catch (Throwable t) {
+
+ t.printStackTrace();
+ fail("NotionProperty setup failed.");
+ }
+
+ Settings.LogSchemaOutput.setValue(true);
+ }
+
+
// ----- private methods -----
private void checkSchemaString(final String source) {
|
Adds schema test from master manually to avoid complex merge.
|
structr_structr
|
train
|
146f9c9f82601be83f3fef230dd1bde0b98abbc1
|
diff --git a/client/json-socket-connection.js b/client/json-socket-connection.js
index <HASH>..<HASH> 100644
--- a/client/json-socket-connection.js
+++ b/client/json-socket-connection.js
@@ -18,7 +18,7 @@ var JSONSocketConnection = W.Object.extend({
W.extend(this, W.eventMixin);
this.socketUrl = options.socketUrl;
this._connectionDesired = false;
- this.attemptReconnectionAfterMS = (typeof attemptReconnectionAfterMS !== 'undefined') ? options.attemptReconnectionAfterMS : 1000;
+ this.attemptReconnectionAfterMS = (typeof options.attemptReconnectionAfterMS !== 'undefined') ? options.attemptReconnectionAfterMS : 1000;
},
openSocketConnection : function () {
this._connectionDesired = true;
|
Fixed JSONSocketConnection attemptReconnectionAfterMS undefined condition
|
theworkers_W.js
|
train
|
f1df7dd9e0174b48682958367de884e49e2672b0
|
diff --git a/tests/tests.py b/tests/tests.py
index <HASH>..<HASH> 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -66,5 +66,23 @@ class TestUB(unittest.TestCase):
self.assertFalse(ub.is_rfc1918("172.15.10.10"))
self.assertFalse(ub.is_rfc1918("192.30.252.130"))
+ def test_is_reserved(self):
+ self.assertIsInstance(ub.is_reserved("10.10.10.10"), bool)
+ self.assertTrue(ub.is_reserved("0.0.1.1"))
+ self.assertTrue(ub.is_reserved("10.100.100.100"))
+ self.assertTrue(ub.is_reserved("100.90.200.200"))
+ self.assertTrue(ub.is_reserved("127.50.50.1"))
+ self.assertTrue(ub.is_reserved("192.0.0.50"))
+ self.assertTrue(ub.is_reserved("192.0.2.50"))
+ self.assertTrue(ub.is_reserved("192.88.99.50"))
+ self.assertTrue(ub.is_reserved("192.168.50.50"))
+ self.assertTrue(ub.is_reserved("198.18.50.50"))
+ self.assertTrue(ub.is_reserved("198.51.100.50"))
+ self.assertTrue(ub.is_reserved("203.0.113.50"))
+ self.assertTrue(ub.is_reserved("224.50.50.50"))
+ self.assertFalse(ub.is_reserved("3.0.0.0"))
+ self.assertFalse(ub.is_reserved("8.8.4.4"))
+ self.assertFalse(ub.is_reserved("192.30.252.131"))
+
if __name__ == '__main__':
unittest.main()
diff --git a/utilitybelt.py b/utilitybelt.py
index <HASH>..<HASH> 100644
--- a/utilitybelt.py
+++ b/utilitybelt.py
@@ -77,6 +77,37 @@ def is_rfc1918(ip):
else:
return False
+def is_reserved(ip):
+ if ip_between (ip, "0.0.0.0", "0.255.255.255"):
+ return True
+ elif ip_between (ip, "10.0.0.0", "10.255.255.255"):
+ return True
+ elif ip_between (ip, "100.64.0.0", "100.127.255.255"):
+ return True
+ elif ip_between (ip, "127.0.0.0", "127.255.255.255"):
+ return True
+ elif ip_between (ip, "169.254.0.0", "169.254.255.255"):
+ return True
+ elif ip_between (ip, "172.16.0.0", "172.31.255.255"):
+ return True
+ elif ip_between (ip, "192.0.0.0", "192.0.0.255"):
+ return True
+ elif ip_between (ip, "192.0.2.0", "192.0.2.255"):
+ return True
+ elif ip_between (ip, "192.88.99.0", "192.88.99.255"):
+ return True
+ elif ip_between (ip, "192.168.0.0", "192.168.255.255"):
+ return True
+ elif ip_between (ip, "198.18.0.0", "198.19.255.255"):
+ return True
+ elif ip_between (ip, "198.51.100.0", "198.51.100.255"):
+ return True
+ elif ip_between (ip, "203.0.113.0", "203.0.113.255"):
+ return True
+ elif ip_between (ip, "224.0.0.0", "255.255.255.255"):
+ return True
+ else:
+ return False
def is_IPv4Address(ipv4address):
"""Returns true for valid IPv4 Addresses, false for invalid."""
|
added is_reserved & tests
|
yolothreat_utilitybelt
|
train
|
d433aa8400249a5437f990a517d2d0f050086391
|
diff --git a/azurerm/internal/services/cosmos/cosmosdb_cassandra_keyspace_resource.go b/azurerm/internal/services/cosmos/cosmosdb_cassandra_keyspace_resource.go
index <HASH>..<HASH> 100644
--- a/azurerm/internal/services/cosmos/cosmosdb_cassandra_keyspace_resource.go
+++ b/azurerm/internal/services/cosmos/cosmosdb_cassandra_keyspace_resource.go
@@ -5,6 +5,8 @@ import (
"log"
"time"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/migration"
+
"github.com/Azure/azure-sdk-for-go/services/preview/cosmos-db/mgmt/2020-04-01-preview/documentdb"
"github.com/hashicorp/go-azure-helpers/response"
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
@@ -12,7 +14,6 @@ import (
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/common"
- "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/migration"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/parse"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/validate"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts"
@@ -32,11 +33,7 @@ func resourceCosmosDbCassandraKeyspace() *schema.Resource {
SchemaVersion: 1,
StateUpgraders: []schema.StateUpgrader{
- {
- Type: migration.ResourceCassandraKeyspaceUpgradeV0Schema().CoreConfigSchema().ImpliedType(),
- Upgrade: migration.ResourceCassandraKeyspaceStateUpgradeV0ToV1,
- Version: 0,
- },
+ migration.CassandraKeyspaceV0ToV1(),
},
Timeouts: &schema.ResourceTimeout{
diff --git a/azurerm/internal/services/cosmos/migration/cassandra_keyspace.go b/azurerm/internal/services/cosmos/migration/cassandra_keyspace.go
index <HASH>..<HASH> 100644
--- a/azurerm/internal/services/cosmos/migration/cassandra_keyspace.go
+++ b/azurerm/internal/services/cosmos/migration/cassandra_keyspace.go
@@ -5,40 +5,47 @@ import (
"strings"
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
- "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
- "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/cosmos/validate"
)
-func ResourceCassandraKeyspaceUpgradeV0Schema() *schema.Resource {
+func CassandraKeyspaceV0ToV1() schema.StateUpgrader {
+ return schema.StateUpgrader{
+ Type: cassandraKeyspaceSchemaForV0().CoreConfigSchema().ImpliedType(),
+ Upgrade: cassandraKeyspaceUpgradeV0ToV1,
+ Version: 0,
+ }
+}
+
+func cassandraKeyspaceSchemaForV0() *schema.Resource {
return &schema.Resource{
Schema: map[string]*schema.Schema{
"name": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- ValidateFunc: validate.CosmosEntityName,
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
},
- "resource_group_name": azure.SchemaResourceGroupName(),
+ "resource_group_name": {
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ },
"account_name": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- ValidateFunc: validate.CosmosAccountName,
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
},
"throughput": {
- Type: schema.TypeInt,
- Optional: true,
- Computed: true,
- ValidateFunc: validate.CosmosThroughput,
+ Type: schema.TypeInt,
+ Optional: true,
+ Computed: true,
},
},
}
}
-func ResourceCassandraKeyspaceStateUpgradeV0ToV1(rawState map[string]interface{}, meta interface{}) (map[string]interface{}, error) {
+func cassandraKeyspaceUpgradeV0ToV1(rawState map[string]interface{}, meta interface{}) (map[string]interface{}, error) {
oldId := rawState["id"].(string)
newId := strings.Replace(rawState["id"].(string), "apis/cassandra/keyspaces", "cassandraKeyspaces", 1)
|
r/cassandra_keyspace: refactoring to match the other state upgraders
|
terraform-providers_terraform-provider-azurerm
|
train
|
aa3cf9c8061894f1a85f0bfee6a41926bae4a2e7
|
diff --git a/moto/ec2/models.py b/moto/ec2/models.py
index <HASH>..<HASH> 100644
--- a/moto/ec2/models.py
+++ b/moto/ec2/models.py
@@ -85,6 +85,7 @@ from .utils import (
simple_aws_filter_to_re,
is_valid_cidr,
filter_internet_gateways,
+ filter_reservations,
)
@@ -588,7 +589,7 @@ class InstanceBackend(object):
if instance.id == instance_id:
return instance
- def get_reservations_by_instance_ids(self, instance_ids):
+ def get_reservations_by_instance_ids(self, instance_ids, filters=None):
""" Go through all of the reservations and filter to only return those
associated with the given instance_ids.
"""
@@ -605,15 +606,20 @@ class InstanceBackend(object):
if len(found_instance_ids) != len(instance_ids):
invalid_id = list(set(instance_ids).difference(set(found_instance_ids)))[0]
raise InvalidInstanceIdError(invalid_id)
+ if filters is not None:
+ reservations = filter_reservations(reservations, filters)
return reservations
- def all_reservations(self, make_copy=False):
+ def all_reservations(self, make_copy=False, filters=None):
if make_copy:
# Return copies so that other functions can modify them with changing
# the originals
- return [copy.deepcopy(reservation) for reservation in self.reservations.values()]
+ reservations = [copy.deepcopy(reservation) for reservation in self.reservations.values()]
else:
- return [reservation for reservation in self.reservations.values()]
+ reservations = [reservation for reservation in self.reservations.values()]
+ if filters is not None:
+ reservations = filter_reservations(reservations, filters)
+ return reservations
class KeyPairBackend(object):
diff --git a/moto/ec2/responses/instances.py b/moto/ec2/responses/instances.py
index <HASH>..<HASH> 100644
--- a/moto/ec2/responses/instances.py
+++ b/moto/ec2/responses/instances.py
@@ -3,20 +3,18 @@ from jinja2 import Template
from moto.core.responses import BaseResponse
from moto.core.utils import camelcase_to_underscores
-from moto.ec2.utils import instance_ids_from_querystring, filters_from_querystring, filter_reservations, \
+from moto.ec2.utils import instance_ids_from_querystring, filters_from_querystring, \
dict_from_querystring, optional_from_querystring
class InstanceResponse(BaseResponse):
def describe_instances(self):
+ filter_dict = filters_from_querystring(self.querystring)
instance_ids = instance_ids_from_querystring(self.querystring)
if instance_ids:
- reservations = self.ec2_backend.get_reservations_by_instance_ids(instance_ids)
+ reservations = self.ec2_backend.get_reservations_by_instance_ids(instance_ids, filters=filter_dict)
else:
- reservations = self.ec2_backend.all_reservations(make_copy=True)
-
- filter_dict = filters_from_querystring(self.querystring)
- reservations = filter_reservations(reservations, filter_dict)
+ reservations = self.ec2_backend.all_reservations(make_copy=True, filters=filter_dict)
template = Template(EC2_DESCRIBE_INSTANCES)
return template.render(reservations=reservations)
|
Move filter logic from responses.InstanceResponse to models.InstanceBackend
|
spulec_moto
|
train
|
f4d3441977a76fdf38bbd2c3ded73627260ce1ef
|
diff --git a/public/js/editor.js b/public/js/editor.js
index <HASH>..<HASH> 100644
--- a/public/js/editor.js
+++ b/public/js/editor.js
@@ -301,7 +301,7 @@ apos.Editor = function(options) {
$table.prepend($(markup));
// last the bottom control row: an add button for a new row
- $table.append($('<tr data-control-row><th class="apos-table-button"><a href="#" title="Add Row" data-add-row>+</a></th></tr>'));
+ $table.append($('<tr data-control-row><th colspan="1000" class="apos-table-button"><a href="#" title="Add Row" data-add-row>+</a></th></tr>'));
};
self.countTableColumns = function($table) {
|
in table editor, make Add Row <th> tag have a huge colspan to fake complete table
|
apostrophecms_apostrophe
|
train
|
24a1a6d75f28e66fed76fa638adcae180c6472aa
|
diff --git a/src/util/globals.js b/src/util/globals.js
index <HASH>..<HASH> 100644
--- a/src/util/globals.js
+++ b/src/util/globals.js
@@ -1 +1,6 @@
-export default ['inherit', 'initial', 'unset'];
+/**
+ * CSS4 specification link;
+ * https://drafts.csswg.org/css-cascade/#defaulting-keywords
+ */
+
+export default ['inherit', 'initial', 'revert', 'unset'];
|
Add revert keyword to globals.
|
ben-eb_css-values
|
train
|
b656c722705e3952323d8b82477338dc99c513f6
|
diff --git a/java/server/src/org/openqa/selenium/remote/server/log/LoggingManager.java b/java/server/src/org/openqa/selenium/remote/server/log/LoggingManager.java
index <HASH>..<HASH> 100644
--- a/java/server/src/org/openqa/selenium/remote/server/log/LoggingManager.java
+++ b/java/server/src/org/openqa/selenium/remote/server/log/LoggingManager.java
@@ -55,7 +55,7 @@ public class LoggingManager {
resetLoggerToOriginalState();
overrideSimpleFormatterWithTerseOneForConsoleHandler(currentLogger, debugMode);
addInMemoryLogger(currentLogger, options);
- addPerSessionLogger(currentLogger, options, debugMode);
+ //addPerSessionLogger(currentLogger, options, debugMode);
if (debugMode) {
currentLogger.setLevel(Level.FINE);
}
|
Fixing memory leak on grid node. It is a temporary fix; LoggingManager
needs to be rethought as a whole. Fixes issue <I>
|
SeleniumHQ_selenium
|
train
|
2d897ed2e2adf87c0fd7c4099b9bb7e4382f04fb
|
diff --git a/flexid.go b/flexid.go
index <HASH>..<HASH> 100644
--- a/flexid.go
+++ b/flexid.go
@@ -131,8 +131,13 @@ type Identifier interface {
ID() ExternalID
// AsFlexID allows for easy conversion of all Identifiers to the most forgiving struct
AsFlexID() *FlexID
+ // AsManifoldID allows for conversion of all Identifiers to a Manifold identifier if
+ // compatible, otherwise an error is returned and the ID is nil
+ AsManifoldID() (*ID, error)
// IsEmpty returns true if the ID is considered empty
IsEmpty() bool
+ // Equals checks the equality of this Identifier against another
+ Equals(Identifier) bool
}
// NewFlexID constructs a FlexID from the provided Domain, Class, and ID parts
@@ -295,6 +300,16 @@ func (id FlexID) IsEmpty() bool {
return false
}
+// Equals is implemented to allow for easy comparison of FlexIDs to IDs using the
+// Identifier interface
+func (id FlexID) Equals(oid Identifier) bool {
+ if oid == nil {
+ return false
+ }
+ fid := oid.AsFlexID()
+ return fid != nil && *fid == id
+}
+
// Ensure interface adherence
var (
_ runtime.Validatable = Domain("")
diff --git a/flexid_test.go b/flexid_test.go
index <HASH>..<HASH> 100644
--- a/flexid_test.go
+++ b/flexid_test.go
@@ -26,6 +26,7 @@ func init() {
if err != nil {
panic(err)
}
+ _, _ = validID.AsManifoldID()
// Call AsFlexID for coverage on both types :D
validFlexID = validID.AsFlexID().AsFlexID()
@@ -492,3 +493,109 @@ func TestFlexID_IsEmpty(t *testing.T) {
})
}
}
+
+func TestFlexID_Equals(t *testing.T) {
+ tests := []struct {
+ name string
+ id FlexID
+ oid Identifier
+ want bool
+ }{
+ {
+ name: "FlexID equals FlexID - ok",
+ id: FlexID{"bus.com", "driver", "Benny"},
+ oid: FlexID{"bus.com", "driver", "Benny"},
+ want: true,
+ },
+ {
+ name: "FlexID equals ID - ok",
+ id: *validFlexID,
+ oid: validID,
+ want: true,
+ },
+ {
+ name: "Empty FlexID equals Empty FlexID - ok",
+ id: FlexID{},
+ oid: FlexID{},
+ want: true,
+ },
+ {
+ name: "Empty FlexID equals Empty ID - not ok",
+ id: FlexID{},
+ oid: ID{},
+ want: false,
+ },
+ {
+ name: "FlexID equals Nil - not ok",
+ id: *validFlexID,
+ oid: nil,
+ want: false,
+ },
+ {
+ name: "FlexID equals different FlexID - not ok",
+ id: FlexID{"bus.com", "driver", "Benny"},
+ oid: FlexID{"bus.com", "driver", "Bob"},
+ want: false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := tt.id.Equals(tt.oid); got != tt.want {
+ t.Errorf("FlexID.Equals() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestID_Equals(t *testing.T) {
+ tests := []struct {
+ name string
+ id ID
+ oid Identifier
+ want bool
+ }{
+ {
+ name: "ID equals ID - ok",
+ id: validID,
+ oid: validID,
+ want: true,
+ },
+ {
+ name: "ID equals FlexID - ok",
+ id: validID,
+ oid: *validFlexID,
+ want: true,
+ },
+ {
+ name: "Empty ID equals Empty ID - ok",
+ id: ID{},
+ oid: ID{},
+ want: true,
+ },
+ {
+ name: "Empty ID equals Empty FlexID - not ok",
+ id: ID{},
+ oid: FlexID{},
+ want: false,
+ },
+ {
+ name: "ID equals Nil - not ok",
+ id: validID,
+ oid: nil,
+ want: false,
+ },
+ {
+ name: "ID equals different ID - not ok",
+ id: validID,
+ oid: ID{},
+ want: false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := tt.id.Equals(tt.oid); got != tt.want {
+ t.Errorf("FlexID.Equals() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
diff --git a/id.go b/id.go
index <HASH>..<HASH> 100644
--- a/id.go
+++ b/id.go
@@ -215,3 +215,19 @@ func (id ID) ID() ExternalID {
func (id ID) AsFlexID() *FlexID {
return FlexIDFromID(id)
}
+
+// AsManifoldID returns the ID as itself without error, implemented to adhere
+// to the Identifier interface
+func (id ID) AsManifoldID() (*ID, error) {
+ return &id, nil
+}
+
+// Equals is implemented to allow for easy comparison of FlexIDs to IDs using the
+// Identifier interface
+func (id ID) Equals(oid Identifier) bool {
+ if oid == nil {
+ return false
+ }
+ mid, _ := oid.AsManifoldID()
+ return mid != nil && *mid == id
+}
|
Expanded Identifier Interface (#<I>)
This expands the identifier interface to make it easier to work with FlexIDs and IDs side by side.
The plan is to use this change to refactor the Principal interface to run off of Identifiers
instead of the struct type ID, allowing us to use FlexIDs more interchangeably within Principals
in marketplace.
Relates manifoldco/engineering#<I>
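
A minimal, self-contained sketch of the comparison pattern this commit adds, using simplified stand-in types (the real package's FlexID carries Domain/Class/ID parts, returns *FlexID from AsFlexID, and also offers AsManifoldID): equality is declared on the interface, and each implementation converts the other side to its own representation before comparing.

package main

import "fmt"

// Simplified stand-ins for the package's Identifier/FlexID; the real types
// live in manifoldco/go-manifold and carry more behavior.
type Identifier interface {
	AsFlexID() FlexID
	Equals(Identifier) bool
}

type FlexID [3]string

func (id FlexID) AsFlexID() FlexID { return id }

// Equals converts the other Identifier to a FlexID before comparing, so
// mixed concrete types can be checked through the interface.
func (id FlexID) Equals(oid Identifier) bool {
	if oid == nil {
		return false
	}
	return oid.AsFlexID() == id
}

func main() {
	a := FlexID{"bus.com", "driver", "Benny"}
	fmt.Println(a.Equals(FlexID{"bus.com", "driver", "Benny"})) // true
	fmt.Println(a.Equals(nil))                                  // false
}

The nil check mirrors the diff: a nil Identifier never equals anything, which keeps callers from having to guard before comparing.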
|
manifoldco_go-manifold
|
train
|
002868819ee1291b8fb102d92f599614c066e20a
|
diff --git a/src/main/java/com/couchbase/lite/Database.java b/src/main/java/com/couchbase/lite/Database.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/couchbase/lite/Database.java
+++ b/src/main/java/com/couchbase/lite/Database.java
@@ -2284,7 +2284,9 @@ public final class Database {
change.setDatabase(this);
if (options.getKeys() != null) {
docs.put(docId, change);
- } else {
+ }
+ // TODO: In the future, we need to implement CBLRowPassesFilter() in CBLView+Querying.m
+ else if (options.getPostFilter() == null || options.getPostFilter().apply(change)) {
rows.add(change);
}
}
diff --git a/src/main/java/com/couchbase/lite/Query.java b/src/main/java/com/couchbase/lite/Query.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/couchbase/lite/Query.java
+++ b/src/main/java/com/couchbase/lite/Query.java
@@ -489,6 +489,7 @@ public class Query {
queryOptions.setAllDocsMode(getAllDocsMode());
queryOptions.setStartKeyDocId(getStartKeyDocId());
queryOptions.setEndKeyDocId(getEndKeyDocId());
+ queryOptions.setPostFilter(getPostFilter());
return queryOptions;
}
|
Added the missing code for postFilter
1. Database.java - fix for querying all documents with filter.
2. Query.java - fix for applying filter to queryOptions.
|
couchbase_couchbase-lite-java-core
|
train
|
313c3c41df8ed430aad79ad4d1254ad8f36653c4
|
diff --git a/shardingsphere-scaling/shardingsphere-scaling-core/src/test/java/org/apache/shardingsphere/scaling/core/job/preparer/splitter/InventoryTaskSplitterTest.java b/shardingsphere-scaling/shardingsphere-scaling-core/src/test/java/org/apache/shardingsphere/scaling/core/job/preparer/splitter/InventoryTaskSplitterTest.java
index <HASH>..<HASH> 100644
--- a/shardingsphere-scaling/shardingsphere-scaling-core/src/test/java/org/apache/shardingsphere/scaling/core/job/preparer/splitter/InventoryTaskSplitterTest.java
+++ b/shardingsphere-scaling/shardingsphere-scaling-core/src/test/java/org/apache/shardingsphere/scaling/core/job/preparer/splitter/InventoryTaskSplitterTest.java
@@ -19,9 +19,7 @@ package org.apache.shardingsphere.scaling.core.job.preparer.splitter;
import org.apache.shardingsphere.scaling.core.common.datasource.DataSourceManager;
import org.apache.shardingsphere.scaling.core.config.DumperConfiguration;
-import org.apache.shardingsphere.scaling.core.config.ImporterConfiguration;
import org.apache.shardingsphere.scaling.core.config.TaskConfiguration;
-import org.apache.shardingsphere.scaling.core.config.datasource.StandardJDBCDataSourceConfiguration;
import org.apache.shardingsphere.scaling.core.job.JobContext;
import org.apache.shardingsphere.scaling.core.job.position.PrimaryKeyPosition;
import org.apache.shardingsphere.scaling.core.job.task.inventory.InventoryTask;
@@ -34,9 +32,7 @@ import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
-import java.util.HashMap;
import java.util.List;
-import java.util.Map;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertNotNull;
@@ -44,12 +40,6 @@ import static org.junit.Assert.assertThat;
public final class InventoryTaskSplitterTest {
- private static final String DATA_SOURCE_URL = "jdbc:h2:mem:test_db;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=MySQL";
-
- private static final String USERNAME = "root";
-
- private static final String PASSWORD = "password";
-
private JobContext jobContext;
private TaskConfiguration taskConfig;
@@ -60,16 +50,11 @@ public final class InventoryTaskSplitterTest {
@Before
public void setUp() {
- jobContext = mockJobContext();
+ initJobContext();
dataSourceManager = new DataSourceManager();
inventoryTaskSplitter = new InventoryTaskSplitter();
}
- @After
- public void tearDown() {
- dataSourceManager.close();
- }
-
@Test
public void assertSplitInventoryDataWithIntPrimary() throws SQLException {
taskConfig.getHandleConfig().setShardingSize(10);
@@ -105,6 +90,11 @@ public final class InventoryTaskSplitterTest {
assertThat(actual.size(), is(1));
}
+ @After
+ public void tearDown() {
+ dataSourceManager.close();
+ }
+
private void initIntPrimaryEnvironment(final DumperConfiguration dumperConfig) throws SQLException {
DataSource dataSource = dataSourceManager.getDataSource(dumperConfig.getDataSourceConfig());
try (Connection connection = dataSource.getConnection();
@@ -147,20 +137,8 @@ public final class InventoryTaskSplitterTest {
}
}
- private JobContext mockJobContext() {
- JobContext result = new JobContext(ResourceUtil.mockJobConfig());
- result.getJobConfig().getHandleConfig().setDatabaseType("H2");
- result.getJobConfig().getHandleConfig().setShardingSize(10);
- taskConfig = new TaskConfiguration(result.getJobConfig().getHandleConfig(), mockDumperConfig(), new ImporterConfiguration());
- return result;
- }
-
- private DumperConfiguration mockDumperConfig() {
- DumperConfiguration result = new DumperConfiguration();
- result.setDataSourceConfig(new StandardJDBCDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD));
- Map<String, String> tableMap = new HashMap<>(1, 1);
- tableMap.put("t_order", "t_order");
- result.setTableNameMap(tableMap);
- return result;
+ private void initJobContext() {
+ jobContext = new JobContext(ResourceUtil.mockJobConfig());
+ taskConfig = jobContext.getTaskConfigs().iterator().next();
}
}
|
Refactor InventoryTaskSplitterTest (#<I>)
Co-authored-by: qiulu3 <Lucas<I>>
|
apache_incubator-shardingsphere
|
train
|
18e4c9d86c249689c2585f7acb8441f048213210
|
diff --git a/src/canmatrix/tests/test_canmatrix.py b/src/canmatrix/tests/test_canmatrix.py
index <HASH>..<HASH> 100644
--- a/src/canmatrix/tests/test_canmatrix.py
+++ b/src/canmatrix/tests/test_canmatrix.py
@@ -243,6 +243,9 @@ def test_signal_get_startbit():
def test_signal_get_startbit_conversion():
signal_big = canmatrix.canmatrix.Signal(start_bit=2, size=16, is_little_endian=False)
assert signal_big.get_startbit(start_little=True) == 17 # looking for "end" of the signal: 2 + (16 - 1)
+
+ signal_big = canmatrix.canmatrix.Signal(start_bit=32, size=4, is_little_endian=False)
+ assert signal_big.get_startbit(bit_numbering=True, start_little=True) == 36
# TODO add test for reversed endianning
diff --git a/src/canmatrix/tests/test_dbc.py b/src/canmatrix/tests/test_dbc.py
index <HASH>..<HASH> 100644
--- a/src/canmatrix/tests/test_dbc.py
+++ b/src/canmatrix/tests/test_dbc.py
@@ -430,6 +430,46 @@ def test_signal_inital_value():
canmatrix.formats.dump(matrix, outdbc, "dbc")
assert 'BA_ "GenSigStartValue" SG_ 17 sig1 2.7;' in outdbc.getvalue().decode('utf8')
+
+def test_candbpp_startbit():
+ dbc = io.BytesIO(textwrap.dedent(u'''\
+ BO_ 1809 MSG: 8 DEV2
+ SG_ SIG1 : 39|4@0+ (1,0) [0|1] "" DEV1
+ SG_ SIG2 : 52|1@0+ (1,0) [0|1] "" DEV1
+ SG_ SIG3 : 51|12@0+ (0.1,0) [0|360] "°" DEV1
+ SG_ SIG4 : 6|1@0+ (1,0) [0|1] "" DEV1
+ SG_ SIG5 : 5|1@0+ (1,0) [0|1] "" DEV1
+ SG_ SIG6 : 23|3@0+ (1,0) [0|1] "" DEV1
+ SG_ SIG7 : 7|1@0+ (1,0) [0|1] "" DEV1
+ SG_ SIG8 : 34|11@0+ (0.1,-102.4) [-32|32] "A" DEV1
+ SG_ SIG9 : 18|11@0+ (0.1,-102.4) [-62.5|62.5] "A" DEV1
+ SG_ SIG10 : 4|13@0+ (0.1,0) [350|450] "V" DEV1
+ ''').encode('utf-8'))
+ matrix = canmatrix.formats.dbc.load(dbc, dbcImportEncoding="utf8")
+ # Motorola forward LSB
+ assert matrix.frames[0].signal_by_name("SIG1").get_startbit(True, True) == 36
+ assert matrix.frames[0].signal_by_name("SIG2").get_startbit(True, True) == 52
+ assert matrix.frames[0].signal_by_name("SIG3").get_startbit(True, True) == 56
+ assert matrix.frames[0].signal_by_name("SIG4").get_startbit(True, True) == 6
+ assert matrix.frames[0].signal_by_name("SIG5").get_startbit(True, True) == 5
+ assert matrix.frames[0].signal_by_name("SIG6").get_startbit(True, True) == 21
+ assert matrix.frames[0].signal_by_name("SIG7").get_startbit(True, True) == 7
+ assert matrix.frames[0].signal_by_name("SIG8").get_startbit(True, True) == 40
+ assert matrix.frames[0].signal_by_name("SIG9").get_startbit(True, True) == 24
+ assert matrix.frames[0].signal_by_name("SIG10").get_startbit(True, True) == 8
+ # Motorola forward MSB
+ assert matrix.frames[0].signal_by_name("SIG1").get_startbit(True, False) == 39
+ assert matrix.frames[0].signal_by_name("SIG2").get_startbit(True, False) == 52
+ assert matrix.frames[0].signal_by_name("SIG3").get_startbit(True, False) == 51
+ assert matrix.frames[0].signal_by_name("SIG4").get_startbit(True, False) == 6
+ assert matrix.frames[0].signal_by_name("SIG5").get_startbit(True, False) == 5
+ assert matrix.frames[0].signal_by_name("SIG6").get_startbit(True, False) == 23
+ assert matrix.frames[0].signal_by_name("SIG7").get_startbit(True, False) == 7
+ assert matrix.frames[0].signal_by_name("SIG8").get_startbit(True, False) == 34
+ assert matrix.frames[0].signal_by_name("SIG9").get_startbit(True, False) == 18
+ assert matrix.frames[0].signal_by_name("SIG10").get_startbit(True, False) == 4
+
+
def test_missing_space():
dbc = io.BytesIO(textwrap.dedent(u'''\
BO_ 17 Frame_1: 8 Vector__XXX
@@ -438,3 +478,4 @@ def test_missing_space():
matrix = canmatrix.formats.dbc.load(dbc, dbcImportEncoding="utf8")
assert matrix.frames[0].signals[0].name == "sig1"
+
|
add tests for issue #<I> (#<I>)
add tests for Motorola forward MSB and Motorola forward LSB start_bit
|
ebroecker_canmatrix
|
train
|
061d79bda90f717c24cbd4edec1c4a3901249b99
|
diff --git a/gwpy/timeseries/io/gwf/framecpp.py b/gwpy/timeseries/io/gwf/framecpp.py
index <HASH>..<HASH> 100644
--- a/gwpy/timeseries/io/gwf/framecpp.py
+++ b/gwpy/timeseries/io/gwf/framecpp.py
@@ -498,7 +498,7 @@ def write(tsdict, outfile,
duration = end - start
ifos = {ts.channel.ifo for ts in tsdict.values() if
ts.channel and ts.channel.ifo and
- ts.channel.ifo in io_framecpp.DetectorLocation}
+ ts.channel.ifo in io_framecpp.DetectorLocation.__members__}
# create frame
frame = io_gwf.create_frame(
|
gwpy.timeseries: use __members__ when searching enum
not doing this raises an error on python<I>
|
gwpy_gwpy
|
train
|
e4da0a11151f4e37d78ce0f33ad9bb93a0a497af
|
diff --git a/flask_jwt_extended/utils.py b/flask_jwt_extended/utils.py
index <HASH>..<HASH> 100644
--- a/flask_jwt_extended/utils.py
+++ b/flask_jwt_extended/utils.py
@@ -11,7 +11,7 @@ from flask_jwt_extended.config import config
from flask_jwt_extended.internal_utils import get_jwt_manager
# Proxy to access the current user
-current_user = LocalProxy(lambda: get_current_user())
+current_user: Any = LocalProxy(lambda: get_current_user())
def get_jwt() -> dict:
|
Fix mypy errors with current_user (fixes #<I>)
|
vimalloc_flask-jwt-extended
|
train
|
0fda5582e8e489518a9d1ef378d4a48d532341e8
|
diff --git a/lib/grape/dsl/inside_route.rb b/lib/grape/dsl/inside_route.rb
index <HASH>..<HASH> 100644
--- a/lib/grape/dsl/inside_route.rb
+++ b/lib/grape/dsl/inside_route.rb
@@ -37,6 +37,9 @@ module Grape
key.each_pair do |parent, children|
output_key = options[:stringify] ? parent.to_s : parent.to_sym
+
+ next unless options[:include_missing] || children || params[parent]
+
if params.key?(parent) || options[:include_missing]
hash[output_key] = if children
declared(params[parent] || {}, options, Array(children))
diff --git a/spec/grape/endpoint_spec.rb b/spec/grape/endpoint_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/grape/endpoint_spec.rb
+++ b/spec/grape/endpoint_spec.rb
@@ -328,6 +328,40 @@ describe Grape::Endpoint do
get '/declared?first=one&other=two'
expect(last_response.status).to eq(200)
end
+
+ it 'does not include missing attributes when there are nested hashes' do
+ subject.get '/dummy' do
+ end
+
+ subject.params do
+ requires :first
+ optional :second
+ optional :third, default: nil
+ optional :nested, type: Hash do
+ optional :fourth, default: nil
+ optional :fifth, default: nil
+ requires :nested_nested, type: Hash do
+ optional :sixth, default: 'sixth-default'
+ optional :seven, default: nil
+ end
+ end
+ end
+
+ inner_params = nil
+ subject.get '/declared' do
+ inner_params = declared(params, include_missing: false)
+ ""
+ end
+
+ get '/declared?first=present&nested[fourth]=&nested[nested_nested][sixth]=sixth'
+
+ expect(last_response.status).to eq(200)
+ expect(inner_params[:first]).to eq "present"
+ expect(inner_params[:nested].keys).to eq [:fourth, :nested_nested]
+ expect(inner_params[:nested][:fourth]).to eq ""
+ expect(inner_params[:nested][:nested_nested].keys).to eq [:sixth]
+ expect(inner_params[:nested][:nested_nested][:sixth]).to eq "sixth"
+ end
end
describe '#declared; call from child namespace' do
|
fix include_missing for nested hash
|
ruby-grape_grape
|
train
|
4695c7a3a0f50b40d4d646195e1d78a23aa5d8e5
|
diff --git a/src/main/java/com/couchbase/lite/replicator/PullerInternal.java b/src/main/java/com/couchbase/lite/replicator/PullerInternal.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/couchbase/lite/replicator/PullerInternal.java
+++ b/src/main/java/com/couchbase/lite/replicator/PullerInternal.java
@@ -990,6 +990,13 @@ public class PullerInternal extends ReplicationInternal implements ChangeTracker
beginReplicating();
}
+ /**
+ * Implementation of BlockingQueueListener.changed(EventType, Object, BlockingQueue) for Pull Replication
+ *
+ * Note: Pull replication needs to send IDLE after PUT /{db}/_local.
+ * However, sending IDLE from the Push replicator breaks a few unit test cases.
+ * This is the reason the changed() method was overridden for pull replication
+ */
@Override
public void changed(EventType type, Object o, BlockingQueue queue) {
// Log.d(Log.TAG_SYNC, "[changed()] " + type + " size="+queue.size());
|
Added a comment for the newly added method in this fix
|
couchbase_couchbase-lite-java-core
|
train
|
fd8814cc864e4cc6babaa90a20e9457219fb60c4
|
diff --git a/rkt/config/config.go b/rkt/config/config.go
index <HASH>..<HASH> 100644
--- a/rkt/config/config.go
+++ b/rkt/config/config.go
@@ -55,6 +55,15 @@ const (
)
func addParser(kind, version string, parser configParser) {
+ if len(kind) == 0 {
+ panic("empty kind string when registering a config parser")
+ }
+ if len(version) == 0 {
+ panic("empty version string when registering a config parser")
+ }
+ if parser == nil {
+ panic("trying to register a nil parser")
+ }
if _, err := getParser(kind, version); err == nil {
panic(fmt.Sprintf("A parser for kind %q and version %q already exist", kind, version))
}
@@ -65,6 +74,9 @@ func addParser(kind, version string, parser configParser) {
}
func registerSubDir(dir string, kinds []string) {
+ if len(dir) == 0 {
+ panic("trying to register empty config subdirectory")
+ }
if len(kinds) == 0 {
panic("kinds array cannot be empty when registering config subdir")
}
|
rkt: Add more checks in config registration functions
Triggering these checks indicates a programming error.
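
A minimal sketch of the fail-fast idea behind these checks, with a simplified stand-in registry (rkt's real addParser also takes a version and nests parsers per kind): invalid registration arguments can only come from programmer mistakes at init time, so the function panics rather than returning an error.

package main

import "fmt"

// Simplified stand-in for rkt's config parser registry.
type configParser func(data []byte) error

var parsers = map[string]configParser{}

func addParser(kind string, parser configParser) {
	if len(kind) == 0 {
		panic("empty kind string when registering a config parser")
	}
	if parser == nil {
		panic("trying to register a nil parser")
	}
	// Duplicate registration is also a programming error.
	if _, ok := parsers[kind]; ok {
		panic(fmt.Sprintf("a parser for kind %q already exists", kind))
	}
	parsers[kind] = parser
}

func main() {
	addParser("auth", func([]byte) error { return nil })
	fmt.Println("registered parsers:", len(parsers))
}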
|
rkt_rkt
|
train
|
8d66183725259d4a946351de03e61b3d1cc18d68
|
diff --git a/go/vt/vttablet/tabletserver/query_engine_test.go b/go/vt/vttablet/tabletserver/query_engine_test.go
index <HASH>..<HASH> 100644
--- a/go/vt/vttablet/tabletserver/query_engine_test.go
+++ b/go/vt/vttablet/tabletserver/query_engine_test.go
@@ -165,6 +165,7 @@ func TestGetMessageStreamPlan(t *testing.T) {
func assertPlanCacheSize(t *testing.T, qe *QueryEngine, expected int) {
var size int
+ qe.plans.Wait()
qe.plans.ForEach(func(_ interface{}) bool {
size++
return true
|
query_engine: fix async race
|
vitessio_vitess
|
train
|
92095d4db103e532b44c4147a7901ab0491d3b62
|
diff --git a/asv/www/graphdisplay.js b/asv/www/graphdisplay.js
index <HASH>..<HASH> 100644
--- a/asv/www/graphdisplay.js
+++ b/asv/www/graphdisplay.js
@@ -888,9 +888,22 @@ $(document).ready(function() {
/* Find the minimum and maximum values */
var min = Infinity;
var max = -Infinity;
+ var left = options.xaxis.min || 0;
+ var right = options.xaxis.max || Infinity;
$.each(graphs, function(i, graph) {
var data = graph.data;
- for (var j = 0; j < data.length; ++j) {
+ var j;
+ for (j = 0; j < data.length; ++j) {
+ var x = data[j][0];
+ if (x >= left) {
+ break;
+ }
+ }
+ for (; j < data.length; ++j) {
+ var x = data[j][0];
+ if (x >= right) {
+ break;
+ }
var p = data[j][1];
if (p !== null && (!log_scale || p > 0)) {
if (p < min) {
@@ -910,6 +923,9 @@ $(document).ready(function() {
min /= reference;
max /= reference;
+ options.yaxis.min = min;
+ options.yaxis.max = max;
+
if (log_scale || reference_scale) {
min = Math.floor(Math.log(min) / Math.LN10);
max = Math.ceil(Math.log(max) / Math.LN10);
@@ -1113,8 +1129,8 @@ $(document).ready(function() {
}
};
- handle_y_scale(options);
handle_x_scale(options);
+ handle_y_scale(options);
var graph_div = $('#main-graph');
var overview_div = $('#overview');
@@ -1158,12 +1174,16 @@ $(document).ready(function() {
graph_div.unbind("plotselected");
graph_div.bind("plotselected", function (event, ranges) {
// do the zooming
- plot = $.plot(graph_div, graphs, $.extend(true, {}, options, {
+ var new_options = $.extend(true, {}, options, {
xaxis: {
min: ranges.xaxis.from,
max: ranges.xaxis.to
}
- }));
+ });
+
+ handle_y_scale(new_options);
+
+ plot = $.plot(graph_div, graphs, new_options);
// Update things that depend on the range
update_tags();
|
Auto zoom the y axis when zooming in
|
airspeed-velocity_asv
|
train
|
83d1ce9fb5a75c5703777b7ad265cbf54edf58b8
|
diff --git a/libnetwork/drivers/overlay/ov_network.go b/libnetwork/drivers/overlay/ov_network.go
index <HASH>..<HASH> 100644
--- a/libnetwork/drivers/overlay/ov_network.go
+++ b/libnetwork/drivers/overlay/ov_network.go
@@ -242,7 +242,7 @@ func (d *driver) DeleteNetwork(nid string) error {
for _, ep := range n.endpoints {
if ep.ifName != "" {
- if link, err := ns.NlHandle().LinkByName(ep.ifName); err != nil {
+ if link, err := ns.NlHandle().LinkByName(ep.ifName); err == nil {
ns.NlHandle().LinkDel(link)
}
}
|
fix for #<I>: call LinkDel to delete the link device when the error from LinkByName is nil
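
A runnable sketch of the corrected guard, using hypothetical lookup/delete helpers in place of the netlink calls (ns.NlHandle().LinkByName / LinkDel): deletion must only happen when the lookup succeeded, i.e. err == nil, which is exactly what the one-character fix above changes.

package main

import (
	"errors"
	"fmt"
)

// lookupLink and deleteLink are stand-ins for the netlink handle calls.
func lookupLink(name string) (string, error) {
	if name == "eth0" {
		return name, nil
	}
	return "", errors.New("link not found")
}

func deleteLink(link string) {
	fmt.Println("deleting", link)
}

func main() {
	for _, ifName := range []string{"eth0", "missing0"} {
		// Only delete when the lookup succeeded: err == nil, not err != nil.
		if link, err := lookupLink(ifName); err == nil {
			deleteLink(link)
		}
	}
}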
|
moby_moby
|
train
|
68409c60799502e8e90253c3a06565fc3448409b
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -89,6 +89,15 @@ setup_args = {
'cmdclass': {'install_data': install_data_twisted},
}
+try:
+ # If setuptools is installed, then we'll add setuptools-specific arguments
+ # to the setup args.
+ import setuptools
+except ImportError:
+ pass
+else:
+ setup_args['install_requires'] = ['twisted >= 2.4.0']
+
setup(**setup_args)
# Local Variables:
|
setup: declare dependency on twisted >= <I>
|
buildbot_buildbot
|
train
|
3ed45491b42510cdd03f5571d3087f6df0501922
|
diff --git a/pharen.php b/pharen.php
index <HASH>..<HASH> 100755
--- a/pharen.php
+++ b/pharen.php
@@ -271,6 +271,10 @@ class Scope{
$this->lexical_bindings[$var_name] = $id;
}
+ public function get_indent(){
+ return $this->owner->parent instanceof RootNode && $this->owner instanceof BindingNode ? "" : $this->owner->indent."\t";
+ }
+
public function get_binding($var_name){
$value = $this->bindings[$var_name]->compile();
return $this->owner->indent."$var_name = $value;\n";
@@ -282,17 +286,15 @@ class Scope{
}
public function init_lexical_scope(){
- $indent = $this->owner->parent instanceof RootNode && $this->owner instanceof BindingNode ? "" : $this->owner->indent."\t";
- return $indent.'Lexical::$scopes['.$this->id.'] = array();'."\n";
+ return $this->get_indent().'Lexical::$scopes['.$this->id.'] = array();'."\n";
}
public function get_lexing($var_name){
if(!isset($this->lexically_needed[$var_name])){
return "";
}
- $indent = $this->owner->parent instanceof RootNode && $this->owner instanceof BindingNode ? "" : $this->owner->indent."\t";
$value = $this->bindings[$var_name]->compile();
- return $indent.'Lexical::$scopes['.$this->id.'][\''.$var_name.'\'] =& '.$var_name.";\n";
+ return $this->get_indent().'Lexical::$scopes['.$this->id.'][\''.$var_name.'\'] =& '.$var_name.";\n";
}
public function get_lexical_bindings(){
|
Refactor indentation code for Scope.
|
Scriptor_pharen
|
train
|
af044cbbdff56c36339f3a678fa0e35163b314c1
|
diff --git a/ait/core/dmc.py b/ait/core/dmc.py
index <HASH>..<HASH> 100644
--- a/ait/core/dmc.py
+++ b/ait/core/dmc.py
@@ -261,7 +261,7 @@ class UTCLeapSeconds(object):
def get_current_GPS_offset(self):
return self._data['leapseconds'][-1][-1]
- def get_GPS_offset_for_date(self, timestamp):
+ def get_GPS_offset_for_date(self, timestamp=None):
if timestamp is None:
timestamp = datetime.datetime.utcnow()
diff --git a/ait/core/test/test_dmc.py b/ait/core/test/test_dmc.py
index <HASH>..<HASH> 100644
--- a/ait/core/test/test_dmc.py
+++ b/ait/core/test/test_dmc.py
@@ -125,6 +125,10 @@ def test_leap_second_by_date():
assert ls.get_GPS_offset_for_date(datetime.datetime(2015, 7, 1)) == 17
assert ls.get_GPS_offset_for_date(datetime.datetime(2017, 1, 1)) == 18
+ # Make sure not supplying a date returns the offset for the current date
+ assert (ls.get_GPS_offset_for_date(datetime.datetime.utcnow()) ==
+ ls.get_GPS_offset_for_date())
+
def test_leap_second_data_load():
ait.config.leapseconds._config['filename'] = os.path.join(
os.path.dirname(__file__), "testdata", "dmc", "leapseconds.dat"
|
Issue #<I> - Add default timestamp for get_GPS_offset_for_date
|
NASA-AMMOS_AIT-Core
|
train
|
bfd24ab18efbe3bf5b7db0840c57a86fa737b44f
|
diff --git a/lxd/db/node.go b/lxd/db/node.go
index <HASH>..<HASH> 100644
--- a/lxd/db/node.go
+++ b/lxd/db/node.go
@@ -61,7 +61,7 @@ type NodeInfo struct {
Schema int // Schema version of the LXD code running the node
APIExtensions int // Number of API extensions of the LXD code running on the node
Heartbeat time.Time // Timestamp of the last heartbeat
- Roles []string // List of cluster roles
+ Roles []ClusterRole // List of cluster roles
Architecture int // Node architecture
State int // Node state
Config map[string]string // Configuration for the node
@@ -146,7 +146,12 @@ func (n NodeInfo) ToAPI(cluster *Cluster, node *Node, leader string) (*api.Clust
result.URL = fmt.Sprintf("https://%s", n.Address)
result.Database = false
result.Config = n.Config
- result.Roles = n.Roles
+
+ result.Roles = make([]string, 0, len(n.Roles))
+ for _, r := range n.Roles {
+ result.Roles = append(result.Roles, string(r))
+ }
+
result.Groups = n.Groups
// Check if node is the leader node
@@ -437,7 +442,7 @@ func (c *ClusterTx) nodes(pending bool, where string, args ...interface{}) ([]No
// Get node roles
sql := "SELECT node_id, role FROM nodes_roles"
- nodeRoles := map[int64][]string{}
+ nodeRoles := map[int64][]ClusterRole{}
rows, err := c.tx.Query(sql)
if err != nil {
// Don't fail on a missing table, we need to handle updates
@@ -456,12 +461,12 @@ func (c *ClusterTx) nodes(pending bool, where string, args ...interface{}) ([]No
}
if nodeRoles[nodeID] == nil {
- nodeRoles[nodeID] = []string{}
+ nodeRoles[nodeID] = []ClusterRole{}
}
roleName := string(ClusterRoles[role])
- nodeRoles[nodeID] = append(nodeRoles[nodeID], roleName)
+ nodeRoles[nodeID] = append(nodeRoles[nodeID], ClusterRole(roleName))
}
err = rows.Err()
|
lxd/db/node: Changes Roles field type to []ClusterRole in NodeInfo struct
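
A minimal sketch of the boundary conversion this commit introduces (role values below are placeholders): internally roles are stored as a dedicated ClusterRole type, and the API layer converts them back to plain strings, as ToAPI does in the diff.

package main

import "fmt"

// ClusterRole mirrors the typed role from the diff.
type ClusterRole string

// rolesToAPI converts the internal typed slice to the []string the API
// struct expects.
func rolesToAPI(roles []ClusterRole) []string {
	out := make([]string, 0, len(roles))
	for _, r := range roles {
		out = append(out, string(r))
	}
	return out
}

func main() {
	// Hypothetical role names, for illustration only.
	fmt.Println(rolesToAPI([]ClusterRole{"database", "database-standby"}))
}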
|
lxc_lxd
|
train
|
7f178d90bbdf799c1ac600a56c50ad5ecf2d1a8c
|
diff --git a/public/get/appFilesView.php b/public/get/appFilesView.php
index <HASH>..<HASH> 100644
--- a/public/get/appFilesView.php
+++ b/public/get/appFilesView.php
@@ -31,6 +31,8 @@ if (file_exists(PATH_HOME . "_config/viewOffline.json")) {
* Assets Offline
*/
if (file_exists(PATH_HOME . "_config/viewOfflineAssets.json")) {
- foreach (json_decode(file_get_contents(PATH_HOME . "_config/viewOfflineAssets.json"), !0) as $item)
+ foreach (json_decode(file_get_contents(PATH_HOME . "_config/viewOfflineAssets.json"), !0) as $item){
+ $item = preg_match("/^public/i", $item) ? str_replace("public/", VENDOR . DOMINIO . "/", $item) : $item;
$data['data']['misc'][] = HOME . $item;
+ }
}
|
offline assets: if the path starts with public/, redirect it to the vendor domain
|
edineibauer_uebConfig
|
train
|
8673975999ef6549962cd135cc51b77afa5dc8ee
|
diff --git a/mod/book/tool/print/classes/output/print_book_page.php b/mod/book/tool/print/classes/output/print_book_page.php
index <HASH>..<HASH> 100644
--- a/mod/book/tool/print/classes/output/print_book_page.php
+++ b/mod/book/tool/print/classes/output/print_book_page.php
@@ -79,7 +79,8 @@ class print_book_page implements renderable, templatable {
$data->printdialoglink = $output->render_print_book_dialog_link();
$data->booktitle = $OUTPUT->heading(format_string($this->book->name, true,
array('context' => $context)), 1);
- $data->bookintro = format_text($this->book->intro, $this->book->introformat,
+ $introtext = file_rewrite_pluginfile_urls($this->book->intro, 'pluginfile.php', $context->id, 'mod_book', 'intro', null);
+ $data->bookintro = format_text($introtext, $this->book->introformat,
array('noclean' => true, 'context' => $context));
$data->sitelink = \html_writer::link(new moodle_url($CFG->wwwroot),
format_string($SITE->fullname, true, array('context' => $context)));
diff --git a/mod/book/tool/print/classes/output/renderer.php b/mod/book/tool/print/classes/output/renderer.php
index <HASH>..<HASH> 100644
--- a/mod/book/tool/print/classes/output/renderer.php
+++ b/mod/book/tool/print/classes/output/renderer.php
@@ -200,7 +200,9 @@ class renderer extends plugin_renderer_base {
}
}
- $bookchapter .= format_text($chapter->content, $chapter->contentformat, array('noclean' => true, 'context' => $context));
+ $chaptertext = file_rewrite_pluginfile_urls($chapter->content, 'pluginfile.php', $context->id,
+ 'mod_book', 'chapter', $chapter->id);
+ $bookchapter .= format_text($chaptertext, $chapter->contentformat, array('noclean' => true, 'context' => $context));
$bookchapter .= html_writer::end_div();
return array($bookchapter, $chaptervisible);
|
MDL-<I> mod_book: format plugin file urls when printing book
|
moodle_moodle
|
train
|
79606c78c7ab9c166265cf1adf43ef017f2bd0d6
|
diff --git a/src/ActiveRecord.php b/src/ActiveRecord.php
index <HASH>..<HASH> 100755
--- a/src/ActiveRecord.php
+++ b/src/ActiveRecord.php
@@ -140,7 +140,7 @@ abstract class ActiveRecord implements \JsonSerializable {
public function __set(string $name, $value) {
// check that’s not the initial object population
- if (!in_array(debug_backtrace()[1]['function'], ['populate']) and in_array($name, static::getBinds())) {
+ if (in_array($name, static::getBinds()) and isset(debug_backtrace()[1]) and !in_array(debug_backtrace()[1]['function'], ['populate'])) {
$previousValue = $this->$name;
}
@@ -216,8 +216,8 @@ abstract class ActiveRecord implements \JsonSerializable {
}
// keep track of updated properties
- if (!in_array(debug_backtrace()[1]['function'], ['populate']) and in_array($name, static::getBinds())
- and $previousValue != $this->$name and !in_array($name, $this->updatedProperties)) {
+ if (!in_array($name, $this->updatedProperties) and isset(debug_backtrace()[1]) and !in_array(debug_backtrace()[1]['function'], ['populate']) and in_array($name, static::getBinds())
+ and $previousValue != $this->$name) {
$this->updatedProperties[] = $name;
}
|
Update ActiveRecord.php
Fixed potential bug and improved performance
|
Viames_Pair
|
train
|
90e63c0c460ac118bc51c64881cc0354c5c3d76f
|
diff --git a/src/js/NavigationDrawers/NavigationDrawer.js b/src/js/NavigationDrawers/NavigationDrawer.js
index <HASH>..<HASH> 100644
--- a/src/js/NavigationDrawers/NavigationDrawer.js
+++ b/src/js/NavigationDrawers/NavigationDrawer.js
@@ -3,7 +3,7 @@ import PureRenderMixin from 'react-addons-pure-render-mixin';
import CSSTransitionGroup from 'react-addons-css-transition-group';
import classnames from 'classnames';
-import { isMobile, setOverflow } from '../utils';
+import { setOverflow } from '../utils';
import Divider from '../Dividers';
import { List, ListItem } from '../Lists';
import Subheader from '../Subheaders';
@@ -31,6 +31,7 @@ export default class NavigationDrawer extends Component {
static propTypes = {
isOpen: PropTypes.bool.isRequired,
+ isMobile: PropTypes.bool.isRequired,
title: PropTypes.string,
containerClassName: PropTypes.string,
className: PropTypes.string,
@@ -80,6 +81,7 @@ export default class NavigationDrawer extends Component {
drawerType: NavigationDrawer.DrawerType.FULL_HEIGHT,
menuIconChildren: 'menu',
closeIconChildren: 'keyboard_arrow_left',
+ isMobile: false,
};
componentWillUpdate(nextProps) {
@@ -130,6 +132,7 @@ export default class NavigationDrawer extends Component {
render() {
const {
isOpen,
+ isMobile,
title,
toolbarTitle,
containerClassName,
@@ -149,6 +152,7 @@ export default class NavigationDrawer extends Component {
navHeader,
navHeaderChildren,
} = this.props;
+
const { PERSISTENT, PERSISTENT_MINI, TEMPORARY, TEMPORARY_MINI } = NavigationDrawer.DrawerType;
const mini = drawerType === PERSISTENT_MINI || drawerType === TEMPORARY_MINI;
diff --git a/src/js/utils/index.js b/src/js/utils/index.js
index <HASH>..<HASH> 100644
--- a/src/js/utils/index.js
+++ b/src/js/utils/index.js
@@ -47,15 +47,6 @@ export function isPointInCircle(cx, cy, r, x, y) {
return distance <= Math.pow(r, 2);
}
-/**
- * Amzing media query to check if mobile..
- * @return true if device width is between 0 and 599px
- */
-export const isMobile = (() => {
- return window.matchMedia('only screen and (min-width: 0px) and (max-width: 599px)').matches;
-})();
-
-
export function easeInOut(currentTime, start, change, duration) {
currentTime /= duration / 2;
if(currentTime < 1) {
|
Updated navigation drawer to use an isMobile prop instead of calculating it in utils. This is to allow server side rendering.
|
mlaursen_react-md
|
train
|
573e1b1fe42ef44deda1f2d20d3f30120069fd60
|
diff --git a/lib/roar/representer/transport/net_http.rb b/lib/roar/representer/transport/net_http.rb
index <HASH>..<HASH> 100644
--- a/lib/roar/representer/transport/net_http.rb
+++ b/lib/roar/representer/transport/net_http.rb
@@ -35,6 +35,7 @@ module Roar
http = Net::HTTP.new(uri.host, uri.port)
req = what.new(uri.request_uri)
req.content_type = as
+ req["accept"] = as # TODO: test me. # DISCUSS: if Accept is not set, rails treats this request as as "text/html".
req.body = body if body
http.request(req)
end
|
set the Accept: header to the same value as Content-type:, since Rails seems to get confused otherwise (in HttpVerbs).
|
trailblazer_roar
|
train
|
f17ec8a4e0ba9ec69baae2d50eaab5b9257868bc
|
diff --git a/src/main/java/org/jfrog/hudson/ivy/ArtifactoryIvyFreeStyleConfigurator.java b/src/main/java/org/jfrog/hudson/ivy/ArtifactoryIvyFreeStyleConfigurator.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/jfrog/hudson/ivy/ArtifactoryIvyFreeStyleConfigurator.java
+++ b/src/main/java/org/jfrog/hudson/ivy/ArtifactoryIvyFreeStyleConfigurator.java
@@ -228,7 +228,7 @@ public class ArtifactoryIvyFreeStyleConfigurator extends BuildWrapper implements
final BuildContext context = new BuildContext(getDetails(), ArtifactoryIvyFreeStyleConfigurator.this,
isRunChecks(), isIncludePublishArtifacts(), getViolationRecipients(), getScopes(),
isLicenseAutoDiscovery(), isDiscardOldBuilds(), isDeployArtifacts(),
- getArtifactDeploymentPatterns(), isDeployBuildInfo(), isIncludeEnvVars(), isDiscardBuildArtifacts(),
+ getArtifactDeploymentPatterns(), !isDeployBuildInfo(), isIncludeEnvVars(), isDiscardBuildArtifacts(),
getMatrixParams());
File localDependencyFile = Which.jarFile(ArtifactoryBuildListener.class);
final FilePath actualDependencyDir =
|
HAP-<I>: Freestyle Ant/Ivy support
|
jenkinsci_artifactory-plugin
|
train
|
ff516009f382558466c159f42a6effcfab18bb67
|
diff --git a/src/Omnipay/Common/Message/AbstractRequest.php b/src/Omnipay/Common/Message/AbstractRequest.php
index <HASH>..<HASH> 100644
--- a/src/Omnipay/Common/Message/AbstractRequest.php
+++ b/src/Omnipay/Common/Message/AbstractRequest.php
@@ -38,6 +38,16 @@ abstract class AbstractRequest implements RequestInterface
protected $response;
/**
+ * @var bool
+ */
+ protected $zeroAmountAllowed = true;
+
+ /**
+ * @var bool
+ */
+ protected $negativeAmountAllowed = false;
+
+ /**
* Create a new Request
*
* @param ClientInterface $httpClient A Guzzle client to make API calls with
@@ -159,6 +169,30 @@ abstract class AbstractRequest implements RequestInterface
}
/**
+ * Convert an amount into a float.
+ *
+ * @var string|int|float $value The value to convert.
+ * @throws InvalidRequestException on any validation failure.
+ * @return float The amount converted to a float.
+ */
+
+ public function toFloat($value)
+ {
+ if ( ! is_string($value) && ! is_int($value) && ! is_float($value)) {
+ throw new InvalidRequestException('Data type is not a valid decimal number.');
+ }
+
+ if (is_string($value)) {
+ // Validate generic number, with optional sign and decimals.
+ if ( ! preg_match('/^[-]?[0-9]+(\.[0-9]*)?$/', $value)) {
+ throw new InvalidRequestException('String is not a valid decimal number.');
+ }
+ }
+
+ return (float)$value;
+ }
+
+ /**
* Validates and returns the formated amount.
*
* @throws InvalidRequestException on any validation failure.
@@ -168,41 +202,31 @@ abstract class AbstractRequest implements RequestInterface
public function getAmount()
{
$amount = $this->getParameter('amount');
- $message = 'Please specify amount as a string or float, '
- . 'with decimal places (e.g. \'10.00\' to represent $10.00).';
if ($amount !== null) {
// Don't allow integers for currencies that support decimals.
// This is for legacy reasons - upgrades from v0.9
- if (is_int($amount) && $this->getCurrencyDecimalPlaces() > 0) {
- throw new InvalidRequestException($message);
- }
-
- if (is_string($amount)) {
- // Negative amounts are valid numbers at this stage.
- if (preg_match('/[^0-9\.-]/', $amount)) {
- throw new InvalidRequestException('Invalid character in amount.');
- }
-
- // Generic number, with optional sign and decimals.
- if (!preg_match('/^[-]?[0-9]+(\.[0-9]*)?$/', $amount)) {
- throw new InvalidRequestException('Amount string is not a valid decimal number.');
- }
-
- // Don't allow integers for currencies that support decimals (legacy v0.9).
- if ($this->getCurrencyDecimalPlaces() > 0 && false === strpos((string) $amount, '.')) {
- throw new InvalidRequestException($message);
- }
+ if ($this->getCurrencyDecimalPlaces() > 0) {
+ if (is_int($amount) || (is_string($amount) && false === strpos((string) $amount, '.'))) {
+ throw new InvalidRequestException(
+ 'Please specify amount as a string or float, '
+ . 'with decimal places (e.g. \'10.00\' to represent $10.00).'
+ );
+ };
}
- // The number_format() used later requires a float.
- $amount = (float)$amount;
+ $amount = $this->toFloat($amount);
// Check for a negative amount.
- if ($amount < 0) {
+ if ( ! $this->negativeAmountAllowed && $amount < 0) {
throw new InvalidRequestException('A negative amount is not allowed.');
}
+ // Check for a zero amount.
+ if ( ! $this->zeroAmountAllowed && $amount === 0.0) {
+ throw new InvalidRequestException('A zero amount is not allowed.');
+ }
+
// Check for rounding that may occur if too many significant decimal digits are supplied.
$decimal_count = strlen(substr(strrchr((string)$amount, '.'), 1));
if ($decimal_count > $this->getCurrencyDecimalPlaces()) {
diff --git a/tests/Omnipay/Common/Message/AbstractRequestTest.php b/tests/Omnipay/Common/Message/AbstractRequestTest.php
index <HASH>..<HASH> 100644
--- a/tests/Omnipay/Common/Message/AbstractRequestTest.php
+++ b/tests/Omnipay/Common/Message/AbstractRequestTest.php
@@ -95,7 +95,7 @@ class AbstractRequestTest extends TestCase
public function testAmountZeroString()
{
- $this->assertSame($this->request, $this->request->setAmount('0.0'));
+ $this->assertSame($this->request, $this->request->setAmount('0.000000'));
$this->assertSame('0.00', $this->request->getAmount());
}
@@ -172,6 +172,15 @@ class AbstractRequestTest extends TestCase
/**
* @expectedException Omnipay\Common\Exception\InvalidRequestException
*/
+ public function testAmountInvalidTypeThrowsException()
+ {
+ $this->assertSame($this->request, $this->request->setAmount(true));
+ $this->request->getAmount();
+ }
+
+ /**
+ * @expectedException Omnipay\Common\Exception\InvalidRequestException
+ */
public function testAmountNegativeStringThrowsException()
{
$this->assertSame($this->request, $this->request->setAmount('-123.00'));
|
Refactor getAmount() - pull out the float conversion.
|
thephpleague_omnipay-common
|
train
|
0ac54df6949202a528ecd3888f1a9c1dfe3619c1
|
diff --git a/src/Rah/Backup.php b/src/Rah/Backup.php
index <HASH>..<HASH> 100644
--- a/src/Rah/Backup.php
+++ b/src/Rah/Backup.php
@@ -168,7 +168,11 @@ class Rah_Backup
href = obj.attr('href');
obj.addClass('disabled').attr('href', '#').after(spinner);
- sendAsyncEvent(href.substr(1), null, 'script').fail(function ()
+ $.ajax('index.php', {
+ data: href.substr(1) + '&app_mode=async',
+ dataType: 'script',
+ timeout: 1800000
+ }).fail(function ()
{
$.globalEval('{$msg['error']}');
}).always(function ()
|
Migrate to $.ajax to mitigate timeout issues.
|
gocom_rah_backup
|
train
|
2f4709d904d49a43ff2cdc0b4961d9aae562f61f
|
diff --git a/pkg/proxy/request.go b/pkg/proxy/request.go
index <HASH>..<HASH> 100644
--- a/pkg/proxy/request.go
+++ b/pkg/proxy/request.go
@@ -24,6 +24,12 @@ type Request struct {
Coalesce func() error
}
+func (r *Request) Release() {
+ r.Multi = nil
+ r.Resp = nil
+ r.Coalesce = nil
+}
+
type RequestAlloc struct {
alloc struct {
buf []Request
diff --git a/pkg/proxy/session.go b/pkg/proxy/session.go
index <HASH>..<HASH> 100644
--- a/pkg/proxy/session.go
+++ b/pkg/proxy/session.go
@@ -179,6 +179,8 @@ func (s *Session) loopWriter(tasks <-chan *Request) (err error) {
}
if err := p.Flush(len(tasks) == 0); err != nil {
return s.incrOpFails(err)
+ } else {
+ r.Release()
}
if len(tasks) == 0 {
s.flushOpStats()
|
proxy: call Request.Release to release resources
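
A minimal sketch of the release pattern this commit applies, with simplified field types standing in for codis's Request: once a request has been flushed to the client, its pointer-valued fields are nilled out so the referenced objects can be garbage collected even while the Request itself stays reachable (for example from an allocation pool like RequestAlloc).

package main

import "fmt"

// Request is a simplified stand-in; the real struct holds parsed RESP
// objects and a coalesce callback.
type Request struct {
	Multi    []byte
	Resp     []byte
	Coalesce func() error
}

// Release drops references so the GC can reclaim them early.
func (r *Request) Release() {
	r.Multi = nil
	r.Resp = nil
	r.Coalesce = nil
}

func main() {
	r := &Request{Multi: []byte("cmd"), Resp: []byte("ok")}
	// ... flush r to the connection, then release its payloads ...
	r.Release()
	fmt.Println(r.Multi == nil, r.Resp == nil) // true true
}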
|
CodisLabs_codis
|
train
|
32244d7faae9aa88af13de4021611dd2b4dd7b13
|
diff --git a/stl/__about__.py b/stl/__about__.py
index <HASH>..<HASH> 100644
--- a/stl/__about__.py
+++ b/stl/__about__.py
@@ -1,6 +1,6 @@
__package_name__ = 'numpy-stl'
__import_name__ = 'stl'
-__version__ = '2.13.0'
+__version__ = '2.13.1'
__author__ = 'Rick van Hattem'
__author_email__ = 'Wolph@Wol.ph'
__description__ = ' '.join('''
|
Incrementing version to <I>
|
WoLpH_numpy-stl
|
train
|
e2a8a5ba62cc890097c5c7cb21182cab27c24a04
|
diff --git a/lib/bandcamp/track.rb b/lib/bandcamp/track.rb
index <HASH>..<HASH> 100644
--- a/lib/bandcamp/track.rb
+++ b/lib/bandcamp/track.rb
@@ -1,9 +1,13 @@
+require 'bandcamp/band'
+require 'bandcamp/request'
require 'bandcamp/methodical'
+require 'bandcamp/associated'
module Bandcamp
class Track
include Bandcamp::Methodical
+ include Bandcamp::Associated
def initialize attrs
to_methods attrs
@@ -17,5 +21,9 @@ module Bandcamp
downloadable == 1 ? true : false
end
+ def band
+ retrieve_associated :band
+ end
+
end
end
diff --git a/spec/track_spec.rb b/spec/track_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/track_spec.rb
+++ b/spec/track_spec.rb
@@ -7,7 +7,11 @@ module Bandcamp
describe Track do
it "includes Bandcamp::Methodical" do
- expect(Track.ancestors).to include(Bandcamp::Methodical)
+ expect(track.private_methods).to include(:to_methods)
+ end
+
+ it "includes Bandcamp::Associated" do
+ expect(track.private_methods).to include(:retrieve_associated)
end
let(:track_json){ MultiJson.decode(File.read(File.join %w(spec fixtures a_new_day.json))) }
@@ -38,6 +42,13 @@ module Bandcamp
end
end
+ describe "#band" do
+ it "returns the associated band object" do
+ track.stub(:retrieve_associated).and_return(Band.new(foo: "bar"))
+ expect(track.band).to be_a Band
+ end
+ end
+
end
end
|
Track#band returns the associated band object
|
sleepycat_bandcamp_api
|
train
|
1422623157f153b5c0ecee67901123134280ac4c
|
diff --git a/salt/utils/event.py b/salt/utils/event.py
index <HASH>..<HASH> 100644
--- a/salt/utils/event.py
+++ b/salt/utils/event.py
@@ -247,8 +247,6 @@ class Reactor(multiprocessing.Process, salt.state.Compiler):
for fn_ in glob.glob(glob_ref):
react.update(self.render_template(
fn_,
- self.rend,
- self.opts['renderer'],
tag=tag,
data=data))
return react
|
meh, no need to pass inherited objects!
|
saltstack_salt
|
train
|
a591c7ec03530aa80ebcb63bc1808708eabddc22
|
diff --git a/opcode.go b/opcode.go
index <HASH>..<HASH> 100644
--- a/opcode.go
+++ b/opcode.go
@@ -1891,11 +1891,16 @@ func opcodeCheckMultiSig(op *parsedOpcode, s *Script) error {
}
// bug in bitcoind mean we pop one more stack value than should be used.
- _, err = s.dstack.PopByteArray()
+ dummy, err := s.dstack.PopByteArray()
if err != nil {
return err
}
+ if s.strictMultiSig && len(dummy) != 0 {
+ return fmt.Errorf("multisig dummy argument is not zero length: %d",
+ len(dummy))
+ }
+
if len(signatures) == 0 {
s.dstack.PushBool(nsig == 0)
return nil
diff --git a/script.go b/script.go
index <HASH>..<HASH> 100644
--- a/script.go
+++ b/script.go
@@ -207,6 +207,7 @@ type Script struct {
numOps int
bip16 bool // treat execution as pay-to-script-hash
der bool // enforce DER encoding
+ strictMultiSig bool // verify multisig stack item is zero length
savedFirstStack [][]byte // stack from first script for bip16 scripts
}
@@ -508,6 +509,10 @@ const (
// recognized by creator of the transaction. Performing a canonical
// check enforces script signatures use a unique DER format.
ScriptCanonicalSignatures
+
+ // ScriptStrictMultiSig defines whether to verify the stack item
+ // used by CHECKMULTISIG is zero length.
+ ScriptStrictMultiSig
)
// NewScript returns a new script engine for the provided tx and input idx with
@@ -550,6 +555,9 @@ func NewScript(scriptSig []byte, scriptPubKey []byte, txidx int, tx *btcwire.Msg
if flags&ScriptCanonicalSignatures == ScriptCanonicalSignatures {
m.der = true
}
+ if flags&ScriptStrictMultiSig == ScriptStrictMultiSig {
+ m.strictMultiSig = true
+ }
m.tx = *tx
m.txidx = txidx
|
Add new ScriptFlag ScriptStrictMultiSig.
ScriptStrictMultiSig verifies that the stack item used by CHECKMULTISIG
is zero length.
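
A minimal sketch of how a flag like this is wired, with a simplified Script struct (the real NewScript also takes the scripts, tx, and input index, and the real flag constants may be defined differently): verification flags are bits in an integer, and each enabled bit sets a boolean on the engine, which opcode handlers then consult.

package main

import "fmt"

type ScriptFlags uint32

const (
	ScriptCanonicalSignatures ScriptFlags = 1 << iota
	ScriptStrictMultiSig
)

// Script is a cut-down stand-in for the engine struct in the diff.
type Script struct {
	der            bool // enforce DER encoding
	strictMultiSig bool // verify the CHECKMULTISIG dummy is zero length
}

func NewScript(flags ScriptFlags) *Script {
	var m Script
	if flags&ScriptCanonicalSignatures == ScriptCanonicalSignatures {
		m.der = true
	}
	if flags&ScriptStrictMultiSig == ScriptStrictMultiSig {
		m.strictMultiSig = true
	}
	return &m
}

func main() {
	s := NewScript(ScriptStrictMultiSig)
	fmt.Println(s.strictMultiSig, s.der) // true false
}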
|
btcsuite_btcd
|
train
|
a1209d23173399f41f539f66f5cc53fd72ea8c98
|
diff --git a/openquake/calculators/ebrisk.py b/openquake/calculators/ebrisk.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/ebrisk.py
+++ b/openquake/calculators/ebrisk.py
@@ -294,14 +294,14 @@ class EbriskCalculator(event_based.EventBasedCalculator):
Compute and store average losses from the losses_by_event dataset,
and then loss curves and maps.
"""
- if len(times): # store some info on the calculation times
+ if len(times):
try:
dset = self.datastore['task_info/start_ebrisk']
except KeyError:
# can happen for mysterious race conditions on some machines
pass
else:
- # store the time information
+ # store the time information plus the events_per_sid info
dset.attrs['times'] = times
dset.attrs['events_per_sid'] = numpy.mean(self.events_per_sid)
|
[skip CI] Added comment
|
gem_oq-engine
|
train
|
6b3083d9af33d18904d9344250f81f3a0541d866
|
diff --git a/salt/cloud/clouds/ec2.py b/salt/cloud/clouds/ec2.py
index <HASH>..<HASH> 100644
--- a/salt/cloud/clouds/ec2.py
+++ b/salt/cloud/clouds/ec2.py
@@ -2874,17 +2874,18 @@ def create(vm_=None, call=None):
if ex_blockdevicetags:
for k, v in six.iteritems(ret['blockDeviceMapping']):
+ bd_items = []
if isinstance(v, dict):
- if v['deviceName'] in ex_blockdevicetags:
- if 'Name' not in ex_blockdevicetags[v['deviceName']]:
- ex_blockdevicetags[v['deviceName']]['Name'] = vm_['name']
- block_device_volume_id_map[v[ret['rootDeviceType']]['volumeId']] = ex_blockdevicetags[v['deviceName']]
+ bd_items.append(v)
else:
- for _d in v:
- if _d['deviceName'] in ex_blockdevicetags:
- if 'Name' not in ex_blockdevicetags[_d['deviceName']]:
- ex_blockdevicetags[_d['deviceName']]['Name'] = vm_['name']
- block_device_volume_id_map[_d[ret['rootDeviceType']]['volumeId']] = ex_blockdevicetags[_d['deviceName']]
+ for i in v:
+ bd_items.append(i)
+
+ for i in bd_items:
+ if i['deviceName'] in ex_blockdevicetags and 'Name' not in ex_blockdevicetags[i['deviceName']]:
+ ex_blockdevicetags[i['deviceName']]['Name'] = vm_['name']
+ if i['deviceName'] in ex_blockdevicetags:
+ block_device_volume_id_map[i[ret['rootDeviceType']]['volumeId']] = ex_blockdevicetags[i['deviceName']]
if block_device_volume_id_map:
|
rework based on lint suggestions
|
saltstack_salt
|
train
|
bd78069b6e96b18cadecd23fe66f76241274c1db
|
diff --git a/girder/utility/server.py b/girder/utility/server.py
index <HASH>..<HASH> 100644
--- a/girder/utility/server.py
+++ b/girder/utility/server.py
@@ -32,6 +32,10 @@ class Webroot(object):
"""
exposed = True
+ client_base = os.path.abspath(
+ os.path.join(
+ constants.ROOT_DIR, 'clients')).split(os.path.sep)
+
def GET(self, *pargs):
if len(pargs) == 0:
return cherrypy.lib.static.serve_file(
@@ -41,9 +45,19 @@ class Webroot(object):
client = pargs[0]
path = pargs[1:]
- if client != "web":
- path_components = [constants.ROOT_DIR, 'clients'] + list(path)
- return cherrypy.lib.static.serve_file(os.path.join(*path_components))
+ if client in ["jquery"]:
+ url_path_components = (Webroot.client_base +
+ [client] +
+ list(path))
+ path_components = os.path.abspath(
+ os.path.sep.join(url_path_components)).split("/")
+
+ prefix_len = len(Webroot.client_base)
+ if path_components[:prefix_len] != Webroot.client_base:
+ raise cherrypy.HTTPError(404)
+
+ return cherrypy.lib.static.serve_file(
+ os.path.sep.join(path_components))
else:
raise cherrypy.HTTPError(404)
|
Addressing PR comments
* Converted handler to use a whitelist to allow certain prefixed URLs through
* URLs are converted to disk paths and compared for a legal directory prefix
before serving any file.
* Lines have been appropriately shortened to pass PEP8 style tests.
|
girder_girder
|
train
|
fd828962a39fbc77a1d9a738e2253a2ead496d9d
|
diff --git a/src/Foundation/Coroutine/Scheduler.php b/src/Foundation/Coroutine/Scheduler.php
index <HASH>..<HASH> 100755
--- a/src/Foundation/Coroutine/Scheduler.php
+++ b/src/Foundation/Coroutine/Scheduler.php
@@ -16,10 +16,6 @@ class Scheduler
public function schedule()
{
$coroutine = $this->task->getCoroutine();
-
- $signal = $this->checkTaskDone($coroutine);
- if ($signal !== null) return $signal;
-
$value = $coroutine->current();
$signal = $this->handleSysCall($value);
@@ -37,6 +33,9 @@ class Scheduler
$signal = $this->handleTaskStack($value);
if ($signal !== null) return $signal;
+ $signal = $this->checkTaskDone($value);
+ if ($signal !== null) return $signal;
+
return Signal::TASK_CONTINUE;
}
@@ -130,8 +129,9 @@ class Scheduler
return Signal::TASK_CONTINUE;
}
- private function checkTaskDone($coroutine)
+ private function checkTaskDone($value)
{
+ $coroutine = $this->task->getCoroutine();
if ($coroutine->valid()) {
return null;
}
|
task: move checkTaskDone to last
|
youzan_zanphp
|
train
|
f192892e645576bf5f09f02dfbf561a1d60979a4
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -14,6 +14,9 @@ Each version section may have have subsections for: _Added_, _Changed_, _Removed
### Added
- Added support for LHUC in RNN models (David Vilar, "Learning Hidden Unit
Contribution for Adapting Neural Machine Translation Models" NAACL 2018)
+
+### Fixed
+- Word based batching with very small batch sizes.
## [1.18.6]
### Fixed
diff --git a/sockeye/arguments.py b/sockeye/arguments.py
index <HASH>..<HASH> 100644
--- a/sockeye/arguments.py
+++ b/sockeye/arguments.py
@@ -670,7 +670,8 @@ def add_training_args(params):
default=C.BATCH_TYPE_SENTENCE,
choices=[C.BATCH_TYPE_SENTENCE, C.BATCH_TYPE_WORD],
help="Sentence: each batch contains X sentences, number of words varies. Word: each batch"
- " contains (approximately) X words, number of sentences varies. Default: %(default)s.")
+ " contains (approximately) X target words, number of sentences varies. "
+ "Default: %(default)s.")
train_params.add_argument('--fill-up',
type=str,
diff --git a/sockeye/data_io.py b/sockeye/data_io.py
index <HASH>..<HASH> 100644
--- a/sockeye/data_io.py
+++ b/sockeye/data_io.py
@@ -159,14 +159,14 @@ def define_bucket_batch_sizes(buckets: List[Tuple[int, int]],
" buckets: (%d > %d)" % (padded_seq_len, batch_size))
# Multiple of number of devices (int) closest to target number of words, assuming each sentence is of
# average length
- batch_size_seq = batch_num_devices * round((batch_size / average_seq_len) / batch_num_devices)
+ batch_size_seq = batch_num_devices * max(1, round((batch_size / average_seq_len) / batch_num_devices))
batch_size_word = batch_size_seq * average_seq_len
else:
batch_size_seq = batch_size
batch_size_word = batch_size_seq * average_seq_len
bucket_batch_sizes.append(BucketBatchSize(bucket, batch_size_seq, batch_size_word))
- # Track largest number of word samples in a batch
- largest_total_num_words = max(largest_total_num_words, batch_size_seq * max(*bucket))
+ # Track largest number of target word samples in a batch
+ largest_total_num_words = max(largest_total_num_words, batch_size_seq * padded_seq_len)
# Final step: guarantee that largest bucket by sequence length also has largest total batch size.
# When batching by sentences, this will already be the case.
|
Word based batching clarification. (#<I>)
* Word based batching clarification.
* Version + changelog.
|
awslabs_sockeye
|
train
|
b9a3e7c834a779ed8ede8d7428d302fa9156f728
|
diff --git a/centinel/client.py b/centinel/client.py
index <HASH>..<HASH> 100644
--- a/centinel/client.py
+++ b/centinel/client.py
@@ -13,6 +13,7 @@ import centinel
from centinel.backend import get_meta
from centinel.primitives.tcpdump import Tcpdump
from experiment import ExperimentList
+from centinel.vpn.cli import get_external_ip
loaded_modules = set()
@@ -113,7 +114,12 @@ class Client:
"""
# get the normalized IP if we don't already have it
if self._meta is None:
- self._meta = get_meta(self.config)
+ external_ip = get_external_ip()
+ if external_ip:
+ self._meta = get_meta(self.config, external_ip)
+ else:
+ raise Exception("Unable to get public IP")
+
return self._meta
def run(self, data_dir=None):
|
use the public IP to get metadata in client.py, closes #<I>
|
iclab_centinel
|
train
|
c55a4b6c90f50cf2c8cef40a7c28b70237d0d60d
|
diff --git a/test.js b/test.js
index <HASH>..<HASH> 100644
--- a/test.js
+++ b/test.js
@@ -66,7 +66,7 @@ describe('chokidar', function() {
});
describe('non-polling', runTests.bind(this, {usePolling: false, useFsEvents: false}));
- describe('polling', runTests.bind(this, {usePolling: true}));
+ describe('polling', runTests.bind(this, {usePolling: true, interval: 10}));
if (os === 'darwin') describe('fsevents', runTests.bind(this, {useFsEvents: true}));
});
|
Set fast poll interval
Will try to use this to simplify delay wrappers and speed up test
execution
|
paulmillr_chokidar
|
train
|
22dbe9db904e1dbcc75c7c51cff97d8bd5e40401
|
diff --git a/library/rampage/core/services/DoctrineORMConfigDelegator.php b/library/rampage/core/services/DoctrineORMConfigDelegator.php
index <HASH>..<HASH> 100644
--- a/library/rampage/core/services/DoctrineORMConfigDelegator.php
+++ b/library/rampage/core/services/DoctrineORMConfigDelegator.php
@@ -43,7 +43,10 @@ class DoctrineORMConfigDelegator implements DelegatorFactoryInterface
if ($config instanceof Configuration) {
$pathManager = $serviceLocator->get('PathManager');
- $config->setProxyDir($pathManager->get('var', 'doctrine/proxies'));
+ $dir = $config->getProxyDir();
+ $dir = (strpos($dir, 'data/') === 0)? substr($dir, 0, 5) : 'doctrine/proxies';
+
+ $config->setProxyDir($pathManager->get('var', $dir));
}
return $config;
|
Changed doctrine orm config delegator to keep path behind data/
|
tux-rampage_rampage-php
|
train
|
dee460d366862ba40c76c413f7210e617dcd0443
|
diff --git a/Core/Content/Page/AlPageManager.php b/Core/Content/Page/AlPageManager.php
index <HASH>..<HASH> 100755
--- a/Core/Content/Page/AlPageManager.php
+++ b/Core/Content/Page/AlPageManager.php
@@ -366,10 +366,10 @@ class AlPageManager extends AlContentManagerBase implements AlContentManagerInte
unset($values['IsHome']);
}
- if (isset($values['IsPublished']) && ($values['IsPublished'] == "" || $values['IsPublished'] == $this->alPage->getIsPublished())) {
+ if (empty($values['IsPublished']) || $values['IsPublished'] == $this->alPage->getIsPublished()) {
unset($values['IsPublished']);
}
-
+
if ($result !== false) {
if (!empty($values)) {
$result = $this->pageRepository
diff --git a/Tests/Functional/Controller/PagesControllerTest.php b/Tests/Functional/Controller/PagesControllerTest.php
index <HASH>..<HASH> 100644
--- a/Tests/Functional/Controller/PagesControllerTest.php
+++ b/Tests/Functional/Controller/PagesControllerTest.php
@@ -278,7 +278,7 @@ class PagesControllerTest extends WebTestCaseFunctional
'pageName' => "page2 edited",
'permalink' => "page-2 edited",);
- $crawler = $this->client->request('POST', 'backend/en/al_savePage', $params);echo $crawler->text();exit;
+ $crawler = $this->client->request('POST', 'backend/en/al_savePage', $params);
$response = $this->client->getResponse();
$this->assertEquals(200, $response->getStatusCode());
|
fixed control condition on IsPublished field
|
redkite-labs_RedKiteCmsBundle
|
train
|
c7f74deb17e8c691f782af970754ce950c438b8e
|
diff --git a/agent/consul/enterprise_server_oss.go b/agent/consul/enterprise_server_oss.go
index <HASH>..<HASH> 100644
--- a/agent/consul/enterprise_server_oss.go
+++ b/agent/consul/enterprise_server_oss.go
@@ -1,3 +1,4 @@
+//go:build !consulent
// +build !consulent
package consul
@@ -86,9 +87,3 @@ func (s *Server) validateEnterpriseIntentionNamespace(ns string, _ bool) error {
func addEnterpriseSerfTags(_ map[string]string, _ *structs.EnterpriseMeta) {
// do nothing
}
-
-// updateEnterpriseSerfTags in enterprise will update any instances of Serf with the tag that
-// are not the normal LAN or WAN serf instances (network segments and network areas)
-func (_ *Server) updateEnterpriseSerfTags(_, _ string) {
- // do nothing
-}
diff --git a/agent/consul/leader_test.go b/agent/consul/leader_test.go
index <HASH>..<HASH> 100644
--- a/agent/consul/leader_test.go
+++ b/agent/consul/leader_test.go
@@ -1771,14 +1771,11 @@ func TestDatacenterSupportsFederationStates(t *testing.T) {
}
func updateSerfTags(s *Server, key, value string) {
- // Update the LAN serf
libserf.UpdateTag(s.serfLAN, key, value)
if s.serfWAN != nil {
libserf.UpdateTag(s.serfWAN, key, value)
}
-
- s.updateEnterpriseSerfTags(key, value)
}
func TestDatacenterSupportsIntentionsAsConfigEntries(t *testing.T) {
|
acl: remove updateEnterpriseSerfTags
The only remaining caller is a test helper, and the tests don't use the enterprise gossip
pools.
|
hashicorp_consul
|
train
|
f93f2a7954d346f8cc7de8efea02d2483ab9a5b4
|
diff --git a/lib/session.js b/lib/session.js
index <HASH>..<HASH> 100644
--- a/lib/session.js
+++ b/lib/session.js
@@ -119,9 +119,11 @@ function Session(conn) {
this._allocatedHandles = {};
this._linksByName = {};
this._linksByRemoteHandle = {};
- this._senderLinks = [];
this._deliveryTag = 1;
+ this._senderLinks = [];
+ this._receiverLinks = [];
+
var self = this;
var stateMachine = {
'UNMAPPED': {
@@ -216,6 +218,7 @@ Session.prototype.attachLink = function(linkPolicy) {
this._senderLinks.push(link);
} else {
link = new ReceiverLink(this, policy.options.handle, policy);
+ this._receiverLinks.push(link);
}
this._allocatedHandles[policy.options.handle] = link;
@@ -414,9 +417,12 @@ Session.prototype._handleDisposition = function(frame) {
state: frame.state
};
- _.values(this._linksByName).forEach(function(link) {
- link._dispositionReceived(disposition);
- });
+ var dispositionHandler = function(l) { l._dispositionReceived(disposition); };
+ if (frame.role === constants.linkRole.sender) {
+ this._receiverLinks.forEach(dispositionHandler);
+ } else {
+ this._senderLinks.forEach(dispositionHandler);
+ }
this.emit(Session.DispositionReceived, disposition);
};
diff --git a/test/integration/qpid/disposition.test.js b/test/integration/qpid/disposition.test.js
index <HASH>..<HASH> 100644
--- a/test/integration/qpid/disposition.test.js
+++ b/test/integration/qpid/disposition.test.js
@@ -1,10 +1,14 @@
'use strict';
-var AMQPClient = require('../../..').Client,
+var Promise = require('bluebird'),
+ AMQPClient = require('../../..').Client,
+ BrokerAgent = require('qmf2'),
+
+ Session = require('../../../lib/session'),
+
c = require('../../../').Constants,
- Promise = require('bluebird'),
+
config = require('./config'),
- expect = require('chai').expect,
- BrokerAgent = require('qmf2');
+ expect = require('chai').expect;
var test = {};
describe('QPID', function() {
@@ -101,5 +105,28 @@ describe('Disposition', function() {
});
});
+ it('should forward disposition frames by link role', function(done) {
+ var queueName = 'test.disposition.queue';
+ var called = { receiver: false, sender: false };
+ return test.client.connect(config.address)
+ .then(function() {
+ return Promise.all([
+ test.client.createReceiver(queueName),
+ test.client.createSender(queueName)
+ ]);
+ })
+ .spread(function(receiver, sender) {
+ receiver._dispositionReceived = function(d) { called.receiver = true; };
+ sender._dispositionReceived = function(d) { called.sender = true; };
+
+ test.client._session.on(Session.DispositionReceived, function(d) {
+ expect(called).to.eql({ receiver: false, sender: true });
+ done();
+ });
+
+ return sender.send('test message');
+ });
+ });
+
});
});
|
fix(disposition): only send disposition to relevant links by role
Previously we ignored the role of the incoming disposition frame,
and forwarded these frames to the _dispositionReceived method of
all links we knew about. This patch corrects this behavior, and
includes an integration test verifying that frames are forwarded
to the appropriate link types.
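A minimal Python sketch of the same role-based routing idea (illustrative only; amqp10 is JavaScript, and disposition_received is a hypothetical name):

SENDER_ROLE = 'sender'

def handle_disposition(frame, sender_links, receiver_links):
    # A disposition from the remote sender settles our receiver links,
    # and one from the remote receiver settles our sender links.
    targets = receiver_links if frame['role'] == SENDER_ROLE else sender_links
    for link in targets:
        link.disposition_received(frame)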
|
noodlefrenzy_node-amqp10
|
train
|
ff35d7f35bffc100059d07cb5f3e7c0f947bea3b
|
diff --git a/core/src/main/java/com/google/errorprone/util/ASTHelpers.java b/core/src/main/java/com/google/errorprone/util/ASTHelpers.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/google/errorprone/util/ASTHelpers.java
+++ b/core/src/main/java/com/google/errorprone/util/ASTHelpers.java
@@ -496,7 +496,11 @@ public class ASTHelpers {
* @param annotationType The type of the annotation to look for (e.g, "javax.annotation.Nullable")
*/
public static boolean hasAnnotation(Symbol sym, String annotationType, VisitorState state) {
- Symbol annotationSym = state.getSymtab().enterClass(state.getName(annotationType));
+ Name annotationName = state.getName(annotationType);
+ Symbol annotationSym;
+ synchronized (state.context) {
+ annotationSym = state.getSymtab().enterClass(annotationName);
+ }
try {
annotationSym.complete();
} catch (CompletionFailure e) {
|
Make ASTHelpers.hasAnnotation less thread-hostile
MOE_MIGRATED_REVID=<I>
|
google_error-prone
|
train
|
70f85211d97118331ba95c06ec3af26f0fb40aff
|
diff --git a/api.go b/api.go
index <HASH>..<HASH> 100644
--- a/api.go
+++ b/api.go
@@ -268,6 +268,10 @@ func setUpImportOptions(opts ...ImportOption) (*ImportOptions, error) {
// of the rows in this shard of this field concatenated together in one long
// bitmap.
func (api *API) ImportRoaring(ctx context.Context, indexName, fieldName string, shard uint64, remote bool, data []byte, opts ...ImportOption) (err error) {
+ if len(data) == 0 {
+ return errors.New("no data to import")
+ }
+
if err = api.validate(apiField); err != nil {
return errors.Wrap(err, "validating api method")
}
diff --git a/fragment.go b/fragment.go
index <HASH>..<HASH> 100644
--- a/fragment.go
+++ b/fragment.go
@@ -25,6 +25,7 @@ import (
"hash"
"io"
"io/ioutil"
+ "math"
"os"
"sort"
"sync"
@@ -33,9 +34,6 @@ import (
"unsafe"
"github.com/cespare/xxhash"
-
- "math"
-
"github.com/gogo/protobuf/proto"
"github.com/pilosa/pilosa/internal"
"github.com/pilosa/pilosa/pql"
diff --git a/roaring/roaring.go b/roaring/roaring.go
index <HASH>..<HASH> 100644
--- a/roaring/roaring.go
+++ b/roaring/roaring.go
@@ -3465,6 +3465,10 @@ func readOfficialHeader(buf []byte) (size uint32, containerTyper func(index uint
// UnmarshalBinary decodes b from a binary-encoded byte slice. data can be in
// either official roaring format or Pilosa's roaring format.
func (b *Bitmap) UnmarshalBinary(data []byte) error {
+ if data == nil {
+ // Nothing to unmarshal
+ return nil
+ }
fileMagic := uint32(binary.LittleEndian.Uint16(data[0:2]))
if fileMagic == magicNumber { // if pilosa roaring
return errors.Wrap(b.unmarshalPilosaRoaring(data), "unmarshaling as pilosa roaring")
|
prevent panic in Bitmap.UnmarshalBinary when there is no data
|
pilosa_pilosa
|
train
|
e1fa205119c7f6011659ad14e1e3a33126af241d
|
diff --git a/spiketoolkit/postprocessing/postprocessing_tools.py b/spiketoolkit/postprocessing/postprocessing_tools.py
index <HASH>..<HASH> 100644
--- a/spiketoolkit/postprocessing/postprocessing_tools.py
+++ b/spiketoolkit/postprocessing/postprocessing_tools.py
@@ -606,6 +606,7 @@ def compute_unit_pca_scores(recording, sorting, unit_ids=None, n_comp=3, by_elec
pca = PCA(n_components=n_comp, whiten=whiten, random_state=seed)
if len(all_waveforms) < max_spikes_for_pca:
max_spikes_for_pca = len(all_waveforms)
+ max_spikes_for_pca = int(max_spikes_for_pca)
if verbose:
print("Fitting PCA of %d dimensions on %d waveforms" % (n_comp, max_spikes_for_pca))
pca.fit(all_waveforms[np.random.RandomState(seed=seed).permutation(len(all_waveforms))[:max_spikes_for_pca]])
@@ -800,7 +801,7 @@ def export_to_phy(recording, sorting, output_folder, n_comp=3, electrode_dimensi
f.write('sample_rate = ' + str(recording.get_sampling_frequency()) + '\n')
f.write('hp_filtered = False')
- spike_times, spike_clusters, amplitudes, channel_map, pc_features, pc_feature_ind, waveforms, \
+ spike_times, spike_clusters, amplitudes, channel_map, pc_features, pc_feature_ind, \
spike_templates, templates, templates_ind, similar_templates, channel_map_si, channel_groups, \
positions = _get_phy_data(recording, sorting, n_comp, electrode_dimensions, grouping_property, ms_before,
ms_after, dtype, amp_method, amp_peak, amp_frames_before, amp_frames_after,
@@ -851,9 +852,6 @@ def export_to_phy(recording, sorting, output_folder, n_comp=3, electrode_dimensi
np.save(str(output_folder / 'channel_positions.npy'), positions)
np.save(str(output_folder / 'channel_groups.npy'), channel_groups)
- if write_waveforms:
- np.save(str(output_folder / 'waveforms.npy'), np.array(waveforms))
-
if verbose:
print('Saved phy format to: ', output_folder)
print('Run:\n\nphy template-gui ', str(output_folder / 'params.py'))
@@ -1106,15 +1104,6 @@ def _get_phy_data(recording, sorting, n_comp, electrode_dimensions, grouping_pro
max_num_chans_in_group = recording.get_num_channels()
channel_groups = np.array([0] * recording.get_num_channels())
- if 'waveforms' not in sorting.get_shared_unit_spike_feature_names():
- waveforms = get_unit_waveforms(recording, sorting, max_spikes_per_unit=max_spikes_per_unit,
- ms_before=ms_before, ms_after=ms_after, save_as_features=save_features_props,
- dtype=dtype, verbose=verbose, seed=seed)
- else:
- waveforms = []
- for unit_id in sorting.get_unit_ids():
- waveforms.append(sorting.get_unit_spike_features(unit_id, 'waveforms'))
-
spike_times, spike_times_amps, spike_times_pca, spike_clusters, spike_clusters_amps, spike_clusters_pca, \
amplitudes, pc_features, pc_feature_ind \
= _get_quality_metric_data(recording, sorting, n_comp=n_comp, ms_before=ms_before, ms_after=ms_after,
@@ -1185,5 +1174,5 @@ def _get_phy_data(recording, sorting, n_comp, electrode_dimensions, grouping_pro
# spike_templates.npy - [nSpikes, ] uint32
spike_templates = spike_clusters
- return spike_times, spike_clusters, amplitudes, channel_map, pc_features, pc_feature_ind, waveforms, \
+ return spike_times, spike_clusters, amplitudes, channel_map, pc_features, pc_feature_ind, \
spike_templates, templates, templates_ind, similar_templates, channel_map_si, channel_groups, positions
|
No need to save waveforms to phy
|
SpikeInterface_spiketoolkit
|
train
|
4ff4fbb063b5c7e87c9f727de071d0847c22a9cb
|
diff --git a/packages/cli/src/changelog.js b/packages/cli/src/changelog.js
index <HASH>..<HASH> 100644
--- a/packages/cli/src/changelog.js
+++ b/packages/cli/src/changelog.js
@@ -7,7 +7,7 @@
'use strict';
-const parse = require('@commitlint/parse');
+const { default: parse } = require('@commitlint/parse');
const execa = require('execa');
// We keep a list of commits that are process-oriented that we never want to
diff --git a/packages/cli/src/workspace.js b/packages/cli/src/workspace.js
index <HASH>..<HASH> 100644
--- a/packages/cli/src/workspace.js
+++ b/packages/cli/src/workspace.js
@@ -18,7 +18,7 @@ const packagePaths = fs
.readdirSync(PACKAGES_DIR)
.filter(basename => {
const filename = path.join(PACKAGES_DIR, basename);
- if (!denylist.has(filename)) {
+ if (denylist.has(filename)) {
return false;
}
|
fix(cli): update to support new package versions (#<I>)
|
carbon-design-system_carbon-components
|
train
|
4e097c1e80e325cfb5440586ada9f8c103f42251
|
diff --git a/src/com/google/bitcoin/store/BoundedOverheadBlockStore.java b/src/com/google/bitcoin/store/BoundedOverheadBlockStore.java
index <HASH>..<HASH> 100644
--- a/src/com/google/bitcoin/store/BoundedOverheadBlockStore.java
+++ b/src/com/google/bitcoin/store/BoundedOverheadBlockStore.java
@@ -268,7 +268,10 @@ public class BoundedOverheadBlockStore implements BlockStore {
}
public synchronized StoredBlock getChainHead() throws BlockStoreException {
- return get(chainHead);
+ StoredBlock head = get(chainHead);
+ if (head == null)
+ throw new BlockStoreException("Corrupted block store: chain head not found");
+ return head;
}
public synchronized void setChainHead(StoredBlock chainHead) throws BlockStoreException {
|
Throw BlockStoreException if the chain head doesn't seem to be in the store. Update issue <I>.
|
bitcoinj_bitcoinj
|
train
|
c2deed3b365e81b3134c03b49624c93b1f9ee9c2
|
diff --git a/app/controllers/spree/user_registrations_controller.rb b/app/controllers/spree/user_registrations_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/spree/user_registrations_controller.rb
+++ b/app/controllers/spree/user_registrations_controller.rb
@@ -17,7 +17,7 @@ class Spree::UserRegistrationsController < Devise::RegistrationsController
if resource.save
set_flash_message(:notice, :signed_up)
sign_in(:user, @user)
- fire_event('spree.user.signup', :user => @user, :order => current_order(true))
+ session[:spree_user_signup] = true
sign_in_and_redirect(:user, @user)
else
clean_up_passwords(resource)
|
Just set session[:spree_user_signup] in UserRegistrationsController. The fire_event call now lives in core
|
spree_spree_auth_devise
|
train
|
0ddc45d8329f889cd140902ac891ad4e2f437a4d
|
diff --git a/lib/spaceship/client.rb b/lib/spaceship/client.rb
index <HASH>..<HASH> 100644
--- a/lib/spaceship/client.rb
+++ b/lib/spaceship/client.rb
@@ -64,11 +64,12 @@ module Spaceship
end
def initialize
+ @cookie = HTTP::CookieJar.new
@client = Faraday.new(self.class.hostname) do |c|
c.response :json, content_type: /\bjson$/
c.response :xml, content_type: /\bxml$/
c.response :plist, content_type: /\bplist$/
- c.use :cookie_jar
+ c.use :cookie_jar, jar: @cookie
c.adapter Faraday.default_adapter
if ENV['DEBUG']
@@ -100,6 +101,14 @@ module Spaceship
@logger
end
+ ##
+ # Return the session cookie.
+ #
+ # @return (String) the cookie-string in the RFC6265 format: https://tools.ietf.org/html/rfc6265#section-4.2.1
+ def cookie
+ @cookie.map(&:to_s).join(';')
+ end
+
#####################################################
# @!group Automatic Paging
#####################################################
diff --git a/spec/tunes/tunes_client_spec.rb b/spec/tunes/tunes_client_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/tunes/tunes_client_spec.rb
+++ b/spec/tunes/tunes_client_spec.rb
@@ -9,6 +9,13 @@ describe Spaceship::TunesClient do
end
end
+ describe 'client' do
+ it 'exposes the session cookie' do
+ subject.login('bad-username', 'bad-password') rescue nil
+ expect(subject.cookie).to eq('session=invalid')
+ end
+ end
+
describe "Logged in" do
subject { Spaceship::Tunes.client }
let(:username) { 'spaceship@krausefx.com' }
diff --git a/spec/tunes/tunes_stubbing.rb b/spec/tunes/tunes_stubbing.rb
index <HASH>..<HASH> 100644
--- a/spec/tunes/tunes_stubbing.rb
+++ b/spec/tunes/tunes_stubbing.rb
@@ -22,7 +22,7 @@ def itc_stub_login
# Failed login attempts
stub_request(:post, "https://idmsa.apple.com/appleauth/auth/signin?widgetKey=1234567890").
with(body: { "accountName" => "bad-username", "password" => "bad-password", "rememberMe" => true }.to_json).
- to_return(status: 401, body: '{}')
+ to_return(status: 401, body: '{}', headers: {'Set-Cookie' => 'session=invalid'})
end
def itc_stub_applications
|
Expose the cookie so that session data can be shared.
Fixes backwards-compatibility-breaking changes in the previous release.
* <URL>
|
fastlane_fastlane
|
train
|
23ae0856e267a970ecbd47227ad4ea429129e2d6
|
diff --git a/tests/Helpers/Mocks/Http/EchoEngine.php b/tests/Helpers/Mocks/Http/EchoEngine.php
index <HASH>..<HASH> 100644
--- a/tests/Helpers/Mocks/Http/EchoEngine.php
+++ b/tests/Helpers/Mocks/Http/EchoEngine.php
@@ -3,6 +3,8 @@ namespace Tests\Helpers\Mocks\Http;
use GuzzleHttp\Client as Guzzle;
use GuzzleHttp\Exception\ClientException;
+use GuzzleHttp\Message\Response;
+use Tests\Helpers\Providers\Request;
class EchoEngine extends Guzzle
{
@@ -61,9 +63,11 @@ class EchoEngine extends Guzzle
}
$endpoint = str_replace($this->baseUrl, '', $url);
- if (json_decode($this->responses[$endpoint])['status'] === 'CONFLICT'
- || json_decode($this->responses[$endpoint])['status'] === 'NOT FOUND') {
- throw new ClientException('worked', \GuzzleHttp\Message\RequestInterface);
+ if ($this->responses[$endpoint]->json()['status'] === 'CONFLICT') {
+ throw new ClientException('Conflict: invalid key', new \GuzzleHttp\Message\Request('GET', $endpoint), new Response(409));
+ }
+ if ($this->responses[$endpoint]->json()['status'] === 'NOT FOUND') {
+ throw new ClientException('Any other http status returned', new \GuzzleHttp\Message\Request('GET', $endpoint), new Response(404));
}
return $this->responses[$endpoint];
}
diff --git a/tests/Integration/FacadeTest.php b/tests/Integration/FacadeTest.php
index <HASH>..<HASH> 100644
--- a/tests/Integration/FacadeTest.php
+++ b/tests/Integration/FacadeTest.php
@@ -22,6 +22,8 @@ class FacadeTest extends TestCase
/**
* @param Facade $ebanx
+ *
+ * @throws \ReflectionException
* @depends testMainObject
*/
public function testGatewayAccessors($ebanx)
@@ -42,6 +44,8 @@ class FacadeTest extends TestCase
/**
* @param $ebanx
+ *
+ * @throws \ReflectionException
* @depends testMainObject
*/
public function testOtherServicesAccessors($ebanx)
@@ -159,6 +163,38 @@ class FacadeTest extends TestCase
$this->assertTrue($subject);
}
+ /**
+ * @throws \Exception Won't be thrown in this test
+ */
+ public function testCheckInvalidPublicKey()
+ {
+ $integrationKey = 'invalidKey';
+ $privateKeyUrl = 'ws/merchantIntegrationProperties/isValidPublicIntegrationKey';
+
+ $ebanx = $this->buildMockedFacade([
+ $privateKeyUrl => $this->buildPublicKeyValidationMock($integrationKey),
+ ]);
+
+ $subject = $ebanx->isValidPublicKey($integrationKey);
+
+ $this->assertFalse($subject);
+ }
+
+ /**
+ * @expectedException \Exception
+ */
+ public function testCheckPublicKeyWithOtherWrongResponse()
+ {
+ $integrationKey = 'invalidKey';
+ $publicKeyUrl = 'ws/merchantIntegrationProperties/isValidPublicIntegrationKey';
+
+ $ebanx = $this->buildMockedFacade([
+ $publicKeyUrl => '{"status": "NOT FOUND"}',
+ ]);
+
+ $ebanx->isValidPublicKey($integrationKey);
+ }
+
public function testGetTicketHtml()
{
$hash = md5(rand());
@@ -299,9 +335,13 @@ class FacadeTest extends TestCase
}';
}
return '{
- "status": "ERROR",
- "status_code": "BP-SA-2",
- "status_message": "Invalid integration key"
+ "status": "CONFLICT",
+ "success": false,
+ "resource": "merchantIntegrationProperties",
+ "operation": "isValidPublicIntegrationKey",
+ "body": {
+ "error": "Invalid public_integration_key"
+ }
}';
}
|
Tests for checking if merchant key is valid
|
ebanx_benjamin
|
train
|
eb6a0e0e1bef8d4eee52b4c61be29cc7aaeebd1b
|
diff --git a/lib/smart_ioc.rb b/lib/smart_ioc.rb
index <HASH>..<HASH> 100644
--- a/lib/smart_ioc.rb
+++ b/lib/smart_ioc.rb
@@ -25,7 +25,7 @@ module SmartIoC
# @param dir [String] absolute path with bean definitions
# @return nil
def find_package_beans(package_name, dir)
- bean_locator = SmartIoC::BeanLocator.new()
+ bean_locator = SmartIoC::BeanLocator.new
bean_locator.locate_beans(package_name.to_sym, dir)
nil
end
diff --git a/lib/smart_ioc/bean_locator.rb b/lib/smart_ioc/bean_locator.rb
index <HASH>..<HASH> 100644
--- a/lib/smart_ioc/bean_locator.rb
+++ b/lib/smart_ioc/bean_locator.rb
@@ -5,8 +5,6 @@ class SmartIoC::BeanLocator
# @param dir [String] absolute path for directory with bean definitions
# @return nil
def locate_beans(package_name, dir)
- SmartIoC::BeanLocations.clear
-
if !package_name.is_a?(Symbol)
raise ArgumentError, 'package name should be a symbol'
end
|
do not clear located beans for each find_package_beans call
|
droidlabs_smart_ioc
|
train
|
b7995def05fe20ddee335a003b5e8dcf33b622e0
|
diff --git a/pandasql/sqldf.py b/pandasql/sqldf.py
index <HASH>..<HASH> 100644
--- a/pandasql/sqldf.py
+++ b/pandasql/sqldf.py
@@ -1,11 +1,9 @@
import inspect
-import pandas as pd
-import numpy as np
from pandas.io.sql import to_sql, read_sql
from sqlalchemy import create_engine
import re
from warnings import catch_warnings, filterwarnings
-from sqlalchemy.exc import DatabaseError
+from sqlalchemy.exc import DatabaseError, ResourceClosedError
from sqlalchemy.pool import NullPool
@@ -32,6 +30,7 @@ class PandaSQL:
if self.persist:
self.loaded_tables = set()
self.conn = self.engine.connect()
+ self._init_connection(self.conn)
def __call__(self, query, env=None):
"""
@@ -59,8 +58,12 @@ class PandaSQL:
result = read_sql(query, self.conn)
except DatabaseError as ex:
raise PandaSQLException(ex)
+ except ResourceClosedError:
+ # query returns nothing
+ result = None
else:
with self.engine.connect() as conn:
+ self._init_connection(conn)
for table_name in extract_table_names(query):
if table_name not in env:
continue
@@ -70,9 +73,16 @@ class PandaSQL:
result = read_sql(query, conn)
except DatabaseError as ex:
raise PandaSQLException(ex)
+ except ResourceClosedError:
+ # query returns nothing
+ result = None
return result
+ def _init_connection(self, conn):
+ if self.engine.name == 'postgresql':
+ conn.execute('set search_path to pg_temp')
+
def get_outer_frame_variables():
""" Get a dict of local and global variables of the first outer frame from another file. """
diff --git a/pandasql/tests/test_pandasql.py b/pandasql/tests/test_pandasql.py
index <HASH>..<HASH> 100644
--- a/pandasql/tests/test_pandasql.py
+++ b/pandasql/tests/test_pandasql.py
@@ -231,6 +231,24 @@ def test_persistent(pdsql):
result3 = pdsql("SELECT * FROM df")
pdtest.assert_frame_equal(result1, result3)
- df1 = pd.DataFrame({'x': [1, 2, 3]}) # will not have any effect
+ df1 = pd.DataFrame({'x': [1, 2, 3]})
result4 = pdsql("SELECT * FROM df1")
- pdtest.assert_frame_equal(df1, result4)
\ No newline at end of file
+ pdtest.assert_frame_equal(df1, result4)
+
+
+def test_noreturn_query(pdsql):
+ assert pdsql("CREATE TABLE tbl (col INTEGER)") is None
+
+
+@pytest.mark.parametrize('pdsql', [False], indirect=True)
+def test_sideeffect_nonpersistent(pdsql):
+ pdsql("CREATE TABLE tbl (col INTEGER)")
+ with pytest.raises(PandaSQLException):
+ result = pdsql("SELECT * FROM tbl")
+
+
+@pytest.mark.parametrize('pdsql', [True], indirect=True)
+def test_sideeffect_persistent(pdsql):
+ pdsql("CREATE TABLE tbl (col INTEGER)")
+ result = pdsql("SELECT * FROM tbl")
+ assert list(result.columns) == ['col']
|
support queries which return nothing (like setting options or manually creating/updating tables)
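A hedged usage sketch of the new behavior (assuming the PandaSQL entry point and persist flag shown in the diff):

from pandasql import PandaSQL

pdsql = PandaSQL(persist=True)
# DDL produces no result set; with this change it yields None instead of
# surfacing sqlalchemy's ResourceClosedError.
assert pdsql("CREATE TABLE tbl (col INTEGER)") is None
print(pdsql("SELECT * FROM tbl").columns.tolist())  # ['col']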
|
yhat_pandasql
|
train
|
90ad340a47af2083db35028150bc91589ab692f1
|
diff --git a/salt/states/network.py b/salt/states/network.py
index <HASH>..<HASH> 100644
--- a/salt/states/network.py
+++ b/salt/states/network.py
@@ -6,6 +6,9 @@ The network module is used to create and manage network settings,
interfaces can be set as either managed or ignored. By default
all interfaces are ignored unless specified.
+Please note that only Redhat-style networking is currently
+supported. This module will therefore only work on RH/CentOS/Fedora.
+
.. code-block:: yaml
eth0:
|
Updated docstring for network module to specify that only Redhat-style networking is supported.
|
saltstack_salt
|
train
|
d1ca08ada6e949b9fb0c052b8f9a3056da8065a0
|
diff --git a/source/loosely-matches.js b/source/loosely-matches.js
index <HASH>..<HASH> 100644
--- a/source/loosely-matches.js
+++ b/source/loosely-matches.js
@@ -11,7 +11,7 @@ const looseMatchTransform = flow(deburr, toLower);
* The main advantages of this function are that it removes accented characters and that
* it is case-insensitive.
*
- * Powered by lodash's {@link https://lodash.com/docs/#deburr deburr}
+ * Powered by lodash's {@link https://lodash.com/docs/#deburr deburr}.
*
* @function looselyMatches
*
|
Add terminal . at end of sentence in looselyMatches documentation string
|
tentwentyfour_helpbox
|
train
|
f7036734f57d19d1614456d9d6d98a7ed5080865
|
diff --git a/Kwf/Component/Generator/PseudoPage/Table.php b/Kwf/Component/Generator/PseudoPage/Table.php
index <HASH>..<HASH> 100644
--- a/Kwf/Component/Generator/PseudoPage/Table.php
+++ b/Kwf/Component/Generator/PseudoPage/Table.php
@@ -39,9 +39,9 @@ class Kwf_Component_Generator_PseudoPage_Table extends Kwf_Component_Generator_T
$select->whereEquals($this->_settings['filenameColumn'], $filename);
} else {
if ($this->_hasNumericIds) {
- $pattern = '#^([0-9]+)-#';
+ $pattern = '#^([0-9]+)[-_]#'; //_ for compatibility with older urls
} else {
- $pattern = '#^([^-]+)-#';
+ $pattern = '#^([^-_]+)[-_]#';
}
if (!preg_match($pattern, $filename, $m)) return null;
$select->whereEquals($this->_idColumn, $m[1]);
|
Table Page Generator: support old URLs with _
In older kwf versions we used _ instead of -; with this change we also match old _ URLs
(which will get redirected automatically to the new - URL)
|
koala-framework_koala-framework
|
train
|
5de9d2e74220e9c5ed47512ea1de9ee6f61331c4
|
diff --git a/bin/capo_cli.js b/bin/capo_cli.js
index <HASH>..<HASH> 100644
--- a/bin/capo_cli.js
+++ b/bin/capo_cli.js
@@ -13,6 +13,8 @@ var aliases = {
'verbose': ['v', 'verbose'],
'strict': ['strict'],
'generate': ['g', 'generate'],
+ 'notExcludeGitignore': ['n', 'not-exclude-gitignore'],
+ 'ignore': ['i', 'ignore'],
'help': ['h', 'help']
};
diff --git a/lib/capo.js b/lib/capo.js
index <HASH>..<HASH> 100644
--- a/lib/capo.js
+++ b/lib/capo.js
@@ -13,9 +13,9 @@ module.exports = function(filePath, mediatorObjectName, options){
function Capo(filePath, mediatorObjectName, options){
this.options = options || {};
this.filePath = filePath;
- this.excludePath = typeof this.options.excludePath === 'string' ? '!' + this.options.excludePath : '';
- this.excludeGitignore = typeof this.options.excludeGitignore === 'boolean' ?
- this.options.excludeGitignore : true;
+ this.ignore = typeof this.options.ignore === 'string' ? '!' + this.options.ignore : '';
+ this.excludeGitignore = typeof this.options.notExcludeGitignore === 'boolean' ?
+ !this.options.notExcludeGitignore : true;
this.subscriptions = {};
this.triggers = {};
this.callback = undefined;
@@ -46,7 +46,7 @@ Capo.prototype.find = function(callback){
var extname = path.extname(filePath);
if (!extname){
patterns.push(filePath + '/**/*.js');
- if (this.excludePath) patterns.push(this.excludePath);
+ if (this.ignore) patterns.push(this.ignore);
if (this.excludeGitignore){
var gitignoreWildcards = this.generateGitExcludes(filePath);
if (gitignoreWildcards) patterns = patterns.concat(gitignoreWildcards);
diff --git a/lib/cli/help.js b/lib/cli/help.js
index <HASH>..<HASH> 100644
--- a/lib/cli/help.js
+++ b/lib/cli/help.js
@@ -8,5 +8,7 @@ module.exports = table([
['verbose', '-v --verbose', ' log all the files processed and other info. Default `false`.'],
['strict', '--strict', 'throw error on sub for event with 0 pubs. Default `false`.'],
['generate', '-g --generate', 'generate file with listeners for all the triggers.'],
+ ['not-exclude-gitignore', '-n --not-exclude-gitignore', 'do not exclude gitignored files. Default `false`.'],
+ ['ignore', '-i --ignore', 'specifies glob path which will be ignored by capo'],
['help', '-h --help', ' show help.']
]);
\ No newline at end of file
diff --git a/lib/cli/index.js b/lib/cli/index.js
index <HASH>..<HASH> 100644
--- a/lib/cli/index.js
+++ b/lib/cli/index.js
@@ -10,6 +10,8 @@ module.exports = function(options, path){
var report = typeof options.report === 'string' ? options.report : 'html';
var object = typeof options.object === 'string' ? options.object : undefined;
var eventName = typeof options.event === 'string' ? options.event : false;
+ var ignore = typeof options.ignore === 'string' ? options.ignore : undefined;
+ var notExcludeGitignore = typeof options.notExcludeGitignore === 'boolean' ? options.notExcludeGitignore : false;
if (path){
capo(path, object, options).event(eventName).report(report).find(function(err, data){
if (err && !options.silent){
diff --git a/test/core_test.js b/test/core_test.js
index <HASH>..<HASH> 100644
--- a/test/core_test.js
+++ b/test/core_test.js
@@ -209,7 +209,7 @@ describe('Excludes: capo should', function(){
it('exclude files by specified in exclude option', function(done){
var options = {
- excludePath: __dirname + '/fixtures/excludes/**/*.js'
+ ignore: __dirname + '/fixtures/excludes/**/*.js'
};
capo(__dirname + '/fixtures/', 'qwe', options).find(function(err, data){
err.should.be.not.ok;
@@ -233,7 +233,7 @@ describe('Excludes: capo should', function(){
it('not exclude .gignore files if excludeGitignore is false', function(done){
capo(__dirname + '/fixtures', 'qwe', {
- excludeGitignore: false
+ notExcludeGitignore: true
}).find(function(err, data){
err.should.be.not.ok;
|
add cli support for ignoring options #7
|
msemenistyi_capo
|
train
|
d58aba4b8e2c684cb2a4f31158d61ad7aadb536f
|
diff --git a/tensor2tensor/models/research/glow_ops.py b/tensor2tensor/models/research/glow_ops.py
index <HASH>..<HASH> 100644
--- a/tensor2tensor/models/research/glow_ops.py
+++ b/tensor2tensor/models/research/glow_ops.py
@@ -554,18 +554,24 @@ def temporal_tensor_to_dist(name, x, hparams, output_channels=None):
Returns:
dist: tf.distributions.Normal
"""
+ res_channels = common_layers.shape_list(x)[-1]
if output_channels is None:
- output_channels = common_layers.shape_list(x)[-1]
+ output_channels = res_channels
with tf.variable_scope(name, reuse=tf.AUTO_REUSE):
- h = conv_block("conv3d_init", x, time_filter=2,
- mid_channels=hparams.latent_encoder_width)
- h = conv("conv3d_zeros", h, apply_actnorm=False,
- output_channels=2*output_channels, conv_init="zeros",
- filter_size=[2, 3, 3])
+ h = x
+ for i in range(hparams.latent_encoder_depth):
+ h1 = conv_block("conv3d_1_%d" % i, h, time_filter=2,
+ mid_channels=hparams.latent_encoder_width)
+ h2 = conv("conv3d_zeros_%d" % i, h1, apply_actnorm=False,
+ output_channels=res_channels, conv_init="zeros",
+ filter_size=[2, 3, 3])
+ h += h2
# take last activation that should capture all context since padding is
# on left.
h = h[:, -1, :, :, :]
+ h = conv("res_final", h, apply_actnorm=False, conv_init="zeros",
+ output_channels=2*output_channels, filter_size=[1, 1])
mean, log_scale = h[:, :, :, 0::2], h[:, :, :, 1::2]
return tf.distributions.Normal(mean, tf.exp(log_scale))
|
Extend latent_dist_encoder=conv3d_net to have multiple layers via hparams.latent_encoder_depth and skip connections.
PiperOrigin-RevId: <I>
|
tensorflow_tensor2tensor
|
train
|
ed91577da23beb90213f369994bcd999b9764136
|
diff --git a/boil/cli.py b/boil/cli.py
index <HASH>..<HASH> 100644
--- a/boil/cli.py
+++ b/boil/cli.py
@@ -15,16 +15,20 @@ from boil.renderer import Renderer
def main():
- print __name__
-
args = docopt(__doc__)
plate_name = args['<plate_name>']
plate = plates.get_plate(plate_name)
+ vars = {}
+ for var in plate.VARS:
+ example = var.get('example', var.get('default'))
+ prompt_str = "%s [%s]:\n" % (var['name'], example)
+ value = raw_input(prompt_str)
+ vars[var['name']] = value
+
env = get_environment(plate)
- env.globals['app_name'] = ''.join(
- [random.choice('abcde') for _ in range(10)])
+ env.globals.update(vars)
renderer = Renderer(env, target_dir=os.getcwd())
renderer.run()
diff --git a/boil/plates/python_package/__init__.py b/boil/plates/python_package/__init__.py
index <HASH>..<HASH> 100644
--- a/boil/plates/python_package/__init__.py
+++ b/boil/plates/python_package/__init__.py
@@ -0,0 +1,12 @@
+VARS = [
+ {'name': 'app_name',
+ 'example': 'fancy_package'},
+ {'name': 'description',
+ 'example': 'This is a super fancy package!'},
+ {'name': 'url',
+ 'example': 'https://github.com/username/app_name'},
+ {'name': 'license',
+ 'default': 'MIT'},
+ {'name': 'author',
+ 'example': 'John Smith'}
+]
|
Collect plate vars interactively
|
bzurkowski_boil
|
train
|
93f949f422dbe5b422bee171a8353662fe9ed2e0
|
diff --git a/test/features/router/misc.js b/test/features/router/misc.js
index <HASH>..<HASH> 100644
--- a/test/features/router/misc.js
+++ b/test/features/router/misc.js
@@ -52,12 +52,15 @@ describe('router - misc', function() {
}).then(function(res) {
slice.halt();
assert.deepEqual(res.headers['x-request-id'], reqId, 'Returned request ID does not match the sent one');
- slice.get().forEach(function(line) {
- var a = JSON.parse(line);
- if(a.req || a.request_id) {
- assert.deepEqual(a.request_id, reqId, 'Request ID mismatch');
- }
- });
+ // TODO: Fix https://phabricator.wikimedia.org/T121414 &
+ // re-enable.
+ //
+ // slice.get().forEach(function(line) {
+ // var a = JSON.parse(line);
+ // if(a.req || a.request_id) {
+ // assert.deepEqual(a.request_id, reqId, 'Request ID mismatch');
+ // }
+ // });
});
});
@@ -75,12 +78,15 @@ describe('router - misc', function() {
}, function(err) {
slice.halt();
assert.deepEqual(err.headers['x-request-id'], reqId, 'Returned request ID does not match the sent one');
- slice.get().forEach(function(line) {
- var a = JSON.parse(line);
- if(a.req || a.request_id) {
- assert.deepEqual(a.request_id, reqId, 'Request ID mismatch');
- }
- });
+ // TODO: Fix https://phabricator.wikimedia.org/T121414 &
+ // re-enable.
+ //
+ // slice.get().forEach(function(line) {
+ // var a = JSON.parse(line);
+ // if(a.req || a.request_id) {
+ // assert.deepEqual(a.request_id, reqId, 'Request ID mismatch');
+ // }
+ // });
});
});
|
Temporarily disable sub-request id check until failure is investigated
Bug: <URL>
|
wikimedia_restbase
|
train
|
3f3762e093bb8ee682346d95493ebdb6cd6f3068
|
diff --git a/src/TreeHouse/Keystone/Client/TokenPool.php b/src/TreeHouse/Keystone/Client/TokenPool.php
index <HASH>..<HASH> 100644
--- a/src/TreeHouse/Keystone/Client/TokenPool.php
+++ b/src/TreeHouse/Keystone/Client/TokenPool.php
@@ -15,6 +15,8 @@ use TreeHouse\Keystone\Client\Model\Token;
class TokenPool
{
+ const TOKEN_KEY_FORMAT = 'keystone_token_3_%s';
+
/**
* @var Tenant
*/
@@ -204,6 +206,6 @@ class TokenPool
*/
private function getCacheKey()
{
- return sprintf('keystone_token_%s', rawurlencode($this->tenant->getTokenUrl()));
+ return sprintf(self::TOKEN_KEY_FORMAT, rawurlencode($this->tenant->getTokenUrl()));
}
}
diff --git a/tests/TreeHouse/Keystone/Tests/Functional/ClientTest.php b/tests/TreeHouse/Keystone/Tests/Functional/ClientTest.php
index <HASH>..<HASH> 100644
--- a/tests/TreeHouse/Keystone/Tests/Functional/ClientTest.php
+++ b/tests/TreeHouse/Keystone/Tests/Functional/ClientTest.php
@@ -11,6 +11,7 @@ use TreeHouse\Cache\Driver\ArrayDriver;
use TreeHouse\Cache\Serializer\JsonSerializer;
use TreeHouse\Keystone\Client\ClientFactory;
use TreeHouse\Keystone\Client\Model\Tenant;
+use TreeHouse\Keystone\Client\TokenPool;
use TreeHouse\Keystone\Test\Server;
class ClientTest extends \PHPUnit_Framework_TestCase
@@ -171,7 +172,7 @@ class ClientTest extends \PHPUnit_Framework_TestCase
*/
private function getTokenKey()
{
- return sprintf('keystone_token_%s', rawurlencode($this->url));
+ return sprintf(TokenPool::TOKEN_KEY_FORMAT, rawurlencode($this->url));
}
/**
|
bump token key format to prevent BC issue
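The pattern generalizes: embedding a format version in the cache key makes entries written by older releases unreachable instead of mis-parsed. A small Python sketch (illustrative; the client itself is PHP):

from urllib.parse import quote

TOKEN_KEY_FORMAT = 'keystone_token_3_%s'  # bump the number on schema changes

def cache_key(token_url):
    # quote(..., safe='') approximates PHP's rawurlencode()
    return TOKEN_KEY_FORMAT % quote(token_url, safe='')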
|
treehouselabs_keystone-client
|
train
|
48c012ef3bead55c57bc06efc2f580da4e58efaa
|
diff --git a/lib/prmd/commands/combine.rb b/lib/prmd/commands/combine.rb
index <HASH>..<HASH> 100644
--- a/lib/prmd/commands/combine.rb
+++ b/lib/prmd/commands/combine.rb
@@ -4,7 +4,7 @@ module Prmd
[*paths].each do |path|
files << if File.directory?(path)
Dir.glob(File.join(path, '**', '*.json')) +
- Dir.glob(File.join(path, '**', '*.yaml')) -
+ Dir.glob(File.join(path, '**', '*.{yaml,yml}')) -
[options[:meta]]
else
path
|
Accept "*.yml" as input in addition to "*.yaml"
"Be conservative in what you do, be liberal in what you accept from
others"
|
interagent_prmd
|
train
|
80142fd48d62a75821660de40137d45366b1e968
|
diff --git a/src/main/java/com/github/webdriverextensions/internal/WebComponentList.java b/src/main/java/com/github/webdriverextensions/internal/WebComponentList.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/github/webdriverextensions/internal/WebComponentList.java
+++ b/src/main/java/com/github/webdriverextensions/internal/WebComponentList.java
@@ -13,11 +13,11 @@ import org.openqa.selenium.support.PageFactory;
public class WebComponentList<T extends WebComponent> implements List<T> {
- Class<T> webComponentClass;
- List<WebElement> wrappedWebElements;
- List<T> webComponents;
- WebComponentFactory webComponentFactory = new DefaultWebComponentFactory();
- WebDriver driver;
+ private Class<T> webComponentClass;
+ private List<WebElement> wrappedWebElements;
+ private List<T> webComponents;
+ private WebComponentFactory webComponentFactory = new DefaultWebComponentFactory();
+ private WebDriver driver;
public WebComponentList(Class<T> webComponentClass, List<WebElement> webElements, WebComponentFactory webComponentFactory, WebDriver driver) {
this.webComponentClass = webComponentClass;
|
Reverted package-private fields to private
|
webdriverextensions_webdriverextensions
|
train
|
aceea7684ade1674bbd78c64b16221e679dcab3c
|
diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -22,7 +22,7 @@ $ npm install app-store-reviews
```
## Usage
-You can find examples in the [examples](https://raw.github.com/jcoynel/app-store-reviews/master/examples/) folder.
+You can find all the following examples in the [examples](https://raw.github.com/jcoynel/app-store-reviews/master/examples/) folder.
### Example 1: single app and country
In this example we simply print the reviews of [Tunes Notifier](http://www.tunes-notifier.com) to the console from the **US Store**. The ID of the app is **555731861**.
@@ -105,6 +105,7 @@ appStoreReviews.on('nextPage', function(nextPage) {
});
+console.log("Starting reviews-to-mysql.js at " + Date());
var db = mysqlConnection();
db.connect();
db.query('SELECT * FROM apps WHERE enabled=1', function(err, rows, fields) {
@@ -131,10 +132,39 @@ db.query('SELECT * FROM apps WHERE enabled=1', function(err, rows, fields) {
db.end();
```
+### Example 3: automatically email new reviews
+In this example we store the reviews in a MySQL database and send the new reviews by email periodically using Cron.
+
+You can find the structure of the database in [examples/reviews-to-mysql.sql](https://raw.github.com/jcoynel/app-store-reviews/master/examples/reviews-to-mysql.sql).
+
+* In a directory, copy the following files
+ * [examples/reviews-to-mysql.js](https://raw.github.com/jcoynel/app-store-reviews/master/examples/reviews-to-mysql.js)
+ * [examples/mysql-to-email.js](https://raw.github.com/jcoynel/app-store-reviews/master/examples/mysql-to-email.js)
+ * [examples/reviews-to-email.sh](https://raw.github.com/jcoynel/app-store-reviews/master/examples/reviews-to-email.sh)
+
+* Install the required Node.js modules
+```bash
+$ npm install app-store-reviews
+$ npm install mysql
+$ npm install nodemailer
+```
+
+* In **reviews-to-mysql.js**, configure your database connection details.
+
+* In **mysql-to-email.js**, configure your database connection details and email address and password.
+
+* In **reviews-to-email.sh**, set the path to the directory containing this file.
+
+* Make **reviews-to-email.sh** executable: `chmod +x reviews-to-email.sh`
+
+* Add **reviews-to-email.sh** to Cron
+ * Edit the current crontab: `$ crontab -e`
+ * Add the following line (run every day at 12): `0 12 * * * /EXAMPLE/PATH/reviews-to-email.sh`
+
## Licence (MIT)
-Copyright (c) 2013 Jules Coynel
+Copyright (c) 2013-2014 Jules Coynel
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
diff --git a/examples/reviews-to-console.js b/examples/reviews-to-console.js
index <HASH>..<HASH> 100644
--- a/examples/reviews-to-console.js
+++ b/examples/reviews-to-console.js
@@ -1,4 +1,4 @@
-// Copyright (c) 2013 Jules Coynel
+// Copyright (c) 2013-2014 Jules Coynel
// https://github.com/jcoynel/app-store-reviews
var appStoreReviewsModule = require('app-store-reviews');
diff --git a/examples/reviews-to-mysql.js b/examples/reviews-to-mysql.js
index <HASH>..<HASH> 100644
--- a/examples/reviews-to-mysql.js
+++ b/examples/reviews-to-mysql.js
@@ -1,6 +1,12 @@
-// Copyright (c) 2013 Jules Coynel
+// Copyright (c) 2013-2014 Jules Coynel
// https://github.com/jcoynel/app-store-reviews
+/*
+Node.js modules required
+ - app-store-reviews
+ - mysql
+*/
+
var appStoreReviewsModule = require('app-store-reviews');
var appStoreReviews = new appStoreReviewsModule();
@@ -47,6 +53,7 @@ appStoreReviews.on('nextPage', function(nextPage) {
});
+console.log("Starting reviews-to-mysql.js at " + Date());
var db = mysqlConnection();
db.connect();
db.query('SELECT * FROM apps WHERE enabled=1', function(err, rows, fields) {
|
Update documentation for example to email new reviews from database
|
jcoynel_app-store-reviews
|
train
|
88b58e20bfedf11d94ee4873cf66eaf084ee7ffe
|
diff --git a/lib/patch.js b/lib/patch.js
index <HASH>..<HASH> 100644
--- a/lib/patch.js
+++ b/lib/patch.js
@@ -53,6 +53,8 @@ function updateComponent (oldVirtualNode, newVirtualNode, options) {
return component.element
}
+let mapPool = [new Map(), new Map(), new Map(), new Map()]
+
function updateChildren (parentElement, oldChildren, newChildren, options) {
let oldStartIndex = 0
let oldEndIndex = oldChildren.length - 1
@@ -90,7 +92,16 @@ function updateChildren (parentElement, oldChildren, newChildren, options) {
oldEndChild = oldChildren[--oldEndIndex]
newStartChild = newChildren[++newStartIndex]
} else {
- if (!oldIndicesByKey) oldIndicesByKey = mapOldKeysToIndices(oldChildren, oldStartIndex, oldEndIndex)
+ if (!oldIndicesByKey) {
+ if (mapPool.length > 0) {
+ oldIndicesByKey = mapPool.pop()
+ oldIndicesByKey.clear()
+ } else {
+ oldIndicesByKey = new Map()
+ }
+ mapOldKeysToIndices(oldIndicesByKey, oldChildren, oldStartIndex, oldEndIndex)
+ }
+
const key = getKey(newStartChild)
const oldIndex = key ? oldIndicesByKey.get(key) : null
if (oldIndex == null) {
@@ -117,6 +128,8 @@ function updateChildren (parentElement, oldChildren, newChildren, options) {
if (child) removeVirtualNode(child, options && options.refs)
}
}
+
+ if (oldIndicesByKey) mapPool.push(oldIndicesByKey)
}
function removeVirtualNode (virtualNode, refs, removeDOMNode = true) {
@@ -148,8 +161,7 @@ function getKey (virtualNode) {
return virtualNode.props ? virtualNode.props.key : undefined
}
-function mapOldKeysToIndices (children, startIndex, endIndex) {
- let oldIndicesByKey = new Map()
+function mapOldKeysToIndices (oldIndicesByKey, children, startIndex, endIndex) {
for (let i = startIndex; i <= endIndex; i++) {
const key = getKey(children[i])
if (key) oldIndicesByKey.set(key, i)
|
Use a map pool to minimize allocations in updateChildren
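The same pooling idea in a minimal Python sketch (illustrative; etch itself is JavaScript):

# Keep a small pool of reusable dicts so the hot diffing path does not
# allocate a fresh map on every keyed-children pass.
_map_pool = [dict() for _ in range(4)]

def acquire_map():
    if _map_pool:
        m = _map_pool.pop()
        m.clear()  # drop entries left over from the previous pass
        return m
    return {}

def release_map(m):
    _map_pool.append(m)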
|
atom_etch
|
train
|
8634d4f8ad266fa5dd6eaa05b1f7c1ce925ec27b
|
diff --git a/examples/server.py b/examples/server.py
index <HASH>..<HASH> 100644
--- a/examples/server.py
+++ b/examples/server.py
@@ -465,13 +465,15 @@ class ServerHandler(BaseHTTPRequestHandler):
<Service priority="0">
<Type>%s</Type>
+ <Type>%s</Type>
<URI>%s</URI>
<LocalID>%s</LocalID>
</Service>
</XRD>
</xrds:XRDS>
-"""%(discover.OPENID_2_0_TYPE, endpoint_url, user_url))
+"""%(discover.OPENID_2_0_TYPE, discover.OPENID_1_0_TYPE,
+ endpoint_url, user_url))
def showServerYadis(self):
self.send_response(200)
|
[project @ examples/server.py: add /signon/<I> type to user XRDS]
|
necaris_python3-openid
|
train
|
ca2abb0f10d2d37d878ca908f49d14eb4ff6b769
|
diff --git a/scripts/distributed_nextgen_pipeline.py b/scripts/distributed_nextgen_pipeline.py
index <HASH>..<HASH> 100644
--- a/scripts/distributed_nextgen_pipeline.py
+++ b/scripts/distributed_nextgen_pipeline.py
@@ -45,11 +45,28 @@ def start_workers(cluster, config, config_file):
return jobids
def run_analysis(config_file, fc_dir, run_info_yaml, cluster, config):
- args = config["distributed"]["platform_args"].split()
+ # get arguments, ignoring need for multiple cores in arguments
+ # since analysis is single threaded runner
+ args = []
+ ignore = False
+ for a in config["distributed"]["platform_args"].split():
+ if ignore:
+ ignore = False
+ elif a == "-n":
+ ignore = True
+ else:
+ args.append(a)
program_cl = [config["analysis"]["process_program"], config_file, fc_dir]
if run_info_yaml:
program_cl.append(run_info_yaml)
jobid = cluster.submit_job(args, program_cl)
+ try:
+ _monitor_analysis(cluster, jobid)
+ except:
+ stop_workers(cluster, [jobid])
+ raise
+
+def _monitor_analysis(cluster, jobid):
# wait for job to start
while not(cluster.are_running([jobid])):
time.sleep(5)
|
Run job manager on single cluster node; cleanup for killed jobs
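A standalone Python sketch of the flag-stripping loop from run_analysis (same logic, hypothetical helper name):

def strip_cores_flag(platform_args):
    # Drop "-n <cores>" since the analysis runner is single threaded.
    args, skip_next = [], False
    for a in platform_args.split():
        if skip_next:
            skip_next = False
        elif a == "-n":
            skip_next = True
        else:
            args.append(a)
    return args

# strip_cores_flag("-q batch -n 8 -W 60") -> ['-q', 'batch', '-W', '60']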
|
bcbio_bcbio-nextgen
|
train
|
be07bb7a3986678e5424db7be8717a1685f67ad1
|
diff --git a/test/Util.js b/test/Util.js
index <HASH>..<HASH> 100644
--- a/test/Util.js
+++ b/test/Util.js
@@ -68,6 +68,9 @@ exports.fillMap = function (map, size, keyPrefix, valuePrefix) {
};
exports.markEnterprise = function (_this) {
+ if (process.env.SERVER_TYPE === 'oss' || process.env.HZ_TYPE === 'oss') {
+ _this.skip();
+ }
if(!process.env.HAZELCAST_ENTERPRISE_KEY){
_this.skip();
}
@@ -76,7 +79,7 @@ exports.markEnterprise = function (_this) {
exports.markServerVersionAtLeast = function (_this, client, expectedVersion) {
var actNumber = client.getClusterService().getOwnerConnection().getConnectedServerVersion();
var expNumber = BuildMetadata.calculateVersion(expectedVersion);
- if (actNumber < expNumber) {
+ if (actNumber === BuildMetadata.UNKNOWN_VERSION_ID || actNumber < expNumber) {
_this.skip();
}
};
|
adds additional oss/enterprise test check
|
hazelcast_hazelcast-nodejs-client
|
train
|
7e06b6ccacae7e75416a39d6e34d25040151699e
|
diff --git a/pcapfile/protocols/linklayer/ethernet.py b/pcapfile/protocols/linklayer/ethernet.py
index <HASH>..<HASH> 100644
--- a/pcapfile/protocols/linklayer/ethernet.py
+++ b/pcapfile/protocols/linklayer/ethernet.py
@@ -6,8 +6,6 @@ import binascii
import ctypes
import struct
-import pcapfile.structs
-
class Ethernet(ctypes.Structure):
"""
@@ -23,8 +21,10 @@ class Ethernet(ctypes.Structure):
def __init__(self, packet, layers=0):
(dst, src, self.type) = struct.unpack('!6s6sH', packet[:14])
- self.dst = b':'.join([('%02x' % octet).encode('ascii') for octet in bytearray(dst)])
- self.src = b':'.join([('%02x' % octet).encode('ascii') for octet in bytearray(src)])
+ dst = bytearray(dst)
+ src = bytearray(src)
+ self.dst = b':'.join([('%02x' % o).encode('ascii') for o in dst])
+ self.src = b':'.join([('%02x' % o).encode('ascii') for o in src])
payload = binascii.hexlify(packet[14:])
self.payload = payload
diff --git a/pcapfile/protocols/network/ip.py b/pcapfile/protocols/network/ip.py
index <HASH>..<HASH> 100644
--- a/pcapfile/protocols/network/ip.py
+++ b/pcapfile/protocols/network/ip.py
@@ -30,12 +30,9 @@ class IP(ctypes.Structure):
def __init__(self, packet, layers=0):
# parse the required header first, deal with options later
- try: # python2
- magic = struct.unpack('B', packet[0])[0]
- except TypeError: # python3
- magic = packet[0]
- assert ((magic & 0b1100) == 4
- and (magic & 0b0111) > 4), 'not an IPv4 packet.'
+ magic = int(bytearray(packet)[0])
+ assert ((magic & 0b1100) == 4 and
+ (magic & 0b0111) > 4), 'not an IPv4 packet.'
fields = struct.unpack('!BBHHHBBHII', packet[:20])
self.v = fields[0] & 0b1100
@@ -74,7 +71,7 @@ def parse_ipv4(address):
"""
raw = struct.pack('I', address)
octets = struct.unpack('BBBB', raw)[::-1]
- ipv4 = b'.'.join([('%d'% b).encode('ascii') for b in bytearray(octets)])
+ ipv4 = b'.'.join([('%d' % o).encode('ascii') for o in bytearray(octets)])
return ipv4
diff --git a/pcapfile/savefile.py b/pcapfile/savefile.py
index <HASH>..<HASH> 100644
--- a/pcapfile/savefile.py
+++ b/pcapfile/savefile.py
@@ -81,9 +81,9 @@ Load and validate the header of a pcap file.
try:
raw_savefile_header = file_h.read(24)
except UnicodeDecodeError:
- print("\nMake sure the input file is opened in (r)ead (b)inary, 'rb'\n")
+ print("\nMake sure the input file is opened in read binary, 'rb'\n")
raise
-
+
# in case the capture file is not the same endianness as ours, we have to
# use the correct byte order for the file header
if raw_savefile_header[:4] == b'\xa1\xb2\xc3\xd4':
diff --git a/pcapfile/test/savefile_test.py b/pcapfile/test/savefile_test.py
index <HASH>..<HASH> 100644
--- a/pcapfile/test/savefile_test.py
+++ b/pcapfile/test/savefile_test.py
@@ -8,24 +8,25 @@ import pickle
import tempfile
import unittest
import base64
+import sys
import pcapfile.test.fixture as fixture
from pcapfile import savefile
+
def create_pcap():
- """Create a capture file from the test fixtures."""
+ """
+ Create a capture file from the test fixtures.
+ """
tfile = tempfile.NamedTemporaryFile()
- try: # python3
- Y = fixture.TESTPCAP3
- X = base64.b64decode(Y)
- capture = pickle.loads(X)
- except ValueError: # python2 unsupported pickle protocol: 3
- Y = fixture.TESTPCAP2
- X = Y.decode('base64')
- capture = pickle.loads(X)
+ if sys.version_info[0] >= 3: # python3
+ capture = pickle.loads(base64.b64decode(fixture.TESTPCAP3))
+ else: # python2 unsupported pickle protocol: 3
+ capture = pickle.loads(fixture.TESTPCAP2.decode('base64'))
open(tfile.name, 'wb').write(capture)
return tfile
+
class TestCase(unittest.TestCase):
"""
Test case for the savefile code.
@@ -82,11 +83,8 @@ class TestCase(unittest.TestCase):
Make sure raw packets load properly.
"""
packet = self.capfile.packets[0].raw()
- if isinstance(packet[14], int): # python3
- expected = 69
- else:# python2
- expected = b'\x45'
- self.assertEqual(packet[14], expected, 'invalid packet')
+
+ self.assertEqual(int(bytearray(packet)[14]), 69, 'invalid packet')
for packet in self.capfile.packets:
for field in ['capture_len', 'timestamp', 'timestamp_ms',
|
Code changes as suggested by pep8 and pylint
|
kisom_pypcapfile
|
train
|
73ec39cd7ef8f7bf82be4f64f805f1df6042e638
|
diff --git a/lib/amee/data_item.rb b/lib/amee/data_item.rb
index <HASH>..<HASH> 100644
--- a/lib/amee/data_item.rb
+++ b/lib/amee/data_item.rb
@@ -7,6 +7,7 @@ module AMEE
@choices = data[:choices]
@label = data[:label]
@item_definition = data[:item_definition]
+ @total_amount = data[:total_amount]
super
end
@@ -14,6 +15,7 @@ module AMEE
attr_reader :choices
attr_reader :label
attr_reader :item_definition
+ attr_reader :total_amount
def self.from_json(json)
# Read JSON
@@ -26,6 +28,7 @@ module AMEE
data[:path] = doc['path']
data[:label] = doc['dataItem']['label']
data[:item_definition] = doc['dataItem']['itemDefinition']['uid']
+ data[:total_amount] = doc['amountPerMonth']
# Get values
data[:values] = []
doc['dataItem']['itemValues'].each do |value|
@@ -61,6 +64,7 @@ module AMEE
data[:path] = (REXML::XPath.first(doc, '/Resources/DataItemResource/Path') || REXML::XPath.first(doc, '/Resources/DataItemResource/DataItem/path')).text
data[:label] = (REXML::XPath.first(doc, '/Resources/DataItemResource/DataItem/Label') || REXML::XPath.first(doc, '/Resources/DataItemResource/DataItem/label')).text
data[:item_definition] = REXML::XPath.first(doc, '/Resources/DataItemResource/DataItem/ItemDefinition/@uid').to_s
+ data[:total_amount] = REXML::XPath.first(doc, '/Resources/DataItemResource/AmountPerMonth').text.to_f
# Get values
data[:values] = []
REXML::XPath.each(doc, '/Resources/DataItemResource/DataItem/ItemValues/ItemValue') do |value|
@@ -86,9 +90,9 @@ module AMEE
end
- def self.get(connection, path)
+ def self.get(connection, path, options = {})
# Load data from path
- response = connection.get(path)
+ response = connection.get(path, options)
# Parse data from response
if response.is_json?
item = Item.from_json(response)
|
Add support for on-the-fly calculations
|
OpenAMEE_amee-ruby
|
train
|
5ad24556036602f1fb9a5426c89420b79815ddbd
|
diff --git a/vexbot/argenvconfig.py b/vexbot/argenvconfig.py
index <HASH>..<HASH> 100644
--- a/vexbot/argenvconfig.py
+++ b/vexbot/argenvconfig.py
@@ -49,7 +49,10 @@ class ArgEnvConfig:
def get(self, value):
args = self._arg.parse_args()
- result = getattr(args, value)
+ try:
+ result = getattr(args, value)
+ except AttributeError:
+ result = None
if result is None:
key = self._environ.get(value)
if key:
|
added error handling for AttributeError in get value command
|
benhoff_vexbot
|
train
|
7472411cbebe1bfb24678d52045f946b372f47d5
|
diff --git a/emoticons/templatetags/emoticons_tags.py b/emoticons/templatetags/emoticons_tags.py
index <HASH>..<HASH> 100644
--- a/emoticons/templatetags/emoticons_tags.py
+++ b/emoticons/templatetags/emoticons_tags.py
@@ -9,7 +9,7 @@ from emoticons.settings import EMOTICONS_COMPILED
register = template.Library()
-def replace_emoticons(content):
+def replace_emoticons(content, excluded_markups):
"""
Replace the emoticons string by HTML images.
"""
@@ -23,12 +23,13 @@ class EmoticonNode(template.Node):
"""
Node for applying ``replace_emoticons`` on content.
"""
- def __init__(self, nodelist):
+ def __init__(self, nodelist, exclude):
self.nodelist = nodelist
+ self.exclude = exclude
def render(self, context):
content = self.nodelist.render(context)
- return replace_emoticons(content)
+ return replace_emoticons(content, self.exclude)
@register.tag('emoticons')
@@ -36,18 +37,26 @@ def emoticons_tag(parser, token):
"""
Tag for rendering emoticons.
"""
+ exclude = ''
+ args = token.split_contents()
+ if len(args) == 2:
+ exclude = args[1]
+ elif len(args) > 2:
+ raise template.TemplateSyntaxError(
+ 'emoticons tag has only one optional argument')
+
nodelist = parser.parse(['endemoticons'])
parser.delete_first_token()
- return EmoticonNode(nodelist)
+ return EmoticonNode(nodelist, exclude)
@register.filter('emoticons', needs_autoescape=True)
-def emoticons_filter(content, autoescape=None):
+def emoticons_filter(content, exclude='', autoescape=None):
"""
Filter for rendering emoticons.
"""
esc = autoescape and conditional_escape or (lambda x: x)
- content = mark_safe(replace_emoticons(esc(content)))
+ content = mark_safe(replace_emoticons(esc(content), exclude))
return content
|
Update the API of the tag and the filter to handle the excluded markups
|
Fantomas42_django-emoticons
|
train
|