hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
0ef4af44ce140b60819bc1e7f46b53990b523457
|
diff --git a/js/vars.go b/js/vars.go
index <HASH>..<HASH> 100644
--- a/js/vars.go
+++ b/js/vars.go
@@ -78,15 +78,16 @@ func (r *renamer) isReserved(name []byte, undeclared js.VarArray) bool {
func (r *renamer) getIndex(name []byte) int {
index := 0
NameLoop:
- for i, b := range name {
+ for i := len(name) - 1; 0 <= i; i-- {
chars := r.identContinue
if i == 0 {
chars = r.identStart
+ index *= len(r.identStart)
} else {
index *= len(r.identContinue)
}
for j, c := range chars {
- if b == c {
+ if name[i] == c {
index += j
continue NameLoop
}
@@ -132,11 +133,12 @@ func (r *renamer) getName(name []byte, index int) []byte {
} else {
name = name[:n]
}
- for j := n - 1; 0 < j; j-- {
- name[j] = r.identContinue[index%len(r.identContinue)]
+ name[0] = r.identStart[index%len(r.identStart)]
+ index /= len(r.identStart)
+ for i := 1; i < n; i++ {
+ name[i] = r.identContinue[index%len(r.identContinue)]
index /= len(r.identContinue)
}
- name[0] = r.identStart[index]
return name
}
|
JS: change character order for JS variable renaming of 2 or more characters, improves GZIP compression
|
tdewolff_minify
|
train
|
0c4b0df8f5a088d3bb16a8e9d7b89b44e9c0be90
|
diff --git a/opengl/src/main/java/com/doctoror/particlesdrawable/opengl/GlParticlesView.java b/opengl/src/main/java/com/doctoror/particlesdrawable/opengl/GlParticlesView.java
index <HASH>..<HASH> 100644
--- a/opengl/src/main/java/com/doctoror/particlesdrawable/opengl/GlParticlesView.java
+++ b/opengl/src/main/java/com/doctoror/particlesdrawable/opengl/GlParticlesView.java
@@ -425,17 +425,6 @@ public class GlParticlesView extends GLSurfaceView implements
}
@Override
- protected void onSizeChanged(final int w, final int h, final int oldw, final int oldh) {
- super.onSizeChanged(w, h, oldw, oldh);
- queueEvent(new Runnable() {
- @Override
- public void run() {
- presenter.setBounds(0, 0, w, h);
- }
- });
- }
-
- @Override
public void scheduleNextFrame(final long delay) {
if (delay == 0) {
requestRender();
@@ -492,6 +481,7 @@ public class GlParticlesView extends GLSurfaceView implements
@Override
public void onSurfaceChanged(@NonNull final GL10 gl, final int width, final int height) {
+ presenter.setBounds(0, 0, width, height);
renderer.setDimensions(width, height);
backgroundColorDirty = true;
backgroundTextureDirty = true;
|
setBounds of presenter onSurfaceChanged
|
Doctoror_ParticlesDrawable
|
train
|
0f71256996cce65b0beee3a51262e2f32c1ba724
|
diff --git a/lib/visit.js b/lib/visit.js
index <HASH>..<HASH> 100644
--- a/lib/visit.js
+++ b/lib/visit.js
@@ -26,12 +26,16 @@ var runtimeValuesMethod = runtimeProperty("values");
var runtimeAsyncMethod = runtimeProperty("async");
exports.transform = function transform(node, options) {
+ var visitor = types.PathVisitor.fromMethodsObject(visitorMethods);
+
node = recast.visit(node, visitor);
- if (options && options.includeRuntime) {
+ if (options && options.includeRuntime && visitor.wasChangeReported()) {
injectRuntime(n.File.check(node) ? node.program : node);
}
+ options.madeChanges = visitor.wasChangeReported();
+
return node;
};
@@ -50,7 +54,7 @@ function injectRuntime(program) {
body.unshift.apply(body, runtimeBody);
}
-var visitor = types.PathVisitor.fromMethodsObject({
+var visitorMethods = {
visitFunction: function(path) {
// Calling this.traverse(path) first makes for a post-order traversal.
this.traverse(path);
@@ -61,6 +65,8 @@ var visitor = types.PathVisitor.fromMethodsObject({
return;
}
+ this.reportChanged();
+
node.generator = false;
if (node.expression) {
@@ -296,7 +302,7 @@ var visitor = types.PathVisitor.fromMethodsObject({
node.body
);
}
-});
+};
function shouldNotHoistAbove(stmtPath) {
var value = stmtPath.value;
|
Report back if any changes was made to AST. Fixes #<I>
This is done by using the proposed ast-types API from: benjamn/ast-types#<I>
|
facebook_regenerator
|
train
|
f641b0a5dd9d923b8485feb4bc14165b60ed0efb
|
diff --git a/lib/argument-modification.js b/lib/argument-modification.js
index <HASH>..<HASH> 100644
--- a/lib/argument-modification.js
+++ b/lib/argument-modification.js
@@ -111,10 +111,13 @@ class ArgumentModification {
createRecorderClass (controller) {
const globalScope = this.options.globalScope;
const globalScopeBlockEspath = findEspathOfAncestorNode(globalScope.block, controller);
- const createNode = newNodeWithLocationCopyOf(globalScope.block);
const ctorName = this.options.transformation.generateUniqueName('PowerAssertRecorder');
+ const classDef = updateLocRecursively(espurify(recorderClassAst), {
+ loc: globalScope.block.loc,
+ range: globalScope.block.range,
+ visitorKeys: this.options.visitorKeys
+ });
const types = new NodeCreator(globalScope.block);
- const classDef = updateLocRecursively(espurify(recorderClassAst), createNode, this.options.visitorKeys);
const decl = types.variableDeclaration('var', [
types.variableDeclarator(types.identifier(ctorName), classDef)
]);
@@ -127,8 +130,8 @@ class ArgumentModification {
}
class NodeCreator {
- constructor (locationNode) {
- this.createNode = newNodeWithLocationCopyOf(locationNode);
+ constructor ({ loc, range }) {
+ this.createNode = newNodeWithLocation({ loc, range });
}
identifier (name) {
return this.createNode({
@@ -212,7 +215,8 @@ class NoModification {
}
}
-const updateLocRecursively = (node, n, visitorKeys) => {
+const updateLocRecursively = (node, { loc, range, visitorKeys }) => {
+ const n = newNodeWithLocation({ loc, range });
const visitor = {
leave: function (currentNode, parentNode) {
return n(currentNode);
@@ -232,26 +236,26 @@ const isPathIdentical = (path1, path2) => {
return path1.join('/') === path2.join('/');
};
-const newNodeWithLocationCopyOf = (original) => {
+const newNodeWithLocation = ({ loc, range }) => {
return (newNode) => {
- if (typeof original.loc !== 'undefined') {
+ if (typeof loc !== 'undefined') {
const newLoc = {
start: {
- line: original.loc.start.line,
- column: original.loc.start.column
+ line: loc.start.line,
+ column: loc.start.column
},
end: {
- line: original.loc.end.line,
- column: original.loc.end.column
+ line: loc.end.line,
+ column: loc.end.column
}
};
- if (typeof original.loc.source !== 'undefined') {
- newLoc.source = original.loc.source;
+ if (typeof loc.source !== 'undefined') {
+ newLoc.source = loc.source;
}
newNode.loc = newLoc;
}
- if (Array.isArray(original.range)) {
- newNode.range = [original.range[0], original.range[1]];
+ if (Array.isArray(range)) {
+ newNode.range = [range[0], range[1]];
}
return newNode;
};
|
refactor: making node creation logic abstract
bahevior preserving refactoring before big feature addition
|
power-assert-js_espower
|
train
|
3ee26d69c228aefc7ee7468d0577d7deffc6f06d
|
diff --git a/buildfox.py b/buildfox.py
index <HASH>..<HASH> 100755
--- a/buildfox.py
+++ b/buildfox.py
@@ -295,7 +295,7 @@ if args.get("selftest"):
engine.save(ninja_filename)
result = not subprocess.call(["ninja", "-f", ninja_filename])
if result:
- result = not subprocess.call([app_filename])
+ result = not subprocess.call(["./" + app_filename])
if result:
print("Selftest - ok")
selftest_wipe()
|
Fix call to selftest app on unix
|
beardsvibe_buildfox
|
train
|
2abca536d0453bee510d762e799564d4aad9012b
|
diff --git a/packages/openneuro-server/app.js b/packages/openneuro-server/app.js
index <HASH>..<HASH> 100644
--- a/packages/openneuro-server/app.js
+++ b/packages/openneuro-server/app.js
@@ -72,7 +72,7 @@ export default test => {
// Apollo server setup
const apolloServer = new ApolloServer({
schema,
- context: req => {
+ context: ({ req }) => {
if (req.isAuthenticated()) {
return {
user: req.user.id,
@@ -88,20 +88,7 @@ export default test => {
app.use('/crn/graphql', jwt.authenticate, auth.optional)
// Inject Apollo Server
- apolloServer.applyMiddleware({ app })
-
- const websocketUrl = process.browser ? config.url.replace('http', 'ws') : null
- const subscriptionUrl = websocketUrl
- ? `${websocketUrl}/graphql-subscriptions`
- : null
- // GraphiQL, a visual editor for queries
- app.use(
- '/crn/graphiql',
- graphiqlExpress({
- endpointURL: '/crn/graphql',
- subscriptionsEndpoint: subscriptionUrl,
- }),
- )
+ apolloServer.applyMiddleware({ app, path: '/crn/graphql' })
return app
}
|
Fix setup on correct graphql path (/crn/graphql)
|
OpenNeuroOrg_openneuro
|
train
|
522c1c676781a0d5e91c9c8bcd07746795187afc
|
diff --git a/src/com/google/javascript/jscomp/CompilerInput.java b/src/com/google/javascript/jscomp/CompilerInput.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/jscomp/CompilerInput.java
+++ b/src/com/google/javascript/jscomp/CompilerInput.java
@@ -35,7 +35,6 @@ import com.google.javascript.rhino.InputId;
import com.google.javascript.rhino.Node;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
@@ -162,16 +161,16 @@ public class CompilerInput implements SourceAst, DependencyInfo {
/** Gets a list of types depended on by this input. */
@Override
- public Collection<String> getRequires() {
+ public ImmutableList<String> getRequires() {
if (hasFullParseDependencyInfo) {
- return orderedRequires;
+ return ImmutableList.copyOf(orderedRequires);
}
return getDependencyInfo().getRequires();
}
@Override
- public ImmutableCollection<String> getWeakRequires() {
+ public ImmutableList<String> getWeakRequires() {
return getDependencyInfo().getWeakRequires();
}
@@ -180,7 +179,7 @@ public class CompilerInput implements SourceAst, DependencyInfo {
* but does not attempt to regenerate the dependency information.
* Typically this occurs from module rewriting.
*/
- Collection<String> getKnownRequires() {
+ ImmutableCollection<String> getKnownRequires() {
return concat(
dependencyInfo != null ? dependencyInfo.getRequires() : ImmutableList.<String>of(),
extraRequires);
@@ -188,7 +187,7 @@ public class CompilerInput implements SourceAst, DependencyInfo {
/** Gets a list of types provided by this input. */
@Override
- public Collection<String> getProvides() {
+ public ImmutableList<String> getProvides() {
return getDependencyInfo().getProvides();
}
@@ -197,7 +196,7 @@ public class CompilerInput implements SourceAst, DependencyInfo {
* regenerate the dependency information. Typically this occurs
* from module rewriting.
*/
- Collection<String> getKnownProvides() {
+ ImmutableCollection<String> getKnownProvides() {
return concat(
dependencyInfo != null ? dependencyInfo.getProvides() : ImmutableList.<String>of(),
extraProvides);
diff --git a/src/com/google/javascript/jscomp/LazyParsedDependencyInfo.java b/src/com/google/javascript/jscomp/LazyParsedDependencyInfo.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/jscomp/LazyParsedDependencyInfo.java
+++ b/src/com/google/javascript/jscomp/LazyParsedDependencyInfo.java
@@ -18,13 +18,12 @@ package com.google.javascript.jscomp;
import static com.google.common.base.Preconditions.checkNotNull;
-import com.google.common.collect.ImmutableCollection;
+import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.javascript.jscomp.deps.DependencyInfo;
import com.google.javascript.jscomp.deps.ModuleLoader;
import com.google.javascript.jscomp.parsing.parser.FeatureSet;
import com.google.javascript.jscomp.parsing.parser.FeatureSet.Feature;
-import java.util.Collection;
import java.util.Map;
import java.util.TreeMap;
@@ -78,17 +77,17 @@ public class LazyParsedDependencyInfo implements DependencyInfo {
}
@Override
- public Collection<String> getRequires() {
+ public ImmutableList<String> getRequires() {
return delegate.getRequires();
}
@Override
- public ImmutableCollection<String> getWeakRequires() {
+ public ImmutableList<String> getWeakRequires() {
return delegate.getWeakRequires();
}
@Override
- public Collection<String> getProvides() {
+ public ImmutableList<String> getProvides() {
return delegate.getProvides();
}
diff --git a/src/com/google/javascript/jscomp/deps/DependencyInfo.java b/src/com/google/javascript/jscomp/deps/DependencyInfo.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/jscomp/deps/DependencyInfo.java
+++ b/src/com/google/javascript/jscomp/deps/DependencyInfo.java
@@ -18,7 +18,7 @@ package com.google.javascript.jscomp.deps;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
-import com.google.common.collect.ImmutableCollection;
+import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import java.io.IOException;
@@ -42,13 +42,13 @@ public interface DependencyInfo extends Serializable {
String getPathRelativeToClosureBase();
/** Gets the symbols provided by this file. */
- Collection<String> getProvides();
+ ImmutableList<String> getProvides();
/** Gets the symbols required by this file. */
- Collection<String> getRequires();
+ ImmutableList<String> getRequires();
/** Gets the symbols weakly required by this file. (i.e. for typechecking only) */
- ImmutableCollection<String> getWeakRequires();
+ ImmutableList<String> getWeakRequires();
/** Gets the loading information for this file. */
ImmutableMap<String, String> getLoadFlags();
|
Specify that the results of DependencyInfo class are immutable types.
Since all of the implementations are lists, make them all ImmutableList
for consistency.
-------------
Created by MOE: <URL>
|
google_closure-compiler
|
train
|
7c745800badd9774c9210967a36b4313c3f6896c
|
diff --git a/src/main/java/com/extjs/selenium/grid/GridPanel.java b/src/main/java/com/extjs/selenium/grid/GridPanel.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/extjs/selenium/grid/GridPanel.java
+++ b/src/main/java/com/extjs/selenium/grid/GridPanel.java
@@ -158,7 +158,7 @@ public class GridPanel extends Panel implements ITable<GridRow, GridCell> {
String id = getAttrId();
return scrollTop(id);
}
- public boolean scrollTop(String id) {
+ protected boolean scrollTop(String id) {
String script = "return (function(g){var a=g.view.scroller;if(a.dom.scrollTop!=0){a.dom.scrollTop=0;return true}return false})(window.Ext.getCmp('" + id + "'))";
return executeScrollScript("scrollTop", script);
}
@@ -189,7 +189,7 @@ public class GridPanel extends Panel implements ITable<GridRow, GridCell> {
String id = getAttrId();
return scrollPageDown(id);
}
- public boolean scrollPageDown(String id) {
+ protected boolean scrollPageDown(String id) {
String script = "return (function(c){var a=c.view,b=a.scroller;if(b.dom.scrollTop<(a.mainBody.getHeight()-b.getHeight())){b.dom.scrollTop+=b.getHeight()-10;return true}return false})(window.Ext.getCmp('" + id + "'))";
return executeScrollScript("scrollPageDown", script);
}
diff --git a/src/main/java/com/extjs/selenium/grid/LiveGridPanel.java b/src/main/java/com/extjs/selenium/grid/LiveGridPanel.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/extjs/selenium/grid/LiveGridPanel.java
+++ b/src/main/java/com/extjs/selenium/grid/LiveGridPanel.java
@@ -47,11 +47,10 @@ public class LiveGridPanel extends GridPanel {
*/
public boolean scrollTop() {
String id = getAttributeId();
- String script = "return (function(c){var a=c.view,b=a.liveScroller,d=a.liveScrollerInsets,n=d.length,h=n*d[0].style.height.replace('px','');if(b.dom.style.display=='none'){return false}if(b.dom.scrollTop!=0){b.dom.scrollTop=0;return true}return false})(window.Ext.getCmp('" + id + "'))";
- return executeScrollScript("scrollTop", script);
+ return scrollTop(id);
}
- public boolean scrollTop(String id) {
+ protected boolean scrollTop(String id) {
String script = "return (function(c){var a=c.view,b=a.liveScroller,d=a.liveScrollerInsets,n=d.length,h=n*d[0].style.height.replace('px','');if(b.dom.style.display=='none'){return false}if(b.dom.scrollTop!=0){b.dom.scrollTop=0;return true}return false})(window.Ext.getCmp('" + id + "'))";
return executeScrollScript("scrollTop", script);
}
@@ -94,11 +93,10 @@ public class LiveGridPanel extends GridPanel {
*/
public boolean scrollPageDown() {
String id = getAttributeId();
- String script = "return (function(c){var a=c.view,b=a.liveScroller,d=a.liveScrollerInsets,n=d.length,h=n*d[0].style.height.replace('px','');if(b.dom.style.display=='none'){return false}if(b.dom.scrollTop<(h-b.getHeight()-1)){b.dom.scrollTop+=b.getHeight()-10;return true}return false})(window.Ext.getCmp('" + id + "'))";
- return executeScrollScript("scrollPageDown", script);
+ return scrollPageDown(id);
}
- public boolean scrollPageDown(String id) {
+ protected boolean scrollPageDown(String id) {
String script = "return (function(c){var a=c.view,b=a.liveScroller,d=a.liveScrollerInsets,n=d.length,h=n*d[0].style.height.replace('px','');if(b.dom.style.display=='none'){return false}if(b.dom.scrollTop<(h-b.getHeight()-1)){b.dom.scrollTop+=b.getHeight()-10;return true}return false})(window.Ext.getCmp('" + id + "'))";
return executeScrollScript("scrollPageDown", script);
}
|
improvement scrollPageDown(String id) and scrollTop(String id) on LiveGridPanel
|
sdl_Testy
|
train
|
c94749356bc35b10136d80762a21d9108cd334bc
|
diff --git a/src/mesh/Mesh.js b/src/mesh/Mesh.js
index <HASH>..<HASH> 100644
--- a/src/mesh/Mesh.js
+++ b/src/mesh/Mesh.js
@@ -227,9 +227,9 @@ Mesh.prototype._renderCanvasTriangles = function (context)
*/
Mesh.prototype._renderCanvasDrawTriangle = function (context, vertices, uvs, index0, index1, index2)
{
- var textureSource = this.texture.baseTexture.source;
- var textureWidth = this.texture.baseTexture.width;
- var textureHeight = this.texture.baseTexture.height;
+ var textureSource = this._texture.baseTexture.source;
+ var textureWidth = this._texture.baseTexture.width;
+ var textureHeight = this._texture.baseTexture.height;
var x0 = vertices[index0], x1 = vertices[index1], x2 = vertices[index2];
var y0 = vertices[index0 + 1], y1 = vertices[index1 + 1], y2 = vertices[index2 + 1];
diff --git a/src/mesh/Rope.js b/src/mesh/Rope.js
index <HASH>..<HASH> 100644
--- a/src/mesh/Rope.js
+++ b/src/mesh/Rope.js
@@ -65,7 +65,7 @@ Rope.prototype.refresh = function ()
var points = this.points;
// if too little points, or texture hasn't got UVs set yet just move on.
- if (points.length < 1 || !this.texture._uvs)
+ if (points.length < 1 || !this._texture._uvs)
{
return;
}
@@ -75,7 +75,7 @@ Rope.prototype.refresh = function ()
var indices = this.indices;
var colors = this.colors;
- var textureUvs = this.texture._uvs;
+ var textureUvs = this._texture._uvs;
var offset = new core.math.Point(textureUvs.x0, textureUvs.y0);
var factor = new core.math.Point(textureUvs.x2 - textureUvs.x0, textureUvs.y2 - textureUvs.y0);
@@ -180,7 +180,7 @@ Rope.prototype.updateTransform = function ()
}
perpLength = Math.sqrt(perpX * perpX + perpY * perpY);
- num = this.texture.height / 2; //(20 + Math.abs(Math.sin((i + this.count) * 0.3) * 50) )* ratio;
+ num = this._texture.height / 2; //(20 + Math.abs(Math.sin((i + this.count) * 0.3) * 50) )* ratio;
perpX /= perpLength;
perpY /= perpLength;
diff --git a/src/mesh/webgl/MeshRenderer.js b/src/mesh/webgl/MeshRenderer.js
index <HASH>..<HASH> 100644
--- a/src/mesh/webgl/MeshRenderer.js
+++ b/src/mesh/webgl/MeshRenderer.js
@@ -77,7 +77,7 @@ MeshRenderer.prototype.render = function (mesh)
var renderer = this.renderer,
gl = renderer.gl,
- texture = mesh.texture.baseTexture,
+ texture = mesh._texture.baseTexture,
shader = renderer.shaderManager.plugins.meshShader;
var drawMode = mesh.drawMode === Mesh.DRAW_MODES.TRIANGLE_MESH ? gl.TRIANGLE_STRIP : gl.TRIANGLES;
|
optimize texture property access to bypass the getter
|
pixijs_pixi.js
|
train
|
5fcb6aa1e92d55a82ae44a9602644e2784e9c83f
|
diff --git a/pronouncing/__init__.py b/pronouncing/__init__.py
index <HASH>..<HASH> 100755
--- a/pronouncing/__init__.py
+++ b/pronouncing/__init__.py
@@ -19,12 +19,13 @@ def parse_cmu(cmufh):
:returns: a list of 2-tuples pairing a word with its phones (as a string)
"""
pronunciations = list()
+ regexp = re.compile(r'\(\d\)$')
for line in cmufh:
line = line.strip().decode('latin1')
if line.startswith(';'):
continue
word, phones = line.split(" ")
- word = re.sub(r'\(\d\)$', '', word.lower())
+ word = regexp.sub('', word.lower())
pronunciations.append((word.lower(), phones))
return pronunciations
@@ -167,8 +168,9 @@ def search(pattern):
"""
init_cmu()
matches = list()
+ regexp = re.compile(r"\b" + pattern + r"\b")
for word, phones in pronunciations:
- if re.search(r"\b" + pattern + r"\b", phones):
+ if regexp.search(phones):
matches.append(word)
return matches
@@ -191,8 +193,9 @@ def search_stresses(pattern):
"""
init_cmu()
matches = list()
+ regexp = re.compile(pattern)
for word, phones in pronunciations:
- if re.search(pattern, stresses(phones)):
+ if regexp.search(stresses(phones)):
matches.append(word)
return matches
|
Precompile regexps before using them in a loop.
|
aparrish_pronouncingpy
|
train
|
82b6100e0267ca28401e4abb69ca6166bdd7055b
|
diff --git a/src/TestSuite/IntegrationTestCase.php b/src/TestSuite/IntegrationTestCase.php
index <HASH>..<HASH> 100644
--- a/src/TestSuite/IntegrationTestCase.php
+++ b/src/TestSuite/IntegrationTestCase.php
@@ -51,6 +51,13 @@ abstract class IntegrationTestCase extends TestCase
protected $_response;
/**
+ * The exception being thrown if the case.
+ *
+ * @var \Cake\Core\Exception\Exception
+ */
+ protected $_exception;
+
+ /**
* Session data to use in the next request.
*
* @var array
@@ -115,6 +122,7 @@ abstract class IntegrationTestCase extends TestCase
$this->_session = [];
$this->_cookie = [];
$this->_response = null;
+ $this->_exception = null;
$this->_controller = null;
$this->_viewName = null;
$this->_layoutName = null;
@@ -280,6 +288,7 @@ abstract class IntegrationTestCase extends TestCase
} catch (\PHPUnit_Exception $e) {
throw $e;
} catch (\Exception $e) {
+ $this->_exception = $e;
$this->_handleError($e);
}
}
@@ -445,6 +454,11 @@ abstract class IntegrationTestCase extends TestCase
$this->fail('No response set, cannot assert status code.');
}
$status = $this->_response->statusCode();
+
+ if ($this->_exception && ($status < $min || $status > $max)) {
+ throw $this->_exception;
+ }
+
$this->assertGreaterThanOrEqual($min, $status, $message);
$this->assertLessThanOrEqual($max, $status, $message);
}
|
Bubble up exception if it is clearly to be not swallowed away.
|
cakephp_cakephp
|
train
|
4b00505ff298efe3f29d9b5572a88350db9a6896
|
diff --git a/go/dhcp/config.go b/go/dhcp/config.go
index <HASH>..<HASH> 100644
--- a/go/dhcp/config.go
+++ b/go/dhcp/config.go
@@ -275,7 +275,8 @@ func (d *Interfaces) readConfig() {
var options = make(map[dhcp.OptionCode][]byte)
options[dhcp.OptionSubnetMask] = []byte(net.ParseIP(ConfNet.Netmask).To4())
- options[dhcp.OptionDomainNameServer] = ShuffleDNS(ConfNet)
+ // options[dhcp.OptionDomainNameServer] = ShuffleDNS(ConfNet)
+ options[dhcp.OptionDomainNameServer] = []byte(DHCPScope.ip.To4())
options[dhcp.OptionRouter] = ShuffleGateway(ConfNet)
options[dhcp.OptionDomainName] = []byte(ConfNet.DomainName)
DHCPScope.options = options
|
Return the gw ip instead of the main interface ip (ip per role)
|
inverse-inc_packetfence
|
train
|
42f9a0aeaf67d529637d9740cf9b6d5548771c5d
|
diff --git a/django_webtest/__init__.py b/django_webtest/__init__.py
index <HASH>..<HASH> 100644
--- a/django_webtest/__init__.py
+++ b/django_webtest/__init__.py
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
from django.conf import settings
-from django.contrib.auth.models import User
from django.test.signals import template_rendered
from django.core.handlers.wsgi import WSGIHandler
from django.test import TestCase
@@ -32,9 +31,14 @@ class DjangoTestApp(TestApp):
def _update_environ(self, environ, user):
if user:
environ = environ or {}
- if isinstance(user, User):
+ if hasattr(user, 'get_username'):
+ # custom user, django 1.5+
+ environ['WEBTEST_USER'] = to_string(user.get_username())
+ elif hasattr(user, 'username'):
+ # standard User
environ['WEBTEST_USER'] = to_string(user.username)
else:
+ # username
environ['WEBTEST_USER'] = to_string(user)
return environ
|
one more fix for django <I> custom user models
|
django-webtest_django-webtest
|
train
|
4d5bcffdc471a8e887ac84c6f84be26ae7d070c6
|
diff --git a/presto-main/src/test/java/com/facebook/presto/sql/planner/optimizations/TestAddExchangesPlans.java b/presto-main/src/test/java/com/facebook/presto/sql/planner/optimizations/TestAddExchangesPlans.java
index <HASH>..<HASH> 100644
--- a/presto-main/src/test/java/com/facebook/presto/sql/planner/optimizations/TestAddExchangesPlans.java
+++ b/presto-main/src/test/java/com/facebook/presto/sql/planner/optimizations/TestAddExchangesPlans.java
@@ -15,9 +15,7 @@
package com.facebook.presto.sql.planner.optimizations;
import com.facebook.presto.Session;
-import com.facebook.presto.SystemSessionProperties;
import com.facebook.presto.spi.plan.AggregationNode;
-import com.facebook.presto.sql.analyzer.FeaturesConfig;
import com.facebook.presto.sql.planner.Plan;
import com.facebook.presto.sql.planner.assertions.BasePlanTest;
import com.facebook.presto.sql.planner.assertions.ExpectedValueProvider;
@@ -34,7 +32,9 @@ import java.util.Optional;
import java.util.function.BiConsumer;
import static com.facebook.presto.SystemSessionProperties.AGGREGATION_PARTITIONING_MERGING_STRATEGY;
+import static com.facebook.presto.SystemSessionProperties.PARTITIONING_PRECISION_STRATEGY;
import static com.facebook.presto.SystemSessionProperties.TASK_CONCURRENCY;
+import static com.facebook.presto.sql.analyzer.FeaturesConfig.PartitioningPrecisionStrategy.PREFER_EXACT_PARTITIONING;
import static com.facebook.presto.spi.plan.AggregationNode.Step.SINGLE;
import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.aggregation;
import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.anySymbol;
@@ -422,9 +422,7 @@ public class TestAddExchangesPlans
TestingSession.testSessionBuilder()
.setCatalog("local")
.setSchema("tiny")
- .setSystemProperty(
- SystemSessionProperties.PARTITIONING_PRECISION_STRATEGY,
- FeaturesConfig.PartitioningPrecisionStrategy.PREFER_EXACT_PARTITIONING.toString())
+ .setSystemProperty(PARTITIONING_PRECISION_STRATEGY, PREFER_EXACT_PARTITIONING.toString())
.build(),
pattern);
}
|
Use static imports in TestAddExchangePlans
|
prestodb_presto
|
train
|
7b52245ed955abf99d98814d85ed3eb258141e3c
|
diff --git a/expression/builtin_string.go b/expression/builtin_string.go
index <HASH>..<HASH> 100644
--- a/expression/builtin_string.go
+++ b/expression/builtin_string.go
@@ -1555,7 +1555,41 @@ type builtinQuoteSig struct {
// See https://dev.mysql.com/doc/refman/5.7/en/string-functions.html#function_quote
func (b *builtinQuoteSig) eval(row []types.Datum) (d types.Datum, err error) {
- return d, errFunctionNotExists.GenByArgs("quote")
+ args, err := b.evalArgs(row)
+ if err != nil {
+ return types.Datum{}, errors.Trace(err)
+ }
+ if args[0].IsNull() {
+ return
+ }
+ var (
+ str string
+ buffer bytes.Buffer
+ )
+ str, err = args[0].ToString()
+ if err != nil {
+ return d, errors.Trace(err)
+ }
+ runes := []rune(str)
+ buffer.WriteRune('\'')
+ for i := 0; i < len(runes); i++ {
+ switch runes[i] {
+ case '\\', '\'':
+ buffer.WriteRune('\\')
+ buffer.WriteRune(runes[i])
+ case 0:
+ buffer.WriteRune('\\')
+ buffer.WriteRune('0')
+ case '\032':
+ buffer.WriteRune('\\')
+ buffer.WriteRune('Z')
+ default:
+ buffer.WriteRune(runes[i])
+ }
+ }
+ buffer.WriteRune('\'')
+ d.SetString(buffer.String())
+ return d, errors.Trace(err)
}
type binFunctionClass struct {
diff --git a/expression/builtin_string_test.go b/expression/builtin_string_test.go
index <HASH>..<HASH> 100644
--- a/expression/builtin_string_test.go
+++ b/expression/builtin_string_test.go
@@ -1231,5 +1231,33 @@ func (s *testEvaluatorSuite) TestBin(c *C) {
r, err := f.eval(nil)
c.Assert(r, testutil.DatumEquals, types.NewDatum(t["Expected"][0]))
}
+}
+
+func (s *testEvaluatorSuite) TestQuote(c *C) {
+ defer testleak.AfterTest(c)()
+
+ tbl := []struct {
+ arg interface{}
+ ret interface{}
+ }{
+ {`Don\'t!`, `'Don\\\'t!'`},
+ {`Don't`, `'Don\'t'`},
+ {`Don"`, `'Don"'`},
+ {`Don\"`, `'Don\\"'`},
+ {`\'`, `'\\\''`},
+ {`\"`, `'\\"'`},
+ {`萌萌哒(๑•ᴗ•๑)😊`, `'萌萌哒(๑•ᴗ•๑)😊'`},
+ {`㍿㌍㍑㌫`, `'㍿㌍㍑㌫'`},
+ {string([]byte{0, 26}), `'\0\Z'`},
+ {nil, nil},
+ }
+ for _, t := range tbl {
+ fc := funcs[ast.Quote]
+ f, err := fc.getFunction(datumsToConstants(types.MakeDatums(t.arg)), s.ctx)
+ c.Assert(err, IsNil)
+ r, err := f.eval(nil)
+ c.Assert(err, IsNil)
+ c.Assert(r, testutil.DatumEquals, types.NewDatum(t.ret))
+ }
}
diff --git a/plan/typeinferer.go b/plan/typeinferer.go
index <HASH>..<HASH> 100644
--- a/plan/typeinferer.go
+++ b/plan/typeinferer.go
@@ -380,7 +380,7 @@ func (v *typeInferrer) handleFuncCallExpr(x *ast.FuncCallExpr) {
"replace", "ucase", "upper", "convert", "substring", "elt",
"substring_index", "trim", "ltrim", "rtrim", "reverse", "hex", "unhex",
"date_format", "rpad", "lpad", "char_func", "conv", "make_set", "oct", "uuid",
- "insert_func", "bin":
+ "insert_func", "bin", "quote":
tp = types.NewFieldType(mysql.TypeVarString)
chs = v.defaultCharset
case "strcmp", "isnull", "bit_length", "char_length", "character_length", "crc32", "timestampdiff",
diff --git a/plan/typeinferer_test.go b/plan/typeinferer_test.go
index <HASH>..<HASH> 100644
--- a/plan/typeinferer_test.go
+++ b/plan/typeinferer_test.go
@@ -273,6 +273,7 @@ func (ts *testTypeInferrerSuite) TestInferType(c *C) {
{`exp(1)`, mysql.TypeDouble, charset.CharsetBin},
{`exp(1.23)`, mysql.TypeDouble, charset.CharsetBin},
{`exp('1.23')`, mysql.TypeDouble, charset.CharsetBin},
+ {`quote("Don\\'t!")`, mysql.TypeVarString, charset.CharsetUTF8},
{`insert("Titanium", 3, 6, "DB")`, mysql.TypeVarString, charset.CharsetUTF8},
{`is_ipv6('FE80::AAAA:0000:00C2:0002')`, mysql.TypeLonglong, charset.CharsetBin},
{"inet_ntoa(1)", mysql.TypeVarString, charset.CharsetUTF8},
|
builtin: add quote built-in function (#<I>)
|
pingcap_tidb
|
train
|
4f4c1c2ea671365c5fd3b7ae8ae156e4a7218b05
|
diff --git a/holoviews/plotting/bokeh/selection.py b/holoviews/plotting/bokeh/selection.py
index <HASH>..<HASH> 100644
--- a/holoviews/plotting/bokeh/selection.py
+++ b/holoviews/plotting/bokeh/selection.py
@@ -14,7 +14,7 @@ class TabularSelectionDisplay(SelectionDisplay):
opts['selected'] = list(np.where(mask)[0])
return el.opts(clone=True, backend='bokeh', **opts)
- def build_selection(self, selection_streams, hvobj, operations, region_stream=None):
+ def build_selection(self, selection_streams, hvobj, operations, region_stream=None, cache={}):
sel_streams = [selection_streams.exprs_stream]
hvobj = hvobj.apply(self._build_selection, streams=sel_streams, per_element=True)
for op in operations:
diff --git a/holoviews/selection.py b/holoviews/selection.py
index <HASH>..<HASH> 100644
--- a/holoviews/selection.py
+++ b/holoviews/selection.py
@@ -106,6 +106,8 @@ class _base_link_selections(param.ParameterizedFunction):
Register an Element or DynamicMap that may be capable of generating
selection expressions in response to user interaction events
"""
+ from .element import Table
+
# Create stream that produces element that displays region of selection
selection_expr_seq = SelectionExprSequence(
hvobj, mode=self.selection_mode, include_region=self.show_regions,
@@ -122,8 +124,9 @@ class _base_link_selections(param.ParameterizedFunction):
stream.clear_history()
stream.event()
- mode_stream = SelectMode(source=hvobj)
- mode_stream.param.watch(self._update_mode, 'mode')
+ if not isinstance(hvobj, Table):
+ mode_stream = SelectMode(source=hvobj)
+ mode_stream.param.watch(self._update_mode, 'mode')
self._plot_reset_streams[hvobj].param.watch(
clear_stream_history, ['resetting']
|
Fix issues with linked selections on tables (#<I>)
|
pyviz_holoviews
|
train
|
c27c704ffa226f5cf22e57fa58281490c4b03328
|
diff --git a/resources/lang/pl-PL/dashboard.php b/resources/lang/pl-PL/dashboard.php
index <HASH>..<HASH> 100644
--- a/resources/lang/pl-PL/dashboard.php
+++ b/resources/lang/pl-PL/dashboard.php
@@ -16,22 +16,22 @@ return [
// Incidents
'incidents' => [
- 'title' => 'Ereignisse & Termine',
+ 'title' => 'Incydenty i Konserwacja',
'incidents' => 'Vorfälle',
'logged' => '{0} Es gibt keine Vorfälle, gute Arbeit.|Du hast einen Vorfall gemeldet.|Du hast <strong>:count</strong> Vorfälle gemeldet.',
'incident-create-template' => 'Vorlage erstellen',
'incident-templates' => 'Vorfall Vorlagen',
'updates' => [
'title' => 'Aktualizacje incydentu dla :incident',
- 'count' => '{0} Zero Updates|[1] One Update|[2] Two Updates|[3,*] Several Updates',
+ 'count' => '{0} Zero Aktualizacji|[1] Jedna Aktualizacja|[2] Dwie Aktualizacje|[3,*] Kilka Aktualizacji',
'add' => [
'title' => 'Utwórz nową aktualizację zdarzenia',
'success' => 'Your new incident update has been created.',
- 'failure' => 'Something went wrong with the incident update.',
+ 'failure' => 'Coś poszło nie tak z aktualizacją incydentu.',
],
'edit' => [
- 'title' => 'Edit incident update',
- 'success' => 'The incident update has been updated.',
+ 'title' => 'Edytuj aktualizację incydentu',
+ 'success' => 'Aktualizacja incydentu została zaktualizowana.',
'failure' => 'Something went wrong updating the incident update',
],
],
@@ -157,12 +157,12 @@ return [
'subscribers' => [
'subscribers' => 'Abonnenten',
'description' => 'Subskrybenci będą otrzymywać powiadomienia, gdy wydarzenia zostaną utworzone lub komponenty zaktualizowane.',
- 'description_disabled' => 'To use this feature, you need allow people to signup for notifications.',
+ 'description_disabled' => 'Aby korzystać z tej funkcji, musisz pozwolić ludziom na rejestrację dla powiadomień.',
'verified' => 'Verifiziert',
'not_verified' => 'Nicht verifiziert',
'subscriber' => ':email, subskrybowany :data',
'no_subscriptions' => 'Zapisano do wszystkich aktualizacji',
- 'global' => 'Globally subscribed',
+ 'global' => 'Globalnie subskrybowane',
'add' => [
'title' => 'Einen neuen Abonnenten hinzufügen',
'success' => 'Abonnent hinzugefügt.',
|
New translations dashboard.php (Polish)
|
CachetHQ_Cachet
|
train
|
90d1775d406b4b193a9431629199d5f4e73d4824
|
diff --git a/lib/validate.js b/lib/validate.js
index <HASH>..<HASH> 100644
--- a/lib/validate.js
+++ b/lib/validate.js
@@ -292,7 +292,7 @@ Vp.functionType = function functionType(funDecl) {
paramTypes[i] = this.extractType(stmt.expression.right, params[i].name);
}
var returns = find(body, function(node) { return node.type === 'ReturnStatement'; });
- var returnTypes = returns.map(extractReturnType, this);
+ var returnTypes = returns.map(this.extractReturnType, this);
var result = types.void;
if (returns.length > 0) {
@@ -406,7 +406,11 @@ Vp.import = function import_(params, x, init, loc) {
Vp.function = function function_(funDecl) {
var f = funDecl.id.name;
var ft = this._env.lookup(f);
- var params = funDecl.params.map(function(id) { return id.name });
+ var params = funDecl.params.map(function(id) {
+ if (!this.validId(id))
+ this.fail("expected valid parameter name, got " + id.name, id.loc);
+ return id.name;
+ }, this);
var paramTypes = ft.params;
var resultType = ft.result;
var body = funDecl.body.body.filter(nonEmpty);
|
validate identifier names in all binding forms
|
asm-js_validator
|
train
|
1dbcd7cab146d1213c19acd6ba61b7d7d764c31e
|
diff --git a/newsletter-bundle/src/Resources/contao/languages/de/modules.php b/newsletter-bundle/src/Resources/contao/languages/de/modules.php
index <HASH>..<HASH> 100644
--- a/newsletter-bundle/src/Resources/contao/languages/de/modules.php
+++ b/newsletter-bundle/src/Resources/contao/languages/de/modules.php
@@ -35,6 +35,12 @@ $GLOBALS['TL_LANG']['MOD']['newsletter'] = array('Newsletter', 'Abonnements verw
/**
+ * Submodules
+ */
+$GLOBALS['TL_LANG']['MOD']['tl_newsletter_recipients'] = 'Abonnenten';
+
+
+/**
* Front end modules
*/
$GLOBALS['TL_LANG']['FMD']['newsletter'] = 'Newsletter';
diff --git a/newsletter-bundle/src/Resources/contao/languages/de/tl_newsletter_recipients.php b/newsletter-bundle/src/Resources/contao/languages/de/tl_newsletter_recipients.php
index <HASH>..<HASH> 100644
--- a/newsletter-bundle/src/Resources/contao/languages/de/tl_newsletter_recipients.php
+++ b/newsletter-bundle/src/Resources/contao/languages/de/tl_newsletter_recipients.php
@@ -55,11 +55,11 @@ $GLOBALS['TL_LANG']['tl_newsletter_recipients']['manually'] = 'manuell hinzuge
/**
* Buttons
*/
-$GLOBALS['TL_LANG']['tl_newsletter_recipients']['new'] = array('Abonnenten hinzufügen', 'Einen neuen Abonnenten hinzufügen');
-$GLOBALS['TL_LANG']['tl_newsletter_recipients']['show'] = array('Abonnentendetails', 'Details des Abonnenten ID %s anzeigen');
-$GLOBALS['TL_LANG']['tl_newsletter_recipients']['edit'] = array('Abonnenten bearbeiten', 'Abonnenten ID %s bearbeiten');
-$GLOBALS['TL_LANG']['tl_newsletter_recipients']['copy'] = array('Abonnenten duplizieren', 'Abonnenten ID %s duplizieren');
-$GLOBALS['TL_LANG']['tl_newsletter_recipients']['delete'] = array('Abonnenten löschen', 'Abonnenten ID %s löschen');
+$GLOBALS['TL_LANG']['tl_newsletter_recipients']['new'] = array('Abonnent hinzufügen', 'Einen neuen Abonnent hinzufügen');
+$GLOBALS['TL_LANG']['tl_newsletter_recipients']['show'] = array('Abonnentendetails', 'Details des Abonnent ID %s anzeigen');
+$GLOBALS['TL_LANG']['tl_newsletter_recipients']['edit'] = array('Abonnent bearbeiten', 'Abonnent ID %s bearbeiten');
+$GLOBALS['TL_LANG']['tl_newsletter_recipients']['copy'] = array('Abonnent duplizieren', 'Abonnent ID %s duplizieren');
+$GLOBALS['TL_LANG']['tl_newsletter_recipients']['delete'] = array('Abonnent löschen', 'Abonnent ID %s löschen');
$GLOBALS['TL_LANG']['tl_newsletter_recipients']['editheader'] = array('Verteiler bearbeiten', 'Die Verteiler-Einstellungen bearbeiten');
-$GLOBALS['TL_LANG']['tl_newsletter_recipients']['toggle'] = array('Abonnenten aktivieren/deaktivieren', 'Abonnenten ID %s aktivieren/deaktivieren');
+$GLOBALS['TL_LANG']['tl_newsletter_recipients']['toggle'] = array('Abonnent aktivieren/deaktivieren', 'Abonnent ID %s aktivieren/deaktivieren');
$GLOBALS['TL_LANG']['tl_newsletter_recipients']['import'] = array('CSV-Import', 'Abonnenten aus einer CSV-Datei importieren');
diff --git a/newsletter-bundle/src/Resources/contao/languages/en/modules.php b/newsletter-bundle/src/Resources/contao/languages/en/modules.php
index <HASH>..<HASH> 100644
--- a/newsletter-bundle/src/Resources/contao/languages/en/modules.php
+++ b/newsletter-bundle/src/Resources/contao/languages/en/modules.php
@@ -35,6 +35,12 @@ $GLOBALS['TL_LANG']['MOD']['newsletter'] = array('Newsletters', 'Manage subscrip
/**
+ * Submodules
+ */
+$GLOBALS['TL_LANG']['MOD']['tl_newsletter_recipients'] = 'Recipients';
+
+
+/**
* Front end modules
*/
$GLOBALS['TL_LANG']['FMD']['newsletter'] = 'Newsletter';
|
[Newsletter] Added better page titles in the back end (see #<I>)
|
contao_contao
|
train
|
71a823b70ba029772285792ed75f273366b5dffb
|
diff --git a/api/operator_autopilot_test.go b/api/operator_autopilot_test.go
index <HASH>..<HASH> 100644
--- a/api/operator_autopilot_test.go
+++ b/api/operator_autopilot_test.go
@@ -76,7 +76,6 @@ func TestAPI_OperatorAutopilotCASConfiguration(t *testing.T) {
func TestAPI_OperatorAutopilotServerHealth(t *testing.T) {
t.Parallel()
c, s := makeClient(t, nil, func(c *testutil.TestServerConfig) {
- c.AdvertiseAddrs.RPC = "127.0.0.1"
c.Server.RaftProtocol = 3
})
defer s.Stop()
diff --git a/testutil/server.go b/testutil/server.go
index <HASH>..<HASH> 100644
--- a/testutil/server.go
+++ b/testutil/server.go
@@ -92,12 +92,6 @@ func defaultServerConfig(t testing.T) *TestServerConfig {
NodeName: fmt.Sprintf("node-%d", ports[0]),
DisableCheckpoint: true,
LogLevel: "DEBUG",
- // Advertise can't be localhost
- AdvertiseAddrs: &Advertise{
- HTTP: "169.254.42.42",
- RPC: "169.254.42.42",
- Serf: "169.254.42.42",
- },
Ports: &PortsConfig{
HTTP: ports[0],
RPC: ports[1],
|
Remove fake advertise address and fix TestAPI_OperatorAutopilotServerHealth
|
hashicorp_nomad
|
train
|
b3851165894968caad5a74ff17846f6ce7f0e091
|
diff --git a/lib/callbacks/runtime.js b/lib/callbacks/runtime.js
index <HASH>..<HASH> 100644
--- a/lib/callbacks/runtime.js
+++ b/lib/callbacks/runtime.js
@@ -86,6 +86,7 @@
returnArray = false; // so that we don't do it twice if we trampoline
var oldFrame = __g.frame;
__g.frame = frame;
+ var oldContext = __g.context;
__g.context = ctx;
if (emitter && __g.depth === 0) emitter.emit('resume', frame);
if (emitter) emitter.emit('enter', frame);
@@ -117,6 +118,7 @@
if (emitter) emitter.emit("exit", frame);
__g.frame = oldFrame;
if (--__g.depth === 0 && __g.trampoline) __g.trampoline.flush();
+ __g.context = oldContext;
}
};
if (emitter && !ret.dispatched) emitter.emit('yield', frame);
diff --git a/lib/compiler/flows._js b/lib/compiler/flows._js
index <HASH>..<HASH> 100644
--- a/lib/compiler/flows._js
+++ b/lib/compiler/flows._js
@@ -266,6 +266,5 @@
}
});
}, 0);
-
})(typeof exports !== 'undefined' ? exports : (Streamline.flows = Streamline.flows || {}));
diff --git a/lib/fibers-fast/runtime.js b/lib/fibers-fast/runtime.js
index <HASH>..<HASH> 100644
--- a/lib/fibers-fast/runtime.js
+++ b/lib/fibers-fast/runtime.js
@@ -97,6 +97,7 @@ function invoke(that, fn, args, options) {
err = e;
val = v;
} else {
+ var oldContext = globals.context;
globals.context = cx;
globals.emitter && globals.emitter.emit("resume");
try {
@@ -107,6 +108,7 @@ function invoke(that, fn, args, options) {
}
} finally {
globals.emitter && globals.emitter.emit("yield");
+ globals.context = oldContext;
}
}
};
diff --git a/lib/fibers/runtime.js b/lib/fibers/runtime.js
index <HASH>..<HASH> 100644
--- a/lib/fibers/runtime.js
+++ b/lib/fibers/runtime.js
@@ -161,27 +161,23 @@ function invoke(that, fn, args, options) {
err = e;
val = v;
} else {
+ var oldContext = globals.context;
globals.context = cx;
+ var oldFrame = globals.frame;
+ globals.frame = frame;
if (emitter) {
- var oldFrame = globals.frame;
- globals.frame = frame;
if (globals.yielded) emitter.emit('resume', frame);
globals.yielded = false;
- try {
- if (e) {
- fiber.throwInto(e);
- } else {
- fiber.run(v);
- }
- } finally {
- globals.frame = oldFrame;
- }
- } else {
+ }
+ try {
if (e) {
fiber.throwInto(e);
} else {
fiber.run(v);
- }
+ }
+ } finally {
+ globals.frame = oldFrame;
+ globals.context = oldContext;
}
}
};
diff --git a/lib/globals.js b/lib/globals.js
index <HASH>..<HASH> 100644
--- a/lib/globals.js
+++ b/lib/globals.js
@@ -41,9 +41,25 @@ g.runtime || Object.defineProperty(g, 'runtime', {
}
});
+///
+/// * `fn = globals.withFreshContext(fn)`
+/// wraps a function so that it creates a new context on entry and restores the previous context on exit.
+/// returns the wrapped function.
+g.withFreshContext = function(fn) {
+ return function() {
+ var oldContext = g.context;
+ g.context = {};
+ try {
+ fn.apply(this, arguments)
+ } finally {
+ g.context = oldContext;
+ }
+ };
+}
+
g.setPromise = function(name) {
if (g.Promise) return; // first caller wins
var req = require; // defeat streamline-require dependencies
if (name === true) g.Promise = typeof Promise === "function" ? Promise : req('es6-promise');
else g.Promise = require(name);
-}
\ No newline at end of file
+}
diff --git a/lib/globals.md b/lib/globals.md
index <HASH>..<HASH> 100644
--- a/lib/globals.md
+++ b/lib/globals.md
@@ -17,3 +17,7 @@ be able to retrieve to format messages.
Note: an empty context (`{}`) is automatically set by the server wrappers of the `streams` module,
before they dispatch a request. So, with these wrappers, each request starts with a fresh empty context.
+
+* `fn = globals.withFreshContext(fn)`
+ wraps a function so that it creates a new context on entry and restores the previous context on exit.
+ returns the wrapped function.
|
fixed #<I> - potential leak of TLS context
|
Sage_streamlinejs
|
train
|
3469cbcb1a0f2904208820d201337ba25b26a16b
|
diff --git a/lib/locabulary.rb b/lib/locabulary.rb
index <HASH>..<HASH> 100644
--- a/lib/locabulary.rb
+++ b/lib/locabulary.rb
@@ -15,10 +15,10 @@ module Locabulary
# Responsible for building a hierarchical tree from faceted items, and ordering the nodes as per the presentation sequence for the
# associated predicate_name.
#
- # @param options [Hash]
- # @option predicate_name [String]
- # @option faceted_items [Array<#hits, #value>]
- # @option faceted_item_hierarchy_delimiter [String]
+ # @param [Hash] options
+ # @option options [String] :predicate_name
+ # @option options [Array<#hits, #value>] :faceted_items
+ # @option options [String] :faceted_item_hierarchy_delimiter
# @return Array[<FacetWrapperForItem>]
#
# @see Locabulary::Commands::BuildOrderedHierarchicalTree
@@ -29,11 +29,14 @@ module Locabulary
# @api public
# @since 0.1.0
#
- # Responsible for extracting a non-hierarchical sorted array of Locabulary::Item for the given predicate_name.
+ # Responsible for extracting a non-hierarchical sorted array of Locabulary::Item::Base objects for the given predicate_name.
#
- # @param options [Hash]
- # @option predicate_name [String]
- # @option as_of [Date]
+ # @param [Hash] options
+ # @option options [String] :predicate_name
+ # @option options [Date] :as_of (Date.today)
+ # @return Array[<Locabulary::Item::Base>]
+ #
+ # @see Locabulary::Commands::ActiveItemsForCommand
def self.active_items_for(options = {})
Commands::ActiveItemsForCommand.call(options)
end
@@ -76,19 +79,35 @@ module Locabulary
# @api public
# @since 0.1.0
+ #
+ # @param [Hash] options
+ # @option options [String] :predicate_name
+ # @option options [String] :term_uri
+ # @option options [String] :as_of (Date.today)
+ #
+ # @return [String] a label or URI
+ #
+ # @see Locabulary.active_items_for
def self.active_label_for_uri(options = {})
- predicate_name = options.fetch(:predicate_name)
term_uri = options.fetch(:term_uri)
- object = active_items_for(predicate_name: predicate_name).detect { |obj| obj.term_uri == term_uri }
+ object = active_items_for(options).detect { |obj| obj.term_uri == term_uri }
return object.term_label if object
term_uri
end
# @api public
# @since 0.1.0
+ #
+ # Return an Array of term labels for the given :predicate_name
+ #
+ # @param [Hash] options
+ # @option options [String] :predicate_name
+ # @option options [String] :as_of (Date.today)
+ # @return [Array<String>] an array of Locabuarly::Item::Base#term_label
+ #
+ # @see Locabulary.active_items_for
def self.active_labels_for(options = {})
- predicate_name = options.fetch(:predicate_name)
- active_items_for(predicate_name: predicate_name).map(&:term_label)
+ active_items_for(options).map(&:term_label)
end
# @api private
diff --git a/lib/locabulary/commands/active_hierarchical_roots_command.rb b/lib/locabulary/commands/active_hierarchical_roots_command.rb
index <HASH>..<HASH> 100644
--- a/lib/locabulary/commands/active_hierarchical_roots_command.rb
+++ b/lib/locabulary/commands/active_hierarchical_roots_command.rb
@@ -20,6 +20,15 @@ module Locabulary
# @api private
# @since 0.5.0
+ #
+ # @param options [Hash]
+ # @option options [String] :predicate_name
+ # @option options [Date] :as_of (Date.today)
+ #
+ # @note A concession about the as_of; This is not a live Utility. The data has a
+ # low churn rate. And while the date is important, I'm not as concerned
+ # about the local controlled vocabulary exposing a date that has expired.
+ # When we next deploy the server changes, the deactivated will go away.
def self.call(options = {})
predicate_name = options.fetch(:predicate_name)
cache[predicate_name] ||= new(options).call
diff --git a/lib/locabulary/commands/active_items_for_command.rb b/lib/locabulary/commands/active_items_for_command.rb
index <HASH>..<HASH> 100644
--- a/lib/locabulary/commands/active_items_for_command.rb
+++ b/lib/locabulary/commands/active_items_for_command.rb
@@ -24,8 +24,9 @@ module Locabulary
# @since 0.5.0
#
# @param options [Hash]
- # @option predicate_name [String]
- # @option as_of [Date]
+ # @option options [String] :predicate_name
+ # @option options [Date] :as_of (Date.today)
+ #
# @note A concession about the as_of; This is not a live Utility. The data has a
# low churn rate. And while the date is important, I'm not as concerned
# about the local controlled vocabulary exposing a date that has expired.
diff --git a/lib/locabulary/commands/build_ordered_hierarchical_tree_command.rb b/lib/locabulary/commands/build_ordered_hierarchical_tree_command.rb
index <HASH>..<HASH> 100644
--- a/lib/locabulary/commands/build_ordered_hierarchical_tree_command.rb
+++ b/lib/locabulary/commands/build_ordered_hierarchical_tree_command.rb
@@ -15,7 +15,8 @@ module Locabulary
# @option predicate_name [String]
# @option faceted_items [Array<#hits, #value>]
# @option faceted_item_hierarchy_delimiter [String]
- # @return Array[<FacetWrapperForItem>]
+ #
+ # @return [Array<FacetWrapperForItem>]
def self.call(options = {})
new(options).call
end
|
Updating documentation to adhere to Yard format
[skip ci]
|
ndlib_locabulary
|
train
|
8e6e06ce303433f12b23b76a4deb3eaedfc1a9ac
|
diff --git a/requirements.txt b/requirements.txt
index <HASH>..<HASH> 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,4 @@
-neocore==0.5.4
+neocore==0.5.6
python-socketio[client]==4.4.0
-requests>=2.20.0
-web3==4.8.2
+requests==2.22.0
+web3==5.4.0
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -18,7 +18,7 @@ with open("README.rst", "r") as fh:
setup(
name='switcheo',
python_requires='>=3.6',
- version='0.4.0',
+ version='0.4.1',
author='Keith Smith',
author_email='keith.scotts@gmail.com',
license='MIT License',
diff --git a/switcheo/streaming_client.py b/switcheo/streaming_client.py
index <HASH>..<HASH> 100644
--- a/switcheo/streaming_client.py
+++ b/switcheo/streaming_client.py
@@ -5,22 +5,22 @@ import threading
class OrderBooksNamespace(SocketIOClientNamespace):
-
+
def __init__(self):
self.lock = threading.Lock()
self.namespace = '/v2/books'
self.order_book = {}
SocketIOClientNamespace.__init__(self, namespace=self.namespace)
-
+
def on_connect(self):
pass
-
+
def on_disconnect(self):
pass
-
+
def on_join(self):
pass
-
+
def on_all(self, data):
self.lock.acquire()
self.order_book[data["room"]["pair"]] = data
@@ -31,16 +31,18 @@ class OrderBooksNamespace(SocketIOClientNamespace):
if digest_hash != book_digest_hash:
self.emit(event="leave", data=data["room"], namespace='/v2/books')
self.emit(event="join", data=data["room"], namespace='/v2/books')
-
+
def on_updates(self, data):
update_digest = data["digest"]
update_pair = data["room"]["pair"]
update_events = data["events"]
+ buy_event = False
+ sell_event = False
+ if "symbol" in self.order_book[update_pair]["book"]:
+ del self.order_book[update_pair]["book"]["symbol"]
self.lock.acquire()
for event in update_events:
price_match = False
- buy_event = False
- sell_event = False
event_iteration = 0
if event["side"] == "buy":
event_side = "buys"
@@ -55,8 +57,7 @@ class OrderBooksNamespace(SocketIOClientNamespace):
price_match = True
updated_amount = int(side["amount"]) + int(event_change)
if updated_amount == 0:
- self.order_book[update_pair]["book"][event_side].remove(
- side)
+ self.order_book[update_pair]["book"][event_side].remove(side)
else:
updated_book = {}
updated_book["amount"] = str(updated_amount)
@@ -68,8 +69,7 @@ class OrderBooksNamespace(SocketIOClientNamespace):
new_book = {}
new_book["amount"] = event_change
new_book["price"] = event_price
- self.order_book[update_pair]["book"][event_side].append(
- new_book)
+ self.order_book[update_pair]["book"][event_side].append(new_book)
if buy_event and sell_event:
self.order_book[update_pair]["book"]["buys"] = sorted(
self.order_book[update_pair]["book"]["buys"], key=itemgetter("price"), reverse=True)
|
Streaming Client Logic Update
Updating the streaming client logic for the order book to ensure SHA1 hashes match. Also updating the requirements file to the latest versions of the dependencies.
|
KeithSSmith_switcheo-python
|
train
|
41ad780073b3a0a815e0083ecf1a0cea38cb8105
|
diff --git a/starlette/responses.py b/starlette/responses.py
index <HASH>..<HASH> 100644
--- a/starlette/responses.py
+++ b/starlette/responses.py
@@ -173,9 +173,15 @@ class UJSONResponse(JSONResponse):
class RedirectResponse(Response):
def __init__(
- self, url: typing.Union[str, URL], status_code: int = 307, headers: dict = None
+ self,
+ url: typing.Union[str, URL],
+ status_code: int = 307,
+ headers: dict = None,
+ background: BackgroundTask = None,
) -> None:
- super().__init__(content=b"", status_code=status_code, headers=headers)
+ super().__init__(
+ content=b"", status_code=status_code, headers=headers, background=background
+ )
self.headers["location"] = quote_plus(str(url), safe=":/%#?&=@[]!$&'()*+,;")
|
RedirectResponse now accepts optional background parameter (#<I>)
* RedirectResponse now accepts optional background parameter
With this change, a `background` task can now be sent during a
RedirectResponse.
This can be useful when an application wishes to run a background task
after a form submission, which usually includes a redirect (e.g. to
prevent resubmission)
Closes #<I>
* Revert testing background in redirect test
|
encode_starlette
|
train
|
41c6dd505fba0191fbebf618558d25d6c4960eb3
|
diff --git a/tests/Build.php b/tests/Build.php
index <HASH>..<HASH> 100644
--- a/tests/Build.php
+++ b/tests/Build.php
@@ -76,6 +76,6 @@ class Build extends \PHPUnit_Framework_TestCase
]
)->setSourceDir($this->wsSourceDir)
->setDestinationDir($this->wsDestinationDir)
- ->build();
+ ->build(true);
}
}
|
Tests: verbose build.
|
Cecilapp_PHPoole
|
train
|
efa7c2efe2792db09c52573c5bbd46b430a3be04
|
diff --git a/opentok/opentok.py b/opentok/opentok.py
index <HASH>..<HASH> 100644
--- a/opentok/opentok.py
+++ b/opentok/opentok.py
@@ -1007,8 +1007,9 @@ class Client(object):
encrypted (true) or not (false, the default)
Boolean 'observeForceMute': A Boolean flag that determines whether the SIP endpoint should
- honor the force mute action. Defaults to False if moderator does not want to observe force
- mute a stream and set to True if the moderator wants to observe force mute a stream.
+ honor the force mute action. The force mute action allows a moderator to force clients to
+ mute audio in streams they publish. It defaults to False if moderator does not want to observe
+ force mute a stream and set to True if the moderator wants to observe force mute a stream.
Boolean 'video': A Boolean flag that indicates whether the SIP call will include video(true)
or not(false, which is the default). With video included, the SIP client's video is included
|
Change wording in docstring for force mute
|
opentok_Opentok-Python-SDK
|
train
|
2862f4286e3ac06e3a8737fd96991dd70d4ee8a5
|
diff --git a/code/javascript/iedoc.js b/code/javascript/iedoc.js
index <HASH>..<HASH> 100644
--- a/code/javascript/iedoc.js
+++ b/code/javascript/iedoc.js
@@ -42,6 +42,8 @@ function handleTags(name, args, comment) {
}
var tagStart = comment.search(/@(param|return)/);
if (tagStart == -1) {
+ comment = comment.replace(/^[\s\r\n]*/, "");
+ comment = comment.replace(/[\s\r\n]*$/, "");
WScript.Echo("<comment>" + comment + "</comment>");
return;
}
@@ -81,6 +83,8 @@ function handleTags(name, args, comment) {
if ("" == argMap[args[i]]) throw new Error("Comment error: param " + args[i] + " has no description");
WScript.Echo("<param name=\"" + args[i] + "\">" + argMap[args[i]] + "</param>");
}
+ comment = comment.replace(/^[\s\r\n]*/, "");
+ comment = comment.replace(/[\s\r\n]*$/, "");
WScript.Echo("<comment>" + comment + "</comment>");
}
|
Stripping whitespace around comment, to make XML file more consistent
r<I>
|
SeleniumHQ_selenium
|
train
|
7a261dee342646d1e0d7baa6b86d7738c9d41a5b
|
diff --git a/pyspider/scheduler/scheduler.py b/pyspider/scheduler/scheduler.py
index <HASH>..<HASH> 100644
--- a/pyspider/scheduler/scheduler.py
+++ b/pyspider/scheduler/scheduler.py
@@ -70,14 +70,6 @@ class Scheduler(object):
self._cnt['all'].load(os.path.join(self.data_path, 'scheduler.all'))
self._last_dump_cnt = 0
- def _load_projects(self):
- '''init projects'''
- self.projects = dict()
- for project in self.projectdb.get_all():
- self._update_project(project)
- logger.debug("project: %s loaded.", project['name'])
- self._last_update_project = time.time()
-
def _update_projects(self):
'''Check project update'''
now = time.time()
@@ -383,22 +375,26 @@ class Scheduler(object):
'''Set quit signal'''
self._quit = True
+ def run_once(self):
+ '''comsume queues and feed tasks to fetcher, once'''
+
+ self._update_projects()
+ self._check_task_done()
+ self._check_request()
+ while self._check_cronjob():
+ pass
+ self._check_select()
+ self._check_delete()
+ self._try_dump_cnt()
+
def run(self):
'''Start scheduler loop'''
logger.info("loading projects")
- self._load_projects()
while not self._quit:
try:
time.sleep(self.LOOP_INTERVAL)
- self._update_projects()
- self._check_task_done()
- self._check_request()
- while self._check_cronjob():
- pass
- self._check_select()
- self._check_delete()
- self._try_dump_cnt()
+ self.run_once()
self._exceptions = 0
except KeyboardInterrupt:
break
|
move run logic in scheduler to run_once, remove _load_projects
projects can been loaded via _update_projects when
_load_update_project == 0
|
binux_pyspider
|
train
|
cba9eadd0151063a441a49c8f4bbd7130ebf6277
|
diff --git a/lib/utils/xpack.js b/lib/utils/xpack.js
index <HASH>..<HASH> 100644
--- a/lib/utils/xpack.js
+++ b/lib/utils/xpack.js
@@ -664,13 +664,13 @@ class Xpack {
if (!packageJson) {
// Not in a package.
- return
+ return undefined
}
if (!packageJson.xpack ||
!packageJson.xpack.minimumXpmRequired) {
log.trace('minimumXpmRequired not used, no checks')
- return
+ return undefined
}
// Remove the pre-release part.
const minimumXpmRequired = semver.clean(
@@ -690,6 +690,7 @@ class Xpack {
'or later, please update', CliExitCodes.ERROR.PREREQUISITES)
}
// Check passed.
+ return xpmVersion
}
}
|
xpack.js: checkMinimumXpmRequired returns version
|
xpack_xpm-js
|
train
|
23f12a93e9cf6824f139e6a2c7c4da56cbc64360
|
diff --git a/tests/Database/AuthorizeArgsTests/TestAuthorizationArgsQuery.php b/tests/Database/AuthorizeArgsTests/TestAuthorizationArgsQuery.php
index <HASH>..<HASH> 100644
--- a/tests/Database/AuthorizeArgsTests/TestAuthorizationArgsQuery.php
+++ b/tests/Database/AuthorizeArgsTests/TestAuthorizationArgsQuery.php
@@ -50,6 +50,8 @@ class TestAuthorizationArgsQuery extends Query
$selectFields = $getSelectFields();
Assert::assertInstanceOf(SelectFields::class, $selectFields);
+
+ return true;
}
public function resolve(): void
|
phpstan: fix "Method Rebing\GraphQL\Tests\Database\AuthorizeArgsTests\TestAuthorizationArgsQuery::authorize() should return bool but return statement is missing."
|
rebing_graphql-laravel
|
train
|
d0fbbc774e0e8d599e63ab41eba2ff1f8cd2023d
|
diff --git a/lib/rake-pipeline-web-filters/coffee_script_filter.rb b/lib/rake-pipeline-web-filters/coffee_script_filter.rb
index <HASH>..<HASH> 100644
--- a/lib/rake-pipeline-web-filters/coffee_script_filter.rb
+++ b/lib/rake-pipeline-web-filters/coffee_script_filter.rb
@@ -26,7 +26,11 @@ module Rake::Pipeline::Web::Filters
# @param [FileWrapper] output a FileWrapper object
def generate_output(inputs, output)
inputs.each do |input|
- output.write CoffeeScript.compile(input, options)
+ begin
+ output.write CoffeeScript.compile(input, options)
+ rescue ExecJS::Error => error
+ raise error, "Error compiling #{input.path}. #{error.message}"
+ end
end
end
diff --git a/spec/coffee_script_filter_spec.rb b/spec/coffee_script_filter_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/coffee_script_filter_spec.rb
+++ b/spec/coffee_script_filter_spec.rb
@@ -85,5 +85,22 @@ y = function() {
filter.output_files.first.path.should == "octopus"
end
end
+
+ describe "invalid input" do
+ let(:coffee_input) { <<-COFFEE }
+y = function(){
+ return "whoops there javascript in here!"
+}
+ COFFEE
+
+ it "has a useful error message including the input file name" do
+ filter = setup_filter CoffeeScriptFilter.new
+ tasks = filter.generate_rake_tasks
+ lambda {
+ tasks.each(&:invoke)
+ }.should raise_error(ExecJS::RuntimeError, "Error compiling input.coffee. reserved word \"function\" on line 1")
+ end
+ end
+
end
|
Include the file name in Coffeescript compilation errors. (Luke Melia / Kris Selden)
|
wycats_rake-pipeline-web-filters
|
train
|
2cbb1c036e81b3d59761c1cd9bb5de01db37caa8
|
diff --git a/sniffer/broadcasters.py b/sniffer/broadcasters.py
index <HASH>..<HASH> 100644
--- a/sniffer/broadcasters.py
+++ b/sniffer/broadcasters.py
@@ -82,15 +82,29 @@ except ImportError:
class Broadcaster(object):
def __init__(self, *emitters):
- self.emitters = emitters
+ self.emitters = list(emitters)
def success(self, sniffer):
- for emit in self.emitters:
- emit.success(sniffer)
+ for emit in list(self.emitters):
+ try:
+ emit.success(sniffer)
+ except Exception as e:
+ self.remove(emit, str(e))
def failure(self, sniffer):
- for emit in self.emitters:
- emit.failure(sniffer)
+ for emit in list(self.emitters):
+ try:
+ emit.failure(sniffer)
+ except Exception as e:
+ self.remove(emit, str(e))
+
+ def remove(self, emitter, why):
+ self.emitters.remove(emitter)
+
+ print(why, file=sys.stderr)
+
+ if not self.emitters:
+ raise Exception('No emitters available')
broadcaster = Broadcaster(
|
Recover from emitter errors
PyNotify raises an error when showing a message if no service is
available. Instead of dying when an emitter raises, that emitter
is removed. If the last emitter is removed an exception is
raised (because there would be no way to monitor test results).
|
jeffh_sniffer
|
train
|
21bc8352509ae9599f9462882017d290c7c124e2
|
diff --git a/htdocs/widget.php b/htdocs/widget.php
index <HASH>..<HASH> 100644
--- a/htdocs/widget.php
+++ b/htdocs/widget.php
@@ -1,7 +1,24 @@
<?php
$env = $_GET['environment'];
$svc = $_GET['service'];
- $tag = $env."-".$svc;
+
+ if (isset($_GET['environment'])) {
+ $env = $_GET['environment'];
+ $tag_arr[] = $env;
+ }
+ if (isset($_GET['service'])) {
+ $svc = $_GET['service'];
+ $tag_arr[] = $svc;
+ }
+ if (isset($_GET['group'])) {
+ $grp = $_GET['group'];
+ $tag_arr[] = $grp;
+ }
+ $tag = implode('-', $tag_arr);
+ if (isset($_GET['label']))
+ $label = $_GET['label'];
+ else
+ $label = implode(' ', $tag_arr);
?>
<html lang="en">
@@ -15,7 +32,7 @@
<div class="span3">
<table class="table table-bordered table-condensed summary" id="<?php echo $tag; ?>" data-label="<?php echo $tag; ?>">
<thead>
- <tr> <th colspan="6" id="<?php echo $tag; ?>-status"><?php echo $env." ".$svc; ?></th> </tr>
+ <tr> <th colspan="6" id="<?php echo $tag; ?>-status"><?php echo $label; ?></th> </tr>
</thead>
<tbody>
<tr id="<?php echo $tag; ?>-warnings" class="warnings">
@@ -34,7 +51,7 @@
<script src="js/console.js"></script>
<script>
$(document).ready(function() {
- var services = { '<?php echo $tag; ?>': 'environment=<?php echo $env; ?>&service=<?php echo $svc; ?>' };
+ var services = { '<?php echo $tag; ?>': 'sort-by=lastReceiveTime<?php if ($env != "") echo "&environment=".$env; ?><?php if ($svc != "") echo "&service=".$svc; ?><?php if ($grp != "") echo "&group=".$grp; ?>' };
loadAlerts(services, true);
});
</script>
|
Add widget support for Groups and user-defined labels
|
alerta_alerta
|
train
|
8236c05d0d2336c208d9564b4f5c2038fd1a4f35
|
diff --git a/lib/flowdock.rb b/lib/flowdock.rb
index <HASH>..<HASH> 100644
--- a/lib/flowdock.rb
+++ b/lib/flowdock.rb
@@ -13,7 +13,7 @@ module Flowdock
def initialize(options = {})
@api_token = options[:api_token]
raise InvalidParameterError, "Flow must have :api_token attribute" if blank?(@api_token)
-
+
@source = options[:source]
raise InvalidParameterError, "Flow must have valid :source attribute, only alphanumeric characters and underscores can be used" if blank?(@source) || !@source.match(/^[a-z0-9\-_ ]+$/i)
@@ -25,13 +25,13 @@ module Flowdock
def send_message(params)
raise InvalidParameterError, "Message must have both :subject and :content" if blank?(params[:subject]) || blank?(params[:content])
-
+
from = (params[:from].kind_of?(Hash)) ? params[:from] : @from
raise InvalidParameterError, "Flow's :from attribute must have :address attribute" if blank?(from[:address])
tags = (params[:tags].kind_of?(Array)) ? params[:tags] : []
tags.reject! { |tag| !tag.kind_of?(String) || blank?(tag) }
-
+
link = (!blank?(params[:link])) ? params[:link] : nil
params = {
@@ -48,16 +48,16 @@ module Flowdock
# Send the request
resp = self.class.post(get_flowdock_api_url, :body => params)
- raise ApiError, (resp.code == 500 ? "Flowdock API returned error: #{resp.body}" : "HTTP Error #{resp.code}") unless resp.code == 200
+ raise ApiError, "Flowdock API returned error: Status: #{resp.code} Body: #{resp.body}" unless resp.code == 200
true
end
-
+
private
def blank?(var)
var.nil? || var.respond_to?(:length) && var.length == 0
end
-
+
def get_flowdock_api_url
"#{FLOWDOCK_API_URL}/#{@api_token}"
end
|
More sensible handling of error responses from API
|
flowdock_flowdock-api
|
train
|
f3d77a8cfb68bb2b00d47cf7a6130aa9e0b2d6f2
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -450,6 +450,9 @@ function parse (args, opts) {
function hasKey (obj, keys) {
var o = obj
+
+ if (!configuration['dot-notation']) keys = [keys.join('.')]
+
keys.slice(0, -1).forEach(function (key) {
o = (o[key] || {})
})
diff --git a/test/yargs-parser.js b/test/yargs-parser.js
index <HASH>..<HASH> 100644
--- a/test/yargs-parser.js
+++ b/test/yargs-parser.js
@@ -1531,6 +1531,35 @@ describe('yargs-parser', function () {
})
expect(parsed['foo.bar']).to.equal('banana')
})
+
+ it('should use value from cli, if cli overrides dot notation default', function () {
+ var parsed = parser(['--foo.bar', 'abc'], {
+ default: {
+ 'foo.bar': 'default'
+ },
+ configuration: {
+ 'dot-notation': false
+ }
+ })
+
+ expect(parsed['foo.bar']).to.equal('abc')
+ })
+
+ it('should also override dot notation alias', function () {
+ var parsed = parser(['--foo.bar', 'abc'], {
+ alias: {
+ 'foo.bar': ['alias.bar']
+ },
+ default: {
+ 'foo.bar': 'default'
+ },
+ configuration: {
+ 'dot-notation': false
+ }
+ })
+
+ expect(parsed['alias.bar']).to.equal('abc')
+ })
})
describe('parse numbers', function () {
|
Join 'keys' in hasKey() when 'dot-notation' is turned off
|
yargs_yargs-parser
|
train
|
b62e21f3017e25d3c55d089b464ba232d9a1c8c6
|
diff --git a/pylon/io/matpower.py b/pylon/io/matpower.py
index <HASH>..<HASH> 100644
--- a/pylon/io/matpower.py
+++ b/pylon/io/matpower.py
@@ -204,7 +204,7 @@ class MATPOWERReader(_CaseReader):
for i, line in enumerate(file):
if line.startswith("]"):
- logger.warning("Missing cost data [%d]." % i)
+# logger.warning("Missing cost data [%d]." % i)
break
g = case.generators[i]
@@ -248,19 +248,19 @@ class MATPOWERReader(_CaseReader):
def _parse_gencost_line(self, line):
gencost_map = {1: PW_LINEAR, 2: POLYNOMIAL}
- gencost_data = line.strip(";\n").split()
+ gencost_data = line.replace(";", "").strip("\n").split()
model = gencost_map[int(gencost_data[0])]
c_startup = float(gencost_data[1])
c_shutdown = float(gencost_data[2])
n = int(gencost_data[3])
if model == PW_LINEAR:
- d = gencost_data[-2 * n:]
+ d = gencost_data[4:4 + (2 * n)]
cost = []
for j in range(n):
cost.append((float(d[2 * j]), float(d[2 * j + 1])))
else:
- d = gencost_data[-n:]
+ d = gencost_data[4:4 + n]
cost = tuple([float(a) for a in d])
return model, c_startup, c_shutdown, cost
|
Fixing parsing of gencosts with trailing comments.
|
rwl_pylon
|
train
|
050112805ffa875087fa7f58d00f92880f2a12ba
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -8,14 +8,15 @@ module.exports = function (db, sep) {
//use \xff (255) as the seperator,
//so that sections of the database will sort after the regular keys
sep = sep || '\xff'
+
Hooks(db)
db.sublevel = function (prefix) {
return new SubDb(db, prefix, sep)
}
- db.prefix = function () {
- return ''
+ db.prefix = function (key) {
+ return '' + (key || '')
}
db.pre = function (hook) {
@@ -27,6 +28,17 @@ module.exports = function (db, sep) {
return db
}
+ var batch = db.batch
+ db.batch = function (changes, opts, cb) {
+ changes.forEach(function (e) {
+ if(e.prefix) {
+ if(e.prefix && 'function' === typeof e.prefix.prefix)
+ e.key = e.prefix.prefix(e.key)
+ }
+ })
+ batch.call(db, changes, opts, cb)
+ }
+
db.post = function (hook) {
db.hooks.post({start: '', end: sep}, hook)
return db
diff --git a/sub.js b/sub.js
index <HASH>..<HASH> 100644
--- a/sub.js
+++ b/sub.js
@@ -14,6 +14,7 @@ function SubDB (db, prefix, sep) {
this._parent = db
this._sep = sep || '\xff'
this._prefix = prefix
+ this._root = root(this)
var self = this
this.hooks = {
pre: function () {
@@ -39,28 +40,31 @@ SDB.sublevel = function (prefix) {
}
SDB.put = function (key, value, opts, cb) {
- this._parent.put(this._key(key), value, opts, cb)
+ this._root.put(this.prefix(key), value, opts, cb)
+// this._parent.put(this._key(key), value, opts, cb)
}
SDB.get = function (key, opts, cb) {
- this._parent.get(this._key(key), opts, cb)
+ this._root.get(this.prefix(key), opts, cb)
+// this._parent.get(this._key(key), opts, cb)
}
SDB.del = function (key, opts, cb) {
- this._parent.del(this._key(key), opts, cb)
+ this._root.del(this.prefix(key), opts, cb)
+// this._parent.del(this._key(key), opts, cb)
}
SDB.batch = function (changes, opts, cb) {
var self = this
changes.forEach(function (ch) {
- ch.key = self._key(ch.key)
+ ch.key = (ch.prefix || self).prefix(ch.key)
+ if(ch.prefix) ch.prefix = null
})
- this._parent.batch(changes, opts, cb)
-
+ this._root.batch(changes, opts, cb)
}
-SDB.prefix = function () {
- return this._parent.prefix() + this._sep + this._prefix + this._sep
+SDB.prefix = function (key) {
+ return this._parent.prefix() + this._sep + this._prefix + this._sep + (key || '')
}
;['createReadStream', 'createKeyStream', 'createValueStream']
|
allow user to pass prefix option to batch
|
dominictarr_level-sublevel
|
train
|
228cb300cbde0df7bdf538e1388e2cc2eebaadd1
|
diff --git a/src/main/java/org/dasein/cloud/aws/compute/EC2Instance.java b/src/main/java/org/dasein/cloud/aws/compute/EC2Instance.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/dasein/cloud/aws/compute/EC2Instance.java
+++ b/src/main/java/org/dasein/cloud/aws/compute/EC2Instance.java
@@ -55,18 +55,7 @@ import org.dasein.cloud.Requirement;
import org.dasein.cloud.ResourceStatus;
import org.dasein.cloud.Tag;
import org.dasein.cloud.aws.AWSCloud;
-import org.dasein.cloud.compute.Architecture;
-import org.dasein.cloud.compute.ImageClass;
-import org.dasein.cloud.compute.MachineImage;
-import org.dasein.cloud.compute.Platform;
-import org.dasein.cloud.compute.VMLaunchOptions;
-import org.dasein.cloud.compute.VMScalingCapabilities;
-import org.dasein.cloud.compute.VMScalingOptions;
-import org.dasein.cloud.compute.VirtualMachine;
-import org.dasein.cloud.compute.VirtualMachineProduct;
-import org.dasein.cloud.compute.VirtualMachineSupport;
-import org.dasein.cloud.compute.VmState;
-import org.dasein.cloud.compute.VmStatistics;
+import org.dasein.cloud.compute.*;
import org.dasein.cloud.identity.ServiceAction;
import org.dasein.cloud.network.IPVersion;
import org.dasein.cloud.network.IpAddress;
@@ -1595,6 +1584,32 @@ public class EC2Instance implements VirtualMachineSupport {
@Override
public @Nonnull Iterable<VirtualMachine> listVirtualMachines() throws InternalException, CloudException {
+ return listVirtualMachinesWithParams(null);
+ }
+
+ @Override
+ public @Nonnull Iterable<VirtualMachine> listVirtualMachines(VMFilterOptions options) throws InternalException, CloudException {
+ Map<String, String> extraParameters = new HashMap<String, String>();
+ int i = 1;
+ Map<String, String> tags = options.getTags();
+ if ( tags != null && tags.size() > 0 ) {
+ for ( Map.Entry<String, String> parameter : tags.entrySet() ) {
+ String key = parameter.getKey();
+ String value = parameter.getValue();
+
+// String filterName = "Filter." + i + ".Name=" + AWSCloud.encode( "tag:" + key, true );
+// String filterValue = "Filter." + i + ".Value.1=" + AWSCloud.encode( value, true );
+
+ extraParameters.put( "Filter." + i + ".Name", "tag:" + key );
+ extraParameters.put( "Filter." + i + ".Value.1", value );
+ i++;
+ }
+ }
+
+ return listVirtualMachinesWithParams( extraParameters );
+ }
+
+ private @Nonnull Iterable<VirtualMachine> listVirtualMachinesWithParams(Map<String,String> extraParameters) throws InternalException, CloudException {
APITrace.begin(provider, "listVirtualMachines");
try {
ProviderContext ctx = provider.getContext();
@@ -1614,7 +1629,15 @@ public class EC2Instance implements VirtualMachineSupport {
}
}
}
- Map<String,String> parameters = provider.getStandardParameters(provider.getContext(), EC2Method.DESCRIBE_INSTANCES);
+
+ Map<String, String> parameters = provider.getStandardParameters( provider.getContext(), EC2Method.DESCRIBE_INSTANCES );
+
+ if ( extraParameters != null && extraParameters.size() > 0 ) {
+ for ( Map.Entry<String, String> parameter : extraParameters.entrySet() ) {
+ parameters.put( parameter.getKey(), parameter.getValue() );
+ }
+ }
+
EC2Method method = new EC2Method(provider, provider.getEc2Url(), parameters);
ArrayList<VirtualMachine> list = new ArrayList<VirtualMachine>();
NodeList blocks;
@@ -1644,9 +1667,9 @@ public class EC2Instance implements VirtualMachineSupport {
finally {
APITrace.end();
}
- }
+ }
- @Override
+ @Override
public void pause(@Nonnull String vmId) throws InternalException, CloudException {
throw new OperationNotSupportedException("Pause/unpause not supported by the EC2 API");
}
|
Added support for retrieving VM list with filter options by tags.
|
dasein-cloud_dasein-cloud-aws
|
train
|
b2d2ca69063507b82b8cff13b5bd62593c16f9c9
|
diff --git a/opal/browser/dom/node.rb b/opal/browser/dom/node.rb
index <HASH>..<HASH> 100644
--- a/opal/browser/dom/node.rb
+++ b/opal/browser/dom/node.rb
@@ -59,7 +59,7 @@ class Node
if native?(node)
`#@native.appendChild(node)`
elsif node.respond_to? :each
- node.each { |n| add_child(n) }
+ node.each { |n| self << n }
elsif String === node
`#@native.appendChild(#@native.ownerDocument.createTextNode(node))`
else
@@ -110,7 +110,7 @@ class Node
#
# @param node [Node] the node to append to
def append_to(node)
- node.add_child(self)
+ node << self
end
# Get an array of ancestors.
|
dom/node: use #<< internally instead of #add_child
|
opal_opal-browser
|
train
|
619e89aa2293b2063ddc04deec58471901ce11ac
|
diff --git a/android/CouchbaseLite/src/androidTest/java/com/couchbase/lite/LogTest.java b/android/CouchbaseLite/src/androidTest/java/com/couchbase/lite/LogTest.java
index <HASH>..<HASH> 100644
--- a/android/CouchbaseLite/src/androidTest/java/com/couchbase/lite/LogTest.java
+++ b/android/CouchbaseLite/src/androidTest/java/com/couchbase/lite/LogTest.java
@@ -136,6 +136,31 @@ public class LogTest {
}
}
+ @Test
+ public void testEnableAndDisableCustomLogging() {
+ File logPath = new File(context.getCacheDir().getAbsolutePath(), "Logs");
+ logPath.deleteOnExit();
+ Database.getLog().getFile().setDirectory(context.getCacheDir().getAbsolutePath());
+
+ LogTestLogger customLogger = new LogTestLogger();
+ Log.i("IGNORE", "IGNORE");
+ Database.getLog().setCustom(customLogger);
+
+ customLogger.setLevel(LogLevel.NONE);
+ Log.v(LogDomain.DATABASE.toString(), "TEST VERBOSE");
+ Log.i(LogDomain.DATABASE.toString(), "TEST INFO");
+ Log.w(LogDomain.DATABASE.toString(), "TEST WARNING");
+ Log.e(LogDomain.DATABASE.toString(), "TEST ERROR");
+ assertEquals(0, customLogger.getLines().size());
+
+ customLogger.setLevel(LogLevel.VERBOSE);
+ Log.v(LogDomain.DATABASE.toString(), "TEST VERBOSE");
+ Log.i(LogDomain.DATABASE.toString(), "TEST INFO");
+ Log.w(LogDomain.DATABASE.toString(), "TEST WARNING");
+ Log.e(LogDomain.DATABASE.toString(), "TEST ERROR");
+ assertEquals(4, customLogger.getLines().size());
+ }
+
}
class LogTestLogger implements Logger {
|
test: check custom logging (#<I>)
* check custom logging enabled and disabled
|
couchbase_couchbase-lite-android
|
train
|
18848eae78e96c2b93d4e3dcb4c4e7d7ef619b99
|
diff --git a/src/Service/Captcha.php b/src/Service/Captcha.php
index <HASH>..<HASH> 100644
--- a/src/Service/Captcha.php
+++ b/src/Service/Captcha.php
@@ -16,14 +16,14 @@ class Captcha extends BaseService
use RetTrait;
/**
- * @var int the width of the generated CAPTCHA image. Defaults to 120
+ * @var int the width of the generated CAPTCHA image. Defaults to 75
*/
protected $width = 75;
/**
- * @var int the height of the generated CAPTCHA image. Defaults to 50
+ * @var int the height of the generated CAPTCHA image. Defaults to 31
*/
- protected $height = 32;
+ protected $height = 31;
/**
* @var int padding around the text. Defaults to 2
|
refactoring: 解决验证码超出1px
|
miaoxing_app
|
train
|
47a5b3ba12e2b943b7cb3c77bfd9cf9b5df0c218
|
diff --git a/lib/pry-theme/commands.rb b/lib/pry-theme/commands.rb
index <HASH>..<HASH> 100644
--- a/lib/pry-theme/commands.rb
+++ b/lib/pry-theme/commands.rb
@@ -34,6 +34,7 @@ module PryTheme
opt.on :a, "all-colors", "Show all available 8/256 colors."
opt.on :c, "color", "Show information about a specific color (256)."
opt.on :t, "test", "Test your current theme", :argument => false
+ opt.on :e, "edit", "Edit/reload current .prytheme", :argument => false
opt.on :l, "list", "Show a list of installed themes", :argument => false
opt.on :r, "remote-list", "Show a list of themes from Pry Theme Collection", :argument => false
opt.on :i, "install", "Install a theme from Pry Theme Collection"
@@ -46,6 +47,8 @@ module PryTheme
show_specific_color
elsif opts.t?
test_theme
+ elsif opts.e?
+ edit_theme
elsif opts.l?
show_list
elsif opts.r?
@@ -140,6 +143,24 @@ end
lputs colorize_code(example)
end
+ def edit_theme
+ cur = PryTheme.current_theme
+ file_name = PryTheme::Theme.pathify_theme cur
+ Pry.run_command 'edit ' + file_name
+ err_msg = proc {
+ output.puts Pry::Helpers::Text.red('Oops. Probably try again.')
+ }
+ begin
+ if PryTheme.set_theme(cur).nil?
+ err_msg.call
+ else
+ test_theme
+ end
+ rescue
+ err_msg.call
+ end
+ end
+
def show_list
old_theme = PryTheme.current_theme.dup
diff --git a/lib/pry-theme/theme.rb b/lib/pry-theme/theme.rb
index <HASH>..<HASH> 100644
--- a/lib/pry-theme/theme.rb
+++ b/lib/pry-theme/theme.rb
@@ -4,7 +4,7 @@ module PryTheme
attr_reader :scheme, :author, :description, :color_depth, :version, :name
def initialize(theme_filename)
- theme_file = File.join(THEME_DIR, "#{theme_filename}.prytheme")
+ theme_file = self.class.pathify_theme(theme_filename)
if File.exists?(theme_file)
theme = YAML.load_file(theme_file)
@@ -37,6 +37,10 @@ module PryTheme
end
end
+ def self.pathify_theme theme_name
+ File.join(THEME_DIR, "#{theme_name}.prytheme")
+ end
+
end
class NoThemeError < StandardError; end
|
Add pry-theme -e — Edit theme then display results.
|
kyrylo_pry-theme
|
train
|
3e860a67c4e626ae72f0132be81ca31066ea8678
|
diff --git a/chempy/util/_expr.py b/chempy/util/_expr.py
index <HASH>..<HASH> 100644
--- a/chempy/util/_expr.py
+++ b/chempy/util/_expr.py
@@ -15,7 +15,7 @@ import math
from itertools import chain
from operator import add, mul, truediv, sub, pow
from .pyutil import defaultkeydict, deprecated
-from .arithmeticdict import ArithmeticDict
+
def _implicit_conversion(obj):
if isinstance(obj, (int, float)):
@@ -91,7 +91,7 @@ class Expr(object):
parameter_keys = ()
nargs = None
- def __init__(self, args=None, unique_keys=None, args_dimensionality=None):
+ def __init__(self, args=None, unique_keys=None):
if isinstance(args, str):
args = (args,)
if self.argument_names is not None and self.argument_names[-1] != Ellipsis and self.nargs is None:
@@ -127,8 +127,6 @@ class Expr(object):
args = [args[k] for k in self.argument_names or self.unique_keys]
self.args = args
- if args_dimensionality is not None:
- self.args_dimensionality = types.MethodType(args_dimensionality, self)
@classmethod
def fk(cls, *args):
diff --git a/chempy/util/bkh.py b/chempy/util/bkh.py
index <HASH>..<HASH> 100644
--- a/chempy/util/bkh.py
+++ b/chempy/util/bkh.py
@@ -12,8 +12,8 @@ from chempy.units import to_unitless, linspace, logspace_from_lin
def integration_with_sliders(
- rsys, tend, c0, parameters, fig_kwargs=None, unit_registry=None,
- slider_kwargs=None, x_axis_type="linear", y_axis_type="linear",
+ rsys, tend, c0, parameters, fig_kwargs=None,
+ slider_kwargs=None, conc_bounds=None, x_axis_type="linear", y_axis_type="linear",
integrate_kwargs=None, odesys_extra=None, get_odesys_kw=None):
"""
Parameters
@@ -41,6 +41,7 @@ def integration_with_sliders(
state_keys, rarg_keys, p_units = [extra[k] for k in ('param_keys', 'unique', 'p_units')]
output_conc_unit = get_odesys_kw.get('output_conc_unit', None)
output_time_unit = get_odesys_kw.get('output_time_unit', None)
+ unit_registry = get_odesys_kw.get('unit_registry', None)
if output_conc_unit is None:
if unit_registry is not None:
raise ValueError("if unit_registry is given, output_conc_unit must also be given")
@@ -63,7 +64,6 @@ def integration_with_sliders(
'tout': to_unitless(tout, output_time_unit),
k: to_unitless(Cout[:, idx], output_conc_unit)
}) for idx, k in enumerate(rsys.substances)]
- print(sources)
if fig_kwargs is None:
Cmax = np.max(Cout)
x_range = list(to_unitless([tend*0, tend], output_time_unit))
@@ -84,21 +84,28 @@ def integration_with_sliders(
if p_units is None:
p_units = [None]*len(param_keys)
p_ul = [to_unitless(parameters[k], _u) for k, _u in zip(param_keys, p_units)]
+
+ def _dict_to_unitless(d, u):
+ return {k: to_unitless(v, u) for k, v in d.items()}
+
c0_widgets = OrderedDict()
for k in rsys.substances:
- ck = _C(k)
- if ck == 0:
- max_ = max(*[_C(k) for k in rsys.substances])
- slider_defaults = dict(start=0, end=max_, step=max_/100)
+ if conc_bounds is not None and k in conc_bounds:
+ if k in slider_kwargs:
+ raise ValueError("Key '%s' both in slider_kwargs and conc_bounds" % k)
+ slider_defaults = _dict_to_unitless(conc_bounds[k], output_conc_unit)
else:
- slider_defaults = dict(start=_C(k)/2, end=_C(k)*2, step=_C(k)/10)
+ ck = _C(k)
+ if ck == 0:
+ max_ = max(*[_C(k) for k in rsys.substances])
+ slider_defaults = dict(start=0, end=max_, step=max_/100)
+ else:
+ slider_defaults = dict(start=_C(k)/2, end=_C(k)*2, step=_C(k)/10)
c0_widgets[k] = Slider(
title=k if output_conc_unit is 1 else k + ' / ' + output_conc_unit.dimensionality.unicode,
value=_C(k), **slider_kwargs.get(k, slider_defaults)
)
- def _dict_to_unitless(d, u):
- return {k: to_unitless(v, u) for k, v in d.items()}
param_widgets = OrderedDict([
(k, Slider(title=k if u is None else k + ' / ' + u.dimensionality.unicode,
|
Further fixes to bkh.py
|
bjodah_chempy
|
train
|
c05ece66e31e171af2bb3cb691ec9b72216496f8
|
diff --git a/lib/mail/message.rb b/lib/mail/message.rb
index <HASH>..<HASH> 100644
--- a/lib/mail/message.rb
+++ b/lib/mail/message.rb
@@ -1975,8 +1975,9 @@ module Mail
end
def raw_source=(value)
- value.force_encoding("binary") if RUBY_VERSION >= "1.9.1"
@raw_source = value.to_crlf
+ @raw_source.force_encoding("binary") if RUBY_VERSION >= "1.9.1"
+ @raw_source
end
# see comments to body=. We take data and process it lazily
diff --git a/spec/mail/message_spec.rb b/spec/mail/message_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/mail/message_spec.rb
+++ b/spec/mail/message_spec.rb
@@ -339,9 +339,12 @@ describe Mail::Message do
end
it "should parse non-UTF8 sources" do
- mail = Mail.read(fixture('emails', 'multi_charset', 'japanese_shiftjis.eml'))
+ raw_message = File.read(fixture('emails', 'multi_charset', 'japanese_shiftjis.eml'))
+ original_encoding = raw_message.encoding if raw_message.respond_to?(:encoding)
+ mail = Mail.new(raw_message)
mail.to.should eq ["raasdnil@gmail.com"]
mail.decoded.should eq "すみません。\n\n"
+ raw_message.encoding.should eq original_encoding if raw_message.respond_to?(:encoding)
end
end
|
don't change encoding of string input to Mail.new
|
mikel_mail
|
train
|
5fdac428bc2d27914400a634624a82ffbe7ac3b2
|
diff --git a/src/Illuminate/Database/Eloquent/Model.php b/src/Illuminate/Database/Eloquent/Model.php
index <HASH>..<HASH> 100755
--- a/src/Illuminate/Database/Eloquent/Model.php
+++ b/src/Illuminate/Database/Eloquent/Model.php
@@ -2691,8 +2691,10 @@ abstract class Model implements ArrayAccess, Arrayable, Jsonable, JsonSerializab
if ($this->hasCast($key))
{
$type = $this->getCastType($key);
-
- return $type === 'array' || $type === 'json' || $type === 'object';
+
+ $jsonCastables = array('array', 'json', 'object', 'collection');
+
+ return in_array($type, $jsonCastables, true);
}
return false;
|
Added `collection` to the isJsonCastable()
|
laravel_framework
|
train
|
ad0677d6a91efec5711c80b27e618904a3a19fc9
|
diff --git a/benchmark/worker/benchmark_client.go b/benchmark/worker/benchmark_client.go
index <HASH>..<HASH> 100644
--- a/benchmark/worker/benchmark_client.go
+++ b/benchmark/worker/benchmark_client.go
@@ -224,30 +224,32 @@ func doCloseLoopStreamingBenchmark(h *stats.Histogram, conns []*grpc.ClientConn,
} else {
doRPC = benchmark.DoStreamingRoundTrip
}
- streams := make([]testpb.BenchmarkService_StreamingCallClient, len(conns))
+ streams := make([]testpb.BenchmarkService_StreamingCallClient, len(conns)*rpcCount)
for ic, conn := range conns {
- c := testpb.NewBenchmarkServiceClient(conn)
- s, err := c.StreamingCall(context.Background())
- if err != nil {
- grpclog.Printf("%v.StreamingCall(_) = _, %v", c, err)
- }
- streams[ic] = s
- for j := 0; j < 100/len(conns); j++ {
- doRPC(streams[ic], reqSize, respSize)
+ for is := 0; is < rpcCount; is++ {
+ c := testpb.NewBenchmarkServiceClient(conn)
+ s, err := c.StreamingCall(context.Background())
+ if err != nil {
+ grpclog.Printf("%v.StreamingCall(_) = _, %v", c, err)
+ }
+ streams[ic*rpcCount+is] = s
+ for j := 0; j < 100/len(conns); j++ {
+ doRPC(streams[ic], reqSize, respSize)
+ }
}
}
var wg sync.WaitGroup
wg.Add(len(conns) * rpcCount)
var mu sync.Mutex
for ic, _ := range conns {
- for j := 0; j < rpcCount; j++ {
- go func() {
+ for is := 0; is < rpcCount; is++ {
+ go func(ic, is int) {
defer wg.Done()
for {
done := make(chan bool)
go func() {
start := time.Now()
- if err := doRPC(streams[ic], reqSize, respSize); err != nil {
+ if err := doRPC(streams[ic*rpcCount+is], reqSize, respSize); err != nil {
done <- false
return
}
@@ -264,7 +266,7 @@ func doCloseLoopStreamingBenchmark(h *stats.Histogram, conns []*grpc.ClientConn,
case <-done:
}
}
- }()
+ }(ic, is)
}
}
grpclog.Printf("close loop done, count: %v", rpcCount)
|
Close loop: Create multiple streams on one connection
|
grpc_grpc-go
|
train
|
9b659376e38f7b07f18ba383f1ba7900227ef599
|
diff --git a/sql_metadata.py b/sql_metadata.py
index <HASH>..<HASH> 100644
--- a/sql_metadata.py
+++ b/sql_metadata.py
@@ -97,6 +97,8 @@ def get_query_columns(query: str) -> List[str]:
"LIKE",
"CASE",
"WHEN",
+ "DISTINCT",
+ "UNIQUE",
]
# these keywords are followed by columns reference
|
Add DISTINCT and UNIQUE to keywords_ignored (fixes #<I>)
|
macbre_sql-metadata
|
train
|
e96ca92c9d25f319991f38327a905b8cac3fa3e1
|
diff --git a/core/src/main/java/com/dtolabs/rundeck/core/resources/BaseFileResourceModelSource.java b/core/src/main/java/com/dtolabs/rundeck/core/resources/BaseFileResourceModelSource.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/dtolabs/rundeck/core/resources/BaseFileResourceModelSource.java
+++ b/core/src/main/java/com/dtolabs/rundeck/core/resources/BaseFileResourceModelSource.java
@@ -49,7 +49,7 @@ import java.nio.file.Files;
* <ul>
* <li>{@link #getResourceFormat()}</li>
* <li>{@link #getDocumentFileExtension()}</li>
- * <li>{@link #writeFileData(InputStream)}</li>
+ * <li>{@link #writeFileData(long, InputStream)}</li>
* <li>{@link #openFileDataInputStream()}</li>
* </ul>
*
@@ -124,7 +124,7 @@ public abstract class BaseFileResourceModelSource implements ResourceModelSource
/**
* Writes the data to a temp file, and attempts to parser it, then if successful it will
- * call {@link #writeFileData(InputStream)} to invoke the sub class
+ * call {@link #writeFileData(long, InputStream)} to invoke the sub class
*
* @param data data
*
@@ -156,7 +156,7 @@ public abstract class BaseFileResourceModelSource implements ResourceModelSource
throw new ResourceModelSourceException(e);
}
try (FileInputStream tempStream = new FileInputStream(temp)) {
- return writeFileData(tempStream);
+ return writeFileData(temp.length(), tempStream);
}
} finally {
temp.delete();
@@ -167,14 +167,25 @@ public abstract class BaseFileResourceModelSource implements ResourceModelSource
* Write the file data from the inputstream to the backing store
*
* @param tempStream input stream
- *
* @return bytes writen
- *
* @throws IOException
*/
public abstract long writeFileData(final InputStream tempStream) throws IOException;
/**
+ * Write the file data from the inputstream to the backing store, this implementation calls {@link
+ * #writeFileData(InputStream)} but can be overridden
+ *
+ * @param length data length
+ * @param tempStream input stream
+ * @return bytes writen
+ * @throws IOException
+ */
+ protected long writeFileData(final long length, final InputStream tempStream) throws IOException {
+ return writeFileData(tempStream);
+ }
+
+ /**
* @return an input stream that reads the data from the backing store
*
* @throws IOException
diff --git a/core/src/main/java/com/dtolabs/rundeck/core/resources/FileResourceModelSource.java b/core/src/main/java/com/dtolabs/rundeck/core/resources/FileResourceModelSource.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/dtolabs/rundeck/core/resources/FileResourceModelSource.java
+++ b/core/src/main/java/com/dtolabs/rundeck/core/resources/FileResourceModelSource.java
@@ -16,10 +16,10 @@
/*
* FileResourceModelSource.java
-*
+*
* User: Greg Schueler <a href="mailto:greg@dtosolutions.com">greg@dtosolutions.com</a>
* Created: 7/19/11 11:28 AM
-*
+*
*/
package com.dtolabs.rundeck.core.resources;
|
add length argument to writeFileData method
|
rundeck_rundeck
|
train
|
adae66f47cf11bb7a686c98059436f496d881955
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -30,16 +30,13 @@ exports.parse = function parse (s, env) {
.replace(/\\([ "'\\$`(){}!#&*|])/g, '$1')
;
}
- else return s
- .replace(/(['"])((\\\1|[^\1])*?)\1|[^'"]+/g, function (s, q) {
+ else return s.replace(
+ /(['"])((\\\1|[^\1])*?)\1|[^'"]+/g,
+ function (s, q) {
if (/^['"]/.test(s)) return parse(s, env);
- return s
- .replace(/(^|[^\\])\$(\w+)/g, getVar)
- .replace(/(^|[^\\])\${(\w+)}/g, getVar)
- .replace(/\\([ "'\\$`(){}!#&*|])/g, '$1')
- ;
- })
- ;
+ return parse('"' + s + '"', env);
+ }
+ );
});
function getVar (_, pre, key) {
|
cleaner implementation recursing on the double quote case
|
substack_node-shell-quote
|
train
|
8c708b4f3d104e0de00f9f13ff88a10a5bb68c24
|
diff --git a/harpoon/overview.py b/harpoon/overview.py
index <HASH>..<HASH> 100644
--- a/harpoon/overview.py
+++ b/harpoon/overview.py
@@ -176,7 +176,10 @@ class Harpoon(object):
configuration.converters.done(path, meta.result)
for key, v in val.items(ignore_converters=True):
+ if isinstance(v, MergedOptions):
+ v.ignore_converters = True
meta.result[key] = v
+
return spec.normalise(meta, meta.result)
converter = Converter(convert=convert_image, convert_path=["images", image])
|
Make sure converter doesn't trip over itself
|
delfick_harpoon
|
train
|
bf7593a95c5e6af337559102026655b506e1fc57
|
diff --git a/src/main/java/cz/jiripinkas/jsitemapgenerator/WebSitemapGenerator.java b/src/main/java/cz/jiripinkas/jsitemapgenerator/WebSitemapGenerator.java
index <HASH>..<HASH> 100644
--- a/src/main/java/cz/jiripinkas/jsitemapgenerator/WebSitemapGenerator.java
+++ b/src/main/java/cz/jiripinkas/jsitemapgenerator/WebSitemapGenerator.java
@@ -88,7 +88,6 @@ public String constructSitemapString() {
public void pingGoogle(String sitemapUrl) {
try {
String pingUrl = "http://www.google.com/webmasters/tools/ping?sitemap=" + URLEncoder.encode(sitemapUrl, "UTF-8");
- System.out.println("Will ping this URL: " + pingUrl);
// ping Google
int returnCode = HttpClientUtil.get(pingUrl);
if (returnCode != 200) {
@@ -109,7 +108,6 @@ public String constructSitemapString() {
public void pingBing(String sitemapUrl) {
try {
String pingUrl = "http://www.bing.com/ping?sitemap=" + URLEncoder.encode(sitemapUrl, "UTF-8");
- System.out.println("Will ping this URL: " + pingUrl);
// ping Bing
int returnCode = HttpClientUtil.get(pingUrl);
if (returnCode != 200) {
|
removed System.out.println
|
jirkapinkas_jsitemapgenerator
|
train
|
72b27c7d68951a2e3f8dbed84eb78899ed19e3a1
|
diff --git a/simpledist/distributions.py b/simpledist/distributions.py
index <HASH>..<HASH> 100644
--- a/simpledist/distributions.py
+++ b/simpledist/distributions.py
@@ -382,8 +382,9 @@ class Distribution_FromH5(Distribution):
"""
def __init__(self,filename,path='',**kwargs):
fns = pd.read_hdf(filename,path+'/fns')
- store = pd.HDFStore(filename)
+ store = pd.HDFStore(filename,'r')
if '{}/samples'.format(path) in store:
+ store.close()
samples = pd.read_hdf(filename,path+'/samples')
self.samples = np.array(samples)
minval = fns['vals'].iloc[0]
@@ -399,6 +400,8 @@ class Distribution_FromH5(Distribution):
cdf = interpolate(fns['vals'],fns['cdf'],s=0,k=1)
Distribution.__init__(self,pdf,cdf,minval=minval,maxval=maxval,
**kwargs)
+
+ store = pd.HDFStore(filename,'r')
try:
keywords = store.get_storer('{}/fns'.format(path)).attrs.keywords
for kw,val in keywords.iteritems():
|
fixed bug that left HDFStore open
|
timothydmorton_simpledist
|
train
|
20eef84b80a91e5171575622be4487866898177a
|
diff --git a/gridsome/lib/app/loadConfig.js b/gridsome/lib/app/loadConfig.js
index <HASH>..<HASH> 100644
--- a/gridsome/lib/app/loadConfig.js
+++ b/gridsome/lib/app/loadConfig.js
@@ -552,6 +552,7 @@ function normalizeImages (config = {}) {
defaultQuality: Joi.number().default(75).min(0).max(100),
backgroundColor: Joi.string().allow(null).default(null),
defaultBlur: Joi.number().default(defaultPlaceholder.defaultBlur),
+ removeUnused: Joi.boolean().default(true),
placeholder: Joi.alternatives()
.default(defaultPlaceholder)
.try([
diff --git a/gridsome/lib/build.js b/gridsome/lib/build.js
index <HASH>..<HASH> 100644
--- a/gridsome/lib/build.js
+++ b/gridsome/lib/build.js
@@ -1,3 +1,4 @@
+const path = require('path')
const fs = require('fs-extra')
const pMap = require('p-map')
const hirestime = require('hirestime')
@@ -115,6 +116,10 @@ async function processImages (images, config) {
const totalAssets = images.queue.length
const totalJobs = chunks.length
+ const existingImages = !config.emptyOutputDir
+ ? await fs.readdir(config.imagesDir)
+ : []
+
let progress = 0
writeLine(`Processing images (${totalAssets} images) - 0%`)
@@ -139,4 +144,17 @@ async function processImages (images, config) {
worker.end()
writeLine(`Process images (${totalAssets} images) - ${timer(hirestime.S)}s\n`)
+
+ // Remove images that existed before this build started but isn't in use.
+ if (config.images.removeUnused && existingImages.length) {
+ const newImages = images.queue.map((c) => path.basename(c.destPath))
+ const extraImages = existingImages.filter((value) => !newImages.includes(value))
+
+ if (extraImages.length) {
+ for (const filename of extraImages) {
+ await fs.remove(path.join(config.imagesDir, filename))
+ }
+ info(`- Removed ${extraImages.length} images that where no longer in use`)
+ }
+ }
}
|
chore: remove unused images when keeping dist
|
gridsome_gridsome
|
train
|
cb07b96a6285b7dbeb7abc6d8faf2f2c18738e6d
|
diff --git a/client/deis.py b/client/deis.py
index <HASH>..<HASH> 100755
--- a/client/deis.py
+++ b/client/deis.py
@@ -415,7 +415,7 @@ class DeisClient(object):
Options:
--cluster=<cluster>
- target cluster to host application (default: dev).
+ target cluster to host application [default: dev].
--no-remote
do not create a `deis` git remote.
"""
|
fix(client): restore `--cluster=dev` as apps:create default
In refactoring the Deis CLI docstrings, we broke the default cluster
for apps:create. This was pointed out by smoke_test.go.
|
deis_deis
|
train
|
a11f6aaa914df773366eb37a9164e473e733d915
|
diff --git a/src/Collection/Helpers/ArrayCollectionHelper.php b/src/Collection/Helpers/ArrayCollectionHelper.php
index <HASH>..<HASH> 100644
--- a/src/Collection/Helpers/ArrayCollectionHelper.php
+++ b/src/Collection/Helpers/ArrayCollectionHelper.php
@@ -129,7 +129,10 @@ class ArrayCollectionHelper
{
if ($value instanceof IEntity) {
return $value->hasValue('id') ? $value->getValue('id') : null;
- } elseif (isset($propertyMetadata->types['datetime']) && $value !== null) {
+ } elseif (
+ (isset($propertyMetadata->types[\DateTimeImmutable::class]) || isset($propertyMetadata->types[\Nextras\Dbal\Utils\DateTimeImmutable::class]))
+ && $value !== null
+ ) {
if (!$value instanceof DateTimeInterface) {
$value = new DateTimeImmutable($value);
}
diff --git a/src/Entity/Reflection/MetadataParser.php b/src/Entity/Reflection/MetadataParser.php
index <HASH>..<HASH> 100644
--- a/src/Entity/Reflection/MetadataParser.php
+++ b/src/Entity/Reflection/MetadataParser.php
@@ -180,12 +180,12 @@ class MetadataParser implements IMetadataParser
$type = $aliases[$typeLower];
} else {
$type = Reflection::expandClassName($type, $this->currentReflection);
- if ($type === DateTimeImmutable::class) {
- $type = 'datetime';
+ if ($type === DateTimeImmutable::class || $type === \Nextras\Dbal\Utils\DateTimeImmutable::class) {
+ // these datetime types are allowed
} elseif (is_subclass_of($type, DateTimeImmutable::class)) {
- throw new NotSupportedException("Type '{$type}' in {$this->currentReflection->name}::\${$property->name} property is not supported (a subclass of \DateTimeImmutable). Use directly the \DateTimeImmutable type.");
+ throw new NotSupportedException("Type '{$type}' in {$this->currentReflection->name}::\${$property->name} property is not supported (a subclass of \DateTimeImmutable). Use directly the \DateTimeImmutable or \Nextras\Dbal\Utils\DateTimeImmutable type.");
} elseif ($type === DateTime::class || is_subclass_of($type, DateTime::class)) {
- throw new NotSupportedException("Type '{$type}' in {$this->currentReflection->name}::\${$property->name} property is not supported anymore. Use \DateTimeImmutable type.");
+ throw new NotSupportedException("Type '{$type}' in {$this->currentReflection->name}::\${$property->name} property is not supported anymore. Use \DateTimeImmutable or \Nextras\Dbal\Utils\DateTimeImmutable type.");
}
}
$parsedTypes[$type] = true;
diff --git a/src/Entity/Reflection/PropertyMetadata.php b/src/Entity/Reflection/PropertyMetadata.php
index <HASH>..<HASH> 100644
--- a/src/Entity/Reflection/PropertyMetadata.php
+++ b/src/Entity/Reflection/PropertyMetadata.php
@@ -70,8 +70,7 @@ class PropertyMetadata
}
foreach ($this->types as $type => $_) {
- $type = strtolower($type);
- if ($type === 'datetime') {
+ if ($type === \DateTimeImmutable::class) {
if ($value instanceof \DateTimeImmutable) {
return true;
@@ -89,7 +88,28 @@ class PropertyMetadata
return true;
}
- } elseif ($type === 'string') {
+ } elseif ($type === \Nextras\Dbal\Utils\DateTimeImmutable::class) {
+ if ($value instanceof \Nextras\Dbal\Utils\DateTimeImmutable) {
+ return true;
+
+ } elseif ($value instanceof \DateTimeInterface) {
+ $value = new \Nextras\Dbal\Utils\DateTimeImmutable($value->format('c'));
+ return true;
+
+ } elseif (is_string($value) && $value !== '') {
+ $tmp = new \Nextras\Dbal\Utils\DateTimeImmutable($value);
+ $value = $tmp->setTimezone(new DateTimeZone(date_default_timezone_get()));
+ return true;
+
+ } elseif (ctype_digit($value)) {
+ $value = new \Nextras\Dbal\Utils\DateTimeImmutable("@{$value}");
+ return true;
+ }
+
+ }
+
+ $type = strtolower($type);
+ if ($type === 'string') {
if (is_string($value)) {
return true;
}
diff --git a/tests/inc/model/author/Author.php b/tests/inc/model/author/Author.php
index <HASH>..<HASH> 100644
--- a/tests/inc/model/author/Author.php
+++ b/tests/inc/model/author/Author.php
@@ -2,7 +2,7 @@
namespace NextrasTests\Orm;
-use DateTimeImmutable;
+use Nextras\Dbal\Utils\DateTimeImmutable;
use Nextras\Orm\Entity\Entity;
use Nextras\Orm\Relationships\OneHasMany as OHM;
|
entity: added back support for Nextras\Dbal\DateTimeImmutable type
|
nextras_orm
|
train
|
53b7d5b8a1a42cf19b76a7a901461bd1c2478368
|
diff --git a/hugolib/shortcode.go b/hugolib/shortcode.go
index <HASH>..<HASH> 100644
--- a/hugolib/shortcode.go
+++ b/hugolib/shortcode.go
@@ -93,7 +93,7 @@ func ShortcodesHandle(stringToParse string, p *Page, t Template) string {
var data = &ShortcodeWithPage{Params: params, Page: p}
if endStart > 0 {
s := stringToParse[leadEnd+3 : leadEnd+endStart]
- data.Inner = template.HTML(CleanP(ShortcodesHandle(s, p, t)))
+ data.Inner = template.HTML(renderBytes([]byte(CleanP(ShortcodesHandle(s, p, t))), p.guessMarkupType()))
remainder := CleanP(stringToParse[leadEnd+endEnd:])
return CleanP(stringToParse[:leadStart]) +
diff --git a/hugolib/shortcode_test.go b/hugolib/shortcode_test.go
index <HASH>..<HASH> 100644
--- a/hugolib/shortcode_test.go
+++ b/hugolib/shortcode_test.go
@@ -53,8 +53,20 @@ func TestInnerSC(t *testing.T) {
tem.AddInternalShortcode("inside.html", `<div{{with .Get "class"}} class="{{.}}"{{end}}>{{ .Inner }}</div>`)
CheckShortCodeMatch(t, `{{% inside class="aspen" %}}`, `<div class="aspen"></div>`, tem)
- CheckShortCodeMatch(t, `{{% inside class="aspen" %}}More Here{{% /inside %}}`, `<div class="aspen">More Here</div>`, tem)
- CheckShortCodeMatch(t, `{{% inside %}}More Here{{% /inside %}}`, `<div>More Here</div>`, tem)
+ CheckShortCodeMatch(t, `{{% inside class="aspen" %}}More Here{{% /inside %}}`, "<div class=\"aspen\"><p>More Here</p>\n</div>", tem)
+ CheckShortCodeMatch(t, `{{% inside %}}More Here{{% /inside %}}`, "<div><p>More Here</p>\n</div>", tem)
+}
+
+func TestInnerSCWithMarkdown(t *testing.T) {
+ tem := NewTemplate()
+ tem.AddInternalShortcode("inside.html", `<div{{with .Get "class"}} class="{{.}}"{{end}}>{{ .Inner }}</div>`)
+
+ CheckShortCodeMatch(t, `{{% inside %}}
+# More Here
+
+[link](http://spf13.com) and text
+
+{{% /inside %}}`, "<div><h1>More Here</h1>\n\n<p><a href=\"http://spf13.com\">link</a> and text</p>\n</div>", tem)
}
func TestEmbeddedSC(t *testing.T) {
|
Inner Shortcodes now treated as markdown. fixed #<I>
|
gohugoio_hugo
|
train
|
4080ee7004449df08a7acb031ab3cf777d68fe95
|
diff --git a/tt/distutils.py b/tt/distutils.py
index <HASH>..<HASH> 100644
--- a/tt/distutils.py
+++ b/tt/distutils.py
@@ -5,6 +5,6 @@ def get_extra_fflags():
fflags = []
fcompiler = customized_fcompiler()
if fcompiler.compiler_type in ('g95', 'gnu', 'gnu95'):
- if fcompiler.get_version() >= '11.0':
+ if fcompiler.get_version() >= '11':
fflags.append('-fallow-argument-mismatch')
return fflags
|
change version to '<I>' from '<I>'
Here is an illustration of the issue:
```
>>> from numpy.distutils import customized_fcompiler
>>> v=fcompiler.get_version(); v
LooseVersion ('<I>')
>>> v>="<I>"
False
>>> v>="<I>"
True
```
|
oseledets_ttpy
|
train
|
825f169342c2ecd390ace2b40cb951bb437a4c77
|
diff --git a/claripy/vsa/strided_interval.py b/claripy/vsa/strided_interval.py
index <HASH>..<HASH> 100644
--- a/claripy/vsa/strided_interval.py
+++ b/claripy/vsa/strided_interval.py
@@ -1864,6 +1864,11 @@ class StridedInterval(BackendObject):
return new_si.bitwise_or(new_b)
def extract(self, high_bit, low_bit):
+
+ if self._reversed:
+ reversed = self._reverse()
+ return reversed.extract(high_bit, low_bit)
+
assert low_bit >= 0
bits = high_bit - low_bit + 1
@@ -2324,34 +2329,34 @@ class StridedInterval(BackendObject):
def _reverse(self):
"""
This function does the reversing for real.
- :return:
+ :return: A new reversed StridedInterval instance
"""
- if self.bits == 8:
+ o = self.copy()
+ # Clear the reversed flag
+ o._reversed = not o._reversed
+
+ if o.bits == 8:
# No need for reversing
- return self.copy()
+ return o.copy()
- if self.is_top:
+ if o.is_top:
# A TOP is still a TOP after reversing
- si = self.copy()
- si._reversed = False
+ si = o.copy()
return si
else:
- if self.uninitialized:
- return self.copy()
-
- if not self.is_integer:
+ if not o.is_integer:
# We really don't want to do that. Something is wrong.
logger.warning('Reversing a real strided-interval %s is bad', self)
# Reversing an integer is easy
- rounded_bits = ((self.bits + 7) / 8) * 8
+ rounded_bits = ((o.bits + 7) / 8) * 8
list_bytes = [ ]
si = None
for i in xrange(0, rounded_bits, 8):
- b = self.extract(min(i + 7, self.bits - 1), i)
+ b = o.extract(min(i + 7, o.bits - 1), i)
list_bytes.append(b)
for b in list_bytes:
diff --git a/tests/test_vsa.py b/tests/test_vsa.py
index <HASH>..<HASH> 100644
--- a/tests/test_vsa.py
+++ b/tests/test_vsa.py
@@ -395,6 +395,17 @@ def test_vsa():
si = si_1 & si_2
nose.tools.assert_true(is_equal(si, claripy.SI(bits=32, stride=0, lower_bound=0, upper_bound=0)))
+ # Concatenation: concat with zeros only increases the stride
+ si_1 = claripy.SI(bits=8, stride=0xff, lower_bound=0x0, upper_bound=0xff)
+ si_2 = claripy.SI(bits=8, stride=0, lower_bound=0, upper_bound=0)
+ si = si_1.concat(si_2)
+ nose.tools.assert_true(is_equal(si, claripy.SI(bits=16, stride=0xff00, lower_bound=0, upper_bound=0xff00)))
+
+ # Extract from a reversed value
+ si_1 = claripy.SI(bits=64, stride=0xff, lower_bound=0x0, upper_bound=0xff)
+ si_2 = si_1.reversed[63 : 56]
+ nose.tools.assert_true(is_equal(si_2, claripy.SI(bits=8, stride=0xff, lower_bound=0x0, upper_bound=0xff)))
+
#
# ValueSet
#
|
VSA: Bug fix in StridedInterval.extract() when strided interval is reversed.
Also add two test cases for VSA.
|
angr_claripy
|
train
|
1898268093159eeece19538f09acbc89ef40bda0
|
diff --git a/grimoire_elk/utils.py b/grimoire_elk/utils.py
index <HASH>..<HASH> 100755
--- a/grimoire_elk/utils.py
+++ b/grimoire_elk/utils.py
@@ -52,7 +52,7 @@ from perceval.backends.core.meetup import Meetup, MeetupCommand
from perceval.backends.core.nntp import NNTP, NNTPCommand
from perceval.backends.core.phabricator import Phabricator, PhabricatorCommand
from perceval.backends.core.pipermail import Pipermail, PipermailCommand
-from perceval.backends.puppet.puppetforge import PuppetForge, PuppetForgeCommand
+# from perceval.backends.puppet.puppetforge import PuppetForge, PuppetForgeCommand
from perceval.backends.core.redmine import Redmine, RedmineCommand
from perceval.backends.core.rss import RSS, RSSCommand
from perceval.backends.core.slack import Slack, SlackCommand
@@ -206,7 +206,7 @@ def get_connectors():
"nntp": [NNTP, NNTPOcean, NNTPEnrich, NNTPCommand],
"phabricator": [Phabricator, PhabricatorOcean, PhabricatorEnrich, PhabricatorCommand],
"pipermail": [Pipermail, PipermailOcean, PipermailEnrich, PipermailCommand],
- "puppetforge": [PuppetForge, PuppetForgeOcean, PuppetForgeEnrich, PuppetForgeCommand],
+# "puppetforge": [PuppetForge, PuppetForgeOcean, PuppetForgeEnrich, PuppetForgeCommand],
"redmine": [Redmine, RedmineOcean, RedmineEnrich, RedmineCommand],
"remo": [ReMo, ReMoOcean, ReMoEnrich, ReMoCommand],
"rss": [RSS, RSSOcean, RSSEnrich, RSSCommand],
|
[utils] Remove puppetforge support until there is a pip package and it is supported in grimoirelab_build script
|
chaoss_grimoirelab-elk
|
train
|
c2bcc418e037d6bc2d6b47c2d782900126b4f884
|
diff --git a/db/rdb.go b/db/rdb.go
index <HASH>..<HASH> 100644
--- a/db/rdb.go
+++ b/db/rdb.go
@@ -64,9 +64,6 @@ func (r *RDBDriver) OpenDB(dbType, dbPath string, debugSQL bool) (err error) {
return
}
r.conn.LogMode(debugSQL)
- if r.name == dialectSqlite3 {
- r.conn.Exec("PRAGMA journal_mode=WAL;")
- }
return
}
|
fix(db): no wal mode with SQLite3 backend to avoid `database is locked` (#<I>)
|
kotakanbe_go-cve-dictionary
|
train
|
00b855ab867bd9d6c740777532e141cabb6f81ba
|
diff --git a/labm8/py/fs_test.py b/labm8/py/fs_test.py
index <HASH>..<HASH> 100644
--- a/labm8/py/fs_test.py
+++ b/labm8/py/fs_test.py
@@ -17,8 +17,6 @@ import pathlib
import stat
import tempfile
-import pytest
-
from labm8.py import app
from labm8.py import fs
from labm8.py import system
|
Remove unused import.
Signed-off-by: format <I> <github.com/ChrisCummins/format>
|
ChrisCummins_labm8
|
train
|
2ffd4704c6f37d7fb10110450fe035fa6df08db8
|
diff --git a/issue_milestone.go b/issue_milestone.go
index <HASH>..<HASH> 100644
--- a/issue_milestone.go
+++ b/issue_milestone.go
@@ -48,7 +48,7 @@ func (c *Client) CreateMilestone(owner, repo string, opt CreateMilestoneOption)
}
type EditMilestoneOption struct {
- Title *string `json:"title"`
+ Title string `json:"title"`
Description *string `json:"description"`
State *string `json:"state"`
Deadline *time.Time `json:"due_on"`
|
Change EditMilestoneOption.Title to non-pointer
|
gogs_go-gogs-client
|
train
|
4ef63c97c450a9c8ffd744cd47f51159dbbf229b
|
diff --git a/lib/cinch/irc.rb b/lib/cinch/irc.rb
index <HASH>..<HASH> 100644
--- a/lib/cinch/irc.rb
+++ b/lib/cinch/irc.rb
@@ -368,7 +368,7 @@ module Cinch
else
# away
m.user.sync(:away, msg.message, true)
- events << [:away, m.user, msg.message]
+ events << [:away, m.user]
end
end
|
do not pass msg.message to :away handlers
They can just use msg.message themselves
|
cinchrb_cinch
|
train
|
6a5b4a33858e2b8b308968379eb27da9e816f0af
|
diff --git a/app/controllers/admin/sidebar_controller.rb b/app/controllers/admin/sidebar_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/admin/sidebar_controller.rb
+++ b/app/controllers/admin/sidebar_controller.rb
@@ -5,6 +5,7 @@ class Admin::SidebarController < Admin::BaseController
# Reset the staged position based on the active position.
Sidebar.delete_all('active_position is null')
flash_sidebars
+ @active = Sidebar.find(:all, :order => 'active_position ASC') unless @active
end
def set_active
@@ -79,8 +80,8 @@ class Admin::SidebarController < Admin::BaseController
def flash_sidebars
unless flash[:sidebars]
- @active = Sidebar.find(:all, :order => 'active_position ASC')
- flash[:sidebars] = @active.map {|sb| sb.id }
+ active = Sidebar.find(:all, :order => 'active_position ASC')
+ flash[:sidebars] = active.map {|sb| sb.id }
end
flash[:sidebars]
end
|
fix admin sidebar issue. Order is kept
|
publify_publify
|
train
|
a1964b49f18919f5aaed17d3c2f6b48a35634b5c
|
diff --git a/parser/context.go b/parser/context.go
index <HASH>..<HASH> 100644
--- a/parser/context.go
+++ b/parser/context.go
@@ -263,7 +263,7 @@ func specialChildren(node ast.Node) []ast.Node {
case *ast.ArrayComp:
return []ast.Node{node.Body}
case *ast.ObjectComp:
-
+ return inObjectFieldsChildren(node.Fields)
case *ast.Self:
return nil
case *ast.SuperIndex:
@@ -274,6 +274,8 @@ func specialChildren(node ast.Node) []ast.Node {
return nil
case *ast.Var:
return nil
+ case *ast.Parens:
+ return nil
}
panic(fmt.Sprintf("specialChildren: Unknown node %#v", node))
}
|
support for traversing AST tree with parens and objectcomps
|
google_go-jsonnet
|
train
|
68a27239b487a520f4031cb44801dac3acb61453
|
diff --git a/tests/pycut_boundary_penalties_test.py b/tests/pycut_boundary_penalties_test.py
index <HASH>..<HASH> 100644
--- a/tests/pycut_boundary_penalties_test.py
+++ b/tests/pycut_boundary_penalties_test.py
@@ -20,15 +20,8 @@ from PyQt4.QtGui import QApplication
import numpy as np
-try:
- from imcut import pycut
- from imcut import seed_editor_qt
- import imcut.dcmreaddata as dcmr
-except:
- print("Deprecated of pyseg_base as submodule")
- import pycut
- import seed_editor_qt
- import dcmreaddata as dcmr
+from imcut import pycut
+# import imcut.dcmreaddata as dcmr
class PycutTest(unittest.TestCase):
@@ -124,6 +117,7 @@ class PycutTest(unittest.TestCase):
import pdb; pdb.set_trace()
#np.exp(-np.random.normal(0
+ from seededitorqt import seed_editor_qt
from PyQt4.QtGui import QApplication
app = QApplication(sys.argv)
pyed = seed_editor_qt.QTSeedEditor(filtered2)
|
removed unused import for seededitorqt
|
mjirik_imcut
|
train
|
8c0ecf4982c5aa08da31bd6583087951140b4627
|
diff --git a/.rubocop.yml b/.rubocop.yml
index <HASH>..<HASH> 100644
--- a/.rubocop.yml
+++ b/.rubocop.yml
@@ -21,49 +21,10 @@ Layout/FirstArgumentIndentation:
EnforcedStyle: consistent
Layout/MultilineArrayBraceLayout:
EnforcedStyle: new_line
-Layout/FirstArrayElementLineBreak:
- Enabled: true
-Layout/FirstHashElementLineBreak:
- Enabled: true
-Layout/FirstMethodArgumentLineBreak:
- Enabled: true
-Layout/FirstMethodParameterLineBreak:
- Enabled: true
-Layout/SpaceAroundMethodCallOperator:
- Enabled: true
-Layout/EmptyLinesAroundAttributeAccessor:
- Enabled: true
-
-Lint/RaiseException:
- Enabled: true
-Lint/StructNewOverride:
- Enabled: true
-Lint/DeprecatedOpenSSLConstant:
- Enabled: true
-Lint/MixedRegexpCaptureTypes:
- Enabled: true
-
-Style/ParenthesesAroundCondition:
- AllowInMultilineConditions: true
-Style/HashEachMethods:
- Enabled: true
-Style/HashTransformKeys:
- Enabled: true
-Style/HashTransformValues:
- Enabled: true
-Style/ExponentialNotation:
- Enabled: true
-Style/SlicingWithRange:
- Enabled: true
-Style/RedundantRegexpCharacterClass:
- Enabled: true
-Style/RedundantRegexpEscape:
- Enabled: true
-Style/RedundantFetchBlock:
- Enabled: true
AllCops:
TargetRubyVersion: 2.5
+ NewCops: enable
Metrics/BlockLength:
Exclude:
diff --git a/lib/flame/router/routes_refine/mounting.rb b/lib/flame/router/routes_refine/mounting.rb
index <HASH>..<HASH> 100644
--- a/lib/flame/router/routes_refine/mounting.rb
+++ b/lib/flame/router/routes_refine/mounting.rb
@@ -47,10 +47,7 @@ module Flame
def should_be_mounted?(controller)
if controller.instance_of?(Module)
controller.const_defined?(:IndexController, false)
- elsif (
- controller.actions.empty? ||
- @reverse_routes.key?(controller.to_s)
- )
+ elsif controller.actions.empty? || @reverse_routes.key?(controller.to_s)
false
else
true
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index <HASH>..<HASH> 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -17,7 +17,7 @@ require 'pry-byebug'
require_relative '../lib/flame'
Dir["#{__dir__}/**/spec_helper.rb"].sort.each do |spec_helper|
- next if spec_helper.match?(/require_dirs/)
+ next if spec_helper.include?('require_dirs')
require spec_helper
end
|
Enable all new RuboCop cops
Fix some offenses.
|
AlexWayfer_flame
|
train
|
18b41711bf4e464e0c8697b9cb0d6e742896da81
|
diff --git a/system/modules/generalDriver/DcGeneral/Contao/View/Contao2BackendView/BaseView.php b/system/modules/generalDriver/DcGeneral/Contao/View/Contao2BackendView/BaseView.php
index <HASH>..<HASH> 100644
--- a/system/modules/generalDriver/DcGeneral/Contao/View/Contao2BackendView/BaseView.php
+++ b/system/modules/generalDriver/DcGeneral/Contao/View/Contao2BackendView/BaseView.php
@@ -86,13 +86,6 @@ class BaseView implements BackendViewInterface
protected $environment;
/**
- * The widget manager.
- *
- * @var WidgetManagerInterface
- */
- protected $widgetManager;
-
- /**
* @var PanelContainerInterface
*/
protected $panel;
@@ -143,8 +136,6 @@ class BaseView implements BackendViewInterface
public function setEnvironment(EnvironmentInterface $environment)
{
$this->environment = $environment;
- // TODO is this a good place to create the WidgetManager?
- $this->widgetManager = new ContaoWidgetManager($environment);
}
/**
@@ -1876,16 +1867,20 @@ class BaseView implements BackendViewInterface
}
/**
- * {@inheritdoc}
+ * Process input and return all modified properties or null if there is no input.
+ *
+ * @param ContaoWidgetManager $widgetManager
+ *
+ * @return null|PropertyValueBag
*/
- public function processInput()
+ public function processInput($widgetManager)
{
$input = $this->getEnvironment()->getInputProvider();
if ($_POST && $input->getValue('FORM_SUBMIT') == $this->getEnvironment()->getDataDefinition()->getName())
{
$propertyValues = new PropertyValueBag();
- $propertyNames = $this->getEnvironment()->getDataDefinition()->getPropertyNames();
+ $propertyNames = $this->getEnvironment()->getDataDefinition()->getPropertiesDefinition()->getPropertyNames();
// process input and update changed properties.
foreach ($propertyNames as $propertyName)
@@ -1897,7 +1892,7 @@ class BaseView implements BackendViewInterface
}
}
- $this->widgetManager->processInput($propertyValues);
+ $widgetManager->processInput($propertyValues);
return $propertyValues;
}
diff --git a/system/modules/generalDriver/DcGeneral/View/Widget/ContaoWidgetManager.php b/system/modules/generalDriver/DcGeneral/View/Widget/ContaoWidgetManager.php
index <HASH>..<HASH> 100644
--- a/system/modules/generalDriver/DcGeneral/View/Widget/ContaoWidgetManager.php
+++ b/system/modules/generalDriver/DcGeneral/View/Widget/ContaoWidgetManager.php
@@ -28,14 +28,25 @@ class ContaoWidgetManager implements WidgetManagerInterface
protected $environment;
/**
+ * @var \DcGeneral\Data\ModelInterface
+ */
+ protected $model;
+
+ /**
* A list with all widgets
* @var array
*/
protected $arrWidgets = array();
- function __construct(EnvironmentInterface $environment)
+ /**
+ * @param EnvironmentInterface $environment
+ *
+ * @param \DcGeneral\Data\ModelInterface $model
+ */
+ function __construct(EnvironmentInterface $environment, $model)
{
$this->environment = $environment;
+ $this->model = $model;
}
/**
|
Made BaseView widgetManager dynamically instantiated (as it is only needed for editing).
|
contao-community-alliance_dc-general
|
train
|
9b24fc392a62cc939c928146d77a2ccfb4269803
|
diff --git a/tests/spec/RulerZ/Executor/DoctrineQueryBuilder/AutoJoinSpec.php b/tests/spec/RulerZ/Executor/DoctrineQueryBuilder/AutoJoinSpec.php
index <HASH>..<HASH> 100644
--- a/tests/spec/RulerZ/Executor/DoctrineQueryBuilder/AutoJoinSpec.php
+++ b/tests/spec/RulerZ/Executor/DoctrineQueryBuilder/AutoJoinSpec.php
@@ -97,7 +97,7 @@ class AutoJoinSpec extends ObjectBehavior
$this->getJoinAlias('association.embeddable')->shouldReturn('association.embeddable');
}
- function it_uses_root_embeddable(QueryBuilder $target)
+ function it_joins_needed_tables(QueryBuilder $target)
{
$this->beConstructedWith($target, [
['group']
|
Rolled back auto join specification rename.
|
K-Phoen_rulerz
|
train
|
19ad35c6443be7d666f394bb6d0b0d51edf1a878
|
diff --git a/src/Common/Collection.php b/src/Common/Collection.php
index <HASH>..<HASH> 100644
--- a/src/Common/Collection.php
+++ b/src/Common/Collection.php
@@ -49,12 +49,15 @@ final class Collection extends AbstractAggregate implements CollectionInterface
/**
* {@inheritDoc}
*/
- public function sortBy(string $fieldName): CollectionInterface
+ public function sortBy(string $fieldName, bool $reverse = false): CollectionInterface
{
$results = $this->items;
usort($results, function ($item1, $item2) use ($fieldName) {
return $item1[$fieldName] <=> $item2[$fieldName];
});
+ if ($reverse) {
+ krsort($results);
+ }
return new static($results);
}
diff --git a/src/Common/CollectionInterface.php b/src/Common/CollectionInterface.php
index <HASH>..<HASH> 100644
--- a/src/Common/CollectionInterface.php
+++ b/src/Common/CollectionInterface.php
@@ -72,12 +72,14 @@ interface CollectionInterface extends AggregateInterface, ArrayableInterface, Js
public function limit(int $offset = 0, int $limit = null): CollectionInterface;
/**
- * Returns a collection sorted by the provided field name.
+ * Returns a collection sorted by the provided field name. Optionally returns the
+ * sorted collection in reverse order.
*
* @param string $fieldName The field name to use when grouping results.
+ * @param boolean $reverse OPTIONAL Whether or not to return the sort in reverse order.
* @return CollectionInterface
*/
- public function sortBy(string $fieldName): CollectionInterface;
+ public function sortBy(string $fieldName, bool $reverse = false): CollectionInterface;
/**
* Returns a collection filtered by the provided callback.
diff --git a/tests/Common/CollectionTest.php b/tests/Common/CollectionTest.php
index <HASH>..<HASH> 100644
--- a/tests/Common/CollectionTest.php
+++ b/tests/Common/CollectionTest.php
@@ -2,9 +2,9 @@
namespace Test\Common;
+use PHPUnit\Framework\TestCase;
use Guillermoandrae\Common\Collection;
use Guillermoandrae\Common\CollectionInterface;
-use PHPUnit\Framework\TestCase;
class CollectionTest extends TestCase
{
@@ -108,6 +108,12 @@ class CollectionTest extends TestCase
$this->assertTrue($sorted->first()['age'] < $sorted->last()['age']);
}
+ public function testSortByReverse()
+ {
+ $sorted = $this->collection->sortBy('age', true);
+ $this->assertTrue($sorted->first()['age'] > $sorted->last()['age']);
+ }
+
public function testFilter()
{
$collection = new Collection([1, 2, 3]);
|
Added the reverse param to the sortBy method
|
guillermoandrae_php-collection
|
train
|
3a15efdac2024a1893a4a0ef542140df052dca8b
|
diff --git a/html/pfappserver/root/static/admin/common.js b/html/pfappserver/root/static/admin/common.js
index <HASH>..<HASH> 100644
--- a/html/pfappserver/root/static/admin/common.js
+++ b/html/pfappserver/root/static/admin/common.js
@@ -801,9 +801,14 @@ function FingerbankSearch() {
}
+FingerbankSearch.prototype.model_stripped = function() {
+ var that = this;
+ return this.model.split('::Model::')[1].toLowerCase();
+}
+
FingerbankSearch.prototype.search = function(query, process) {
var that = this;
- var path = this.model.split('::Model::')[1].toLowerCase();
+ var path = this.model_stripped();
console.log(path);
$.ajax({
type: 'POST',
@@ -832,9 +837,23 @@ FingerbankSearch.prototype.search = function(query, process) {
});
}
+var alreadySetup = {};
+
FingerbankSearch.setup = function() {
$('.fingerbank-type-ahead').each(function(){
var o = this;
+
+ // Ensure we don't bind the search twice by recording which IDs we've already set it up on
+ // The ID is generated and assigned to a data tag to make sure duplicate HTML ids don't break this flow even though they aren't valid
+ if(!$(o).attr('data-fingerbank-search-id')) {
+ var gen_id = $("<a></a>").uniqueId().attr('id');
+ $(o).attr('data-fingerbank-search-id', gen_id);
+ }
+ if(alreadySetup[$(o).attr('data-fingerbank-search-id')]) return;
+ console.log(o);
+
+ alreadySetup[$(o).attr('data-fingerbank-search-id')] = true;
+
// Creating a new scope since we are in a loop
(function() {
var search = new FingerbankSearch();
@@ -844,6 +863,7 @@ FingerbankSearch.setup = function() {
search.typeahead_btn = $($(o).attr('data-btn'));
search.model = $(o).attr('data-type-ahead-for');
search.add_to = $('#'+$(o).attr('data-add-to'));
+ search.add_action = $(o).attr('data-add-action');
$(o).typeahead({
source: $.proxy(search.search, search),
minLength: 2,
@@ -851,13 +871,23 @@ FingerbankSearch.setup = function() {
matcher: function(item) { return true; }
});
search.typeahead_btn.click(function(e) {
- e.preventDefault()
+ e.preventDefault();
+ var id;
+ var display;
+ console.log(search);
$.each(search.results, function(){
if(this.display == search.typeahead_field.val()){
- search.add_to.append('<option selected="selected" value="'+this.id+'">'+this.display+'</option>');
- search.add_to.trigger("liszt:updated");
+ id = this.id;
+ display = this.display;
}
});
+ if(search.add_action) {
+ eval(search.add_action + "(search,id,display)");
+ }
+ else {
+ search.add_to.append('<option selected="selected" value="'+id+'">'+display+'</option>');
+ search.add_to.trigger("liszt:updated");
+ }
search.typeahead_field.val('');
return false;
});
|
prevent double binding of fingerbank type ahead + custom actions
|
inverse-inc_packetfence
|
train
|
7c7407fa99a40fafa1575affc16f346c904fff45
|
diff --git a/lib/sql_query_executor/query/normalizers/base_normalizer.rb b/lib/sql_query_executor/query/normalizers/base_normalizer.rb
index <HASH>..<HASH> 100644
--- a/lib/sql_query_executor/query/normalizers/base_normalizer.rb
+++ b/lib/sql_query_executor/query/normalizers/base_normalizer.rb
@@ -22,6 +22,8 @@ module SqlQueryExecutor
end
def self.attributes_from_query(selector)
+ return {} if selector.empty?
+
attributes = {}
selector.each do |key, value|
diff --git a/lib/sql_query_executor/query/normalizers/query_normalizer.rb b/lib/sql_query_executor/query/normalizers/query_normalizer.rb
index <HASH>..<HASH> 100644
--- a/lib/sql_query_executor/query/normalizers/query_normalizer.rb
+++ b/lib/sql_query_executor/query/normalizers/query_normalizer.rb
@@ -20,6 +20,7 @@ module SqlQueryExecutor
end
def self.attributes_from_query(query)
+ return {} if query.empty?
selector = query.class == Hash ? query : Base.new([], query).selector
super(selector)
end
diff --git a/spec/sql_query_executor/query/normalizers/query_normalizer_spec.rb b/spec/sql_query_executor/query/normalizers/query_normalizer_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/sql_query_executor/query/normalizers/query_normalizer_spec.rb
+++ b/spec/sql_query_executor/query/normalizers/query_normalizer_spec.rb
@@ -142,7 +142,18 @@ describe SqlQueryExecutor::Query::Normalizers::QueryNormalizer do
end
describe '.attributes_from_query' do
- context 'single selector' do
+ context 'empty query' do
+ let(:query) { '' }
+ let(:selector) { {} }
+
+ subject { described_class.attributes_from_query(query) }
+
+ it 'converts correctly' do
+ expect(subject).to eq(selector)
+ end
+ end
+
+ context 'single query' do
context 'when value is a string' do
let(:query) { 'monarch = "Crown of england"' }
let(:selector) { {monarch: "Crown of england"} }
@@ -210,7 +221,7 @@ describe SqlQueryExecutor::Query::Normalizers::QueryNormalizer do
end
end
- context 'multiple selectors' do
+ context 'multiple queries' do
let(:query) { 'name = "US" and monarch = "Crown of england"' }
let(:selector) { {name: 'US', monarch: "Crown of england"} }
@@ -221,7 +232,7 @@ describe SqlQueryExecutor::Query::Normalizers::QueryNormalizer do
end
end
- context 'nested selectors' do
+ context 'nested queries' do
let(:selector) { {name: 'US', '$and' => [{id: 1}, {monarch: "Crown of england"}] } }
let(:attributes) { {name: 'US', id: 1, monarch: "Crown of england"} }
@@ -232,7 +243,7 @@ describe SqlQueryExecutor::Query::Normalizers::QueryNormalizer do
end
end
- context 'complex selectors' do
+ context 'complex queries' do
let(:selector) { {name: 'US', '$and' => [{id: 1}, '$or' => [{name: 'Brazil'}, {monarch: "Crown of england"}]] } }
let(:attributes) { {name: 'US', id: 1} }
|
When passing empty queries to attributes_from_query it returns an empty Hash
|
efreesen_sql_query_executor
|
train
|
26776196e8a9bb607f83933d1ce2decea9aeba53
|
diff --git a/lib/chef/providers/right_script_provider.rb b/lib/chef/providers/right_script_provider.rb
index <HASH>..<HASH> 100644
--- a/lib/chef/providers/right_script_provider.rb
+++ b/lib/chef/providers/right_script_provider.rb
@@ -100,17 +100,24 @@ class Chef
ENV['RS_CLOUD_PROVIDER'] = node[:cloud][:provider]
# On some clouds (gce) node[:cloud][:public_ipv4] could be array.
- instance_public_ip = Array(node[:cloud][:public_ipv4]).first
- instance_public_ip = (node[:cloud][:public_ips].is_a?(Array) && node[:cloud][:public_ips].first) unless instance_public_ip
+ if node[:cloud][:public_ips].is_a?(Array)
+ instance_public_ip = node[:cloud][:public_ips].first
+ else
+ # On some clouds (gce) node[:cloud][:public_ipv4] could be array.
+ instance_public_ip = Array(node[:cloud][:public_ipv4]).first
+ end
if instance_public_ip
ENV['RS_PUBLIC_IP'] = instance_public_ip
else
::Chef::Log.info("Could not retrieve instance public IP")
end
- # On some clouds (gce) node[:cloud][:public_ipv4] could be array.
- instance_private_ip = Array(node[:cloud][:local_ipv4]).first
- instance_private_ip = (node[:cloud][:private_ips].is_a?(Array) && node[:cloud][:private_ips].first) unless instance_private_ip
+ if node[:cloud][:private_ips].is_a?(Array)
+ instance_private_ip = node[:cloud][:private_ips].first
+ else
+ # On some clouds (gce) node[:cloud][:private_ipv4] could be array.
+ instance_private_ip = Array(node[:cloud][:local_ipv4]).first
+ end
if instance_private_ip
ENV['RS_PRIVATE_IP'] = instance_private_ip
else
|
acu<I> Prefer private_ips and public_ips array, as these are filtered
Should only make a difference on cloudstack, where the local_ipv4
address can either be a public or private ip
|
rightscale_right_link
|
train
|
a7f0dd42d419cc84e083adcd06e7ba69577fb139
|
diff --git a/lib/syllable_rules.rb b/lib/syllable_rules.rb
index <HASH>..<HASH> 100644
--- a/lib/syllable_rules.rb
+++ b/lib/syllable_rules.rb
@@ -1,9 +1,9 @@
module Pronounce::SyllableRules
def self.evaluate(context)
- is_new_syllable = English.stressed_syllables_heavy context
- return is_new_syllable unless is_new_syllable.nil?
- is_new_syllable = English.disallow_ng_onset context
- return is_new_syllable unless is_new_syllable.nil?
+ [:stressed_syllables_heavy, :disallow_ng_onset].each do |rule|
+ is_new_syllable = English.send rule, context
+ return is_new_syllable unless is_new_syllable.nil?
+ end
sonority_sequencing_principle context
end
diff --git a/spec/syllable_rules_spec.rb b/spec/syllable_rules_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/syllable_rules_spec.rb
+++ b/spec/syllable_rules_spec.rb
@@ -29,6 +29,13 @@ module Pronounce
end
SyllableRules.evaluate context
end
+
+ it 'returns the first boolean value returned by a rule' do
+ SyllableRules::English.should_receive(:stressed_syllables_heavy)
+ SyllableRules::English.should_receive(:disallow_ng_onset).and_return(true)
+ SyllableRules.should_not_receive(:sonority_sequencing_principle)
+ expect(SyllableRules.evaluate context).to eq true
+ end
end
end
end
|
Reduced duplication of running rules.
|
josephwilk_pronounce
|
train
|
98307c8faefca5c4347288af18aee4dacbf8802c
|
diff --git a/integration-cli/docker_cli_run_test.go b/integration-cli/docker_cli_run_test.go
index <HASH>..<HASH> 100644
--- a/integration-cli/docker_cli_run_test.go
+++ b/integration-cli/docker_cli_run_test.go
@@ -1257,6 +1257,7 @@ func TestRunWithVolumesIsRecursive(t *testing.T) {
if err := mount.Mount("tmpfs", tmpfsDir, "tmpfs", ""); err != nil {
t.Fatalf("failed to create a tmpfs mount at %s - %s", tmpfsDir, err)
}
+ defer mount.Unmount(tmpfsDir)
f, err := ioutil.TempFile(tmpfsDir, "touch-me")
if err != nil {
|
integ-cli: fix cleanup in test which mounts tmpfs
Docker-DCO-<I>-
|
containers_storage
|
train
|
63dbd0f6b18a0175d2d8169648e63fcb30205957
|
diff --git a/docs/users_guide.md b/docs/users_guide.md
index <HASH>..<HASH> 100644
--- a/docs/users_guide.md
+++ b/docs/users_guide.md
@@ -16,7 +16,7 @@ $ [VAR=VALUE] /path/to/python -c "$(curl -fsSL https://raw.githubusercontent.com
| NAME | Description | Default |
| ---- | ----------- | ------- |
| RPM | Path to rpm | rpm |
-| RPM_VERSION | Installed python module's version | Same version with rpm |
+| RPM_PY_VERSION | Installed python module's version | Same version with rpm |
| VERBOSE | Verbose mode. true/false | false |
@@ -67,7 +67,7 @@ $ [VAR=VALUE] /path/to/python -c "$(curl -fsSL https://raw.githubusercontent.com
- A. Yes. Possible. But it may be failed to install. Set version number seeing [RPM release page](https://github.com/rpm-software-management/rpm/releases).
```
- $ RPM_VERSION=4.13.0 python -c "$(curl -fsSL https://raw.githubusercontent.com/junaruga/rpm-py-installer/master/install.py)"
+ $ RPM_PY_VERSION=4.13.0 python -c "$(curl -fsSL https://raw.githubusercontent.com/junaruga/rpm-py-installer/master/install.py)"
```
## Tutorial
diff --git a/install.py b/install.py
index <HASH>..<HASH> 100644
--- a/install.py
+++ b/install.py
@@ -65,12 +65,12 @@ class Application(object):
# Installed RPM Python module's version.
# Default: Same version with rpm.
- rpm_version = None
- if 'RPM_VERSION' in os.environ:
- rpm_version = os.environ.get('RPM_VERSION')
+ rpm_py_version = None
+ if 'RPM_PY_VERSION' in os.environ:
+ rpm_py_version = os.environ.get('RPM_PY_VERSION')
else:
stdout = Cmd.sh_e_out('{0} --version'.format(rpm_path))
- rpm_version = stdout.split()[2]
+ rpm_py_version = stdout.split()[2]
# Command options
setup_py_opts = '-q'
@@ -81,7 +81,7 @@ class Application(object):
self.python_path = python_path
self.rpm_path = rpm_path
- self.rpm_version = rpm_version
+ self.rpm_py_version = rpm_py_version
self.setup_py_opts = setup_py_opts
self.curl_opts = curl_opts
self.is_work_dir_removed = False
@@ -126,13 +126,13 @@ class Application(object):
@property
def rpm_archive_top_dir(self):
top_dir = self.RPM_ARCHIVE_TOP_DIR_FORMAT.format(
- version=self.rpm_version
+ version=self.rpm_py_version
)
return top_dir
def download_and_expand_rpm_py(self):
archive_url = self.RPM_ARCHIVE_URL_FORMAT.format(
- version=self.rpm_version
+ version=self.rpm_py_version
)
Log.info("Downloading archive '{0}' in the working directory.".format(
archive_url))
@@ -144,7 +144,7 @@ class Application(object):
def make_setup_py(self):
replaced_word_dict = {
'@PACKAGE_NAME@': 'rpm',
- '@VERSION@': self.rpm_version,
+ '@VERSION@': self.rpm_py_version,
'@PACKAGE_BUGREPORT@': 'rpm-maint@lists.rpm.org',
}
diff --git a/tests/test_install.py b/tests/test_install.py
index <HASH>..<HASH> 100644
--- a/tests/test_install.py
+++ b/tests/test_install.py
@@ -17,6 +17,18 @@ def app():
return Application()
+@pytest.fixture
+def app_with_env(env):
+ if not isinstance(env, dict):
+ raise ValueError('env: Invalid type: {0}'.format(type(env)))
+ prev_env = os.environ.copy()
+ try:
+ os.environ.update(env)
+ yield Application()
+ finally:
+ os.environ = prev_env
+
+
def test_cmd_sh_e_is_ok():
stdout = Cmd.sh_e('pwd')
assert not stdout
@@ -43,13 +55,31 @@ def test_app_init(app):
assert 'bin/python' in app.python_path
assert app.rpm_path
assert 'rpm' in app.rpm_path
- assert app.rpm_version
- assert re.match('^[\d.]+$', app.rpm_version)
+ assert app.rpm_py_version
+ assert re.match('^[\d.]+$', app.rpm_py_version)
assert app.setup_py_opts == '-q'
assert app.curl_opts == '--silent'
assert app.is_work_dir_removed is False
+@pytest.mark.parametrize('env', [{'RPM': 'pwd'}])
+def test_app_init_env_rpm(app_with_env):
+ assert app_with_env
+ assert re.match('^/.+/pwd$', app_with_env.rpm_path)
+
+
+@pytest.mark.parametrize('env', [{'RPM_PY_VERSION': '1.2.3'}])
+def test_app_init_env_rpm_py_version(app_with_env):
+ assert app_with_env
+ assert app_with_env.rpm_py_version == '1.2.3'
+
+
+@pytest.mark.parametrize('env', [{'VERBOSE': 'true'}])
+def test_app_init_env_verbose(app_with_env):
+ assert app_with_env
+ assert app_with_env.verbose is True
+
+
def test_verify_system_status_is_ok(app):
app.verify_system_status()
assert True
|
Change a environment variable RPM_VERSION to RPM_PY_VERSION. (#<I>)
|
junaruga_rpm-py-installer
|
train
|
7316f86034261c9663f001a5464da4786df7fece
|
diff --git a/ryu/ofproto/ofproto_v1_5_parser.py b/ryu/ofproto/ofproto_v1_5_parser.py
index <HASH>..<HASH> 100644
--- a/ryu/ofproto/ofproto_v1_5_parser.py
+++ b/ryu/ofproto/ofproto_v1_5_parser.py
@@ -2814,7 +2814,6 @@ class OFPGroupDescStats(StringifyMixin):
self.length = length
self.type = type_
self.group_id = group_id
- self.bucket_array_len = bucket_array_len
self.buckets = buckets
self.properties = properties
|
ofproto_v1_5: Reduce an unnecessary code
|
osrg_ryu
|
train
|
bfb9ca115e819243e41ce56d4acea36f028079a3
|
diff --git a/pygbif/__init__.py b/pygbif/__init__.py
index <HASH>..<HASH> 100644
--- a/pygbif/__init__.py
+++ b/pygbif/__init__.py
@@ -6,7 +6,7 @@
pygbif library
~~~~~~~~~~~~~~~~~~~~~
-pygbif is a Python client for GBIF.
+pygbif is a Python client for the Global Biodiversity Information Facility (GBIF) API.
Usage::
|
better explanation of gbif in top level docstring
|
sckott_pygbif
|
train
|
3b729d95b7a2ed2fc08d55159849f8552ddc170d
|
diff --git a/tests/transformation_test.py b/tests/transformation_test.py
index <HASH>..<HASH> 100644
--- a/tests/transformation_test.py
+++ b/tests/transformation_test.py
@@ -252,6 +252,7 @@ def test_pdf_info(transform):
result = transform.pdf_info(colorinfo=True)
assert result.url == target_url
+
def test_pdf_convert(transform):
target_url = '{}/{}/pdfconvert=pageorientation:landscape/{}'.format(config.CDN_URL, APIKEY, EXTERNAL_URL)
result = transform.pdf_convert(pageorientation='landscape')
|
addressed feedback : reverted the removed space between tests
|
filestack_filestack-python
|
train
|
d0bb2a72ae9727c49c10cdb89fccc264d5ae6560
|
diff --git a/lib/vines/stream/http/ready.rb b/lib/vines/stream/http/ready.rb
index <HASH>..<HASH> 100644
--- a/lib/vines/stream/http/ready.rb
+++ b/lib/vines/stream/http/ready.rb
@@ -11,7 +11,11 @@ module Vines
raise StreamErrors::NotAuthorized
end
stream.parse_body(node).each do |child|
- super(child)
+ begin
+ super(child)
+ rescue StanzaError => e
+ stream.error(e)
+ end
end
stream.terminate if terminate?(node)
end
diff --git a/test/stream/http/ready_test.rb b/test/stream/http/ready_test.rb
index <HASH>..<HASH> 100644
--- a/test/stream/http/ready_test.rb
+++ b/test/stream/http/ready_test.rb
@@ -1,40 +1,42 @@
# encoding: UTF-8
+require 'tmpdir'
require 'vines'
require 'minitest/autorun'
-class HttpReadyTest < MiniTest::Unit::TestCase
- def setup
+describe Vines::Stream::Http::Ready do
+ before do
@stream = MiniTest::Mock.new
@state = Vines::Stream::Http::Ready.new(@stream, nil)
end
- def test_missing_body_raises_error
+ it "raises when body element is missing" do
node = node('<presence type="unavailable"/>')
@stream.expect(:valid_session?, true, [nil])
- assert_raises(Vines::StreamErrors::NotAuthorized) { @state.node(node) }
+ -> { @state.node(node) }.must_raise Vines::StreamErrors::NotAuthorized
end
- def test_body_with_missing_namespace_raises_error
+ it "raises when namespace is missing" do
node = node('<body rid="42" sid="12"/>')
@stream.expect(:valid_session?, true, ['12'])
assert_raises(Vines::StreamErrors::NotAuthorized) { @state.node(node) }
+ -> { @state.node(node) }.must_raise Vines::StreamErrors::NotAuthorized
end
- def test_missing_rid_raises_error
+ it "raises when rid attribute is missing" do
node = node('<body xmlns="http://jabber.org/protocol/httpbind" sid="12"/>')
@stream.expect(:valid_session?, true, ['12'])
- assert_raises(Vines::StreamErrors::NotAuthorized) { @state.node(node) }
+ -> { @state.node(node) }.must_raise Vines::StreamErrors::NotAuthorized
end
- def test_invalid_session_raises_error
+ it "raises when session id is invalid" do
@stream.expect(:valid_session?, false, ['12'])
node = node('<body xmlns="http://jabber.org/protocol/httpbind" rid="42" sid="12"/>')
- assert_raises(Vines::StreamErrors::NotAuthorized) { @state.node(node) }
+ -> { @state.node(node) }.must_raise Vines::StreamErrors::NotAuthorized
assert @stream.verify
end
- def test_valid_body_processes
+ it "processes when body element is empty" do
node = node('<body xmlns="http://jabber.org/protocol/httpbind" rid="42" sid="12"/>')
@stream.expect(:valid_session?, true, ['12'])
@stream.expect(:parse_body, [], [node])
@@ -42,7 +44,40 @@ class HttpReadyTest < MiniTest::Unit::TestCase
assert @stream.verify
end
- def test_terminate
+ it "processes all stanzas in one body element" do
+ alice = Vines::User.new(jid: 'alice@wonderland.lit')
+ hatter = Vines::User.new(jid: 'hatter@wonderland.lit')
+
+ config = Vines::Config.new do
+ host 'wonderland.lit' do
+ storage(:fs) { dir Dir.tmpdir }
+ end
+ end
+
+ bogus = node('<message type="bogus">raises stanza error</message>')
+ ok = node('<message to="hatter@wonderland.lit">but processes this message</message>')
+ node = node(%Q{<body xmlns="http://jabber.org/protocol/httpbind" rid="42" sid="12">#{bogus}#{ok}</body>})
+
+ raises = Vines::Stanza.from_node(bogus, @stream)
+ processes = Vines::Stanza.from_node(ok, @stream)
+
+ recipient = MiniTest::Mock.new
+ recipient.expect(:user, hatter)
+ recipient.expect(:write, nil, [Vines::Stanza::Message])
+
+ @stream.expect(:valid_session?, true, ['12'])
+ @stream.expect(:parse_body, [raises, processes], [node])
+ @stream.expect(:error, nil, [Vines::StanzaErrors::BadRequest])
+ @stream.expect(:config, config)
+ @stream.expect(:user, alice)
+ @stream.expect(:connected_resources, [recipient], [hatter.jid])
+
+ @state.node(node)
+ assert @stream.verify
+ assert recipient.verify
+ end
+
+ it "terminates the session" do
node = node('<body xmlns="http://jabber.org/protocol/httpbind" rid="42" sid="12" type="terminate"/>')
@stream.expect(:valid_session?, true, ['12'])
@stream.expect(:parse_body, [], [node])
|
Don't allow StanzaErrors to stop the BOSH stanza processing loop.
|
negativecode_vines
|
train
|
c07207f219268010ace0dc6c35b518e990b2865b
|
diff --git a/src/util/Util.js b/src/util/Util.js
index <HASH>..<HASH> 100644
--- a/src/util/Util.js
+++ b/src/util/Util.js
@@ -483,11 +483,11 @@ class Util extends null {
* @returns {Collection}
*/
static discordSort(collection) {
+ const isGuildChannel = collection.first() instanceof GuildChannel;
return collection.sorted(
- (a, b) =>
- a.rawPosition - b.rawPosition ||
- parseInt(b.id.slice(0, -10)) - parseInt(a.id.slice(0, -10)) ||
- parseInt(b.id.slice(10)) - parseInt(a.id.slice(10)),
+ isGuildChannel
+ ? (a, b) => a.rawPosition - b.rawPosition || Number(BigInt(a.id) - BigInt(b.id))
+ : (a, b) => a.rawPosition - b.rawPosition || Number(BigInt(b.id) - BigInt(a.id)),
);
}
@@ -616,3 +616,6 @@ class Util extends null {
}
module.exports = Util;
+
+// Fixes Circular
+const GuildChannel = require('../structures/GuildChannel');
|
fix(Util): fix sorting for GuildChannels (#<I>)
|
discordjs_discord.js
|
train
|
3a258a592fd384d740e86d08e2c9e7540b093267
|
diff --git a/demos/html.html b/demos/html.html
index <HASH>..<HASH> 100644
--- a/demos/html.html
+++ b/demos/html.html
@@ -4,13 +4,23 @@
<title>Syntax Highlighting</title>
<link href="../themes/blackboard.css" rel="stylesheet" type="text/css" media="screen">
<body>
-
+<style type="text/css">
+</style>
<pre>
-<code data-language="html"><body>
+<code data-language="html"><!-- inline styles! -->
+<style type="text/css">
+ body span.blah {
+ background: #000;
+ color: #fff;
+ }
+</style>
+
+<body>
<span class="blah" width="200" height="200">test code goes here</span>
</body>
-<?
+<!-- php code in html! -->
+<?php
$test = true;
/**
@@ -21,12 +31,11 @@
}
?>
-<!-- this is an html comment -->
-<article title="<?= $user->name ?>">test</article>
-</code>
+<article title="<?= $user->name ?>">test</article></code>
</pre>
<script src="../js/rainbow.js"></script>
- <script src="../js/language/php.js"></script>
<script src="../js/language/html.js"></script>
+ <script src="../js/language/php.js"></script>
+ <script src="../js/language/css.js"></script>
</body>
diff --git a/js/language/css.js b/js/language/css.js
index <HASH>..<HASH> 100644
--- a/js/language/css.js
+++ b/js/language/css.js
@@ -42,6 +42,15 @@ Rainbow.extend('css', [
3: 'support.value'
},
'pattern': /(:|,)\s?(-o-|-moz-|-webkit-|-ms-)?([a-zA-Z-]*)(?=\b)(?!.*\{)/g
+ },
+ {
+ 'matches': {
+ 1: {
+ 'name': 'meta.style-tag',
+ 'pattern': /\w+/g
+ }
+ },
+ 'pattern': /<\/?(.*?)(?=\=|>)/g
}
], true);
diff --git a/js/language/html.js b/js/language/html.js
index <HASH>..<HASH> 100644
--- a/js/language/html.js
+++ b/js/language/html.js
@@ -11,6 +11,15 @@ Rainbow.extend('html', [
'pattern': /<\?(php)?([\s\S]*?)(\?>)/gm
},
{
+ 'name': 'css',
+ 'matches': {
+ 0: {
+ 'language': 'css'
+ }
+ },
+ 'pattern': /<style(.*?)>([\s\S]*?)<\/style>/gm
+ },
+ {
'name': 'support.tag',
'pattern': /<|>/g
},
|
Add support for inline css in html
|
ccampbell_rainbow
|
train
|
4baf80726e011934e4f9e2b42e8e9b431f6b4891
|
diff --git a/src/Collection.js b/src/Collection.js
index <HASH>..<HASH> 100644
--- a/src/Collection.js
+++ b/src/Collection.js
@@ -9,8 +9,6 @@ var Collection = new Class({
_models: [],
- _bound: {},
-
_Model: Model,
length: 0,
@@ -21,6 +19,7 @@ var Collection = new Class({
// onAdd: function(){},
// onRemove: function(){},
// onEmpty: function(){},
+ // onSort: function(){},
primaryKey: undefined,
Model: undefined,
// Model Options
@@ -34,10 +33,6 @@ var Collection = new Class({
setup: function(models, options){
this.setOptions(options);
- this._bound = {
- remove: this.remove.bind(this)
- };
-
this.primaryKey = this.options.primaryKey;
if (this.options.Model) {
@@ -76,15 +71,18 @@ var Collection = new Class({
* @param {Class} model A Model instance
* @return {Class} Collection Instance
*/
- _add: function(model){
+ _add: function(model, at){
model = new this._Model(model, this.options.modelOptions);
if (!this.hasModel(model)) {
-
// Remove the model if it destroys itself.
- model.addEvent('destroy', this._bound.remove);
+ model.addEvent('destroy', this.bound('remove'));
- this._models.push(model);
+ if (at != undefined) {
+ this._models.splice(at, 0, model);
+ } else {
+ this._models.push(model);
+ }
this.length = this._models.length;
@@ -103,7 +101,7 @@ var Collection = new Class({
* collectionInstance.add(model);
* collectionInstance.add([model, model]);
*/
- add: function(models){
+ add: function(models, at){
models = Array.from(models);
var len = models.length,
@@ -147,7 +145,7 @@ var Collection = new Class({
*/
_remove: function(model){
// Clean up when removing so that it doesn't try removing itself from the collection
- model.removeEvent('destroy', this._bound.remove);
+ model.removeEvent('destroy', this.bound('remove'));
this._models.erase(model);
@@ -236,22 +234,22 @@ var Collection = new Class({
},
signalAdd: function(model){
- !this.isSilent() && this.fireEvent('add', model);
+ !this.isSilent() && this.fireEvent('add', [this, model]);
return this;
},
signalRemove: function(model){
- !this.isSilent() && this.fireEvent('remove', model);
+ !this.isSilent() && this.fireEvent('remove', [this, model]);
return this;
},
signalEmpty: function(){
- !this.isSilent() && this.fireEvent('empty');
+ !this.isSilent() && this.fireEvent('empty', this);
return this;
},
signalSort: function(){
- !this.isSilent() && this.fireEvent('sort');
+ !this.isSilent() && this.fireEvent('sort', this);
return this;
},
|
Removing custom bound because it comes with Connector.
Add at arg to add method so that a model could be spliced in at certain locations of the _models array.
Update to pass the collection instance to fired events.
|
GCheung55_Neuro
|
train
|
8bff00206a025e4966d66e418fdb467afcd5effe
|
diff --git a/src/pandas_profiling/config.py b/src/pandas_profiling/config.py
index <HASH>..<HASH> 100644
--- a/src/pandas_profiling/config.py
+++ b/src/pandas_profiling/config.py
@@ -17,7 +17,7 @@ class Config(object):
def __init__(self):
"""The config constructor should be called only once."""
if self.config is None:
- self.config = confuse.Configuration("PandasProfiling", __name__)
+ self.config = confuse.Configuration("PandasProfiling", __name__, read=False)
self.set_file(str(get_config_default()))
|
Workaround for issue <I>
|
pandas-profiling_pandas-profiling
|
train
|
d3260dfdcd2cb365788ea3bc41e91739a8acb81d
|
diff --git a/src/Symfony/Component/EventDispatcher/Debug/TraceableEventDispatcher.php b/src/Symfony/Component/EventDispatcher/Debug/TraceableEventDispatcher.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/EventDispatcher/Debug/TraceableEventDispatcher.php
+++ b/src/Symfony/Component/EventDispatcher/Debug/TraceableEventDispatcher.php
@@ -217,6 +217,7 @@ class TraceableEventDispatcher implements TraceableEventDispatcherInterface
public function reset()
{
$this->called = array();
+ $this->orphanedEvents = array();
}
/**
diff --git a/src/Symfony/Component/EventDispatcher/Tests/Debug/TraceableEventDispatcherTest.php b/src/Symfony/Component/EventDispatcher/Tests/Debug/TraceableEventDispatcherTest.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/EventDispatcher/Tests/Debug/TraceableEventDispatcherTest.php
+++ b/src/Symfony/Component/EventDispatcher/Tests/Debug/TraceableEventDispatcherTest.php
@@ -271,6 +271,17 @@ class TraceableEventDispatcherTest extends TestCase
$this->assertCount(1, $eventDispatcher->getListeners('foo'), 'expected listener1 to be removed');
}
+
+ public function testClearOrphanedEvents()
+ {
+ $tdispatcher = new TraceableEventDispatcher(new EventDispatcher(), new Stopwatch());
+ $tdispatcher->dispatch('foo');
+ $events = $tdispatcher->getOrphanedEvents();
+ $this->assertCount(1, $events);
+ $tdispatcher->reset();
+ $events = $tdispatcher->getOrphanedEvents();
+ $this->assertCount(0, $events);
+ }
}
class EventSubscriber implements EventSubscriberInterface
|
[EventDispatcher] Clear orphaned events on TraceableEventDispatcher::reset
|
symfony_symfony
|
train
|
88ba4219301151c35b6150c81bae53011ff2d4c4
|
diff --git a/functional/pipeline.py b/functional/pipeline.py
index <HASH>..<HASH> 100644
--- a/functional/pipeline.py
+++ b/functional/pipeline.py
@@ -1,6 +1,7 @@
# pylint: disable=too-many-lines,too-many-public-methods,protected-access,redefined-builtin,
# pylint: disable=no-member
+from __future__ import division
from operator import mul
import collections
from functools import reduce
@@ -894,6 +895,14 @@ class Sequence(object):
"""
return sum(self)
+ def average(self):
+ """
+ Takes the average of elements in the sequence
+ :return: average of elements in the sequence
+ """
+ length = self.size()
+ return sum(self) / length
+
def aggregate(self, *args):
"""
Aggregates the sequence by specified arguments. Its behavior varies depending on if one,
diff --git a/functional/test/test_functional.py b/functional/test/test_functional.py
index <HASH>..<HASH> 100644
--- a/functional/test/test_functional.py
+++ b/functional/test/test_functional.py
@@ -601,6 +601,10 @@ class TestPipeline(unittest.TestCase):
l = [1, 2, 3]
self.assertEqual(6, seq(l).sum())
+ def test_average(self):
+ l = [1, 2]
+ self.assertEqual(1.5, seq(l).average())
+
def test_set(self):
l = [1, 1, 2, 2, 3]
ls = set(l)
|
Added average function to match LINQ API
|
EntilZha_PyFunctional
|
train
|
eb6a146b6cec5aa570a46e9d1d2ab4a56478dc3e
|
diff --git a/src/Rah/Danpu/Base.php b/src/Rah/Danpu/Base.php
index <HASH>..<HASH> 100644
--- a/src/Rah/Danpu/Base.php
+++ b/src/Rah/Danpu/Base.php
@@ -41,6 +41,14 @@ abstract class Base
protected $config;
/**
+ * An instance of PDO
+ *
+ * @var \PDO
+ */
+
+ protected $pdo;
+
+ /**
* An array of tables in the database.
*
* @var array
|
Define Base::$pdo property as protected.
|
gocom_danpu
|
train
|
870e1f82f65f4917baeb9e5c3e32aadc8773c8e7
|
diff --git a/sos/plugins/kubernetes.py b/sos/plugins/kubernetes.py
index <HASH>..<HASH> 100644
--- a/sos/plugins/kubernetes.py
+++ b/sos/plugins/kubernetes.py
@@ -97,6 +97,11 @@ class kubernetes(Plugin, RedHatPlugin):
"{} get --raw /metrics".format(kube_cmd)
])
+ # CNV is not part of the base installation, but can be added
+ if self.is_installed('kubevirt-virtctl'):
+ resources.extend(['vms', 'vmis'])
+ self.add_cmd_output('virtctl version')
+
for n in knsps:
knsp = '--namespace=%s' % n
if self.get_option('all'):
|
[kubernetes] Collect vm/vmi information if CNV is in use
If CNV is installed ontop of the base kubernetes installation, collect
output for the 'vms' and 'vmis' resources as well as virtctl.
Resolves: #<I>
|
sosreport_sos
|
train
|
b62e73a58afe41f7604260c7332e0a7ea1d3f8ec
|
diff --git a/inlineplz/interfaces/github.py b/inlineplz/interfaces/github.py
index <HASH>..<HASH> 100644
--- a/inlineplz/interfaces/github.py
+++ b/inlineplz/interfaces/github.py
@@ -304,8 +304,12 @@ class GitHubInterface(InterfaceBase):
if self.stopped_early:
return
+ comments_to_delete = []
+ in_reply_to = set()
+
for comment in self.pull_request.review_comments():
try:
+ in_reply_to.add(comment.as_dict().get("in_reply_to_id"))
should_delete = True
if not comment.body.startswith(self.prefix):
continue
@@ -319,8 +323,17 @@ class GitHubInterface(InterfaceBase):
if not should_delete:
continue
- comment.delete()
- print("Deleted comment: {}".format(comment.body))
+ except Exception:
+ traceback.print_exc()
+
+ for comment in comments_to_delete:
+ try:
+ if comment.id not in in_reply_to:
+ comment.delete()
+ print("Deleted comment: {}".format(comment.body))
+ elif "**OBSOLETE**" not in comment.body:
+ comment.edit(comment.body + "\n**OBSOLETE**")
+ print("Edited obsolete comment: {}".format(comment.body))
except Exception:
traceback.print_exc()
|
Preserve comments people have replied to (#<I>)
fixes #<I>
|
guykisel_inline-plz
|
train
|
1ba76cc91e8d0f42663394254e1ccf6d16fe89c4
|
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index <HASH>..<HASH> 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -1,6 +1,7 @@
$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
-require 'coveralls'
require 'pp'
+require "codeclimate-test-reporter"
+CodeClimate::TestReporter.start
def create_stack(options = {}, &block)
cfn = options[:client] || Cfer::Cfn::Client.new(stack_name: options[:stack_name] || 'test', region: 'us-east-1')
@@ -48,7 +49,6 @@ module Cfer
DEBUG = true
end
-Coveralls.wear!
require 'cfer'
Cfer::LOGGER.level = Logger::DEBUG
|
Updated spec helper to use codeclimate
|
seanedwards_cfer
|
train
|
f07abda4e6f4ed35f326c3be48d6811bdf345b74
|
diff --git a/framework/core/js/forum/dist/app.js b/framework/core/js/forum/dist/app.js
index <HASH>..<HASH> 100644
--- a/framework/core/js/forum/dist/app.js
+++ b/framework/core/js/forum/dist/app.js
@@ -21470,6 +21470,8 @@ System.register('flarum/components/DiscussionsSearchSource', ['flarum/helpers/hi
value: function search(query) {
var _this = this;
+ query = query.toLowerCase();
+
this.results[query] = [];
var params = {
@@ -21485,6 +21487,8 @@ System.register('flarum/components/DiscussionsSearchSource', ['flarum/helpers/hi
}, {
key: 'view',
value: function view(query) {
+ query = query.toLowerCase();
+
var results = this.results[query] || [];
return [m(
@@ -28066,6 +28070,8 @@ System.register('flarum/components/UsersSearchSource', ['flarum/helpers/highligh
}, {
key: 'view',
value: function view(query) {
+ query = query.toLowerCase();
+
var results = app.store.all('users').filter(function (user) {
return user.username().toLowerCase().substr(0, query.length) === query;
});
diff --git a/framework/core/js/forum/src/components/DiscussionsSearchSource.js b/framework/core/js/forum/src/components/DiscussionsSearchSource.js
index <HASH>..<HASH> 100644
--- a/framework/core/js/forum/src/components/DiscussionsSearchSource.js
+++ b/framework/core/js/forum/src/components/DiscussionsSearchSource.js
@@ -13,6 +13,8 @@ export default class DiscussionsSearchSource {
}
search(query) {
+ query = query.toLowerCase();
+
this.results[query] = [];
const params = {
@@ -25,6 +27,8 @@ export default class DiscussionsSearchSource {
}
view(query) {
+ query = query.toLowerCase();
+
const results = this.results[query] || [];
return [
diff --git a/framework/core/js/forum/src/components/UsersSearchSource.js b/framework/core/js/forum/src/components/UsersSearchSource.js
index <HASH>..<HASH> 100644
--- a/framework/core/js/forum/src/components/UsersSearchSource.js
+++ b/framework/core/js/forum/src/components/UsersSearchSource.js
@@ -17,6 +17,8 @@ export default class UsersSearchResults {
}
view(query) {
+ query = query.toLowerCase();
+
const results = app.store.all('users')
.filter(user => user.username().toLowerCase().substr(0, query.length) === query);
|
Make search dropdown filtering case-insensitive. closes flarum/core#<I>
|
flarum_core
|
train
|
20bed07788150e99f2d49f5a3f3e9bd0c2dff931
|
diff --git a/lib/amee/profile_item.rb b/lib/amee/profile_item.rb
index <HASH>..<HASH> 100644
--- a/lib/amee/profile_item.rb
+++ b/lib/amee/profile_item.rb
@@ -99,6 +99,17 @@ module AMEE
raise AMEE::BadData.new("Couldn't create ProfileItem. Check that your information is correct.")
end
+ def update(options = {})
+ connection.put(full_path, options)
+ rescue
+ raise AMEE::BadData.new("Couldn't update ProfileItem. Check that your information is correct.")
+ end
+
+ def value(name_or_path)
+ val = values.find{ |x| x[:name] == name_or_path || x[:path] == name_or_path}
+ val ? val[:value] : nil
+ end
+
end
end
end
|
Add ProfileItem#update and a function to search for values by name
|
OpenAMEE_amee-ruby
|
train
|
0612ea6aea5a10e5639a710500c321e3c9e02495
|
diff --git a/interfaces/python/setup.py b/interfaces/python/setup.py
index <HASH>..<HASH> 100644
--- a/interfaces/python/setup.py
+++ b/interfaces/python/setup.py
@@ -5,6 +5,8 @@ setup.py file for compiling Infomap module
"""
from distutils.core import setup, Extension
+from distutils.file_util import copy_file
+import sysconfig
import fnmatch
import os
import re
@@ -35,4 +37,8 @@ setup (name = 'infomap',
url = "www.mapequation.org",
ext_modules = [infomap_module],
py_modules = ["infomap"],
- )
\ No newline at end of file
+ )
+
+# Clean ABI Version Tagged .so Files
+libFilename = '_infomap{}'.format(sysconfig.get_config_var('EXT_SUFFIX'))
+copy_file(libFilename, '_infomap.so')
\ No newline at end of file
|
Fix python library problem due to ABI tagged .so files
|
mapequation_infomap
|
train
|
8d0447b17b57f49e73c7e1e3fa15964ce5c90df7
|
diff --git a/src/de/unihd/dbs/uima/annotator/heideltime/resources/NormalizationManager.java b/src/de/unihd/dbs/uima/annotator/heideltime/resources/NormalizationManager.java
index <HASH>..<HASH> 100644
--- a/src/de/unihd/dbs/uima/annotator/heideltime/resources/NormalizationManager.java
+++ b/src/de/unihd/dbs/uima/annotator/heideltime/resources/NormalizationManager.java
@@ -71,7 +71,7 @@ public class NormalizationManager extends GenericResourceManager {
* @return singleton instance of NormalizationManager
*/
public static NormalizationManager getInstance(Language language) {
- if(!instances.containsKey(language)) {
+ if(!instances.containsKey(language.getName())) {
NormalizationManager nm = new NormalizationManager(language.getResourceFolder());
instances.put(language.getName(), nm);
}
diff --git a/src/de/unihd/dbs/uima/annotator/heideltime/resources/RePatternManager.java b/src/de/unihd/dbs/uima/annotator/heideltime/resources/RePatternManager.java
index <HASH>..<HASH> 100644
--- a/src/de/unihd/dbs/uima/annotator/heideltime/resources/RePatternManager.java
+++ b/src/de/unihd/dbs/uima/annotator/heideltime/resources/RePatternManager.java
@@ -52,7 +52,7 @@ public class RePatternManager extends GenericResourceManager {
* @return singleton instance of RePatternManager
*/
public static RePatternManager getInstance(Language language) {
- if(!instances.containsKey(language)) {
+ if(!instances.containsKey(language.getName())) {
RePatternManager nm = new RePatternManager(language.getResourceFolder());
instances.put(language.getName(), nm);
}
diff --git a/src/de/unihd/dbs/uima/annotator/heideltime/resources/RuleManager.java b/src/de/unihd/dbs/uima/annotator/heideltime/resources/RuleManager.java
index <HASH>..<HASH> 100644
--- a/src/de/unihd/dbs/uima/annotator/heideltime/resources/RuleManager.java
+++ b/src/de/unihd/dbs/uima/annotator/heideltime/resources/RuleManager.java
@@ -102,7 +102,7 @@ public class RuleManager extends GenericResourceManager {
* @return singleton instance of RuleManager
*/
public static RuleManager getInstance(Language language) {
- if(!instances.containsKey(language)) {
+ if(!instances.containsKey(language.getName())) {
RuleManager nm = new RuleManager(language.getResourceFolder());
instances.put(language.getName(), nm);
}
|
quick fix for performance degradation due to some overlooked code
|
HeidelTime_heideltime
|
train
|
b06dc8525a0f128fb642df865901c550d9fcfee5
|
diff --git a/resource_aws_autoscaling_group_test.go b/resource_aws_autoscaling_group_test.go
index <HASH>..<HASH> 100644
--- a/resource_aws_autoscaling_group_test.go
+++ b/resource_aws_autoscaling_group_test.go
@@ -89,7 +89,6 @@ func TestAccAWSAutoScalingGroup_autoGeneratedName(t *testing.T) {
},
},
})
-
}
func TestAccAWSAutoScalingGroup_tags(t *testing.T) {
@@ -409,21 +408,10 @@ resource "aws_launch_configuration" "foobar" {
resource "aws_autoscaling_group" "bar" {
availability_zones = ["us-west-2a"]
- max_size = 1
- min_size = 1
- health_check_grace_period = 300
- health_check_type = "ELB"
- desired_capacity = 1
- force_delete = true
- termination_policies = ["OldestInstance","ClosestToNextInstanceHour"]
-
+ desired_capacity = 0
+ max_size = 0
+ min_size = 0
launch_configuration = "${aws_launch_configuration.foobar.name}"
-
- tag {
- key = "Foo"
- value = "foo-bar"
- propagate_at_launch = true
- }
}
`
|
Cleanup unrelated config to speed up autogenerate name acc test.
Removes overspecified config that is unrelated to testing the auto scaling
group's autogenerated name. The test is only concerned with checking that
the auto scaling group was created successfully with an autogenerated name
matching a specific pattern.
|
terraform-providers_terraform-provider-aws
|
train
|
0ea285ab0b30f43358e1719cbf86411f02ebc065
|
diff --git a/lib/geokit/geocoders/yahoo.rb b/lib/geokit/geocoders/yahoo.rb
index <HASH>..<HASH> 100644
--- a/lib/geokit/geocoders/yahoo.rb
+++ b/lib/geokit/geocoders/yahoo.rb
@@ -134,9 +134,9 @@ class OauthUtil
# @ref http://oauth.net/core/1.0/#rfc.section.A.5.1
def query_string
pairs = []
- @params.sort.each { | key, val |
+ @params.sort.each do | key, val |
pairs.push("#{ percent_encode(key) }=#{ percent_encode(val.to_s) }")
- }
+ end
pairs.join "&"
end
diff --git a/test/helper.rb b/test/helper.rb
index <HASH>..<HASH> 100644
--- a/test/helper.rb
+++ b/test/helper.rb
@@ -81,9 +81,9 @@ end
def assert_array_in_delta(expected_array, actual_array, delta = 0.001, message = "")
full_message = build_message(message, "<?> and\n<?> expected to be within\n<?> of each other.\n", expected_array, actual_array, delta)
assert_block(full_message) do
- expected_array.zip(actual_array).all?{|expected_item, actual_item|
+ expected_array.zip(actual_array).all? do |expected_item, actual_item|
(expected_item.to_f - actual_item.to_f).abs <= delta.to_f
- }
+ end
end
end
|
Avoid using {...} for multi-line blocks
|
geokit_geokit
|
train
|
c29838fb39430e2ac50beea6e263251920e6f4f6
|
diff --git a/common/types/endpoint.go b/common/types/endpoint.go
index <HASH>..<HASH> 100644
--- a/common/types/endpoint.go
+++ b/common/types/endpoint.go
@@ -3,6 +3,7 @@ package types
import (
"encoding/json"
"net"
+ "sort"
"strconv"
"github.com/noironetworks/cilium-net/bpf/policymap"
@@ -96,3 +97,40 @@ func (e *Endpoint) GetFmtOpt(name string) string {
}
return "#undef " + name
}
+
+type orderEndpoint func(e1, e2 *Endpoint) bool
+
+// OrderEndpointAsc orders the slice of Endpoint in ascending ID order.
+func OrderEndpointAsc(eps []Endpoint) {
+ ascPriority := func(e1, e2 *Endpoint) bool {
+ e1Int, _ := strconv.ParseUint(e1.ID, 10, 64)
+ e2Int, _ := strconv.ParseUint(e2.ID, 10, 64)
+ return e1Int < e2Int
+ }
+ orderEndpoint(ascPriority).sort(eps)
+}
+
+func (by orderEndpoint) sort(eps []Endpoint) {
+ dS := &epSorter{
+ eps: eps,
+ by: by,
+ }
+ sort.Sort(dS)
+}
+
+type epSorter struct {
+ eps []Endpoint
+ by func(e1, e2 *Endpoint) bool
+}
+
+func (epS *epSorter) Len() int {
+ return len(epS.eps)
+}
+
+func (epS *epSorter) Swap(i, j int) {
+ epS.eps[i], epS.eps[j] = epS.eps[j], epS.eps[i]
+}
+
+func (epS *epSorter) Less(i, j int) bool {
+ return epS.by(&epS.eps[i], &epS.eps[j])
+}
diff --git a/common/types/endpoint_test.go b/common/types/endpoint_test.go
index <HASH>..<HASH> 100644
--- a/common/types/endpoint_test.go
+++ b/common/types/endpoint_test.go
@@ -37,3 +37,22 @@ func (s *EndpointSuite) TestGetFmtOpt(c *C) {
c.Assert(e.GetFmtOpt("BAR"), Equals, "#undef BAR")
c.Assert(e.GetFmtOpt("BAZ"), Equals, "#undef BAZ")
}
+
+func (s *EndpointSuite) TestOrderEndpointAsc(c *C) {
+ eps := []Endpoint{
+ Endpoint{ID: "5"},
+ Endpoint{ID: "1000"},
+ Endpoint{ID: "1"},
+ Endpoint{ID: "3"},
+ Endpoint{ID: "2"},
+ }
+ epsWant := []Endpoint{
+ Endpoint{ID: "1"},
+ Endpoint{ID: "2"},
+ Endpoint{ID: "3"},
+ Endpoint{ID: "5"},
+ Endpoint{ID: "1000"},
+ }
+ OrderEndpointAsc(eps)
+ c.Assert(eps, DeepEquals, epsWant)
+}
|
Implemented a slice endpoint sorter by ID
|
cilium_cilium
|
train
|
2315a386c868cc585ff5628a84c22a20e4504545
|
diff --git a/bootstrap-wysiwyg.js b/bootstrap-wysiwyg.js
index <HASH>..<HASH> 100644
--- a/bootstrap-wysiwyg.js
+++ b/bootstrap-wysiwyg.js
@@ -153,7 +153,9 @@
};
options = $.extend({}, $.fn.wysiwyg.defaults, userOptions);
bindHotkeys(options.hotKeys);
- initFileDrops();
+ if (options.dragAndDropImages) {
+ initFileDrops();
+ }
bindToolbar($(options.toolbarSelector), options);
editor.attr('contenteditable', true)
.on('mouseup keyup mouseout', function () {
@@ -190,6 +192,7 @@
activeToolbarClass: 'btn-info',
selectionMarker: 'edit-focus-marker',
selectionColor: 'darkgrey',
+ dragAndDropImages: true,
fileUploadError: function (reason, detail) { console.log("File upload error", reason, detail); }
};
}(window.jQuery));
|
Added an option for enabling/disabling dragging and droppping images.
|
steveathon_bootstrap-wysiwyg
|
train
|
d8ea6fabf3064fb4eb879c59c55574c04c6ed7cb
|
diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -1,3 +1,6 @@
+// Dependencies
+var GitUp = require("git-up");
+
/**
* GitUrlParse
* Parses a Git url.
@@ -7,11 +10,22 @@
* @param {String} url The Git url to parse.
* @return {GitUrl} The `GitUrl` object containing:
*
+ * - `protocols` (Array): An array with the url protocols (usually it has one element).
+ * - `port` (null|Number): The domain port.
+ * - `resource` (String): The url domain (including subdomains).
+ * - `user` (String): The authentication user (usually for ssh urls).
+ * - `pathname` (String): The url pathname.
+ * - `hash` (String): The url hash.
+ * - `search` (String): The url querystring value.
+ * - `href` (String): The input url.
+ * - `protocol` (String): The git url protocol.
+ * - `token` (String): The oauth token (could appear in the https urls).
+ *
* - `protocol` (String): The url protocol.
* - `source` (String): The Git provider (e.g. `"github.com"`).
* - `owner` (String): The repository owner.
* - `name` (String): The repository name.
- * - `_` (String): The original url which was parsed.
+ * - `href` (String): The original url which was parsed.
* - `toString` (Function): A function to stringify the parsed url into another url type.
*/
function GitUrlParse(url) {
@@ -20,52 +34,35 @@ function GitUrlParse(url) {
throw new Error("The url must be a string.");
}
- var urlInfo = {
- protocol: null
- , source: null
- , owner: null
- , name: null
- , _: url
- , toString: function (type) {
- return GitUrlParse.stringify(this, type);
- }
- }
- , match = null
+ var urlInfo = GitUp(url)
+ , sourceParts = urlInfo.resource.split(".")
+ , splits = null
;
- // SSH protocol
- check_git: if (/^git\@/.test(url)) {
- match = url.match(/^git@(.*):(.*)\/(.*).git$/);
- if (!match) { break check_git; }
- urlInfo.source = match[1];
- urlInfo.owner = match[2];
- urlInfo.name = match[3];
- urlInfo.protocol = "ssh";
- } else
+ urlInfo.toString = function (type) {
+ return GitUrlParse.stringify(this, type);
+ };
- // HTTP(S) protocol
- check_https: if (/^https?:\/\//.test(url)) {
- url = url.replace(/(\/|\.git)$/, "");
- match = url.match(/^(https?):\/\/(.*)\/(.*)\/(.*)\/?$/);
- if (!match) { break check_https; }
- urlInfo.protocol = match[1];
- urlInfo.source = match[2];
- urlInfo.owner = match[3];
- urlInfo.name = match[4];
- } else
+ urlInfo.source = sourceParts.length > 2
+ ? sourceParts.slice(-2).join(".")
+ : urlInfo.source = urlInfo.resource
+ ;
- // git+ssh protocol
- check_gitssh: if (/^git\+ssh:\/\/git\@/.test(url)) {
- url = url.replace(/\.git$/, "");
- match = url.match(/^git\+ssh:\/\/git\@(.*)\/(.*)\/(.*)\/?$/);
- if (!match) { break check_gitssh; }
- urlInfo.protocol = "git+ssh";
- urlInfo.source = match[1];
- urlInfo.owner = match[2];
- urlInfo.name = match[3];
- } else {
- urlInfo.protocol = "file";
- // Feel free to add more parsers
+ urlInfo.name = urlInfo.pathname.substring(1).replace(/\.git$/, "");
+ urlInfo.owner = urlInfo.user;
+
+ switch (urlInfo.source) {
+ case "cloudforge.com":
+ urlInfo.owner = urlInfo.user;
+ urlInfo.organization = sourceParts[0];
+ break;
+ default:
+ splits = urlInfo.name.split("/");
+ if (splits.length === 2) {
+ urlInfo.owner = splits[0];
+ urlInfo.name = splits[1];
+ }
+ break;
}
return urlInfo;
@@ -92,7 +89,7 @@ GitUrlParse.stringify = function (obj, type) {
case "https":
return type + "://" + obj.source + "/" + obj.owner + "/" + obj.name + ".git";
default:
- return obj._;
+ return obj.href;
}
};
|
Use git-up to parse the urls
|
IonicaBizau_git-url-parse
|
train
|
184d62acf8ac77782878188aa43b8e8f2759a0f7
|
diff --git a/safe/gui/widgets/dock.py b/safe/gui/widgets/dock.py
index <HASH>..<HASH> 100644
--- a/safe/gui/widgets/dock.py
+++ b/safe/gui/widgets/dock.py
@@ -953,9 +953,6 @@ class Dock(QtGui.QDockWidget, FORM_CLASS):
self.show_impact(table_report_path)
break
- if isinstance(self.impact_function, MultiExposureImpactFunction):
- self.print_button.setEnabled(False)
-
if show_keywords:
if inasafe_keyword_version_key not in keywords.keys():
show_keyword_version_message(
diff --git a/safe/impact_function/multi_exposure_wrapper.py b/safe/impact_function/multi_exposure_wrapper.py
index <HASH>..<HASH> 100644
--- a/safe/impact_function/multi_exposure_wrapper.py
+++ b/safe/impact_function/multi_exposure_wrapper.py
@@ -42,7 +42,7 @@ from safe.definitions.constants import (
from safe.definitions.layer_purposes import (
layer_purpose_analysis_impacted,
layer_purpose_aggregation_summary,
-)
+ layer_purpose_exposure_summary)
from safe.definitions.provenance import (
provenance_aggregation_keywords,
provenance_aggregation_layer,
@@ -1096,7 +1096,19 @@ class MultiExposureImpactFunction(object):
child for child in multi_exposure_group.children() if (
isinstance(child, QgsLayerTreeGroup))]
- if not exposure_groups:
+ if exposure_groups:
+ extra_layers = []
+ for exposure_group in exposure_groups:
+ tree_layers = [
+ child for child in exposure_group.children() if (
+ isinstance(child, QgsLayerTreeLayer))]
+ for tree_layer in tree_layers:
+ layer_purpose = KeywordIO.read_keywords(
+ tree_layer.layer(), 'layer_purpose')
+ if layer_purpose == (
+ layer_purpose_exposure_summary['key']):
+ extra_layers.append(tree_layer.layer())
+ else:
extra_layers = [
tree_layer.layer() for tree_layer in (
multi_exposure_tree_layers)]
diff --git a/safe/report/extractors/composer.py b/safe/report/extractors/composer.py
index <HASH>..<HASH> 100644
--- a/safe/report/extractors/composer.py
+++ b/safe/report/extractors/composer.py
@@ -242,10 +242,18 @@ def qgis_composer_extractor(impact_report, component_metadata):
# Define the layers for the impact map.
if not impact_report.multi_exposure_impact_function: # single IF
+ layers = [impact_report.impact] + impact_report.extra_layers
+ else: # multi-exposure IF
+ layers = [] + impact_report.extra_layers
+
+ add_supplementary_layers = (
+ not impact_report.multi_exposure_impact_function or not (
+ impact_report.multi_exposure_impact_function.output_layers_ordered)
+ )
+ if add_supplementary_layers:
# Check show only impact.
show_only_impact = setting(
'set_show_only_impact_on_report', False, bool)
- layers = [impact_report.impact] + impact_report.extra_layers
layer_registry = QgsMapLayerRegistry.instance()
if not show_only_impact:
hazard_layer = layer_registry.mapLayers().get(
@@ -266,8 +274,6 @@ def qgis_composer_extractor(impact_report, component_metadata):
exposure_layer = layer_registry.mapLayers().get(
provenance['exposure_layer_id'])
layers.append(exposure_layer)
- else: # multi-exposure IF
- layers = impact_report.extra_layers
# default extent is analysis extent
if not qgis_context.extent:
@@ -287,7 +293,7 @@ def qgis_composer_extractor(impact_report, component_metadata):
if not impact_report.multi_exposure_impact_function: # single IF
layers = [impact_report.impact]
else: # multi-exposure IF
- layers = [impact_report.extra_layers[0]]
+ layers = [] + impact_report.extra_layers
symbol_count = 0
for l in layers:
layer = l
diff --git a/safe/report/processors/default.py b/safe/report/processors/default.py
index <HASH>..<HASH> 100644
--- a/safe/report/processors/default.py
+++ b/safe/report/processors/default.py
@@ -572,8 +572,9 @@ def qgis_composer_renderer(impact_report, component):
QgsMapLayerRegistry.instance().addMapLayer(layer)
# used for customizations
tree_layer = root_group.addLayer(layer)
- QgsLegendRenderer.setNodeLegendStyle(
- tree_layer, QgsComposerLegendStyle.Hidden)
+ if not impact_report.multi_exposure_impact_function:
+ QgsLegendRenderer.setNodeLegendStyle(
+ tree_layer, QgsComposerLegendStyle.Hidden)
legend.synchronizeWithModel()
# process to output
|
multi-exposure IF map report without custom layers order
|
inasafe_inasafe
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.