hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
7427d5942963a56287fb261b015aeac0ecafcea2
|
diff --git a/builtin_string.go b/builtin_string.go
index <HASH>..<HASH> 100644
--- a/builtin_string.go
+++ b/builtin_string.go
@@ -380,12 +380,7 @@ func builtinString_split(call FunctionCall) Value {
split = split[:limit]
}
- valueArray := make([]Value, len(split))
- for index, value := range split {
- valueArray[index] = toValue_string(value)
- }
-
- return toValue_object(call.runtime.newArrayOf(valueArray))
+ return call.runtime.toValue(split)
}
}
|
made string splits *much* faster
|
robertkrimen_otto
|
train
|
8a702cd8c9e88a3ae2d38b5455d871997834d90c
|
diff --git a/blade-core/src/main/java/com/hellokaton/blade/mvc/HttpConst.java b/blade-core/src/main/java/com/hellokaton/blade/mvc/HttpConst.java
index <HASH>..<HASH> 100644
--- a/blade-core/src/main/java/com/hellokaton/blade/mvc/HttpConst.java
+++ b/blade-core/src/main/java/com/hellokaton/blade/mvc/HttpConst.java
@@ -8,8 +8,12 @@ public interface HttpConst {
String HEADER_CONTENT_TYPE = "Content-Type";
String HEADER_HOST = "Host";
+ String HEADER_LOCATION = "Location";
+
String HEADER_ACCEPT_ENCODING = "Accept-Encoding";
+ String HEADER_IF_MODIFIED_SINCE ="If-Modified-Since";
+
String HEADER_USER_AGENT = "User-Agent";
String CONTENT_TYPE_MULTIPART = "multipart/form-data";
diff --git a/blade-core/src/main/java/com/hellokaton/blade/server/NettyHttpConst.java b/blade-core/src/main/java/com/hellokaton/blade/server/NettyHttpConst.java
index <HASH>..<HASH> 100644
--- a/blade-core/src/main/java/com/hellokaton/blade/server/NettyHttpConst.java
+++ b/blade-core/src/main/java/com/hellokaton/blade/server/NettyHttpConst.java
@@ -1,11 +1,5 @@
package com.hellokaton.blade.server;
-import com.hellokaton.blade.mvc.HttpConst;
-import io.netty.util.AsciiString;
-
-import java.util.HashMap;
-import java.util.Map;
-
/**
* Http headers const
*
@@ -14,30 +8,9 @@ import java.util.Map;
*/
public interface NettyHttpConst {
- String IF_MODIFIED_SINCE = "If-Modified-Since";
-
String METHOD_GET = "GET";
String SLASH = "/";
char CHAR_SLASH = '/';
char CHAR_POINT = '.';
- AsciiString CONNECTION = AsciiString.cached("Connection");
- AsciiString CONTENT_LENGTH = AsciiString.cached("Content-Length");
- AsciiString CONTENT_TYPE = AsciiString.cached(HttpConst.HEADER_CONTENT_TYPE);
- AsciiString CONTENT_ENCODING = AsciiString.cached("Content-Encoding");
- AsciiString DATE = AsciiString.cached("Date");
- AsciiString LOCATION = AsciiString.cached("Location");
- AsciiString EXPIRES = AsciiString.cached("Expires");
- AsciiString CACHE_CONTROL = AsciiString.cached("Cache-Control");
- AsciiString LAST_MODIFIED = AsciiString.cached("Last-Modified");
- AsciiString SERVER = AsciiString.cached("Server");
- AsciiString SET_COOKIE = AsciiString.cached("Set-Cookie");
- AsciiString KEEP_ALIVE = AsciiString.cached("keep-alive");
-
- Map<String, AsciiString> CACHE = new HashMap<>(16);
-
- static AsciiString getAsciiString(String name) {
- return CACHE.computeIfAbsent(name, AsciiString::cached);
- }
-
}
|
:art: improve code struct
|
lets-blade_blade
|
train
|
546ed13563c3530b414d64b5a815c0919ab0513a
|
diff --git a/stanza/models/tokenize/utils.py b/stanza/models/tokenize/utils.py
index <HASH>..<HASH> 100644
--- a/stanza/models/tokenize/utils.py
+++ b/stanza/models/tokenize/utils.py
@@ -56,14 +56,6 @@ def process_sentence(sentence, mwt_dict=None):
i += 1
return sent
-def find_token(token, text):
- """
- Robustly finds the first occurrence of token in the text, and return its offset and it's underlying original string.
- Ignores whitespace mismatches between the text and the token.
- """
- m = re.search(r'\s*'.join([r'\s' if re.match(r'\s', x) else re.escape(x) for x in token]), text)
- return m.start(), m.group()
-
def output_predictions(output_file, trainer, data_generator, vocab, mwt_dict, max_seqlen=1000, orig_text=None, no_ssplit=False):
paragraphs = []
for i, p in enumerate(data_generator.sentences):
@@ -132,7 +124,7 @@ def output_predictions(output_file, trainer, data_generator, vocab, mwt_dict, ma
oov_count = 0
doc = []
- text = orig_text
+ text = re.sub('\s', ' ', orig_text)
char_offset = 0
for j in range(len(paragraphs)):
@@ -160,11 +152,12 @@ def output_predictions(output_file, trainer, data_generator, vocab, mwt_dict, ma
current_tok = ''
continue
if orig_text is not None:
- st0, tok0 = find_token(tok, text)
- st = char_offset + st0
- text = text[st0 + len(tok0):]
- char_offset += st0 + len(tok0)
- additional_info = {START_CHAR: st, END_CHAR: st + len(tok0)}
+ lstripped = current_tok.lstrip()
+ st0 = text.index(current_tok)
+ st = char_offset + st0 + (len(current_tok) - len(lstripped))
+ text = text[st0 + len(current_tok):]
+ char_offset += st0 + len(current_tok)
+ additional_info = {START_CHAR: st, END_CHAR: st + len(lstripped)}
else:
additional_info = dict()
current_sent += [(tok, p, additional_info)]
|
Potentially faster version of find_token
|
stanfordnlp_stanza
|
train
|
5bf0a22e7c85fb6f006ae6bade7b344a07c91552
|
diff --git a/plugins/PluginHSTS.py b/plugins/PluginHSTS.py
index <HASH>..<HASH> 100644
--- a/plugins/PluginHSTS.py
+++ b/plugins/PluginHSTS.py
@@ -28,10 +28,9 @@
#-------------------------------------------------------------------------------
from xml.etree.ElementTree import Element
-import socket
-
+import httplib
from plugins import PluginBase
-from utils.ctSSL import ctSSL_initialize, ctSSL_cleanup
+
class PluginHSTS(PluginBase.PluginBase):
@@ -47,30 +46,40 @@ class PluginHSTS(PluginBase.PluginBase):
output_format = ' {0:<25} {1}'
- ctSSL_initialize()
- ssl_connect = self._create_ssl_connection(target)
+ hsts_supported = False
+ hsts_timeout = ""
+ (host, addr, port) = target
+ connection = httplib.HTTPSConnection(host)
+ try:
+ connection.connect()
+ connection.request("HEAD", "/", headers={"Connection": "close"})
+ response = connection.getresponse()
+ headers = response.getheaders()
+ for (field, data) in headers:
+ if field == 'strict-transport-security':
+ hsts_supported = True
+ hsts_timeout = data
- header = None
+ except httplib.HTTPException as ex:
+ print "Error: %s" % ex
- #try: # Perform the SSL handshake
- ssl_connect.connect()
- ssl_connect.request("HEAD", "/", headers={"Connection": "close"})
- http_response = ssl_connect.getresponse()
- header = http_response.getheader('Strict-Transport-Security', None)
-
- ctSSL_cleanup()
+ finally:
+ connection.close()
# Text output
cmd_title = 'HSTS'
txt_result = [self.PLUGIN_TITLE_FORMAT.format(cmd_title)]
- txt_result.append(output_format.format("Strict-Transport-Security header:", header))
+ if hsts_supported:
+ txt_result.append(output_format.format("Supported:", hsts_timeout))
+ else:
+ txt_result.append(output_format.format("Not supported.", ""))
# XML output
- xml_hsts_attr = {'header_found': str(header != None)}
- if header:
- xml_hsts_attr['header'] = header
+ xml_hsts_attr = {'hsts_header_found': str(hsts_supported)}
+ if hsts_supported:
+ xml_hsts_attr['hsts_header'] = hsts_timeout
xml_hsts = Element('hsts', attrib = xml_hsts_attr)
-
+
xml_result = Element(self.__class__.__name__, command = command,
title = cmd_title)
xml_result.append(xml_hsts)
|
Reworked the HSTS plugin to work with the new sslyze interface. Changed the way SSL is set up and called. Completed the functionality pre testing.
|
nabla-c0d3_sslyze
|
train
|
0782c0de4dff7e43fe6ebc8d30aba7196371d136
|
diff --git a/code/ReportAdmin.php b/code/ReportAdmin.php
index <HASH>..<HASH> 100755
--- a/code/ReportAdmin.php
+++ b/code/ReportAdmin.php
@@ -214,6 +214,7 @@ class ReportAdmin extends LeftAndMain {
}
$form->setFormAction($this->Link() . '/EditForm?' . http_build_query($filteredCriteria));
+ $form->setTemplate('ReportAdminForm');
return $form;
}
@@ -240,8 +241,7 @@ class ReportAdmin extends LeftAndMain {
}
public function updatereport() {
- FormResponse::load_form($this->EditForm()->renderWith('Form'));
- FormResponse::add("$('tab-Root_Report').onclick();");
+ FormResponse::load_form($this->EditForm()->forTemplate());
return FormResponse::respond();
}
}
diff --git a/css/cms_right.css b/css/cms_right.css
index <HASH>..<HASH> 100644
--- a/css/cms_right.css
+++ b/css/cms_right.css
@@ -234,6 +234,19 @@
}
/**
+ * Non-tabbed scrolling area that can be used in place of tabs in LeftAndMAin
+ */
+.right #ScrollPanel {
+ background: #fff;
+ clear: left;
+ overflow: auto;
+ border: 1px solid #aaa;
+ position: relative;
+ top: 0;
+ padding: 10px;
+}
+
+/**
* RHS Action Parameters boxes
*/
#right form.actionparams {
diff --git a/javascript/LeftAndMain.js b/javascript/LeftAndMain.js
index <HASH>..<HASH> 100644
--- a/javascript/LeftAndMain.js
+++ b/javascript/LeftAndMain.js
@@ -156,6 +156,11 @@ window.ontabschanged = function() {
fitToParent(_TAB_DIVS_ON_PAGE[i], 30);
}
}
+
+ // Non-tab alternative
+ if($('ScrollPanel')) {
+ fitToParent('ScrollPanel', 0);
+ }
}
window.onresize = function(init) {
diff --git a/javascript/ReportAdmin_right.js b/javascript/ReportAdmin_right.js
index <HASH>..<HASH> 100755
--- a/javascript/ReportAdmin_right.js
+++ b/javascript/ReportAdmin_right.js
@@ -11,6 +11,10 @@ Behaviour.register({
*/
prepareForm : function() {
ajaxActionsAtTop('Form_EditForm', 'form_actions', 'right');
+
+ // Custom code for reports section - link the search button to ajax
+ var updateReportButtonHolder = $('action_updatereport');
+ if(updateReportButtonHolder) prepareAjaxActions(updateReportButtonHolder, 'Form_EditForm');
},
/**
@@ -107,7 +111,6 @@ Behaviour.register({
this.resetElements();
window.ontabschanged();
-
}
}
-});
+});
\ No newline at end of file
|
ENHANCEMENT: Improved look and feel for report filtering
ENHANCEMNT: Added export and print buttons to reports (from r<I>)
git-svn-id: svn://svn.silverstripe.com/silverstripe/open/modules/cms/branches/<I>@<I> <I>b<I>ca-7a2a-<I>-9d3b-<I>d<I>a<I>a9
|
silverstripe_silverstripe-reports
|
train
|
4169724b86f149285bdde72283b0ed7706a7f183
|
diff --git a/spring-boot-project/spring-boot/src/test/java/org/springframework/boot/web/embedded/netty/NettyReactiveWebServerFactoryTests.java b/spring-boot-project/spring-boot/src/test/java/org/springframework/boot/web/embedded/netty/NettyReactiveWebServerFactoryTests.java
index <HASH>..<HASH> 100644
--- a/spring-boot-project/spring-boot/src/test/java/org/springframework/boot/web/embedded/netty/NettyReactiveWebServerFactoryTests.java
+++ b/spring-boot-project/spring-boot/src/test/java/org/springframework/boot/web/embedded/netty/NettyReactiveWebServerFactoryTests.java
@@ -21,6 +21,7 @@ import java.time.Duration;
import java.util.Arrays;
import org.awaitility.Awaitility;
+import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.mockito.InOrder;
import reactor.core.publisher.Mono;
@@ -125,6 +126,12 @@ class NettyReactiveWebServerFactoryTests extends AbstractReactiveWebServerFactor
this.webServer.stop();
}
+ @Disabled("Flaky due to https://github.com/reactor/reactor-netty/issues/1093")
+ @Override
+ protected void whenARequestRemainsInFlightThenShutDownGracefullyDoesNotInvokeCallbackUntilTheRequestCompletes() {
+
+ }
+
protected Mono<String> testSslWithAlias(String alias) {
String keyStore = "classpath:test.jks";
String keyPassword = "password";
diff --git a/spring-boot-project/spring-boot/src/test/java/org/springframework/boot/web/reactive/server/AbstractReactiveWebServerFactoryTests.java b/spring-boot-project/spring-boot/src/test/java/org/springframework/boot/web/reactive/server/AbstractReactiveWebServerFactoryTests.java
index <HASH>..<HASH> 100644
--- a/spring-boot-project/spring-boot/src/test/java/org/springframework/boot/web/reactive/server/AbstractReactiveWebServerFactoryTests.java
+++ b/spring-boot-project/spring-boot/src/test/java/org/springframework/boot/web/reactive/server/AbstractReactiveWebServerFactoryTests.java
@@ -359,7 +359,7 @@ public abstract class AbstractReactiveWebServerFactoryTests {
}
@Test
- void whenARequestRemainsInFlightThenShutDownGracefullyDoesNotInvokeCallbackUntilTheRequestCompletes()
+ protected void whenARequestRemainsInFlightThenShutDownGracefullyDoesNotInvokeCallbackUntilTheRequestCompletes()
throws Exception {
AbstractReactiveWebServerFactory factory = getFactory();
factory.setShutdown(Shutdown.GRACEFUL);
|
Disable test that is flaky due to Reactor Netty problem
See gh-<I>
|
spring-projects_spring-boot
|
train
|
483e74cbf8ccef7a24853d2e3e17c342cbbc1e03
|
diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -15,16 +15,16 @@ function farfetched(url, options) {
function makeHandler(url, options) {
- var response = options.response;
+ var __response = options.response;
return {
url,
respond: function(url) {
- if (typeof response === "function") {
- response = response(url);
- }
-
- return new Response(response);
+ return new Response(
+ (typeof __response === "function")
+ ? __response(url)
+ : __response
+ );
}
};
}
diff --git a/test/index.js b/test/index.js
index <HASH>..<HASH> 100644
--- a/test/index.js
+++ b/test/index.js
@@ -76,6 +76,22 @@ describe("farfetched", function() {
});
+ it("should not cache results of hander functions", function(done) {
+ var requestURL = "/requestURL?foo=true";
+ farfetched(/requestURL/, {
+ response: function(url) { return url; }
+ });
+ window.fetch(requestURL).catch(done);
+ window.fetch("/requestURL?foo=false")
+ .then(text)
+ .then(function(response) {
+ assert(response.indexOf("false") > -1);
+ done();
+ })
+ .catch(done);
+ });
+
+
it("should allow fetches to be mocked more than once", function(done) {
var a, b;
farfetched("/repeat", { response: "foo" });
|
Fix handlers' responses being cached
Fixes #3
|
athaeryn_farfetched
|
train
|
adbd8258dce92c4609a94819abba9f22529048ca
|
diff --git a/test/UserInfoTest.php b/test/UserInfoTest.php
index <HASH>..<HASH> 100644
--- a/test/UserInfoTest.php
+++ b/test/UserInfoTest.php
@@ -43,6 +43,23 @@ class UserInfoTest extends PHPUnit_Framework_TestCase
}
/**
+ * @dataProvider sameValueAsProvider
+ */
+ public function testSameValueAs($userinfo, $userinfobis, $expected)
+ {
+ $this->assertSame($expected, $userinfo->sameValueAs($userinfobis));
+ }
+
+ public function sameValueAsProvider()
+ {
+ return [
+ [new UserInfo(), new UserInfo('foo', 'bar'), false],
+ [new UserInfo('foo', 'bar'), new UserInfo('foo', 'bar'), true],
+ [new UserInfo('', 'bar'), new UserInfo('', 'coucou'), true],
+ ];
+ }
+
+ /**
* @param $login
* @param $pass
* @param $expected
|
Improve UserInfo test suite
Add more test to validate the behavior of the sameValueAs method
with the UserInfo URL part class
|
thephpleague_uri-manipulations
|
train
|
db2ff5af33c879b1c49368e36ee1adc1ec4a9756
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,6 @@
# v4.8.0
* Added: Option to refresh on flush as opposed to flushing at query time
+* Added: Graph.getElements - get multiple elements (vertices and edges) with one call
* Fixed: Soft delete with event data cannot be un-soft deleted
* Changed: Elasticsearch to use bulk updates
* Changed: EdgeVertexPairs can now have null vertex ids if user can see the edge but not the vertex
diff --git a/core/src/main/java/org/vertexium/Graph.java b/core/src/main/java/org/vertexium/Graph.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/vertexium/Graph.java
+++ b/core/src/main/java/org/vertexium/Graph.java
@@ -11,12 +11,10 @@ import org.vertexium.query.Aggregation;
import org.vertexium.query.GraphQuery;
import org.vertexium.query.MultiVertexQuery;
import org.vertexium.query.SimilarToGraphQuery;
+import org.vertexium.util.JoinIterable;
import java.io.InputStream;
-import java.util.Collection;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
import java.util.stream.Stream;
import static org.vertexium.util.Preconditions.checkNotNull;
@@ -134,6 +132,66 @@ public interface Graph {
}
/**
+ * Gets elements from the graph
+ *
+ * @param elementIds The element ids to retrieve from the graph.
+ * @param authorizations The authorizations required to load the elements.
+ * @return The found elements, if an element is not found it will not be returned.
+ */
+ default Iterable<? extends Element> getElements(
+ Iterable<ElementId> elementIds,
+ Authorizations authorizations
+ ) {
+ return getElements(elementIds, FetchHints.ALL, authorizations);
+ }
+
+ /**
+ * Gets elements from the graph
+ *
+ * @param elementIds The element ids to retrieve from the graph.
+ * @param fetchHints Hint at what parts of the elements to fetch.
+ * @param authorizations The authorizations required to load the elements.
+ * @return The found elements, if an element is not found it will not be returned.
+ */
+ default Iterable<? extends Element> getElements(
+ Iterable<ElementId> elementIds,
+ FetchHints fetchHints,
+ Authorizations authorizations
+ ) {
+ Set<String> vertexIds = new HashSet<>();
+ Set<String> edgeIds = new HashSet<>();
+ for (ElementId elementId : elementIds) {
+ switch (elementId.getElementType()) {
+ case VERTEX:
+ vertexIds.add(elementId.getId());
+ break;
+ case EDGE:
+ edgeIds.add(elementId.getId());
+ break;
+ default:
+ throw new VertexiumException("unhandled element type: " + elementId.getElementType());
+ }
+ }
+
+ if (vertexIds.size() == 0 && edgeIds.size() == 0) {
+ return Collections.emptyList();
+ }
+
+ if (vertexIds.size() == 0) {
+ return getEdges(edgeIds, fetchHints, authorizations);
+ }
+
+ if (edgeIds.size() == 0) {
+ return getVertices(vertexIds, fetchHints, authorizations);
+ }
+
+ return new JoinIterable<>(
+ getVertices(vertexIds, fetchHints, authorizations),
+ getEdges(edgeIds, fetchHints, authorizations)
+ );
+ }
+
+ /**
* Deletes multiple elements
*
* @param elementIds The element ids to delete
diff --git a/test/src/main/java/org/vertexium/test/GraphTestBase.java b/test/src/main/java/org/vertexium/test/GraphTestBase.java
index <HASH>..<HASH> 100644
--- a/test/src/main/java/org/vertexium/test/GraphTestBase.java
+++ b/test/src/main/java/org/vertexium/test/GraphTestBase.java
@@ -1554,6 +1554,46 @@ public abstract class GraphTestBase {
}
@Test
+ public void testGetElements() {
+ Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A);
+ Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A);
+ graph.prepareEdge("e1", v1, v2, LABEL_LABEL1, VISIBILITY_A)
+ .setProperty("prop1", "e1", VISIBILITY_A)
+ .save(AUTHORIZATIONS_A_AND_B);
+ graph.flush();
+
+ ArrayList<ElementId> elementIds = Lists.newArrayList(
+ ElementId.edge("e1"),
+ ElementId.vertex("v1"),
+ ElementId.vertex("v2")
+ );
+ assertElementIdsAnyOrder(
+ graph.getElements(elementIds, AUTHORIZATIONS_A),
+ "e1", "v1", "v2"
+ );
+
+ elementIds = Lists.newArrayList(
+ ElementId.edge("e1")
+ );
+ assertElementIdsAnyOrder(
+ graph.getElements(elementIds, AUTHORIZATIONS_A),
+ "e1"
+ );
+
+ elementIds = Lists.newArrayList(
+ ElementId.vertex("v1"),
+ ElementId.vertex("v2")
+ );
+ assertElementIdsAnyOrder(
+ graph.getElements(elementIds, AUTHORIZATIONS_A),
+ "v1", "v2"
+ );
+
+ elementIds = Lists.newArrayList();
+ assertElementIdsAnyOrder(graph.getElements(elementIds, AUTHORIZATIONS_A));
+ }
+
+ @Test
public void testMarkVertexAndPropertiesHidden() {
graph.prepareVertex("v1", VISIBILITY_EMPTY)
.addPropertyValue("k1", "age", 25, VISIBILITY_EMPTY)
|
Added: Graph.getElements - get multiple elements (vertices and edges) with one call
|
visallo_vertexium
|
train
|
ffd9c553fff2e08d227e4444f2d3b9c83c3d8a22
|
diff --git a/pyghmi/ipmi/private/session.py b/pyghmi/ipmi/private/session.py
index <HASH>..<HASH> 100644
--- a/pyghmi/ipmi/private/session.py
+++ b/pyghmi/ipmi/private/session.py
@@ -1570,6 +1570,15 @@ class Session(object):
# For now, skip the checksums since we are in LAN only,
# TODO(jbjohnso): if implementing other channels, add checksum checks
# here
+ if len(payload) < 7:
+ # This cannot possibly be a valid IPMI packet. Note this is after
+ # the integrity checks, so this must be a buggy BMC packet
+ # One example was a BMC that if receiving an SOL deactivate
+ # from another party would emit what looks to be an attempt
+ # at SOL deactivation payload, but with the wrong payload type
+ # since we can't do anything remotely sane with such a packet,
+ # drop it and carry about our business.
+ return
if self.servermode:
self.seqlun = payload[4]
self.clientaddr = payload[3]
|
Ignore small packets claiming to be IPMI payload
A particular BMC had it's wires crossed and sent a stray SOL packet
with the wrong payload type. We can't do anything to correctly process
said packet without making a pretty wild assumption about it, so drop
it with prejudice.
Change-Id: I<I>e<I>a<I>f<I>d<I>cc<I>c3e<I>ce<I>b<I>e<I>e
|
openstack_pyghmi
|
train
|
8a265077a072a6247f0f5a1e29f79b4304ec52e2
|
diff --git a/sources/scalac/symtab/SourceCompleter.java b/sources/scalac/symtab/SourceCompleter.java
index <HASH>..<HASH> 100644
--- a/sources/scalac/symtab/SourceCompleter.java
+++ b/sources/scalac/symtab/SourceCompleter.java
@@ -39,8 +39,12 @@ public class SourceCompleter extends Type.LazyType {
c.fullName()) + ".scala";
java.io.File f = global.classPath.openJavaFile(filename);
Unit unit = new Unit(global, new SourceFile(f), false);
+ Phase phase = global.currentPhase;
+ global.currentPhase = global.PHASE.PARSER.phase();
global.PHASE.PARSER.phase().apply(new Unit[] {unit});
+ global.currentPhase = global.PHASE.ANALYZER.phase();
((AnalyzerPhase)global.PHASE.ANALYZER.phase()).lateEnter(global, unit, c);
+ global.currentPhase = phase;
global.operation("added " + filename + " in " +
(System.currentTimeMillis() - msec) + "ms");
} catch (IOException e) {
|
- Added currentPhase modification
|
scala_scala
|
train
|
9c66d612ad2063d12c1c82aa729abef4e9a7a040
|
diff --git a/packages/twig/src/tags/render.js b/packages/twig/src/tags/render.js
index <HASH>..<HASH> 100644
--- a/packages/twig/src/tags/render.js
+++ b/packages/twig/src/tags/render.js
@@ -2,6 +2,7 @@
const _ = require('lodash');
const path = require('path');
+const utils = require('@frctl/fractal').utils;
/**
* Render tag
@@ -56,7 +57,7 @@ module.exports = function (fractal) {
let innerContext = entity.isComponent ? entity.variants().default().getContext() : entity.getContext();
if (token.contextStack !== undefined) {
- _.assign(innerContext, Twig.expression.parse.apply(this, [token.contextStack, context]));
+ innerContext = utils.defaultsDeep(Twig.expression.parse.apply(this, [token.contextStack, context]), innerContext);
}
let template;
|
merge objects instead of assign for render tag (#<I>)
use _.merge instead of _.assign for deep objects
|
frctl_fractal
|
train
|
9a577f15508ae3c7ea765739245171e4ae0f63d6
|
diff --git a/lib/notee/helpers/notee_helper.rb b/lib/notee/helpers/notee_helper.rb
index <HASH>..<HASH> 100644
--- a/lib/notee/helpers/notee_helper.rb
+++ b/lib/notee/helpers/notee_helper.rb
@@ -63,24 +63,18 @@ module Notee
@posts = Notee::Post.where(user_id: writer.id, status: Notee::STATUS[:published], is_deleted: false).order(published_at: :desc)
end
-
def notee_categories
- posts = Notee::Post.select(:category_id).where(status: 1, is_deleted: false).order(created_at: :desc)
- notee_categories = {}
- posts.each do |post|
- category = post.category
- if notee_categories.has_key?(category.name)
- notee_categories[category.name][0] = notee_categories[category.name][0] + 1
- else
- notee_categories.store(category.name, [1, category])
- end
+ notee_categories_arr = {}
+
+ get_parent_categories_arr.each do |cate|
+ post_count = get_category_posts_count(cate)
+ notee_categories_arr.store(cate.name, [post_count, cate])
end
- notee_categories
+ notee_categories_arr
end
-
def notee_archives
posts = Notee::Post.select(:published_at).where(status: 1, is_deleted: false).order(created_at: :desc)
@@ -132,6 +126,47 @@ module Notee
# def secret_notees
# @notees = Notee::Post.where(status: Notee::STATUS[:secret_published]).order(published_at: :desc)
# end
+
+
+ private
+
+ def get_parent_categories_arr
+ categories = Notee::Category.where(is_private: false, is_deleted: false)
+ parent_categories = categories.map do |cate|
+ cate unless cate.parent_id.present?
+ end
+ parent_categories.compact!
+ end
+
+ def get_category_posts_count(category)
+ count = 0
+ count = recursive_category_family_loop(category, count)
+ count
+ end
+
+ def recursive_category_family_loop(category, count)
+ if category.children.present?
+ category.children.each do |child_cate|
+ count = recursive_category_family_loop(child_cate, count)
+ end
+ end
+
+ count = count + get_posts_count(category.posts)
+ count
+ end
+
+ def get_posts_count(posts)
+ count = 0
+ posts.each do |post|
+ count = count + 1 if post.is_deleted == false && post.status == 1
+ end
+
+ count
+ end
+
+ def display_category?(category)
+
+ end
end
end
end
|
make categories parent - children helper method
|
maru-u_notee
|
train
|
b9876e4710f22a60a9600601852fa798f17f1ecb
|
diff --git a/org.ektorp.spring/src/main/resources/org/ektorp/spring/xml/couchdb.xsd b/org.ektorp.spring/src/main/resources/org/ektorp/spring/xml/couchdb.xsd
index <HASH>..<HASH> 100644
--- a/org.ektorp.spring/src/main/resources/org/ektorp/spring/xml/couchdb.xsd
+++ b/org.ektorp.spring/src/main/resources/org/ektorp/spring/xml/couchdb.xsd
@@ -63,6 +63,11 @@
<!-- </xsd:complexType> -->
<xsd:element name="instance">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Configures a org.ektorp.CouchDbInstance in the application context.
+ ]]></xsd:documentation>
+ </xsd:annotation>
<xsd:complexType>
<xsd:attribute name="url" type="xsd:string">
<xsd:annotation>
@@ -73,7 +78,13 @@
]]></xsd:documentation>
</xsd:annotation>
</xsd:attribute>
- <xsd:attribute name="id" type="xsd:string" />
+ <xsd:attribute name="id" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The bean id registered in the application context.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
<xsd:attribute name="properties" type="xsd:string">
<xsd:annotation>
<xsd:documentation><![CDATA[
diff --git a/org.ektorp/src/main/java/org/ektorp/CouchDbConnector.java b/org.ektorp/src/main/java/org/ektorp/CouchDbConnector.java
index <HASH>..<HASH> 100644
--- a/org.ektorp/src/main/java/org/ektorp/CouchDbConnector.java
+++ b/org.ektorp/src/main/java/org/ektorp/CouchDbConnector.java
@@ -228,7 +228,16 @@ public interface CouchDbConnector {
* @return the view result mapped as the specified class.
*/
<T> List<T> queryView(ViewQuery query, Class<T> type);
-
+ /**
+ * Provides paged view results. Implementation based on the recipe described in the book "CouchDB The Definitive Guide"
+ * http://guide.couchdb.org/editions/1/en/recipes.html#pagination
+ *
+ * This method has the same requirements for the view as the method queryView(ViewQuery query, Class<T> type).
+ * @param query
+ * @param pr
+ * @param type
+ * @return
+ */
<T> Page<T> queryForPage(ViewQuery query, PageRequest pr, Class<T> type);
/**
*
diff --git a/org.ektorp/src/main/java/org/ektorp/impl/StdCouchDbConnector.java b/org.ektorp/src/main/java/org/ektorp/impl/StdCouchDbConnector.java
index <HASH>..<HASH> 100644
--- a/org.ektorp/src/main/java/org/ektorp/impl/StdCouchDbConnector.java
+++ b/org.ektorp/src/main/java/org/ektorp/impl/StdCouchDbConnector.java
@@ -57,7 +57,7 @@ import org.slf4j.LoggerFactory;
*/
public class StdCouchDbConnector implements CouchDbConnector {
- private static final int DEFAULT_HEARTBEAT_INTERVAL = 10000;
+ private static final int DEFAULT_HEARTBEAT_INTERVAL = 9000;
private static final Logger LOG = LoggerFactory
.getLogger(StdCouchDbConnector.class);
private static final ResponseCallback<Void> VOID_RESPONSE_HANDLER = new StdResponseHandler<Void>();
|
improved docs
decreased default heartbeat interval for changes feed to 9 sec
|
helun_Ektorp
|
train
|
4d6026d9bb06319da00e0c34ee56d9b880db2a20
|
diff --git a/lib/danger_plugin.rb b/lib/danger_plugin.rb
index <HASH>..<HASH> 100755
--- a/lib/danger_plugin.rb
+++ b/lib/danger_plugin.rb
@@ -86,8 +86,8 @@ module Danger
# Get start from diff.
lineno = first_line.match(/\+(\d+),(\d+)/).captures.first.to_i
diff.each_with_object([]) do |current_line, added_lines|
- lineno += 1 unless current_line.start_with?('-')
added_lines << lineno if current_line.start_with?('+')
+ lineno += 1 unless current_line.start_with?('-')
added_lines
end
end
diff --git a/spec/danger_plugin_spec.rb b/spec/danger_plugin_spec.rb
index <HASH>..<HASH> 100755
--- a/spec/danger_plugin_spec.rb
+++ b/spec/danger_plugin_spec.rb
@@ -20,7 +20,7 @@ module Danger
'offenses' => [
{
'message' => 'No.',
- 'location' => { 'line' => 42 }
+ 'location' => { 'line' => 41 }
}
]
}
@@ -79,7 +79,7 @@ module Danger
'offenses' => [
{
'message' => 'No.',
- 'location' => { 'line' => 42 }
+ 'location' => { 'line' => 41 }
}
]
}
|
Fix added_lines method returns 1 more line number
|
ashfurrow_danger-rubocop
|
train
|
64014d35d875fe731ba8d0e56448dbb25f12b487
|
diff --git a/ChangeLog b/ChangeLog
index <HASH>..<HASH> 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,6 @@
=== Version 0.2.1
* Enhancements
+ * Added ability to locate div by the contained text
* Updated to use selenium-webdriver 2.2.0
* Updated to use watir-webdriver 0.2.8
diff --git a/features/div.feature b/features/div.feature
index <HASH>..<HASH> 100644
--- a/features/div.feature
+++ b/features/div.feature
@@ -26,6 +26,7 @@ Feature: Div
| xpath |
| index |
| name |
+ | text |
Scenario Outline: Locating divs using multiple parameters
When I search for the div by "<param1>" and "<param2>"
diff --git a/features/support/page.rb b/features/support/page.rb
index <HASH>..<HASH> 100644
--- a/features/support/page.rb
+++ b/features/support/page.rb
@@ -82,6 +82,7 @@ class Page
div(:div_id, :id => 'div_id')
div(:div_name, :name => 'div_name')
div(:div_class, :class => 'div_class')
+ div(:div_text, :text => 'page-object rocks!')
div(:div_index, :index => 0)
div(:div_xpath, :xpath => '//div')
div(:div_class_index, :class => "div_class", :index => 0)
diff --git a/lib/page-object/accessors.rb b/lib/page-object/accessors.rb
index <HASH>..<HASH> 100644
--- a/lib/page-object/accessors.rb
+++ b/lib/page-object/accessors.rb
@@ -297,6 +297,7 @@ module PageObject
# * :id => Watir and Selenium
# * :index => Watir and Selenium
# * :name => Watir and Selenium
+ # * :text => Watir and Selenium
# * :xpath => Watir and Selenium
# @param optional block to be invoked when element method is called
#
diff --git a/lib/page-object/elements/div.rb b/lib/page-object/elements/div.rb
index <HASH>..<HASH> 100644
--- a/lib/page-object/elements/div.rb
+++ b/lib/page-object/elements/div.rb
@@ -5,8 +5,13 @@ module PageObject
protected
def self.watir_finders
- [:class, :id, :index, :xpath]
+ [:class, :id, :text, :index, :xpath]
end
+
+ def self.selenium_finders
+ [:class, :id, :name, :text, :xpath, :index]
+ end
+
end
end
end
\ No newline at end of file
diff --git a/lib/page-object/selenium_page_object.rb b/lib/page-object/selenium_page_object.rb
index <HASH>..<HASH> 100755
--- a/lib/page-object/selenium_page_object.rb
+++ b/lib/page-object/selenium_page_object.rb
@@ -479,6 +479,7 @@ module PageObject
return false if identifier[:index]
return false if identifier[:text] and tag == 'input' and additional[:type] == 'hidden'
return false if identifier[:href] and tag == 'a'
+ return false if identifier[:text] and tag == 'div'
true
end
diff --git a/spec/page-object/elements/div_spec.rb b/spec/page-object/elements/div_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/page-object/elements/div_spec.rb
+++ b/spec/page-object/elements/div_spec.rb
@@ -6,14 +6,14 @@ describe PageObject::Elements::Div do
describe "when mapping how to find an element" do
it "should map watir types to same" do
- [:class, :id, :index, :xpath].each do |t|
+ [:class, :id, :text, :index, :xpath].each do |t|
identifier = div.watir_identifier_for t => 'value'
identifier.keys.first.should == t
end
end
it "should map selenium types to same" do
- [:class, :id, :index, :name, :xpath].each do |t|
+ [:class, :id, :text, :index, :name, :xpath].each do |t|
key, value = div.selenium_identifier_for t => 'value'
key.should == t
end
diff --git a/spec/page-object/selenium_page_object_spec.rb b/spec/page-object/selenium_page_object_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/page-object/selenium_page_object_spec.rb
+++ b/spec/page-object/selenium_page_object_spec.rb
@@ -24,5 +24,12 @@ describe PageObject::SeleniumPageObject do
selenium_browser.should_receive(:find_element)
selenium_page_object.platform.link_for(:href => 'foo')
end
+
+ it "should add tag_name when identifying by text for div" do
+ expected_identifier = {:text => 'foo', :tag_name => 'div'}
+ PageObject::Elements::Div.should_receive(:selenium_identifier_for).with(expected_identifier)
+ selenium_browser.should_receive(:find_element)
+ selenium_page_object.platform.div_for(:text => 'foo')
+ end
end
end
\ No newline at end of file
|
added ability to find divs by the contained text
|
cheezy_page-object
|
train
|
8fb9bc952aa303210cdd5dee30cb8cfc7ddd1c51
|
diff --git a/src/mg/PAMI/Client/Impl/ClientImpl.php b/src/mg/PAMI/Client/Impl/ClientImpl.php
index <HASH>..<HASH> 100644
--- a/src/mg/PAMI/Client/Impl/ClientImpl.php
+++ b/src/mg/PAMI/Client/Impl/ClientImpl.php
@@ -323,7 +323,7 @@ class ClientImpl implements IClient
} else if ($evePos !== false) {
$event = $this->_messageToEvent($aMsg);
$response = $this->findResponse($event);
- if ($response === false) {
+ if ($response === false || $response->isComplete()) {
$this->dispatch($event);
} else {
$response->addEvent($event);
|
client will now check if a response is complete before trying to add an event to it (OriginateResponse issue,when response and events arrive together with same actionid)
|
marcelog_PAMI
|
train
|
32f5055fb438c801bc42bf75e966bdb907733ea1
|
diff --git a/realtime/shake_event.py b/realtime/shake_event.py
index <HASH>..<HASH> 100644
--- a/realtime/shake_event.py
+++ b/realtime/shake_event.py
@@ -1950,7 +1950,10 @@ class ShakeEvent(QObject):
if myCitiesHtmlPath is not None:
myCitiesHtml.setUrl(QUrl(myCitiesHtmlPath))
else:
- raise CityMemoryLayerCreationError('No nearby cities found.')
+ # We used to raise an error here but it is actually feasible that
+ # no nearby cities with a valid mmi value are found - e.g.
+ # if the event is way out in the ocean.
+ LOGGER.info('No nearby cities found.')
# Load the contours and cities shapefile into the map
myContoursLayer = QgsVectorLayer(myContoursShapeFile,
|
Realtime: dont need to raise an exception if not affected
|
inasafe_inasafe
|
train
|
cf310764d0b41596060164c92b9b4ca4749e3c90
|
diff --git a/src/Mongator/Extension/templates/Core/QueryDefaultFinders.php.twig b/src/Mongator/Extension/templates/Core/QueryDefaultFinders.php.twig
index <HASH>..<HASH> 100644
--- a/src/Mongator/Extension/templates/Core/QueryDefaultFinders.php.twig
+++ b/src/Mongator/Extension/templates/Core/QueryDefaultFinders.php.twig
@@ -11,11 +11,8 @@
*/
private function findBy{{ name|ucfirst }}($value)
{
- if (!is_object($value)) $this->throwBadReferenceException();
- if ($value instanceOf \MongoId) $id = $value;
- elseif ($value instanceOf \Mandango\Document\Document) $id = $value->getId();
- else $this->throwBadReferenceException();
+ $id = $this->valueToMongoId($value);
return $this->mergeCriteria(array('{{field.dbName}}' => $id));
}
diff --git a/src/Mongator/Query/Query.php b/src/Mongator/Query/Query.php
index <HASH>..<HASH> 100644
--- a/src/Mongator/Query/Query.php
+++ b/src/Mongator/Query/Query.php
@@ -739,4 +739,16 @@ abstract class Query implements \Countable, \IteratorAggregate
if ( !$includeHash ) unset($keys['vars']['hash']);
return md5(serialize($keys));
}
+
+ protected function valueToMongoId($value)
+ {
+ if (is_string($value)) return new \MongoId($value);
+
+ if (!is_object($value)) $this->throwBadReferenceException();
+
+ if ($value instanceOf \MongoId) return $value;
+ if ($value instanceOf \Mongator\Document\Document) return $value->getId();
+
+ $this->throwBadReferenceException();
+ }
}
diff --git a/tests/Mongator/Tests/Extension/QueryDefaultFindersTest.php b/tests/Mongator/Tests/Extension/QueryDefaultFindersTest.php
index <HASH>..<HASH> 100644
--- a/tests/Mongator/Tests/Extension/QueryDefaultFindersTest.php
+++ b/tests/Mongator/Tests/Extension/QueryDefaultFindersTest.php
@@ -120,7 +120,14 @@ class QueryDefaultFindersTest extends TestCase
public function testFindByReferenceTypecheck()
{
$this->setExpectedException('\Exception');
- $this->createQuery()->findByAuthor((string) (new \MongoId()));
+ $this->createQuery()->findByAuthor(1234);
+ }
+
+ public function testFindByReferenceAcceptsString()
+ {
+ $id = new \MongoId();
+ $query = $this->createQuery()->findByAuthor((string) $id);
+ $this->assertEquals(array('author' => $id), $query->getCriteria());
}
public function testIgnoreUnsearchableTypes()
|
Accept string in findBy<Reference>
It will get automatically transformed to MongoId
|
mongator_mongator
|
train
|
69da1d8880e1bf2d32190ac24005b42eb493cd0c
|
diff --git a/pdb.py b/pdb.py
index <HASH>..<HASH> 100644
--- a/pdb.py
+++ b/pdb.py
@@ -679,12 +679,12 @@ except for when using the function decorator.
Config = self.ConfigFactory
class PdbppWithConfig(self.__class__):
- def __init__(self, *args):
+ def __init__(self_withcfg, *args):
kwds = dict(Config=Config)
- super(PdbppWithConfig, self).__init__(*args, **kwds)
+ super(PdbppWithConfig, self_withcfg).__init__(*args, **kwds)
# Backport of fix for bpo-31078 (not yet merged).
- self.use_rawinput = self.use_rawinput
+ self_withcfg.use_rawinput = self.use_rawinput
if sys.version_info < (3, ):
do_debug_func = pdb.Pdb.do_debug.im_func
diff --git a/testing/test_pdb.py b/testing/test_pdb.py
index <HASH>..<HASH> 100644
--- a/testing/test_pdb.py
+++ b/testing/test_pdb.py
@@ -2247,3 +2247,26 @@ before_interaction_hook
before_interaction_hook
# c
""")
+
+
+def test_rawinput_with_debug():
+ """Test backport of fix for bpo-31078."""
+ def fn():
+ set_trace()
+
+ check(fn, """
+--Return--
+[NUM] > .*fn()
+-> set_trace()
+ 5 frames hidden .*
+# debug 1
+ENTERING RECURSIVE DEBUGGER
+[NUM] > <string>(1)<module>()->None
+(#) import pdb; print(pdb.GLOBAL_PDB.use_rawinput)
+1
+(#) p sys._getframe().f_back.f_locals['self'].use_rawinput
+1
+(#) c
+LEAVING RECURSIVE DEBUGGER
+# c
+""")
|
Fix backport for bpo-<I> (keeping use_rawinput) (#<I>)
|
antocuni_pdb
|
train
|
45637268655d128d8b2746c27f32c3ec9971d113
|
diff --git a/hitcpy/version.py b/hitcpy/version.py
index <HASH>..<HASH> 100644
--- a/hitcpy/version.py
+++ b/hitcpy/version.py
@@ -1 +1 @@
-version = "0.1.0"
\ No newline at end of file
+version = "0.1.1"
|
Version bump for pypi
|
htm-community_hitc-py
|
train
|
d9f3d622da8c6fcbd212a27245baad55f13ededf
|
diff --git a/src/http/mock.js b/src/http/mock.js
index <HASH>..<HASH> 100644
--- a/src/http/mock.js
+++ b/src/http/mock.js
@@ -5,6 +5,7 @@
/*#ifndef(UMD)*/
"use strict";
/*global _GPF_HTTP_METHODS*/ // HTTP Methods
+/*global _gpfArrayTail*/ // [].slice.call(,1)
/*global _gpfHost*/ // Host type
/*global _gpfHttpSetRequestImplIf*/ // Set the http request implementation if the host matches
/*#endif*/
@@ -59,7 +60,7 @@ function _gpfHttMockMatchRequest (mockedRequest, request) {
url.lastIndex = 0;
match = url.exec(request.url);
if (match) {
- return mockedRequest.response.apply(mockedRequest, [request].concat([].slice.call(match, 1)));
+ return mockedRequest.response.apply(mockedRequest, [request].concat(_gpfArrayTail(match)));
}
}
|
no-magic-numbers (#<I>)
|
ArnaudBuchholz_gpf-js
|
train
|
c5b246639af3842edac7ad51126841b3c2e0a2aa
|
diff --git a/calendar-bundle/contao/classes/Calendar.php b/calendar-bundle/contao/classes/Calendar.php
index <HASH>..<HASH> 100644
--- a/calendar-bundle/contao/classes/Calendar.php
+++ b/calendar-bundle/contao/classes/Calendar.php
@@ -161,9 +161,18 @@ class Calendar extends \Frontend
// Recurring events
if ($objArticle->recurring)
{
- $count = 0;
$arrRepeat = deserialize($objArticle->repeatEach);
+ if ($arrRepeat['value'] < 1)
+ {
+ continue;
+ }
+
+ $count = 0;
+ $intStartTime = $objArticle->startTime;
+ $intEndTime = $objArticle->endTime;
+ $strtotime = '+ ' . $arrRepeat['value'] . ' ' . $arrRepeat['unit'];
+
// Do not include more than 20 recurrences
while ($count++ < 20)
{
@@ -172,17 +181,12 @@ class Calendar extends \Frontend
break;
}
- $arg = $arrRepeat['value'];
- $unit = $arrRepeat['unit'];
-
- $strtotime = '+ ' . $arg . ' ' . $unit;
-
- $objArticle->startTime = strtotime($strtotime, $objArticle->startTime);
- $objArticle->endTime = strtotime($strtotime, $objArticle->endTime);
+ $intStartTime = strtotime($strtotime, $intStartTime);
+ $intEndTime = strtotime($strtotime, $intEndTime);
- if ($objArticle->startTime >= $time)
+ if ($intStartTime >= $time)
{
- $this->addEvent($objArticle, $objArticle->startTime, $objArticle->endTime, $strUrl, $strLink);
+ $this->addEvent($objArticle, $intStartTime, $intEndTime, $strUrl, $strLink);
}
}
}
diff --git a/calendar-bundle/contao/classes/Events.php b/calendar-bundle/contao/classes/Events.php
index <HASH>..<HASH> 100644
--- a/calendar-bundle/contao/classes/Events.php
+++ b/calendar-bundle/contao/classes/Events.php
@@ -138,33 +138,35 @@ abstract class Events extends \Module
// Recurring events
if ($objEvents->recurring)
{
- $count = 0;
-
$arrRepeat = deserialize($objEvents->repeatEach);
- $strtotime = '+ ' . $arrRepeat['value'] . ' ' . $arrRepeat['unit'];
if ($arrRepeat['value'] < 1)
{
continue;
}
- while ($objEvents->endTime < $intEnd)
+ $count = 0;
+ $intStartTime = $objEvents->startTime;
+ $intEndTime = $objEvents->endTime;
+ $strtotime = '+ ' . $arrRepeat['value'] . ' ' . $arrRepeat['unit'];
+
+ while ($intEndTime < $intEnd)
{
if ($objEvents->recurrences > 0 && $count++ >= $objEvents->recurrences)
{
break;
}
- $objEvents->startTime = strtotime($strtotime, $objEvents->startTime);
- $objEvents->endTime = strtotime($strtotime, $objEvents->endTime);
+ $intStartTime = strtotime($strtotime, $intStartTime);
+ $intEndTime = strtotime($strtotime, $intEndTime);
// Skip events outside the scope
- if ($objEvents->endTime < $intStart || $objEvents->startTime > $intEnd)
+ if ($intEndTime < $intStart || $intStartTime > $intEnd)
{
continue;
}
- $this->addEvent($objEvents, $objEvents->startTime, $objEvents->endTime, $strUrl, $intStart, $intEnd, $id);
+ $this->addEvent($objEvents, $intStartTime, $intEndTime, $strUrl, $intStart, $intEnd, $id);
}
}
}
|
[Calendar] Correctly display repeated events in the event list (fixes #<I>)
|
contao_contao
|
train
|
9129e074da07bef19173399703ab50df18b90002
|
diff --git a/ecs/client.go b/ecs/client.go
index <HASH>..<HASH> 100644
--- a/ecs/client.go
+++ b/ecs/client.go
@@ -90,7 +90,9 @@ func (client *Client) Invoke(action string, args interface{}, response interface
}
statusCode := httpResp.StatusCode
- log.Printf("Invoke %s %s %d (%v)", ECSRequestMethod, requestURL, statusCode, t1.Sub(t0))
+ if client.debug {
+ log.Printf("Invoke %s %s %d (%v)", ECSRequestMethod, requestURL, statusCode, t1.Sub(t0))
+ }
defer httpResp.Body.Close()
body, err := ioutil.ReadAll(httpResp.Body)
diff --git a/ecs/instances.go b/ecs/instances.go
index <HASH>..<HASH> 100644
--- a/ecs/instances.go
+++ b/ecs/instances.go
@@ -5,6 +5,18 @@ import (
"time"
)
+// Region represents ECS region
+type InstanceStatus string
+
+// Constants of region definition
+const (
+ Running = InstanceStatus("Running")
+ Starting = InstanceStatus("Starting")
+
+ Stopped = InstanceStatus("Stopped")
+ Stopping = InstanceStatus("Stopping")
+)
+
type DescribeInstanceStatusArgs struct {
RegionId string
ZoneId string
@@ -173,7 +185,7 @@ const InstanceWaitForInterval = 5
const InstanceDefaultTimeout = 60
// WaitForInstance waits for instance to given status
-func (client *Client) WaitForInstance(instanceId string, status string, timeout int) error {
+func (client *Client) WaitForInstance(instanceId string, status InstanceStatus, timeout int) error {
if timeout <= 0 {
timeout = InstanceDefaultTimeout
}
@@ -182,7 +194,7 @@ func (client *Client) WaitForInstance(instanceId string, status string, timeout
if err != nil {
return err
}
- if instance.Status == status {
+ if instance.Status == string(status) {
break
}
timeout = timeout - InstanceWaitForInterval
diff --git a/ecs/instances_test.go b/ecs/instances_test.go
index <HASH>..<HASH> 100644
--- a/ecs/instances_test.go
+++ b/ecs/instances_test.go
@@ -13,7 +13,7 @@ func TestECSInstance(t *testing.T) {
if err != nil {
t.Errorf("Failed to stop instance %s: %v", TestInstanceId, err)
}
- err = client.WaitForInstance(TestInstanceId, "Stopped", 0)
+ err = client.WaitForInstance(TestInstanceId, Stopped, 0)
if err != nil {
t.Errorf("Instance %s is failed to stop: %v", TestInstanceId, err)
}
@@ -22,7 +22,7 @@ func TestECSInstance(t *testing.T) {
if err != nil {
t.Errorf("Failed to start instance %s: %v", TestInstanceId, err)
}
- err = client.WaitForInstance(TestInstanceId, "Running", 0)
+ err = client.WaitForInstance(TestInstanceId, Running, 0)
if err != nil {
t.Errorf("Instance %s is failed to start: %v", TestInstanceId, err)
}
@@ -31,7 +31,7 @@ func TestECSInstance(t *testing.T) {
if err != nil {
t.Errorf("Failed to restart instance %s: %v", TestInstanceId, err)
}
- err = client.WaitForInstance(TestInstanceId, "Running", 0)
+ err = client.WaitForInstance(TestInstanceId, Running, 0)
if err != nil {
t.Errorf("Instance %s is failed to restart: %v", TestInstanceId, err)
}
@@ -67,13 +67,13 @@ func TestECSInstanceCreationAndDeletion(t *testing.T) {
if err != nil {
t.Errorf("Failed to start instance %s: %v", instanceId, err)
}
- err = client.WaitForInstance(instanceId, "Running", 0)
+ err = client.WaitForInstance(instanceId, Running, 0)
err = client.StopInstance(instanceId, true)
if err != nil {
t.Errorf("Failed to stop instance %s: %v", instanceId, err)
}
- err = client.WaitForInstance(instanceId, "Stopped", 0)
+ err = client.WaitForInstance(instanceId, Stopped, 0)
if err != nil {
t.Errorf("Instance %s is failed to stop: %v", instanceId, err)
}
diff --git a/ecs/security_groups.go b/ecs/security_groups.go
index <HASH>..<HASH> 100644
--- a/ecs/security_groups.go
+++ b/ecs/security_groups.go
@@ -141,3 +141,27 @@ func (client *Client) ModifySecurityGroupAttribute(args *ModifySecurityGroupAttr
err := client.Invoke("ModifySecurityGroupAttribute", args, &response)
return err
}
+
+type AuthorizeSecurityGroupArgs struct {
+ SecurityGroupId string
+ RegionId string
+ IpProtocol string
+ PortRange string
+ SourceGroupId string
+ SourceGroupOwnerAccount string
+ SourceCidrIp string // IPv4 only, default 0.0.0.0/0
+ Policy string // enum of accept (default) | drop
+ Priority int // 1 - 100, default 1
+ NicType string // enum of internet | intranet (default)
+}
+
+type AuthorizeSecurityGroupResponse struct {
+ CommonResponse
+}
+
+// AuthorizeSecurityGroup authorize permissions to security group
+func (client *Client) AuthorizeSecurityGroup(args *AuthorizeSecurityGroupArgs) error {
+ response := AuthorizeSecurityGroupResponse{}
+ err := client.Invoke("AuthorizeSecurityGroup", args, &response)
+ return err
+}
|
Support AuthorizeSecurityGroup
|
denverdino_aliyungo
|
train
|
49fa53aa969e42270dfa8adf1258c06fbf39e455
|
diff --git a/spec/components/pickers.js b/spec/components/pickers.js
index <HASH>..<HASH> 100644
--- a/spec/components/pickers.js
+++ b/spec/components/pickers.js
@@ -43,7 +43,7 @@ class PickersTest extends React.Component {
<DatePicker
label='Formatted Date'
- inputFormat={(value) => `${value.getDate()}/${value.getMonth()}/${value.getFullYear()}`}
+ inputFormat={(value) => `${value.getDate()}/${value.getMonth() + 1}/${value.getFullYear()}`}
onChange={this.handleChange.bind(this, 'date3')}
value={this.state.date3}
/>
|
Update pickers.js
Just like in components/date_picker/readme.md, you need to add 1 to value.getMonth()
|
react-toolbox_react-toolbox
|
train
|
cbb4433432b2c5e962f21d2180f5900b5d37413a
|
diff --git a/config/trustedproxy.php b/config/trustedproxy.php
index <HASH>..<HASH> 100644
--- a/config/trustedproxy.php
+++ b/config/trustedproxy.php
@@ -19,15 +19,16 @@ return [
/*
* To trust one or more specific proxies that connect
- * directly to your server, use an array of IP addresses:
+ * directly to your server, use an array or a string separated by comma of IP addresses:
*/
- # 'proxies' => ['192.168.1.1'],
+ // 'proxies' => ['192.168.1.1'],
+ // 'proxies' => '192.168.1.1, 192.168.1.1',
/*
* Or, to trust all proxies that connect
* directly to your server, use a "*"
*/
- # 'proxies' => '*',
+ // 'proxies' => '*',
/*
* Which headers to use to detect proxy related data (For, Host, Proto, Port)
|
fix styleci and update doc in config file
|
fideloper_TrustedProxy
|
train
|
0df3efd949ba3eac49e562de90a7a6aa795e8fe0
|
diff --git a/lib/geoengineer/resources/aws/api_gateway/aws_api_gateway_resource.rb b/lib/geoengineer/resources/aws/api_gateway/aws_api_gateway_resource.rb
index <HASH>..<HASH> 100644
--- a/lib/geoengineer/resources/aws/api_gateway/aws_api_gateway_resource.rb
+++ b/lib/geoengineer/resources/aws/api_gateway/aws_api_gateway_resource.rb
@@ -21,7 +21,7 @@ class GeoEngineer::Resources::AwsApiGatewayResource < GeoEngineer::Resource
after :initialize, -> { depends_on [_rest_api.terraform_name] }
- after :initialize, -> { _geo_id -> { "#{_rest_api._geo_id}::#{path_part}" } }
+ after :initialize, -> { _geo_id -> { "#{_rest_api._geo_id}::#{parent_id}::#{path_part}" } }
after :initialize, -> { _terraform_id -> { NullObject.maybe(remote_resource)._terraform_id } }
after :initialize, -> { _id -> { _terraform_id } }
|
API Gateway resources should include parent_id in geo_id
|
coinbase_geoengineer
|
train
|
09f0d71fd36460f7595c7def8fef6ccd97c5789e
|
diff --git a/src/Native5/ConfigurationFactory.php b/src/Native5/ConfigurationFactory.php
index <HASH>..<HASH> 100644
--- a/src/Native5/ConfigurationFactory.php
+++ b/src/Native5/ConfigurationFactory.php
@@ -36,15 +36,10 @@ namespace Native5;
* Created : 27-11-2012
* Last Modified : Fri Dec 21 09:11:53 2012
*/
-class ConfigurationFactory extends \Native5\Core\YamlConfigFactory
+class ConfigurationFactory extends \Native5\Core\Configuration\YamlConfigFactory
{
protected $_configuration;
- public function __construct($configFile, $localConfigFile) {
- parent::__construct($configFile);
- parent::override($localConfigFile);
- }
-
/**
* makeConfig Wrap the associative configuration array inside a Configuration class
*
|
Corrected class fully qualified name (now within Configuration folder) in ConfigurationFactory.php
|
native5_native5-sdk-client-php
|
train
|
dc5f8cdeb1924efc14e17f171eca9a18177b5dd4
|
diff --git a/releasedir/git_repo.go b/releasedir/git_repo.go
index <HASH>..<HASH> 100644
--- a/releasedir/git_repo.go
+++ b/releasedir/git_repo.go
@@ -100,5 +100,15 @@ func (r FSGitRepo) MustNotBeDirty(force bool) (bool, error) {
}
func (r FSGitRepo) isNotGitRepo(stderr string) bool {
- return strings.Contains(stderr, "Not a git repository")
+ if r.fs.FileExists(filepath.Join(r.dirPath, ".git")) {
+ return false
+ }
+
+ cmd := boshsys.Command{
+ Name: "git",
+ Args: []string{"rev-parse", "--git-dir"},
+ WorkingDir: r.dirPath,
+ }
+ _, _, _, err := r.runner.RunComplexCommand(cmd)
+ return err != nil
}
diff --git a/releasedir/git_repo_test.go b/releasedir/git_repo_test.go
index <HASH>..<HASH> 100644
--- a/releasedir/git_repo_test.go
+++ b/releasedir/git_repo_test.go
@@ -22,6 +22,7 @@ var _ = Describe("GitRepo", func() {
fs = fakesys.NewFakeFileSystem()
cmdRunner = fakesys.NewFakeCmdRunner()
gitRepo = NewFSGitRepo("/dir", cmdRunner, fs)
+ fs.WriteFile("/dir/.git", []byte{})
})
Describe("Init", func() {
@@ -85,9 +86,14 @@ releases/**/*.tgz
})
It("returns 'non-git' if it's not a git repo", func() {
+ fs.RemoveAll("/dir/.git")
cmdRunner.AddCmdResult(cmd, fakesys.FakeCmdResult{
Stderr: "fatal: Not a git repository: '/dir/.git'\n",
- Error: errors.New("fake-err"),
+ Error: errors.New("not a git repo (--short HEAD)"),
+ })
+ cmdRunner.AddCmdResult("git rev-parse --git-dir", fakesys.FakeCmdResult{
+ Stderr: "fatal: Not a git repository: '/dir/.git'\n",
+ Error: errors.New("not a git repo (--git-dir)"),
})
commit, err := gitRepo.LastCommitSHA()
Expect(err).ToNot(HaveOccurred())
@@ -146,9 +152,14 @@ releases/**/*.tgz
})
It("returns false if it's not a git repo", func() {
+ fs.RemoveAll("/dir/.git")
cmdRunner.AddCmdResult(cmd, fakesys.FakeCmdResult{
- Stderr: "fatal: Not a git repository: '/dir/.git'\n",
- Error: errors.New("fake-err"),
+ Stderr: "fatal: not a git repository: '/dir/.git'\n",
+ Error: errors.New("not a git repo (--short HEAD)"),
+ })
+ cmdRunner.AddCmdResult("git rev-parse --git-dir", fakesys.FakeCmdResult{
+ Stderr: "fatal: not a git repository: '/dir/.git'\n",
+ Error: errors.New("not a git repo (--git-dir)"),
})
dirty, err := gitRepo.MustNotBeDirty(false)
Expect(err).ToNot(HaveOccurred())
|
Make git repo detection more forgiving
Recent git version changed capitalization of the output in `git status`.
Relying on speicifc text doesn't seem to be the most sustainable way of
detecting repositories anyway, so we now use git commands and file
presence to do the detection.
[finishes #<I>](<URL>)
|
cloudfoundry_bosh-cli
|
train
|
19e1fcc242d36fb6383da8d5de4f4616335287eb
|
diff --git a/test/e2e/network/dns_common.go b/test/e2e/network/dns_common.go
index <HASH>..<HASH> 100644
--- a/test/e2e/network/dns_common.go
+++ b/test/e2e/network/dns_common.go
@@ -22,9 +22,9 @@ import (
"strings"
"time"
+ "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
- "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/fields"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/apimachinery/pkg/util/intstr"
@@ -494,7 +494,7 @@ func createProbeCommand(namesToResolve []string, hostEntries []string, ptrLookup
// createTargetedProbeCommand returns a command line that performs a DNS lookup for a specific record type
func createTargetedProbeCommand(nameToResolve string, lookup string, fileNamePrefix string) (string, string) {
fileName := fmt.Sprintf("%s_udp@%s", fileNamePrefix, nameToResolve)
- probeCmd := fmt.Sprintf("dig +short +tries=12 +norecurse %s %s > /results/%s", nameToResolve, lookup, fileName)
+ probeCmd := fmt.Sprintf("dig +short +tries=12 %s %s > /results/%s", nameToResolve, lookup, fileName)
return probeCmd, fileName
}
|
Remove 'norecurse' flag in dns tests.
dnsmasq <I> introduced a change to respond to all norecurse queries with ServFail.
This is to prevent cache snooping where an adversary can figure out if a particular hostname has been looked up or not.
These tests do not need the norecurse flag, hence removing it.
|
kubernetes_kubernetes
|
train
|
a77acd3302723453a8098243d92467e52f4cebda
|
diff --git a/cumulusci/tasks/marketing_cloud/api.py b/cumulusci/tasks/marketing_cloud/api.py
index <HASH>..<HASH> 100644
--- a/cumulusci/tasks/marketing_cloud/api.py
+++ b/cumulusci/tasks/marketing_cloud/api.py
@@ -29,7 +29,7 @@ class CreateSubscriberAttribute(BaseMarketingCloudTask):
data=envelope.encode("utf-8"),
headers={"Content-Type": "text/xml; charset=utf-8"},
)
- self._check_response(response)
+ self._check_soap_response(response)
self.logger.info(
f"Successfully created subscriber attribute: {attribute_name}."
)
@@ -120,7 +120,7 @@ class CreateUser(BaseMarketingCloudTask):
data=envelope.encode("utf-8"),
headers={"Content-Type": "text/xml; charset=utf-8"},
)
- self._check_response(response)
+ self._check_soap_response(response)
self.logger.info(f"Successfully created User: {user_username}.")
self.return_values = {"success": True}
@@ -182,7 +182,7 @@ class UpdateUserRole(BaseMarketingCloudTask):
data=envelope.encode("utf-8"),
headers={"Content-Type": "text/xml; charset=utf-8"},
)
- self._check_response(response)
+ self._check_soap_response(response)
user_name = self.options.get("user_name")
self.logger.info(f"Successfully updated role for User: {user_name}.")
self.return_values = {"success": True}
diff --git a/cumulusci/tasks/marketing_cloud/base.py b/cumulusci/tasks/marketing_cloud/base.py
index <HASH>..<HASH> 100644
--- a/cumulusci/tasks/marketing_cloud/base.py
+++ b/cumulusci/tasks/marketing_cloud/base.py
@@ -17,7 +17,7 @@ class BaseMarketingCloudTask(BaseTask):
super()._init_task()
self.mc_config = self.project_config.keychain.get_service("marketing_cloud")
- def _check_response(self, response):
+ def _check_soap_response(self, response):
"""Make sure the response indicates success."""
response.raise_for_status()
root = lxml_parse_string(response.content)
|
rename to _check_soap_response
|
SFDO-Tooling_CumulusCI
|
train
|
245e83fce5278b369af2850dc251a7cbae9f7c05
|
diff --git a/bumpversion/__init__.py b/bumpversion/__init__.py
index <HASH>..<HASH> 100644
--- a/bumpversion/__init__.py
+++ b/bumpversion/__init__.py
@@ -653,14 +653,13 @@ def main(original_args=None):
for section_name in config.sections():
- if not ':' in section_name:
- continue
-
- the_word_bumpversion, section_prefix, section_value = section_name.split(":", 2)
+ section_name_match = re.compile("^bumpversion:(file|part):(.+)").match(section_name)
- if 'bumpversion' != the_word_bumpversion:
+ if not section_name_match:
continue
+ section_prefix, section_value = section_name_match.groups()
+
section_config = dict(config.items(section_name))
if section_prefix == "part":
diff --git a/tests.py b/tests.py
index <HASH>..<HASH> 100644
--- a/tests.py
+++ b/tests.py
@@ -1258,3 +1258,54 @@ def test_multi_file_configuration(tmpdir, capsys):
main(['patch'])
assert '2.0.1' in tmpdir.join("FULL_VERSION.txt").read()
assert '2' in tmpdir.join("MAJOR_VERSION.txt").read()
+
+
+def test_multi_file_configuration2(tmpdir, capsys):
+ tmpdir.join("setup.cfg").write("1.6.6")
+ tmpdir.join("README.txt").write("MyAwesomeSoftware(TM) v1.6")
+ tmpdir.join("BUILDNUMBER").write("1.6.6+joe+38943")
+
+ tmpdir.chdir()
+
+ tmpdir.join(".bumpversion.cfg").write(dedent("""
+ [bumpversion]
+ current_version = 1.6.6
+
+ [something:else]
+
+ [foo]
+
+ [bumpversion:file:setup.cfg]
+
+ [bumpversion:file:README.txt]
+ parse = '(?P<major>\d+)\.(?P<minor>\d+)'
+ serialize =
+ {major}.{minor}
+
+ [bumpversion:file:BUILDNUMBER]
+ serialize =
+ {major}.{minor}.{patch}+{$USER}+{$BUILDNUMBER}
+
+ """))
+
+ environ['BUILDNUMBER'] = "38944"
+ environ['USER'] = "bob"
+ main(['minor', '--verbose'])
+ del environ['BUILDNUMBER']
+ del environ['USER']
+
+ assert '1.7.0' in tmpdir.join("setup.cfg").read()
+ assert 'MyAwesomeSoftware(TM) v1.7' in tmpdir.join("README.txt").read()
+ assert '1.7.0+bob+38944' in tmpdir.join("BUILDNUMBER").read()
+
+ environ['BUILDNUMBER'] = "38945"
+ environ['USER'] = "bob"
+ main(['patch', '--verbose'])
+ del environ['BUILDNUMBER']
+ del environ['USER']
+
+ assert '1.7.1' in tmpdir.join("setup.cfg").read()
+ assert 'MyAwesomeSoftware(TM) v1.7' in tmpdir.join("README.txt").read()
+ assert '1.7.1+bob+38945' in tmpdir.join("BUILDNUMBER").read()
+
+
|
another smoke test for per-file-config
|
c4urself_bump2version
|
train
|
2d6be014b1a5689db5eb231906846665bb57d451
|
diff --git a/lib/Doctrine/ODM/MongoDB/UnitOfWork.php b/lib/Doctrine/ODM/MongoDB/UnitOfWork.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/ODM/MongoDB/UnitOfWork.php
+++ b/lib/Doctrine/ODM/MongoDB/UnitOfWork.php
@@ -1944,7 +1944,9 @@ class UnitOfWork implements PropertyChangedListener
$managedCopy = $document;
if ($this->getDocumentState($document, self::STATE_DETACHED) !== self::STATE_MANAGED) {
- // @TODO: If $document is a proxy, reset and initialize it
+ if ($document instanceof Proxy && ! $document->__isInitialized()) {
+ $document->__load();
+ }
// Try to look the document up in the identity map.
$id = $class->isEmbeddedDocument ? null : $class->getIdentifierObject($document);
@@ -1971,9 +1973,11 @@ class UnitOfWork implements PropertyChangedListener
$managedCopy = $class->newInstance();
$class->setIdentifierValue($managedCopy, $id);
$this->persistNew($class, $managedCopy);
+ } else {
+ if ($managedCopy instanceof Proxy && ! $managedCopy->__isInitialized__) {
+ $managedCopy->__load();
+ }
}
-
- // @TODO: If $managedCopy is a proxy, initialize it
}
if ($class->isVersioned) {
@@ -1987,7 +1991,9 @@ class UnitOfWork implements PropertyChangedListener
}
// Merge state of $document into existing (managed) document
- foreach ($class->reflFields as $name => $prop) {
+ foreach ($class->reflClass->getProperties() as $prop) {
+ $name = $prop->name;
+ $prop->setAccessible(true);
if ( ! isset($class->associationMappings[$name])) {
if ( ! $class->isIdentifier($name)) {
$prop->setValue($managedCopy, $prop->getValue($document));
@@ -2009,9 +2015,12 @@ class UnitOfWork implements PropertyChangedListener
$targetClass = $this->dm->getClassMetadata($targetDocument);
$relatedId = $targetClass->getIdentifierObject($other);
- // @TODO: Use DocumentManager::find() if $targetClass has sub-classes?
- $other = $this->dm->getProxyFactory()->getProxy($targetDocument, $relatedId);
- $this->registerManaged($other, $relatedId, array());
+ if ($targetClass->subClasses) {
+ $other = $this->dm->find($targetClass->name, $relatedId);
+ } else {
+ $other = $this->dm->getProxyFactory()->getProxy($assoc2['targetDocument'], $relatedId);
+ $this->registerManaged($other, $relatedId, array());
+ }
}
$prop->setValue($managedCopy, $other);
@@ -2046,7 +2055,7 @@ class UnitOfWork implements PropertyChangedListener
// If $managedCol differs from the merged collection, clear and set dirty
if ( ! $managedCol->isEmpty() && $managedCol !== $mergeCol) {
$managedCol->unwrap()->clear();
- $managedCol->clearSnapshot(); // Sets dirty
+ $managedCol->setDirty(true);
if ($assoc2['isOwningSide'] && $class->isChangeTrackingNotify()) {
$this->scheduleForDirtyCheck($managedCopy);
@@ -2062,32 +2071,23 @@ class UnitOfWork implements PropertyChangedListener
}
}
- /* If $prevManagedCopy and $assoc are provided, $managedCopy is the
- * target of that association. Since $managedCopy has now been
- * merged, set it on $prevManagedCopy's association.
- *
- * @TODO: If $managedCopy has an association to $prevManagedCopy, it
- * is not getting set (see testSerializeUnserializeModifyMerge in
- * DetachedDocumentTest).
- */
- if ($prevManagedCopy !== null) {
- $assocField = $assoc['fieldName'];
- $prevClass = $this->dm->getClassMetadata(get_class($prevManagedCopy));
+ if ($class->isChangeTrackingDeferredExplicit()) {
+ $this->scheduleForDirtyCheck($document);
+ }
+ }
- if ($assoc['type'] === 'one') {
- $prevClass->reflFields[$assocField]->setValue($prevManagedCopy, $managedCopy);
- } else {
- // Add $managedCopy to $prevManagedCopy's PersistentCollection
- $prevClass->reflFields[$assocField]->getValue($prevManagedCopy)->add($managedCopy);
+ if ($prevManagedCopy !== null) {
+ $assocField = $assoc['fieldName'];
+ $prevClass = $this->dm->getClassMetadata(get_class($prevManagedCopy));
- if ( ! empty($assoc['mappedBy'])) {
- $class->reflFields[$assoc['mappedBy']]->setValue($managedCopy, $prevManagedCopy);
- }
- }
- }
+ if ($assoc['type'] === 'one') {
+ $prevClass->reflFields[$assocField]->setValue($prevManagedCopy, $managedCopy);
+ } else {
+ $prevClass->reflFields[$assocField]->getValue($prevManagedCopy)->add($managedCopy);
- if ($class->isChangeTrackingDeferredExplicit()) {
- $this->scheduleForDirtyCheck($document);
+ if ($assoc['type'] === 'many' && isset($assoc['mappedBy'])) {
+ $class->reflFields[$assoc['mappedBy']]->setValue($managedCopy, $prevManagedCopy);
+ }
}
}
|
port remaining functionality from orm merge() functionality.
|
Briareos_mongodb-odm
|
train
|
ba80ff74a9627f676b4c426587ce5ea487665e46
|
diff --git a/actionpack/lib/action_controller/response.rb b/actionpack/lib/action_controller/response.rb
index <HASH>..<HASH> 100644
--- a/actionpack/lib/action_controller/response.rb
+++ b/actionpack/lib/action_controller/response.rb
@@ -114,8 +114,8 @@ module ActionController # :nodoc:
def redirect(url, status)
self.status = status
- self.location = url
- self.body = "<html><body>You are being <a href=\"#{url}\">redirected</a>.</body></html>"
+ self.location = url.gsub(/[\r\n]/, '')
+ self.body = "<html><body>You are being <a href=\"#{CGI.escapeHTML(url)}\">redirected</a>.</body></html>"
end
def sending_file?
|
Sanitize the URLs passed to redirect_to to prevent a potential response splitting attack.
CGI.rb and mongrel don't do any sanitization of the contents of HTTP headers, so care needs to be taken.
|
rails_rails
|
train
|
728f37a73d117d6352d2bf146fa09240be4453a2
|
diff --git a/lib/linters/README.md b/lib/linters/README.md
index <HASH>..<HASH> 100644
--- a/lib/linters/README.md
+++ b/lib/linters/README.md
@@ -800,9 +800,11 @@ The [CSS spec](http://dev.w3.org/csswg/css-values/#url-value) also recommends th
## zeroUnit
Zero values should include a unit for consistency with other values.
-Option | Description
----------- | ----------
-`style` | `no_unit`, `keep_unit` (**default**)
+Option | Description
+------------| ----------
+`style` | `no_unit`, `keep_unit` (**default**)
+`units` | `string array` additional units to enforce.
+`exclude` | `string array` additional properties to exclude.
### no_unit
```less
diff --git a/lib/linters/zero_unit.js b/lib/linters/zero_unit.js
index <HASH>..<HASH> 100644
--- a/lib/linters/zero_unit.js
+++ b/lib/linters/zero_unit.js
@@ -14,6 +14,24 @@ module.exports = {
var number;
var value;
var unit;
+ var excludedProperties = ['opacity', 'z-index'];
+ var excludedUnits = [];
+ var property = node.first('property').first('ident').content;
+
+ if (config) {
+ if (config.exclude && config.exclude.length) {
+ excludedProperties = excludedProperties.concat(config.exclude);
+ }
+
+ if (config.units && config.units.length) {
+ excludedUnits = excludedUnits.concat(config.units);
+ }
+ }
+
+ // The property shouldn't be checked for units
+ if (property && excludedProperties.indexOf(property) !== -1) {
+ return;
+ }
node.forEach('value', function (element) {
value = element.first('dimension');
@@ -30,12 +48,17 @@ module.exports = {
// Nothing to lint found, bail
if (!number || parseFloat(number.content) !== 0) {
- return null;
+ return;
+ }
+
+ // Unit is excluded, nothing to do
+ if (unit && excludedUnits.indexOf(unit.content) > -1) {
+ return;
}
- // Unit is required, nothing to do
+ // Unit is always required by the CSS spec, nothing to do
if (unit && units.indexOf(unit.content) === -1) {
- return null;
+ return;
}
number = number.content;
@@ -54,9 +77,7 @@ module.exports = {
break;
default:
- throw new Error(
- 'Invalid setting value for zeroUnit: ' + config.style
- );
+ throw new Error('Invalid setting value for zeroUnit: ' + config.style);
}
if (!valid) {
diff --git a/test/specs/linters/zero_unit.js b/test/specs/linters/zero_unit.js
index <HASH>..<HASH> 100644
--- a/test/specs/linters/zero_unit.js
+++ b/test/specs/linters/zero_unit.js
@@ -101,7 +101,7 @@ describe('lesshint', function () {
result = linter.lint(options, ast);
- expect(result).to.equal(null);
+ expect(result).to.be.undefined;
});
it('should not report units on zero values when the unit is a time and "style" is "no_unit"', function () {
@@ -118,7 +118,60 @@ describe('lesshint', function () {
result = linter.lint(options, ast);
- expect(result).to.equal(null);
+ expect(result).to.be.undefined;
+ });
+
+ it('should not report units on zero values when the unit is configured and "style" is "no_unit"', function () {
+ var source = '.foo { margin-left: 0zz; }';
+ var result;
+ var ast;
+
+ var options = {
+ style: 'no_unit',
+ units: ['zz']
+ };
+
+ ast = parseAST(source);
+ ast = ast.first().first('block').first('declaration');
+
+ result = linter.lint(options, ast);
+
+ expect(result).to.be.undefined;
+ });
+
+ it('should not report units on zero values when the the property does not have units and "style" is "no_unit"', function () {
+ var source = '.bar { z-index: 0; }';
+ var result;
+ var ast;
+
+ var options = {
+ style: 'no_unit'
+ };
+
+ ast = parseAST(source);
+ ast = ast.first().first('block').first('declaration');
+
+ result = linter.lint(options, ast);
+
+ expect(result).to.be.undefined;
+ });
+
+ it('should not report units on zero values when the the property does not have units and "style" is "no_unit"', function () {
+ var source = '.bar { margin-left: 0; }';
+ var result;
+ var ast;
+
+ var options = {
+ style: 'no_unit',
+ exclude: ['margin-left']
+ };
+
+ ast = parseAST(source);
+ ast = ast.first().first('block').first('declaration');
+
+ result = linter.lint(options, ast);
+
+ expect(result).to.be.undefined;
});
it('should throw on invalid "style" value', function () {
|
brings zeroUnit inline with the spec, provides config for both units and property names
|
lesshint_lesshint
|
train
|
a4a8d15e9f3b9fcd69052ecfbaa50bc51ab1474d
|
diff --git a/lib/search_engine_query_parser.py b/lib/search_engine_query_parser.py
index <HASH>..<HASH> 100644
--- a/lib/search_engine_query_parser.py
+++ b/lib/search_engine_query_parser.py
@@ -638,10 +638,12 @@ class SpiresToInvenioSyntaxConverter:
# match cases where a keyword distributes across a conjunction
self._re_distribute_keywords = re.compile(r'''(?ix) # verbose, ignorecase on
\b(?P<keyword>\S*:) # a keyword is anything that's not whitespace with a colon
- (?P<content>.+?)\s* # content is the part that comes after the keyword
+ (?P<content>[^:]+?)\s* # content is the part that comes after the keyword; it should NOT
+ # have colons in it! that implies that we might be distributing
+ # a keyword OVER another keyword. see ticket #701
(?P<combination>\ and\ not\ |\ and\ |\ or\ |\ not\ )\s*
(?P<last_content>[^:]*?) # oh look, content without a keyword!
- (?=\ and\ not\ |\ and\ |\ or\ |\ not\ |$)''')
+ (?=\ and\ |\ or\ |\ not\ |$)''')
# massaging SPIRES quirks
self._re_pattern_IRN_search = re.compile(r'970__a:(?P<irn>\d+)')
@@ -1165,7 +1167,7 @@ class SpiresToInvenioSyntaxConverter:
def create_replacement_pattern(match):
return match.group('keyword') + match.group('content') + \
- ' ' + match.group('combination') + ' ' + match.group('keyword') + \
+ match.group('combination') + match.group('keyword') + \
match.group('last_content')
still_matches = True
diff --git a/lib/search_engine_query_parser_tests.py b/lib/search_engine_query_parser_tests.py
index <HASH>..<HASH> 100644
--- a/lib/search_engine_query_parser_tests.py
+++ b/lib/search_engine_query_parser_tests.py
@@ -862,6 +862,12 @@ class TestSpiresToInvenioSyntaxConverter(unittest.TestCase):
inv_search = '(journal:phys.lett and journal:0903 and journal:024)'
self._compare_searches(inv_search, spi_search)
+ def test_spires_keyword_distribution_with_parens(self):
+ """SPIRES search syntax - test find cn d0 and (a abachi or abbott or abazov)"""
+ spi_search = "find cn d0 and (a abachi or abbott or abazov)"
+ inv_search = "collaboration:d0 and (author:abachi or author:abbott or author:abazov)"
+ self._compare_searches(inv_search, spi_search)
+
def test_simple_syntax_mixing(self):
"""SPIRES and invenio search syntax - find a ellis and citedby:hawking"""
combo_search = "find a ellis and citedby:hawking"
|
WebSearch: fix distribution of SPIRES keywords
* regular expression updated so keywords don't get distributed
on top of other keywords
* tests added for same (fixes #<I>)
|
inveniosoftware_invenio-records
|
train
|
e06463361bca2b48e6d655722a4d6eeb6df7691e
|
diff --git a/spec/tcp_sequence_spec.rb b/spec/tcp_sequence_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/tcp_sequence_spec.rb
+++ b/spec/tcp_sequence_spec.rb
@@ -2,16 +2,14 @@ require 'spec_helper'
require 'nmap/tcp_sequence'
describe TcpSequence do
- let(:xml) { XML.new(Helpers::SCAN_FILE) }
-
- subject { xml.hosts.first.tcp_sequence }
+ subject { @xml.hosts.first.tcp_sequence }
it "should be accessible from host objects" do
subject.should be_kind_of(TcpSequence)
end
it "should parse the index" do
- subject.index.should == 25
+ subject.index.should == 195
end
it "should parse the difficulty description" do
@@ -20,12 +18,12 @@ describe TcpSequence do
it "should parse the values" do
subject.values.should == [
- 0xAF1B39BD,
- 0xAF1C33BD,
- 0xAF1F21BD,
- 0xAF201BBD,
- 0xAF2115BD,
- 0xAF220FBD
+ 0x48496039,
+ 0x4823E13C,
+ 0x487362E9,
+ 0x489580F0,
+ 0x4906414A,
+ 0x48C579D8
]
end
end
|
Updated TcpSequence specs to use scan.xml.
|
sophsec_ruby-nmap
|
train
|
31e7baeea0041acd068f3ca56a718bcd49783755
|
diff --git a/lib/travis/notifications/webhook.rb b/lib/travis/notifications/webhook.rb
index <HASH>..<HASH> 100644
--- a/lib/travis/notifications/webhook.rb
+++ b/lib/travis/notifications/webhook.rb
@@ -13,13 +13,17 @@ module Travis
def payload_for(build)
Payload.new(build).to_hash
end
- end
- cattr_accessor :http_client
+ def http_client
+ @http_client ||= Faraday.new do |f|
+ f.request :url_encoded
+ f.adapter :net_http
+ end
+ end
- self.http_client = Faraday.new do |f|
- f.request :url_encoded
- f.adapter :net_http
+ def http_client=(http_client)
+ @http_client = http_client
+ end
end
def notify(event, build, *args)
|
don't use cattr_accessor here
|
travis-ci_travis-core
|
train
|
7b184898d83b924cee5519b04e0363fb2f8e0d90
|
diff --git a/dashboard/api.go b/dashboard/api.go
index <HASH>..<HASH> 100644
--- a/dashboard/api.go
+++ b/dashboard/api.go
@@ -156,9 +156,9 @@ func serveDashboardJs(res http.ResponseWriter, req *http.Request, conf *Config)
window.DashboardConfig = {
API_SERVER: "%s",
PATH_PREFIX: "%s",
- INSTALL_CERT: true
+ INSTALL_CERT: %t
};
- `, conf.URL, conf.PathPrefix))
+ `, conf.URL, conf.PathPrefix, len(conf.CACert) > 0))
r := ioutil.NewMultiReadSeeker(jsConf, f)
|
dashboard: Don't set INSTALL_CERT to true unless CA_CERT
|
flynn_flynn
|
train
|
4b7f9c60c4163d89195b288849f135bdccf200d1
|
diff --git a/blacklist.js b/blacklist.js
index <HASH>..<HASH> 100644
--- a/blacklist.js
+++ b/blacklist.js
@@ -14,9 +14,13 @@ var path = require('path');
// modulePathIgnorePatterns.
var sharedBlacklist = [
'website',
- 'node_modules/react-tools/src/utils/ImmutableObject.js',
- 'node_modules/react-tools/src/core/ReactInstanceHandles.js',
- 'node_modules/react-tools/src/event/EventPropagators.js'
+ 'node_modules/react-tools/src/React.js',
+ 'node_modules/react-tools/src/renderers/shared/event/EventPropagators.js',
+ 'node_modules/react-tools/src/renderers/shared/event/eventPlugins/ResponderEventPlugin.js',
+ 'node_modules/react-tools/src/renderers/shared/event/eventPlugins/ResponderSyntheticEvent.js',
+ 'node_modules/react-tools/src/renderers/shared/event/eventPlugins/ResponderTouchHistoryStore.js',
+ 'node_modules/react-tools/src/renderers/shared/reconciler/ReactInstanceHandles.js',
+ 'node_modules/react-tools/src/shared/vendor/core/ExecutionEnvironment.js',
];
var platformBlacklists = {
@@ -24,17 +28,10 @@ var platformBlacklists = {
'.ios.js'
],
ios: [
- 'node_modules/react-tools/src/browser/ui/React.js',
- 'node_modules/react-tools/src/browser/eventPlugins/ResponderEventPlugin.js',
- 'node_modules/react-tools/src/vendor/core/ExecutionEnvironment.js',
'.web.js',
'.android.js',
],
android: [
- 'node_modules/react-tools/src/browser/ui/React.js',
- 'node_modules/react-tools/src/browser/eventPlugins/ResponderEventPlugin.js',
- 'node_modules/react-tools/src/browser/ReactTextComponent.js',
- 'node_modules/react-tools/src/vendor/core/ExecutionEnvironment.js',
'.web.js',
'.ios.js',
],
|
[ReactNative] Update core RN modules to work with React <I>-beta1
|
facebook_metro
|
train
|
22816a462bc26810c296ca002b86233357db6a69
|
diff --git a/app/models/incline/user.rb b/app/models/incline/user.rb
index <HASH>..<HASH> 100644
--- a/app/models/incline/user.rb
+++ b/app/models/incline/user.rb
@@ -179,7 +179,7 @@ module Incline
update_columns(
disabled_by: other_user.email,
- disabled_at: Time.zone.now,
+ disabled_at: Time.now,
disabled_reason: reason,
enabled: false
) && refresh_comments
@@ -201,7 +201,7 @@ module Incline
def activate
update_columns(
activated: true,
- activated_at: Time.zone.now,
+ activated_at: Time.now,
activation_digest: nil
) && refresh_comments
end
@@ -218,7 +218,7 @@ module Incline
self.reset_token = Incline::User::new_token
update_columns(
reset_digest: Incline::User::digest(reset_token),
- reset_sent_at: Time.zone.now
+ reset_sent_at: Time.now
)
end
@@ -352,7 +352,7 @@ module Incline
enabled: true,
system_admin: true,
activated: true,
- activated_at: Time.zone.now,
+ activated_at: Time.now,
recaptcha: 'na'
)
end
@@ -364,7 +364,7 @@ module Incline
user.enabled = true
user.system_admin = true
user.activated = true
- user.activated_at = Time.zone.now
+ user.activated_at = Time.now
user.save!
end
end
@@ -383,7 +383,7 @@ module Incline
name: 'Anonymous',
enabled: false,
activated: true,
- activated_at: Time.zone.now,
+ activated_at: Time.now,
password: pwd,
password_confirmation: pwd
)
diff --git a/db/seeds.rb b/db/seeds.rb
index <HASH>..<HASH> 100644
--- a/db/seeds.rb
+++ b/db/seeds.rb
@@ -38,7 +38,7 @@ Incline::Recaptcha::pause_for do
end
r += 0.2 if r < 0.2
- while activated < Time.zone.now
+ while activated < Time.now
success,message = if hist == :fail
[ false, 'Invalid email or password.' ]
diff --git a/lib/incline/auth_engine_base.rb b/lib/incline/auth_engine_base.rb
index <HASH>..<HASH> 100644
--- a/lib/incline/auth_engine_base.rb
+++ b/lib/incline/auth_engine_base.rb
@@ -45,7 +45,7 @@ module Incline
private
def purge_old_history_for(user, max_months = 2)
- user.login_histories.where('"incline_user_login_histories"."created_at" <= ?', Time.zone.now - max_months.months).delete_all
+ user.login_histories.where('"incline_user_login_histories"."created_at" <= ?', Time.now - max_months.months).delete_all
end
end
|
Remove Time.zone.now calls.
|
barkerest_incline
|
train
|
6d3e2e0200c311162f160454dcb4d6b5f1fb9eba
|
diff --git a/tests/test_pipfile.py b/tests/test_pipfile.py
index <HASH>..<HASH> 100644
--- a/tests/test_pipfile.py
+++ b/tests/test_pipfile.py
@@ -20,7 +20,7 @@
import os
import pytest
-import contoml as toml
+import toml
from thoth.python import Pipfile
from thoth.python import PipfileLock
@@ -41,7 +41,7 @@ class TestPipfile(PythonTestCase):
instance = Pipfile.from_string(content)
# Sometimes toml does not preserve inline tables causing to_string() fail. However, we produce valid toml.
- assert instance.to_dict() == toml.loads(content).primitive
+ assert instance.to_dict() == toml.loads(content)
class TestPipfileLock(PythonTestCase):
|
Adjust testsuite to use toml instead of contoml
|
thoth-station_python
|
train
|
a27b1607e09c23946239ff1ac276dd50e028a8fd
|
diff --git a/src/iife.js b/src/iife.js
index <HASH>..<HASH> 100644
--- a/src/iife.js
+++ b/src/iife.js
@@ -3,9 +3,14 @@ import _ from "lodash";
export default { surround };
function surround(code, userOptions) {
- return surroundWithIife(code);
-}
+ let lines = [
+ "(function() {",
+ "\"use strict\";",
+ "",
+ code,
+ "}());",
+ ""
+ ];
-function surroundWithIife(code) {
- return `(function() {\n"use strict";\n\n${code}\n}());\n`;
+ return lines.join("\n");
}
|
Refactors the `surroundWithIife` function
|
mariusschulz_gulp-iife
|
train
|
a3b3c7c1f1d07ceebe3d1aae38328da260e69098
|
diff --git a/lib/engine-addon.js b/lib/engine-addon.js
index <HASH>..<HASH> 100644
--- a/lib/engine-addon.js
+++ b/lib/engine-addon.js
@@ -26,6 +26,9 @@ var DEFAULT_CONFIG = {
css: '/assets/engine-vendor.css',
js: '/assets/engine-vendor.js'
}
+ },
+ trees: {
+ addon: 'addon'
}
};
@@ -126,9 +129,17 @@ var buildVendorCSSTree = memoize(function buildVendorCSSTree(vendorTree) {
var buildEngineJSTree = memoize(function buildEngineJSTree() {
var engineSourceTree;
- var treePath = path.resolve(this.root, this.treePaths['addon']);
- if (existsSync(treePath)) {
+ var treePath;
+ var addonTree = this.options.trees.addon;
+
+ if (typeof addonTree === 'string') {
+ treePath = path.resolve(this.root, addonTree);
+ }
+
+ if (treePath && existsSync(treePath)) {
engineSourceTree = this.treeGenerator(treePath);
+ } else {
+ engineSourceTree = addonTree;
}
// We want the config and child app trees to be compiled with the engine source
|
Make the default addon tree an option
This change fixes an issue where the treePath is hardcoded to a path instead of being configurable by the user.
|
ember-engines_ember-engines
|
train
|
2b25f1089c8e557e46925c4c81f4c7b8dca77d16
|
diff --git a/acceptance/ui/features/steps/applications_steps.rb b/acceptance/ui/features/steps/applications_steps.rb
index <HASH>..<HASH> 100644
--- a/acceptance/ui/features/steps/applications_steps.rb
+++ b/acceptance/ui/features/steps/applications_steps.rb
@@ -212,23 +212,24 @@ def removeAllTemplatesCLI()
end
def closeDeployWizard()
- # if the deploy wizard is on the page
- # and visible, it should be closed. if
- # not, we can just ignore the exception
- # that the cabybara finder throws
+ # if the deploy wizard is on the page and visible, close it
+ page_found = false
begin
el = find("#addApp")
# found it!
if el.visible?
+ page_found = true
el.find(".modal-header .close").click()
- # wait till it is no longer visible
- find("#addApp", :count => 0)
end
- true
rescue
# couldn't find the deploy wizard,
# but that's ok. we all make mistakes
- true
+ return
+ end
+
+ if page_found
+ # wait till it is no longer visible, and error if it remains on screen
+ expect(page).not_to have_selector("#addApp")
end
end
|
Optimize closeDeployWizard so we don't wait for the full timeout
|
control-center_serviced
|
train
|
fbf81cad4c8e6a70dd74cc3c9565410311d0e77b
|
diff --git a/bqplot/nbextension/Figure.js b/bqplot/nbextension/Figure.js
index <HASH>..<HASH> 100644
--- a/bqplot/nbextension/Figure.js
+++ b/bqplot/nbextension/Figure.js
@@ -134,7 +134,7 @@ define(["widgets/js/widget", "./d3", "base/js/utils", "./require-less/less!./bqp
that.set_interaction(that.model.get("interaction"));
});
- that.axis_views = new Widget.ViewList(that.add_axis, that.remove_axis, that);
+ that.axis_views = new Widget.ViewList(that.add_axis, null, that);
that.axis_views.update(that.model.get("axes"));
// TODO: move to the model
@@ -263,10 +263,6 @@ define(["widgets/js/widget", "./d3", "base/js/utils", "./require-less/less!./bqp
return view;
});
},
- remove_axis: function(view) {
- // Called when an axis is removed from the axes list.
- view.remove();
- },
remove_from_padding_dict: function(dict, mark_view, scale_model) {
var scale_id = scale_model.id;
if(dict[scale_id] !== undefined) {
|
Remove remove_axis and switch to null
|
bloomberg_bqplot
|
train
|
534301b8333bfa9ab0aa79d1dbeb44adc76b0663
|
diff --git a/pyecore/resources/xmi.py b/pyecore/resources/xmi.py
index <HASH>..<HASH> 100644
--- a/pyecore/resources/xmi.py
+++ b/pyecore/resources/xmi.py
@@ -22,17 +22,12 @@ class XMIResource(Resource):
XMIResource.xmiid = '{{{0}}}id'.format(self.prefixes[xmi])
# Decode the XMI
modelroot = self._init_modelroot(xmlroot)
- print('root decoded')
if not self.contents:
self._clean_registers()
return
- print('decode nodes')
for child in xmlroot:
self._decode_eobject(child, modelroot)
- print('nodes OK')
- print('decode refs')
self._decode_ereferences()
- print('REFS OK')
self._clean_registers()
def resolve(self, fragment):
|
Remove basic print debug message (oopsie)
These 'logs' were here only to identify where the struggle was during
XMI deserialization (see previous commit log for information).
|
pyecore_pyecore
|
train
|
a244deb824b42a1a2520e6e3c852b778d77cc619
|
diff --git a/lib/redde/version.rb b/lib/redde/version.rb
index <HASH>..<HASH> 100644
--- a/lib/redde/version.rb
+++ b/lib/redde/version.rb
@@ -1,3 +1,3 @@
module Redde
- VERSION = "0.0.9"
+ VERSION = "0.1.0"
end
|
Bumped version to <I>
|
redde_redde
|
train
|
ff49cd002eed17dcf4440417bbfb9106948e423a
|
diff --git a/django_extensions/management/commands/sqldiff.py b/django_extensions/management/commands/sqldiff.py
index <HASH>..<HASH> 100644
--- a/django_extensions/management/commands/sqldiff.py
+++ b/django_extensions/management/commands/sqldiff.py
@@ -105,7 +105,7 @@ class SQLDiff(object):
SQL_INDEX_MISSING_IN_DB = lambda self, style, qn, args: "%s %s\n\t%s %s (%s%s);" % (style.SQL_KEYWORD('CREATE INDEX'), style.SQL_TABLE(qn("%s" % '_'.join(a for a in args[0:3] if a))), style.SQL_KEYWORD('ON'), style.SQL_TABLE(qn(args[0])), style.SQL_FIELD(qn(args[1])), style.SQL_KEYWORD(args[3]))
# FIXME: need to lookup index name instead of just appending _idx to table + fieldname
SQL_INDEX_MISSING_IN_MODEL = lambda self, style, qn, args: "%s %s;" % (style.SQL_KEYWORD('DROP INDEX'), style.SQL_TABLE(qn("%s" % '_'.join(a for a in args[0:3] if a))))
- SQL_UNIQUE_MISSING_IN_DB = lambda self, style, qn, args: "%s %s\n\t%s %s (%s);" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ADD COLUMN'), style.SQL_KEYWORD('UNIQUE'), style.SQL_FIELD(qn(args[1])))
+ SQL_UNIQUE_MISSING_IN_DB = lambda self, style, qn, args: "%s %s\n\t%s %s (%s);" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ADD'), style.SQL_KEYWORD('UNIQUE'), style.SQL_FIELD(qn(args[1])))
# FIXME: need to lookup unique constraint name instead of appending _key to table + fieldname
SQL_UNIQUE_MISSING_IN_MODEL = lambda self, style, qn, args: "%s %s\n\t%s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('DROP'), style.SQL_KEYWORD('CONSTRAINT'), style.SQL_TABLE(qn("%s_key" % ('_'.join(args[:2])))))
SQL_FIELD_TYPE_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD("MODIFY"), style.SQL_FIELD(qn(args[1])), style.SQL_COLTYPE(args[2]))
|
adding unique constraint should not contain the keyword COLUMN
|
django-extensions_django-extensions
|
train
|
764fda91500dfecb8ac74ab02681cce15c019c0f
|
diff --git a/lib/dep_selector/version_constraint.rb b/lib/dep_selector/version_constraint.rb
index <HASH>..<HASH> 100644
--- a/lib/dep_selector/version_constraint.rb
+++ b/lib/dep_selector/version_constraint.rb
@@ -26,6 +26,8 @@ module DepSelector
OPS = %w(< > = <= >= ~>)
PATTERN = /^(#{OPS.join('|')}) (.+)$/
+ attr_reader :op, :version
+
def initialize(constraint_spec=nil)
constraint_spec ||= DEFAULT_CONSTRAINT
case constraint_spec
@@ -58,6 +60,11 @@ module DepSelector
"#{@op} #{@version}"
end
+ def eql?(o)
+ o.class == self.class && @op == o.op && @version == o.version
+ end
+ alias_method :==, :eql?
+
private
def do_op(other_version)
diff --git a/spec/dep_selector/version_constraint_spec.rb b/spec/dep_selector/version_constraint_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/dep_selector/version_constraint_spec.rb
+++ b/spec/dep_selector/version_constraint_spec.rb
@@ -19,6 +19,13 @@
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
describe DepSelector::VersionConstraint do
+
+ describe "==" do
+ it "should be true if the constraints are equal" do
+ DepSelector::VersionConstraint.new("= 1.0.0").should == DepSelector::VersionConstraint.new("= 1.0.0")
+ end
+ end
+
describe "validation" do
bad_version = ["> >", ">= 1.2.z", "> 1.2.3 < 5.0", "> 1.2.3, < 5.0"]
bad_op = ["<3.0.1", ">$ 1.2.3", "! 3.4"]
|
Implementing == for VersionConstraint
|
chef_dep-selector
|
train
|
f7c465c973aae3836b8dc4660bbedc9be3e04fa5
|
diff --git a/lib/acts_as_ordered_tree/adapters.rb b/lib/acts_as_ordered_tree/adapters.rb
index <HASH>..<HASH> 100644
--- a/lib/acts_as_ordered_tree/adapters.rb
+++ b/lib/acts_as_ordered_tree/adapters.rb
@@ -7,7 +7,7 @@ require 'acts_as_ordered_tree/adapters/postgresql'
module ActsAsOrderedTree
module Adapters
# adapters map
- ADAPTERS = HashWithIndifferentAccess[:PostgreSQL => PostgreSQL]
+ ADAPTERS = HashWithIndifferentAccess['PostgreSQL' => PostgreSQL]
ADAPTERS.default = Recursive
def self.lookup(name)
diff --git a/spec/adapters/postgresql_spec.rb b/spec/adapters/postgresql_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/adapters/postgresql_spec.rb
+++ b/spec/adapters/postgresql_spec.rb
@@ -6,6 +6,8 @@ require 'acts_as_ordered_tree/adapters/postgresql'
require 'adapters/shared'
describe ActsAsOrderedTree::Adapters::PostgreSQL, :transactional, :pg do
+ it { expect(Default.ordered_tree.adapter).to be_a described_class }
+
it_behaves_like 'ActsAsOrderedTree adapter', ActsAsOrderedTree::Adapters::PostgreSQL, :default
it_behaves_like 'ActsAsOrderedTree adapter', ActsAsOrderedTree::Adapters::PostgreSQL, :default_with_counter_cache
it_behaves_like 'ActsAsOrderedTree adapter', ActsAsOrderedTree::Adapters::PostgreSQL, :scoped
|
Fxied bug when Rails 3.x used Recursive adapter with PostgreSQL
|
take-five_acts_as_ordered_tree
|
train
|
76cefc6542765214e39441b646efd87882cba644
|
diff --git a/src/Stagehand/FSM/StateMachine/StateMachineBuilder.php b/src/Stagehand/FSM/StateMachine/StateMachineBuilder.php
index <HASH>..<HASH> 100644
--- a/src/Stagehand/FSM/StateMachine/StateMachineBuilder.php
+++ b/src/Stagehand/FSM/StateMachine/StateMachineBuilder.php
@@ -80,15 +80,17 @@ class StateMachineBuilder
* Sets the given state as the first state.
*
* @param string $stateID
+ * @param callback $action
+ * @param callback $guard
*/
- public function setStartState($stateID)
+ public function setStartState($stateID, $action = null, $guard = null)
{
if (is_null($this->stateMachine->getState(StateInterface::STATE_INITIAL))) {
$transitionEvent = new TransitionEvent(EventInterface::EVENT_START);
$this->stateMachine->addState(new InitialState($transitionEvent));
}
- $this->addTransition(StateInterface::STATE_INITIAL, EventInterface::EVENT_START, $stateID);
+ $this->addTransition(StateInterface::STATE_INITIAL, EventInterface::EVENT_START, $stateID, $action, $guard);
}
/**
|
changed the StateMachineBuilder::setStartState() method so that the action and guard can be specified
|
phpmentors-jp_stagehand-fsm
|
train
|
4577af51a7a80c7c8efae8b753d1710708aa49f2
|
diff --git a/lib/active_admin_datetimepicker/inputs/filters/date_time_range_input.rb b/lib/active_admin_datetimepicker/inputs/filters/date_time_range_input.rb
index <HASH>..<HASH> 100644
--- a/lib/active_admin_datetimepicker/inputs/filters/date_time_range_input.rb
+++ b/lib/active_admin_datetimepicker/inputs/filters/date_time_range_input.rb
@@ -10,6 +10,14 @@ module ActiveAdmin
options[:class] = html_class
end
end
+
+ def gt_input_name
+ "#{method}_gteq"
+ end
+
+ def lt_input_name
+ "#{method}_lteq"
+ end
end
end
end
|
DateTimeRangeInput fields names fixed.
Now filtering by time (hours and minutes) is applied correctly.
|
activeadmin-plugins_active_admin_datetimepicker
|
train
|
ed663fd647dd3beb7aab2d931d7284a501248987
|
diff --git a/anyconfig/tests/common.py b/anyconfig/tests/common.py
index <HASH>..<HASH> 100644
--- a/anyconfig/tests/common.py
+++ b/anyconfig/tests/common.py
@@ -8,6 +8,8 @@ import sys
import tempfile
import unittest
+import anyconfig.compat
+
CNF_0 = dict(name="a", a=1, b=dict(b=[1, 2], c="C"))
SCM_0 = {"type": "object",
@@ -58,6 +60,30 @@ def cleanup_workdir(workdir):
os.system("rm -rf " + workdir)
+def dicts_equal(lhs, rhs):
+ """
+ >>> dicts_equal({}, {})
+ True
+ >>> dicts_equal({}, {'a': 1})
+ False
+ >>> d0 = {'a': 1}; dicts_equal(d0, d0)
+ True
+ >>> d1 = {'a': [1, 2, 3]}; dicts_equal(d1, d1)
+ True
+ >>> dicts_equal(d0, d1)
+ False
+ """
+ if len(lhs.keys()) != len(rhs.keys()):
+ return False
+
+ for key, val in anyconfig.compat.iteritems(rhs):
+ val_ref = lhs.get(key, None)
+ if val != val_ref:
+ return False
+
+ return True
+
+
class MaskedImportLoader(object):
"""
Mask specified module[s] and block importing that module / these modules to
diff --git a/anyconfig/tests/schema.py b/anyconfig/tests/schema.py
index <HASH>..<HASH> 100644
--- a/anyconfig/tests/schema.py
+++ b/anyconfig/tests/schema.py
@@ -7,31 +7,8 @@ from __future__ import absolute_import
import unittest
import anyconfig.schema as TT
-import anyconfig.compat
-
-
-def dicts_equal(lhs, rhs):
- """
- >>> dicts_equal({}, {})
- True
- >>> dicts_equal({}, {'a': 1})
- False
- >>> d0 = {'a': 1}; dicts_equal(d0, d0)
- True
- >>> d1 = {'a': [1, 2, 3]}; dicts_equal(d1, d1)
- True
- >>> dicts_equal(d0, d1)
- False
- """
- if len(lhs.keys()) != len(rhs.keys()):
- return False
-
- for key, val in anyconfig.compat.iteritems(rhs):
- val_ref = lhs.get(key, None)
- if val != val_ref:
- return False
-
- return True
+
+from anyconfig.tests.common import dicts_equal
class Test(unittest.TestCase):
|
move dicts_equal to compare dicts from anyconfig.tests.schema to anyconfig.tests.common
|
ssato_python-anyconfig
|
train
|
9060a192915d93a321bdc00bd81e9f9a407a2657
|
diff --git a/src/helpers.php b/src/helpers.php
index <HASH>..<HASH> 100644
--- a/src/helpers.php
+++ b/src/helpers.php
@@ -37,14 +37,15 @@ if ( ! function_exists('basset_javascripts'))
if ( ! function_exists('basset_assets'))
{
/**
- * Output a given group for an array of collections.
+ * Output the assets for a collection as defined by the extension.
*
- * @param array $collections
* @return string
*/
- function basset_assets(array $collections)
+ function basset_assets()
{
- $responses = array();
+ $collections = array(); $args = func_get_args();
+
+ array_walk_recursive($args, function($v, $k) use (&$collections) { is_numeric($k) ? ($collections[$v] = null) : ($collections[$k] = $v); });
foreach ($collections as $collection => $format) $responses[] = app('basset.server')->collection($collection, $format);
|
Allow an array or string of collections to be given to basset_assets.
|
Marwelln_basset
|
train
|
216aeb3116d2d2a2fb0df1cdb7ece16d458a1bf2
|
diff --git a/src/shogun2-core/shogun2-model/src/main/java/de/terrestris/shogun2/model/map/MapConfig.java b/src/shogun2-core/shogun2-model/src/main/java/de/terrestris/shogun2/model/map/MapConfig.java
index <HASH>..<HASH> 100644
--- a/src/shogun2-core/shogun2-model/src/main/java/de/terrestris/shogun2/model/map/MapConfig.java
+++ b/src/shogun2-core/shogun2-model/src/main/java/de/terrestris/shogun2/model/map/MapConfig.java
@@ -1,15 +1,17 @@
package de.terrestris.shogun2.model.map;
import java.awt.geom.Point2D;
-import java.util.ArrayList;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
-import javax.persistence.OneToMany;
+import javax.persistence.ManyToMany;
+import javax.persistence.ManyToOne;
import javax.persistence.OneToOne;
+import javax.persistence.OrderColumn;
import javax.persistence.Table;
import org.apache.commons.lang3.builder.EqualsBuilder;
@@ -24,8 +26,9 @@ import de.terrestris.shogun2.model.layer.util.Resolution;
/**
* The <i>MapConfig</i> is backend representation for an
* <a href="http://openlayers.org/en/master/apidoc/ol.View.html"> OpenLayers 3 View</a>
- *
+ *
* @author Andre Henn
+ * @author Daniel Koch
* @author terrestris GmbH & Co. KG
*
*/
@@ -37,20 +40,66 @@ public class MapConfig extends PersistentObject{
*
*/
private static final long serialVersionUID = 1L;
+
+ /**
+ *
+ */
private String name;
+
+ /**
+ *
+ */
private Point2D.Double center;
+ /**
+ *
+ */
@OneToOne(cascade = CascadeType.ALL)
private Extent extent;
- @OneToMany(fetch = FetchType.EAGER)
- @JoinTable(name = "MAPCONFIG_RESOLUTIONS")
- private List<Resolution> resolutions = new ArrayList<Resolution>();
+ /**
+ *
+ */
+ @ManyToMany(
+ fetch = FetchType.EAGER,
+ cascade = CascadeType.ALL
+ )
+ @JoinTable(
+ name = "MAPCONFIG_RESOLUTION",
+ joinColumns = { @JoinColumn(name = "MAPCONFIG_ID") },
+ inverseJoinColumns = { @JoinColumn(name = "RESOLUTION_ID") }
+ )
+ @OrderColumn(name = "INDEX")
+ private List<Resolution> resolutions;
+ /**
+ *
+ */
private Integer zoom;
- private Double maxResolution;
- private Double minResolution;
+
+ /**
+ *
+ */
+ @ManyToOne(
+ fetch = FetchType.EAGER,
+ cascade = CascadeType.ALL
+ )
+ private Resolution maxResolution;
+
+ /**
+ *
+ */
+ @ManyToOne(
+ fetch = FetchType.EAGER,
+ cascade = CascadeType.ALL
+ )
+ private Resolution minResolution;
+
+ /**
+ *
+ */
private Double rotation;
+
/*
* use String as datatype since classical EPSG code
* as well as OGC URN (urn:x-ogc:def:crs:EPSG:XXXX) should be covered.
@@ -76,7 +125,7 @@ public class MapConfig extends PersistentObject{
* @param projection
*/
public MapConfig(String name, Point2D.Double center, Extent extent, List<Resolution> resolutions, Integer zoom,
- Double maxResolution, Double minResolution, Double rotation, String projection) {
+ Resolution maxResolution, Resolution minResolution, Double rotation, String projection) {
super();
this.name = name;
this.center = center;
@@ -162,28 +211,28 @@ public class MapConfig extends PersistentObject{
/**
* @return the maxResolution
*/
- public Double getMaxResolution() {
+ public Resolution getMaxResolution() {
return maxResolution;
}
/**
* @param maxResolution the maxResolution to set
*/
- public void setMaxResolution(Double maxResolution) {
+ public void setMaxResolution(Resolution maxResolution) {
this.maxResolution = maxResolution;
}
/**
* @return the minResolution
*/
- public Double getMinResolution() {
+ public Resolution getMinResolution() {
return minResolution;
}
/**
* @param minResolution the minResolution to set
*/
- public void setMinResolution(Double minResolution) {
+ public void setMinResolution(Resolution minResolution) {
this.minResolution = minResolution;
}
@@ -223,6 +272,7 @@ public class MapConfig extends PersistentObject{
* -and-hashcode-in-java it is recommended only to use getter-methods
* when using ORM like Hibernate
*/
+ @Override
public int hashCode() {
// two randomly chosen prime numbers
return new HashCodeBuilder(5, 13).
@@ -247,6 +297,7 @@ public class MapConfig extends PersistentObject{
* -and-hashcode-in-java it is recommended only to use getter-methods
* when using ORM like Hibernate
*/
+ @Override
public boolean equals(Object obj) {
if (!(obj instanceof MapConfig))
return false;
|
Change max-/minResolution type & correct resolutions relation
|
terrestris_shogun-core
|
train
|
aa10452bb8a66a23c15d345476d62a17832edd13
|
diff --git a/domovoi/app.py b/domovoi/app.py
index <HASH>..<HASH> 100644
--- a/domovoi/app.py
+++ b/domovoi/app.py
@@ -60,11 +60,11 @@ class Domovoi(Chalice):
def step_function_task(self, state_name, state_machine_definition):
def register_sfn_task(func):
- if func.__name__ in self.sfn_tasks:
- raise KeyError(func.__name__)
- self.sfn_tasks[func.__name__] = dict(state_name=state_name,
- state_machine_definition=state_machine_definition,
- func=func)
+ if state_name in self.sfn_tasks:
+ raise KeyError(state_name)
+ self.sfn_tasks[state_name] = dict(state_name=state_name,
+ state_machine_definition=state_machine_definition,
+ func=func)
return func
return register_sfn_task
|
Key state machine tasks by state name, not function name
This allows assigning the same function to handle multiple tasks
|
kislyuk_domovoi
|
train
|
dc117a74aef813ab8b3b59524482e535e9f39524
|
diff --git a/src/babel/transformation/file.js b/src/babel/transformation/file.js
index <HASH>..<HASH> 100644
--- a/src/babel/transformation/file.js
+++ b/src/babel/transformation/file.js
@@ -534,7 +534,7 @@ export default class File {
result.code += "\n" + convertSourceMap.fromObject(result.map).toComment();
}
- if (opts.sourceMap === "both") {
+ if (opts.sourceMap === "inline") {
result.map = null;
}
|
both -> inline - thanks @dkieks
|
babel_babel
|
train
|
46c5501b7a9e4036462706d67c135f6b9278551e
|
diff --git a/dependency-check-core/src/main/java/org/owasp/dependencycheck/Engine.java b/dependency-check-core/src/main/java/org/owasp/dependencycheck/Engine.java
index <HASH>..<HASH> 100644
--- a/dependency-check-core/src/main/java/org/owasp/dependencycheck/Engine.java
+++ b/dependency-check-core/src/main/java/org/owasp/dependencycheck/Engine.java
@@ -32,8 +32,6 @@ import org.owasp.dependencycheck.analyzer.Analyzer;
import org.owasp.dependencycheck.analyzer.AnalyzerService;
import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
-import org.owasp.dependencycheck.data.cpe.CpeMemoryIndex;
-import org.owasp.dependencycheck.data.cpe.IndexException;
import org.owasp.dependencycheck.data.nvdcve.ConnectionFactory;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
@@ -513,22 +511,20 @@ public class Engine implements Serializable {
* @throws DatabaseException thrown if there is an exception opening the database
*/
private void ensureDataExists() throws NoDataException, DatabaseException {
- final CpeMemoryIndex cpe = CpeMemoryIndex.getInstance();
+ //final CpeMemoryIndex cpe = CpeMemoryIndex.getInstance();
final CveDB cve = new CveDB();
-
try {
cve.open();
- cpe.open(cve);
- } catch (IndexException ex) {
- throw new NoDataException(ex.getMessage(), ex);
+ if (!cve.dataExists()) {
+ throw new NoDataException("No documents exist");
+ }
+// cpe.open(cve);
+// } catch (IndexException ex) {
+// throw new NoDataException(ex.getMessage(), ex);
} catch (DatabaseException ex) {
throw new NoDataException(ex.getMessage(), ex);
} finally {
cve.close();
}
- if (cpe.numDocs() <= 0) {
- cpe.close();
- throw new NoDataException("No documents exist");
- }
}
}
|
changed ensureDataExists method to resolve startup time delays as part of a fix for issue #<I>
Former-commit-id: <I>e<I>be9e5e<I>f<I>d<I>da4c<I>b<I>eb
|
jeremylong_DependencyCheck
|
train
|
bd8ab48a681e9a36dd1eae29cbc01de9a6d6c7b7
|
diff --git a/lib/builder.js b/lib/builder.js
index <HASH>..<HASH> 100644
--- a/lib/builder.js
+++ b/lib/builder.js
@@ -113,11 +113,8 @@ Builder.prototype.prep_target_url = function(callback) {
if(er)
return callback(er)
- if(res.statusCode == 200 && res.body._id) {
- self.log.info('Deploying to document', {'id':res.body._id, 'rev':res.body._rev})
- self.target += DEFS.suffix
- return callback(null, self.target)
- }
+ if(res.statusCode == 200 && res.body._id && res.body._rev)
+ return found_doc(res.body._rev)
if(res.statusCode == 200 && res.body.couchdb == 'Welcome')
return callback(new Error('Output must be a couch database URL, not a couch server URL'))
@@ -130,13 +127,17 @@ Builder.prototype.prep_target_url = function(callback) {
return callback(new Error('Output URL is not a document in a CouchDB database'))
self.log.debug('Confirmed couch database for target', {target:self.target})
- self.target += DEFS.suffix
-
- self.emit('target', self.target)
- return callback(null, self.target)
+ return found_doc()
})
}
})
+
+ function found_doc(rev) {
+ self.target += DEFS.suffix
+ self.log.info('Deploying to document', {'target':self.target, 'rev':rev || null})
+ self.emit('target', self.target)
+ return callback(null, self.target)
+ }
}
diff --git a/test/api.js b/test/api.js
index <HASH>..<HASH> 100644
--- a/test/api.js
+++ b/test/api.js
@@ -189,23 +189,35 @@ test('Bad couch output', function(t) {
})
test('Good couch output', function(t) {
- var doc_url = couch.DB + '/output'
- var builder = new auto.Builder({ 'template':couch.simple_tmpl, 'target':doc_url })
-
- var error = null
- , target = null
- , done = false
-
- builder.on('error', function(er) { error = er })
- builder.on('target', function(t) { target = t })
- builder.on('stop', function() { done = true })
-
- setTimeout(check_result, couch.rtt() * 2)
- function check_result() {
- t.false(error, 'No errors for good doc url: ' + doc_url)
- t.equal(target, doc_url+'-baking', 'Builder emitted its target')
- t.ok(done, 'Builder finished with good doc_url')
-
- t.end()
- }
+ request.post({'uri':couch.DB, 'json':{'_id':'output'}}, function(er, res) {
+ if(er) throw er
+
+ var ids = ['output', 'no-exist-1234']
+ test_id()
+ function test_id() {
+ var id = ids.shift()
+ if(!id)
+ return t.end()
+
+ var doc_url = couch.DB + '/' + id
+ var builder = new auto.Builder({ 'template':couch.simple_tmpl, 'target':doc_url })
+
+ var error = null
+ , target = null
+ , done = false
+
+ builder.on('error', function(er) { error = er })
+ builder.on('target', function(t) { target = t })
+ builder.on('stop', function() { done = true })
+
+ setTimeout(check_result, couch.rtt() * 2)
+ function check_result() {
+ t.false(error, 'No errors for good doc url: ' + doc_url)
+ t.equal(target, doc_url+'-baking', 'Builder emitted its target')
+ t.ok(done, 'Builder finished with good doc_url')
+
+ test_id()
+ }
+ }
+ })
})
|
Emit the target for both found and missing docs
|
iriscouch_static-plus
|
train
|
c9869826dce2cdb479e9f6861d0ca45245a44e49
|
diff --git a/code/fullcalendar/FullcalendarController.php b/code/fullcalendar/FullcalendarController.php
index <HASH>..<HASH> 100755
--- a/code/fullcalendar/FullcalendarController.php
+++ b/code/fullcalendar/FullcalendarController.php
@@ -96,7 +96,7 @@ class FullcalendarController extends Controller {
/**
* Handles returning the JSON events data for a time range.
*
- * @param SS_HTTPRequest $request
+ * @param SS_HTTPRequest $request
* @return SS_HTTPResponse
*/
function publicevents($request, $json=true, $calendars=null, $offset=30){
|
fixing phpdoc blocks to refer to built-in PHP types instead of Objects
|
titledk_silverstripe-calendar
|
train
|
48fa3c72ae1e01f678c67c8127b8f0a594bc6063
|
diff --git a/test/modelbase_test.js b/test/modelbase_test.js
index <HASH>..<HASH> 100644
--- a/test/modelbase_test.js
+++ b/test/modelbase_test.js
@@ -45,5 +45,21 @@ describe('Modelbase', function() {
assert(empty())
done()
})
+
+ it ('should remove undefined paramaters from query', function(done) {
+ var callback = sinon.spy()
+ var FakeModel = { sync : sinon.spy() }
+ ngin.Model.list.call(FakeModel, {
+ url:'/test',
+ query: {
+ undef:undefined,
+ nil:null
+ }
+ }, callback)
+ assert(FakeModel.sync.calledOnce, 'FakeModel.sync not calledOnce')
+ assert.deepEqual(FakeModel.sync.getCall(0).args[2], { url:'/test', query:{} })
+ done()
+ })
+
})
})
|
add a test for the query filtering
|
sportngin_ngin_client_node
|
train
|
a99672c23478485b79bf18265c8d3ca4de018290
|
diff --git a/expression/builtin_miscellaneous_vec.go b/expression/builtin_miscellaneous_vec.go
index <HASH>..<HASH> 100644
--- a/expression/builtin_miscellaneous_vec.go
+++ b/expression/builtin_miscellaneous_vec.go
@@ -16,6 +16,7 @@ package expression
import (
"bytes"
"encoding/binary"
+ "fmt"
"math"
"net"
"strings"
@@ -484,11 +485,37 @@ func (b *builtinInetAtonSig) vecEvalInt(input *chunk.Chunk, result *chunk.Column
}
func (b *builtinInet6NtoaSig) vectorized() bool {
- return false
+ return true
}
func (b *builtinInet6NtoaSig) vecEvalString(input *chunk.Chunk, result *chunk.Column) error {
- return errors.Errorf("not implemented")
+ n := input.NumRows()
+ val, err := b.bufAllocator.get(types.ETString, n)
+ if err != nil {
+ return err
+ }
+ defer b.bufAllocator.put(val)
+ if err := b.args[0].VecEvalString(b.ctx, input, val); err != nil {
+ return err
+ }
+ result.ReserveString(n)
+ for i := 0; i < n; i++ {
+ if val.IsNull(i) {
+ result.AppendNull()
+ continue
+ }
+ valI := val.GetString(i)
+ ip := net.IP([]byte(valI)).String()
+ if len(valI) == net.IPv6len && !strings.Contains(ip, ":") {
+ ip = fmt.Sprintf("::ffff:%s", ip)
+ }
+ if net.ParseIP(ip) == nil {
+ result.AppendNull()
+ continue
+ }
+ result.AppendString(ip)
+ }
+ return nil
}
func (b *builtinNameConstRealSig) vectorized() bool {
diff --git a/expression/builtin_miscellaneous_vec_test.go b/expression/builtin_miscellaneous_vec_test.go
index <HASH>..<HASH> 100644
--- a/expression/builtin_miscellaneous_vec_test.go
+++ b/expression/builtin_miscellaneous_vec_test.go
@@ -28,9 +28,17 @@ var vecBuiltinMiscellaneousCases = map[string][]vecExprBenchCase{
ast.IsIPv6: {
{retEvalType: types.ETInt, childrenTypes: []types.EvalType{types.ETString}},
},
- ast.Sleep: {},
- ast.UUID: {},
- ast.Inet6Ntoa: {},
+ ast.Sleep: {},
+ ast.UUID: {},
+ ast.Inet6Ntoa: {
+ {retEvalType: types.ETString, childrenTypes: []types.EvalType{types.ETString}, geners: []dataGenerator{
+ &selectStringGener{
+ candidates: []string{
+ "192.168.0.1",
+ "2001:db8::68", //ipv6
+ },
+ }}},
+ },
ast.InetAton: {
{retEvalType: types.ETInt, childrenTypes: []types.EvalType{types.ETString}, geners: []dataGenerator{&ipv4StrGener{}}},
{retEvalType: types.ETInt, childrenTypes: []types.EvalType{types.ETString}},
|
expression:implement vectorized evaluation for builtinInet6NtoaSig (#<I>)
|
pingcap_tidb
|
train
|
c7604b48f049be7c532b5255eaad66b48707826e
|
diff --git a/lib/plugin.js b/lib/plugin.js
index <HASH>..<HASH> 100644
--- a/lib/plugin.js
+++ b/lib/plugin.js
@@ -1,7 +1,6 @@
'use strict';
const assign = require('lodash.assign');
-const gutil = require('gulp-util');
const expand = require('expand-hash');
const extend = require('util-extend');
const isTextOrBinary = require('istextorbinary');
diff --git a/lib/plugin.test.js b/lib/plugin.test.js
index <HASH>..<HASH> 100644
--- a/lib/plugin.test.js
+++ b/lib/plugin.test.js
@@ -3,7 +3,6 @@
const assign = require('lodash.assign');
const expect = require('expect');
const fs = require('fs');
-const gutil = require('gulp-util');
const ListStream = require('list-stream');
const Lab = require('lab');
const Path = require('path');
diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -47,7 +47,6 @@
"dependencies": {
"expand-hash": "^0.2.1",
"front-matter": "^2.1.0",
- "gulp-util": "^3.0.7",
"istextorbinary": "^2.1.0",
"list-stream": "^1.0.1",
"lodash.assign": "^4.1.0",
|
Remove gulp-util
resolves #<I>
|
sparkartgroup_gulp-markdown-to-json
|
train
|
1dfafbd5bb50db9a49a2549c6fd2ff3c0868b768
|
diff --git a/python/setup.py b/python/setup.py
index <HASH>..<HASH> 100755
--- a/python/setup.py
+++ b/python/setup.py
@@ -62,6 +62,7 @@ setup(
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
+ 'Programming Language :: Python :: 3.2',
'Programming Language :: Cython',
'Topic :: Scientific/Engineering :: Mathematics',
],
|
Add Python <I> category to setup.py.
|
cmcqueen_simplerandom
|
train
|
c71dc0e90311cef895d55c9d438518606b7f247b
|
diff --git a/models/Client.js b/models/Client.js
index <HASH>..<HASH> 100644
--- a/models/Client.js
+++ b/models/Client.js
@@ -92,7 +92,7 @@ var Client = Modinha.define('clients', {
}
})
- // Web clients with implicit grant type (enforced only in development)
+ // Web clients with implicit grant type (not enforced in development)
} else if (
!inDevelopment &&
Array.isArray(instance.grant_types) &&
|
fix(Client): Update comment to reflect logic
|
anvilresearch_connect
|
train
|
b7982ae7793d167d3f7bca138bf6934694d8f172
|
diff --git a/java/test/org/openqa/selenium/grid/distributor/local/LocalDistributorTest.java b/java/test/org/openqa/selenium/grid/distributor/local/LocalDistributorTest.java
index <HASH>..<HASH> 100644
--- a/java/test/org/openqa/selenium/grid/distributor/local/LocalDistributorTest.java
+++ b/java/test/org/openqa/selenium/grid/distributor/local/LocalDistributorTest.java
@@ -34,6 +34,7 @@ import org.openqa.selenium.grid.data.Session;
import org.openqa.selenium.grid.data.SessionRequest;
import org.openqa.selenium.grid.distributor.Distributor;
import org.openqa.selenium.grid.distributor.selector.DefaultSlotSelector;
+import org.openqa.selenium.grid.node.HealthCheck;
import org.openqa.selenium.grid.node.Node;
import org.openqa.selenium.grid.node.local.LocalNode;
import org.openqa.selenium.grid.security.Secret;
@@ -76,6 +77,7 @@ import static java.util.Collections.newSetFromMap;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
import static org.openqa.selenium.grid.data.Availability.DRAINING;
+import static org.openqa.selenium.grid.data.Availability.UP;
import static org.openqa.selenium.remote.Dialect.W3C;
import static org.openqa.selenium.remote.http.HttpMethod.GET;
@@ -226,6 +228,8 @@ public class LocalDistributorTest {
.add(caps, new TestSessionFactory(VerifyingHandler::new))
.add(caps, new TestSessionFactory(VerifyingHandler::new))
.maximumConcurrentSessions(3)
+ .advanced()
+ .healthCheck(() -> new HealthCheck.Result(UP, "UP!"))
.build();
LocalDistributor distributor = new LocalDistributor(
|
[grid] Add health check for LocalDistributor test
|
SeleniumHQ_selenium
|
train
|
00334f95f9c158dc12767792e0edbc48eea777b3
|
diff --git a/src/org/opencms/main/OpenCmsCore.java b/src/org/opencms/main/OpenCmsCore.java
index <HASH>..<HASH> 100644
--- a/src/org/opencms/main/OpenCmsCore.java
+++ b/src/org/opencms/main/OpenCmsCore.java
@@ -2047,8 +2047,9 @@ public final class OpenCmsCore {
// get the right site for the request
String siteRoot = null;
- if (cms.getRequestContext().getUri().startsWith("/system/workplace/")
- && getRoleManager().hasRole(cms, CmsRole.ELEMENT_AUTHOR)) {
+ boolean isWorkplace = cms.getRequestContext().getUri().startsWith("/system/workplace/")
+ || request.getRequestURI().startsWith(OpenCms.getSystemInfo().getWorkplaceContext());
+ if (isWorkplace && getRoleManager().hasRole(cms, CmsRole.ELEMENT_AUTHOR)) {
// keep the site root for workplace requests
siteRoot = cms.getRequestContext().getSiteRoot();
} else {
|
Fixed problem where OpenCmsCore#updateContext overwrote the site root in
the new workplace.
Conflicts:
src/org/opencms/main/OpenCmsCore.java
|
alkacon_opencms-core
|
train
|
beaa5e83bea8a35db024037637cf811a746535ea
|
diff --git a/auth/users.go b/auth/users.go
index <HASH>..<HASH> 100644
--- a/auth/users.go
+++ b/auth/users.go
@@ -79,23 +79,34 @@ type UserManager struct {
// Create will create a new user with given email and cleartext password.
// It will panic on any crypto or database connection errors.
func (m *UserManager) Create(email, first, last, clear string) (User, error) {
+ return m.create(email, first, last, clear, false)
+}
+
+// CreateSuperuser will create a new superuser with given email and cleartext password.
+// It will panic on any crypto or database connection errors.
+func (m *UserManager) CreateSuperuser(email, first, last, clear string) (User, error) {
+ return m.create(email, first, last, clear, true)
+}
+
+func (m *UserManager) create(email, first, last, clear string, isAdmin bool) (User, error) {
user := User{
- Email: email,
- FirstName: first,
- LastName: last,
- IsActive: true,
- Password: MakePassword(m.hash, clear),
- Token: m.tokenFunc(),
- TokenSetAt: time.Now(),
- manager: m,
+ Email: email,
+ FirstName: first,
+ LastName: last,
+ IsActive: true,
+ IsSuperuser: isAdmin,
+ Password: MakePassword(m.hash, clear),
+ Token: m.tokenFunc(),
+ TokenSetAt: time.Now(),
+ manager: m,
}
- err := m.create(&user)
+ err := m.createUser(&user)
return user, err
}
-// create checks for a duplicate email before inserting the user.
+// createUser checks for a duplicate email before inserting the user.
// Email must already be normalized.
-func (m *UserManager) create(user *User) error {
+func (m *UserManager) createUser(user *User) error {
var duplicate string
email := sql.Select(
Users.C["email"],
|
Added CreateSuperuser method to user manager
|
aodin_volta
|
train
|
9ebb04acbd07c208c30160bd28c342c569246e8f
|
diff --git a/airflow/contrib/operators/awsbatch_operator.py b/airflow/contrib/operators/awsbatch_operator.py
index <HASH>..<HASH> 100644
--- a/airflow/contrib/operators/awsbatch_operator.py
+++ b/airflow/contrib/operators/awsbatch_operator.py
@@ -153,7 +153,7 @@ class AWSBatchOperator(BaseOperator):
for job in response['jobs']:
job_status = job['status']
- if job_status is 'FAILED':
+ if job_status == 'FAILED':
reason = job['statusReason']
raise AirflowException('Job failed with status {}'.format(reason))
elif job_status in [
|
[AIRFLOW-<I>] Use equality, not identity, check for detecting AWS Batch failures
Closes #<I> from craigforster/master
|
apache_airflow
|
train
|
0f6f374d272cad9a5e9c5e345427dd1c626c3abe
|
diff --git a/sqlite3.go b/sqlite3.go
index <HASH>..<HASH> 100644
--- a/sqlite3.go
+++ b/sqlite3.go
@@ -251,7 +251,7 @@ func (s *SQLiteStmt) bind(args []driver.Value) error {
rv = C._sqlite3_bind_text(s.s, n, (*C.char)(unsafe.Pointer(&b[0])), C.int(len(b)))
}
case int:
- rv = C.sqlite3_bind_int(s.s, n, C.int(v))
+ rv = C.sqlite3_bind_int64(s.s, n, C.sqlite3_int64(v))
case int32:
rv = C.sqlite3_bind_int(s.s, n, C.int(v))
case int64:
|
Treat int as <I>bit
|
xeodou_go-sqlcipher
|
train
|
4784f02d482a51f35f49cff848dfe8d4e361ca93
|
diff --git a/chess/pgn.py b/chess/pgn.py
index <HASH>..<HASH> 100644
--- a/chess/pgn.py
+++ b/chess/pgn.py
@@ -358,7 +358,7 @@ class Game(GameNode):
Unless the `SetUp` and `FEN` header tags are set this is the default
starting position.
"""
- if "FEN" in self.headers and "SetUp" in self.headers and self.headers["SetUp"] == "1":
+ if "FEN" in self.headers and self.headers.get("SetUp", "1") == "1":
chess960 = self.headers.get("Variant") == "Chess960"
board = chess.Board(self.headers["FEN"], chess960=chess960)
board.chess960 = board.chess960 or board.has_chess960_castling_rights()
diff --git a/test.py b/test.py
index <HASH>..<HASH> 100755
--- a/test.py
+++ b/test.py
@@ -1715,6 +1715,32 @@ class PgnTestCase(unittest.TestCase):
game = chess.pgn.read_game(pgn)
self.assertEqual(game.headers["Result"], "1-0")
+ def test_missing_setup_tag(self):
+ pgn = StringIO(textwrap.dedent("""\
+ [Event "Test position"]
+ [Site "Black to move "]
+ [Date "1997.10.26"]
+ [Round "?"]
+ [White "Pos 16"]
+ [Black "VA33.EPD"]
+ [Result "1-0"]
+ [FEN "rbb1N1k1/pp1n1ppp/8/2Pp4/3P4/4P3/P1Q2PPq/R1BR1K2 b - - 0 1"]
+
+ {Houdini 1.5 x64: 1)} 1... Nxc5 ({Houdini 1.5 x64: 2)} 1... Qh1+ 2. Ke2 Qxg2 3.
+ Kd2 Nxc5 4. Qxc5 Bg4 5. Ba3 Qxf2+ 6. Kc3 Qxe3+ 7. Kb2 Qxe8 8. Re1 Be6 9. Rh1 a5
+ 10. Rag1 Ba7 11. Qc3 g6 12. Bc5 Qb5+ 13. Qb3 Qe2+ 14. Qc2 Qxc2+ 15. Kxc2 Bxc5
+ 16. dxc5 Rc8 17. Kd2 {-2.39/22}) 2. dxc5 Bg4 3. f3 Bxf3 4. Qf2 Bxd1 5. Nd6 Bxd6
+ 6. cxd6 Qxd6 7. Bb2 Ba4 8. Qf4 Bb5+ 9. Kf2 Qg6 10. Bd4 f6 11. Qc7 Bc6 12. a4 a6
+ 13. Qg3 Qxg3+ 14. Kxg3 Rc8 15. Rc1 Kf7 16. a5 h5 17. Rh1 {-2.63/23}
+ 1-0"""))
+
+ game = chess.pgn.read_game(pgn)
+ self.assertTrue("FEN" in game.headers)
+ self.assertFalse("SetUp" in game.headers)
+
+ board = chess.Board("rbb1N1k1/pp1n1ppp/8/2Pp4/3P4/4P3/P1Q2PPq/R1BR1K2 b - - 0 1")
+ self.assertEqual(game.board(), board)
+
class StockfishTestCase(unittest.TestCase):
|
Ignore missing SetUp tag in PGNs
|
niklasf_python-chess
|
train
|
8e7d06bee747e6afc9ad2fbe6084b57d21631adc
|
diff --git a/spring-cloud-contract-stub-runner/src/main/java/org/springframework/cloud/contract/stubrunner/spring/StubRunnerConfiguration.java b/spring-cloud-contract-stub-runner/src/main/java/org/springframework/cloud/contract/stubrunner/spring/StubRunnerConfiguration.java
index <HASH>..<HASH> 100644
--- a/spring-cloud-contract-stub-runner/src/main/java/org/springframework/cloud/contract/stubrunner/spring/StubRunnerConfiguration.java
+++ b/spring-cloud-contract-stub-runner/src/main/java/org/springframework/cloud/contract/stubrunner/spring/StubRunnerConfiguration.java
@@ -63,8 +63,7 @@ public class StubRunnerConfiguration {
.withMinMaxPort(this.props.getMinPort(), this.props.getMaxPort())
.withStubRepositoryRoot(
uriStringOrEmpty(this.props.getRepositoryRoot()))
- .withWorkOffline(this.props.getRepositoryRoot() == null
- || this.props.isWorkOffline())
+ .withWorkOffline(this.props.isWorkOffline())
.withStubsClassifier(this.props.getClassifier())
.withStubs(this.props.getIds())
.withContextPath(contextPath())
diff --git a/tests/spring-cloud-contract-stub-runner-context-path/src/test/java/com/example/loan/FailFastLoanApplicationServiceTests.java b/tests/spring-cloud-contract-stub-runner-context-path/src/test/java/com/example/loan/FailFastLoanApplicationServiceTests.java
index <HASH>..<HASH> 100644
--- a/tests/spring-cloud-contract-stub-runner-context-path/src/test/java/com/example/loan/FailFastLoanApplicationServiceTests.java
+++ b/tests/spring-cloud-contract-stub-runner-context-path/src/test/java/com/example/loan/FailFastLoanApplicationServiceTests.java
@@ -32,4 +32,21 @@ public class FailFastLoanApplicationServiceTests {
.hasMessage("For groupId [org.springframework.cloud.contract.verifier.stubs] artifactId [should-not-be-found] and classifier [stubs] the version was not resolved!");
}
+ @Test
+ public void shouldNotTryAndWorkOfflineWhenWorkOfflineIsSetToFalse() {
+ // When
+ final Throwable throwable = catchThrowable(() -> new SpringApplicationBuilder(Application.class, StubRunnerConfiguration.class)
+ .properties(ImmutableMap.of(
+ "stubrunner.workOffline", "false",
+ "stubrunner.ids", new String[]{"org.springframework.cloud.contract.verifier.stubs:should-not-be-found"}))
+ .run());
+
+ // Then
+ assertThat(throwable).isInstanceOf(BeanCreationException.class);
+ assertThat(throwable.getCause()).isInstanceOf(BeanInstantiationException.class);
+ assertThat(throwable.getCause().getCause())
+ .isInstanceOf(IllegalStateException.class)
+ .hasMessage("Remote repositories for stubs are not specified and work offline flag wasn't passed");
+ }
+
}
|
Fixes #<I> - When specifying 'workOffline=false', the stubrunner will now always fail if unable to find the stub remotely
|
spring-cloud_spring-cloud-contract
|
train
|
6822ed223dc93d2b73c5dccf041368b172f9b260
|
diff --git a/src/Post.php b/src/Post.php
index <HASH>..<HASH> 100644
--- a/src/Post.php
+++ b/src/Post.php
@@ -4,17 +4,49 @@ namespace Formulaic;
abstract class Post extends Form
{
+ use Form\Tostring {
+ Form\Tostring::__toString as private __parentToString;
+ }
+
protected $attributes = ['method' => 'post'];
+ /*
public function __construct($id = null)
{
parent::__construct($id);
$this->addSource($_POST);
}
-
+
public function cancelled()
{
return isset($_POST['act_cancel']);
}
+ */
+
+ public function __toString()
+ {
+ $files = false;
+ foreach ((array)$this as $element) {
+ if ($element instanceof Fieldset) {
+ foreach ((array)$element as $field) {
+ if ($field instanceof File) {
+ $files = true;
+ break 2;
+ }
+ }
+ } elseif ($element instanceof Label
+ && $element->element() instanceof File
+ ) {
+ $files = true;
+ } elseif ($element instanceof File) {
+ $files = true;
+ break;
+ }
+ }
+ if ($files) {
+ $this->attributes += ['enctype' => 'multipart/form-data'];
+ }
+ return $this->__parentToString();
+ }
}
|
cleanup, and check if the post form has any file inputs so we can set enctype accordingly
|
monolyth-php_formulaic
|
train
|
3ae3414a0d05a5779e603286f4963fbf63f91870
|
diff --git a/tests/PHPUnit/RedisCommandConstraint.php b/tests/PHPUnit/RedisCommandConstraint.php
index <HASH>..<HASH> 100644
--- a/tests/PHPUnit/RedisCommandConstraint.php
+++ b/tests/PHPUnit/RedisCommandConstraint.php
@@ -25,10 +25,10 @@ class RedisCommandConstraint extends PHPUnit_Framework_Constraint
public function __construct($command = null, array $arguments = null)
{
if ($command instanceof CommandInterface) {
- $this->commandID = $command->getId();
+ $this->commandID = strtoupper($command->getId());
$this->arguments = $arguments ?: $commant->getArguments();
} else {
- $this->commandID = $command;
+ $this->commandID = strtoupper($command);
$this->arguments = $arguments;
}
}
@@ -42,7 +42,7 @@ class RedisCommandConstraint extends PHPUnit_Framework_Constraint
return false;
}
- if ($this->commandID && $other->getId() !== $this->commandID) {
+ if ($this->commandID && strtoupper($other->getId()) !== $this->commandID) {
return false;
}
@@ -74,7 +74,7 @@ class RedisCommandConstraint extends PHPUnit_Framework_Constraint
$string = 'is a Redis command';
if ($this->commandID) {
- $string .= " with ID `{$this->commandID}";
+ $string .= " with ID '{$this->commandID}'";
}
if ($this->arguments) {
|
[tests] Normalize casing of command identifiers in constraint.
We do not care much about the casing of command IDs in our constraint,
so it makes no difference for us if it is "SET" or "set".
|
nrk_predis
|
train
|
049982e0de00f88e20671d348aefe6677ed91ca7
|
diff --git a/lib/api_object.rb b/lib/api_object.rb
index <HASH>..<HASH> 100644
--- a/lib/api_object.rb
+++ b/lib/api_object.rb
@@ -20,7 +20,7 @@ module ActiveApi
def get_results_by_ip ip, arguments = {}
- self.api_key = arguments.delete(:key) if arguments[:key]
+ self.api_key = arguments.delete(:key) if arguments.include?(:key)
if self.api_key
location = GeoIp.geolocation(ip)
raise unless location[:status_code] == "OK"
diff --git a/test/unit/api_object_test.rb b/test/unit/api_object_test.rb
index <HASH>..<HASH> 100644
--- a/test/unit/api_object_test.rb
+++ b/test/unit/api_object_test.rb
@@ -54,7 +54,6 @@ class ApiObjectTest < MiniTest::Unit::TestCase
end
end
-
def test_should_get_correct_weather
weather_mv = Weather.new(Weather.get_results(:weather => 'Mountain+View'))
assert_equal(weather_mv, @@weather_mv)
@@ -66,11 +65,11 @@ class ApiObjectTest < MiniTest::Unit::TestCase
# Note that the 2 services give slightly different location
def test_should_get_correct_weather_by_ip_with_no_key
GeoIp.api_key = nil
- weather_rr = Weather.new(Weather.get_results_by_ip(IP, :weather => :zipcode))
+ weather_rr = Weather.new(Weather.get_results_by_ip(IP, :key => nil, :weather => :zipcode))
assert_equal(weather_rr, @@weather_rr)
refute_has_errors(weather_rr)
end
-
+
def test_should_get_correct_weather_with_key
unless @@ip_key.nil?
weather_au = Weather.new(Weather.get_results_by_ip(IP, :key => @@ip_key, :weather => :zip_code))
@@ -116,7 +115,7 @@ class ApiObjectTest < MiniTest::Unit::TestCase
assert(routes.errors, Route.invalid_url_msg)
end
-
+
private
#ensure that the estimates of the first station include the sample
|
fixed getting data by ip with no api key
|
tmoskun_api_object
|
train
|
7659aec188430dfeec336da924c39e4322b37676
|
diff --git a/src/js/cljs.js b/src/js/cljs.js
index <HASH>..<HASH> 100644
--- a/src/js/cljs.js
+++ b/src/js/cljs.js
@@ -347,13 +347,13 @@ export default function startClojureScriptEngine(opts: CLIOptsType): void {
if (mainScript) {
initClojureScriptEngine(opts);
executeScript(mainScript, 'path');
- process.exit(process.exitValue);
+ setImmediate(process.exit, process.exitValue);
}
if (mainNsName) {
initClojureScriptEngine(opts);
runMain(mainNsName, args);
- process.exit(process.exitValue);
+ setImmediate(process.exit, process.exitValue);
}
if (repl) {
|
don't exit scripts synchronously, queue the exit callback
|
anmonteiro_lumo
|
train
|
b544f93491fd73ef00c4fc4d83f6438d6c55f03e
|
diff --git a/src/webroot/cms/lib/supra/combo/combo.php b/src/webroot/cms/lib/supra/combo/combo.php
index <HASH>..<HASH> 100644
--- a/src/webroot/cms/lib/supra/combo/combo.php
+++ b/src/webroot/cms/lib/supra/combo/combo.php
@@ -31,7 +31,7 @@ $css = strpos($files[0], '.css') !== false ? true : false;
$ext = ($css ? 'css' : 'js');
$extLength = ($css ? 3 : 2);
$lessCss = true;
-$pre = @$pre ?: $_SERVER['DOCUMENT_ROOT'];
+$pre = @$pre ? : $_SERVER['DOCUMENT_ROOT'];
// if will need to store in webroot...
//$cacheDir = $pre . '/tmp';
@@ -95,11 +95,11 @@ function getEtag($files)
{
global $css, $checkFileModificationTime, $version;
$cacheSource = array($version);
-
+
if ($checkFileModificationTime) {
$cacheSource[] = filemtime(__FILE__);
}
-
+
foreach ($files as $file) {
if ($checkFileModificationTime) {
$cacheSource = array_merge($cacheSource, getFileMtime($file));
@@ -125,10 +125,20 @@ function writeFiles($files, $eTag)
if ($apc) {
apc_store('combo-' . $eTag, $out, 1800);
}
- @mkdir($cacheDir . '/yui/' . substr($eTag, 0, 2) . '/', 0777, true);
- @file_put_contents($cacheDir . '/yui/' . substr($eTag, 0, 2) . '/' . $eTag, $out);
- @chmod($cacheDir . '/yui/' . substr($eTag, 0, 2), 0777);
- @chmod($cacheDir . '/yui/' . substr($eTag, 0, 2) . '/' . $eTag, 0666);
+
+ $outDirname = $cacheDir . '/yui/' . substr($eTag, 0, 2);
+
+ @mkdir($outDirname, 0777, true);
+
+ $tmpFilename = tempnam($outDirname, 'tmp-');
+ @file_put_contents($tmpFilename, $out);
+
+ $outFilename = $outDirname . '/' . $eTag;
+
+ @rename($tmpFilename, $outFilename);
+
+ @chmod($outDirname, 0777);
+ @chmod($outFilename, 0666);
}
return $out;
@@ -137,15 +147,15 @@ function writeFiles($files, $eTag)
function getFileMtime($file)
{
global $css, $pre, $lessCss, $checkFileModificationTimeForIncludedLessCss;
-
+
$cacheSource = array();
$thisPre = $pre;
if (strpos($file, '/cms-local/') === 0) {
$thisPre = realpath('../../../../../../../src/webroot');
- }
-
+ }
+
$files = array($thisPre . $file);
// Try searching for .less file
@@ -153,7 +163,7 @@ function getFileMtime($file)
$lessFile = $thisPre . $file . '.less';
if ($lessCss && file_exists($lessFile)) {
-
+
if ($checkFileModificationTimeForIncludedLessCss) {
$lessPhp = $thisPre . '/cms/lib/supra/lessphp/SupraLessC.php';
require_once $lessPhp;
@@ -185,9 +195,9 @@ function getFileContent($file)
global $css, $pre, $lessCss;
$outFile = null;
-
+
$thisPre = $pre;
-
+
if (strpos($file, '/cms-local/') === 0) {
$thisPre = realpath('../../../../../../../src/webroot');
} else {
|
Issue red #<I>;
Make combo.php cache file writes atomic on fs level, hopefully.
|
sitesupra_sitesupra
|
train
|
87fe2ebebb497765518005636c8379549ec922a7
|
diff --git a/bin/doctrine-module.php b/bin/doctrine-module.php
index <HASH>..<HASH> 100644
--- a/bin/doctrine-module.php
+++ b/bin/doctrine-module.php
@@ -40,10 +40,9 @@ while (!file_exists('config/application.config.php')) {
chdir($dir);
}
-require_once (getenv('ZF2_PATH') ?: 'vendor/ZendFramework/library') . '/Zend/Loader/AutoloaderFactory.php';
-
-// setup autoloader
-AutoloaderFactory::factory();
+if (!include('vendor/autoload.php')) {
+ throw new RuntimeException('vendor/autoload.php could not be found. Did you run php composer.phar install?');
+}
// get application stack configuration
$configuration = include 'config/application.config.php';
@@ -53,7 +52,7 @@ $serviceManager = new ServiceManager(new ServiceManagerConfiguration($configurat
$serviceManager->setService('ApplicationConfiguration', $configuration);
$serviceManager->get('ModuleManager')->loadModules();
-$serviceManager
- ->get('Di')
- ->get('doctrine_cli')
- ->run();
+// currently bug caused by lazy initialization of Di factory
+$serviceManager->get('Di');
+
+$serviceManager->get('doctrine_cli')->run();
|
Updating CLI to work with latest autoloading as of skeleton application
|
doctrine_DoctrineModule
|
train
|
2ec982efd69c40f571bc568cdaee4630c8f3a554
|
diff --git a/unitypack/asset.py b/unitypack/asset.py
index <HASH>..<HASH> 100644
--- a/unitypack/asset.py
+++ b/unitypack/asset.py
@@ -63,7 +63,7 @@ class Asset:
return self.environment.get_asset_by_filename(path)
def __init__(self):
- self.objects = {}
+ self._objects = {}
self.adds = []
self.asset_refs = [self]
self.types = {}
@@ -72,15 +72,26 @@ class Asset:
self.name = ""
self.long_object_ids = False
self.tree = TypeMetadata(self)
+ self.loaded = False
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.name)
@property
+ def objects(self):
+ if not self.loaded:
+ self.load(self.data)
+ return self._objects
+
+ @property
def is_resource(self):
return self.name.endswith(".resource")
def load(self, buf):
+ if self.is_resource:
+ self.loaded = True
+ return
+
self.metadata_size = buf.read_uint()
self.file_size = buf.read_uint()
self.format = buf.read_uint()
@@ -121,6 +132,7 @@ class Asset:
unk_string = buf.read_string()
assert not unk_string, repr(unk_string)
+ self.loaded = True
def read_id(self, buf):
if self.format >= 14:
@@ -139,10 +151,10 @@ class Asset:
logging.warning("%r absent from structs.dat", obj.class_id)
self.types[obj.type_id] = None
- if obj.path_id in self.objects:
+ if obj.path_id in self._objects:
raise ValueError("Duplicate asset object: %r (path_id=%r)" % (obj, obj.path_id))
- self.objects[obj.path_id] = obj
+ self._objects[obj.path_id] = obj
def pretty(self):
ret = []
diff --git a/unitypack/assetbundle.py b/unitypack/assetbundle.py
index <HASH>..<HASH> 100644
--- a/unitypack/assetbundle.py
+++ b/unitypack/assetbundle.py
@@ -71,8 +71,6 @@ class AssetBundle:
num_assets = 1
for i in range(num_assets):
asset = Asset.from_bundle(self, buf)
- if not asset.is_resource:
- asset.load(asset.data)
self.assets.append(asset)
def read_compressed_data(self, buf, compression):
@@ -116,8 +114,6 @@ class AssetBundle:
buf.seek(basepos + ofs)
asset = Asset.from_bundle(self, buf)
asset.name = name
- if not asset.is_resource:
- asset.load(asset.data)
self.assets.append(asset)
# Hacky
|
Add lazy-loading of Asset on .objects access
|
HearthSim_UnityPack
|
train
|
fe444afbd33df835591eadee35c78a3925608fad
|
diff --git a/src/Notifications/Notifications/CertificateExpiresSoon.php b/src/Notifications/Notifications/CertificateExpiresSoon.php
index <HASH>..<HASH> 100644
--- a/src/Notifications/Notifications/CertificateExpiresSoon.php
+++ b/src/Notifications/Notifications/CertificateExpiresSoon.php
@@ -57,6 +57,6 @@ class CertificateExpiresSoon extends BaseNotification
protected function getMessageText(): string
{
- return "SSL certificate for {$this->event->monitor->url} expires soon";
+ return "SSL certificate for {$this->getMonitor()->url} expires soon";
}
}
diff --git a/src/Notifications/Notifications/UptimeCheckFailed.php b/src/Notifications/Notifications/UptimeCheckFailed.php
index <HASH>..<HASH> 100644
--- a/src/Notifications/Notifications/UptimeCheckFailed.php
+++ b/src/Notifications/Notifications/UptimeCheckFailed.php
@@ -57,7 +57,7 @@ class UptimeCheckFailed extends BaseNotification
$extraProperties = [
$since => $date,
- 'Failure reason' => $this->event->monitor->uptime_check_failure_reason,
+ 'Failure reason' => $this->getMonitor()->uptime_check_failure_reason,
];
return parent::getMonitorProperties($extraProperties);
@@ -65,7 +65,7 @@ class UptimeCheckFailed extends BaseNotification
public function isStillRelevant(): bool
{
- return $this->event->monitor->uptime_status == UptimeStatus::DOWN;
+ return $this->getMonitor()->uptime_status == UptimeStatus::DOWN;
}
public function setEvent(MonitorFailedEvent $event)
@@ -77,6 +77,6 @@ class UptimeCheckFailed extends BaseNotification
protected function getMessageText(): string
{
- return "{$this->event->monitor->url} seems down";
+ return "{$this->getMonitor()->url} seems down";
}
}
diff --git a/src/Notifications/Notifications/UptimeCheckRecovered.php b/src/Notifications/Notifications/UptimeCheckRecovered.php
index <HASH>..<HASH> 100644
--- a/src/Notifications/Notifications/UptimeCheckRecovered.php
+++ b/src/Notifications/Notifications/UptimeCheckRecovered.php
@@ -60,7 +60,7 @@ class UptimeCheckRecovered extends BaseNotification
public function isStillRelevant(): bool
{
- return $this->event->monitor->uptime_status == UptimeStatus::UP;
+ return $this->getMonitor()->uptime_status == UptimeStatus::UP;
}
public function setEvent(MonitorRecoveredEvent $event)
@@ -72,6 +72,6 @@ class UptimeCheckRecovered extends BaseNotification
public function getMessageText(): string
{
- return "{$this->event->monitor->url} has recovered after {$this->event->downtimePeriod->duration()}";
+ return "{$this->getMonitor()->url} has recovered after {$this->event->downtimePeriod->duration()}";
}
}
diff --git a/src/Notifications/Notifications/UptimeCheckSucceeded.php b/src/Notifications/Notifications/UptimeCheckSucceeded.php
index <HASH>..<HASH> 100644
--- a/src/Notifications/Notifications/UptimeCheckSucceeded.php
+++ b/src/Notifications/Notifications/UptimeCheckSucceeded.php
@@ -49,7 +49,7 @@ class UptimeCheckSucceeded extends BaseNotification
public function isStillRelevant(): bool
{
- return $this->event->monitor->uptime_status != UptimeStatus::DOWN;
+ return $this->getMonitor()->uptime_status != UptimeStatus::DOWN;
}
public function setEvent(MonitorSucceededEvent $event)
@@ -61,6 +61,6 @@ class UptimeCheckSucceeded extends BaseNotification
public function getMessageText(): string
{
- return "{$this->event->monitor->url} is up";
+ return "{$this->getMonitor()->url} is up";
}
}
|
Use existing $this->getMonitor() method for consistency (#<I>)
|
spatie_laravel-uptime-monitor
|
train
|
4db61d025d1d683cbb88fb76f0595186d77203bf
|
diff --git a/provision/kubernetes/provisioner.go b/provision/kubernetes/provisioner.go
index <HASH>..<HASH> 100644
--- a/provision/kubernetes/provisioner.go
+++ b/provision/kubernetes/provisioner.go
@@ -372,6 +372,9 @@ func (p *kubernetesProvisioner) RegisterUnit(a provision.App, unitID string, cus
}
pod, err := client.Core().Pods(client.Namespace()).Get(unitID, metav1.GetOptions{})
if err != nil {
+ if k8sErrors.IsNotFound(err) {
+ return &provision.UnitNotFoundError{ID: unitID}
+ }
return errors.WithStack(err)
}
units, err := p.podsToUnits(client, []apiv1.Pod{*pod}, a, nil)
diff --git a/provision/swarm/provisioner.go b/provision/swarm/provisioner.go
index <HASH>..<HASH> 100644
--- a/provision/swarm/provisioner.go
+++ b/provision/swarm/provisioner.go
@@ -368,13 +368,13 @@ func (p *swarmProvisioner) RegisterUnit(a provision.App, unitId string, customDa
if err != nil {
return err
}
- if customData == nil {
- return nil
- }
task, err := findTaskByContainerId(tasks, unitId)
if err != nil {
return err
}
+ if customData == nil {
+ return nil
+ }
labels := provision.LabelSet{Labels: task.Spec.ContainerSpec.Labels, Prefix: tsuruLabelPrefix}
if !labels.IsDeploy() {
return nil
|
provision/swarm,kubernetes: fail register unit for unit not found
|
tsuru_tsuru
|
train
|
10cc5f7b1aceb199b8df42d3e2606d0bc50ee316
|
diff --git a/app-servers/MongoNode.php b/app-servers/MongoNode.php
index <HASH>..<HASH> 100644
--- a/app-servers/MongoNode.php
+++ b/app-servers/MongoNode.php
@@ -36,7 +36,7 @@ class MongoNode extends AppInstance
$appInstance = $this;
$this->LockClient->job(__CLASS__,TRUE,function($jobname) use ($appInstance)
{
- $this->pushRequest(new MongoNode_ReplicationRequest($this,$this));
+ $appInstance->pushRequest(new MongoNode_ReplicationRequest($$appInstance,$appInstance));
});
}
}
|
Fixed critical typo bug in MongoNode.php (issue #9 reported by Emmerman)
|
kakserpom_phpdaemon
|
train
|
59559f3f9f711e3c8468d2419caf944979e27580
|
diff --git a/sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/ProjectFileSystemAdapter.java b/sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/ProjectFileSystemAdapter.java
index <HASH>..<HASH> 100644
--- a/sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/ProjectFileSystemAdapter.java
+++ b/sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/ProjectFileSystemAdapter.java
@@ -24,13 +24,19 @@ import com.google.common.collect.Lists;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.CharEncoding;
import org.apache.maven.project.MavenProject;
-import org.sonar.api.resources.*;
import org.sonar.api.resources.InputFile;
+import org.sonar.api.resources.InputFileUtils;
+import org.sonar.api.resources.Java;
+import org.sonar.api.resources.Language;
+import org.sonar.api.resources.Project;
+import org.sonar.api.resources.ProjectFileSystem;
+import org.sonar.api.resources.Resource;
import org.sonar.api.scan.filesystem.FileQuery;
import org.sonar.api.scan.filesystem.PathResolver;
import org.sonar.api.utils.SonarException;
import javax.annotation.Nullable;
+
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
@@ -47,7 +53,6 @@ public class ProjectFileSystemAdapter implements ProjectFileSystem {
private final PathResolver pathResolver = new PathResolver();
private final MavenProject pom;
-
public ProjectFileSystemAdapter(DefaultModuleFileSystem target, Project project, @Nullable MavenProject pom) {
this.target = target;
this.pom = pom;
@@ -137,7 +142,7 @@ public class ProjectFileSystemAdapter implements ProjectFileSystem {
public List<File> getSourceFiles(Language... langs) {
List<File> result = Lists.newArrayList();
for (Language lang : langs) {
- result.addAll(target.files(FileQuery.onSource().onLanguage(lang.getKey())));
+ result.addAll(target.files(FileQuery.onMain().onLanguage(lang.getKey())));
}
return result;
}
@@ -186,12 +191,14 @@ public class ProjectFileSystemAdapter implements ProjectFileSystem {
public List<InputFile> mainFiles(String... langs) {
List<InputFile> result = Lists.newArrayList();
- Iterable<org.sonar.api.scan.filesystem.InputFile> files = target.inputFiles(FileQuery.onSource().onLanguage(langs));
+ Iterable<org.sonar.api.scan.filesystem.InputFile> files = target.inputFiles(FileQuery.onMain().onLanguage(langs));
for (org.sonar.api.scan.filesystem.InputFile file : files) {
String sourceDir = file.attribute(org.sonar.api.scan.filesystem.internal.DefaultInputFile.ATTRIBUTE_SOURCEDIR_PATH);
String sourceRelativePath = file.attribute(org.sonar.api.scan.filesystem.internal.DefaultInputFile.ATTRIBUTE_SOURCE_RELATIVE_PATH);
if (sourceDir != null && sourceRelativePath != null) {
result.add(InputFileUtils.create(new File(sourceDir), sourceRelativePath));
+ } else {
+ result.add(InputFileUtils.create(target.baseDir(), file.path()));
}
}
return result;
@@ -205,6 +212,8 @@ public class ProjectFileSystemAdapter implements ProjectFileSystem {
String sourceRelativePath = file.attribute(org.sonar.api.scan.filesystem.internal.DefaultInputFile.ATTRIBUTE_SOURCE_RELATIVE_PATH);
if (sourceDir != null && sourceRelativePath != null) {
result.add(InputFileUtils.create(new File(sourceDir), sourceRelativePath));
+ } else {
+ result.add(InputFileUtils.create(target.baseDir(), file.path()));
}
}
return result;
|
Fix ProjectFileSystemAdapter to also returns files that are outside source folders
|
SonarSource_sonarqube
|
train
|
5cedab4bd60383eea7b75ecac466342d17ed1b93
|
diff --git a/biojava-core/src/main/java/org/biojava/nbio/core/sequence/io/GenbankSequenceParser.java b/biojava-core/src/main/java/org/biojava/nbio/core/sequence/io/GenbankSequenceParser.java
index <HASH>..<HASH> 100644
--- a/biojava-core/src/main/java/org/biojava/nbio/core/sequence/io/GenbankSequenceParser.java
+++ b/biojava-core/src/main/java/org/biojava/nbio/core/sequence/io/GenbankSequenceParser.java
@@ -257,11 +257,11 @@ public class GenbankSequenceParser<S extends AbstractSequence<C>, C extends Comp
if (m.matches()) {
String dbname = m.group(1);
String raccession = m.group(2);
- Qualifier xref = new DBReferenceInfo(dbname, raccession);
+ DBReferenceInfo xref = new DBReferenceInfo(dbname, raccession);
gbFeature.addQualifier(key, xref);
- ArrayList<DBReferenceInfo> listDBEntry = new ArrayList<DBReferenceInfo>();
- listDBEntry.add((DBReferenceInfo) xref);
+ ArrayList<DBReferenceInfo> listDBEntry = new ArrayList<>();
+ listDBEntry.add(xref);
mapDB.put(key, listDBEntry);
} else {
throw new ParserException("Bad dbxref");
|
Refactored this, no need explicit cast if DBReferenceInfo is needed
|
biojava_biojava
|
train
|
cc1c074c227bcbe43f55f9be181bbef3f93fc979
|
diff --git a/capi_experimental/manifest.go b/capi_experimental/manifest.go
index <HASH>..<HASH> 100644
--- a/capi_experimental/manifest.go
+++ b/capi_experimental/manifest.go
@@ -57,7 +57,8 @@ var _ = CapiExperimentalDescribe("apply_manifest", func() {
By("Creating a Route")
By("Starting an App")
StartApp(appGUID)
- route = fmt.Sprintf("bar.%s", Config.GetAppsDomain())
+ random_route_prefix := random_name.CATSRandomName("ROUTE")
+ route = fmt.Sprintf("%s.%s", random_route_prefix, Config.GetAppsDomain())
By("Registering a Service Broker")
broker = NewServiceBroker(
|
Use a random route prefix in experimental app manifest spec
- If CATS are being run concurrently there is a chance that two tests
will try to assign the same route and fail. Randomizing the name of the
routes that these tests use will avoid this situation
|
cloudfoundry_cf-acceptance-tests
|
train
|
b2f60730bde45bbf1fa3a1d4f855d34f5cbd53f1
|
diff --git a/py/selenium/webdriver/chrome/service.py b/py/selenium/webdriver/chrome/service.py
index <HASH>..<HASH> 100644
--- a/py/selenium/webdriver/chrome/service.py
+++ b/py/selenium/webdriver/chrome/service.py
@@ -17,6 +17,7 @@
# specific language governing permissions and limitations
# under the License.
import os
+import errno
import subprocess
from subprocess import PIPE
import time
@@ -54,8 +55,10 @@ class Service(object):
Starts the ChromeDriver Service.
:Exceptions:
- - WebDriverException : Raised either when it can't start the service
- or when it can't connect to the service
+ - WebDriverException : Raised either when it cannot find the
+ executable, when it does not have permissions for the
+ executable, or when it cannot connect to the service.
+ - Possibly other Exceptions in rare circumstances (OSError, etc).
"""
env = self.env or os.environ
try:
@@ -63,13 +66,21 @@ class Service(object):
self.path,
"--port=%d" % self.port] +
self.service_args, env=env, stdout=PIPE, stderr=PIPE)
- except:
- raise WebDriverException(
- "'" + os.path.basename(self.path) + "' executable needs to be \
- available in the path. Please look at \
- http://docs.seleniumhq.org/download/#thirdPartyDrivers \
- and read up at \
- https://github.com/SeleniumHQ/selenium/wiki/ChromeDriver")
+ except OSError as err:
+ docs_msg = "Please see " \
+ "https://sites.google.com/a/chromium.org/chromedriver/home"
+ if err.errno == errno.ENOENT:
+ raise WebDriverException(
+ "'%s' executable needs to be in PATH. %s" % (
+ os.path.basename(self.path), docs_msg)
+ )
+ elif err.errno == errno.EACCES:
+ raise WebDriverException(
+ "'%s' executable may have wrong permissions. %s" % (
+ os.path.basename(self.path), docs_msg)
+ )
+ else:
+ raise
count = 0
while not utils.is_connectable(self.port):
count += 1
|
py: improve OSError exception on starting chromedriver
Fixes issue <I> on googlecode.
Catching too-broad an exception squashes esoteric bugs like
<I> on googlecode (too many open files).
|
SeleniumHQ_selenium
|
train
|
c97685ae9175128e18ed184fd4ff4faee7ef32d0
|
diff --git a/lib/salt.rb b/lib/salt.rb
index <HASH>..<HASH> 100644
--- a/lib/salt.rb
+++ b/lib/salt.rb
@@ -9,7 +9,7 @@ require 'erubis'
require 'redcarpet'
require 'salt/frontable'
-require 'salt/renderable'
+require 'salt/publishable'
require 'salt/configuration'
require 'salt/page'
require 'salt/post'
|
Swapped renderable for publishable.
|
waferbaby_dimples
|
train
|
7f751b44ed3efcc57f0b9d35ace44d0a5951be0b
|
diff --git a/pkg/hijack/hijack.go b/pkg/hijack/hijack.go
index <HASH>..<HASH> 100644
--- a/pkg/hijack/hijack.go
+++ b/pkg/hijack/hijack.go
@@ -14,8 +14,8 @@ type HijackReadWriter struct {
Header string
Response http.ResponseWriter
+ Hijacked bool
- hijacked bool
// writer is returned when hijacking the connection
writer io.Writer
// reader is returned when hijacking the connection
@@ -44,7 +44,7 @@ func (rw *HijackReadWriter) hijack() error {
rw.Lock()
defer rw.Unlock()
- if !rw.hijacked {
+ if !rw.Hijacked {
reader, writer, err := hijackServer(rw.Response)
if err != nil {
return err
@@ -52,7 +52,7 @@ func (rw *HijackReadWriter) hijack() error {
rw.reader = reader
rw.writer = writer
fmt.Fprintf(writer, rw.Header)
- rw.hijacked = true
+ rw.Hijacked = true
}
return nil
}
diff --git a/server/heroku/processes.go b/server/heroku/processes.go
index <HASH>..<HASH> 100644
--- a/server/heroku/processes.go
+++ b/server/heroku/processes.go
@@ -1,6 +1,7 @@
package heroku
import (
+ "fmt"
"net/http"
"time"
@@ -111,6 +112,10 @@ func (h *PostProcess) ServeHTTPContext(ctx context.Context, w http.ResponseWrite
opts.Output = stream
if err := h.Run(ctx, opts); err != nil {
+ if stream.Hijacked {
+ fmt.Fprintf(stream, "%v\r", err)
+ return nil
+ }
return err
}
} else {
|
If the connection has been hijacked, write the error to the stream
|
remind101_empire
|
train
|
ffcaaac9d92541ffc632156942d1c26f44749755
|
diff --git a/src/PHPRouter/Router.php b/src/PHPRouter/Router.php
index <HASH>..<HASH> 100755
--- a/src/PHPRouter/Router.php
+++ b/src/PHPRouter/Router.php
@@ -101,6 +101,9 @@ class Router
continue;
}
+ $currentDir = dirname($_SERVER['SCRIPT_NAME']);
+ $requestUrl = trim(str_replace($currentDir, '', $requestUrl), '/');
+
// check if request _url matches route regex. if not, return false.
if (! preg_match("@^" . $this->basePath . $routes->getRegex() . "*$@i", $requestUrl, $matches)) {
continue;
|
Make possible to use PHPRouter on a subpath folder as `localhost/app/web`
|
dannyvankooten_PHP-Router
|
train
|
f16848a5cd0f2de33ec0ce3ee48cc394f2f8fcac
|
diff --git a/modules/Access.php b/modules/Access.php
index <HASH>..<HASH> 100755
--- a/modules/Access.php
+++ b/modules/Access.php
@@ -103,9 +103,9 @@ class Piwik_Access
// we join with site in case there are rows in access for an idsite that doesn't exist anymore
// (backward compatibility ; before we deleted the site without deleting rows in _access table)
- $accessRaw = $db->fetchAll("SELECT access, idsite
- FROM ".Piwik::prefixTable('access').
- " JOIN ".Piwik::prefixTable('site')." USING (idsite) ".
+ $accessRaw = $db->fetchAll("SELECT access, t2.idsite
+ FROM ".Piwik::prefixTable('access'). " as t1
+ JOIN ".Piwik::prefixTable('site')." as t2 USING (idsite) ".
" WHERE login=?", $this->identity);
foreach($accessRaw as $access)
|
- fixing Uncaught exception: 'SQLSTATE[<I>]: Integrity constraint violation:
<I> Column 'idsite' in field list is ambiguous'
git-svn-id: <URL>
|
matomo-org_matomo
|
train
|
654c620933129314130cd12bdb0596c5e69900c1
|
diff --git a/src/org/jgroups/client/StompConnection.java b/src/org/jgroups/client/StompConnection.java
index <HASH>..<HASH> 100644
--- a/src/org/jgroups/client/StompConnection.java
+++ b/src/org/jgroups/client/StompConnection.java
@@ -13,6 +13,7 @@ import java.io.IOException;
import java.net.*;
import javax.net.SocketFactory;
import javax.net.ssl.SSLSocketFactory;
+import javax.net.ssl.SSLContext;
import java.util.*;
/**
@@ -71,6 +72,10 @@ public class StompConnection implements Runnable {
this(dest, null, null, reconnect, ssl);
}
+ public StompConnection(String dest, boolean reconnect, SSLContext ssl) {
+ this(dest, null, null, reconnect, ssl);
+ }
+
public StompConnection(String dest, String userid, String password, boolean reconnect, boolean ssl) {;
server_destinations.add(dest);
this.userid = userid;
@@ -82,6 +87,14 @@ public class StompConnection implements Runnable {
socket_factory = SocketFactory.getDefault();
}
+ public StompConnection(String dest, String userid, String password, boolean reconnect, SSLContext sslcontext) {;
+ server_destinations.add(dest);
+ this.userid = userid;
+ this.password = password;
+ this.reconnect = reconnect;
+ socket_factory = sslcontext.getSocketFactory();
+ }
+
public String getSessionId() {return session_id;}
public void addListener(Listener listener) {
|
Add constructor that adds a custome SSLContext object.
Remove if conditional.
Create final version of sslcontext changes
Remove comments
|
belaban_JGroups
|
train
|
a31f01e3d524973de362b15df7665fc7ecfe8116
|
diff --git a/announce.go b/announce.go
index <HASH>..<HASH> 100644
--- a/announce.go
+++ b/announce.go
@@ -177,6 +177,8 @@ func (a *Announce) getPeers(addr Addr) error {
return a.server.getPeers(addr, a.infoHash, func(m krpc.Msg, err error) {
// Register suggested nodes closer to the target info-hash.
if m.R != nil {
+ expvars.Add("announce get_peers response nodes values", int64(len(m.R.Nodes)))
+ expvars.Add("announce get_peers response nodes6 values", int64(len(m.R.Nodes6)))
a.mu.Lock()
for _, n := range m.R.Nodes {
a.responseNode(n)
|
Add some expvars for get_peers response values
|
anacrolix_dht
|
train
|
678aee340bb2ac74695fdb6118757bc61b4a6f7c
|
diff --git a/openstack_dashboard/dashboards/project/stacks/tabs.py b/openstack_dashboard/dashboards/project/stacks/tabs.py
index <HASH>..<HASH> 100644
--- a/openstack_dashboard/dashboards/project/stacks/tabs.py
+++ b/openstack_dashboard/dashboards/project/stacks/tabs.py
@@ -40,7 +40,7 @@ class StackTopologyTab(tabs.Tab):
(("orchestration", "stacks:template"),
("orchestration", "stacks:lookup"),
("orchestration", "stacks:show"),
- ("orchestration", "resources:index"),),
+ ("orchestration", "resource:index"),),
request)
def get_context_data(self, request):
|
Correct error in policy action name
It is "resource" not "resources".
Change-Id: I<I>cd5be<I>ebe<I>ebc6a<I>f<I>f1ce
Closes-Bug: <I>
|
openstack_horizon
|
train
|
35d3923ea0c51b3a628e913e3b35f85124de8ac8
|
diff --git a/activerecord/lib/active_record/autosave_association.rb b/activerecord/lib/active_record/autosave_association.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/autosave_association.rb
+++ b/activerecord/lib/active_record/autosave_association.rb
@@ -284,7 +284,7 @@ module ActiveRecord
# Returns whether or not this record is already being saved outside of the
# current autosave callback
- def saving?
+ def saving? #:nodoc:
@saving
end
|
Exclude #saving? from API docs
We don't have a concrete use case for it outside of AR itself.
|
rails_rails
|
train
|
5d66a7800590ec20ddc2940ba982afb6c71631bb
|
diff --git a/m2bk/driver.py b/m2bk/driver.py
index <HASH>..<HASH> 100644
--- a/m2bk/driver.py
+++ b/m2bk/driver.py
@@ -43,6 +43,9 @@ def load(*, name="dummy", **kwargs):
# Try to load specified driver
if name in VALID_DRIVERS:
+ # log the thing first
+ log.msg_debug("Attempting to load driver: {d}".format(d=name))
+
# List options that are passed to the driver
options = kwargs.get('options', {})
|
Explicit log messages on driver interface
|
axltxl_m2bk
|
train
|
f1130fdc03178be72cb117a8a5303a5edfdda432
|
diff --git a/minio/api.py b/minio/api.py
index <HASH>..<HASH> 100644
--- a/minio/api.py
+++ b/minio/api.py
@@ -1525,7 +1525,15 @@ class Minio(object):
parts_to_upload.append((bucket_name, object_name, upload_id, part_number, part_data))
# Run parts upload in parallel
- pool.parallel_run(self._upload_part_routine, parts_to_upload)
+ try:
+ pool.parallel_run(self._upload_part_routine, parts_to_upload)
+ except:
+ # Any exception that occurs sends an abort on the
+ # on-going multipart operation.
+ self._remove_incomplete_upload(bucket_name,
+ object_name,
+ upload_id)
+ raise
# Update uploaded_parts with the part uploads result
# and check total uploaded data.
@@ -1544,14 +1552,28 @@ class Minio(object):
if total_uploaded != content_size:
msg = 'Data uploaded {0} is not equal input size ' \
'{1}'.format(total_uploaded, content_size)
+ # cleanup incomplete upload upon incorrect upload
+ # automatically
+ self._remove_incomplete_upload(bucket_name,
+ object_name,
+ upload_id)
raise InvalidSizeError(msg)
# Complete all multipart transactions if possible.
- mpart_result = self._complete_multipart_upload(bucket_name,
- object_name,
- upload_id,
- uploaded_parts,
- metadata=metadata)
+ try:
+ mpart_result = self._complete_multipart_upload(bucket_name,
+ object_name,
+ upload_id,
+ uploaded_parts,
+ metadata=metadata)
+ except:
+ # Any exception that occurs sends an abort on the
+ # on-going multipart operation.
+ self._remove_incomplete_upload(bucket_name,
+ object_name,
+ upload_id)
+ raise
+
# Return etag here.
return mpart_result.etag
|
Cleanup incomplete upload upon error (#<I>)
Since we do not have resuming support, we should
simply cleanup failed transfers.
|
minio_minio-py
|
train
|
88da704eedc5149b70dcec7845453456a6e26761
|
diff --git a/js/src/carousel.js b/js/src/carousel.js
index <HASH>..<HASH> 100644
--- a/js/src/carousel.js
+++ b/js/src/carousel.js
@@ -389,10 +389,6 @@ class Carousel extends BaseComponent {
}
_directionToOrder(direction) {
- if (![DIRECTION_RIGHT, DIRECTION_LEFT].includes(direction)) {
- return direction
- }
-
if (isRTL()) {
return direction === DIRECTION_LEFT ? ORDER_PREV : ORDER_NEXT
}
@@ -401,10 +397,6 @@ class Carousel extends BaseComponent {
}
_orderToDirection(order) {
- if (![ORDER_NEXT, ORDER_PREV].includes(order)) {
- return order
- }
-
if (isRTL()) {
return order === ORDER_PREV ? DIRECTION_LEFT : DIRECTION_RIGHT
}
diff --git a/js/tests/unit/carousel.spec.js b/js/tests/unit/carousel.spec.js
index <HASH>..<HASH> 100644
--- a/js/tests/unit/carousel.spec.js
+++ b/js/tests/unit/carousel.spec.js
@@ -1200,9 +1200,7 @@ describe('Carousel', () => {
const carousel = new Carousel(carouselEl, {})
expect(carousel._directionToOrder('left')).toEqual('next')
- expect(carousel._directionToOrder('prev')).toEqual('prev')
expect(carousel._directionToOrder('right')).toEqual('prev')
- expect(carousel._directionToOrder('next')).toEqual('next')
expect(carousel._orderToDirection('next')).toEqual('left')
expect(carousel._orderToDirection('prev')).toEqual('right')
@@ -1217,9 +1215,7 @@ describe('Carousel', () => {
expect(isRTL()).toBeTrue()
expect(carousel._directionToOrder('left')).toEqual('prev')
- expect(carousel._directionToOrder('prev')).toEqual('prev')
expect(carousel._directionToOrder('right')).toEqual('next')
- expect(carousel._directionToOrder('next')).toEqual('next')
expect(carousel._orderToDirection('next')).toEqual('right')
expect(carousel._orderToDirection('prev')).toEqual('left')
|
Carousel: omit redundant checks as we are always transforming the right values
|
twbs_bootstrap
|
train
|
96e96a080bb80a636ffd752d5aa700494ba99d70
|
diff --git a/www/_include.php b/www/_include.php
index <HASH>..<HASH> 100644
--- a/www/_include.php
+++ b/www/_include.php
@@ -32,6 +32,13 @@ set_exception_handler('SimpleSAML_exception_handler');
/* Log full backtrace on errors and warnings. */
function SimpleSAML_error_handler($errno, $errstr, $errfile = NULL, $errline = 0, $errcontext = NULL) {
+ static $limit = 5;
+ $limit -= 1;
+ if ($limit < 0) {
+ /* We have reached the limit in the number of backtraces we will log. */
+ return FALSE;
+ }
+
/* Show an error with a full backtrace. */
$e = new SimpleSAML_Error_Exception('Error ' . $errno . ' - ' . $errstr);
$e->logError();
|
_include.php: Limit the number of errors that will be logged.
|
simplesamlphp_saml2
|
train
|
17cc088a6ee49e29daf35429060574de08b67720
|
diff --git a/registrasion/reporting/views.py b/registrasion/reporting/views.py
index <HASH>..<HASH> 100644
--- a/registrasion/reporting/views.py
+++ b/registrasion/reporting/views.py
@@ -382,6 +382,21 @@ def credit_notes(request, form):
)
+@report_view("Invoices")
+def invoices(request,form):
+ ''' Shows all of the invoices in the system. '''
+
+ invoices = commerce.Invoice.objects.all().order_by("status")
+
+ return QuerysetReport(
+ "Invoices",
+ ["id", "recipient", "value", "get_status_display"],
+ invoices,
+ headings=["id", "Recipient", "Value", "Status"],
+ link_view=views.invoice,
+ )
+
+
class AttendeeListReport(ListReport):
def get_link(self, argument):
diff --git a/registrasion/urls.py b/registrasion/urls.py
index <HASH>..<HASH> 100644
--- a/registrasion/urls.py
+++ b/registrasion/urls.py
@@ -49,6 +49,7 @@ reports = [
url(r"^attendee/([0-9]*)$", rv.attendee, name="attendee"),
url(r"^credit_notes/?$", rv.credit_notes, name="credit_notes"),
url(r"^discount_status/?$", rv.discount_status, name="discount_status"),
+ url(r"^invoices/?$", rv.invoices, name="invoices"),
url(
r"^paid_invoices_by_date/?$",
rv.paid_invoices_by_date,
|
Adds an “invoices” view
|
chrisjrn_registrasion
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.