hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
d52b562d6eb13fdac759ddb80cf6ca5e41a9d4bf
|
diff --git a/proxy/proxy_test.go b/proxy/proxy_test.go
index <HASH>..<HASH> 100644
--- a/proxy/proxy_test.go
+++ b/proxy/proxy_test.go
@@ -447,6 +447,44 @@ func (s *ProxySuite) TestXFFIsAppended(c *C) {
<-done
}
+func (s *ProxySuite) TestXRequestStartIsAppended(c *C) {
+ done := make(chan bool)
+
+ s.RegisterHandler(c, "app", func(x *httpConn) {
+ req, _ := x.ReadRequest()
+ c.Check(req.Header.Get("X-Request-Start"), Matches, "^\\d{10}\\d{3}$") // unix timestamp millis
+ done <- true
+ })
+
+ x := s.DialProxy(c)
+
+ req := x.NewRequest("GET", "/", nil)
+ req.Host = "app"
+ x.WriteRequest(req)
+
+ <-done
+}
+
+func (s *ProxySuite) TestXRequestStartIsNotOverwritten(c *C) {
+ done := make(chan bool)
+
+ s.RegisterHandler(c, "app", func(x *httpConn) {
+ req, _ := x.ReadRequest()
+ c.Check(req.Header[http.CanonicalHeaderKey("X-Request-Start")], DeepEquals, []string{"", "user-set2"})
+ done <- true
+ })
+
+ x := s.DialProxy(c)
+
+ req := x.NewRequest("GET", "/", nil)
+ req.Host = "app"
+ req.Header.Add("X-Request-Start", "") // impl cannot just check for empty string
+ req.Header.Add("X-Request-Start", "user-set2")
+ x.WriteRequest(req)
+
+ <-done
+}
+
func (s *ProxySuite) TestWebSocketUpgrade(c *C) {
s.RegisterHandler(c, "ws", func(x *httpConn) {
req, _ := x.ReadRequest()
diff --git a/proxy/request_handler.go b/proxy/request_handler.go
index <HASH>..<HASH> 100644
--- a/proxy/request_handler.go
+++ b/proxy/request_handler.go
@@ -6,6 +6,7 @@ import (
"io"
"net"
"net/http"
+ "strconv"
"strings"
"time"
@@ -162,6 +163,7 @@ func (h *RequestHandler) copyToResponse(src io.ReadCloser) (int64, error) {
func (h *RequestHandler) setupRequest(endpoint *route.Endpoint) {
h.setRequestURL(endpoint.CanonicalAddr())
h.setRequestXForwardedFor()
+ h.setRequestXRequestStart()
}
func (h *RequestHandler) setRequestURL(addr string) {
@@ -182,6 +184,12 @@ func (h *RequestHandler) setRequestXForwardedFor() {
}
}
+func (h *RequestHandler) setRequestXRequestStart() {
+ if _, ok := h.request.Header[http.CanonicalHeaderKey("X-Request-Start")]; !ok {
+ h.request.Header.Set("X-Request-Start", strconv.FormatInt(time.Now().UnixNano()/1e6, 10))
+ }
+}
+
func (h *RequestHandler) setupConnection() {
// Use a new connection for every request
// Keep-alive can be bolted on later, if we want to
|
Added x-request-start header for each request
|
cloudfoundry_gorouter
|
train
|
a2eaa80086ad0cc26bfdc5142f16d6ca7b458e31
|
diff --git a/sdl/syswm_x11.go b/sdl/syswm_x11.go
index <HASH>..<HASH> 100644
--- a/sdl/syswm_x11.go
+++ b/sdl/syswm_x11.go
@@ -1,4 +1,4 @@
-// +build x11 OR linux
+// +build x11
package sdl
|
Build syswm_x<I>.go only if actually using X<I> (#<I>)
|
veandco_go-sdl2
|
train
|
9586d5cbd086d21b400b70d0aa03e990a355bd13
|
diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -372,7 +372,10 @@ Runner.prototype = {
return next(err);
}
- var env = {};
+ var env = {
+ CI: 'true',
+ STRIDER: 'true',
+ };
if (config.envKeys) {
env.STRIDER_SSH_PUB = config.pubkey;
|
feat: Set default CI environment variables (#<I>)
This allows code being run by the CI server to detect that it's running
inside a CI server.
The `CI` environment variable is exposed by most major CI servers, but
having a vendor specific environment variable (i.e. `STRIDER`) is also
common practice.
For details see issue <URL>
|
Strider-CD_strider-simple-runner
|
train
|
f285ec79448ec28ca9a354a0e6687690336e895d
|
diff --git a/src/sap.ui.fl/src/sap/ui/fl/themes/base/VariantManagement.less b/src/sap.ui.fl/src/sap/ui/fl/themes/base/VariantManagement.less
index <HASH>..<HASH> 100644
--- a/src/sap.ui.fl/src/sap/ui/fl/themes/base/VariantManagement.less
+++ b/src/sap.ui.fl/src/sap/ui/fl/themes/base/VariantManagement.less
@@ -90,4 +90,12 @@
.sapUiFlVarMngmtFavColor {
color: @sapUiButtonIconColor;
+}
+
+.sapUiFlVarMngmtSaveDialog .sapUiFlVarMngmtSaveDialogLabel.sapMLabel::after {
+ content: ":";
+}
+
+.sapUiFlVarMngmtSaveDialog .sapUiFlVarMngmtSaveDialogLabel.sapMLabel::after {
+ content: ":";
}
\ No newline at end of file
diff --git a/src/sap.ui.fl/src/sap/ui/fl/variants/VariantManagement.js b/src/sap.ui.fl/src/sap/ui/fl/variants/VariantManagement.js
index <HASH>..<HASH> 100644
--- a/src/sap.ui.fl/src/sap/ui/fl/variants/VariantManagement.js
+++ b/src/sap.ui.fl/src/sap/ui/fl/variants/VariantManagement.js
@@ -948,6 +948,7 @@ sap.ui.define([
path: "/popoverTitle",
model: VariantManagement.INNER_MODEL_NAME
},
+ titleAlignment: "Center",
contentWidth: "400px",
placement: PlacementType.VerticalPreferredBottom,
content: [
@@ -1058,10 +1059,10 @@ sap.ui.define([
});
var oLabelName = new Label(this.getId() + "-namelabel", {
- text: this._oRb.getText("VARIANT_MANAGEMENT_NAME"),
- required: true
+ text: this._oRb.getText("VARIANT_MANAGEMENT_NAME")
});
oLabelName.setLabelFor(this.oInputName);
+ oLabelName.addStyleClass("sapUiFlVarMngmtSaveDialogLabel");
this.oDefault = new CheckBox(this.getId() + "-default", {
text: this._oRb.getText("VARIANT_MANAGEMENT_SETASDEFAULT"),
@@ -1148,6 +1149,9 @@ sap.ui.define([
this.oInputName.setEnabled(true);
this.oInputName.setValueState(ValueState.None);
this.oInputName.setValueStateText(null);
+
+ this._checkVariantNameConstraints(this.oInputName, this.oSaveSave);
+
this.oDefault.setSelected(false);
this.oExecuteOnSelect.setSelected(false);
@@ -1301,7 +1305,8 @@ sap.ui.define([
width: "14rem"
}), new Column({
header: new Text({
- text: this._oRb.getText("VARIANT_MANAGEMENT_DEFAULT")
+ text: this._oRb.getText("VARIANT_MANAGEMENT_DEFAULT"),
+ wrappingType: "Hyphenated"
}),
width: "4rem",
demandPopin: true,
@@ -1313,7 +1318,8 @@ sap.ui.define([
}
}), new Column({
header: new Text({
- text: this._oRb.getText("VARIANT_MANAGEMENT_EXECUTEONSELECT")
+ text: this._oRb.getText("VARIANT_MANAGEMENT_EXECUTEONSELECT"),
+ wrappingType: "Hyphenated"
}),
width: "6rem",
hAlign: TextAlign.Center,
@@ -1388,7 +1394,7 @@ sap.ui.define([
}.bind(this));
var oSubHeader = new Bar(this.getId() + "-mgmHeaderSearch", {
- contentRight: [
+ contentMiddle: [
this._oSearchFieldOnMgmtDialog
]
});
@@ -1891,4 +1897,4 @@ sap.ui.define([
};
return VariantManagement;
-});
+});
\ No newline at end of file
diff --git a/src/sap.ui.fl/test/sap/ui/fl/qunit/variants/VariantManagement.qunit.js b/src/sap.ui.fl/test/sap/ui/fl/qunit/variants/VariantManagement.qunit.js
index <HASH>..<HASH> 100644
--- a/src/sap.ui.fl/test/sap/ui/fl/qunit/variants/VariantManagement.qunit.js
+++ b/src/sap.ui.fl/test/sap/ui/fl/qunit/variants/VariantManagement.qunit.js
@@ -463,6 +463,7 @@ sap.ui.define([
sinon.stub(this.oVariantManagement.oSaveAsDialog, "open");
this.oVariantManagement._openSaveAsDialog();
+ assert.equal(this.oVariantManagement.oInputName.getValueState(), "Error");
var aItems = this.oVariantManagement._getItems();
assert.ok(aItems);
@@ -471,7 +472,6 @@ sap.ui.define([
this.oVariantManagement._handleVariantSaveAs("1");
assert.ok(bCalled);
assert.ok(oModel._handleSave.calledOnce);
- assert.equal(this.oVariantManagement.oInputName.getValueState(), "None");
this.oVariantManagement._handleVariantSaveAs(" ");
assert.equal(this.oVariantManagement.oInputName.getValueState(), "Error");
|
[INTERNAL] fl.VariantManagement: alignment with comp.VariantMagement
JIRA: CPOUIFDENVER-<I>
Change-Id: I5b<I>e<I>b<I>e<I>b<I>d9e<I>f2f<I>a<I>a3d<I>
|
SAP_openui5
|
train
|
677b8728281ec0c52569113902082082fadd9eb6
|
diff --git a/lib/juici/interface.rb b/lib/juici/interface.rb
index <HASH>..<HASH> 100644
--- a/lib/juici/interface.rb
+++ b/lib/juici/interface.rb
@@ -9,6 +9,10 @@ module Juici
module Routes extend self
NEW_BUILD = '/builds/new'
+ def build_new
+ NEW_BUILD
+ end
+
def build_list(project)
"/builds/#{project}/list"
end
diff --git a/lib/juici/routes.rb b/lib/juici/routes.rb
index <HASH>..<HASH> 100644
--- a/lib/juici/routes.rb
+++ b/lib/juici/routes.rb
@@ -2,6 +2,10 @@ module Juici
module Router
include Routes
+ def build_new_path
+ "/builds/new"
+ end
+
def build_list_path
%r{^/builds/(?<project>[\w\/]+)/list$}
end
diff --git a/lib/juici/server.rb b/lib/juici/server.rb
index <HASH>..<HASH> 100644
--- a/lib/juici/server.rb
+++ b/lib/juici/server.rb
@@ -68,7 +68,7 @@ module Juici
end
end
- post NEW_BUILD do
+ post build_new_path do
build = Controllers::Trigger.new(params[:project], params).build!
@redirect_to = build_url_for(build)
erb(:redirect, {}, {})
diff --git a/spec/juici_router.rb b/spec/juici_router.rb
index <HASH>..<HASH> 100644
--- a/spec/juici_router.rb
+++ b/spec/juici_router.rb
@@ -1,6 +1,11 @@
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
describe ::Juici::Router do
+ it "shoudl match for new_build" do
+ route = Juici::Router.build_new_path
+ route.should == ::Juici::Routes.build_new
+ end
+
it "should match for build_list" do
route = Juici::Router.build_list_path
should.should match ::Juici::Routes.build_list("fo/test/o")
|
Use standard API
retain constant for backward compatibility
|
richo_juici
|
train
|
1f066c5f9cadea7cd6dfebadaa8e97929d66baa4
|
diff --git a/src/ValidationFactory.php b/src/ValidationFactory.php
index <HASH>..<HASH> 100644
--- a/src/ValidationFactory.php
+++ b/src/ValidationFactory.php
@@ -19,21 +19,6 @@ class ValidationFactory
private $dir = '';
/**
- * @var Rules
- */
- private $rules;
-
- /**
- * @var Verify
- */
- private $verify;
-
- /**
- * @var Dio
- */
- private $dio;
-
- /**
* @var FilterInterface
*/
private $filter;
@@ -44,10 +29,7 @@ class ValidationFactory
*/
public function __construct($locale = null, $dir = null)
{
- $this->dir = __DIR__ . '/Locale/';
- if (!is_null($locale)) {
- $this->setLocale($locale, $dir);
- }
+ $this->setLocale($locale, $dir);
}
/**
@@ -57,7 +39,7 @@ class ValidationFactory
public function setLocale($locale = null, $dir = null)
{
$this->locale = $locale ?: $this->locale;
- $this->dir = $dir ?: $this->dir;
+ $this->dir = $dir ?: __DIR__ . '/Locale/';
$this->dir = rtrim($this->dir, '/') . '/';
$this->factory();
@@ -77,10 +59,7 @@ class ValidationFactory
*/
public function on(array $data = [])
{
- if (!$this->dio) {
- $this->factory();
- }
- $dio = clone($this->dio);
+ $dio = $this->factory();
$dio->source($data);
return $dio;
@@ -91,21 +70,18 @@ class ValidationFactory
*/
public function verify()
{
- if (!$this->verify) {
- $this->factory();
- }
-
- return $this->verify;
+ return new Verify(
+ $this->filter ?: new Filter(),
+ new ValueTO(new Message($this->locale, $this->dir))
+ );
}
+ /**
+ * @return Dio
+ */
private function factory()
{
- $this->rules = $this->rules();
- $this->verify = new Verify(
- $this->filter ?: new Filter(),
- new ValueTO(new Message($this->locale, $this->dir))
- );
- $this->dio = new Dio($this->verify, $this->rules);
+ return new Dio($this->verify(), $this->rules());
}
/**
|
clean up factory code. always return a new object.
(except for filters that are set).
|
WScore_Validation
|
train
|
ee07ebd0c89f55acbaa49147de4e142502cd99f0
|
diff --git a/src/test/QafooLabs/Refactoring/Domain/Model/PhpClassNameTest.php b/src/test/QafooLabs/Refactoring/Domain/Model/PhpClassNameTest.php
index <HASH>..<HASH> 100644
--- a/src/test/QafooLabs/Refactoring/Domain/Model/PhpClassNameTest.php
+++ b/src/test/QafooLabs/Refactoring/Domain/Model/PhpClassNameTest.php
@@ -10,5 +10,6 @@ class PhpClassNameTest extends \PHPUnit_Framework_TestCase
$this->assertEquals("PhpClassNameTest", $className->getShortname());
$this->assertEquals("QafooLabs\Refactoring\Domain\Model", $className->getNamespace());
+ $this->assertEquals("QafooLabs\Refactoring\Domain\Model\PhpClassNameTest", $className->getName());
}
}
|
PhpClassNameTest: add assertion for getName()
|
QafooLabs_php-refactoring-browser
|
train
|
e3527f605995e2ddaed0ec35e9077251bbcca90d
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,7 @@ CHANGE LOG
* Re-write using transformers
* Tweaked the limit functionality
+* Made the throtter countable
* Improved the test suite
* Added cs fixers to the test suite
* Improved the docs
diff --git a/src/Throttlers/CacheThrottler.php b/src/Throttlers/CacheThrottler.php
index <HASH>..<HASH> 100644
--- a/src/Throttlers/CacheThrottler.php
+++ b/src/Throttlers/CacheThrottler.php
@@ -16,6 +16,7 @@
namespace GrahamCampbell\Throttle\Throttlers;
+use Countable;
use Illuminate\Cache\StoreInterface;
/**
@@ -25,7 +26,7 @@ use Illuminate\Cache\StoreInterface;
* @copyright 2013-2014 Graham Campbell
* @license <https://github.com/GrahamCampbell/Laravel-Throttle/blob/master/LICENSE.md> Apache 2.0
*/
-class CacheThrottler implements ThrottlerInterface
+class CacheThrottler implements ThrottlerInterface, Countable
{
/**
* The store instance.
diff --git a/tests/Throttlers/CacheThrottlerTest.php b/tests/Throttlers/CacheThrottlerTest.php
index <HASH>..<HASH> 100644
--- a/tests/Throttlers/CacheThrottlerTest.php
+++ b/tests/Throttlers/CacheThrottlerTest.php
@@ -117,6 +117,16 @@ class CacheThrottlerTest extends AbstractTestCase
$this->assertFalse($return);
}
+ public function testIsCountable()
+ {
+ $throttler = $this->getThrottler();
+
+ $throttler->getStore()->shouldReceive('get')->once()->with('abc')->andReturn(42);
+
+ $this->assertInstanceOf('Countable', $throttler);
+ $this->assertCount(42, $throttler);
+ }
+
protected function getThrottler()
{
$store = Mockery::mock('Illuminate\Cache\StoreInterface');
|
Made the throtter countable
|
GrahamCampbell_Laravel-Throttle
|
train
|
d8965254ed605673a716392caa00a02dd0b56976
|
diff --git a/lib/zookeeper/common/queue_with_pipe.rb b/lib/zookeeper/common/queue_with_pipe.rb
index <HASH>..<HASH> 100644
--- a/lib/zookeeper/common/queue_with_pipe.rb
+++ b/lib/zookeeper/common/queue_with_pipe.rb
@@ -82,6 +82,17 @@ module Common
nil
end
+ def open
+ @mutex.lock
+ begin
+ return unless @closed
+ @closed = @graceful = false
+ @cond.broadcast
+ ensure
+ @mutex.unlock rescue nil
+ end
+ end
+
def close
@mutex.lock
begin
|
add a method to reopen the pipe after its been closed
|
zk-ruby_zookeeper
|
train
|
10fd67bf84118affc9269ca0c0dbc8da4b0bf2cd
|
diff --git a/arguments.go b/arguments.go
index <HASH>..<HASH> 100644
--- a/arguments.go
+++ b/arguments.go
@@ -25,7 +25,16 @@ import "strings"
type Arguments []string
+// Matches performs an case-insensitive, out-of-order check that the items
+// provided exist and equal all of the args in arguments.
+// Note:
+// - Providing a list that includes duplicate string-case items will return not
+// matched.
func (r Arguments) Matches(items ...string) bool {
+ if len(r) != len(items) {
+ return false
+ }
+
found := make(map[string]bool)
for _, item := range items {
if !StringInSlice(item, r) {
@@ -34,9 +43,11 @@ func (r Arguments) Matches(items ...string) bool {
found[item] = true
}
- return len(found) == len(r) && len(r) == len(items)
+ return len(found) == len(r)
}
+// Has checks, in a case-insensitive manner, that all of the items
+// provided exists in arguments.
func (r Arguments) Has(items ...string) bool {
for _, item := range items {
if !StringInSlice(item, r) {
@@ -47,6 +58,8 @@ func (r Arguments) Has(items ...string) bool {
return true
}
+// HasOneOf checks, in a case-insensitive manner, that one of the items
+// provided exists in arguments.
func (r Arguments) HasOneOf(items ...string) bool {
for _, item := range items {
if StringInSlice(item, r) {
@@ -62,18 +75,24 @@ func (r Arguments) Exact(name string) bool {
return name == strings.Join(r, " ")
}
+// ExactOne checks, by string case, that a single argument equals the provided
+// string.
func (r Arguments) ExactOne(name string) bool {
return len(r) == 1 && r[0] == name
}
+// MatchesExact checks, by order and string case, that the items provided equal
+// those in arguments.
func (r Arguments) MatchesExact(items ...string) bool {
if len(r) != len(items) {
return false
}
+
for i, item := range items {
if item != r[i] {
return false
}
}
- return false
+
+ return true
}
diff --git a/arguments_test.go b/arguments_test.go
index <HASH>..<HASH> 100644
--- a/arguments_test.go
+++ b/arguments_test.go
@@ -151,6 +151,21 @@ type matchesTestCase struct {
var matchesTests = []matchesTestCase{
{
+ args: Arguments{},
+ is: []string{},
+ expect: true,
+ },
+ {
+ args: Arguments{"foo", "bar"},
+ is: []string{"foo", "bar"},
+ expect: true,
+ },
+ {
+ args: Arguments{"Foo", "Bar"},
+ is: []string{"Foo", "Bar"},
+ expect: true,
+ },
+ {
args: Arguments{"foo", "foo"},
is: []string{"foo"},
expect: false,
@@ -189,11 +204,24 @@ var matchesTests = []matchesTestCase{
func TestArgumentsMatchesExact(t *testing.T) {
testCases := append(matchesTests, []matchesTestCase{
+ // should fail if items are out of order
{
args: Arguments{"foo", "bar"},
is: []string{"bar", "foo"},
expect: false,
},
+ // should fail due to case-sensitivity.
+ {
+ args: Arguments{"fOo", "bar"},
+ is: []string{"foo", "BaR"},
+ expect: false,
+ },
+ // duplicate items should return allowed.
+ {
+ args: Arguments{"foo", "foo"},
+ is: []string{"foo", "foo"},
+ expect: true,
+ },
}...)
for k, c := range testCases {
assert.Equal(t, c.expect, c.args.MatchesExact(c.is...), "%d", k)
@@ -203,16 +231,29 @@ func TestArgumentsMatchesExact(t *testing.T) {
func TestArgumentsMatches(t *testing.T) {
testCases := append(matchesTests, []matchesTestCase{
+ // should match if items are out of order.
{
args: Arguments{"foo", "bar"},
- is: []string{"foo", "bar"},
+ is: []string{"bar", "foo"},
expect: true,
},
+ // should allow case-insensitive matching.
{
- args: Arguments{"foo", "bar"},
- is: []string{"bar", "foo"},
+ args: Arguments{"fOo", "bar"},
+ is: []string{"foo", "BaR"},
expect: true,
},
+ // should return non-matching if duplicate items exist.
+ {
+ args: Arguments{"foo", "bar"},
+ is: []string{"FOO", "FOO", "bar"},
+ expect: false,
+ },
+ {
+ args: Arguments{"foo", "foo"},
+ is: []string{"foo", "foo"},
+ expect: false,
+ },
}...)
for k, c := range testCases {
assert.Equal(t, c.expect, c.args.Matches(c.is...), "%d", k)
|
fix(arguments): fixes a logic bug in MatchesExact and adds documentation (#<I>)
|
ory_fosite
|
train
|
d3756e4babbc16692b1892138746ab33ce984aea
|
diff --git a/src/QueryBuilder.php b/src/QueryBuilder.php
index <HASH>..<HASH> 100644
--- a/src/QueryBuilder.php
+++ b/src/QueryBuilder.php
@@ -100,6 +100,7 @@ class QueryBuilder extends \yii\base\Object
'eq' => 'buildEqCondition',
'ne' => 'buildNotEqCondition',
'in' => 'buildInCondition',
+ 'ni' => 'buildNotInCondition',
'like' => 'buildLikeCondition',
'gt' => 'buildCompareCondition',
'ge' => 'buildCompareCondition',
@@ -177,7 +178,7 @@ class QueryBuilder extends \yii\base\Object
throw new NotSupportedException('Between condition is not supported by HiArt.');
}
- private function buildInCondition($operator, $operands)
+ private function buildInCondition($operator, $operands, $not = false)
{
if (!isset($operands[0], $operands[1])) {
throw new InvalidParamException("Operator '$operator' requires two operands.");
@@ -200,7 +201,17 @@ class QueryBuilder extends \yii\base\Object
}
}
- return [$column . '_in' => $values];
+ if ($not) {
+ $key = $column . '_ni'; // not in
+ } else {
+ $key = $column . '_in';
+ }
+ return [$key => $values];
+ }
+
+ private function buildNotInCondition($operator, $operands)
+ {
+ return $this->buildInCondition($operator, $operands, true);
}
private function buildEqCondition($operator, $operands)
|
Added `ni` condition
|
hiqdev_yii2-hiart
|
train
|
a17a626ab843d522fad5e3d2e22cec5519661e55
|
diff --git a/internal/encoding/ssh/filexfer/attrs.go b/internal/encoding/ssh/filexfer/attrs.go
index <HASH>..<HASH> 100644
--- a/internal/encoding/ssh/filexfer/attrs.go
+++ b/internal/encoding/ssh/filexfer/attrs.go
@@ -74,7 +74,6 @@ func (a *Attributes) SetPermissions(perms FileMode) {
// GetACModTime returns the ATime and MTime fields and a bool that is true if and only if the values are valid/defined.
func (a *Attributes) GetACModTime() (atime, mtime uint32, ok bool) {
return a.ATime, a.MTime, a.Flags&AttrACModTime != 0
- return a.ATime, a.MTime, a.Flags&AttrACModTime != 0
}
// SetACModTime is a convenience function that sets the ATime and MTime fields,
|
Remove unreachable and duplicated return statement
I’m confident blame will show I am at fault here, not sure how I missed it, and why `go vet` didn’t catch it either.
|
pkg_sftp
|
train
|
afc64aae16a730354f2bc40ad03db97f15b2325b
|
diff --git a/src/Core/Database/Connector/Connector.php b/src/Core/Database/Connector/Connector.php
index <HASH>..<HASH> 100644
--- a/src/Core/Database/Connector/Connector.php
+++ b/src/Core/Database/Connector/Connector.php
@@ -26,7 +26,7 @@ abstract class Connector
try {
return new PDO($dsn, $username, $password, $options);
} catch (Exception $exception) {
- return $this->tryAgainLostConnection($exception, $dsn, $username, $password, $options);
+ return $this->tryAgainLostConnection($exception, $dsn, $config);
}
}
@@ -64,8 +64,13 @@ abstract class Connector
*/
protected function tryAgainLostConnection(Exception $exception, $dsn, $config)
{
+
+ $username = arr_get($config, "username");
+ $password = arr_get($config, "password");
+ $options = arr_get($config, "options", array());
+
if ($this->isErrorLostConnection($exception)) {
- return new PDO($dsn, $config["username"], $config["password"], $config["options"]);
+ return new PDO($dsn, $username, $password, $options);
} else {
throw $exception;
}
|
The call to Connector::tryAgainLostConnection() has too many arguments
|
jayacode_framework
|
train
|
a6912d09719fae3df3d2634d4da92c52b110700a
|
diff --git a/hazelcast/src/main/java/com/hazelcast/partition/impl/InternalPartitionServiceImpl.java b/hazelcast/src/main/java/com/hazelcast/partition/impl/InternalPartitionServiceImpl.java
index <HASH>..<HASH> 100644
--- a/hazelcast/src/main/java/com/hazelcast/partition/impl/InternalPartitionServiceImpl.java
+++ b/hazelcast/src/main/java/com/hazelcast/partition/impl/InternalPartitionServiceImpl.java
@@ -1017,8 +1017,11 @@ public class InternalPartitionServiceImpl implements InternalPartitionService, M
}
private boolean hasOnGoingMigrationMaster(Level level) {
- Operation operation = new HasOngoingMigration();
Address masterAddress = node.getMasterAddress();
+ if (masterAddress == null) {
+ return true;
+ }
+ Operation operation = new HasOngoingMigration();
OperationService operationService = nodeEngine.getOperationService();
InvocationBuilder invocationBuilder = operationService.createInvocationBuilder(SERVICE_NAME, operation,
masterAddress);
|
Fixed java.lang.IllegalArgumentException: Target cannot be null!. This was caused by the node.masterAddress being null.
This can happen after a cluster merge in the window of time between masterAddresses being cleared and repopulated.
|
hazelcast_hazelcast
|
train
|
f600918ee9ff412803d57b7055978d6ecfabf659
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -22,7 +22,7 @@ function hasFlex(decl) {
}
-function addgap(decl, opts) {
+function addGap(decl, opts) {
const container = decl.parent;
@@ -237,7 +237,7 @@ export default postcss.plugin("postcss-gap", (opts) => {
if (declTwo.prop === "display") {
if (declTwo.value !== "grid") {
- addgap(decl, webComponents);
+ addGap(decl, webComponents);
}
}
});
|
Fixed naming of function addGap
|
limitlessloop_postcss-gutters
|
train
|
c67faf1ff58607d9bc7dd46b9a3215a41911ad92
|
diff --git a/lib/tools/system-calls.js b/lib/tools/system-calls.js
index <HASH>..<HASH> 100644
--- a/lib/tools/system-calls.js
+++ b/lib/tools/system-calls.js
@@ -20,6 +20,7 @@ const DEVICE_NOT_FOUND_ERROR_REGEXP = new RegExp(`error: device ('.+' )?not foun
const DEVICE_CONNECTING_ERROR_REGEXP = new RegExp('error: device still connecting', 'i');
const CERTS_ROOT = '/system/etc/security/cacerts';
+const EMU_STOP_TIMEOUT = 2000;
/**
* Retrieve full path to the given binary.
@@ -772,40 +773,51 @@ systemCallMethods.waitForDevice = async function (appDeviceReadyTimeout = 30) {
};
/**
+ * Tries to run privileged shell command without root, otherwise elevates privileges and run them again
+ *
+ * @param {args} Array<string> - Shell arguments commands
+ * @return {boolean} True if the device was switched to root.
+ */
+systemCallMethods.runPrivilegedShell = async function (args) {
+ let isRoot = false;
+ try {
+ await this.shell(args);
+ } catch (err) {
+ if (!err.message.includes('must be root')) {
+ throw err;
+ }
+ // this device needs adb to be running as root to run the specific command.
+ // so try to restart the daemon
+ log.debug(`Device requires adb to be running as root in order to run "adb shell ${quote(args)}". Restarting daemon`);
+ isRoot = await this.root();
+ await this.shell(args);
+ }
+ return isRoot;
+};
+
+/**
* Reboot the current device and wait until it is completed.
*
* @param {number} retries [DEFAULT_ADB_REBOOT_RETRIES] - The maximum number of reboot retries.
* @throws {Error} If the device failed to reboot and number of retries is exceeded.
*/
systemCallMethods.reboot = async function (retries = DEFAULT_ADB_REBOOT_RETRIES) {
- try {
- try {
- await this.shell(['stop']);
- } catch (err) {
- if (err.message.indexOf('must be root') === -1) {
- throw err;
- }
- // this device needs adb to be running as root to stop.
- // so try to restart the daemon
- log.debug('Device requires adb to be running as root in order to reboot. Restarting daemon');
- await this.root();
- await this.shell(['stop']);
+ let isRoot = false;
+ isRoot = await this.runPrivilegedShell(['stop']);
+ // TODO - Figure out a condition in adb so we can replace this static delay with a waitForCondition
+ await B.delay(EMU_STOP_TIMEOUT); // let the emu finish stopping;
+ isRoot = await this.runPrivilegedShell(['setprop', 'sys.boot_completed', 0]);
+ isRoot = await this.runPrivilegedShell(['start']);
+ await retryInterval(retries, 1000, async () => {
+ if ((await this.getDeviceProperty('sys.boot_completed')) === '1') {
+ return;
}
- await B.delay(2000); // let the emu finish stopping;
- await this.setDeviceProperty('sys.boot_completed', 0);
- await this.shell(['start']);
- await retryInterval(retries, 1000, async () => {
- let booted = await this.getDeviceProperty('sys.boot_completed');
- if (booted === '1') {
- return;
- } else {
- // we don't want the stack trace, so no log.errorAndThrow
- let msg = 'Waiting for reboot. This takes time';
- log.debug(msg);
- throw new Error(msg);
- }
- });
- } finally {
+ // we don't want the stack trace, so no log.errorAndThrow
+ let msg = 'Waiting for reboot. This takes time';
+ log.debug(msg);
+ throw new Error(msg);
+ });
+ if (isRoot) {
await this.unroot();
}
};
@@ -840,6 +852,7 @@ systemCallMethods.root = async function () {
*/
systemCallMethods.unroot = async function () {
try {
+ log.debug("Removing root privileges. Restarting adb daemon");
await exec(this.executable.path, ['unroot']);
return true;
} catch (err) {
|
Fix reboot for Android O, run as root (#<I>)
|
appium_appium-adb
|
train
|
c4387157e51741e9830f82027bd3613b7ed3e1a1
|
diff --git a/lib/ohai/plugins/erlang.rb b/lib/ohai/plugins/erlang.rb
index <HASH>..<HASH> 100644
--- a/lib/ohai/plugins/erlang.rb
+++ b/lib/ohai/plugins/erlang.rb
@@ -19,6 +19,7 @@
provides "languages/erlang"
require_plugin "languages"
+require_plugin "platform"
output = nil
|
Add dependency on platform plugin for erlang
|
chef_ohai
|
train
|
98f75b8d8def707fb05500a79c5a8e42e610a55a
|
diff --git a/src/java/com/samskivert/util/ArrayIntSet.java b/src/java/com/samskivert/util/ArrayIntSet.java
index <HASH>..<HASH> 100644
--- a/src/java/com/samskivert/util/ArrayIntSet.java
+++ b/src/java/com/samskivert/util/ArrayIntSet.java
@@ -22,7 +22,6 @@ package com.samskivert.util;
import java.io.Serializable;
-import java.util.AbstractSet;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
@@ -232,7 +231,7 @@ public class ArrayIntSet extends AbstractIntSet
}
}
- public void remove () {
+ @Override public void remove () {
if (_pos == 0) {
throw new IllegalStateException();
}
diff --git a/src/java/com/samskivert/util/HashIntMap.java b/src/java/com/samskivert/util/HashIntMap.java
index <HASH>..<HASH> 100644
--- a/src/java/com/samskivert/util/HashIntMap.java
+++ b/src/java/com/samskivert/util/HashIntMap.java
@@ -396,7 +396,7 @@ public class HashIntMap<V> extends AbstractMap<Integer,V>
// damn Sun bastards made the 'keySet' variable with default access, so we can't share it
if (_keySet == null) {
_keySet = new AbstractIntSet() {
- @Override public Interator interator () {
+ public Interator interator () {
return new AbstractInterator () {
public boolean hasNext () {
return i.hasNext();
@@ -404,7 +404,7 @@ public class HashIntMap<V> extends AbstractMap<Integer,V>
public int nextInt () {
return i.next().getIntKey();
}
- public void remove () {
+ @Override public void remove () {
i.remove();
}
private Iterator<IntEntry<V>> i = intEntrySet().iterator();
diff --git a/src/java/com/samskivert/util/IntIntMap.java b/src/java/com/samskivert/util/IntIntMap.java
index <HASH>..<HASH> 100644
--- a/src/java/com/samskivert/util/IntIntMap.java
+++ b/src/java/com/samskivert/util/IntIntMap.java
@@ -313,7 +313,7 @@ public class IntIntMap
public IntSet keySet ()
{
return new AbstractIntSet() {
- @Override public Interator interator () {
+ public Interator interator () {
return IntIntMap.this.keys();
}
@@ -321,11 +321,11 @@ public class IntIntMap
return IntIntMap.this.size();
}
- public boolean contains (int t) {
+ @Override public boolean contains (int t) {
return IntIntMap.this.containsKey(t);
}
- public boolean remove (int value) {
+ @Override public boolean remove (int value) {
// we have to check for presence in the map separately because we have no "not in
// the set" return value
if (!IntIntMap.this.containsKey(value)) {
@@ -566,7 +566,7 @@ public class IntIntMap
return _eiter.hasNext();
}
- public void remove () {
+ @Override public void remove () {
_eiter.remove();
}
|
Per Dave Hoover:
- <I> compatibility: Removed @Override on some methods that
had no concrete implementation in superclasses.
- Added @Override to a few places where there does happen to
be an implementation in our abstract superclass.
I now think the <I> behavior of @Override is more sensible, it's
less special-casey.
git-svn-id: <URL>
|
samskivert_samskivert
|
train
|
fbe4ced186b4e034f0dbd7a5b6d030f3c009ec2b
|
diff --git a/azurerm/resource_arm_cognitive_account.go b/azurerm/resource_arm_cognitive_account.go
index <HASH>..<HASH> 100644
--- a/azurerm/resource_arm_cognitive_account.go
+++ b/azurerm/resource_arm_cognitive_account.go
@@ -241,9 +241,7 @@ func resourceArmCognitiveAccountRead(d *schema.ResourceData, meta interface{}) e
return err
}
- if key1 := keys.Key1; key1 != nil {
- d.Set("key1", key1)
- }
+ d.Set("key1", keys.Key1)
if key2 := keys.Key2; key2 != nil {
d.Set("key2", key2)
|
removed nil check on key1 as handled by d.set()
|
terraform-providers_terraform-provider-azurerm
|
train
|
0f69b14a6b9427a8723773b94fad47fe148afdb1
|
diff --git a/app/models/camaleon_cms/custom_field.rb b/app/models/camaleon_cms/custom_field.rb
index <HASH>..<HASH> 100644
--- a/app/models/camaleon_cms/custom_field.rb
+++ b/app/models/camaleon_cms/custom_field.rb
@@ -7,6 +7,7 @@
See the GNU Affero General Public License (GPLv3) for more details.
=end
class CamaleonCms::CustomField < ActiveRecord::Base
+ self.primary_key = :id
include CamaleonCms::Metas
has_many :metas, ->{ where(object_class: 'CustomField')}, :class_name => "CamaleonCms::Meta", foreign_key: :objectid, dependent: :destroy
self.table_name = "#{PluginRoutes.static_system_info["db_prefix"]}custom_fields"
diff --git a/app/models/camaleon_cms/custom_field_group.rb b/app/models/camaleon_cms/custom_field_group.rb
index <HASH>..<HASH> 100644
--- a/app/models/camaleon_cms/custom_field_group.rb
+++ b/app/models/camaleon_cms/custom_field_group.rb
@@ -7,6 +7,7 @@
See the GNU Affero General Public License (GPLv3) for more details.
=end
class CamaleonCms::CustomFieldGroup < CamaleonCms::CustomField
+ self.primary_key = :id
# attrs required: name, slug, description
default_scope { where.not(object_class: '_fields').reorder("#{CamaleonCms::CustomField.table_name}.field_order ASC") }
|
fixed missing primary key (postgres)
|
owen2345_camaleon-cms
|
train
|
40091e6c4e0b97e73cfe35d6092b9faa49bb3704
|
diff --git a/filesystem/File.php b/filesystem/File.php
index <HASH>..<HASH> 100755
--- a/filesystem/File.php
+++ b/filesystem/File.php
@@ -48,7 +48,7 @@ class File extends DataObject {
* @var array
*/
public static $allowed_extensions = array(
- 'html','htm','xhtml','js','css',
+ '','html','htm','xhtml','js','css',
'bmp','png','gif','jpg','jpeg','ico','pcx','tif','tiff',
'au','mid','midi','mpa','mp3','ogg','m4a','ra','wma','wav','cda',
'avi','mpg','mpeg','asf','wmv','m4v','mov','mkv','mp4','swf','flv','ram','rm',
|
BUGFIX Allow files with no extensions by setting File::$allowed_extensions with an empty string (from r<I>)
git-svn-id: svn://svn.silverstripe.com/silverstripe/open/modules/sapphire/trunk@<I> <I>b<I>ca-7a2a-<I>-9d3b-<I>d<I>a<I>a9
|
silverstripe_silverstripe-framework
|
train
|
edcec370bde9d97d49428478cef01913b61dd118
|
diff --git a/lib/openapi3_parser/nodes/components.rb b/lib/openapi3_parser/nodes/components.rb
index <HASH>..<HASH> 100644
--- a/lib/openapi3_parser/nodes/components.rb
+++ b/lib/openapi3_parser/nodes/components.rb
@@ -4,41 +4,53 @@ require "openapi3_parser/node/object"
module Openapi3Parser
module Nodes
+ # @see https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.0.md#componentsObject
class Components
include Node::Object
+ # @return [Map] a map of String: {Schema}[../Schema.html] objects
def schemas
node_data["schemas"]
end
+ # @return [Map] a map of String: {Response}[./Response.html] objects
def responses
node_data["responses"]
end
+ # @return [Map] a map of String: {Parameter}[./Parameter.html] objects
def parameters
node_data["parameters"]
end
+ # @return [Map] a map of String: {Example}[../Example.html] objects
def examples
node_data["examples"]
end
+ # @return [Map] a map of String: {RequestBody}[./RequestBody.html]
+ # objects
def request_bodies
node_data["requestBodies"]
end
+ # @return [Map] a map of String: {Header}[./Header.html] objects
def headers
node_data["headers"]
end
+ # @return [Map] a map of String: {SecurityScheme}[./SecurityScheme.html]
+ # objects
def security_schemes
node_data["securitySchemes"]
end
+ # @return [Map] a map of String: {Link}[./Link.html] objects
def links
node_data["links"]
end
+ # @return [Map] a map of String: {Callback}[./Callback.html] objects
def callbacks
node_data["callbacks"]
end
|
Documentation for Components node
|
kevindew_openapi3_parser
|
train
|
452fbf2799df55c15a530a2fb477f0afb6de8021
|
diff --git a/Stub/StubIntlDateFormatter.php b/Stub/StubIntlDateFormatter.php
index <HASH>..<HASH> 100644
--- a/Stub/StubIntlDateFormatter.php
+++ b/Stub/StubIntlDateFormatter.php
@@ -179,8 +179,17 @@ class StubIntlDateFormatter
}
// behave like the intl extension
+ $argumentError = null;
if (!is_int($timestamp) && version_compare(\PHP_VERSION, '5.3.4', '<')) {
- StubIntl::setError(StubIntl::U_ILLEGAL_ARGUMENT_ERROR, 'datefmt_format: takes either an array or an integer timestamp value ');
+ $argumentError = 'datefmt_format: takes either an array or an integer timestamp value ';
+ } elseif (!is_int($timestamp) && !$timestamp instanceOf \DateTime && version_compare(\PHP_VERSION, '5.3.4', '>=')) {
+ $argumentError = 'datefmt_format: takes either an array or an integer timestamp value or a DateTime object';
+ }
+
+ if (null !== $argumentError) {
+ StubIntl::setError(StubIntl::U_ILLEGAL_ARGUMENT_ERROR, $argumentError);
+ $this->errorCode = StubIntl::getErrorCode();
+ $this->errorMessage = StubIntl::getErrorMessage();
return false;
}
@@ -193,6 +202,11 @@ class StubIntlDateFormatter
$transformer = new FullTransformer($this->getPattern(), $this->getTimeZoneId());
$formatted = $transformer->format($this->createDateTime($timestamp));
+ // behave like the intl extension
+ StubIntl::setError(StubIntl::U_ZERO_ERROR);
+ $this->errorCode = StubIntl::getErrorCode();
+ $this->errorMessage = StubIntl::getErrorMessage();
+
return $formatted;
}
@@ -359,6 +373,7 @@ class StubIntlDateFormatter
$timestamp = $transformer->parse($dateTime, $value);
+ // behave like the intl extension. FullTransformer::parse() set the proper error
if (false === $timestamp) {
$this->errorCode = StubIntl::getErrorCode();
$this->errorMessage = StubIntl::getErrorMessage();
|
[Locale] fixed StubIntlDateFormatter::format() to set the right error for PHP >= <I> and to behave like the intl when formatting successfully
|
symfony_locale
|
train
|
5f8b5bf8ec7fe816b2f6f1b766f6edbd38ff03da
|
diff --git a/src/main/java/io/dropwizard/flyway/cli/DbCommand.java b/src/main/java/io/dropwizard/flyway/cli/DbCommand.java
index <HASH>..<HASH> 100644
--- a/src/main/java/io/dropwizard/flyway/cli/DbCommand.java
+++ b/src/main/java/io/dropwizard/flyway/cli/DbCommand.java
@@ -34,6 +34,7 @@ public class DbCommand<T extends Configuration> extends AbstractFlywayCommand<T>
@Override
public void configure(final Subparser subparser) {
+ super.configure(subparser);
for (AbstractFlywayCommand<T> subCommand : subCommands.values()) {
final Subparser cmdParser = subparser.addSubparsers()
.addParser(subCommand.getName())
|
Add missing call to DbCommand#configure
|
dropwizard_dropwizard-flyway
|
train
|
6f0896531705016011abf73473e0fc330afebc63
|
diff --git a/loguru/__init__.py b/loguru/__init__.py
index <HASH>..<HASH> 100644
--- a/loguru/__init__.py
+++ b/loguru/__init__.py
@@ -82,36 +82,29 @@ class StrRecord(str):
class Handler:
- def __init__(self, *, writter, level, format, filter, colored, better_exceptions):
+ def __init__(self, *, writter, level, format_, filter, colored, better_exceptions):
self.writter = writter
self.level = level
- self.format = format
+ self.format = format_
self.filter = filter
self.colored = colored
self.better_exceptions = better_exceptions
- self.formats_per_level = self.generate_formats(format, colored)
+ self.formats_per_level = self.generate_formats(format_, colored)
self.exception_formatter = ExceptionFormatter(colored=colored)
@staticmethod
- def generate_formats(format, colored):
+ def generate_formats(format_, colored):
formats_per_level = {}
for level_name, level_color in LEVELS_COLORS.items():
color = ansimarkup.parse(level_color)
custom_markup = dict(level=color, lvl=color)
am = ansimarkup.AnsiMarkup(tags=custom_markup)
- formats_per_level[level_name] = am.parse(format) if colored else am.strip(format)
+ formats_per_level[level_name] = am.parse(format_) if colored else am.strip(format_)
return formats_per_level
- # def format_exception(self, type, value, tb):
- # ...
-
- # def handle(self, record):
- # loguru_record = RecordUtils.to_loguru_record(record)
- # self.emit(loguru_record)
-
def emit(self, record):
level = record['level']
if self.level > level.no:
@@ -513,7 +506,7 @@ class Logger:
handler = Handler(
writter=writter,
level=level,
- format=format,
+ format_=format,
filter=filter,
colored=colored,
better_exceptions=better_exceptions,
|
No longer shadow built-in "format" in private functions
|
Delgan_loguru
|
train
|
9194c5d8ad73bb4b4b975bb351b2e60d4f787198
|
diff --git a/Manager/WidgetsManager.php b/Manager/WidgetsManager.php
index <HASH>..<HASH> 100644
--- a/Manager/WidgetsManager.php
+++ b/Manager/WidgetsManager.php
@@ -146,7 +146,7 @@ class WidgetsManager
);
/** @var AbstractWidget $replacedWidget */
$replacedWidget = $this->entityManager->getRepository('IcapPortfolioBundle:Widget\AbstractWidget')->findOneBy($replacedWidgetParameters);
- $replacedWidget->setRow(--$newRow);
+ $replacedWidget->setRow($originalRow);
$this->entityManager->persist($replacedWidget);
}
|
[PortfolioBundle] Fix setting the new row of a replaced widget
|
claroline_Distribution
|
train
|
27c60e131057ce97435abd0618ea298e97f71085
|
diff --git a/core/array.rb b/core/array.rb
index <HASH>..<HASH> 100644
--- a/core/array.rb
+++ b/core/array.rb
@@ -120,8 +120,9 @@ class Array
if (typeof index !== 'number') {
if (index.o$flags & T_RANGE) {
- length = index.end;
- index = index.begin;
+ var exclude = index.exclude;
+ length = index.end;
+ index = index.begin;
if (index > size) {
return nil;
@@ -131,7 +132,8 @@ class Array
length += size;
}
- return this.slice(index, length + 1);
+ if (!exclude) length += 1;
+ return this.slice(index, length);
}
else {
throw RubyException.$new('bad arg for Array#[]');
diff --git a/core_spec/core/array/element_reference_spec.rb b/core_spec/core/array/element_reference_spec.rb
index <HASH>..<HASH> 100644
--- a/core_spec/core/array/element_reference_spec.rb
+++ b/core_spec/core/array/element_reference_spec.rb
@@ -145,6 +145,40 @@ describe "Array#[]" do
a.should == [1, 2, 3, 4]
end
+ it "returns elements specified by Range indexes except the lement at index n with [m...n]" do
+ [ "a", "b", "c", "d", "e" ][1...3].should == ["b", "c"]
+
+ a = [1, 2, 3, 4]
+
+ a[0...-10].should == []
+ a[0...0].should == []
+ a[0...1].should == [1]
+ a[0...2].should == [1, 2]
+ a[0...3].should == [1, 2, 3]
+ a[0...4].should == [1, 2, 3, 4]
+ a[0...10].should == [1, 2, 3, 4]
+
+ a[2...-10].should == []
+ a[2...0].should == []
+ a[2...2].should == []
+ a[2...3].should == [3]
+ a[2...4].should == [3, 4]
+
+ a[3...0].should == []
+ a[3...3].should == []
+ a[3...4].should == [4]
+
+ a[4...0].should == []
+ a[4...4].should == []
+ a[4...5].should == []
+
+ a[5...0].should == nil
+ a[5...5].should == nil
+ a[5...6].should == nil
+
+ a.should == [1, 2, 3, 4]
+ end
+
it "returns nil for a requested index not in the array with [index]" do
[ "a", "b", "c", "d", "e" ][5].should == nil
end
|
Add specs for '...' ranges in Array#[] and Array#slice
|
opal_opal
|
train
|
ec95ca98a53b8e347d82c133bb843063ea4883fb
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -111,7 +111,7 @@ hexo.extend.filter.register('before_generate', function() {
return
}
return vow.all(_.map(options.profiles, function(profile, profileName) {
- post[profileName + '_cover'] = url.resolve(
+ post[profileName + 'Cover'] = url.resolve(
path.dirname(post.cover),
profileName + '-' + path.basename(post.cover)
)
|
changed frontmatter key to camelcase
|
leviwheatcroft_hexo-thumbnails
|
train
|
bc9673efe03a42587fd6ff0c14b5f0d3343659f4
|
diff --git a/src/Models/BaseServiceConfigModel.php b/src/Models/BaseServiceConfigModel.php
index <HASH>..<HASH> 100644
--- a/src/Models/BaseServiceConfigModel.php
+++ b/src/Models/BaseServiceConfigModel.php
@@ -105,6 +105,10 @@ abstract class BaseServiceConfigModel extends BaseModel implements ServiceConfig
if ($schema) {
$out = [];
foreach ($schema->columns as $name => $column) {
+ // Skip if column is hidden
+ if (in_array($name, $model->getHidden())) {
+ continue;
+ }
/** @var ColumnSchema $column */
if (('service_id' === $name) || $column->autoIncrement) {
continue;
@@ -144,7 +148,7 @@ abstract class BaseServiceConfigModel extends BaseModel implements ServiceConfig
} else {
$newRules = [];
foreach ($config as $key => $value) {
- if(array_key_exists($key, $rules)){
+ if (array_key_exists($key, $rules)) {
$newRules[$key] = $rules[$key];
}
}
|
hiding hidden column/fields on service config models schema
|
dreamfactorysoftware_df-core
|
train
|
2196c92c0ef4e3d3622d5a8bb549e5faed135382
|
diff --git a/modules/caddyhttp/reverseproxy/upstreams.go b/modules/caddyhttp/reverseproxy/upstreams.go
index <HASH>..<HASH> 100644
--- a/modules/caddyhttp/reverseproxy/upstreams.go
+++ b/modules/caddyhttp/reverseproxy/upstreams.go
@@ -186,7 +186,6 @@ func (su SRVUpstreams) expandedAddr(r *http.Request) (addr, service, proto, name
name = repl.ReplaceAll(su.Name, "")
if su.Service == "" && su.Proto == "" {
addr = name
- name = ""
return
}
service = repl.ReplaceAll(su.Service, "")
|
reverseproxy: Don't clear name in SRV upstreams
Fix for dc4d<I>f<I>d<I>b<I>e7d4
|
mholt_caddy
|
train
|
ed4e7e888e04f6f7ea5078a91e51966da5077150
|
diff --git a/src/Command/Site/ModeCommand.php b/src/Command/Site/ModeCommand.php
index <HASH>..<HASH> 100644
--- a/src/Command/Site/ModeCommand.php
+++ b/src/Command/Site/ModeCommand.php
@@ -107,6 +107,7 @@ class ModeCommand extends Command
}
$servicesOverrideResult = $this->overrideServices(
+ $environment,
$loadedConfigurations['services'],
$io
);
@@ -172,7 +173,7 @@ class ModeCommand extends Command
return $result;
}
- protected function overrideServices($servicesSettings, DrupalStyle $io)
+ protected function overrideServices($environment, $servicesSettings, DrupalStyle $io)
{
$directory = sprintf(
'%s/%s',
@@ -198,19 +199,32 @@ class ModeCommand extends Command
}
$yaml = new Yaml();
+
$services = $yaml->parse(file_get_contents($settingsServicesFile));
$result = [];
foreach ($servicesSettings as $service => $parameters) {
- foreach ($parameters as $parameter => $value) {
- $services['parameters'][$service][$parameter] = $value;
+ if(is_array($parameters)) {
+ foreach ($parameters as $parameter => $value) {
+ print 'parameters: ' . $parameter . "\n";
+ $services['parameters'][$service][$parameter] = $value;
+ // Set values for output
+ $result[$parameter]['service'] = $service;
+ $result[$parameter]['parameter'] = $parameter;
+ if (is_bool($value)) {
+ $value = $value ? 'true' : 'false';
+ }
+ $result[$parameter]['value'] = $value;
+ }
+ } else {
+ $services['parameters'][$service] = $parameters;
// Set values for output
- $result[$parameter]['service'] = $service;
- $result[$parameter]['parameter'] = $parameter;
- if (is_bool($value)) {
- $value = $value? 'true' : 'false';
+ $result[$service]['service'] = $service;
+ $result[$service]['parameter'] = '';
+ if (is_bool($parameters)) {
+ $value = $parameters ? 'true' : 'false';
}
- $result[$parameter]['value'] = $value;
+ $result[$service]['value'] = $value;
}
}
@@ -247,19 +261,23 @@ class ModeCommand extends Command
if (!file_exists($configFile)) {
$configFile = sprintf(
'%s/config/dist/site.mode.yml',
- $this->appRoot
+ $this->configurationManager->getApplicationDirectory() . DRUPAL_CONSOLE_CORE
);
}
- $siteModeConfiguration = Yaml::dump(file_get_contents($configFile));
+ $siteModeConfiguration = Yaml::parse(file_get_contents($configFile));
$configKeys = array_keys($siteModeConfiguration);
$configurationSettings = [];
foreach ($configKeys as $configKey) {
$siteModeConfigurationItem = $siteModeConfiguration[$configKey];
foreach ($siteModeConfigurationItem as $setting => $parameters) {
- foreach ($parameters as $parameter => $value) {
- $configurationSettings[$configKey][$setting][$parameter] = $value[$env];
+ if(array_key_exists($env, $parameters)) {
+ $configurationSettings[$configKey][$setting] = $parameters[$env];
+ } else {
+ foreach ($parameters as $parameter => $value) {
+ $configurationSettings[$configKey][$setting][$parameter] = $value[$env];
+ }
}
}
}
|
Include service option for http.response.debug_cacheability_headers (#<I>)
|
hechoendrupal_drupal-console
|
train
|
016eded1dea75f32cd51831dafeb6fef89994fe0
|
diff --git a/rb/lib/selenium/webdriver/common/log_entry.rb b/rb/lib/selenium/webdriver/common/log_entry.rb
index <HASH>..<HASH> 100644
--- a/rb/lib/selenium/webdriver/common/log_entry.rb
+++ b/rb/lib/selenium/webdriver/common/log_entry.rb
@@ -30,14 +30,14 @@ module Selenium
def as_json(*)
{
- 'level' => level,
'timestamp' => timestamp,
+ 'level' => level,
'message' => message
}
end
def to_s
- "#{level} #{time}: #{message}"
+ "#{time} #{level}: #{message}"
end
def time
|
[rb]: Standardise driver logging output (#<I>)
Ensure all log formats (Including browser ones), are in a consistent format
|
SeleniumHQ_selenium
|
train
|
9547b6c8432e2fa0917978a9de52227819295787
|
diff --git a/hazelcast/src/test/java/com/hazelcast/ringbuffer/impl/RingbufferAddAllReadManyStressTest.java b/hazelcast/src/test/java/com/hazelcast/ringbuffer/impl/RingbufferAddAllReadManyStressTest.java
index <HASH>..<HASH> 100644
--- a/hazelcast/src/test/java/com/hazelcast/ringbuffer/impl/RingbufferAddAllReadManyStressTest.java
+++ b/hazelcast/src/test/java/com/hazelcast/ringbuffer/impl/RingbufferAddAllReadManyStressTest.java
@@ -25,6 +25,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
import static com.hazelcast.ringbuffer.OverflowPolicy.FAIL;
import static java.lang.Math.max;
+import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assert.assertEquals;
@RunWith(HazelcastSerialClassRunner.class)
@@ -143,7 +144,7 @@ public class RingbufferAddAllReadManyStressTest extends HazelcastTestSupport {
produced++;
long currentTimeMs = System.currentTimeMillis();
- if (lastLogMs + TimeUnit.SECONDS.toMillis(5000) < currentTimeMs) {
+ if (lastLogMs + SECONDS.toMillis(5) < currentTimeMs) {
lastLogMs = currentTimeMs;
logger.info(getName() + " at " + produced);
}
@@ -200,7 +201,7 @@ public class RingbufferAddAllReadManyStressTest extends HazelcastTestSupport {
seq++;
long currentTimeMs = System.currentTimeMillis();
- if (lastLogMs + TimeUnit.SECONDS.toMillis(5000) < currentTimeMs) {
+ if (lastLogMs + SECONDS.toMillis(5) < currentTimeMs) {
lastLogMs = currentTimeMs;
logger.info(getName() + " at " + seq);
}
|
RingbufferAddAllReadManyStressTest logging fix
|
hazelcast_hazelcast
|
train
|
5f43e2a22b4497614e5f19e2912f32758d3cb298
|
diff --git a/kernel/content/view.php b/kernel/content/view.php
index <HASH>..<HASH> 100644
--- a/kernel/content/view.php
+++ b/kernel/content/view.php
@@ -199,8 +199,14 @@ else
$node =& eZContentObjectTreeNode::fetch( $NodeID );
}
+ if ( !is_object( $node ) )
+ return $Module->handleError( EZ_ERROR_KERNEL_NOT_AVAILABLE, 'kernel' );
+
$object = $node->attribute( 'object' );
+ if ( !is_object( $object ) )
+ return $Module->handleError( EZ_ERROR_KERNEL_NOT_AVAILABLE, 'kernel' );
+
if ( $Params['Language'] != '' )
{
$object->setCurrentLanguage( $Params['Language'] );
|
- Made sure the fetched node and object are checked before being used;
if they do not exist in the database the view returns with
EZ_ERROR_KERNEL_NOT_AVAILABLE.
git-svn-id: file:///home/patrick.allaert/svn-git/ezp-repo/ezpublish/trunk@<I> a<I>eee8c-daba-<I>-acae-fa<I>f<I>
|
ezsystems_ezpublish-legacy
|
train
|
29df95adfc3f9709720bf639924c77f7a646309b
|
diff --git a/helpers/translation/TranslationBundle.php b/helpers/translation/TranslationBundle.php
index <HASH>..<HASH> 100644
--- a/helpers/translation/TranslationBundle.php
+++ b/helpers/translation/TranslationBundle.php
@@ -110,7 +110,10 @@ class TranslationBundle {
'translations' => $translations
);
if(is_dir($directory)){
- $file = $directory. '/' . $this->langCode . '.json';
+ if(!is_dir($directory. '/' . $this->langCode)){
+ mkdir($directory. '/' . $this->langCode);
+ }
+ $file = $directory. '/' . $this->langCode . '/messages.json';
if(file_put_contents($file, json_encode($content))){
return $file;
}
diff --git a/views/js/i18n.js b/views/js/i18n.js
index <HASH>..<HASH> 100644
--- a/views/js/i18n.js
+++ b/views/js/i18n.js
@@ -1,4 +1,4 @@
-define(['lodash', 'json!i18n_tr', 'context', 'core/format'], function(_, i18nTr, context, format){
+define(['lodash', 'json!i18ntr/messages.json', 'context', 'core/format'], function(_, i18nTr, context, format){
'use strict';
var translations = i18nTr.translations;
|
change the path of client translation bundles
|
oat-sa_tao-core
|
train
|
79e60c96e51c57a50a586857e799be76753a1a96
|
diff --git a/lib/ungulate/job.rb b/lib/ungulate/job.rb
index <HASH>..<HASH> 100644
--- a/lib/ungulate/job.rb
+++ b/lib/ungulate/job.rb
@@ -6,7 +6,7 @@ require 'yaml'
module Ungulate
class Job
- attr_accessor :bucket, :key, :queue, :versions
+ attr_accessor :bucket, :key, :notification_url, :queue, :versions
def self.s3
@s3 ||=
@@ -37,6 +37,7 @@ module Ungulate
def attributes=(options)
self.bucket = Job.s3.bucket(options[:bucket])
self.key = options[:key]
+ self.notification_url = options[:notification_url]
self.versions = options[:versions]
end
@@ -79,6 +80,19 @@ module Ungulate
)
image.destroy!
end
+ send_notification
+ end
+
+ def send_notification
+ return false if notification_url.blank?
+
+ @logger.info "Sending notification to #{notification_url}"
+
+ url = URI.parse(notification_url)
+
+ Net::HTTP.start(url.host) do |http|
+ http.put(url.path, nil)
+ end
end
def version_key(version)
diff --git a/spec/ungulate/job_spec.rb b/spec/ungulate/job_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/ungulate/job_spec.rb
+++ b/spec/ungulate/job_spec.rb
@@ -99,7 +99,8 @@ module Ungulate
job.attributes = {
:bucket => 'hello',
:key => 'path/to/filename.gif',
- :versions => @versions
+ :versions => @versions,
+ :notification_url => 'http://some.host/with/simple/path',
}
job
end
@@ -107,6 +108,7 @@ module Ungulate
its(:bucket) { should == @bucket }
its(:key) { should == 'path/to/filename.gif' }
its(:versions) { should == @versions }
+ its(:notification_url) { should == 'http://some.host/with/simple/path' }
end
describe :source do
@@ -175,6 +177,7 @@ module Ungulate
job.stub(:bucket).and_return(@bucket)
job.stub(:version_key).with(:big).and_return('path/to/someimage_big.jpg')
job.stub(:version_key).with(:little).and_return('path/to/someimage_little.jpg')
+ job.stub(:send_notification)
job
end
@@ -203,6 +206,10 @@ module Ungulate
expected_headers)
end
+ it "should notify" do
+ subject.should_receive(:send_notification)
+ end
+
context "empty array" do
before do
subject.stub(:processed_versions).and_return([])
@@ -213,6 +220,32 @@ module Ungulate
end
end
+ describe :send_notification do
+ after { subject.send_notification }
+
+ context "notification URL provided" do
+ before do
+ subject.stub(:notification_url).and_return('http://some.host/processing_images/some/path')
+ end
+
+ it "should PUT to the URL" do
+ http = mock('Net::HTTP')
+ Net::HTTP.stub(:start).with('some.host').and_yield(http)
+ http.should_receive(:put).with('/processing_images/some/path', nil)
+ end
+ end
+
+ context "notification URL not provided" do
+ before do
+ subject.stub(:notification_url).and_return(nil)
+ end
+
+ it "should not PUT" do
+ Net::HTTP.should_not_receive(:put)
+ end
+ end
+ end
+
describe :version_key do
subject do
job = Job.new
|
support sending of PUT notification to a particular URL at end of job
|
camelpunch_ungulate
|
train
|
ad81212ec3194d767913baf7a0b80b6a9720ba3a
|
diff --git a/admin/user.php b/admin/user.php
index <HASH>..<HASH> 100644
--- a/admin/user.php
+++ b/admin/user.php
@@ -156,21 +156,8 @@
if ($users = get_records_sql("SELECT id, username, email, firstname, lastname, city, country, lastaccess from user WHERE username <> 'guest'
AND deleted <> '1' ORDER BY $sort $dir LIMIT $page,$recordsperpage")) {
- foreach ($users as $key => $user) {
- $users[$key]->country = $COUNTRIES[$user->country];
- }
- if ($sort == "country") { // Need to resort by full country name, not code
- foreach ($users as $user) {
- $susers[$user->id] = $user->country;
- }
- asort($susers);
- foreach ($susers as $key => $value) {
- $nusers[] = $users[$key];
- }
- $users = $nusers;
- }
-
print_heading("$usercount ".get_string("users"));
+
$a->start = $page;
$a->end = $page + $recordsperpage;
if ($a->end > $usercount) {
@@ -206,6 +193,22 @@
}
echo "</TD></TR></TABLE>";
+ flush();
+
+ foreach ($users as $key => $user) {
+ $users[$key]->country = $COUNTRIES[$user->country];
+ }
+ if ($sort == "country") { // Need to resort by full country name, not code
+ foreach ($users as $user) {
+ $susers[$user->id] = $user->country;
+ }
+ asort($susers);
+ foreach ($susers as $key => $value) {
+ $nusers[] = $users[$key];
+ }
+ $users = $nusers;
+ }
+
$table->head = array ($name, $email, $city, $country, $lastaccess, "", "");
$table->align = array ("LEFT", "LEFT", "LEFT", "LEFT", "LEFT", "CENTER", "CENTER");
$table->width = "95%";
|
Rearranged code so some output is printed before tackling the big table
|
moodle_moodle
|
train
|
dd8767f5b08ac64fb0afae3dff669abd9e0360a5
|
diff --git a/validator/setup.py b/validator/setup.py
index <HASH>..<HASH> 100644
--- a/validator/setup.py
+++ b/validator/setup.py
@@ -83,33 +83,19 @@ else:
library_dirs = []
if os.name == 'nt':
- conf_dir = "C:\\Program Files (x86)\\Intel\\sawtooth-validator\\conf"
- log_dir = "C:\\Program Files (x86)\\Intel\\sawtooth-validator\\logs"
- data_dir = "C:\\Program Files (x86)\\Intel\\sawtooth-validator\\data"
- run_dir = "C:\\Program Files (x86)\\Intel\\sawtooth-validator\\run"
- static_content_dir = "Lib\\site-packages\\txnserver\\static_content"
+ conf_dir = "C:\\Program Files (x86)\\Intel\\sawtooth\\conf"
+ log_dir = "C:\\Program Files (x86)\\Intel\\sawtooth\\logs"
+ data_dir = "C:\\Program Files (x86)\\Intel\\sawtooth\\data"
else:
- conf_dir = "/etc/sawtooth-validator"
- log_dir = "/var/log/sawtooth-validator"
- data_dir = "/var/lib/sawtooth-validator"
- run_dir = "/var/run/sawtooth-validator"
- static_content_dir = "lib/python2.7/dist-packages/txnserver/static_content"
-
-
-# collect the static files to put in the installation dir
-static_content_files = []
-for root, dirs, files in os.walk("txnserver/static_content"):
- for file in files:
- static_content_files.append(os.path.join(root, file))
-
+ conf_dir = "/etc/sawtooth"
+ log_dir = "/var/log/sawtooth"
+ data_dir = "/var/lib/sawtooth"
data_files = [
(conf_dir, ["etc/txnvalidator-logging.yaml.example"]),
(os.path.join(conf_dir, "keys"), []),
(log_dir, []),
- (data_dir, []),
- (run_dir, []),
- (static_content_dir, static_content_files),
+ (data_dir, [])
]
if os.path.exists("/etc/default"):
|
Update sawtooth default paths in setup.py
This changes from "sawtooth-validator" directories to "sawtooth"
directories (example: /etc/sawtooth-validator -> /etc/sawtooth)
to match the new default locations.
Removes run_dir since it is not currently used by the validator.
Removes static files which no longer exist.
|
hyperledger_sawtooth-core
|
train
|
c653b912d86e50b96bb06b733aee9e1b60ec31e7
|
diff --git a/doc/source/cookbook/environment.rst b/doc/source/cookbook/environment.rst
index <HASH>..<HASH> 100644
--- a/doc/source/cookbook/environment.rst
+++ b/doc/source/cookbook/environment.rst
@@ -31,8 +31,8 @@ Note in case you use the environment there is no need to call
:func:`~pypet.trajectory.Trajectory.f_store`
for data storage, this will always be called at the end of the simulation and at the end of a
single run automatically (unless you set `automatic_storing` to `False`).
-Yet, be aware that if add any custom data during a single run not under
-`results.runs.run_XXXXXXXX` or `derived_parameters.runs.run_XXXXXXXXX` this data will not
+Yet, be aware that if add any custom data during a single run not under a group named
+`run_XXXXXXXX` this data will not
be immediately saved after the run completion. In case of multiprocessing this data will be
lost if not manually stored.
diff --git a/pypet/environment.py b/pypet/environment.py
index <HASH>..<HASH> 100644
--- a/pypet/environment.py
+++ b/pypet/environment.py
@@ -24,7 +24,7 @@ import traceback
import hashlib
import time
import datetime
-import copy as cp
+import pickle
try:
from sumatra.projects import load_project
@@ -952,12 +952,8 @@ class Environment(HasLogger):
self._clean_up_runs = clean_up_runs
self._deep_copy_arguments = False # For future reference deep_copy_arguments
-
-
if self._do_single_runs:
-
-
config_name='environment.%s.multiproc' % self.v_name
self._traj.f_add_config(config_name, self._multiproc,
comment= 'Whether or not to use multiprocessing.').f_lock()
@@ -1176,9 +1172,6 @@ class Environment(HasLogger):
for handler in root.handlers:
handler.setFormatter(f)
-
-
-
@deprecated('Please use assignment in environment constructor.')
def f_switch_off_large_overview(self):
""" Switches off the tables consuming the most memory.
@@ -2094,6 +2087,14 @@ class Environment(HasLogger):
(self._traj.v_name, self._ncores))
else:
+ if self._deep_copy_arguments: # For future reference not supported atm
+ deep_copied_data = []
+ deep_copied_data.append(self._runfunc,
+ self._traj, self._args, self._kwargs)
+ if dill is not None:
+ deep_copy_dump = dill.dumps(deep_copied_data)
+ else:
+ deep_copy_dump = pickle.dumps(deep_copied_data)
# Single Processing
self._logger.info('\n************************************************************\n'
'************************************************************\n'
@@ -2106,15 +2107,21 @@ class Environment(HasLogger):
for n in xrange(start_run_idx, len(self._traj)):
if not self._traj.f_is_completed(n):
if self._deep_copy_arguments: # This is so far not supported!!!! For future reference
- result = _single_run((cp.deepcopy(self._traj._make_single_run(n)),
+
+ if dill is not None:
+ deep_copied_data = dill.loads(deep_copy_dump)
+ else:
+ deep_copied_data = pickle.loads(deep_copy_dump)
+
+ result = _single_run((deep_copied_data[1]._make_single_run(n),
self._log_path,
self._log_stdout,
- None, self._runfunc,
+ None, deep_copied_data[0],
len(self._traj),
self._multiproc,
None,
- cp.deepcopy(self._args),
- cp.deepcopy(self._kwargs),
+ deep_copied_data[2],
+ deep_copied_data[3],
self._clean_up_runs,
self._continue_path,
self._automatic_storing))
diff --git a/pypet/naturalnaming.py b/pypet/naturalnaming.py
index <HASH>..<HASH> 100644
--- a/pypet/naturalnaming.py
+++ b/pypet/naturalnaming.py
@@ -853,7 +853,7 @@ class NaturalNamingInterface(HasLogger):
if len(index) < pypetconstants.FORMAT_ZEROS:
return pypetconstants.FORMATTED_RUN_NAME % int(index)
- elif name in SHORTCUT_SET:
+ if name in SHORTCUT_SET:
if name == 'crun':
if self._root_instance._as_run is not None:
return '$'
|
Removed small bug I introduced in former commit
|
SmokinCaterpillar_pypet
|
train
|
8c3e02624942430171ce579ec6126de421ec3cbe
|
diff --git a/packages/core/utils/src/openInBrowser.js b/packages/core/utils/src/openInBrowser.js
index <HASH>..<HASH> 100644
--- a/packages/core/utils/src/openInBrowser.js
+++ b/packages/core/utils/src/openInBrowser.js
@@ -49,7 +49,9 @@ function getAppName(appName: string): string {
export default async function openInBrowser(url: string, browser: string) {
try {
const options =
- typeof browser === 'string' ? {app: [getAppName(browser)]} : undefined;
+ typeof browser === 'string' && browser.length > 0
+ ? {app: [getAppName(browser)]}
+ : undefined;
await open(url, options);
} catch (err) {
|
empty string for --open cli param is treated as undefined (#<I>)
|
parcel-bundler_parcel
|
train
|
7a844d34983496c4ad02e6b7d95fedb54fb9da17
|
diff --git a/tests/phpunit/unit/Twig/TwigExtensionTest.php b/tests/phpunit/unit/Twig/TwigExtensionTest.php
index <HASH>..<HASH> 100644
--- a/tests/phpunit/unit/Twig/TwigExtensionTest.php
+++ b/tests/phpunit/unit/Twig/TwigExtensionTest.php
@@ -3,6 +3,8 @@ namespace Bolt\Tests\Twig;
use Bolt\EventListener\ConfigListener;
use Bolt\Tests\BoltUnitTest;
+use Bolt\Twig\SetcontentTokenParser;
+use Bolt\Twig\SwitchTokenParser;
use Bolt\Twig\TwigExtension;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpKernel\Event\GetResponseEvent;
@@ -18,9 +20,7 @@ class TwigExtensionTest extends BoltUnitTest
{
public function testTwigInterface()
{
- $app = $this->getApp();
- $handlers = $this->getTwigHandlers($app);
- $twig = new TwigExtension($app, $handlers, false);
+ $twig = new TwigExtension(false);
$this->assertGreaterThan(0, $twig->getFunctions());
$this->assertGreaterThan(0, $twig->getFilters());
$this->assertGreaterThan(0, $twig->getTests());
@@ -59,8 +59,7 @@ class TwigExtensionTest extends BoltUnitTest
$request = Request::createFromGlobals();
$app['request'] = $request;
$app['request_stack']->push($request);
- $handlers = $this->getTwigHandlers($app);
- $twig = new TwigExtension($app, $handlers, true);
+ $twig = new TwigExtension(true);
$result = $twig->getGlobals();
$this->assertArrayHasKey('config', $result);
@@ -94,11 +93,10 @@ class TwigExtensionTest extends BoltUnitTest
public function testGetTokenParsers()
{
- $app = $this->getApp();
- $handlers = $this->getTwigHandlers($app);
- $twig = new TwigExtension($app, $handlers, false);
+ $twig = new TwigExtension(false);
$result = $twig->getTokenParsers();
- $this->assertInstanceOf('Bolt\Twig\SetcontentTokenParser', $result[0]);
+ $this->assertInstanceOf(SetcontentTokenParser::class, $result[0]);
+ $this->assertInstanceOf(SwitchTokenParser::class, $result[1]);
}
}
|
[Tests] Update TwigExtensionTest as TwigExtension no longer requires handlers to load
|
bolt_bolt
|
train
|
ed5667a57891a1b9bf0bd01bafeddf38952d149e
|
diff --git a/src/Plugin.php b/src/Plugin.php
index <HASH>..<HASH> 100644
--- a/src/Plugin.php
+++ b/src/Plugin.php
@@ -35,6 +35,7 @@ class Plugin implements PluginInterface, EventSubscriberInterface
const MESSAGE_NOTHING_TO_INSTALL = 'Nothing to install or update';
const MESSAGE_NOT_INSTALLED = 'PHPCodeSniffer is not installed';
+ const PACKAGE_NAME = 'squizlabs/php_codesniffer';
const PACKAGE_TYPE = 'phpcodesniffer-standard';
const PHPCS_CONFIG_KEY = 'installed_paths';
@@ -288,17 +289,19 @@ class Plugin implements PluginInterface, EventSubscriberInterface
/**
* Simple check if PHP_CodeSniffer is installed.
*
- * @return bool PHP_CodeSniffer is installed
+ * @return bool Whether PHP_CodeSniffer is installed
*/
private function isPHPCodeSnifferInstalled()
{
- // Check if PHP_CodeSniffer is actually installed
- return (count(
- $this
- ->composer
- ->getRepositoryManager()
- ->getLocalRepository()
- ->findPackages('squizlabs/php_codesniffer')
- ) !== 0);
+ $packages = $this
+ ->composer
+ ->getRepositoryManager()
+ ->getLocalRepository()
+ ->findPackages(self::PACKAGE_NAME)
+ ;
+
+ $packageCount = count($packages);
+
+ return ($packageCount !== 0);
}
}
|
Improves readability of Plugin::isPHPCodeSnifferInstalled() method.
|
Dealerdirect_phpcodesniffer-composer-installer
|
train
|
f7d9dfafd0c486b31957e0eefa0bf792bfba9b10
|
diff --git a/provider/kubernetes/kubernetes.go b/provider/kubernetes/kubernetes.go
index <HASH>..<HASH> 100644
--- a/provider/kubernetes/kubernetes.go
+++ b/provider/kubernetes/kubernetes.go
@@ -27,9 +27,6 @@ var _ provider.Provider = (*Provider)(nil)
const (
annotationFrontendRuleType = "traefik.frontend.rule.type"
- ruleTypePathPrefixStrip = "PathPrefixStrip"
- ruleTypePathStrip = "PathStrip"
- ruleTypePath = "Path"
ruleTypePathPrefix = "PathPrefix"
)
@@ -199,12 +196,8 @@ func (p *Provider) loadIngresses(k8sClient Client) (*types.Configuration, error)
}
if len(pa.Path) > 0 {
- ruleType, unknown := getRuleTypeFromAnnotation(i.Annotations)
- switch {
- case unknown:
- log.Warnf("Unknown RuleType '%s' for Ingress %s/%s, falling back to PathPrefix", ruleType, i.ObjectMeta.Namespace, i.ObjectMeta.Name)
- fallthrough
- case ruleType == "":
+ ruleType := i.Annotations[annotationFrontendRuleType]
+ if ruleType == "" {
ruleType = ruleTypePathPrefix
}
@@ -392,24 +385,3 @@ func (p *Provider) loadConfig(templateObjects types.Configuration) *types.Config
}
return configuration
}
-
-func getRuleTypeFromAnnotation(annotations map[string]string) (ruleType string, unknown bool) {
- ruleType = annotations[annotationFrontendRuleType]
- for _, knownRuleType := range []string{
- ruleTypePathPrefixStrip,
- ruleTypePathStrip,
- ruleTypePath,
- ruleTypePathPrefix,
- } {
- if strings.ToLower(ruleType) == strings.ToLower(knownRuleType) {
- return knownRuleType, false
- }
- }
-
- if ruleType != "" {
- // Annotation is set but does not match anything we know.
- unknown = true
- }
-
- return ruleType, unknown
-}
diff --git a/provider/kubernetes/kubernetes_test.go b/provider/kubernetes/kubernetes_test.go
index <HASH>..<HASH> 100644
--- a/provider/kubernetes/kubernetes_test.go
+++ b/provider/kubernetes/kubernetes_test.go
@@ -5,7 +5,6 @@ import (
"errors"
"fmt"
"reflect"
- "strings"
"testing"
"github.com/containous/traefik/types"
@@ -333,19 +332,14 @@ func TestRuleType(t *testing.T) {
frontendRuleType string
}{
{
- desc: "implicit default",
+ desc: "rule type annotation missing",
ingressRuleType: "",
frontendRuleType: ruleTypePathPrefix,
},
{
- desc: "unknown ingress / explicit default",
- ingressRuleType: "unknown",
- frontendRuleType: ruleTypePathPrefix,
- },
- {
- desc: "explicit ingress",
- ingressRuleType: ruleTypePath,
- frontendRuleType: ruleTypePath,
+ desc: "rule type annotation set",
+ ingressRuleType: "Path",
+ frontendRuleType: "Path",
},
}
@@ -1773,65 +1767,6 @@ func TestInvalidPassHostHeaderValue(t *testing.T) {
}
}
-func TestGetRuleTypeFromAnnotation(t *testing.T) {
- tests := []struct {
- in string
- wantedUnknown bool
- }{
- {
- in: ruleTypePathPrefixStrip,
- wantedUnknown: false,
- },
- {
- in: ruleTypePathStrip,
- wantedUnknown: false,
- },
- {
- in: ruleTypePath,
- wantedUnknown: false,
- },
- {
- in: ruleTypePathPrefix,
- wantedUnknown: false,
- },
- {
- wantedUnknown: false,
- },
- {
- in: "Unknown",
- wantedUnknown: true,
- },
- }
-
- for _, test := range tests {
- test := test
- inputs := []string{test.in, strings.ToLower(test.in)}
- if inputs[0] == inputs[1] {
- // Lower-casing makes no difference -- truncating to single case.
- inputs = inputs[:1]
- }
- for _, input := range inputs {
- t.Run(fmt.Sprintf("in='%s'", input), func(t *testing.T) {
- t.Parallel()
- annotations := map[string]string{}
- if test.in != "" {
- annotations[annotationFrontendRuleType] = test.in
- }
-
- gotRuleType, gotUnknown := getRuleTypeFromAnnotation(annotations)
-
- if gotUnknown != test.wantedUnknown {
- t.Errorf("got unknown '%t', wanted '%t'", gotUnknown, test.wantedUnknown)
- }
-
- if gotRuleType != test.in {
- t.Errorf("got rule type '%s', wanted '%s'", gotRuleType, test.in)
- }
- })
- }
- }
-}
-
func TestKubeAPIErrors(t *testing.T) {
ingresses := []*v1beta1.Ingress{{
ObjectMeta: v1.ObjectMeta{
|
[k8s] Remove rule type path list.
Instead of doing sanity checks in the Kubernetes provider, we just
accept any non-empty value from the annotation and rely on the server
part to filter out unknown rules.
This allows us to automatically stay in sync with the currently
supported Path matchers/modifiers.
|
containous_traefik
|
train
|
83232201f9304af54303289db0c7846da46ab19e
|
diff --git a/lib/pusher.rb b/lib/pusher.rb
index <HASH>..<HASH> 100644
--- a/lib/pusher.rb
+++ b/lib/pusher.rb
@@ -30,7 +30,7 @@ module Pusher
def_delegators :default_client, :authentication_token, :url
def_delegators :default_client, :encrypted=, :url=
- def_delegators :default_client, :channels, :presence_channels, :trigger
+ def_delegators :default_client, :channels, :trigger
attr_writer :logger
diff --git a/lib/pusher/client.rb b/lib/pusher/client.rb
index <HASH>..<HASH> 100644
--- a/lib/pusher/client.rb
+++ b/lib/pusher/client.rb
@@ -76,35 +76,18 @@ module Pusher
#
# GET /apps/[id]/channels
#
- # @return [Hash] See Pusher api docs
+ # @param options [Hash] Hash of options for the API - see Pusher API docs
+ # @return [Hash] See Pusher API docs
# @raise [Pusher::Error] on invalid Pusher response - see the error message for more details
# @raise [Pusher::HTTPError] on any error raised inside Net::HTTP - the original error is available in the original_error attribute
#
- def channels
+ def channels(options = {})
@_channels_url ||= begin
uri = url.dup
uri.path = uri.path + '/channels'
uri
end
- request = Pusher::Request.new(:get, @_channels_url, {}, nil, nil, self)
- return request.send_sync
- end
-
- # Request presence channels from the API
- #
- # GET /apps/[id]/channels/presence
- #
- # @return [Hash] See Pusher api docs
- # @raise [Pusher::Error] on invalid Pusher response - see the error message for more details
- # @raise [Pusher::HTTPError] on any error raised inside Net::HTTP - the original error is available in the original_error attribute
- #
- def presence_channels
- @_pc_url ||= begin
- uri = url.dup
- uri.path = uri.path + '/channels/presence'
- uri
- end
- request = Pusher::Request.new(:get, @_pc_url, {}, nil, nil, self)
+ request = Pusher::Request.new(:get, @_channels_url, options, nil, nil, self)
return request.send_sync
end
|
Add support for sending options to /channels API
* Removed the presence API: it has been replaced by extending /channels
|
pusher_pusher-http-ruby
|
train
|
fa8e1c9b21f1766bc74114e6cf579fe585702934
|
diff --git a/perspective-sql/src/main/java/org/meridor/perspective/sql/impl/parser/DataSource.java b/perspective-sql/src/main/java/org/meridor/perspective/sql/impl/parser/DataSource.java
index <HASH>..<HASH> 100644
--- a/perspective-sql/src/main/java/org/meridor/perspective/sql/impl/parser/DataSource.java
+++ b/perspective-sql/src/main/java/org/meridor/perspective/sql/impl/parser/DataSource.java
@@ -131,32 +131,29 @@ public class DataSource {
@Override
public String toString() {
- StringBuilder sb = new StringBuilder("DataSource{\n");
+ StringBuilder sb = new StringBuilder("DataSource{");
if (tableAlias != null) {
- sb.append(String.format("\ttableAlias='%s',\n", String.valueOf(tableAlias)));
+ sb.append(String.format("tableAlias='%s',", String.valueOf(tableAlias)));
}
if (leftDataSource != null) {
- sb.append(String.format("\tleftDataSource=%s,\n", String.valueOf(leftDataSource).replace("\n", "\n\t")));
+ sb.append(String.format("leftDataSource=%s,", String.valueOf(leftDataSource)));
}
if (isNaturalJoin) {
- sb.append("\tnaturalJoin=true,\n");
+ sb.append("naturalJoin=true,");
}
if (joinType != null) {
- sb.append(String.format("\tjoinType=%s,\n", String.valueOf(joinType)));
+ sb.append(String.format("joinType=%s,", String.valueOf(joinType)));
}
if (condition != null) {
- sb.append(String.format("\tcondition=%s,\n", String.valueOf(condition)));
+ sb.append(String.format("condition=%s,", String.valueOf(condition)));
}
if (!columns.isEmpty()) {
- sb.append(String.format("\tcolumns=%s,\n", columns));
+ sb.append(String.format("columns=%s,", columns));
}
if (rightDatasource != null) {
- sb.append(String.format(
- "\trightDatasource=%s,\n",
- String.valueOf(rightDatasource).replace("\n", "\n\t")
- ));
+ sb.append(String.format("rightDatasource=%s,", String.valueOf(rightDatasource)));
}
- sb.append(String.format("\ttype=%s\n", String.valueOf(type)));
+ sb.append(String.format("type=%s", String.valueOf(type)));
sb.append("}");
return sb.toString();
}
diff --git a/perspective-sql/src/main/java/org/meridor/perspective/sql/impl/task/DataSourceTask.java b/perspective-sql/src/main/java/org/meridor/perspective/sql/impl/task/DataSourceTask.java
index <HASH>..<HASH> 100644
--- a/perspective-sql/src/main/java/org/meridor/perspective/sql/impl/task/DataSourceTask.java
+++ b/perspective-sql/src/main/java/org/meridor/perspective/sql/impl/task/DataSourceTask.java
@@ -65,9 +65,9 @@ public class DataSourceTask implements Task {
@Override
public String toString() {
- return "DataSourceTask{\n" +
- String.format("\tdataSource=%s,\n", String.valueOf(dataSource).replace("\n", "\n\t")) +
- String.format("\ttableAliases=%s\n", tableAliases) +
+ return "DataSourceTask{" +
+ String.format("dataSource=%s,", String.valueOf(dataSource)) +
+ String.format("tableAliases=%s", tableAliases) +
"}";
}
}
|
Removed newlines and tables from DataSource pretty-printing
|
meridor_perspective-backend
|
train
|
bd95f5350e7b5d834b3fd74f251b6f0a71d92e39
|
diff --git a/src/jsonPointer.js b/src/jsonPointer.js
index <HASH>..<HASH> 100644
--- a/src/jsonPointer.js
+++ b/src/jsonPointer.js
@@ -32,7 +32,8 @@
/**
- * Returns |target| object's value pointed by |pointer|.
+ * Returns |target| object's value pointed by |pointer|, returns undefined
+ * if |pointer| points to non-existing value.
* If |pointer| is not provided returns curried function bound to |target|.
* @param {!string} target JSON document.
* @param {string=} pointer JSON Pointer string. Optional.
@@ -50,7 +51,7 @@
var token;
var value = target;
- while ( 'undefined' !== typeof (token = tokensList.pop()) ) {
+ while (!isUndefined(value) && !isUndefined(token = tokensList.pop())) {
value = getValue(value, token);
}
@@ -150,6 +151,11 @@
}
+ function isUndefined(v) {
+ return 'undefined' === typeof v;
+ }
+
+
// Expose API
var jsonPointer = {
|
Return undefined if value was not found
|
alexeykuzmin_jsonpointer.js
|
train
|
31a6087851abc2cae2e55f0502aef07acdb3c78d
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,8 +8,6 @@ except ImportError:
print 'TRYING: from distutils.core import setup'
from distutils.core import setup
from distutils.extension import Extension
-#
-import numpy
# If we're building from Git (no PKG-INFO), we use Cython. If we're
@@ -31,6 +29,14 @@ else:
source_ext = '.cpp'
+try:
+ import numpy
+except ImportError:
+ numpy_includes = []
+else:
+ numpy_includes = [numpy.get_include()]
+
+
# http://stackoverflow.com/a/18992595
ON_LINUX = 'linux' in sys.platform
if ON_LINUX:
@@ -83,7 +89,7 @@ os.chdir(this_dir)
# locations
pyx_src_dir = 'crosscat/cython_code'
cpp_src_dir = 'cpp_code/src'
-include_dirs = ['cpp_code/include/CrossCat', numpy.get_include()]
+include_dirs = ['cpp_code/include/CrossCat'] + numpy_includes
# specify sources
|
Allow pip to discover it needs numpy.
|
probcomp_crosscat
|
train
|
8db1918397c21bc88ddcd9b473fd657de47a5ebb
|
diff --git a/xmldog/src/main/java/jlibs/xml/sax/dog/XPathResults.java b/xmldog/src/main/java/jlibs/xml/sax/dog/XPathResults.java
index <HASH>..<HASH> 100644
--- a/xmldog/src/main/java/jlibs/xml/sax/dog/XPathResults.java
+++ b/xmldog/src/main/java/jlibs/xml/sax/dog/XPathResults.java
@@ -72,23 +72,22 @@ public class XPathResults extends EvaluationListener{
}
private void print(PrintStream out, Object result, int indent){
- if(result instanceof LongTreeMap){
- LongTreeMap treeMap = (LongTreeMap)result;
- for(LongTreeMap.Entry entry = treeMap.firstEntry(); entry!=null; entry=entry.next()){
- printIndent(out, indent);
- out.println("[");
- print(out, entry.value, indent+2);
- printIndent(out, indent);
- out.println("]");
- }
- }else if(result instanceof Collection){
+ if(result instanceof Collection){
int i = 0;
Collection c = (Collection)result;
String format = "%0"+String.valueOf(c.size()).length()+"d: %s%n";
for(Object item: c){
- printIndent(out, indent);
- out.printf(format, ++i, item);
+ if(item instanceof Collection){
+ printIndent(out, indent);
+ out.println("[");
+ print(out, item, indent+2);
+ printIndent(out, indent);
+ out.println("]");
+ }else{
+ printIndent(out, indent);
+ out.printf(format, ++i, item);
+ }
}
}else{
printIndent(out, indent);
diff --git a/xmldog/src/main/java/jlibs/xml/sax/dog/expr/nodset/PathExpression.java b/xmldog/src/main/java/jlibs/xml/sax/dog/expr/nodset/PathExpression.java
index <HASH>..<HASH> 100644
--- a/xmldog/src/main/java/jlibs/xml/sax/dog/expr/nodset/PathExpression.java
+++ b/xmldog/src/main/java/jlibs/xml/sax/dog/expr/nodset/PathExpression.java
@@ -250,9 +250,10 @@ final class PathEvaluation extends Evaluation<PathExpression> implements NodeSet
@SuppressWarnings({"unchecked"})
public Object computeResult(){
if(expression.forEach){
+ List<Object> result = new ArrayList<Object>(evaluations.size());
for(LongTreeMap.Entry entry = evaluations.firstEntry(); entry!=null; entry=entry.next())
- entry.value = computeResultItem(((EvaluationInfo)entry.value).result);
- return evaluations;
+ result.add(computeResultItem(((EvaluationInfo)entry.value).result));
+ return result;
}else{
LongTreeMap result = new LongTreeMap();
for(LongTreeMap.Entry<EvaluationInfo> entry = evaluations.firstEntry(); entry!=null; entry=entry.next())
|
use list of lists as pathexpression result
|
santhosh-tekuri_jlibs
|
train
|
3495be364f75aefccf3e7fb0b68e5b5060fa265e
|
diff --git a/HydraServer/python/HydraServer/lib/template.py b/HydraServer/python/HydraServer/lib/template.py
index <HASH>..<HASH> 100644
--- a/HydraServer/python/HydraServer/lib/template.py
+++ b/HydraServer/python/HydraServer/lib/template.py
@@ -794,17 +794,24 @@ def remove_type_from_resource( type_id, resource_type, resource_id,**kwargs):
ResourceType.node_id == node_id,
ResourceType.link_id == link_id,
ResourceType.group_id == group_id).one()
+
DBSession.delete(resourcetype)
def _parse_data_restriction(restriction_dict):
-# {{soap_server.hydra_complexmodels}LESSTHAN}
-
- if restriction_dict is None or restriction_dict == '':
+ log.critical(restriction_dict)
+ if restriction_dict is None or len(restriction_dict) == 0:
return None
+ #replace soap text with an empty string
+ #'{soap_server.hydra_complexmodels}' -> ''
dict_str = re.sub('{[a-zA-Z\.\_]*}', '', str(restriction_dict))
- new_dict = eval(dict_str)
+ new_dict = eval(str(restriction_dict))
+
+ #Evaluate whether the dict actually contains anything.
+ if not isinstance(new_dict, dict) or len(new_dict) == 0:
+ log.critical('A restriction was specified, but it is null')
+ return None
ret_dict = {}
for k, v in new_dict.items():
@@ -815,14 +822,14 @@ def _parse_data_restriction(restriction_dict):
return str(ret_dict)
-def add_template(template,**kwargs):
+def add_template(template, **kwargs):
"""
Add template and a type and typeattrs.
"""
tmpl = Template()
tmpl.template_name = template.name
if template.layout:
- tmpl.layout = str(template.layout)
+ tmpl.layout = str(template.layout)
DBSession.add(tmpl)
diff --git a/HydraServer/python/HydraServer/unittests/test_templates.py b/HydraServer/python/HydraServer/unittests/test_templates.py
index <HASH>..<HASH> 100644
--- a/HydraServer/python/HydraServer/unittests/test_templates.py
+++ b/HydraServer/python/HydraServer/unittests/test_templates.py
@@ -100,9 +100,8 @@ class TemplatesTest(server.SoapServerTest):
tattr_2 = self.client.factory.create('hyd:TypeAttr')
tattr_2.attr_id = node_attr_2.id
- tattr_1.description = "Type attribute 2 description"
+ tattr_2.description = "Type attribute 2 description"
tattr_2.data_restriction = {'INCREASING': None}
- tattrs.TypeAttr.append(tattr_2)
type1.typeattrs = tattrs
@@ -129,6 +128,9 @@ class TemplatesTest(server.SoapServerTest):
types.TemplateType.append(type2)
+ #**********************
+ #type 2 #
+ #**********************
type3 = self.client.factory.create('hyd:TemplateType')
type3.name = "Network Type"
type3.alias = "Network Type alias"
@@ -137,9 +139,10 @@ class TemplatesTest(server.SoapServerTest):
tattr_3 = self.client.factory.create('hyd:TypeAttr')
tattr_3.attr_id = net_attr_1.id
- tattrs.TypeAttr.append(tattr_1)
+ tattr_3.data_restriction = {}
+ tattrs.TypeAttr.append(tattr_3)
types.TemplateType.append(type3)
- # type3.typeattrs = tattrs
+ type3.typeattrs = tattrs
template.types = types
|
Check to make sure that data restriction is a dict and isn't empty.
|
hydraplatform_hydra-base
|
train
|
94f96f0758785fe0c31f2c3309f3b87c81411ab8
|
diff --git a/asciimatics/widgets.py b/asciimatics/widgets.py
index <HASH>..<HASH> 100644
--- a/asciimatics/widgets.py
+++ b/asciimatics/widgets.py
@@ -454,7 +454,8 @@ class Frame(Effect):
def _update(self, frame_no):
# TODO: Should really be in a separate Desktop Manager class - wait for v2.0
if self.scene and self.scene.effects[-1] != self:
- self._layouts[self._focus].blur()
+ if self._focus < len(self._layouts):
+ self._layouts[self._focus].blur()
self._has_focus = False
# Reset the canvas to prepare for next round of updates.
|
Fix issue with experimental effects embedded in Frames
|
peterbrittain_asciimatics
|
train
|
f15cfa08c1a02a776eeb9f90c7d24590c1ab9cdf
|
diff --git a/repository/upload/repository.class.php b/repository/upload/repository.class.php
index <HASH>..<HASH> 100755
--- a/repository/upload/repository.class.php
+++ b/repository/upload/repository.class.php
@@ -13,7 +13,8 @@ class repository_upload extends repository {
global $SESSION, $action, $CFG;
parent::__construct($repositoryid, $context, $options);
if($action=='upload'){
- $this->info = repository_store_to_filepool('repo_upload_file');
+ $filepath = '/'.uniqid().'/';
+ $this->info = repository_store_to_filepool('repo_upload_file', 'user_draft', $filepath);
}
}
|
"MDL-<I>, use a random path for uploaded file"
|
moodle_moodle
|
train
|
0cada103079dfbe89fe5f66c2ea806c101babd79
|
diff --git a/test/test-cases.js b/test/test-cases.js
index <HASH>..<HASH> 100644
--- a/test/test-cases.js
+++ b/test/test-cases.js
@@ -48,6 +48,20 @@ module.exports = {
])
],
+ "complex class name": [
+ ".class\\.Name",
+ singleSelector([
+ { type: "class", name: "class\\.Name" }
+ ])
+ ],
+
+ "class name starting with number": [
+ ".\\5-5",
+ singleSelector([
+ { type: "class", name: "\\5-5" }
+ ])
+ ],
+
"id name": [
"#idName",
singleSelector([
|
added a few edge cases for the parser around escaped characters
|
css-modules_css-selector-tokenizer
|
train
|
ed06ba427c28e6408d35cda0b2acb93b7e1394ea
|
diff --git a/edalize/ghdl.py b/edalize/ghdl.py
index <HASH>..<HASH> 100644
--- a/edalize/ghdl.py
+++ b/edalize/ghdl.py
@@ -89,9 +89,10 @@ class Ghdl(Edatool):
ghdlimport = ""
vhdl_sources = ""
- # GHDL doesn't support the dot notation used by other tools (e.g.
- # my_lib.top_design) for the top level so work around this if the user
- # has specified the top level in this manner.
+ # GHDL versions older than 849a25e0 don't support the dot notation (e.g.
+ # my_lib.top_design) for the top level.
+ # Nonetheless, we unconditionally split the library and the primary unit,
+ # if the user specified the top level using the dot notation.
top = self.toplevel.split(".")
if len(top) > 2:
|
[GHDL] Dot notation for specifying my_lib.top_unit is supported
|
olofk_edalize
|
train
|
997a5437f5213a9afdf9261358b6803ff278a169
|
diff --git a/public/javascripts/provider_redhat.js b/public/javascripts/provider_redhat.js
index <HASH>..<HASH> 100644
--- a/public/javascripts/provider_redhat.js
+++ b/public/javascripts/provider_redhat.js
@@ -29,7 +29,8 @@ $(document).ready(function() {
onNodeShow: function(){$.sparkline_display_visible()}
});
- $('#products_table input[type="checkbox"]').bind('change', function() {
+
+ $('#products_table input[type="checkbox"]').live('change', function() {
KT.redhat_provider_page.checkboxChanged($(this));
});
|
Small fix to get the redhat enablement working in FF <I>
|
Katello_katello
|
train
|
059b77dd82dc78b52e6ca2169278ce2c3c8210de
|
diff --git a/lib/db.js b/lib/db.js
index <HASH>..<HASH> 100644
--- a/lib/db.js
+++ b/lib/db.js
@@ -1282,6 +1282,7 @@ db.bulk = function(list, options, callback)
// - start - the primary key to start the scan from
// - search - use search instead of select, for ElasticSearch,...
// - batch - if true rowCallback will be called with all rows from the batch, not every row individually, batch size is defined by the count property
+// - concurrency - how many rows to process at the same time, if not given proxess sequentially
// - noscan - if 1 no scan will be performed if no prmary keys are specified
// - fullscan - if 1 force to scan full table without using any primary key conditons, use all query properties for all records (DynamoDB)
// - useCapacity - triggers to use specific capacity, default is `read`
@@ -1305,6 +1306,7 @@ db.scan = function(table, query, options, rowCallback, endCallback)
options = lib.objClone(options);
options.count = lib.toNumber(options.count, { dflt: 100 });
+ options.concurrency = lib.toNumber(options.concurrency, { min: 0 });
var pool = this.getPool(table, options);
if (pool.configOptions.requireCapacity || options.useCapacity || options.factorCapacity) {
options.capacity = db.getCapacity(options.tableCapacity || table, { useCapacity: options.useCapacity || "read", factorCapacity: options.factorCapacity || 0.9 });
@@ -1325,6 +1327,11 @@ db.scan = function(table, query, options, rowCallback, endCallback)
options.nrows += rows.length;
if (options.batch) {
rowCallback(rows, next);
+ } else
+ if (options.concurrency) {
+ lib.forEachLimit(rows, options.concurrency, function(row, next2) {
+ rowCallback(row, next2);
+ }, next);
} else {
lib.forEachSeries(rows, function(row, next2) {
rowCallback(row, next2);
|
support concurrency in db scans
|
vseryakov_backendjs
|
train
|
a656e14e0dee10bb49289d9584c2b2aa01588398
|
diff --git a/src/main/java/com/j256/ormlite/stmt/StatementExecutor.java b/src/main/java/com/j256/ormlite/stmt/StatementExecutor.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/j256/ormlite/stmt/StatementExecutor.java
+++ b/src/main/java/com/j256/ormlite/stmt/StatementExecutor.java
@@ -618,9 +618,7 @@ public class StatementExecutor<T, ID> implements GenericRowMapper<String[]> {
saved = connectionSource.saveSpecialConnection(connection);
return doCallBatchTasks(connection, saved, callable);
} finally {
- if (saved) {
- connectionSource.clearSpecialConnection(connection);
- }
+ connectionSource.clearSpecialConnection(connection);
connectionSource.releaseConnection(connection);
localIsInBatchMode.set(false);
if (dao != null) {
diff --git a/src/test/java/com/j256/ormlite/stmt/StatementExecutorTest.java b/src/test/java/com/j256/ormlite/stmt/StatementExecutorTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/j256/ormlite/stmt/StatementExecutorTest.java
+++ b/src/test/java/com/j256/ormlite/stmt/StatementExecutorTest.java
@@ -79,6 +79,7 @@ public class StatementExecutorTest extends BaseCoreStmtTest {
expect(connectionSource.isSingleConnection("foo")).andReturn(false);
expect(connectionSource.getReadWriteConnection("foo")).andReturn(connection);
expect(connectionSource.saveSpecialConnection(connection)).andReturn(false);
+ connectionSource.clearSpecialConnection(connection);
connectionSource.releaseConnection(connection);
expect(connection.isAutoCommitSupported()).andReturn(false);
@@ -106,6 +107,7 @@ public class StatementExecutorTest extends BaseCoreStmtTest {
expect(connectionSource.isSingleConnection("foo")).andReturn(false);
expect(connectionSource.getReadWriteConnection("foo")).andReturn(connection);
expect(connectionSource.saveSpecialConnection(connection)).andReturn(false);
+ connectionSource.clearSpecialConnection(connection);
connectionSource.releaseConnection(connection);
expect(connection.isAutoCommitSupported()).andReturn(true);
@@ -134,6 +136,7 @@ public class StatementExecutorTest extends BaseCoreStmtTest {
expect(connectionSource.isSingleConnection("foo")).andReturn(false);
expect(connectionSource.getReadWriteConnection("foo")).andReturn(connection);
expect(connectionSource.saveSpecialConnection(connection)).andReturn(false);
+ connectionSource.clearSpecialConnection(connection);
connectionSource.releaseConnection(connection);
expect(connection.isAutoCommitSupported()).andReturn(true);
@@ -164,6 +167,7 @@ public class StatementExecutorTest extends BaseCoreStmtTest {
expect(connectionSource.isSingleConnection("foo")).andReturn(true);
expect(connectionSource.getReadWriteConnection("foo")).andReturn(connection);
expect(connectionSource.saveSpecialConnection(connection)).andReturn(false);
+ connectionSource.clearSpecialConnection(connection);
connectionSource.releaseConnection(connection);
expect(connection.isAutoCommitSupported()).andReturn(true);
@@ -194,6 +198,7 @@ public class StatementExecutorTest extends BaseCoreStmtTest {
expect(connectionSource.isSingleConnection("foo")).andReturn(false);
expect(connectionSource.getReadWriteConnection("foo")).andReturn(connection);
expect(connectionSource.saveSpecialConnection(connection)).andReturn(false);
+ connectionSource.clearSpecialConnection(connection);
connectionSource.releaseConnection(connection);
expect(connection.isAutoCommitSupported()).andReturn(true);
|
Always clear special connection on batch tasks.
|
j256_ormlite-core
|
train
|
3fcdb9f8f972232d192bac3125494614e4f61efd
|
diff --git a/polyaxon/api/experiments/views.py b/polyaxon/api/experiments/views.py
index <HASH>..<HASH> 100644
--- a/polyaxon/api/experiments/views.py
+++ b/polyaxon/api/experiments/views.py
@@ -498,15 +498,14 @@ class ExperimentLogsView(ExperimentViewMixin, RetrieveAPIView, PostAPIView):
def post(self, request, *args, **kwargs):
experiment = self.get_experiment()
- log_lines = request.data.get('log_lines')
- if not log_lines or not isinstance(log_lines, str):
- raise ValidationError('Logs handler expects `log_lines`.')
+ if not request.data or not isinstance(request.data, str):
+ raise ValidationError('Logs handler expects `data` to be a string.')
celery_app.send_task(
LogsCeleryTasks.LOGS_HANDLE_EXPERIMENT_JOB,
kwargs={
'experiment_name': experiment.unique_name,
'experiment_uuid': experiment.uuid.hex,
- 'log_lines': log_lines
+ 'log_lines': request.data
})
return Response(status=status.HTTP_200_OK)
diff --git a/tests/test_experiments/test_views.py b/tests/test_experiments/test_views.py
index <HASH>..<HASH> 100644
--- a/tests/test_experiments/test_views.py
+++ b/tests/test_experiments/test_views.py
@@ -1709,12 +1709,12 @@ class TestExperimentLogsViewV1(BaseViewTest):
resp = self.auth_client.post(self.url)
assert resp.status_code == status.HTTP_400_BAD_REQUEST
- data = {'log_lines': 'logs here'}
+ data = 'logs here'
with patch('logs_handlers.tasks.logs_handle_experiment_job.apply_async') as mock_fct:
resp = self.auth_client.post(self.url, data)
- assert mock_fct.call_count == 1
assert resp.status_code == status.HTTP_200_OK
+ assert mock_fct.call_count == 1
@pytest.mark.experiments_mark
diff --git a/tests/utils.py b/tests/utils.py
index <HASH>..<HASH> 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -2,6 +2,7 @@ import datetime
import json
import tempfile
import uuid
+from collections import Mapping
from urllib.parse import urlparse
@@ -39,6 +40,8 @@ class BaseClient(Client):
data = {}
def validate_data(dvalues):
+ if not isinstance(dvalues, Mapping):
+ return
for key, value in dvalues.items():
# Fix UUIDs for convenience
if isinstance(value, uuid.UUID):
|
Send logs directly as text blob
|
polyaxon_polyaxon
|
train
|
24c012291ab455cffc410fd239a9f39e0617a8f2
|
diff --git a/src/main/java/org/sfm/jdbc/JdbcMapperBuilder.java b/src/main/java/org/sfm/jdbc/JdbcMapperBuilder.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/sfm/jdbc/JdbcMapperBuilder.java
+++ b/src/main/java/org/sfm/jdbc/JdbcMapperBuilder.java
@@ -81,7 +81,6 @@ public final class JdbcMapperBuilder<T> {
ResultSet.class,
classMeta,
getterFactory,
- new FieldMapperFactory<ResultSet, JdbcColumnKey>(getterFactory),
columnDefinitions,
propertyNameMatcherFactory,
mapperBuilderErrorHandler,
diff --git a/src/main/java/org/sfm/jooq/JooqMapperBuilder.java b/src/main/java/org/sfm/jooq/JooqMapperBuilder.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/sfm/jooq/JooqMapperBuilder.java
+++ b/src/main/java/org/sfm/jooq/JooqMapperBuilder.java
@@ -31,8 +31,6 @@ public class JooqMapperBuilder<E> {
Record.class,
classMeta,
new RecordGetterFactory<Record>(),
- new FieldMapperFactory<Record, JooqFieldKey>(
- new RecordGetterFactory<Record>()),
new IdentityFieldMapperColumnDefinitionProvider<JooqFieldKey, Record>(),
new DefaultPropertyNameMatcherFactory(),
new RethrowMapperBuilderErrorHandler(),
diff --git a/src/main/java/org/sfm/map/impl/FieldMapperMapperBuilder.java b/src/main/java/org/sfm/map/impl/FieldMapperMapperBuilder.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/sfm/map/impl/FieldMapperMapperBuilder.java
+++ b/src/main/java/org/sfm/map/impl/FieldMapperMapperBuilder.java
@@ -45,7 +45,6 @@ public final class FieldMapperMapperBuilder<S, T, K extends FieldKey<K>> {
final Class<S> source,
final ClassMeta<T> classMeta,
GetterFactory<S, K> getterFactory,
- FieldMapperFactory<S, K> fieldMapperFactory,
ColumnDefinitionProvider<FieldMapperColumnDefinition<K, S>, K> columnDefinitions,
PropertyNameMatcherFactory propertyNameMatcherFactory,
MapperBuilderErrorHandler mapperBuilderErrorHandler,
@@ -57,7 +56,7 @@ public final class FieldMapperMapperBuilder<S, T, K extends FieldKey<K>> {
this.asmMapperNbFieldsLimit = asmMapperNbFieldsLimit;
this.source = requireNonNull("source", source);
this.getterFactory = requireNonNull("getterFactory", getterFactory);
- this.fieldMapperFactory = requireNonNull("fieldMapperFactory", fieldMapperFactory);
+ this.fieldMapperFactory = new FieldMapperFactory<S, K>(getterFactory);
this.propertyMappingsBuilder = new PropertyMappingsBuilder<T, K, FieldMapperColumnDefinition<K, S>>(classMeta, propertyNameMatcherFactory, mapperBuilderErrorHandler);
this.propertyNameMatcherFactory = requireNonNull("propertyNameMatcherFactory", propertyNameMatcherFactory);
this.target = requireNonNull("classMeta", classMeta).getType();
@@ -342,7 +341,6 @@ public final class FieldMapperMapperBuilder<S, T, K extends FieldKey<K>> {
source,
classMeta,
getterFactory,
- fieldMapperFactory,
columnDefinitions,
propertyNameMatcherFactory,
mapperBuilderErrorHandler,
diff --git a/src/main/java/org/sfm/querydsl/QueryDslMapperBuilder.java b/src/main/java/org/sfm/querydsl/QueryDslMapperBuilder.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/sfm/querydsl/QueryDslMapperBuilder.java
+++ b/src/main/java/org/sfm/querydsl/QueryDslMapperBuilder.java
@@ -31,7 +31,6 @@ public final class QueryDslMapperBuilder<T> {
Tuple.class,
classMeta,
new TupleGetterFactory(),
- new FieldMapperFactory<Tuple, TupleElementKey>(new TupleGetterFactory()),
new IdentityFieldMapperColumnDefinitionProvider<TupleElementKey, Tuple>(),
new DefaultPropertyNameMatcherFactory(),
new RethrowMapperBuilderErrorHandler(),
|
remove need to pass fieldmapperfactory
|
arnaudroger_SimpleFlatMapper
|
train
|
020325617129b6ae9b25da1126ba0f98b044bb1b
|
diff --git a/interop-testing/src/main/java/io/grpc/testing/integration/TestServiceClient.java b/interop-testing/src/main/java/io/grpc/testing/integration/TestServiceClient.java
index <HASH>..<HASH> 100644
--- a/interop-testing/src/main/java/io/grpc/testing/integration/TestServiceClient.java
+++ b/interop-testing/src/main/java/io/grpc/testing/integration/TestServiceClient.java
@@ -396,9 +396,10 @@ public class TestServiceClient {
private class Tester extends AbstractInteropTest {
@Override
protected ManagedChannelBuilder<?> createChannelBuilder() {
+ boolean useGeneric = false;
ChannelCredentials channelCredentials;
if (customCredentialsType != null) {
- useOkHttp = false; // Retain old behavior; avoids erroring if incompatible
+ useGeneric = true; // Retain old behavior; avoids erroring if incompatible
if (customCredentialsType.equals("google_default_credentials")) {
channelCredentials = GoogleDefaultChannelCredentials.create();
} else if (customCredentialsType.equals("compute_engine_channel_creds")) {
@@ -409,7 +410,7 @@ public class TestServiceClient {
}
} else if (useAlts) {
- useOkHttp = false; // Retain old behavior; avoids erroring if incompatible
+ useGeneric = true; // Retain old behavior; avoids erroring if incompatible
channelCredentials = AltsChannelCredentials.create();
} else if (useTls) {
@@ -442,6 +443,14 @@ public class TestServiceClient {
channelCredentials = InsecureChannelCredentials.create();
}
}
+ if (useGeneric) {
+ ManagedChannelBuilder<?> channelBuilder =
+ Grpc.newChannelBuilderForAddress(serverHost, serverPort, channelCredentials);
+ if (serverHostOverride != null) {
+ channelBuilder.overrideAuthority(serverHostOverride);
+ }
+ return channelBuilder;
+ }
if (!useOkHttp) {
NettyChannelBuilder nettyBuilder =
NettyChannelBuilder.forAddress(serverHost, serverPort, channelCredentials)
|
interop-testing: Avoid alts incompatibility with netty
alts requires netty-shaded, not netty.
|
grpc_grpc-java
|
train
|
7d897a7cee757b4d021c7d0734018c15acca4363
|
diff --git a/library/CM/Model/Abstract.php b/library/CM/Model/Abstract.php
index <HASH>..<HASH> 100644
--- a/library/CM/Model/Abstract.php
+++ b/library/CM/Model/Abstract.php
@@ -650,11 +650,4 @@ abstract class CM_Model_Abstract extends CM_Class_Abstract implements CM_Compara
public static function fromArray(array $data) {
return self::factoryGeneric($data['_type'], $data['_id']);
}
-
- /**
- * @return CM_ModelAsset_Abstract[]
- */
- public function getAssets() {
- return $this->_assets;
- }
}
|
Removed legacy getAssets
|
cargomedia_cm
|
train
|
7c571383dc906679faa9dc1570d1dff66f0cd02c
|
diff --git a/ipyrad/core/params.py b/ipyrad/core/params.py
index <HASH>..<HASH> 100644
--- a/ipyrad/core/params.py
+++ b/ipyrad/core/params.py
@@ -417,6 +417,15 @@ class Params(object):
# returns string values as a tuple ("", "") or ("",)
value = tuplecheck(value, str)
+ # Fix a weird bug which only shows up in step 7 during edge trim
+ # The call to tuplecheck returns different values if you have one
+ # overhang sequence and you do or don't include a comma after it.
+ # This makes it so that whether you have a comma after the sequence
+ # the values are the same. This is super hax, but I didn't want to
+ # monkey with the tuplecheck code because it's used all over the place.
+ if len(value) == 1:
+ value = (value[0], '')
+
# expand GBS for user if they set only one cutter
if (self.datatype == "GBS") & (len(value) == 1):
value = (value[0], value[0])
|
Fix a nasty stupid bug setting the overhang sequence
|
dereneaton_ipyrad
|
train
|
50efd0cf8a617efd8a98130004335bd48af84b51
|
diff --git a/request.js b/request.js
index <HASH>..<HASH> 100644
--- a/request.js
+++ b/request.js
@@ -116,6 +116,10 @@ TChannelRequest.prototype.shouldRetry = function shouldRetry(err, res, arg2, arg
}
}
+ if (!res.ok && self.options.shouldApplicationRetry) {
+ return self.options.shouldApplicationRetry(self, res, arg2, arg3);
+ }
+
return false;
};
|
TChannelRequest: add application retry support
|
uber_tchannel-node
|
train
|
ca40642c08b48a52f2df162d20cdfb3d436bf7d7
|
diff --git a/luminoso_api/upload.py b/luminoso_api/upload.py
index <HASH>..<HASH> 100644
--- a/luminoso_api/upload.py
+++ b/luminoso_api/upload.py
@@ -49,7 +49,7 @@ def upload_stream(stream, server, account, projname, reader_dict,
project.wait_for(final_job_id)
def upload_file(filename, server, account, projname, reader_dict,
- append=False, stage=False):
+ append=False, stage=False, vectorize_only=False):
"""
Upload a file to Luminoso with the given account and project name.
@@ -59,7 +59,8 @@ def upload_file(filename, server, account, projname, reader_dict,
"""
stream = transcode_to_stream(filename)
upload_stream(stream_json_lines(stream), server, account, projname,
- reader_dict, append=append, stage=stage)
+ reader_dict, append=append, stage=stage,
+ vectorize_only=vectorize_only)
def main():
"""
|
let the vectorize_only option through upload_file
|
LuminosoInsight_luminoso-api-client-python
|
train
|
abcd6e263ae36ea9a202ef2b43486920f9e32882
|
diff --git a/serv/init/systemd.py b/serv/init/systemd.py
index <HASH>..<HASH> 100644
--- a/serv/init/systemd.py
+++ b/serv/init/systemd.py
@@ -168,7 +168,10 @@ class SystemD(Base):
def is_system_exists():
try:
- sh.systemctl('--version')
- return True
- except sh.CommandNotFound:
+ try:
+ sh.systemctl('--version')
+ return True
+ except sh.CommandNotFound:
+ return False
+ except NameError:
return False
diff --git a/serv/init/upstart.py b/serv/init/upstart.py
index <HASH>..<HASH> 100644
--- a/serv/init/upstart.py
+++ b/serv/init/upstart.py
@@ -70,7 +70,10 @@ class Upstart(Base):
def is_system_exists():
try:
- sh.initctl.version()
- return True
- except sh.CommandNotFound:
+ try:
+ sh.initctl.version()
+ return True
+ except sh.CommandNotFound:
+ return False
+ except NameError:
return False
diff --git a/tests/test_deploy.py b/tests/test_deploy.py
index <HASH>..<HASH> 100644
--- a/tests/test_deploy.py
+++ b/tests/test_deploy.py
@@ -67,24 +67,24 @@ class TestDeployReal:
utils.get_tmp_dir(system, self.service_name),
ignore_errors=True)
- @pytest.mark.skipif(utils.IS_WIN, reason='Irrelevant on Windows')
@pytest.mark.skipif(
not init.systemd.is_system_exists(),
reason='Systemd not found on this system.')
+ @pytest.mark.skipif(utils.IS_WIN, reason='Irrelevant on Windows')
def test_systemd(self):
self._test_deploy_remove('systemd')
- @pytest.mark.skipif(utils.IS_WIN, reason='Irrelevant on Windows')
@pytest.mark.skipif(
not init.upstart.is_system_exists(),
reason='Upstart not found on this system.')
+ @pytest.mark.skipif(utils.IS_WIN, reason='Irrelevant on Windows')
def test_upstart(self):
self._test_deploy_remove('upstart')
- @pytest.mark.skipif(utils.IS_WIN, reason='Irrelevant on Windows')
@pytest.mark.skipif(
not init.sysv.is_system_exists(),
reason='SysV not found on this system.')
+ @pytest.mark.skipif(utils.IS_WIN, reason='Irrelevant on Windows')
def test_sysv(self):
self._test_deploy_remove('sysv')
|
Fix ordering of skipping tests since some skips require testing only linux enabled stuff
|
nir0s_serv
|
train
|
893d228e5e0b6efa8c3614128ff611b30b9a88b2
|
diff --git a/datastore/google/cloud/datastore/__init__.py b/datastore/google/cloud/datastore/__init__.py
index <HASH>..<HASH> 100644
--- a/datastore/google/cloud/datastore/__init__.py
+++ b/datastore/google/cloud/datastore/__init__.py
@@ -18,17 +18,17 @@ You'll typically use these to get started with the API:
.. doctest:: constructors
- >>> from google.cloud import datastore
- >>>
- >>> client = datastore.Client()
- >>> key = client.key('EntityKind', 1234)
- >>> key
- <Key('EntityKind', 1234), project=...>
- >>> entity = datastore.Entity(key)
- >>> entity['answer'] = 42
- >>> entity
- <Entity('EntityKind', 1234) {'answer': 42}>
- >>> query = client.query(kind='EntityKind')
+ >>> from google.cloud import datastore
+ >>>
+ >>> client = datastore.Client()
+ >>> key = client.key('EntityKind', 1234)
+ >>> key
+ <Key('EntityKind', 1234), project=...>
+ >>> entity = datastore.Entity(key)
+ >>> entity['answer'] = 42
+ >>> entity
+ <Entity('EntityKind', 1234) {'answer': 42}>
+ >>> query = client.query(kind='EntityKind')
The main concepts with this API are:
|
Changing doctest indent from 2->3 spaces to be uniform.
|
googleapis_google-cloud-python
|
train
|
0bf15a50f3ce3d08eef6da166f5723b99c9b5ef4
|
diff --git a/gist/gist.py b/gist/gist.py
index <HASH>..<HASH> 100644
--- a/gist/gist.py
+++ b/gist/gist.py
@@ -9,7 +9,7 @@ import shutil
import tarfile
import tempfile
-__version__ = '0.2.0'
+__version__ = '0.2.1'
requests.packages.urllib3.disable_warnings()
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,6 @@
#!/usr/bin/env python
import setuptools
-import gist
def dependencies():
packages = ['tox']
@@ -13,7 +12,7 @@ def dependencies():
setuptools.setup(
name='python-gist',
- version=gist.__version__,
+ version='0.2.1',
description='Manage github gists',
license='MIT',
long_description=(open('README.rst').read()),
|
gist: fixed installation error
The problem with retrieving the version number from the gist package in the
setup.py script is that gist has dependencies on packages that may not have been
installed yet. This causes the installation to fail.
|
jdowner_gist
|
train
|
1544f5147125856e4de066e566820bd8548be007
|
diff --git a/source/rafcon/gui/mygaphas/view.py b/source/rafcon/gui/mygaphas/view.py
index <HASH>..<HASH> 100644
--- a/source/rafcon/gui/mygaphas/view.py
+++ b/source/rafcon/gui/mygaphas/view.py
@@ -31,6 +31,7 @@ class ExtendedGtkView(GtkView, Observer):
Observer.__init__(self)
self._selection = state_machine_m.selection
self.observe_model(self._selection)
+ self.observe_model(state_machine_m.root_state)
self._bounding_box_painter = BoundingBoxPainter(self)
self.graphical_editor = graphical_editor_v
@@ -154,6 +155,12 @@ class ExtendedGtkView(GtkView, Observer):
pass
super(ExtendedGtkView, self).queue_draw_item(*gaphas_items)
+ @Observer.observe("destruction_signal", signal=True)
+ def _on_root_state_destruction(self, root_state_m, signal_name, signal_msg):
+ """Ignore future selection changes when state machine is being destroyed"""
+ self.relieve_model(self._selection)
+ self.relieve_model(root_state_m)
+
@Observer.observe("selection_changed_signal", signal=True)
def _on_selection_changed_externally(self, selection_m, signal_name, signal_msg):
selected_items = self._get_selected_items()
|
fix(gaphas): Expose event on destruction
If a model had been selected when the GUI was closed, that element was
drawn, as it was deselected by the Selection. This had led (under some
circumstances) to exception during the close operation.
This is now fixed by relieving the Selection in the ExtendedGTKView when
then root state is being destroyed.
|
DLR-RM_RAFCON
|
train
|
b783b70a50bf7d419bcfe8e2ca5cee81a8069a6b
|
diff --git a/ternary/ternary_axes_subplot.py b/ternary/ternary_axes_subplot.py
index <HASH>..<HASH> 100644
--- a/ternary/ternary_axes_subplot.py
+++ b/ternary/ternary_axes_subplot.py
@@ -184,7 +184,7 @@ class TernaryAxesSubplot(object):
"""
if not position:
- position = (1./2, offset, 1./2)
+ position = (1./2, -offset, 1./2)
self._labels["bottom"] = (label, position, rotation, kwargs)
def annotate(self, text, position, **kwargs):
|
Changed offset behaviour of bottom_axis_label
Added a minus sign in the definition of position in bottom_axis_label()
so that increasing the value of offset moves the label down, further
below the bottom axis. This seemed more intuitive to me because when
increasing the value of offset for the other two axes, the labels move
further away from the axes i.e. away from the centre of the simplex.
|
marcharper_python-ternary
|
train
|
762e50e8122bb007866006cc8a429a3e229183f3
|
diff --git a/src/Support/Minutes.php b/src/Support/Minutes.php
index <HASH>..<HASH> 100644
--- a/src/Support/Minutes.php
+++ b/src/Support/Minutes.php
@@ -17,7 +17,16 @@ class Minutes {
{
$this->minutes = $minutes;
- $this->calculateStartEnd();
+ if ($minutes instanceof Minutes)
+ {
+ $this->start = $minutes->getStart();
+
+ $this->end = $minutes->getEnd();
+ }
+ else
+ {
+ $this->calculateStartEnd();
+ }
}
public function getStart()
@@ -51,4 +60,8 @@ class Minutes {
$this->end = Carbon::now()->setTime(23,59,59);
}
+ public static function make($minutes)
+ {
+ return new static($minutes);
+ }
}
diff --git a/src/Tracker.php b/src/Tracker.php
index <HASH>..<HASH> 100644
--- a/src/Tracker.php
+++ b/src/Tracker.php
@@ -27,6 +27,7 @@ use PragmaRX\Tracker\Support\Database\Migrator as Migrator;
use Illuminate\Http\Request;
use Illuminate\Routing\Router;
use Illuminate\Log\Writer as Logger;
+use PragmaRX\Tracker\Support\Minutes;
class Tracker
{
@@ -313,7 +314,7 @@ class Tracker
public function sessions($minutes = 1440, $results = true)
{
- return $this->dataRepositoryManager->getLastSessions($minutes, $results);
+ return $this->dataRepositoryManager->getLastSessions(Minutes::make($minutes), $results);
}
public function sessionLog($uuid, $results = true)
@@ -323,27 +324,27 @@ class Tracker
public function pageViews($minutes, $results = true)
{
- return $this->dataRepositoryManager->pageViews($minutes, $results);
+ return $this->dataRepositoryManager->pageViews(Minutes::make($minutes), $results);
}
public function pageViewsByCountry($minutes, $results = true)
{
- return $this->dataRepositoryManager->pageViewsByCountry($minutes, $results);
+ return $this->dataRepositoryManager->pageViewsByCountry(Minutes::make($minutes), $results);
}
public function users($minutes, $results = true)
{
- return $this->dataRepositoryManager->users($minutes, $results);
+ return $this->dataRepositoryManager->users(Minutes::make($minutes), $results);
}
public function events($minutes, $results = true)
{
- return $this->dataRepositoryManager->events($minutes, $results);
+ return $this->dataRepositoryManager->events(Minutes::make($minutes), $results);
}
public function errors($minutes, $results = true)
{
- return $this->dataRepositoryManager->errors($minutes, $results);
+ return $this->dataRepositoryManager->errors(Minutes::make($minutes), $results);
}
public function currentSession()
@@ -379,6 +380,11 @@ class Tracker
public function logByRouteName($name, $minutes = null)
{
+ if ($minutes)
+ {
+ $minutes = Minutes::make($minutes);
+ }
+
return $this->dataRepositoryManager->logByRouteName($name, $minutes);
}
|
Add conversion from number minutes to class Minute
|
antonioribeiro_tracker
|
train
|
1688b043db26e8a70d6e067b502302a597959878
|
diff --git a/xchange-kraken/src/main/java/org/knowm/xchange/kraken/KrakenAdapters.java b/xchange-kraken/src/main/java/org/knowm/xchange/kraken/KrakenAdapters.java
index <HASH>..<HASH> 100644
--- a/xchange-kraken/src/main/java/org/knowm/xchange/kraken/KrakenAdapters.java
+++ b/xchange-kraken/src/main/java/org/knowm/xchange/kraken/KrakenAdapters.java
@@ -119,13 +119,16 @@ public class KrakenAdapters {
orderStatus = OrderStatus.PARTIALLY_FILLED;
}
+ Double time = krakenOrder.getOpenTimestamp() * 1000;//eg: "opentm":1519731205.9987
+ Date timestamp = new Date(time.longValue());
+
if(krakenOrder.getOrderDescription().getOrderType().equals(KrakenOrderType.LIMIT))
return new LimitOrder(
orderType,
krakenOrder.getVolume(),
currencyPair,
orderId,
- new Date(new Double(krakenOrder.getOpenTimestamp()).longValue()),
+ timestamp,
krakenOrder.getOrderDescription().getPrice(),
krakenOrder.getPrice(),
krakenOrder.getVolumeExecuted(),
@@ -139,7 +142,7 @@ public class KrakenAdapters {
krakenOrder.getVolume(),
currencyPair,
orderId,
- new Date(new Double(krakenOrder.getOpenTimestamp()).longValue()),
+ timestamp,
krakenOrder.getPrice(),
krakenOrder.getVolumeExecuted(),
krakenOrder.getFee(),
|
[kraken] fixed timestamp parse for open orders
|
knowm_XChange
|
train
|
0677d17d9d3848b8a04f74ef653b240b4a26e3c9
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,7 @@ unreleased
----------
- Change Google Pay button to black style to better match [Google's brand guidelines](https://developers.google.com/pay/api/web/guides/brand-guidelines)
- Allow passing in [button options](https://developers.google.com/pay/api/web/reference/object#ButtonOptions) to Google Pay configuration
+- Fix issue where Drop-in would emit `noPaymentMethodRequestable` and `paymentMethodRequestable` right after tokenization
1.13.0
------
diff --git a/src/views/payment-sheet-views/card-view.js b/src/views/payment-sheet-views/card-view.js
index <HASH>..<HASH> 100644
--- a/src/views/payment-sheet-views/card-view.js
+++ b/src/views/payment-sheet-views/card-view.js
@@ -394,13 +394,13 @@ CardView.prototype.tokenize = function () {
resolve(payload);
classlist.remove(self.element, 'braintree-sheet--tokenized');
}, 0);
- self._isTokenizing = false;
};
transitionHelper.onTransitionEnd(self.element, 'max-height', transitionCallback);
setTimeout(function () {
self.allowUserAction();
+ self._isTokenizing = false;
}, constants.CHANGE_ACTIVE_PAYMENT_METHOD_TIMEOUT);
classlist.add(self.element, 'braintree-sheet--tokenized');
diff --git a/test/unit/views/payment-sheet-views/card-view.js b/test/unit/views/payment-sheet-views/card-view.js
index <HASH>..<HASH> 100644
--- a/test/unit/views/payment-sheet-views/card-view.js
+++ b/test/unit/views/payment-sheet-views/card-view.js
@@ -2219,7 +2219,7 @@ describe('CardView', function () {
setTimeout(function () {
expect(this.context._isTokenizing).to.equal(false);
done();
- }.bind(this), 100);
+ }.bind(this), 300);
}.bind(this));
});
|
Fix issue where Drop-in would emit requestable events after tokenization (#<I>)
* Fix issue where Drop-in would emit requestable events after tokenization
* Fix unit test
|
braintree_braintree-web-drop-in
|
train
|
f8daa2bc6d0abef6ed477b82ce46cc532e2ed02f
|
diff --git a/xchange-okcoin/src/main/java/com/xeiam/xchange/okcoin/OkCoinAdapters.java b/xchange-okcoin/src/main/java/com/xeiam/xchange/okcoin/OkCoinAdapters.java
index <HASH>..<HASH> 100644
--- a/xchange-okcoin/src/main/java/com/xeiam/xchange/okcoin/OkCoinAdapters.java
+++ b/xchange-okcoin/src/main/java/com/xeiam/xchange/okcoin/OkCoinAdapters.java
@@ -132,8 +132,8 @@ public final class OkCoinAdapters {
OkcoinFuturesFundsCross btcFunds = info.getBtcFunds();
OkcoinFuturesFundsCross ltcFunds = info.getLtcFunds();
- Wallet btcWallet = new Wallet(BTC, btcFunds.getAccountRights().add(btcFunds.getProfitReal()).add(btcFunds.getProfitUnreal()));
- Wallet ltcWallet = new Wallet(LTC, ltcFunds.getAccountRights().add(ltcFunds.getProfitReal()).add(ltcFunds.getProfitUnreal()));
+ Wallet btcWallet = new Wallet(BTC, btcFunds.getAccountRights());
+ Wallet ltcWallet = new Wallet(LTC, ltcFunds.getAccountRights());
return new AccountInfo(null, Arrays.asList(emptyUsdWallet, btcWallet, ltcWallet));
}
|
Account Equity already contains all realised and unrealised pnl
|
knowm_XChange
|
train
|
e5ee41f6892a68ecfda9fb30d9f31b09452b23da
|
diff --git a/colin/cli/colin.py b/colin/cli/colin.py
index <HASH>..<HASH> 100644
--- a/colin/cli/colin.py
+++ b/colin/cli/colin.py
@@ -116,24 +116,52 @@ def list_checks(ruleset, ruleset_file, debug, json, verbose):
if debug and verbose:
raise click.BadOptionUsage("Options '--debug' and '--verbose' cannot be used together.")
- checks = get_checks(ruleset_name=ruleset,
- ruleset_file=ruleset_file,
- logging_level=_get_log_level(debug=debug, verbose=verbose))
- _print_checks(checks=checks)
+ try:
+ if not debug:
+ logger.disabled = True
- if json:
- AbstractCheck.save_checks_to_json(file=json, checks=checks)
+ log_level = _get_log_level(debug=debug,
+ verbose=verbose)
+
+ checks = get_checks(ruleset_name=ruleset,
+ ruleset_file=ruleset_file,
+ logging_level=log_level)
+ _print_checks(checks=checks)
+
+ if json:
+ AbstractCheck.save_checks_to_json(file=json, checks=checks)
+ except ColinException as ex:
+ logger.error("An error occurred: %r", ex)
+ if debug:
+ raise
+ else:
+ raise click.ClickException(str(ex))
+ except Exception as ex:
+ logger.error("An error occurred: %r", ex)
+ if debug:
+ raise
+ else:
+ raise click.ClickException(str(ex))
@click.command(name="list-rulesets",
context_settings=CONTEXT_SETTINGS)
-def list_rulesets():
+@click.option('--debug', default=False, is_flag=True,
+ help="Enable debugging mode (debugging logs, full tracebacks).")
+def list_rulesets(debug):
"""
List available rulesets.
"""
- rulesets = get_rulesets()
- for r in rulesets:
- click.echo(r)
+ try:
+ rulesets = get_rulesets()
+ for r in rulesets:
+ click.echo(r)
+ except Exception as ex:
+ logger.error("An error occurred: %r", ex)
+ if debug:
+ raise
+ else:
+ raise click.ClickException(str(ex))
cli.add_command(check)
|
Handle errors in the subcommands
|
user-cont_colin
|
train
|
71379beab85df346a2e84e29a6a8bde7ae173c0a
|
diff --git a/lib/client.js b/lib/client.js
index <HASH>..<HASH> 100644
--- a/lib/client.js
+++ b/lib/client.js
@@ -242,6 +242,8 @@ Client.prototype.connect = function(cfg) {
self.emit('drain');
}).once('header', function(header) {
self._remoteVer = header.versions.software;
+ }).on('continue', function() {
+ self.emit('continue');
});
if (typeof cfg.hostVerifier === 'function'
diff --git a/lib/server.js b/lib/server.js
index <HASH>..<HASH> 100644
--- a/lib/server.js
+++ b/lib/server.js
@@ -168,6 +168,8 @@ function Client(stream, socket) {
self.emit('error', err);
}).on('drain', function() {
self.emit('drain');
+ }).on('continue', function() {
+ self.emit('continue');
});
var exchanges = 0,
|
lib: re-emit continue event for client and server
|
mscdex_ssh2
|
train
|
b9d79b1f5fde5c8933e841248261066d316b962e
|
diff --git a/activerecord/test/cases/adapter_test.rb b/activerecord/test/cases/adapter_test.rb
index <HASH>..<HASH> 100644
--- a/activerecord/test/cases/adapter_test.rb
+++ b/activerecord/test/cases/adapter_test.rb
@@ -92,7 +92,7 @@ module ActiveRecord
)
end
ensure
- ActiveRecord::Base.establish_connection 'arunit'
+ ActiveRecord::Base.establish_connection :arunit
end
end
end
diff --git a/railties/test/application/rake/dbs_test.rb b/railties/test/application/rake/dbs_test.rb
index <HASH>..<HASH> 100644
--- a/railties/test/application/rake/dbs_test.rb
+++ b/railties/test/application/rake/dbs_test.rb
@@ -153,7 +153,7 @@ module ApplicationTests
`rails generate model book title:string;
bundle exec rake db:migrate db:structure:dump db:test:load_structure`
ActiveRecord::Base.configurations = Rails.application.config.database_configuration
- ActiveRecord::Base.establish_connection 'test'
+ ActiveRecord::Base.establish_connection :test
require "#{app_path}/app/models/book"
#if structure is not loaded correctly, exception would be raised
assert Book.count, 0
|
using symbol instead of string in establish_connection
|
rails_rails
|
train
|
63f4a769c672fb05d4ab75e69ac8d77262b58f6c
|
diff --git a/discord/client.py b/discord/client.py
index <HASH>..<HASH> 100644
--- a/discord/client.py
+++ b/discord/client.py
@@ -388,8 +388,10 @@ class Client:
you should run it in an executor or schedule the coroutine to
be executed later using ``loop.create_task``.
- This function throws :exc:`ClientException` if called before
- logging in via :meth:`login`.
+ Raises
+ -------
+ ClientException
+ If this is called before :meth:`login` was invoked successfully.
"""
yield from self._make_websocket()
|
Documentation fixes in Client.connect.
|
Rapptz_discord.py
|
train
|
88cace4d58584318224dc0ab6ebd3c39c16fe133
|
diff --git a/core-bundle/src/Resources/contao/dca/tl_content.php b/core-bundle/src/Resources/contao/dca/tl_content.php
index <HASH>..<HASH> 100644
--- a/core-bundle/src/Resources/contao/dca/tl_content.php
+++ b/core-bundle/src/Resources/contao/dca/tl_content.php
@@ -178,7 +178,7 @@ $GLOBALS['TL_DCA']['tl_content'] = array
'inputType' => 'select',
'options_callback' => array('tl_content', 'getContentElements'),
'reference' => &$GLOBALS['TL_LANG']['CTE'],
- 'eval' => array('helpwizard'=>true, 'chosen'=>true, 'submitOnChange'=>true, 'gallery_types'=>array('gallery'), 'downloads_types'=>array('downloads')),
+ 'eval' => array('helpwizard'=>true, 'chosen'=>true, 'submitOnChange'=>true),
'sql' => "varchar(32) NOT NULL default ''"
),
'headline' => array
@@ -528,7 +528,11 @@ $GLOBALS['TL_DCA']['tl_content'] = array
'exclude' => true,
'inputType' => 'fileTree',
'eval' => array('multiple'=>true, 'fieldType'=>'checkbox', 'orderField'=>'orderSRC', 'files'=>true, 'mandatory'=>true),
- 'sql' => "blob NULL"
+ 'sql' => "blob NULL",
+ 'load_callback' => array
+ (
+ array('tl_content', 'setFileTreeFlags')
+ )
),
'orderSRC' => array
(
@@ -1522,6 +1526,30 @@ class tl_content extends Backend
/**
+ * Dynamically set the "isGallery" or "isDownloads" flag depending on the type
+ * @param mixed
+ * @param \DataContainer
+ * @return mixed
+ */
+ public function setFileTreeFlags($varValue, DataContainer $dc)
+ {
+ if ($dc->activeRecord)
+ {
+ if ($dc->activeRecord->type == 'gallery')
+ {
+ $GLOBALS['TL_DCA'][$dc->table]['fields'][$dc->field]['eval']['isGallery'] = true;
+ }
+ elseif ($dc->activeRecord->type == 'downloads')
+ {
+ $GLOBALS['TL_DCA'][$dc->table]['fields'][$dc->field]['eval']['isDownloads'] = true;
+ }
+ }
+
+ return $varValue;
+ }
+
+
+ /**
* Pre-fill the "alt" and "caption" fields with the file meta data
* @param mixed
* @param \DataContainer
diff --git a/core-bundle/src/Resources/contao/widgets/FileTree.php b/core-bundle/src/Resources/contao/widgets/FileTree.php
index <HASH>..<HASH> 100644
--- a/core-bundle/src/Resources/contao/widgets/FileTree.php
+++ b/core-bundle/src/Resources/contao/widgets/FileTree.php
@@ -103,8 +103,8 @@ class FileTree extends \Widget
$this->{$this->strOrderField} = array_filter(explode(',', $objRow->{$this->strOrderField}));
}
- $this->blnIsGallery = (isset($GLOBALS['TL_DCA'][$this->strTable]['fields']['type']['eval']['gallery_types']) && in_array($this->activeRecord->type, $GLOBALS['TL_DCA'][$this->strTable]['fields']['type']['eval']['gallery_types']));
- $this->blnIsDownloads = (isset($GLOBALS['TL_DCA'][$this->strTable]['fields']['type']['eval']['downloads_types']) && in_array($this->activeRecord->type, $GLOBALS['TL_DCA'][$this->strTable]['fields']['type']['eval']['downloads_types']));
+ $this->blnIsGallery = $GLOBALS['TL_DCA'][$this->strTable]['fields'][$this->strField]['eval']['isGallery'];
+ $this->blnIsDownloads = $GLOBALS['TL_DCA'][$this->strTable]['fields'][$this->strField]['eval']['isDownloads'];
}
|
[Core] Render the file tree view based on the eval flags (see #<I>)
|
contao_contao
|
train
|
f18f7795bf635706efdded186c0a08469ee058c7
|
diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -1116,10 +1116,13 @@
DCP.getAllUsers = function(callback) {
var client = this, servers = Object.keys(this.servers).filter(function(s) {
s = client.servers[s];
- return s.member_count !== Object.keys(s.members).length && (client.bot ? s.large : true);
+ if (s.members) return s.member_count !== Object.keys(s.members).length && (client.bot ? s.large : true);
});
- if (!servers[0]) return handleErrCB("There are no users to be collected", callback);
+ if (!servers[0]) {
+ this.emit('allUsers');
+ return handleErrCB("There are no users to be collected", callback);
+ }
if (!this.bot) send(this._ws, { op: 12, d: Object.keys(this.servers) } );
return getOfflineUsers(client, servers, callback);
|
Fix getAllUsers for unavailable servers, emit if received all users
|
izy521_discord.io
|
train
|
556245e2ce744ff75b2de2dd5b865bacecdafa6d
|
diff --git a/pandas/core/indexes/interval.py b/pandas/core/indexes/interval.py
index <HASH>..<HASH> 100644
--- a/pandas/core/indexes/interval.py
+++ b/pandas/core/indexes/interval.py
@@ -1250,15 +1250,9 @@ class IntervalIndex(IntervalMixin, Index):
first_nan_loc = np.arange(len(self))[self.isna()][0]
mask[first_nan_loc] = True
- lmiss = other.left.get_indexer_non_unique(self.left)[1]
- lmatch = np.setdiff1d(np.arange(len(self)), lmiss)
-
- for i in lmatch:
- potential = other.left.get_loc(self.left[i])
- if is_scalar(potential):
- if self.right[i] == other.right[potential]:
- mask[i] = True
- elif self.right[i] in other.right[potential]:
+ other_tups = set(zip(other.left, other.right))
+ for i, tup in enumerate(zip(self.left, self.right)):
+ if tup in other_tups:
mask[i] = True
return self[mask]
|
PERF: speed up IntervalIndex._intersection_non_unique by ~<I>x (#<I>)
|
pandas-dev_pandas
|
train
|
aad490186e2a09b1bdb448bae71c028cb3e6d475
|
diff --git a/lib/selections/belongs_to_selection.rb b/lib/selections/belongs_to_selection.rb
index <HASH>..<HASH> 100644
--- a/lib/selections/belongs_to_selection.rb
+++ b/lib/selections/belongs_to_selection.rb
@@ -93,7 +93,7 @@ module Selections
end
def predicate_method?(method)
- method[-1] == '?' && self.class.reflect_on_all_associations(:belongs_to).any? do |relationship|
+ method[-1] == '?' && self.class.reflect_on_all_associations.any? do |relationship|
relationship.options[:class_name] == 'Selection' && method.to_s.starts_with?(relationship.name.to_s)
end
end
@@ -115,7 +115,7 @@ module Selections
end
def scope_method?(method)
- self.reflect_on_all_associations(:belongs_to).any? do |relationship|
+ self.reflect_on_all_associations.any? do |relationship|
relationship.options[:class_name] == 'Selection' && method.to_s.starts_with?(relationship.name.to_s)
end
end
diff --git a/lib/selections/has_many_selections.rb b/lib/selections/has_many_selections.rb
index <HASH>..<HASH> 100644
--- a/lib/selections/has_many_selections.rb
+++ b/lib/selections/has_many_selections.rb
@@ -105,7 +105,7 @@ module Selections
end
def predicate_method?(method)
- method[-1] == '?' && self.class.reflect_on_all_associations(:has_many).any? do |relationship|
+ method[-1] == '?' && self.class.reflect_on_all_associations.any? do |relationship|
if ActiveRecord::VERSION::MAJOR > 4
relationship.options[:class_name] == 'Selection' && method.to_s.starts_with?(relationship.name.to_s.singularize)
else
@@ -131,7 +131,7 @@ module Selections
end
def scope_method?(method)
- self.reflect_on_all_associations(:has_many).any? do |relationship|
+ self.reflect_on_all_associations.any? do |relationship|
if ActiveRecord::VERSION::MAJOR > 4
relationship.options[:class_name] == 'Selection' && method.to_s.starts_with?(relationship.name.to_s.singularize)
else
|
Update method_missing methods for has_many and belongs_to
* The method_missing attribute is added to the same model for has_many and belongs_to so they need to be checking both relationship types
|
nigelr_selections
|
train
|
6e5124daf2aedf14ee5a4947372d0f8fba96b9ee
|
diff --git a/lib/linked_in/api/query_methods.rb b/lib/linked_in/api/query_methods.rb
index <HASH>..<HASH> 100644
--- a/lib/linked_in/api/query_methods.rb
+++ b/lib/linked_in/api/query_methods.rb
@@ -43,6 +43,11 @@ module LinkedIn
simple_query(path, options)
end
+ def group(options = {})
+ path = "#{group_path(options)}"
+ simple_query(path, options)
+ end
+
def shares(options={})
path = "#{person_path(options)}/network/updates"
simple_query(path, {:type => "SHAR", :scope => "self"}.merge(options))
@@ -66,7 +71,7 @@ module LinkedIn
if options.delete(:public)
path +=":public"
elsif fields
- path +=":(#{fields.map{ |f| f.to_s.gsub("_","-") }.join(',')})"
+ path +=":(#{build_fields_params(fields)})"
end
headers = options.delete(:headers) || {}
@@ -76,6 +81,18 @@ module LinkedIn
Mash.from_json(get(path, headers))
end
+ def build_fields_params(fields)
+ if fields.is_a?(Hash) && fields.present?
+ hash_array = []
+ fields.each do |index, value|
+ hash_array << "#{index}:(#{build_fields_params(value)})"
+ end
+ hash_array.join(',')
+ else
+ fields.map{ |f| f.is_a?(Hash) ? build_fields_params(f) : f.to_s.gsub("_","-") }.join(',')
+ end
+ end
+
def person_path(options)
path = "/people/"
if id = options.delete(:id)
@@ -87,6 +104,17 @@ module LinkedIn
end
end
+ def group_path(options)
+ path = "/groups/"
+ if id = options.delete(:id)
+ path += id
+ elsif url = options.delete(:url)
+ path += "url=#{CGI.escape(url)}"
+ else
+ path += "~"
+ end
+ end
+
def company_path(options)
path = "/companies"
diff --git a/lib/linked_in/api/update_methods.rb b/lib/linked_in/api/update_methods.rb
index <HASH>..<HASH> 100644
--- a/lib/linked_in/api/update_methods.rb
+++ b/lib/linked_in/api/update_methods.rb
@@ -9,6 +9,11 @@ module LinkedIn
post(path, defaults.merge(share).to_json, "Content-Type" => "application/json")
end
+ def group_share(group_id, share)
+ path = "/groups/#{group_id}/posts"
+ post(path, share.to_json, "Content-Type" => "application/json")
+ end
+
def join_group(group_id)
path = "/people/~/group-memberships/#{group_id}"
body = {'membership-state' => {'code' => 'member' }}
@@ -51,13 +56,13 @@ module LinkedIn
def send_message(subject, body, recipient_paths)
path = "/people/~/mailbox"
-
+
message = {
- 'subject' => subject,
+ 'subject' => subject,
'body' => body,
'recipients' => {
- 'values' => recipient_paths.map do |profile_path|
- { 'person' => { '_path' => "/people/#{profile_path}" } }
+ 'values' => recipient_paths.map do |profile_path|
+ { 'person' => { '_path' => "/people/#{profile_path}" } }
end
}
}
|
modified the query methods to support groups as well as nested field hashes/objects.
Modified the update methods to be able to share to a group
|
hexgnu_linkedin
|
train
|
a24596989d29052c1028dfbc61de81f577c33ec9
|
diff --git a/extdirectspring-demo/src/main/java/ch/ralscha/extdirectspring/demo/MyStringHttpMessageConverter.java b/extdirectspring-demo/src/main/java/ch/ralscha/extdirectspring/demo/MyStringHttpMessageConverter.java
index <HASH>..<HASH> 100644
--- a/extdirectspring-demo/src/main/java/ch/ralscha/extdirectspring/demo/MyStringHttpMessageConverter.java
+++ b/extdirectspring-demo/src/main/java/ch/ralscha/extdirectspring/demo/MyStringHttpMessageConverter.java
@@ -27,11 +27,13 @@ import org.springframework.util.FileCopyUtils;
public class MyStringHttpMessageConverter extends StringHttpMessageConverter {
+ private static final Charset UTF_8 = Charset.forName("UTF-8");
+
@Override
protected String readInternal(Class clazz, HttpInputMessage inputMessage) throws IOException {
Charset charset = getContentTypeCharset(inputMessage.getHeaders().getContentType());
String s = FileCopyUtils.copyToString(new InputStreamReader(inputMessage.getBody(), charset));
- return new String(s.getBytes(charset));
+ return new String(s.getBytes(charset), UTF_8);
}
private Charset getContentTypeCharset(MediaType contentType) {
|
trying to fix the encoding problem
|
ralscha_extdirectspring
|
train
|
0fc0397d779d96879d7b903c3fa1b9bd53e490e3
|
diff --git a/xds/internal/balancer/cdsbalancer/cdsbalancer.go b/xds/internal/balancer/cdsbalancer/cdsbalancer.go
index <HASH>..<HASH> 100644
--- a/xds/internal/balancer/cdsbalancer/cdsbalancer.go
+++ b/xds/internal/balancer/cdsbalancer/cdsbalancer.go
@@ -78,6 +78,7 @@ func (cdsBB) Build(cc balancer.ClientConn, opts balancer.BuildOptions) balancer.
bOpts: opts,
updateCh: buffer.NewUnbounded(),
closed: grpcsync.NewEvent(),
+ done: grpcsync.NewEvent(),
cancelWatch: func() {}, // No-op at this point.
xdsHI: xdsinternal.NewHandshakeInfo(nil, nil),
}
@@ -181,6 +182,7 @@ type cdsBalancer struct {
clusterToWatch string
logger *grpclog.PrefixLogger
closed *grpcsync.Event
+ done *grpcsync.Event
// The certificate providers are cached here to that they can be closed when
// a new provider is to be created.
@@ -380,9 +382,6 @@ func (b *cdsBalancer) run() {
case *watchUpdate:
b.handleWatchUpdate(update)
}
-
- // Close results in cancellation of the CDS watch and closing of the
- // underlying edsBalancer and is the only way to exit this goroutine.
case <-b.closed.Done():
b.cancelWatch()
b.cancelWatch = func() {}
@@ -392,8 +391,8 @@ func (b *cdsBalancer) run() {
b.edsLB = nil
}
b.xdsClient.Close()
- // This is the *ONLY* point of return from this function.
b.logger.Infof("Shutdown")
+ b.done.Fire()
return
}
}
@@ -494,6 +493,7 @@ func (b *cdsBalancer) UpdateSubConnState(sc balancer.SubConn, state balancer.Sub
// Close closes the cdsBalancer and the underlying edsBalancer.
func (b *cdsBalancer) Close() {
b.closed.Fire()
+ <-b.done.Done()
}
// ccWrapper wraps the balancer.ClientConn passed to the CDS balancer at
diff --git a/xds/internal/balancer/edsbalancer/eds.go b/xds/internal/balancer/edsbalancer/eds.go
index <HASH>..<HASH> 100644
--- a/xds/internal/balancer/edsbalancer/eds.go
+++ b/xds/internal/balancer/edsbalancer/eds.go
@@ -65,6 +65,7 @@ func (b *edsBalancerBuilder) Build(cc balancer.ClientConn, opts balancer.BuildOp
x := &edsBalancer{
cc: cc,
closed: grpcsync.NewEvent(),
+ done: grpcsync.NewEvent(),
grpcUpdate: make(chan interface{}),
xdsClientUpdate: make(chan *edsUpdate),
childPolicyUpdate: buffer.NewUnbounded(),
@@ -130,6 +131,7 @@ type edsBalancerImplInterface interface {
type edsBalancer struct {
cc balancer.ClientConn
closed *grpcsync.Event
+ done *grpcsync.Event
logger *grpclog.PrefixLogger
// edsBalancer continuously monitors the channels below, and will handle
@@ -170,6 +172,8 @@ func (x *edsBalancer) run() {
x.cancelWatch()
x.xdsClient.Close()
x.edsImpl.close()
+ x.logger.Infof("Shutdown")
+ x.done.Fire()
return
}
}
@@ -379,7 +383,7 @@ func (x *edsBalancer) enqueueChildBalancerState(p priorityType, s balancer.State
func (x *edsBalancer) Close() {
x.closed.Fire()
- x.logger.Infof("Shutdown")
+ <-x.done.Done()
}
// equalStringPointers returns true if
|
xds: actually close stuff in cds/eds `Close()` (#<I>)
|
grpc_grpc-go
|
train
|
faf8bf405be44f38e8222af30563ef2597f7bb58
|
diff --git a/js/binance.js b/js/binance.js
index <HASH>..<HASH> 100644
--- a/js/binance.js
+++ b/js/binance.js
@@ -845,12 +845,7 @@ module.exports = class binance extends Exchange {
if (typeof since !== 'undefined')
para['startTime'] = since;
let response = await this.wapiGetDepositHistory (this.extend (para, params));
- if ('success' in response) {
- if (response['success']) {
- return this.parseTransactions (response['depositList'], asset, since, limit, 'deposit');
- }
- }
- throw new ExchangeError (this.id + ' depositHistory failed: ' + this.last_http_response);
+ return this.parseTransactions (response['depositList'], asset, since, limit);
}
async fetchWithdrawals (code = undefined, since = undefined, limit = undefined, params = {}) {
@@ -864,12 +859,7 @@ module.exports = class binance extends Exchange {
if (typeof since !== 'undefined')
para['startTime'] = since;
let response = await this.wapiGetWithdrawHistory (this.extend (para, params));
- if ('success' in response) {
- if (response['success']) {
- return this.parseTransactions (response['withdrawList'], asset, since, limit, 'withdrawal');
- }
- }
- throw new ExchangeError (this.id + ' withdrawHistory failed: ' + this.last_http_response);
+ return this.parseTransactions (response['withdrawList'], asset, since, limit);
}
parseTransactionStatus (status) {
@@ -1053,12 +1043,19 @@ module.exports = class binance extends Exchange {
// response in format {'msg': 'The coin does not exist.', 'success': true/false}
let success = this.safeValue (response, 'success', true);
if (!success) {
- if ('msg' in response)
+ let message = this.safeString (response, 'msg');
+ let parsedMessage = undefined;
+ if (typeof message !== 'undefined') {
try {
- response = JSON.parse (response['msg']);
+ parsedMessage = JSON.parse (message);
} catch (e) {
- response = {};
+ // do nothing
+ parsedMessage = undefined;
}
+ if (typeof parsedMessage !== 'undefined') {
+ response = parsedMessage;
+ }
+ }
}
// checks against error codes
let error = this.safeString (response, 'code');
|
minor edits in binance handleErrors
|
ccxt_ccxt
|
train
|
8e8e43e38c93bc6ad5c3dcd2f11231690c332d6b
|
diff --git a/master/buildbot/status/web/waterfall.py b/master/buildbot/status/web/waterfall.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/status/web/waterfall.py
+++ b/master/buildbot/status/web/waterfall.py
@@ -110,7 +110,7 @@ class CurrentBox(components.Adapter):
brcount = brcounts[builderName]
if brcount:
text.append("%d pending" % brcount)
- for t in upcoming:
+ for t in sorted(upcoming):
if t is not None:
eta = t - util.now()
text.extend(self.formatETA("next in", eta))
|
Sort next builds in waterfall
Fixes #<I>.
|
buildbot_buildbot
|
train
|
2eca8ee83be9e6ed8c9a59145da46861ff25e975
|
diff --git a/features/steps/metric_configuration.py b/features/steps/metric_configuration.py
index <HASH>..<HASH> 100644
--- a/features/steps/metric_configuration.py
+++ b/features/steps/metric_configuration.py
@@ -1,5 +1,5 @@
from behave import *
-from nose.tools import assert_true, assert_in, assert_equal
+from nose.tools import assert_true, assert_in, assert_equal, assert_is_instance
from ..tests.factories import MetricConfigurationFactory, \
LinesOfCodeMetricFactory
@@ -50,7 +50,7 @@ def step_impl(context):
@then(u'I should get an error')
def step_impl(context):
- assert_true(isinstance(context.response, KalibroClientNotFoundError))
+ assert_is_instance(context.response, KalibroClientNotFoundError)
@given(u'I have a metric configuration within the given kalibro configuration')
def step_impl(context):
|
Improved assertion in MetricConfiguration error step.
Now using assert_is_instance for a better error message.
|
mezuro_kalibro_client_py
|
train
|
2bb4ed01be3e5cd71e1cf142710ef748eca436c5
|
diff --git a/lib/Cake/Controller/Component/SecurityComponent.php b/lib/Cake/Controller/Component/SecurityComponent.php
index <HASH>..<HASH> 100644
--- a/lib/Cake/Controller/Component/SecurityComponent.php
+++ b/lib/Cake/Controller/Component/SecurityComponent.php
@@ -208,6 +208,9 @@ class SecurityComponent extends Component {
}
}
$this->_generateToken($controller);
+ if ($isPost) {
+ unset($controller->request->data['_Token']);
+ }
}
/**
|
Removing _Token from request data.
It is not used outside the component and could possibly affect Model::save().
Fixes #<I>
|
cakephp_cakephp
|
train
|
6c889161d7f1de09a67f94f6631763bbe7581892
|
diff --git a/opencensus/trace/exporters/zipkin_exporter.py b/opencensus/trace/exporters/zipkin_exporter.py
index <HASH>..<HASH> 100644
--- a/opencensus/trace/exporters/zipkin_exporter.py
+++ b/opencensus/trace/exporters/zipkin_exporter.py
@@ -137,10 +137,6 @@ class ZipkinExporter(base.Exporter):
:returns: List of zipkin format spans.
"""
- top_span = span_datas[0]
- trace_id = top_span.context.trace_id if top_span.context is not None \
- else None
-
local_endpoint = {
'serviceName': self.service_name,
'port': self.port,
@@ -173,7 +169,7 @@ class ZipkinExporter(base.Exporter):
duration_ms = end_timestamp_ms - start_timestamp_ms
zipkin_span = {
- 'traceId': trace_id,
+ 'traceId': span.context.trace_id,
'id': str(span.span_id),
'name': span.name,
'timestamp': int(round(start_timestamp_ms)),
|
fix ZipkinExporter when using BackgroundThreadTransport (#<I>)
* changed time precision on zipkin exporter to microseconds
* fixed tests
* fixed line length
* fixed translate_to_zipkin function on ZipkinExporter
- previously when using BackgroundThreadTransport, all traces were sent with same trace_id
|
census-instrumentation_opencensus-python
|
train
|
9bde6e5fedb44945d6b68200f62a243cff8a9613
|
diff --git a/suspect/io/tarquin.py b/suspect/io/tarquin.py
index <HASH>..<HASH> 100644
--- a/suspect/io/tarquin.py
+++ b/suspect/io/tarquin.py
@@ -1,3 +1,6 @@
+import subprocess
+
+
def save_dpt(filename, data):
with open(filename, 'wb') as fout:
fout.write("Dangerplot_version\t1.0\n".encode())
@@ -10,4 +13,19 @@ def save_dpt(filename, data):
fout.write("Echo_time\t{0:8.8e}\n".format(data.te).encode())
fout.write("Real_FID\tImag_FID\t\n".encode())
for x in data:
- fout.write("{0.real:8.8e} {0.imag:8.8e}\n".format(x).encode())
\ No newline at end of file
+ fout.write("{0.real:8.8e} {0.imag:8.8e}\n".format(x).encode())
+
+
+def read_output(filename):
+ with open(filename) as fin:
+ pass
+
+
+def process(data):
+ save_dpt("/tmp/temp.dpt", data)
+ subprocess.run("tarquin --input {} --format dpt --output_txt {}".format(
+ "/tmp/temp.dpt", "/tmp/output.txt"
+ ), shell=True)
+ with open("/tmp/output.txt") as fin:
+ result = fin.read()
+ return result
|
first integration with tarquin quantitation software
|
bennyrowland_suspect
|
train
|
ce8b53d99c1cc55f2fc8076383c06b80a28fa2e8
|
diff --git a/spec/unit/resource/api800/c7000/storage_pool_spec.rb b/spec/unit/resource/api800/c7000/storage_pool_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/resource/api800/c7000/storage_pool_spec.rb
+++ b/spec/unit/resource/api800/c7000/storage_pool_spec.rb
@@ -1,4 +1,4 @@
-# (C) Copyright 2017 Hewlett Packard Enterprise Development LP
+# (C) Copyright 2020 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
|
Update storage_pool_spec.rb
|
HewlettPackard_oneview-sdk-ruby
|
train
|
1930d510448e5d06ef4795be97e8b5b24e4e6618
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -38,7 +38,6 @@ setup(
'Converting plain ASCII punctuation characters into HTML '
'entities according to basic typography rules'
),
- long_description=open('README.rst').read(),
author=meta['author'],
author_email='mail@honzajavorek.cz',
url='https://github.com/honzajavorek/tipi',
|
Removed long description, because it looks silly on PyPI.
|
honzajavorek_tipi
|
train
|
1385c677bdc4c8007173e75a3e4a13cb861adb1a
|
diff --git a/acceptancetests/substrate.py b/acceptancetests/substrate.py
index <HASH>..<HASH> 100644
--- a/acceptancetests/substrate.py
+++ b/acceptancetests/substrate.py
@@ -1,6 +1,6 @@
from contextlib import (
contextmanager,
- )
+)
import json
import logging
import os
@@ -19,7 +19,7 @@ import gce
from utility import (
temp_dir,
until_timeout,
- )
+)
import winazurearm
@@ -767,7 +767,7 @@ class MAASAccount:
"""Create a new vlan on fabric with given fabric_id."""
args = [
self.profile, 'vlans', 'create', str(fabric_id), 'vid=' + str(vid),
- ]
+ ]
if name is not None:
args.append('name=' + name)
return self._maas(*args)
@@ -946,6 +946,43 @@ class LXDAccount:
return uncleaned_resource
+class K8sAccount:
+ """Represent a K8s account."""
+
+ @classmethod
+ @contextmanager
+ def from_boot_config(cls, boot_config):
+ """Create a ContextManager for a K8sAccount."""
+ yield cls()
+
+ def delete_namespaces(self, ns_names):
+ """Delete the specified namespaces."""
+ failed = []
+ for ns in ns_names:
+ log.info("deleting namespace %s", ns)
+ try:
+ subprocess.check_call(['kubectl', 'delete', 'ns', ns])
+ except subprocess.CalledProcessError as e:
+ log.warn(e)
+ failed.append(ns)
+ return failed
+
+ def ensure_cleanup(self, resource_details):
+ """
+ Do K8s specific clean-up activity.
+ :param resource_details: The list of resource to be cleaned up
+ :return: list of resources that were not cleaned up
+ """
+ uncleaned_resources = []
+
+ uncleaned_namespaces = self.delete_namespaces(resource_details.get('namespaces', []))
+ if uncleaned_namespaces:
+ uncleaned_resources.append(
+ dict(resource='namespaces', errors=uncleaned_namespaces,)
+ )
+ return uncleaned_resources
+
+
def get_config(boot_config):
config = boot_config.make_config_copy()
if boot_config.provider not in ('lxd', 'manual'):
@@ -969,6 +1006,7 @@ def make_substrate_manager(boot_config):
'azure-arm': AzureARMAccount.from_boot_config,
'lxd': LXDAccount.from_boot_config,
'gce': GCEAccount.from_boot_config,
+ 'kubernetes': K8sAccount.from_boot_config,
}
substrate_type = config['type']
if substrate_type == 'azure' and 'application-id' in config:
|
Add K8sAccount substrate and register into substrate_factory
|
juju_juju
|
train
|
691ed78ea8a1fa1fe43e23262934d05b21131eef
|
diff --git a/bundles/org.eclipse.orion.client.ui/web/orion/search/InlineSearchPane.js b/bundles/org.eclipse.orion.client.ui/web/orion/search/InlineSearchPane.js
index <HASH>..<HASH> 100644
--- a/bundles/org.eclipse.orion.client.ui/web/orion/search/InlineSearchPane.js
+++ b/bundles/org.eclipse.orion.client.ui/web/orion/search/InlineSearchPane.js
@@ -79,7 +79,7 @@ define([
this._slideout.getContentNode().removeChild(this._searchWrapper); // detach wrapper now that initialization is done, see getContentNode().appendChild() call above
this._inputManager.addEventListener("InputChanged", function(evt) { //$NON-NLS-0$
if(evt.metadata && !evt.metadata.Directory) {
- this._generateAnnotations(evt.metadata.Location, evt.editor);
+ this._generateAnnotations(decodeURIComponent(evt.metadata.Location), evt.editor);
}
}.bind(this));
},
@@ -164,7 +164,7 @@ define([
if(this._inputManager && this._inputManager.inputManager) {
var fMeta = this._inputManager.inputManager.getFileMetadata();
if(fMeta && !fMeta.Directory) {
- this._generateAnnotations(fMeta.Location, this._inputManager.inputManager.editor);
+ this._generateAnnotations(decodeURIComponent(fMeta.Location), this._inputManager.inputManager.editor);
}
}
},
diff --git a/bundles/org.eclipse.orion.client.ui/web/orion/searchModel.js b/bundles/org.eclipse.orion.client.ui/web/orion/searchModel.js
index <HASH>..<HASH> 100644
--- a/bundles/org.eclipse.orion.client.ui/web/orion/searchModel.js
+++ b/bundles/org.eclipse.orion.client.ui/web/orion/searchModel.js
@@ -352,7 +352,7 @@ define([
var fileNode;
if(this._indexedFileItems) {
this._indexedFileItems.some(function(element){
- if (element.location === fileLocation) {
+ if (decodeURIComponent(element.location) === fileLocation) {
fileNode = element;
return true;
}
|
Bug <I> - References UI becomes out of sync when you change a file (provide a refresh button)
|
eclipse_orion.client
|
train
|
00bf0e01a12e9ce13adef15e5ddb53700d90bd81
|
diff --git a/mux_test.go b/mux_test.go
index <HASH>..<HASH> 100644
--- a/mux_test.go
+++ b/mux_test.go
@@ -135,6 +135,33 @@ func TestHost(t *testing.T) {
path: "",
shouldMatch: false,
},
+ {
+ title: "Path route with single pattern with pipe, match",
+ route: new(Route).Path("/{category:a|b/c}"),
+ request: newRequest("GET", "http://localhost/a"),
+ vars: map[string]string{"category": "a"},
+ host: "",
+ path: "/a",
+ shouldMatch: true,
+ },
+ {
+ title: "Path route with single pattern with pipe, match",
+ route: new(Route).Path("/{category:a|b/c}"),
+ request: newRequest("GET", "http://localhost/b/c"),
+ vars: map[string]string{"category": "b/c"},
+ host: "",
+ path: "/b/c",
+ shouldMatch: true,
+ },
+ {
+ title: "Path route with multiple patterns with pipe, match",
+ route: new(Route).Path("/{category:a|b/c}/{product}/{id:[0-9]+}"),
+ request: newRequest("GET", "http://localhost/a/product_name/1"),
+ vars: map[string]string{"category": "a", "product": "product_name", "id": "1"},
+ host: "",
+ path: "/a/product_name/1",
+ shouldMatch: true,
+ },
}
for _, test := range tests {
testRoute(t, test)
|
Add tests for patterns with pipe
closes #<I>
|
gorilla_mux
|
train
|
d5324413664a44da1cbdf671e0fe00b8571c2b4b
|
diff --git a/lib/java/src/org/apache/thrift/TBaseAsyncProcessor.java b/lib/java/src/org/apache/thrift/TBaseAsyncProcessor.java
index <HASH>..<HASH> 100644
--- a/lib/java/src/org/apache/thrift/TBaseAsyncProcessor.java
+++ b/lib/java/src/org/apache/thrift/TBaseAsyncProcessor.java
@@ -54,8 +54,13 @@ public class TBaseAsyncProcessor<I> implements TAsyncProcessor, TProcessor {
if (fn == null) {
TProtocolUtil.skip(in, TType.STRUCT);
in.readMessageEnd();
- if (!fn.isOneway()) {
- TApplicationException x = new TApplicationException(TApplicationException.UNKNOWN_METHOD, "Invalid method name: '"+msg.name+"'");
+
+ TApplicationException x = new TApplicationException(TApplicationException.UNKNOWN_METHOD,
+ "Invalid method name: '" + msg.name + "'");
+ LOGGER.debug("Invalid method name", x);
+
+ // this means it is a two-way request, so we can send a reply
+ if (msg.type == TMessageType.CALL) {
out.writeMessageBegin(new TMessage(msg.name, TMessageType.EXCEPTION, msg.seqid));
x.write(out);
out.writeMessageEnd();
@@ -72,8 +77,12 @@ public class TBaseAsyncProcessor<I> implements TAsyncProcessor, TProcessor {
args.read(in);
} catch (TProtocolException e) {
in.readMessageEnd();
+
+ TApplicationException x = new TApplicationException(TApplicationException.PROTOCOL_ERROR,
+ e.getMessage());
+ LOGGER.debug("Could not retrieve function arguments", x);
+
if (!fn.isOneway()) {
- TApplicationException x = new TApplicationException(TApplicationException.PROTOCOL_ERROR, e.getMessage());
out.writeMessageBegin(new TMessage(msg.name, TMessageType.EXCEPTION, msg.seqid));
x.write(out);
out.writeMessageEnd();
@@ -93,6 +102,7 @@ public class TBaseAsyncProcessor<I> implements TAsyncProcessor, TProcessor {
try {
fn.start(iface, args, resultHandler);
} catch (Exception e) {
+ LOGGER.debug("Exception handling function", e);
resultHandler.onError(e);
}
return true;
|
THRIFT-<I>: Fix guaranteed NPE in TBaseAsyncProcessor.java
wq# Please enter the commit message for your changes. Lines starting
|
apache_thrift
|
train
|
bac76baaf79f46d3cf23ecc13f8908cfc208bd73
|
diff --git a/pdftotree/TreeExtract.py b/pdftotree/TreeExtract.py
index <HASH>..<HASH> 100644
--- a/pdftotree/TreeExtract.py
+++ b/pdftotree/TreeExtract.py
@@ -196,10 +196,14 @@ class TreeExtractor(object):
for (pnum, pwidth, pheight, top, left, bottom, right) in self.tree[page_num][clust]:
boxes += [[clust.lower().replace(' ', '_'), top, left,
bottom, right]]
- # TODO(XXX): Ideally, we autodetect whether we have a 1 column
- # or two column paper, and sort accordingly.
- boxes.sort(key=cmp_to_key(two_column_paper_order))
+
+ # TODO: We need to detect columns and sort acccordingly.
+ boxes.sort(key=cmp_to_key(column_order))
+
+ # from pprint import pprint
+ # pprint(boxes, width=120)
# import pdb; pdb.set_trace()
+
for box in boxes:
if(box[0] == "table"):
table = box[1:]
diff --git a/pdftotree/pdf/vector_utils.py b/pdftotree/pdf/vector_utils.py
index <HASH>..<HASH> 100644
--- a/pdftotree/pdf/vector_utils.py
+++ b/pdftotree/pdf/vector_utils.py
@@ -126,8 +126,8 @@ def reading_order(e1,e2):
b1 = e1.bbox
b2 = e2.bbox
if round(b1[y0]) == round(b2[y0]) or round(b1[y1]) == round(b2[y1]):
- return float_cmp(b1[x0],b2[x0])
- return float_cmp(b1[y0],b2[y0])
+ return float_cmp(b1[x0], b2[x0])
+ return float_cmp(b1[y0], b2[y0])
def xy_reading_order(e1, e2):
'''
@@ -136,17 +136,34 @@ def xy_reading_order(e1, e2):
b1 = e1.bbox
b2 = e2.bbox
if round(b1[x0]) == round(b2[x0]):
- return float_cmp(b1[y0],b2[y0])
- return float_cmp(b1[x0],b2[x0])
+ return float_cmp(b1[y0], b2[y0])
+ return float_cmp(b1[x0], b2[x0])
-def two_column_paper_order(b1, b2):
+def column_order(b1, b2):
'''
+ A comparator that sorts bboxes first by "columns", where a column is made
+ up of all bboxes that overlap, then by vertical position in each column.
+
b1 = [b1.type, b1.top, b1.left, b1.bottom, b1.right]
b2 = [b2.type, b2.top, b2.left, b2.bottom, b2.right]
'''
- if((b1[2] > b2[2] and b1[2] < b2[4]) or (b2[2] > b1[2] and b2[2] < b1[4])):
- return float_cmp(b1[1], b2[1])
- return float_cmp(b1[2], b2[2])
+ (top, left, bottom, right) = (1, 2, 3, 4)
+ # TODO(senwu): Reimplement the functionality of this comparator to
+ # detect the number of columns, and sort those in reading order.
+
+ # TODO: This is just a simple top to bottom, left to right comparator
+ # for now.
+ if (round(b1[top]) == round(b2[top]) or
+ round(b1[bottom]) == round(b2[bottom])):
+ return float_cmp(b1[left], b2[left])
+ return float_cmp(b1[top], b2[top])
+
+ # if((b1[left] >= b2[left] and b1[left] <= b2[right]) or
+ # (b2[left] >= b1[left] and b2[left] <= b1[right])):
+ # return float_cmp(b1[top], b2[top])
+ #
+ # # Return leftmost columns first
+ # return float_cmp(b1[left], b2[left])
def float_cmp(f1, f2):
if f1 > f2:
|
Add placeholders for #3
We will need to implement functionality for:
1. Detecting the number of columns in a document based on bboxes
2. Ordering the content in reading order
Currently, this just swaps out the inconsistent two-column code we used
to have in the TreeStructure repository for a simple top-to-bottom,
left-to-right comparator. We will need to fix this in the future.
|
HazyResearch_pdftotree
|
train
|
cdc7a5db82ff8d2f47f324ba1acfd69dfd0ecfbe
|
diff --git a/db/upgrade-X.X-X.Y.sql b/db/upgrade-X.X-X.Y.sql
index <HASH>..<HASH> 100644
--- a/db/upgrade-X.X-X.Y.sql
+++ b/db/upgrade-X.X-X.Y.sql
@@ -49,7 +49,8 @@ DROP PROCEDURE IF EXISTS ValidateVersion;
\! echo "altering pki_certs"
ALTER TABLE pki_certs
- ADD COLUMN IF NOT EXISTS `scep` BOOLEAN DEFAULT FALSE AFTER ip_addresses;
+ ADD COLUMN IF NOT EXISTS `scep` BOOLEAN DEFAULT FALSE AFTER ip_addresses,
+ ADD COLUMN IF NOT EXISTS `alert` BOOLEAN DEFAULT FALSE AFTER scep;
\! echo "set pki_certs.scep to true if private key is empty"
UPDATE pki_certs
diff --git a/go/caddy/pfpki/handlers/handlers.go b/go/caddy/pfpki/handlers/handlers.go
index <HASH>..<HASH> 100644
--- a/go/caddy/pfpki/handlers/handlers.go
+++ b/go/caddy/pfpki/handlers/handlers.go
@@ -651,6 +651,35 @@ func GetRevokedByID(pfpki *types.Handler) http.Handler {
})
}
+func CheckRenewal(pfpki *types.Handler) http.Handler {
+ return http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) {
+
+ o := models.NewCertModel(pfpki)
+ var Information types.Info
+ var err error
+
+ Error := types.Errors{Status: 0}
+ switch req.Method {
+ case "GET":
+ Information.Status = http.StatusOK
+ vars := mux.Vars(req)
+ Information, err = o.CheckRenewal(vars)
+ if err != nil {
+ Error.Message = err.Error()
+ Error.Status = http.StatusNotFound
+ break
+ }
+
+ default:
+ err = errors.New("Method " + req.Method + " not supported")
+ Error.Message = err.Error()
+ Error.Status = http.StatusMethodNotAllowed
+ break
+ }
+ manageAnswer(Information, Error, pfpki, res, req)
+ })
+}
+
func manageAnswer(Information types.Info, Error types.Errors, pfpki *types.Handler, res http.ResponseWriter, req *http.Request) {
var err error
diff --git a/go/caddy/pfpki/models/models.go b/go/caddy/pfpki/models/models.go
index <HASH>..<HASH> 100644
--- a/go/caddy/pfpki/models/models.go
+++ b/go/caddy/pfpki/models/models.go
@@ -145,6 +145,7 @@ type (
DNSNames string `json:"dns_names,omitempty"`
IPAddresses string `json:"ip_addresses,omitempty"`
Scep *bool `json:"scep,omitempty" gorm:"default:false"`
+ Alert *bool `json:"alert,omitempty" gorm:"default:false"`
}
// RevokedCert struct
@@ -1374,6 +1375,40 @@ func (c Cert) Revoke(params map[string]string) (types.Info, error) {
return Information, nil
}
+func (c Cert) CheckRenewal(params map[string]string) (types.Info, error) {
+ Information := types.Info{}
+ var certdb []Cert
+
+ if CertDB := c.DB.Where("alert <> ? and scep <>", 1, 1).Find(&certdb); CertDB.Error != nil {
+ Information.Error = CertDB.Error.Error()
+ return Information, CertDB.Error
+ }
+
+ for _, v := range certdb {
+ // Find the profile
+ var prof Profile
+ if profDB := c.DB.First(&prof, v.ProfileID); profDB.Error != nil {
+ Information.Error = profDB.Error.Error()
+ return Information, errors.New(dbError)
+ }
+ // Revoke due certificate
+ if time.Now().Unix() > v.ValidUntil.Unix() {
+ params := make(map[string]string)
+
+ params["id"] = strconv.Itoa(int(v.ID))
+ params["reason"] = strconv.Itoa(ocsp.Superseded)
+ c.Revoke(params)
+ }
+ if v.ValidUntil.Unix()-int64((time.Duration(prof.DaysBeforeRenewal)*24*time.Hour).Seconds()) < time.Now().Unix() {
+ // Send Email
+ }
+ }
+
+ Information.Entries = certdb
+
+ return Information, nil
+}
+
func NewRevokedCertModel(pfpki *types.Handler) *RevokedCert {
RevokedCert := &RevokedCert{}
diff --git a/go/caddy/pfpki/pfpki.go b/go/caddy/pfpki/pfpki.go
index <HASH>..<HASH> 100644
--- a/go/caddy/pfpki/pfpki.go
+++ b/go/caddy/pfpki/pfpki.go
@@ -129,6 +129,8 @@ func buildPfpkiHandler(ctx context.Context) (types.Handler, error) {
// Get Revoked Certificate by ID
api.Handle("/pki/revokedcert/{id}", handlers.GetRevokedByID(PFPki)).Methods("GET")
+ api.Handle("/pki/checkrenewal", handlers.CheckRenewal(PFPki)).Methods("GET")
+
// OCSP responder
api.Handle("/pki/ocsp", handlers.ManageOcsp(PFPki)).Methods("GET", "POST")
|
New api endpoint /pki/checkrenewal (revoke due certificates and email
renewable ones)
|
inverse-inc_packetfence
|
train
|
5352d9eb6d3e47d6418527d804b243003932f01b
|
diff --git a/core/server/master/src/main/java/alluxio/master/block/DefaultBlockMaster.java b/core/server/master/src/main/java/alluxio/master/block/DefaultBlockMaster.java
index <HASH>..<HASH> 100644
--- a/core/server/master/src/main/java/alluxio/master/block/DefaultBlockMaster.java
+++ b/core/server/master/src/main/java/alluxio/master/block/DefaultBlockMaster.java
@@ -960,13 +960,10 @@ public final class DefaultBlockMaster extends AbstractMaster implements BlockMas
List<BlockLocation> locations = new ArrayList<>();
List<MasterBlockLocation> blockLocations = masterBlockInfo.getBlockLocations();
// Sort the block locations by their alias ordinal in the master storage tier mapping
- Collections.sort(blockLocations, new Comparator<MasterBlockLocation>() {
- @Override
- public int compare(MasterBlockLocation o1, MasterBlockLocation o2) {
- return mGlobalStorageTierAssoc.getOrdinal(o1.getTierAlias())
- - mGlobalStorageTierAssoc.getOrdinal(o2.getTierAlias());
- }
- });
+ Collections.sort(blockLocations, Comparator
+ .comparingInt(o -> mGlobalStorageTierAssoc
+ .getOrdinal(o.getTierAlias()))
+ );
for (MasterBlockLocation masterBlockLocation : blockLocations) {
MasterWorkerInfo workerInfo =
mWorkers.getFirstByField(ID_INDEX, masterBlockLocation.getWorkerId());
|
check it please (#<I>)
|
Alluxio_alluxio
|
train
|
9bf00ab7262959b66172774211a551c0a6429742
|
diff --git a/lib/sass/selector/sequence.rb b/lib/sass/selector/sequence.rb
index <HASH>..<HASH> 100644
--- a/lib/sass/selector/sequence.rb
+++ b/lib/sass/selector/sequence.rb
@@ -212,10 +212,13 @@ module Sass
return unless sseq2.size > 1
# .foo ~ .bar is a superselector of .foo + .bar
return unless sseq1[1] == "~" ? sseq2[1] != ">" : sseq2[1] == sseq1[1]
- return sseq1.first.superselector?(sseq2.first) &&
- subweave_superselector?(sseq1[2..-1], sseq2[2..-1])
+ return unless sseq1.first.superselector?(sseq2.first)
+ return true if sseq1.size == 2
+ return false if sseq2.size == 2
+ return subweave_superselector?(sseq1[2..-1], sseq2[2..-1])
elsif sseq2.size > 1
return true if sseq2[1] == ">" && sseq1.first.superselector?(sseq2.first)
+ return false if sseq2.size == 2
return subweave_superselector?(sseq1, sseq2[2..-1])
else
sseq1.first.superselector?(sseq2.first)
|
[Sass] Handle sseqs with nothing after the combinator.
|
sass_ruby-sass
|
train
|
cfb0072e67d1d342f6fe72a668f9cc9a355a4868
|
diff --git a/examples/form-with-rethinkdb/idp/main.go b/examples/form-with-rethinkdb/idp/main.go
index <HASH>..<HASH> 100644
--- a/examples/form-with-rethinkdb/idp/main.go
+++ b/examples/form-with-rethinkdb/idp/main.go
@@ -7,8 +7,7 @@ import (
"os"
"time"
- // "github.com/boj/rethinkstore"
- "github.com/gorilla/sessions"
+ "github.com/boj/rethinkstore"
"github.com/janekolszak/idp/core"
"github.com/janekolszak/idp/helpers"
"github.com/janekolszak/idp/providers/cookie"
@@ -124,16 +123,12 @@ func main() {
MaxAge: time.Minute * 1,
}
- // challengeCookieStore := sessions.NewFilesystemStore("", []byte("something-very-secret"))
- challengeCookieStore := sessions.NewCookieStore([]byte("something-very-secret"))
-
- // TODO: Uncomment when rethinkstore is fixed
- // challengeCookieStore, err := rethinkstore.NewRethinkStore(os.Getenv("DATABASE_URL"), os.Getenv("DATABASE_NAME"), "challenges", 5, 5, []byte("something-very-secret"))
- // if err != nil {
- // panic(err)
- // }
- // defer challengeCookieStore.Close()
- // challengeCookieStore.MaxAge(60 * 5) // 5 min
+ challengeCookieStore, err := rethinkstore.NewRethinkStore(os.Getenv("DATABASE_URL"), os.Getenv("DATABASE_NAME"), "challenges", 5, 5, []byte("something-very-secret"))
+ if err != nil {
+ panic(err)
+ }
+ defer challengeCookieStore.Close()
+ challengeCookieStore.MaxAge(60 * 5) // 5 min
idp := core.NewIDP(&core.IDPConfig{
ClusterURL: *hydraURL,
|
Using rethinkdb as a userdb in provider
|
janekolszak_idp
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.