| hash (string, 40 chars) | diff (string, 131-114k chars) | message (string, 7-980 chars) | project (string, 5-67 chars) | split (1 class) |
|---|---|---|---|---|
c0bb9519cb5a0eb39160a5e4670454be30c86247
|
diff --git a/addon/components/bs-datetimepicker.js b/addon/components/bs-datetimepicker.js
index <HASH>..<HASH> 100644
--- a/addon/components/bs-datetimepicker.js
+++ b/addon/components/bs-datetimepicker.js
@@ -84,7 +84,7 @@ export default Component.extend({
});
this.addObserver('locale', function() {
- this.$().data('DateTimePicker').minDate(this.get('locale'));
+ this.$().data('DateTimePicker').locale(this.get('locale'));
});
},
|
Call the appropriate method to change the locale
|
btecu_ember-cli-bootstrap-datetimepicker
|
train
|
d48e93736e5336870d56c802f2e7ae89b98006f4
|
diff --git a/spaceship/lib/spaceship/tunes/tunes_client.rb b/spaceship/lib/spaceship/tunes/tunes_client.rb
index <HASH>..<HASH> 100644
--- a/spaceship/lib/spaceship/tunes/tunes_client.rb
+++ b/spaceship/lib/spaceship/tunes/tunes_client.rb
@@ -111,6 +111,30 @@ module Spaceship
send_shared_login_request(user, password)
end
+ # Sometimes we get errors or info nested in our data
+ # This method allows you to pass in a set of keys to check for
+ # along with the name of the sub_section of your original data
+ # where we should check
+ # Returns a mapping of keys to data array if we find anything, otherwise, empty map
+ def fetch_errors_in_data(data_section: nil, sub_section_name: nil, keys: nil)
+ if data_section && sub_section_name
+ sub_section = data_section[sub_section_name]
+ else
+ sub_section = data_section
+ end
+
+ unless sub_section
+ return {}
+ end
+
+ error_map = {}
+ keys.each do |key|
+ errors = sub_section.fetch(key, [])
+ error_map[key] = errors if errors.count > 0
+ end
+ return error_map
+ end
+
# rubocop:disable Metrics/PerceivedComplexity
# If the response is coming from a flaky api, set flaky_api_call to true so we retry a little.
# Patience is a virtue.
@@ -119,12 +143,17 @@ module Spaceship
return unless raw.kind_of? Hash
data = raw['data'] || raw # sometimes it's with data, sometimes it isn't
+ error_keys_to_check = [
+ "sectionErrorKeys",
+ "sectionInfoKeys",
+ "sectionWarningKeys",
+ "validationErrors"
+ ]
+ errors_in_data = fetch_errors_in_data(data_section: data, keys: error_keys_to_check)
+ errors_in_version_info = fetch_errors_in_data(data_section: data, sub_section_name: "versionInfo", keys: error_keys_to_check)
- if data.fetch('sectionErrorKeys', []).count == 0 and
- data.fetch('sectionInfoKeys', []).count == 0 and
- data.fetch('sectionWarningKeys', []).count == 0 and
- data.fetch('validationErrors', []).count == 0
-
+ # If we have any errors or "info" we need to treat them as warnings or errors
+ if errors_in_data.count == 0 && errors_in_version_info.count == 0
logger.debug("Request was successful")
end
@@ -154,8 +183,15 @@ module Spaceship
end
errors = handle_response_hash.call(data)
- errors += data.fetch('sectionErrorKeys', [])
- errors += data.fetch('validationErrors', [])
+
+ # Search at data level, as well as "versionInfo" level for errors
+ error_keys = ["sectionErrorKeys", "validationErrors"]
+ errors_in_data = fetch_errors_in_data(data_section: data, keys: error_keys)
+ errors_in_version_info = fetch_errors_in_data(data_section: data, sub_section_name: "versionInfo", keys: error_keys)
+
+ errors += errors_in_data.values if errors_in_data.values
+ errors += errors_in_version_info.values if errors_in_version_info.values
+ errors = errors.flat_map { |value| value }
# Sometimes there is a different kind of error in the JSON response
# e.g. {"warn"=>nil, "error"=>["operation_failed"], "info"=>nil}
@@ -177,8 +213,18 @@ module Spaceship
end
end
- puts data['sectionInfoKeys'] if data['sectionInfoKeys']
- puts data['sectionWarningKeys'] if data['sectionWarningKeys']
+ # Search at data level, as well as "versionInfo" level for info and warnings
+ info_keys = ["sectionInfoKeys", "sectionWarningKeys"]
+ info_in_data = fetch_errors_in_data(data_section: data, keys: info_keys)
+ info_in_version_info = fetch_errors_in_data(data_section: data, sub_section_name: "versionInfo", keys: info_keys)
+
+ info_in_data.each do |info_key, info_value|
+ puts(info_value)
+ end
+
+ info_in_version_info.each do |info_key, info_value|
+ puts(info_value)
+ end
return data
end
|
Find nested error messages (#<I>)
Responses sometimes come with nested error messages
This should fix #<I>
|
fastlane_fastlane
|
train
|
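A minimal Python sketch of the pattern the new `fetch_errors_in_data` helper implements: pick either the whole data section or a named sub-section, then collect the non-empty arrays behind a list of keys. The function name mirrors the Ruby above, but the sample payload is illustrative, not fastlane's actual response format.

```python
def fetch_errors_in_data(data_section, sub_section_name=None, keys=()):
    # Look inside a named sub-section when one is given, otherwise
    # scan the section itself.
    section = data_section.get(sub_section_name) if sub_section_name else data_section
    if not section:
        return {}
    # Keep only the keys whose value is a non-empty list.
    return {key: section[key] for key in keys if section.get(key)}

response = {
    "sectionErrorKeys": ["bad_icon"],
    "versionInfo": {"validationErrors": ["missing screenshot"]},
}
print(fetch_errors_in_data(response, keys=["sectionErrorKeys", "validationErrors"]))
# {'sectionErrorKeys': ['bad_icon']}
print(fetch_errors_in_data(response, "versionInfo", ["validationErrors"]))
# {'validationErrors': ['missing screenshot']}
```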
6d6152d0d74319d1135e62ef426fcf473b744b90
|
diff --git a/pkg/sdn/plugin/pod_unsupported.go b/pkg/sdn/plugin/pod_unsupported.go
index <HASH>..<HASH> 100644
--- a/pkg/sdn/plugin/pod_unsupported.go
+++ b/pkg/sdn/plugin/pod_unsupported.go
@@ -10,7 +10,7 @@ import (
"github.com/openshift/origin/pkg/sdn/plugin/cniserver"
)
-func (m *podManager) setup(req *cniserver.PodRequest) (*cnitypes.Result, *runningPod, error) {
+func (m *podManager) setup(req *cniserver.PodRequest) (cnitypes.Result, *runningPod, error) {
return nil, nil, fmt.Errorf("openshift-sdn is unsupported on this OS!")
}
|
Fix cross-platform compile of pod manager
|
openshift_origin
|
train
|
3aebce7dc9804c601ebd10c925ee7b06c223f467
|
diff --git a/mardao-gae/src/main/java/net/sf/mardao/core/dao/TypeDaoImpl.java b/mardao-gae/src/main/java/net/sf/mardao/core/dao/TypeDaoImpl.java
index <HASH>..<HASH> 100644
--- a/mardao-gae/src/main/java/net/sf/mardao/core/dao/TypeDaoImpl.java
+++ b/mardao-gae/src/main/java/net/sf/mardao/core/dao/TypeDaoImpl.java
@@ -702,7 +702,7 @@ public abstract class TypeDaoImpl<T, ID extends Serializable> extends
Query q = new Query(AUDIT_KIND, AUDIT_PARENT_KEY);
q.setKeysOnly();
q.addFilter(getUpdatedDateColumnName(), FilterOperator.GREATER_THAN_OR_EQUAL, since);
- q.addSort(getUpdatedDateColumnName(), SortDirection.ASCENDING);
+// q.addSort(getUpdatedDateColumnName(), SortDirection.ASCENDING);
PreparedQuery pq = datastore.prepare(q);
final String cursorKey = null != auditCursorKey ?
|
Removing sort order for whatsDeleted(since)
|
sosandstrom_mardao
|
train
|
9b04790cb33a59e37e0a6d1a1e03f8cdd72a5af5
|
diff --git a/Decorators/ExtraLazyResponseCollection.php b/Decorators/ExtraLazyResponseCollection.php
index <HASH>..<HASH> 100644
--- a/Decorators/ExtraLazyResponseCollection.php
+++ b/Decorators/ExtraLazyResponseCollection.php
@@ -26,6 +26,14 @@ final class ExtraLazyResponseCollection implements \IteratorAggregate, ResponseC
return $this->collection;
}
+ /**
+ * @return LazyResponseCollection
+ */
+ public function getInnerCollection()
+ {
+ return $this->collection;
+ }
+
/** {@inheritdoc} */
public function getResponse(RpcRequestInterface $request)
{
diff --git a/Decorators/ExtraLazyRpcClient.php b/Decorators/ExtraLazyRpcClient.php
index <HASH>..<HASH> 100644
--- a/Decorators/ExtraLazyRpcClient.php
+++ b/Decorators/ExtraLazyRpcClient.php
@@ -26,7 +26,7 @@ final class ExtraLazyRpcClient implements RpcClientInterface
{
$collection = $this->client->invoke($calls);
- if (!$this->lazyCollection || $collection->isFrozen()) {
+ if (!$this->lazyCollection || $this->lazyCollection->getInnerCollection() !== $collection) {
$this->lazyCollection = new ExtraLazyResponseCollection($collection);
}
diff --git a/Tests/Decorators/ExtraLazyDecoratorTest.php b/Tests/Decorators/ExtraLazyDecoratorTest.php
index <HASH>..<HASH> 100644
--- a/Tests/Decorators/ExtraLazyDecoratorTest.php
+++ b/Tests/Decorators/ExtraLazyDecoratorTest.php
@@ -17,15 +17,19 @@ final class ExtraLazyDecoratorTest extends TestCase
/** @var RpcMockClient */
private $client;
+ /** @var ExtraLazyRpcClient */
+ private $extraLazyRpcClient;
+
public function setUp()
{
- $this->client = new RpcMockClient();
+ $this->client = new RpcMockClient();
+ $this->extraLazyRpcClient = new ExtraLazyRpcClient($this->client);
}
public function tearDown()
{
- self::assertCount(0, $this->client);
- $this->client = null;
+ $this->client = null;
+ $this->extraLazyRpcClient = null;
}
/**
@@ -46,19 +50,13 @@ final class ExtraLazyDecoratorTest extends TestCase
/** @var RpcResponseInterface[] $responses */
$responses = [$rs1, $rs2, $rs3];
- $client = $this->client;
- $client->push($rs1);
- $client->push($rs2);
- $client->push($rs3);
-
- $lazyClient = new ExtraLazyRpcClient($client);
+ $this->client->push($rs1);
+ $this->client->push($rs2);
+ $this->client->push($rs3);
- $c1 = $lazyClient->invoke($rq1);
- self::assertCount(3, $client);
- $c2 = $lazyClient->invoke($rq2);
- self::assertCount(3, $client);
- $c3 = $lazyClient->invoke($rq3);
- self::assertCount(3, $client);
+ $c1 = $this->extraLazyRpcClient->invoke($rq1);
+ $c2 = $this->extraLazyRpcClient->invoke($rq2);
+ $c3 = $this->extraLazyRpcClient->invoke($rq3);
self::assertEquals($c1, $c2);
self::assertEquals($c1, $c3);
@@ -81,6 +79,8 @@ final class ExtraLazyDecoratorTest extends TestCase
self::assertEquals($c1->getResponse($rs)->getError(), $responses[$id]->getError());
self::assertEquals($c1->getResponse($rs)->getBody(), $responses[$id]->getBody());
}
+
+ self::assertCount(0, $this->client);
}
public function testCollectionIteratorInvokesProxy()
@@ -97,5 +97,24 @@ final class ExtraLazyDecoratorTest extends TestCase
self::assertEquals($response->getError(), $responses[$id]->getError());
self::assertEquals($response->getBody(), $responses[$id]->getBody());
}
+
+ self::assertCount(0, $this->client);
+ }
+
+ public function testSameCollections()
+ {
+ list($requests, $responses, $c1) = $this->getCollection();
+ list($requests, $responses, $c2) = $this->getCollection();
+
+ self::assertSame($c1, $c2);
+ }
+
+ public function testDifferentCollections()
+ {
+ list($requests, $responses, $c1) = $this->getCollection();
+ $current = (new \IteratorIterator($c1))->current();
+ list($requests, $responses, $c2) = $this->getCollection();
+
+ self::assertNotSame($c1, $c2);
}
}
|
Fix inner collection matching [scrutinizer] (#8)
|
scaytrase_rpc-common
|
train
|
a0bc5f0e7885a3be6739cf794616bbaa988179c8
|
diff --git a/js/cbrowser.js b/js/cbrowser.js
index <HASH>..<HASH> 100644
--- a/js/cbrowser.js
+++ b/js/cbrowser.js
@@ -133,7 +133,7 @@ Browser.prototype.realInit = function() {
thisB.resizeViewer();
}, false);
- this.ruler = makeElement('div', null, null, {width: '1px', height: '2000px', backgroundColor: 'blue', position: 'absolute', zIndex: '10000', left: '' + ((this.featurePanelWidth/2)|0) + 'px', top: '0px'});
+ this.ruler = makeElement('div', null, null, {width: '1px', height: '2000px', backgroundColor: 'blue', position: 'absolute', zIndex: '900', left: '' + ((this.featurePanelWidth/2)|0) + 'px', top: '0px'});
this.tierHolder.appendChild(this.ruler);
// Dimension stuff
diff --git a/js/version.js b/js/version.js
index <HASH>..<HASH> 100644
--- a/js/version.js
+++ b/js/version.js
@@ -12,7 +12,7 @@ var VERSION = {
MAJOR: 0,
MINOR: 8,
MICRO: 0,
- PATCH: 'pre1',
+ PATCH: 'pre2',
BRANCH: ''
}
|
Fix ruler z-index to play nicer with Bootstrap.
|
dasmoth_dalliance
|
train
|
f9f1ad2e0cc61fb5d5040d5259247cd98f7dcaad
|
diff --git a/vault/core.go b/vault/core.go
index <HASH>..<HASH> 100644
--- a/vault/core.go
+++ b/vault/core.go
@@ -672,14 +672,29 @@ func (c *Core) handleLoginRequest(req *logical.Request) (*logical.Response, *log
TTL: auth.TTL,
}
- if !strListSubset(te.Policies, []string{"root"}) {
- // Append 'default' policy to the token being created
- te.Policies = append(te.Policies, "default")
- sort.Strings(te.Policies)
+ if strListSubset(te.Policies, []string{"root"}) {
+ te.Policies = []string{"root"}
+ } else {
+ // Use a map to filter out/prevent duplicates
+ policyMap := map[string]bool{}
+ for _, policy := range te.Policies {
+ if policy == "" {
+ // Don't allow a policy with no name, even though it is a valid
+ // slice member
+ continue
+ }
+ policyMap[policy] = true
+ }
+
+ // Add the default policy
+ policyMap["default"] = true
- // Update the response with the policies associated with token
- auth.Policies = append(auth.Policies, "default")
- sort.Strings(auth.Policies)
+ te.Policies = []string{}
+ for k, _ := range policyMap {
+ te.Policies = append(te.Policies, k)
+ }
+
+ sort.Strings(te.Policies)
}
if err := c.tokenStore.create(&te); err != nil {
@@ -690,6 +705,7 @@ func (c *Core) handleLoginRequest(req *logical.Request) (*logical.Response, *log
// Populate the client token and accessor
auth.ClientToken = te.ID
auth.Accessor = te.Accessor
+ auth.Policies = te.Policies
// Register with the expiration manager
if err := c.expiration.RegisterAuth(req.Path, auth); err != nil {
|
Sort and filter policies going into the create token entry, then use
that as the definitive source for the response Auth object.
|
hashicorp_vault
|
train
|
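The Go above deduplicates policies through a map, forces `default` in, and re-sorts. A compact Python sketch of the same normalization, assuming policies arrive as a plain list of names:

```python
def normalize_policies(policies):
    # Mirrors the strListSubset branch: a token holding "root"
    # keeps only "root".
    if "root" in policies:
        return ["root"]
    unique = {p for p in policies if p}  # drop empty names, dedupe
    unique.add("default")                # always attach the default policy
    return sorted(unique)

assert normalize_policies(["dev", "", "dev"]) == ["default", "dev"]
assert normalize_policies(["dev", "root"]) == ["root"]
```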
2f0fe2170d5304979f53163406111a2c83a595a7
|
diff --git a/tests/jtreminio/Zimple/Tests/ZimpleTest.php b/tests/jtreminio/Zimple/Tests/ZimpleTest.php
index <HASH>..<HASH> 100755
--- a/tests/jtreminio/Zimple/Tests/ZimpleTest.php
+++ b/tests/jtreminio/Zimple/Tests/ZimpleTest.php
@@ -4,7 +4,7 @@ namespace jtreminio\Zimple\Tests;
use jtreminio\Zimple\Zimple as Container;
-class ContainerTest extends \PHPUnit_Framework_TestCase
+class ZimpleTest extends \PHPUnit_Framework_TestCase
{
/**
* Override service container
|
Renamed class to ZimpleTest to match new name
|
jtreminio_Zimple
|
train
|
d90530b7abba4713b689852e4671dd7f017b685c
|
diff --git a/src/labels/collision.js b/src/labels/collision.js
index <HASH>..<HASH> 100644
--- a/src/labels/collision.js
+++ b/src/labels/collision.js
@@ -1,3 +1,4 @@
+import Label from './label';
import RepeatGroup from './repeat_group';
import log from '../utils/log';
@@ -162,7 +163,7 @@ export default Collision = {
if (repeat) {
RepeatGroup.add(label, label.layout, tile);
}
- label.add(this.tiles[tile].bboxes);
+ Label.add(label, this.tiles[tile].bboxes);
}
};
diff --git a/src/labels/label.js b/src/labels/label.js
index <HASH>..<HASH> 100644
--- a/src/labels/label.js
+++ b/src/labels/label.js
@@ -69,13 +69,6 @@ export default class Label {
return intersect;
}
- // Add this label's bounding box to the provided set
- add (bboxes) {
- this.placed = true;
- bboxes.aabb.push(this.aabb);
- bboxes.obb.push(this.obb);
- }
-
// checks whether the label is within the tile boundaries
inTileBounds () {
let min = [ this.aabb[0], this.aabb[1] ];
@@ -98,6 +91,24 @@ export default class Label {
}
}
+// Generic label placement function, adds a label's bounding boxes to the currently placed set
+// Supports single or multiple collision boxes
+Label.add = function (label, bboxes) {
+ label.placed = true;
+
+ if (label.aabb) {
+ bboxes.aabb.push(label.aabb);
+ bboxes.obb.push(label.obb);
+ }
+
+ if (label.aabbs) {
+ for (let i = 0; i < label.aabbs.length; i++) {
+ bboxes.aabb.push(label.aabbs[i]);
+ bboxes.obb.push(label.obbs[i]);
+ }
+ }
+};
+
Label.id = 0;
Label.id_prefix = ''; // id prefix scoped to worker thread
diff --git a/src/labels/label_line.js b/src/labels/label_line.js
index <HASH>..<HASH> 100644
--- a/src/labels/label_line.js
+++ b/src/labels/label_line.js
@@ -154,17 +154,6 @@ class LabelLineBase {
return [longest_line, flip];
}
- // Add each bounding box to the collision pass
- add(bboxes) {
- this.placed = true;
- for (let i = 0; i < this.aabbs.length; i++) {
- let aabb = this.aabbs[i];
- let obb = this.obbs[i];
- let obj = { aabb, obb };
- Label.prototype.add.call(obj, bboxes);
- }
- }
-
// Checks each segment to see if it should be discarded (via collision). If any segment fails this test, they all fail.
discard(bboxes, exclude = null) {
if (this.throw_away) {
|
replace class-specific label placement function with a generic one
|
tangrams_tangram
|
train
|
31b2755274d2f6f138b40382e6f29c4b38196cc5
|
diff --git a/src/Mongolid/Serializer/Type/UTCDateTime.php b/src/Mongolid/Serializer/Type/UTCDateTime.php
index <HASH>..<HASH> 100644
--- a/src/Mongolid/Serializer/Type/UTCDateTime.php
+++ b/src/Mongolid/Serializer/Type/UTCDateTime.php
@@ -11,9 +11,9 @@ use Mongolid\Serializer\SerializableTypeInterface;
class UTCDateTime implements SerializableTypeInterface
{
/**
- * @var string
+ * @var MongoUTCDateTime
*/
- protected $date;
+ protected $mongoDate;
/**
* Constructor
@@ -22,7 +22,7 @@ class UTCDateTime implements SerializableTypeInterface
*/
public function __construct(MongoUTCDateTime $mongoDate)
{
- $this->date = $mongoDate->toDateTime()->format('Y-m-d H:i:s');
+ $this->mongoDate = $mongoDate;
}
/**
@@ -32,7 +32,7 @@ class UTCDateTime implements SerializableTypeInterface
*/
public function serialize()
{
- return serialize($this->date);
+ return serialize($this->getFormattedDate());
}
/**
@@ -44,7 +44,11 @@ class UTCDateTime implements SerializableTypeInterface
*/
public function unserialize($data)
{
- $this->date = unserialize($data);
+ $date = DateTime::createFromFormat(
+ 'Y-m-d H:i:s',
+ unserialize($data)
+ );
+ $this->mongoDate = new MongoUTCDateTime($date->getTimestamp()*1000);
}
/**
@@ -54,8 +58,16 @@ class UTCDateTime implements SerializableTypeInterface
*/
public function convert()
{
- $date = DateTime::createFromFormat('Y-m-d H:i:s', $this->date);
+ return $this->mongoDate;
+ }
- return new MongoUTCDateTime($date->getTimestamp()*1000);
+ /**
+ * Retrieves formated date string
+ *
+ * @return string
+ */
+ protected function getFormattedDate()
+ {
+ return $this->mongoDate->toDateTime()->format('Y-m-d H:i:s');
}
}
diff --git a/tests/Mongolid/Serializer/Type/UTCDatetimeTest.php b/tests/Mongolid/Serializer/Type/UTCDatetimeTest.php
index <HASH>..<HASH> 100644
--- a/tests/Mongolid/Serializer/Type/UTCDatetimeTest.php
+++ b/tests/Mongolid/Serializer/Type/UTCDatetimeTest.php
@@ -49,11 +49,11 @@ class UTCDateTimeTest extends TestCase
);
}
- public function testConstructorShouldCastMongodbUtcDateTimeToString()
+ public function testConstructorShouldSetMongoDate()
{
$this->assertAttributeEquals(
- $this->formatedDate,
- 'date',
+ $this->mongoDate,
+ 'mongoDate',
new UTCDateTime($this->mongoDate)
);
}
@@ -62,7 +62,7 @@ class UTCDateTimeTest extends TestCase
{
$date = unserialize(serialize(new UTCDateTime($this->mongoDate)));
- $this->assertAttributeEquals($this->formatedDate, 'date', $date);
+ $this->assertAttributeEquals($this->mongoDate, 'mongoDate', $date);
}
public function testConvertShouldRetrieveMongodbUtcDateTime()
|
Minor change in UTCDateTime in order to access UTCDateTime from driver
|
leroy-merlin-br_mongolid
|
train
|
6996d289985aebd17d998f8c655cd0274a6834a5
|
diff --git a/lib/sendyr/client.rb b/lib/sendyr/client.rb
index <HASH>..<HASH> 100644
--- a/lib/sendyr/client.rb
+++ b/lib/sendyr/client.rb
@@ -40,6 +40,22 @@ module Sendyr
respond_with_failure(result)
end
end
+
+ def delete(opts = {})
+ return noop if @noop
+
+ opts = {boolean: true, list: @list_id}.merge(opts)
+ raise_if_missing_arg([:email, :list], opts)
+
+ path = '/api/subscribers/delete.php'
+ result = post_to(path, opts)
+
+ if result.success? && %w(true 1).include?(clean_body(result))
+ respond_with_success(result)
+ else
+ respond_with_failure(result)
+ end
+ end
def subscription_status(opts = {})
return noop if @noop
|
Add delete endpoint
Rather than unsubscribing a user, delete the user from the list
|
cmer_sendyr
|
train
|
130f6075ae018505ab3a70b14856ebba3186bbc6
|
diff --git a/Gemfile b/Gemfile
index <HASH>..<HASH> 100644
--- a/Gemfile
+++ b/Gemfile
@@ -4,8 +4,8 @@ source 'https://rubygems.org'
gemspec
gem 'rake'
-group :test do
+group :development, :test do
gem 'rspec'
- gem 'mocha', :require => 'mocha/api'
- gem 'simplecov', :require => false
+ gem 'mocha', require: 'mocha/api'
+ gem 'simplecov', require: false
end
\ No newline at end of file
diff --git a/spec/artist_spec.rb b/spec/artist_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/artist_spec.rb
+++ b/spec/artist_spec.rb
@@ -137,9 +137,7 @@ describe Echonest::Artist do
end # /images
- describe '#list_genres' do
- use_vcr_cassette 'list_genres'
-
+ describe '#list_genres', vcr: {cassette_name: 'list_genres'} do
it 'should return an array of acceptable genres' do
create_valid_artist
@a.list_genres.should be_a Array
@@ -153,9 +151,7 @@ describe Echonest::Artist do
end
end
- describe '#search' do
- use_vcr_cassette 'search'
-
+ describe '#search', vcr: {cassette_name: 'search'} do
it 'should return an Array of artists' do
create_valid_artist
@a.search.should be_a Array
@@ -175,8 +171,7 @@ describe Echonest::Artist do
end
end
- context 'with bucket' do
- use_vcr_cassette 'search_2'
+ context 'with bucket', vcr: {cassette_name: 'search_2'} do
it 'should search the specified bucket' do
create_valid_artist
results = @a.search(bucket: "id:musicbrainz")
@@ -186,9 +181,7 @@ describe Echonest::Artist do
end
end # /search
- describe '#top_hottt' do
- use_vcr_cassette('top_hottt')
-
+ describe '#top_hottt', vcr: {cassette_name: 'top_hottt'} do
it 'should return an Array of artists' do
create_valid_artist
@a.top_hottt.should be_a Array
@@ -217,9 +210,7 @@ describe Echonest::Artist do
end # /top_hottt
- describe '#songs' do
- use_vcr_cassette 'songs'
-
+ describe '#songs', vcr: {cassette_name: 'songs'} do
it 'should return an Array of a Hash of songs' do
create_valid_artist
@a.songs.should be_a Array
@@ -244,9 +235,7 @@ describe Echonest::Artist do
end
end
- describe '#terms' do
- use_vcr_cassette 'terms'
-
+ describe '#terms', vcr: {cassette_name: 'terms'} do
it 'should return an array of hashes of terms' do
create_valid_artist_with_id
@a.terms.should be_a Array
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index <HASH>..<HASH> 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -7,15 +7,16 @@ require 'echonest-ruby-api'
require 'vcr'
require 'webmock/rspec'
require 'coveralls'
+#require 'mocha/api'
Coveralls.wear!
RSpec.configure do |config|
config.mock_with :mocha
- config.extend VCR::RSpec::Macros
end
VCR.configure do |c|
+ c.configure_rspec_metadata!
c.cassette_library_dir = 'fixtures/vcr_cassettes'
c.hook_into :webmock
# TODO: Get rid of this when possible!
|
Tweak spec to work with new format VCR options
|
maxehmookau_echonest-ruby-api
|
train
|
dd665904420a4a90bebff67fe2993b059a9903d0
|
diff --git a/js/bitmart.js b/js/bitmart.js
index <HASH>..<HASH> 100644
--- a/js/bitmart.js
+++ b/js/bitmart.js
@@ -691,7 +691,6 @@ module.exports = class bitmart extends Exchange {
if (percentage !== undefined) {
percentage *= 100;
}
- // bitmart base/quote reversed
const baseVolume = this.safeFloat2 (ticker, 'base_volume_24h', 'base_coin_volume');
const quoteVolume = this.safeFloat2 (ticker, 'quote_volume_24h', 'quote_coin_volume');
let vwap = undefined;
|
bitmart comment/minor edit
|
ccxt_ccxt
|
train
|
87143b78e4127025fb2c9307a7448918b159d83d
|
diff --git a/container/endpoint.go b/container/endpoint.go
index <HASH>..<HASH> 100644
--- a/container/endpoint.go
+++ b/container/endpoint.go
@@ -469,10 +469,10 @@ func (c *Controller) processTenantEndpoint(conn coordclient.Connection, parentPa
// setProxyAddresses tells the proxies to update with addresses
func (c *Controller) setProxyAddresses(tenantEndpointID string, endpoints []dao.ApplicationEndpoint, importVirtualAddress, purpose string) {
- glog.Infof("starting setProxyAddresses(tenantEndpointID: %s, purpose: %s)", tenantEndpointID, purpose)
+ glog.V(1).Info("starting setProxyAddresses(tenantEndpointID: %s, purpose: %s)", tenantEndpointID, purpose)
proxiesLock.Lock()
defer proxiesLock.Unlock()
- glog.Infof("starting setProxyAddresses(tenantEndpointID: %s) locked", tenantEndpointID)
+ glog.V(1).Infof("starting setProxyAddresses(tenantEndpointID: %s) locked", tenantEndpointID)
if len(endpoints) <= 0 {
if prxy, ok := proxies[tenantEndpointID]; ok {
diff --git a/web/vhost.go b/web/vhost.go
index <HASH>..<HASH> 100644
--- a/web/vhost.go
+++ b/web/vhost.go
@@ -217,7 +217,7 @@ func (sc *ServiceConfig) processVhost(vhostID string) registry.ProcessChildrenFu
glog.Errorf("processVhost - Error getting vhost for %v/%v: %v", parentPath, child, err)
continue
}
- glog.Infof("Processing vhost %s/%s: %#v", parentPath, child, vhEndpoint)
+ glog.V(1).Infof("Processing vhost %s/%s: %#v", parentPath, child, vhEndpoint)
vepInfo := createvhostEndpointInfo(vhEndpoint)
vhostEndpoints.endpoints = append(vhostEndpoints.endpoints, vepInfo)
}
diff --git a/zzk/registry/registry.go b/zzk/registry/registry.go
index <HASH>..<HASH> 100644
--- a/zzk/registry/registry.go
+++ b/zzk/registry/registry.go
@@ -235,7 +235,7 @@ func watch(conn client.Connection, path string, cancel <-chan bool, processChild
return client.ErrNoNode
}
for {
- glog.V(0).Infof("watching children at path: %s", path)
+ glog.V(1).Infof("watching children at path: %s", path)
nodeIDs, event, err := conn.ChildrenW(path)
glog.V(1).Infof("child watch for path %s returned: %#v", path, nodeIDs)
if err != nil {
@@ -244,9 +244,9 @@ func watch(conn client.Connection, path string, cancel <-chan bool, processChild
return err
}
processChildren(conn, path, nodeIDs...)
- //This blocks until a change happens under the key
select {
- case <-time.After(time.Second * 10):
+ // timeout in case we missed a zookeeper event
+ case <-time.After(time.Second * 60):
case ev := <-event:
glog.V(1).Infof("watch event %+v at path: %s", ev, path)
case <-cancel:
diff --git a/zzk/registry/vhostregistry.go b/zzk/registry/vhostregistry.go
index <HASH>..<HASH> 100644
--- a/zzk/registry/vhostregistry.go
+++ b/zzk/registry/vhostregistry.go
@@ -67,10 +67,7 @@ type VhostRegistry struct {
// VHostRegistry ensures the vhost registry and returns the VhostRegistry type
func VHostRegistry(conn client.Connection) (*VhostRegistry, error) {
- glog.Infof("getting vhostPath()")
path := vhostPath()
- glog.Infof("got path: %s", path)
- glog.Infof("checking path exists: %s", path)
timeout := time.After(time.Second * 60)
var err error
|
increased timeout, decreased logging priority of chatty items
|
control-center_serviced
|
train
|
da1e0aafea63e1d70a7f4f2bd1d20d37c7392517
|
diff --git a/grimoire_elk/elk/enrich.py b/grimoire_elk/elk/enrich.py
index <HASH>..<HASH> 100644
--- a/grimoire_elk/elk/enrich.py
+++ b/grimoire_elk/elk/enrich.py
@@ -735,16 +735,16 @@ class Enrich(ElasticItems):
name=iden['name'], username=iden['username'])
sh_ids['uuid'] = u.uuid
except WrappedValueError:
- logger.error("None Identity found")
- logger.error(identity)
+ logger.warning("None Identity found %s", backend_name)
+ logger.warning(identity)
except NotFoundError:
- logger.error("Identity not found in Sorting Hat")
+ logger.error("Identity not found in Sorting Hat %s", backend_name)
logger.error(identity)
except UnicodeEncodeError:
- logger.error("UnicodeEncodeError")
+ logger.error("UnicodeEncodeError %s", backend_name)
logger.error(identity)
except Exception as ex:
- logger.error("Unknown error adding sorting hat identity %s", ex)
+ logger.error("Unknown error adding sorting hat identity %s %s", ex, backend_name)
logger.error(identity)
logger.error(ex)
|
[enrich][sortinghat] Improve logs when an error appears during SH activity in arthur.py.
Add the data source in which the error appears, to make debugging via the
mordred logs easier later.
|
chaoss_grimoirelab-elk
|
train
|
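A side note on the `logger.error("... %s", value)` form this diff standardizes on: the stdlib logging module defers %-interpolation until a record is actually emitted, so the arguments are passed separately rather than pre-formatted. A small self-contained example:

```python
import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("enrich")

backend_name = "git"
# Interpolation happens inside the logging machinery, and only if the
# record passes the level filter.
logger.warning("None Identity found %s", backend_name)
logger.error("Unknown error adding sorting hat identity %s %s", "boom", backend_name)
```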
c3830d1507eb51944f2a2d03d563836857544d95
|
diff --git a/tests/cancel_test.js b/tests/cancel_test.js
index <HASH>..<HASH> 100644
--- a/tests/cancel_test.js
+++ b/tests/cancel_test.js
@@ -161,3 +161,35 @@ test("cancel during flush", function() {
ok(!functionWasCalled, "function was not called");
});
+
+test("with GUID_KEY", function() {
+ expect(3);
+
+ var obj = {
+ ___FOO___: 1
+ };
+
+ var bb = new Backburner(['action'], {
+ GUID_KEY: '___FOO___'
+ });
+
+ var wasCalled = 0;
+
+ function fn () {
+ wasCalled++;
+ }
+
+ bb.run(function() {
+ var timer = bb.scheduleOnce('action', obj, fn);
+
+ equal(wasCalled, 0);
+
+ bb.cancel(timer);
+
+ bb.scheduleOnce('action', obj, fn);
+
+ equal(wasCalled, 0);
+ });
+ equal(wasCalled, 1);
+
+});
diff --git a/tests/defer_once_test.js b/tests/defer_once_test.js
index <HASH>..<HASH> 100644
--- a/tests/defer_once_test.js
+++ b/tests/defer_once_test.js
@@ -219,35 +219,3 @@ test("onError", function() {
});
});
});
-
-test("cancel", function() {
- expect(3);
-
- var obj = {
- ___FOO___: 1
- };
-
- var bb = new Backburner(['action'], {
- GUID_KEY: '___FOO___'
- });
-
- var wasCalled = 0;
-
- function fn () {
- wasCalled++;
- }
-
- bb.run(function() {
- var timer = bb.scheduleOnce('action', obj, fn);
-
- equal(wasCalled, 0);
-
- bb.cancel(timer);
-
- bb.scheduleOnce('action', obj, fn);
-
- equal(wasCalled, 0);
- });
- equal(wasCalled, 1);
-
-});
|
move cancel test to the cancel test file.
|
BackburnerJS_backburner.js
|
train
|
206775936042f1f8150ad30f2816d8a10f65b025
|
diff --git a/packages/dev/config/babel.js b/packages/dev/config/babel.js
index <HASH>..<HASH> 100644
--- a/packages/dev/config/babel.js
+++ b/packages/dev/config/babel.js
@@ -22,7 +22,7 @@ module.exports = {
['@babel/preset-env', {
modules: 'commonjs',
targets: {
- browsers: 'defaults',
+ browsers: '>0.25% and last 2 versions and not ie 11 and not OperaMini all',
node: '10'
}
}],
|
Browserslist and queries (#<I>)
|
polkadot-js_dev
|
train
|
b952ba9fd879fde2b6af8e3e949912116960e561
|
diff --git a/app/assets/javascripts/component_guide/application.js b/app/assets/javascripts/component_guide/application.js
index <HASH>..<HASH> 100644
--- a/app/assets/javascripts/component_guide/application.js
+++ b/app/assets/javascripts/component_guide/application.js
@@ -1,3 +1,8 @@
+//= require jquery/dist/jquery
//= require govuk/modules
//= require_tree ./vendor
//= require_tree .
+
+$(document).ready(function () {
+ GOVUK.modules.start()
+})
|
Include jQuery and start modules from component_guide JS
Without static requiring jQuery and running GOVUK.modules.start there
isn't anything to start the modules.
The call to modules.start() is wrapped in a document.ready so as to give
modules added in later scripts a chance to start.
|
alphagov_govuk_publishing_components
|
train
|
807eb2551ebf79b446effb7ba481c041e462b764
|
diff --git a/src/com/opera/core/systems/OperaDesktopDriver.java b/src/com/opera/core/systems/OperaDesktopDriver.java
index <HASH>..<HASH> 100644
--- a/src/com/opera/core/systems/OperaDesktopDriver.java
+++ b/src/com/opera/core/systems/OperaDesktopDriver.java
@@ -43,12 +43,22 @@ public class OperaDesktopDriver extends OperaDriver {
if (!opera_path.isEmpty()) {
this.settings.setOperaBinaryLocation(opera_path);
- // Now create the OperaLauncherRunner that we have the binary path
- this.operaRunner = new OperaLauncherRunner(this.settings);
-
+ // OBS: Have to quit opera before creating the LauncherRunner, and sleep,
+ // else the restart doesn't work on linux, even with the external launcher
+
// Quit Opera and shutdown the services
this.services.quit();
+
+ try {
+ Thread.sleep(6000);
+ } catch (InterruptedException e) {
+ // ignore
+ }
+
+ // Now create the OperaLauncherRunner that we have the binary path
+ this.operaRunner = new OperaLauncherRunner(this.settings);
+
// Work around stop and restart Opera so the Launcher has control of it now
// Initialising the services will start Opera if the OperaLauncherRunner is
// setup correctly
|
We have to quit opera first (using action) before creating the launcher
|
operasoftware_operaprestodriver
|
train
|
bcd9125c09243bbaf682636a3c4eb0c03ba3ebec
|
diff --git a/tests/thumbnail_tests/tests.py b/tests/thumbnail_tests/tests.py
index <HASH>..<HASH> 100644
--- a/tests/thumbnail_tests/tests.py
+++ b/tests/thumbnail_tests/tests.py
@@ -8,6 +8,7 @@ from PIL import Image
from django.core.files.storage import default_storage
from django.template.loader import render_to_string
from django.test.client import Client
+from django.test import TestCase
from os.path import join as pjoin
from sorl.thumbnail import default, get_thumbnail, delete
from sorl.thumbnail.conf import settings
@@ -248,7 +249,7 @@ class SimpleTestCase(SimpleTestCaseBase):
p2 = Popen(['grep', '-c', 'Quality: 50'], stdin=p1.stdout, stdout=PIPE)
p1.stdout.close()
output = p2.communicate()[0].strip()
- self.assertEqual(output, '1')
+ self.assertEqual(str(output), '1')
def test_image_file_deserialize(self):
im = ImageFile(Item.objects.get(image='500x500.jpg').image)
@@ -353,7 +354,7 @@ class TemplateTestCaseA(SimpleTestCaseBase):
path = pjoin(settings.MEDIA_ROOT, th.name)
p = Popen(['identify', '-verbose', path], stdout=PIPE)
p.wait()
- m = re.search('Interlace: JPEG', p.stdout.read())
+ m = re.search('Interlace: JPEG', str(p.stdout.read()))
self.assertEqual(bool(m), True)
def test_nonprogressive(self):
@@ -415,29 +416,17 @@ class TemplateTestCaseB(unittest.TestCase):
self.assertEqual(val, '<p>empty</p>')
-class TemplateTestCaseClient(unittest.TestCase):
- def setUp(self):
- self.org_settings = {}
- params = {
- 'THUMBNAIL_DEBUG': False,
- }
- for k, v in params.iteritems():
- self.org_settings[k] = getattr(settings, k)
- setattr(settings, k, v)
-
+class TemplateTestCaseClient(TestCase):
@skip("mailsending not working")
def testEmptyError(self):
- client = Client()
- response = client.get('/thumbnail9.html')
- self.assertEqual(response.content.strip(), '<p>empty</p>')
- from django.core.mail import outbox
- self.assertEqual(outbox[0].subject, '[sorl-thumbnail] ERROR: /thumbnail9.html')
- end = outbox[0].body.split('\n\n')[-2][-20:-1]
- self.assertEqual(end, 'tests/media/invalid')
-
- def tearDown(self):
- for k, v in self.org_settings.iteritems():
- setattr(settings, k, v)
+ with self.settings(THUMBNAIL_DEBUG=False):
+ client = Client()
+ response = client.get('/thumbnail9.html')
+ self.assertEqual(response.content.strip(), '<p>empty</p>')
+ from django.core.mail import outbox
+ self.assertEqual(outbox[0].subject, '[sorl-thumbnail] ERROR: /thumbnail9.html')
+ end = outbox[0].body.split('\n\n')[-2][-20:-1]
+ self.assertEqual(end, 'tests/media/invalid')
class CropTestCase(unittest.TestCase):
|
make tests more python 3 compatible
|
jazzband_sorl-thumbnail
|
train
|
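The pitfall behind those `str(...)` wrappers: on Python 3 a subprocess pipe yields `bytes`, and `str(b'1')` is `"b'1'"`, so explicit decoding is the usual portable way to compare against text. A small sketch of the idea (not sorl-thumbnail's code):

```python
from subprocess import PIPE, Popen

p = Popen(["echo", "1"], stdout=PIPE)
output = p.communicate()[0].strip()

# On Python 3 output is b'1'; decode before comparing with text.
assert output.decode("utf-8") == "1"
```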
2ade831e1630800dcd271ee654026219bddba4c8
|
diff --git a/testdata/blank-import-lib.go b/testdata/blank-import-lib.go
index <HASH>..<HASH> 100644
--- a/testdata/blank-import-lib.go
+++ b/testdata/blank-import-lib.go
@@ -10,6 +10,7 @@ package foo
import (
"fmt"
+
/* MATCH /blank import/ */ _ "os"
/* MATCH /blank import/ */ _ "net/http"
|
Add a blank line in a test so that gofmt won't break the magic comment's location.
|
golang_lint
|
train
|
415c5a52be6063c52484d80d2ae5795226829334
|
diff --git a/bcbio/variation/bedutils.py b/bcbio/variation/bedutils.py
index <HASH>..<HASH> 100644
--- a/bcbio/variation/bedutils.py
+++ b/bcbio/variation/bedutils.py
@@ -67,7 +67,7 @@ def remove_bad(line):
"""Remove non-increasing BED lines which will cause variant callers to choke.
"""
parts = line.strip().split("\t")
- if int(parts[2]) > int(parts[1]):
+ if line.strip() and len(parts) > 2 and int(parts[2]) > int(parts[1]):
return line
else:
return None
|
Remove empty lines in BED files when pre-processing
Avoids errors looking for incorrect BED files. Fixes #<I>
|
bcbio_bcbio-nextgen
|
train
|
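The patched guard in context, reproduced from the diff above with a tiny driver showing the cases it now rejects:

```python
def remove_bad(line):
    """Remove non-increasing BED lines which will cause variant callers to choke."""
    parts = line.strip().split("\t")
    # Blank lines and lines with fewer than three columns are dropped
    # before the start/end comparison, which previously raised on them.
    if line.strip() and len(parts) > 2 and int(parts[2]) > int(parts[1]):
        return line
    return None

lines = ["chr1\t10\t20\n", "\n", "chr1\t30\t30\n", "chr1\t5\n"]
print([l for l in lines if remove_bad(l)])  # ['chr1\t10\t20\n']
```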
5d66a49310c51b4c63fd79b8217bba8983afdefb
|
diff --git a/builder/lib/builder/xmlbase.rb b/builder/lib/builder/xmlbase.rb
index <HASH>..<HASH> 100644
--- a/builder/lib/builder/xmlbase.rb
+++ b/builder/lib/builder/xmlbase.rb
@@ -77,7 +77,7 @@ module Builder
# Append text to the output target. Escape any markup. May be
# used within the markup brakets as:
#
- # builder.p { br; text! "HI" } #=> <p><br/>HI</p>
+ # builder.p { |b| b.br; b.text! "HI" } #=> <p><br/>HI</p>
def text!(text)
_text(_escape(text))
end
@@ -93,7 +93,7 @@ module Builder
#
# It is also useful for stacking builder objects. Builders only
# use <tt><<</tt> to append to the target, so by supporting this
- # method/operation builders can use oother builders as their
+ # method/operation builders can use other builders as their
# targets.
def <<(text)
_text(text)
|
fixed a few comments
git-svn-id: svn+ssh://rubyforge.org/var/svn/builder/trunk@<I> b<I>df<I>-ad1a-<I>-<I>b8-e<I>a<I>
|
jimweirich_builder
|
train
|
1a3422dccc3e48108bb0f386a792fe9e83299664
|
diff --git a/discord/integrations.py b/discord/integrations.py
index <HASH>..<HASH> 100644
--- a/discord/integrations.py
+++ b/discord/integrations.py
@@ -196,7 +196,7 @@ class StreamIntegration(Integration):
self.expire_behaviour: ExpireBehaviour = try_enum(ExpireBehaviour, data['expire_behavior'])
self.expire_grace_period: int = data['expire_grace_period']
self.synced_at: datetime.datetime = parse_time(data['synced_at'])
- self._role_id: int = int(data['role_id'])
+ self._role_id: Optional[int] = _get_as_snowflake(data, 'role_id')
self.syncing: bool = data['syncing']
self.enable_emoticons: bool = data['enable_emoticons']
self.subscriber_count: int = data['subscriber_count']
diff --git a/discord/types/integration.py b/discord/types/integration.py
index <HASH>..<HASH> 100644
--- a/discord/types/integration.py
+++ b/discord/types/integration.py
@@ -69,7 +69,7 @@ class BaseIntegration(PartialIntegration):
class StreamIntegration(BaseIntegration):
- role_id: Snowflake
+ role_id: Optional[Snowflake]
enable_emoticons: bool
subscriber_count: int
revoked: bool
|
Handle role_id possibly being None for StreamIntegration
|
Rapptz_discord.py
|
train
|
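A sketch of what a helper like `_get_as_snowflake` presumably does (the real discord.py internal may differ): coerce the value to `int`, or return `None` when the key is missing or explicitly null, instead of letting `int(None)` raise.

```python
from typing import Any, Dict, Optional

def get_as_snowflake(data: Dict[str, Any], key: str) -> Optional[int]:
    # Missing key and explicit null both map to None.
    value = data.get(key)
    return int(value) if value is not None else None

assert get_as_snowflake({"role_id": "123"}, "role_id") == 123
assert get_as_snowflake({"role_id": None}, "role_id") is None
assert get_as_snowflake({}, "role_id") is None
```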
6796c21b92e95e8f1fcafa3ef889a2b3160d7db6
|
diff --git a/lib/radar_client_rb/resource.rb b/lib/radar_client_rb/resource.rb
index <HASH>..<HASH> 100644
--- a/lib/radar_client_rb/resource.rb
+++ b/lib/radar_client_rb/resource.rb
@@ -19,27 +19,34 @@ module Radar
def get
result = {}
- @client.redis.hgetall(@name).each do |key, value|
- user_id, client_id = key.split('.')
- message = JSON.parse(value)
- if message['online'] && is_sentry_online?(message['sentry'])
- result[user_id] ||= { :clients => {}, :userType => message['userType'] }
- result[user_id][:clients][client_id] = message['userData'] || {}
- end
+
+ clients = get_clients.select { |client| client['online'] }
+ sentries = clients.map { |client| client['sentry'] }
+ online_sentries = select_online_sentries(sentries)
+ online_clients = clients.select { |client| online_sentries.include?(client['sentry']) }
+
+ online_clients.each do |client|
+ user_id = client['userId']
+ result[user_id] ||= { :clients => {}, :userType => client['userType'] }
+ result[user_id][:clients][client['clientId']] = client['userData'] || {}
end
result
end
private
- def sentries
- @sentries ||= @client.redis.hgetall('sentry:/radar')
+ def get_clients
+ @client.redis.hgetall(@name).values.map { |value| JSON.parse(value) }
end
- def is_sentry_online?(sentry)
- return true unless sentry
+ def select_online_sentries(sentry_ids)
+ return [] unless sentry_ids && sentry_ids.any?
+ online_sentries = @client.redis.hmget('sentry:/radar', *sentry_ids.uniq)
+ .select { |x| !x.nil? }
+ .map { |data| JSON.parse(data) }
+ .select { |sentry| !message_is_expired?(sentry) }
- sentries.include?(sentry) && !message_is_expired?(JSON.parse(sentries[sentry]))
+ online_sentries.map { |sentry| sentry['name'] }
end
def message_is_expired?(message)
diff --git a/spec/client_spec.rb b/spec/client_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/client_spec.rb
+++ b/spec/client_spec.rb
@@ -72,7 +72,8 @@ describe Radar::Client do
:userData => 'userData3',
:clientId => client_id3,
:online => true,
- :at => Time.now.to_i * 1000
+ :at => Time.now.to_i * 1000,
+ :sentry => sentry_id1
}
end
let(:presence4) do
@@ -167,7 +168,7 @@ describe Radar::Client do
end
it 'does not crash if the key does not exist' do
- assert_equal client.presence('inexistant').get, {}
+ assert_equal client.presence('nonexistant').get, {}
end
end
@@ -212,8 +213,8 @@ describe Radar::Client do
)
assert_equal client.message(scope).get, [[message1, 123], [message2, 124]]
end
- it 'does not crash on inexistant keys' do
- assert_equal client.message('inexistant').get, []
+ it 'does not crash on nonexistant keys' do
+ assert_equal client.message('nonexistant').get, []
end
end
end
|
Filter out clients with offline sentries
|
zendesk_radar_client_rb
|
train
|
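A pure-Python sketch of the batching above: gather the sentry ids once, resolve them in a single pass, and keep only clients whose sentry is still alive. The `sentry_store` dict stands in for the `sentry:/radar` Redis hash, and the `expiration` field name is an assumption:

```python
import json

def online_clients(clients, sentry_store, now_ms):
    # sentry_store: sentry id -> JSON blob ('expiration' is an assumed
    # field name for the sentry's expiry timestamp in ms).
    clients = [c for c in clients if c.get("online")]
    sentry_ids = {c["sentry"] for c in clients if c.get("sentry")}
    alive = set()
    for sid in sentry_ids:
        raw = sentry_store.get(sid)
        if raw is not None and json.loads(raw)["expiration"] > now_ms:
            alive.add(sid)
    return [c for c in clients if c.get("sentry") in alive]
```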
8d3bff6800887f5051e81e3b36253604b3bca1cb
|
diff --git a/spyder/plugins/maininterpreter/tests/test_confpage.py b/spyder/plugins/maininterpreter/tests/test_confpage.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/maininterpreter/tests/test_confpage.py
+++ b/spyder/plugins/maininterpreter/tests/test_confpage.py
@@ -9,6 +9,9 @@
# Standard library imports
import time
+# Third-party imports
+import pytest
+
# Local imports
from spyder.api.plugins import Plugins
from spyder.api.plugin_registration.registry import PLUGIN_REGISTRY
@@ -23,11 +26,15 @@ from spyder.utils.pyenv import get_list_pyenv_envs
# We're also recording the time needed to get them to compare it with the
# loading time of that config page.
t0 = time.time()
-get_list_conda_envs()
-get_list_pyenv_envs()
+conda_envs = get_list_conda_envs()
+pyenv_envs = get_list_pyenv_envs()
GET_ENVS_TIME = time.time() - t0
+@pytest.mark.skipif(
+ len(conda_envs) == 0 and len(pyenv_envs) == 0,
+ reason="Makes no sense if conda and pyenv are not installed"
+)
def test_load_time(qtbot):
# Create Preferences dialog
main = MainWindowMock()
|
Testing: Skip main interpreter conf page test if conda and pyenv are not installed
|
spyder-ide_spyder
|
train
|
5e0ee6c09522ae573ef03b77dbd609f3db4d15b6
|
diff --git a/codec-dns/src/main/java/io/netty/handler/codec/dns/AbstractDnsMessage.java b/codec-dns/src/main/java/io/netty/handler/codec/dns/AbstractDnsMessage.java
index <HASH>..<HASH> 100644
--- a/codec-dns/src/main/java/io/netty/handler/codec/dns/AbstractDnsMessage.java
+++ b/codec-dns/src/main/java/io/netty/handler/codec/dns/AbstractDnsMessage.java
@@ -375,6 +375,11 @@ public abstract class AbstractDnsMessage extends AbstractReferenceCounted implem
@Override
protected void deallocate() {
clear();
+
+ final ResourceLeak leak = this.leak;
+ if (leak != null) {
+ leak.close();
+ }
}
@Override
|
Fix missing ResourceLeak.close() in AbstractDnsMessage
Motivation:
ResourceLeak.close() must be called when a reference-counted resource is
deallocated, but AbstractDnsMessage.deallocate() forgot to call it.
Modifications:
Call ResourceLeak.close() for the tracked AbstractDnsMessage instances
Result:
Fix the false resource leak warnings
|
netty_netty
|
train
|
a0039e2930c765269930b97fd2f8905a820ebb8a
|
diff --git a/apiserver/service/charmstore.go b/apiserver/service/charmstore.go
index <HASH>..<HASH> 100644
--- a/apiserver/service/charmstore.go
+++ b/apiserver/service/charmstore.go
@@ -194,7 +194,11 @@ func resolveCharm(ref *charm.Reference, repo charmrepo.Interface) (*charm.URL, e
}
// Resolve the charm location with the repository.
- curl, _, err := repo.Resolve(ref)
+ ref, _, err := repo.Resolve(ref)
+ if err != nil {
+ return nil, err
+ }
+ curl, err := ref.URL("")
if err != nil {
return nil, err
}
diff --git a/cmd/juju/commands/common.go b/cmd/juju/commands/common.go
index <HASH>..<HASH> 100644
--- a/cmd/juju/commands/common.go
+++ b/cmd/juju/commands/common.go
@@ -154,7 +154,11 @@ func resolveCharmURL(curlStr string, csParams charmrepo.NewCharmStoreParams, rep
}
return curl, repo, nil
}
- curl, _, err := repo.Resolve(ref)
+ ref, _, err = repo.Resolve(ref)
+ if err != nil {
+ return nil, nil, errors.Trace(err)
+ }
+ curl, err := ref.URL("")
if err != nil {
return nil, nil, errors.Trace(err)
}
diff --git a/juju/testing/conn.go b/juju/testing/conn.go
index <HASH>..<HASH> 100644
--- a/juju/testing/conn.go
+++ b/juju/testing/conn.go
@@ -398,7 +398,11 @@ func updateSecrets(env environs.Environ, st *state.State) error {
func PutCharm(st *state.State, curl *charm.URL, repo charmrepo.Interface, bumpRevision bool) (*state.Charm, error) {
if curl.Revision == -1 {
var err error
- curl, _, err = repo.Resolve(curl.Reference())
+ ref, _, err := repo.Resolve(curl.Reference())
+ if err != nil {
+ return nil, fmt.Errorf("cannot get latest charm revision: %v", err)
+ }
+ curl, err = ref.URL("")
if err != nil {
return nil, fmt.Errorf("cannot get latest charm revision: %v", err)
}
|
Changes to support modified Resolve() method on repos
|
juju_juju
|
train
|
8348f2c2abd9f96de9d70f7fb30e6049a2d8ab50
|
diff --git a/test/test_formats.py b/test/test_formats.py
index <HASH>..<HASH> 100644
--- a/test/test_formats.py
+++ b/test/test_formats.py
@@ -1,6 +1,7 @@
#!/usr/bin/env python
"""Test Mapchete default formats."""
+import pytest
import os
import yaml
from tilematrix import TilePyramid
@@ -15,6 +16,7 @@ HTTP_RASTER = (
"http://sentinel-s2-l1c.s3.amazonaws.com/tiles/33/T/WN/2016/4/3/0/B02.jp2"
)
+
def test_available_input_formats():
"""Check if default input formats can be listed."""
assert set(['Mapchete', 'raster_file', 'vector_file']).issubset(
@@ -54,34 +56,19 @@ def test_base_format_classes():
assert tmp.pixelbuffer == 0
assert tmp.crs
assert tmp.srid
- try:
+ with pytest.raises(NotImplementedError):
tmp.open(None)
- raise Exception()
- except NotImplementedError:
- pass
- try:
+ with pytest.raises(NotImplementedError):
tmp.bbox()
- raise Exception()
- except NotImplementedError:
- pass
- try:
+ with pytest.raises(NotImplementedError):
tmp.exists()
- raise Exception()
- except NotImplementedError:
- pass
# InputTile
tmp = base.InputTile(None)
- try:
+ with pytest.raises(NotImplementedError):
tmp.read()
- raise Exception()
- except NotImplementedError:
- pass
- try:
+ with pytest.raises(NotImplementedError):
tmp.is_empty()
- raise Exception()
- except NotImplementedError:
- pass
# OutputData
tmp = base.OutputData(dict(pixelbuffer=0, type="geodetic", metatiling=1))
@@ -89,41 +76,20 @@ def test_base_format_classes():
assert tmp.pixelbuffer == 0
assert tmp.crs
assert tmp.srid
- try:
+ with pytest.raises(NotImplementedError):
tmp.read(None)
- raise Exception()
- except NotImplementedError:
- pass
- try:
+ with pytest.raises(NotImplementedError):
tmp.write(None)
- raise Exception()
- except NotImplementedError:
- pass
- try:
+ with pytest.raises(NotImplementedError):
tmp.tiles_exist(None)
- raise Exception()
- except NotImplementedError:
- pass
- try:
+ with pytest.raises(NotImplementedError):
tmp.is_valid_with_config(None)
- raise Exception()
- except NotImplementedError:
- pass
- try:
+ with pytest.raises(NotImplementedError):
tmp.for_web(None)
- raise Exception()
- except NotImplementedError:
- pass
- try:
+ with pytest.raises(NotImplementedError):
tmp.empty(None)
- raise Exception()
- except NotImplementedError:
- pass
- try:
+ with pytest.raises(NotImplementedError):
tmp.open(None, None)
- raise Exception()
- except NotImplementedError:
- pass
def test_http_rasters():
|
using proper pytest.raises to test exceptions
|
ungarj_mapchete
|
train
|
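For reference, each replaced try/raise/except block is equivalent to one `pytest.raises` context manager, which also fails the test when no exception is raised at all:

```python
import pytest

class InputTile:
    def read(self):
        raise NotImplementedError

def test_read_is_abstract():
    # Fails if read() raises nothing or raises a different exception.
    with pytest.raises(NotImplementedError):
        InputTile().read()
```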
a2d2b763d6be7c60a4c757c5be4f85dc80a404d6
|
diff --git a/src/cobra/io/web/load.py b/src/cobra/io/web/load.py
index <HASH>..<HASH> 100644
--- a/src/cobra/io/web/load.py
+++ b/src/cobra/io/web/load.py
@@ -25,18 +25,16 @@ logger = logging.getLogger(__name__)
configuration = Configuration()
-cobrapy_repository = Cobrapy()
-bigg_models_repository = BiGGModels()
-bio_models_repository = BioModels()
+DEFAULT_REPOSITORIES = (
+ Cobrapy(),
+ BiGGModels(),
+ BioModels(),
+)
def load_model(
model_id: str,
- repositories: Iterable[AbstractModelRepository] = (
- cobrapy_repository,
- bigg_models_repository,
- bio_models_repository,
- ),
+ repositories: Iterable[AbstractModelRepository] = DEFAULT_REPOSITORIES,
cache: bool = True,
) -> "Model":
"""
|
refactor: group default repositories for load.py
|
opencobra_cobrapy
|
train
|
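The refactor leans on a standard Python detail: default argument values are evaluated once at import time, so grouping the repository instances into one module-level tuple both names the default and guarantees the same instances are reused across calls. A minimal sketch with stand-in values for the repository objects:

```python
# Stand-ins for the repository instances; a tuple keeps the default immutable.
DEFAULT_REPOSITORIES = ("Cobrapy", "BiGG Models", "BioModels")

def load_model(model_id, repositories=DEFAULT_REPOSITORIES):
    # The default tuple is built once at import; every call without an
    # explicit argument sees the same objects.
    for repo in repositories:
        print(f"querying {repo} for {model_id!r}")

load_model("e_coli_core")
```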
e278f852b34122de1d1e874026047c0823c00943
|
diff --git a/builtin/providers/openstack/provider_test.go b/builtin/providers/openstack/provider_test.go
index <HASH>..<HASH> 100644
--- a/builtin/providers/openstack/provider_test.go
+++ b/builtin/providers/openstack/provider_test.go
@@ -8,6 +8,11 @@ import (
"github.com/hashicorp/terraform/terraform"
)
+
+var (
+ OS_REGION_NAME = ""
+)
+
var testAccProviders map[string]terraform.ResourceProvider
var testAccProvider *schema.Provider
@@ -29,23 +34,24 @@ func TestProvider_impl(t *testing.T) {
}
func testAccPreCheck(t *testing.T) {
- if v := os.Getenv("OS_REGION_NAME"); v == "" {
- t.Fatal("OS_REGION_NAME must be set for acceptance tests")
- }
-
- if v := os.Getenv("OS_AUTH_URL"); v == "" {
+ v := os.Getenv("OS_AUTH_URL")
+ if v == "" {
t.Fatal("OS_AUTH_URL must be set for acceptance tests")
}
- if v := os.Getenv("OS_USERNAME"); v == "" {
- t.Fatal("OS_USERNAME must be set for acceptance tests")
+ v = os.Getenv("OS_REGION_NAME")
+ if v == "" {
+ t.Fatal("OS_REGION_NAME must be set for acceptance tests")
}
+ OS_REGION_NAME = v
- if v := os.Getenv("OS_TENANT_NAME"); v != "us-central1" {
- t.Fatal("OS_TENANT_NAME must be set to us-central1 for acceptance tests")
+ v = os.Getenv("OS_IMAGE_ID")
+ if v == "" {
+ t.Fatal("OS_IMAGE_ID must be set for acceptance tests")
}
- if v := os.Getenv("OS_PASSWORD"); v != "us-central1" {
- t.Fatal("OS_PASSWORD must be set to us-central1 for acceptance tests")
+ v = os.Getenv("OS_FLAVOR_ID")
+ if v == "" {
+ t.Fatal("OS_FLAVOR_ID must be set for acceptance tests")
}
}
|
add image_ref and flavor_ref checks
|
hashicorp_terraform
|
train
|
18e25ff67aab89e030118e7b8ff1f1a9279b94a4
|
diff --git a/test/amqp-integration-test.py b/test/amqp-integration-test.py
index <HASH>..<HASH> 100644
--- a/test/amqp-integration-test.py
+++ b/test/amqp-integration-test.py
@@ -103,7 +103,6 @@ def run_client_tests():
assert root is not None, (
"Please set LETSENCRYPT_PATH env variable to point at "
"initialized (virtualenv) client repo root")
- os.environ['SERVER'] = 'http://localhost:4000/acme/new-reg'
test_script_path = os.path.join(root, 'tests', 'boulder-integration.sh')
if subprocess.Popen(test_script_path, shell=True, cwd=root).wait() != 0:
die(ExitStatus.PythonFailure)
|
Remove SERVER env var in integration test.
Per @Kuba, this is no longer needed for the letsencrypt client.
|
letsencrypt_boulder
|
train
|
e61c71fc11dcbbb9ff3bdb40c4f7282412352641
|
diff --git a/synapse/lib/net.py b/synapse/lib/net.py
index <HASH>..<HASH> 100644
--- a/synapse/lib/net.py
+++ b/synapse/lib/net.py
@@ -1,4 +1,5 @@
import os
+import time
import socket
import logging
import selectors
@@ -55,11 +56,13 @@ class Plex(s_config.Config):
def _runPollLoop(self):
+ fems = []
while not self.isfini:
try:
- for (_, fino, events, _), mask in self.epoll.select():
+ fems = self.epoll.select()
+ for (_, fino, events, _), mask in fems:
if self.isfini:
return
@@ -86,6 +89,9 @@ class Plex(s_config.Config):
continue
logger.exception('plex thread error: %r' % (e,))
+ if not fems:
+ time.sleep(0.035)
+
def _onPlexFini(self):
[l.fini() for l in list(self.links.values())]
|
Add a time.sleep in the _runPollLoop in the event that no events are received by the selector
|
vertexproject_synapse
|
train
|
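A stripped-down sketch of the loop shape after this patch: when a poll pass returns no events, back off briefly instead of spinning the CPU:

```python
import time

def run_poll_loop(poll, handle, is_fini):
    while not is_fini():
        events = poll()          # may legitimately return an empty list
        for event in events:
            handle(event)
        if not events:
            time.sleep(0.035)    # same back-off interval as the diff
```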
096e57c0206a9bdaf9bdf15ad12fca60e05a90a7
|
diff --git a/lib/types/func.js b/lib/types/func.js
index <HASH>..<HASH> 100644
--- a/lib/types/func.js
+++ b/lib/types/func.js
@@ -1,9 +1,9 @@
'use strict';
-const AnyType = require('./any');
+const ObjectType = require('./object');
const utils = require('../utils');
-class FuncType extends AnyType {
+class FuncType extends ObjectType {
constructor() {
super();
|
alter func type to inherit from object
|
lyfeyaj_ovt
|
train
|
da41db061f334c1492ffb5b79f4e2f7e0270c333
|
diff --git a/js-git.js b/js-git.js
index <HASH>..<HASH> 100644
--- a/js-git.js
+++ b/js-git.js
@@ -133,8 +133,15 @@ function newRepo(db, workDir) {
return resolveHashish(hashish, onResolve);
function onResolve(err, hash) {
if (err) return callback(err);
- var item = {hash: hash, path: "/"};
- return callback(null, walk(item, treeScan, treeCompare, {}));
+ load(hash, function (err, item) {
+ if (err) return callback(err);
+ if (item.type === "commit") {
+ return onResolve(null, item.body.tree);
+ }
+ item.hash = hash;
+ item.path = "/";
+ return callback(null, walk(item, treeScan, treeCompare, {}));
+ });
}
}
|
Allow commit hashish for walkTree
|
creationix_js-git
|
train
|
43c68f0bf96c116ac831d80f42d05650bfe67dc3
|
diff --git a/ghost/admin/routes/posts.js b/ghost/admin/routes/posts.js
index <HASH>..<HASH> 100644
--- a/ghost/admin/routes/posts.js
+++ b/ghost/admin/routes/posts.js
@@ -38,7 +38,7 @@ var PostsRoute = Ember.Route.extend(SimpleAuth.AuthenticatedRouteMixin, Shortcut
stepThroughPosts: function (step) {
var currentPost = this.get('controller.currentPost'),
- posts = this.get('controller.model'),
+ posts = this.get('controller.arrangedContent'),
length = posts.get('length'),
newPosition;
|
Ensure keyboard navigation matches custom post sorting.
|
TryGhost_Ghost
|
train
|
2dcd25701d158e4875d77036983c41d40534fabc
|
diff --git a/src-test/core/fontwatchrunnertest.js b/src-test/core/fontwatchrunnertest.js
index <HASH>..<HASH> 100644
--- a/src-test/core/fontwatchrunnertest.js
+++ b/src-test/core/fontwatchrunnertest.js
@@ -326,7 +326,7 @@ FontWatchRunnerTest.prototype.testWatchFontWebkitWithEqualMetrics = function() {
this.fontDescription_, true);
fontWatchRunner.start();
- assertEquals(2, this.asyncCount_);
+ assertEquals(10, this.asyncCount_);
assertEquals(1, this.fontActiveCalled_);
assertEquals(true, this.fontActive_['fontFamily1 n4']);
};
diff --git a/src/core/fontwatchrunner.js b/src/core/fontwatchrunner.js
index <HASH>..<HASH> 100644
--- a/src/core/fontwatchrunner.js
+++ b/src/core/fontwatchrunner.js
@@ -99,7 +99,15 @@ webfont.FontWatchRunner.prototype.check_ = function() {
this.asyncCheck_();
}
} else if (this.getTime_() - this.started_ >= 5000) {
- this.finish_(this.inactiveCallback_);
+ if (this.hasWebkitFallbackBug_ && this.sizeChangeCount_ === 1) {
+ // If we reach the timeout and we are in a Webkit browser with the
+ // fallback and we observed at least one size change, hope for the
+ // best and assume that the font has loaded and has identical font
+ // metrics compared to the browser's last resort font.
+ this.finish_(this.activeCallback_);
+ } else {
+ this.finish_(this.inactiveCallback_);
+ }
} else {
this.asyncCheck_();
}
|
Changed the behaviour of the timeout when the Webkit bug is present. If the timeout happens, and we are running in a webkit browser with the bug and we observed at least one size change, we assume the font has loaded and has identical metrics to the last resort font. In this case we fire the active event instead of the inactive event.
|
typekit_webfontloader
|
train
|
ba058ede84e189dee63986eaa44edc8b030b9e6d
|
diff --git a/genson/__init__.py b/genson/__init__.py
index <HASH>..<HASH> 100644
--- a/genson/__init__.py
+++ b/genson/__init__.py
@@ -1,4 +1,4 @@
-from .schema.builder import SchemaBuilder, Schema, custom_schema_builder
+from .schema.builder import SchemaBuilder, Schema
from .schema.node import SchemaNode, SchemaGenerationError
from .schema.generators.base import SchemaGenerator, TypedSchemaGenerator
@@ -8,5 +8,4 @@ __all__ = [
'SchemaGenerationError',
'Schema',
'SchemaGenerator',
- 'TypedSchemaGenerator',
- 'custom_schema_builder']
+ 'TypedSchemaGenerator']
diff --git a/genson/schema/builder.py b/genson/schema/builder.py
index <HASH>..<HASH> 100644
--- a/genson/schema/builder.py
+++ b/genson/schema/builder.py
@@ -1,6 +1,7 @@
import json
from warnings import warn
from .node import SchemaNode
+from .generators import BASIC_SCHEMA_TYPES
class SchemaBuilder(object):
@@ -12,6 +13,7 @@ class SchemaBuilder(object):
DEFAULT_URI = 'http://json-schema.org/schema#'
NULL_URI = 'NULL'
NODE_CLASS = SchemaNode
+ SCHEMA_TYPES = BASIC_SCHEMA_TYPES
def __init__(self, schema_uri='DEFAULT'):
"""
@@ -116,6 +118,29 @@ class SchemaBuilder(object):
return {'$schema': self.schema_uri or self.DEFAULT_URI}
+class _MetaSchemaBuilder(type):
+ def __init__(cls, name, bases, attrs):
+ super(_MetaSchemaBuilder, cls).__init__(name, bases, attrs)
+
+ if 'SCHEMA_TYPES' in attrs:
+ schema_types = list(attrs['SCHEMA_TYPES'])
+ for base in bases:
+ schema_types += list(getattr(base, 'SCHEMA_TYPES', []))
+
+ unique_schema_types = []
+ for schema_type in schema_types:
+ if schema_type not in unique_schema_types:
+ unique_schema_types.append(schema_type)
+
+ cls.SCHEMA_TYPES = tuple(unique_schema_types)
+ cls.NODE_CLASS = type('%sSchemaNode' % name, (SchemaNode,),
+ {'SCHEMA_TYPES': cls.SCHEMA_TYPES})
+
+
+# apply metaclass in python 2/3 compatible manner
+SchemaBuilder = _MetaSchemaBuilder('SchemaBuilder', (SchemaBuilder,), {})
+
+
class Schema(SchemaBuilder):
def __init__(self):
@@ -133,13 +158,3 @@ class Schema(SchemaBuilder):
warn('the `recurse` option for #to_dict does nothing in v1.0',
DeprecationWarning)
return self.to_schema()
-
-
-def custom_schema_builder(custom_generators):
- class _CustomSchemaNode(SchemaNode):
- GENERATORS = tuple(list(custom_generators) + list(SchemaNode.GENERATORS))
-
- class _CustomSchemaBuilder(SchemaBuilder):
- NODE_CLASS = _CustomSchemaNode
-
- return _CustomSchemaBuilder
diff --git a/test/test_custom.py b/test/test_custom.py
index <HASH>..<HASH> 100644
--- a/test/test_custom.py
+++ b/test/test_custom.py
@@ -1,4 +1,4 @@
-from genson import custom_schema_builder
+from genson import SchemaBuilder
from genson.schema.generators import Number
from . import base
@@ -11,7 +11,8 @@ class MaxTenGenerator(Number):
return schema
-MaxTenSchemaBuilder = custom_schema_builder([MaxTenGenerator])
+class MaxTenSchemaBuilder(SchemaBuilder):
+ SCHEMA_TYPES = (MaxTenGenerator,)
class TestBasicTypes(base.SchemaNodeTestCase):
|
add metaclass for easily extending SchemaBuilder
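A minimal, standalone sketch of the merging-metaclass pattern (Python, standard library only; names are illustrative): the metaclass gathers SCHEMA_TYPES from the class and all of its bases, de-duplicated in order, so a subclass only has to declare its additions.

class _MergeSchemaTypes(type):
    def __init__(cls, name, bases, attrs):
        super(_MergeSchemaTypes, cls).__init__(name, bases, attrs)
        if 'SCHEMA_TYPES' in attrs:
            merged = list(attrs['SCHEMA_TYPES'])
            for base in bases:
                merged += list(getattr(base, 'SCHEMA_TYPES', ()))
            unique = []
            for schema_type in merged:
                if schema_type not in unique:
                    unique.append(schema_type)
            cls.SCHEMA_TYPES = tuple(unique)

# apply the metaclass in a python 2/3 compatible manner, as the diff does
Base = _MergeSchemaTypes('Base', (object,), {'SCHEMA_TYPES': ('number', 'string')})

class Custom(Base):
    SCHEMA_TYPES = ('max_ten',)

assert Custom.SCHEMA_TYPES == ('max_ten', 'number', 'string')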
|
wolverdude_GenSON
|
train
|
c59c4d8662114819b209739c09d3580c0e4cdf16
|
diff --git a/cassandra/decoder.py b/cassandra/decoder.py
index <HASH>..<HASH> 100644
--- a/cassandra/decoder.py
+++ b/cassandra/decoder.py
@@ -825,6 +825,7 @@ cql_encoders = {
datetime.datetime: cql_encode_datetime,
datetime.date: cql_encode_date,
dict: cql_encode_map_collection,
+ OrderedDict: cql_encode_map_collection,
list: cql_encode_list_collection,
tuple: cql_encode_list_collection,
set: cql_encode_set_collection,
|
add encoder mapping for OrderedDict
OrderedDict is the resulting object type when SELECT-ing a map data object.
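A short Python illustration of why the explicit entry is needed (simplified mapping; the encoder body is a stand-in): lookup is keyed by the value's exact type, so a dict subclass such as OrderedDict never hits the dict entry unless it is registered itself.

from collections import OrderedDict

def cql_encode_map_collection(val):  # stand-in for the real encoder
    return '{' + ', '.join('%r: %r' % kv for kv in val.items()) + '}'

cql_encoders = {dict: cql_encode_map_collection}

row_value = OrderedDict([('k', 'v')])              # what SELECT returns for a map
assert cql_encoders.get(type(row_value)) is None   # exact-type lookup misses

cql_encoders[OrderedDict] = cql_encode_map_collection  # the fix
assert cql_encoders.get(type(row_value)) is cql_encode_map_collection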
|
datastax_python-driver
|
train
|
1056501b27ed1263b1929566a67fd5c1a2d0d83a
|
diff --git a/tests/test_algorithm.py b/tests/test_algorithm.py
index <HASH>..<HASH> 100644
--- a/tests/test_algorithm.py
+++ b/tests/test_algorithm.py
@@ -16,6 +16,7 @@ from collections import namedtuple
import datetime
from datetime import timedelta
from textwrap import dedent
+import warnings
from unittest import TestCase, skip
from copy import deepcopy
@@ -1854,6 +1855,72 @@ def handle_data(context, data):
)
algo.run(self.data_portal)
+ def test_schedule_function_time_rule_positionally_misplaced(self):
+ """
+ Test that when a user specifies a time rule for the date_rule argument,
+ but no rule in the time_rule argument
+ (e.g. schedule_function(func, <time_rule>)), we assume that means
+ assign a time rule but no date rule
+ """
+
+ sim_params = factory.create_simulation_parameters(
+ start=pd.Timestamp('2006-01-12', tz='UTC'),
+ end=pd.Timestamp('2006-01-13', tz='UTC'),
+ data_frequency='minute'
+ )
+
+ algocode = dedent("""
+ from zipline.api import time_rules, schedule_function
+
+ def do_at_open(context, data):
+ context.done_at_open.append(context.get_datetime())
+
+ def do_at_close(context, data):
+ context.done_at_close.append(context.get_datetime())
+
+ def initialize(context):
+ context.done_at_open = []
+ context.done_at_close = []
+ schedule_function(do_at_open, time_rules.market_open())
+ schedule_function(do_at_close, time_rules.market_close())
+
+ def handle_data(algo, data):
+ pass
+ """)
+
+ with warnings.catch_warnings(record=True) as w:
+ algo = TradingAlgorithm(
+ script=algocode,
+ sim_params=sim_params,
+ env=self.env
+ )
+ algo.run(self.data_portal)
+
+ self.assertEqual(len(w), 2)
+ for i, warning in enumerate(w):
+ self.assertIsInstance(warning.message, UserWarning)
+ self.assertEqual(
+ warning.message.args[0],
+ 'Got a time rule for the second positional argument '
+ 'date_rule. You should use keyword argument '
+ 'time_rule= when calling schedule_function without '
+ 'specifying a date_rule'
+ )
+ # The warnings come from line 13 and 14 in the algocode
+ self.assertEqual(warning.lineno, 13 + i)
+
+ self.assertEqual(
+ algo.done_at_open,
+ [pd.Timestamp('2006-01-12 14:31:00', tz='UTC'),
+ pd.Timestamp('2006-01-13 14:31:00', tz='UTC')]
+ )
+
+ self.assertEqual(
+ algo.done_at_close,
+ [pd.Timestamp('2006-01-12 20:59:00', tz='UTC'),
+ pd.Timestamp('2006-01-13 20:59:00', tz='UTC')]
+ )
+
class TestCapitalChanges(WithLogger,
WithDataPortal,
diff --git a/zipline/algorithm.py b/zipline/algorithm.py
index <HASH>..<HASH> 100644
--- a/zipline/algorithm.py
+++ b/zipline/algorithm.py
@@ -100,6 +100,8 @@ from zipline.utils.events import (
make_eventrule,
date_rules,
time_rules,
+ AfterOpen,
+ BeforeClose
)
from zipline.utils.factory import create_simulation_parameters
from zipline.utils.math_utils import (
@@ -951,11 +953,21 @@ class TradingAlgorithm(object):
:class:`zipline.api.date_rules`
:class:`zipline.api.time_rules`
"""
+
+ # When the user calls schedule_function(func, <time_rule>), assume that
+ # the user meant to specify a time rule but no date rule, instead of
+ # a date rule and no time rule as the signature suggests
+ if isinstance(date_rule, (AfterOpen, BeforeClose)) and not time_rule:
+ warnings.warn('Got a time rule for the second positional argument '
+ 'date_rule. You should use keyword argument '
+ 'time_rule= when calling schedule_function without '
+ 'specifying a date_rule', stacklevel=3)
+
date_rule = date_rule or date_rules.every_day()
- time_rule = ((time_rule or time_rules.market_open())
+ time_rule = ((time_rule or time_rules.every_minute())
if self.sim_params.data_frequency == 'minute' else
# If we are in daily mode the time_rule is ignored.
- zipline.utils.events.Always())
+ time_rules.every_minute())
self.add_event(
make_eventrule(date_rule, time_rule, half_days),
diff --git a/zipline/utils/events.py b/zipline/utils/events.py
index <HASH>..<HASH> 100644
--- a/zipline/utils/events.py
+++ b/zipline/utils/events.py
@@ -689,6 +689,7 @@ class date_rules(object):
class time_rules(object):
market_open = AfterOpen
market_close = BeforeClose
+ every_minute = Always
def make_eventrule(date_rule, time_rule, half_days=True):
|
MAINT: Support the passing of a time rule positionally on the date_rule arg
But log a warning to the user
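An illustrative Python sketch of the detection (rule classes are stand-ins; the real event-rule composition is elided): when the positional date_rule argument is actually a time rule and no time_rule was given, warn the user.

import warnings

class AfterOpen(object): pass
class BeforeClose(object): pass

def schedule_function(func, date_rule=None, time_rule=None):
    if isinstance(date_rule, (AfterOpen, BeforeClose)) and not time_rule:
        warnings.warn('Got a time rule for the second positional argument '
                      'date_rule. You should use keyword argument time_rule= '
                      'when calling schedule_function without specifying a '
                      'date_rule', stacklevel=2)
    # ... actual event-rule construction elided ...

schedule_function(lambda context, data: None, AfterOpen())  # emits the warning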
|
quantopian_zipline
|
train
|
061a9f6f3d4c65dcba5a1c31e619c9dc246da928
|
diff --git a/src/vectorizer.js b/src/vectorizer.js
index <HASH>..<HASH> 100644
--- a/src/vectorizer.js
+++ b/src/vectorizer.js
@@ -41,12 +41,10 @@ V = Vectorizer = (function() {
}
function isObject(o) {
- return o === Object(o);
+ return o && (typeof o === 'object');
}
- function isArray(o) {
- return Object.prototype.toString.call(o) == '[object Array]';
- }
+ var isArray = Array.isArray;
// Create an SVG document element.
// If `content` is passed, it will be used as the SVG content of the `<svg>` root element.
|
Use native Array.isArray and typeof for type checking
|
clientIO_joint
|
train
|
b286fcdfdcf2c99f0674446d874958d1bd2baeb6
|
diff --git a/lib/form/yui/dateselector/dateselector.js b/lib/form/yui/dateselector/dateselector.js
index <HASH>..<HASH> 100644
--- a/lib/form/yui/dateselector/dateselector.js
+++ b/lib/form/yui/dateselector/dateselector.js
@@ -136,6 +136,7 @@ YUI.add('moodle-form-dateselector', function(Y) {
this.yearselect.set('selectedIndex', newindex);
this.monthselect.set('selectedIndex', date[1] - this.monthselect.firstOptionValue());
this.dayselect.set('selectedIndex', date[2] - this.dayselect.firstOptionValue());
+ M.form.dateselector.release_current();
},
connect_handlers : function() {
M.form.dateselector.calendar.selectEvent.subscribe(this.set_selects_from_date, this, true);
|
MDL-<I> Automatically hide the dateselector after choosing a date
|
moodle_moodle
|
train
|
c8256442f1907e1e661592f454700cac9198169a
|
diff --git a/tests/lib/index.js b/tests/lib/index.js
index <HASH>..<HASH> 100644
--- a/tests/lib/index.js
+++ b/tests/lib/index.js
@@ -809,18 +809,14 @@ describe('BundleLocator', function () {
});
}
if (2 === bundleCalls) {
- try {
- expect(Object.keys(evt.files).length).to.equal(2);
- expect(evt.files).to.have.property('configs/foo.json');
- expect(evt.files).to.have.property('configs/bar.json');
- expect(Object.keys(evt.resources).length).to.equal(2);
- expect(evt.resources).to.have.property('configs/foo.json');
- expect(evt.resources).to.have.property('configs/bar.json');
- } catch (err) {
- mockery.deregisterAll();
- mockery.disable();
- next(err);
- }
+ expect(Object.keys(evt.files).length).to.equal(2);
+ expect(evt.files).to.have.property('configs/foo.json');
+ expect(evt.files).to.have.property('configs/bar.json');
+ expect(Object.keys(evt.resources).length).to.equal(2);
+ expect(evt.resources).to.have.property('configs/foo.json');
+ expect(evt.resources).to.have.property('configs/bar.json');
+ mockery.deregisterAll();
+ mockery.disable();
}
}
}
|
Do not call next() prematurely in bundleUpdated tests
|
yahoo_locator
|
train
|
9908d7cf7cb22b6ffa9ac6d69fe258a526d63d48
|
diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -132,7 +132,7 @@ class Gstore {
}
// Convert gstore entities to datastore forma ({key, data})
- args[0] = datastoreSerializer.entitiesToDatastore(entities);
+ args[0] = datastoreSerializer.entitiesToDatastore(entities, options);
if (args[1] && args[1].constructor && args[1].constructor.name === 'Transaction') {
// Save inside a transaction
diff --git a/lib/serializers/datastore.js b/lib/serializers/datastore.js
index <HASH>..<HASH> 100644
--- a/lib/serializers/datastore.js
+++ b/lib/serializers/datastore.js
@@ -3,7 +3,7 @@
const is = require('is');
const arrify = require('arrify');
-function toDatastore(entity) {
+function toDatastore(entity, options = {}) {
const excludeFromIndexes = [...entity.excludeFromIndexes] || [];
let isArray;
@@ -57,12 +57,14 @@ function toDatastore(entity) {
datastoreFormat.excludeFromIndexes = excludeFromIndexes;
}
+ if (options.method) {
+ datastoreFormat.method = options.method;
+ }
+
return datastoreFormat;
}
-function fromDatastore(entity, options) {
- options = options || {};
-
+function fromDatastore(entity, options = {}) {
switch (options.format) {
case 'ENTITY':
return convertToEntity.call(this);
@@ -122,7 +124,7 @@ function fromDatastore(entity, options) {
* @param {any} entities Entity(ies) to format
* @returns {array} the formated entity(ies)
*/
-function entitiesToDatastore(entities) {
+function entitiesToDatastore(entities, options) {
const multiple = is.array(entities);
entities = arrify(entities);
@@ -131,7 +133,7 @@ function entitiesToDatastore(entities) {
return entities;
}
- const result = entities.map(toDatastore);
+ const result = entities.map(e => toDatastore(e, options));
return multiple ? result : result[0];
}
diff --git a/test/index-test.js b/test/index-test.js
index <HASH>..<HASH> 100644
--- a/test/index-test.js
+++ b/test/index-test.js
@@ -189,10 +189,10 @@ describe('gstore-node', () => {
));
it('should convert entity instances to datastore Format', () => {
- const model1 = new ModelInstance({ name: 'John' });
- const model2 = new ModelInstance({ name: 'Mick' });
+ const entity1 = new ModelInstance({ name: 'John' });
+ const entity2 = new ModelInstance({ name: 'Mick' });
- return gstore.save([model1, model2]).then(() => {
+ return gstore.save([entity1, entity2]).then(() => {
const { args } = ds.save.getCall(0);
const firstEntity = args[0][0];
assert.isUndefined(firstEntity.className);
@@ -201,9 +201,9 @@ describe('gstore-node', () => {
});
it('should work inside a transaction', () => {
- const model1 = new ModelInstance({ name: 'John' });
+ const entity = new ModelInstance({ name: 'John' });
- gstore.save(model1, transaction);
+ gstore.save(entity, transaction);
expect(transaction.save.called).equal(true);
expect(ds.save.called).equal(false);
@@ -214,9 +214,9 @@ describe('gstore-node', () => {
sinon.stub(ds, 'save').callsFake((entity, cb) => cb());
- const model = new ModelInstance({ name: 'John' });
+ const entity = new ModelInstance({ name: 'John' });
- return gstore.save(model, () => {
+ return gstore.save(entity, () => {
const { args } = ds.save.getCall(0);
const firstEntity = args[0];
assert.isUndefined(firstEntity.className);
@@ -247,6 +247,16 @@ describe('gstore-node', () => {
done();
});
});
+
+ it('should allow to pass a save method ("insert", "update", "upsert")', () => {
+ const entity = new ModelInstance({ name: 'John' });
+
+ return gstore.save(entity, undefined, { method: 'insert' })
+ .then(() => {
+ const { args } = ds.save.getCall(0);
+ expect(args[0].method).equal('insert');
+ });
+ });
});
describe('cache', () => {
|
feat(gstore-save): Add save method to global save
The gstore.save() method now accepts an options object to declare the save method (insert, update, upsert).
#<I>
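The shape of the change, mirrored in Python for illustration (the project is JavaScript; field names follow the diff): the options object is threaded through the serializer so each datastore record can carry the chosen method.

def entities_to_datastore(entities, options=None):
    options = options or {}
    def to_datastore(entity):
        record = {'key': entity['key'], 'data': entity['data']}
        if 'method' in options:
            record['method'] = options['method']  # 'insert' | 'update' | 'upsert'
        return record
    return [to_datastore(e) for e in entities]

rows = entities_to_datastore([{'key': 1, 'data': {}}], {'method': 'insert'})
assert rows[0]['method'] == 'insert'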
|
sebelga_gstore-node
|
train
|
03642be1372e26f8a0758052270223f47955737b
|
diff --git a/js/src/Lines.js b/js/src/Lines.js
index <HASH>..<HASH> 100644
--- a/js/src/Lines.js
+++ b/js/src/Lines.js
@@ -386,30 +386,37 @@ var Lines = mark.Mark.extend({
},
update_curves_subset: function() {
- var that = this;
+ var display_labels = this.model.get("labels_visibility") === "label";
// Show a subset of the curves
var curves_subset = this.model.get("curves_subset");
if (curves_subset.length > 0) {
this.d3el.selectAll(".curve")
- .select("path")
.attr("display", function(d, i) {
return curves_subset.indexOf(i) !== -1 ?
"inline" : "none";
- });
- this.d3el.selectAll(".curve")
+ })
.select(".curve_label")
.attr("display", function(d, i) {
- return (curves_subset.indexOf(i) !== -1 && that.model.get("labels_visibility") === "label") ?
+ return (curves_subset.indexOf(i) !== -1 && display_labels) ?
"inline" : "none";
});
- } else { //make all curves visible
+ if (this.legend_el) {
+ this.legend_el
+ .attr("display", function(d, i) {
+ return curves_subset.indexOf(i) !== -1 ?
+ "inline" : "none";
+ });
+ }
this.d3el.selectAll(".curve")
- .select("path").attr("display", "inline");
+
+ } else { //make all curves visible
this.d3el.selectAll(".curve")
- .select(".curve_label").attr("display", function(d) {
- return that.model.get("labels_visibility") === "label" ?
- "inline" : "none";
+ .attr("display", "inline")
+ .select(".curve_label")
+ .attr("display", function(d) {
+ return display_labels ? "inline" : "none";
});
+ this.legend_el.attr("display", "inline");
}
},
|
apply curves_subset to legend and markers
|
bloomberg_bqplot
|
train
|
550ad088ebb442bbd5b78a32aec8dae6c3f44a84
|
diff --git a/pysat/_instrument.py b/pysat/_instrument.py
index <HASH>..<HASH> 100644
--- a/pysat/_instrument.py
+++ b/pysat/_instrument.py
@@ -2632,11 +2632,12 @@ class Instrument(object):
if (self.index[-1] == last_pad) & (not want_last_pad):
self.data = self[:-1]
- # if self.pad is False, load single day
+ # If self.pad is False, load single day
else:
self.data, meta = self._load_data(date=self.date, fid=self._fid,
inc=self.load_step)
if not self.empty:
+ meta.accept_default_labels(self.meta)
self.meta = meta
# If only some metadata included, define the remaining variables
|
BUG: accepted default meta labels
Fixed a bug where the metadata from the load
routine did not always update the labels to conform
with those specified by the instrument.
|
rstoneback_pysat
|
train
|
6c2e73187915935fd9d57c06b723d1f2e9657682
|
diff --git a/aeron-driver/src/main/java/uk/co/real_logic/aeron/driver/MediaDriver.java b/aeron-driver/src/main/java/uk/co/real_logic/aeron/driver/MediaDriver.java
index <HASH>..<HASH> 100644
--- a/aeron-driver/src/main/java/uk/co/real_logic/aeron/driver/MediaDriver.java
+++ b/aeron-driver/src/main/java/uk/co/real_logic/aeron/driver/MediaDriver.java
@@ -165,16 +165,16 @@ public class MediaDriver implements AutoCloseable
private MediaDriver start()
{
conductorThread = new Thread(conductor);
- startThread(conductorThread, "driver-conductor");
+ startThread(conductorThread, "aeron-driver-conductor");
senderThread = new Thread(sender);
- startThread(senderThread, "sender");
+ startThread(senderThread, "aeron-sender");
receiverThread = new Thread(receiver);
- startThread(receiverThread, "receiver");
+ startThread(receiverThread, "aeron-receiver");
eventReaderThread = new Thread(eventReader);
- startThread(eventReaderThread, "event-reader");
+ startThread(eventReaderThread, "aeron-event-reader");
return this;
}
|
[Java] Qualify the names of aeron media driver threads.
|
real-logic_aeron
|
train
|
1d03010d1cdcbbff11afbecd057e3148dd6627c6
|
diff --git a/save.go b/save.go
index <HASH>..<HASH> 100644
--- a/save.go
+++ b/save.go
@@ -87,10 +87,10 @@ func calcDepRoots(importPath string) []*repoRoot {
}
// Convert from packages to repo roots.
+ var attempts = 1
GetAllDeps:
var depRoots = map[string]*repoRoot{}
var missingPackages []string
- var attempts = 1
for _, importPath := range getAllDeps(importPath) {
var repoRoot, err = glockRepoRootForImportPath(importPath)
if err != nil {
@@ -113,6 +113,7 @@ GetAllDeps:
}
fmt.Fprintln(os.Stderr, "go", "get", strings.Join(missingPackages, " "))
run("go", append([]string{"get"}, missingPackages...)...)
+ attempts++
goto GetAllDeps
}
diff --git a/save_test.go b/save_test.go
index <HASH>..<HASH> 100644
--- a/save_test.go
+++ b/save_test.go
@@ -4,6 +4,7 @@ import (
"bufio"
"bytes"
"fmt"
+ "go/build"
"os/exec"
"path/filepath"
"strings"
@@ -223,8 +224,13 @@ import (
}
// Temporarily set the GOPATH and dep printing function.
- defer os.Setenv("GOPATH", os.Getenv("GOPATH"))
+ var oldGOPATH = build.Default.GOPATH
+ defer func() {
+ os.Setenv("GOPATH", oldGOPATH)
+ build.Default.GOPATH = oldGOPATH
+ }()
os.Setenv("GOPATH", gopath)
+ build.Default.GOPATH = gopath
var buf bytes.Buffer
outputDeps(&buf, calcDepRoots(test.pkgs[0].importPath))
|
save: fix bug when retrying "go get". fix test by mocking build.Default.GOPATH.
|
robfig_glock
|
train
|
dd17f65f23003903cc72d82ed1c0e26cefc9f984
|
diff --git a/src/main/java/water/api/Inspect.java b/src/main/java/water/api/Inspect.java
index <HASH>..<HASH> 100644
--- a/src/main/java/water/api/Inspect.java
+++ b/src/main/java/water/api/Inspect.java
@@ -119,8 +119,10 @@ public class Inspect extends Request {
if( len < 0 )
break;
off += len;
- if( off == bs.length ) // Dataset is uncompressing alot! Need more space...
+ if( off == bs.length ) { // Dataset is uncompressing alot! Need more space...
+ if( bs.length >= ValueArray.CHUNK_SZ ) break; // Already got enough
bs = Arrays.copyOf(bs,bs.length*2);
+ }
}
} catch( IOException ioe ) { // Stop at any io error
} finally {
|
Stop uncompression at 1 MB... just enough to display for the user
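The same cap, sketched in Python with zlib for illustration (the CHUNK_SZ value is assumed): keep growing the buffer while the stream inflates, but stop once a preview's worth of bytes has been produced.

import zlib

CHUNK_SZ = 1 << 20  # assumed 1 MiB preview cap, mirroring ValueArray.CHUNK_SZ

def preview_decompress(payload):
    d = zlib.decompressobj()
    out = bytearray()
    for i in range(0, len(payload), 4096):
        out += d.decompress(payload[i:i + 4096])
        if len(out) >= CHUNK_SZ:
            break  # already got enough to display
    return bytes(out)

blob = zlib.compress(b'x' * (4 << 20))  # 4 MiB of highly compressible data
assert len(preview_decompress(blob)) >= CHUNK_SZ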
|
h2oai_h2o-2
|
train
|
c225d97e4b91e42341c16a345ac2f3b3fe53b040
|
diff --git a/lib/constants.json b/lib/constants.json
index <HASH>..<HASH> 100644
--- a/lib/constants.json
+++ b/lib/constants.json
@@ -13,10 +13,6 @@
"reset": "reset",
"any": "any"
},
- "events": {
- "contractFilesChanged": "contractFilesChanged",
- "contractConfigChanged": "contractConfigChanged"
- },
"process": {
"processLaunchRequest": "process:launch-request",
"processLaunchComplete": "process:launch-complete",
diff --git a/lib/core/config.js b/lib/core/config.js
index <HASH>..<HASH> 100644
--- a/lib/core/config.js
+++ b/lib/core/config.js
@@ -222,7 +222,6 @@ Config.prototype.loadContractsConfigFile = function() {
const newContractsConfig = this._mergeConfig(configFilePath, configObject, this.env);
if (!deepEqual(newContractsConfig, this.contractsConfig)) {
- this.events.emit(constants.events.contractConfigChanged, newContractsConfig);
this.contractsConfig = newContractsConfig;
}
};
@@ -333,8 +332,11 @@ Config.prototype.loadEmbarkConfigFile = function() {
const contracts = this.embarkConfig.contracts;
const newContractsFiles = this.loadFiles(contracts);
if (!this.contractFiles || newContractsFiles.length !== this.contractFiles.length || !deepEqual(newContractsFiles, this.contractFiles)) {
- this.events.emit(constants.events.contractFilesChanged, newContractsFiles);
- this.contractsFiles = newContractsFiles;
+ this.contractsFiles = this.contractsFiles.concat(newContractsFiles).filter((file, index, arr) => {
+ return !arr.some((file2, index2) => {
+ return file.filename === file2.filename && index < index2;
+ });
+ });
}
// determine contract 'root' directories
this.contractDirectories = contracts.map((dir) => {
diff --git a/lib/tests/test.js b/lib/tests/test.js
index <HASH>..<HASH> 100644
--- a/lib/tests/test.js
+++ b/lib/tests/test.js
@@ -2,7 +2,6 @@ const async = require('async');
const Engine = require('../core/engine.js');
const TestLogger = require('./test_logger.js');
const Web3 = require('web3');
-const constants = require('../constants');
const Events = require('../core/events');
const AccountParser = require('../utils/accountParser');
// TODO: breaks module isolation; tests need to be refactored to use the engine and avoid this
@@ -272,7 +271,6 @@ class Test {
async.waterfall([
function getConfig(next) {
self.engine.config.contractsConfig = {contracts: config.contracts, versions: self.versions_default};
- self.engine.events.emit(constants.events.contractConfigChanged, self.engine.config.contractsConfig);
next();
},
function getAccounts(next) {
|
fix file reload removing plugins
|
embark-framework_embark
|
train
|
34565d57ab051a7cbf429b7a82f34b7dc294ede7
|
diff --git a/spyder/plugins/explorer.py b/spyder/plugins/explorer.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/explorer.py
+++ b/spyder/plugins/explorer.py
@@ -73,11 +73,6 @@ class Explorer(ExplorerWidget, SpyderPluginMixin):
self.renamed.connect(self.main.editor.renamed)
self.main.editor.open_dir.connect(self.chdir)
self.create_module.connect(self.main.editor.new)
- self.run.connect(
- lambda fname:
- self.main.open_external_console(to_text_string(fname),
- osp.dirname(to_text_string(fname)),
- '', False, False, True, '', True))
# Signal "set_explorer_cwd(QString)" will refresh only the
# contents of path passed by the signal in explorer:
self.main.workingdirectory.set_explorer_cwd.connect(
diff --git a/spyder/plugins/ipythonconsole.py b/spyder/plugins/ipythonconsole.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/ipythonconsole.py
+++ b/spyder/plugins/ipythonconsole.py
@@ -810,7 +810,11 @@ class IPythonConsole(SpyderPluginWidget):
self.tabwidget.currentChanged.connect(self.update_working_directory)
self.explorer.open_interpreter.connect(self.create_client_from_path)
+ self.explorer.run.connect(lambda fname: self.run_script(
+ fname, osp.dirname(fname), '', False, False, False, True))
self.projects.open_interpreter.connect(self.create_client_from_path)
+ self.projects.run.connect(lambda fname: self.run_script(
+ fname, osp.dirname(fname), '', False, False, False, True))
#------ Public API (for clients) ------------------------------------------
def get_clients(self):
diff --git a/spyder/plugins/projects.py b/spyder/plugins/projects.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/projects.py
+++ b/spyder/plugins/projects.py
@@ -154,9 +154,6 @@ class Projects(ProjectExplorerWidget, SpyderPluginMixin):
self.sig_project_closed.connect(
lambda v: self.editor.setup_open_files())
self.recent_project_menu.aboutToShow.connect(self.setup_menu_actions)
- self.run.connect(lambda fname: self.main.open_external_console(
- to_text_string(fname), osp.dirname(to_text_string(fname)),
- '', False, False, True, '', True))
def refresh_plugin(self):
"""Refresh project explorer widget"""
|
Execute on dedicated IPython consoles
|
spyder-ide_spyder
|
train
|
5c6b27a5481d852da0c5bbd0065f841132703424
|
diff --git a/lib/pdf/reader/page_text_receiver.rb b/lib/pdf/reader/page_text_receiver.rb
index <HASH>..<HASH> 100644
--- a/lib/pdf/reader/page_text_receiver.rb
+++ b/lib/pdf/reader/page_text_receiver.rb
@@ -40,7 +40,7 @@ module PDF
def page=(page)
@state = PageState.new(page)
@content = []
- @characters = []
+ @characters = TextRunCollection.new
end
#####################################################
@@ -189,11 +189,35 @@ module PDF
tj = 0
end
scaled_glyph_width = glyph_width * @state.font_size * th
- @characters << TextRun.new(newx, newy, scaled_glyph_width, utf8_chars)
+ @characters.add_run(newx, newy, scaled_glyph_width, utf8_chars)
@state.process_glyph_displacement(glyph_width, tj, utf8_chars == " ")
end
end
+ class TextRunCollection
+ extend Forwardable
+
+ def_delegators :@runs, :group_by, :each
+
+ def initialize
+ @runs = []
+ end
+
+ def add_run(x, y, width, utf8)
+ last = @runs.last
+ if @runs.size > 0 && @runs.last.mergable_by_pos?(x, y)
+ last.width = (x+width) - last.x
+ last.text = last.text + utf8
+ else
+ @runs << TextRun.new(x, y, width, utf8)
+ #@runs.sort!
+ end
+ end
+
+ private
+
+ end
+
end
end
end
diff --git a/lib/pdf/reader/text_run.rb b/lib/pdf/reader/text_run.rb
index <HASH>..<HASH> 100644
--- a/lib/pdf/reader/text_run.rb
+++ b/lib/pdf/reader/text_run.rb
@@ -32,6 +32,10 @@ class PDF::Reader
y.to_i == other.y.to_i && Range.new(endx - 3, endx + MERGE_LIMIT).include?(other.x)
end
+ def mergable_by_pos?(otherx, othery)
+ y.to_i == othery.to_i && Range.new(endx - 3, endx + MERGE_LIMIT).include?(otherx)
+ end
+
def +(other)
raise ArgumentError, "#{other} cannot be merged with this run" unless mergable?(other)
|
a failed experiment in reducing object allocations
|
yob_pdf-reader
|
train
|
5c1dc995cb329f0b550cb69a5ae01def8fb3f845
|
diff --git a/src/Emojis.js b/src/Emojis.js
index <HASH>..<HASH> 100644
--- a/src/Emojis.js
+++ b/src/Emojis.js
@@ -1,6 +1,5 @@
import React from 'react'
import createEmojisComponent from './createEmojisComponent'
-import emptyObj from 'empty/object'
-export default createEmojisComponent(emptyObj)
+export default createEmojisComponent()
diff --git a/src/createEmojisComponent.js b/src/createEmojisComponent.js
index <HASH>..<HASH> 100644
--- a/src/createEmojisComponent.js
+++ b/src/createEmojisComponent.js
@@ -1,8 +1,8 @@
import React from 'react'
import PropTypes from 'prop-types'
import emptyObj from 'empty/object'
-import {toEmojis} from './utils'
import walk from './walk'
+import {toEmojis} from './utils'
export default function createEmojisComponent (options = emptyObj) {
|
<Emojis> doesn't need to provide empty.object to createEmojisComponent
|
jaredLunde_react-emoji-component
|
train
|
166a638a16ce66b065d5be74a2b16051abe8aa4b
|
diff --git a/engine/delete.go b/engine/delete.go
index <HASH>..<HASH> 100644
--- a/engine/delete.go
+++ b/engine/delete.go
@@ -70,6 +70,8 @@ func truncateTable(e *Engine, table *Table, conn protocol.EngineConn) error {
if r == nil {
return fmt.Errorf("Table %v not found", table.name)
}
+ r.Lock()
+ defer r.Unlock()
if r.rows != nil {
rowsDeleted = int64(len(r.rows))
|
fix (DELETE): fix data race when table is truncated
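The fix itself is Go; the same guard, mirrored in Python for illustration: hold the table's lock across the whole read-then-clear so a concurrent reader cannot observe a half-truncated table.

import threading

class Table(object):
    def __init__(self):
        self.lock = threading.Lock()
        self.rows = []

    def truncate(self):
        with self.lock:  # r.Lock() / defer r.Unlock() in the diff
            deleted = len(self.rows)
            self.rows = []
            return deleted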
|
proullon_ramsql
|
train
|
a61c5482d9113944417ce168a40163de153cad94
|
diff --git a/packages/test-utils/src/create-dom-event.js b/packages/test-utils/src/create-dom-event.js
index <HASH>..<HASH> 100644
--- a/packages/test-utils/src/create-dom-event.js
+++ b/packages/test-utils/src/create-dom-event.js
@@ -24,9 +24,35 @@ const modifiers = {
pagedown: 34
}
+// get from https://github.com/ashubham/w3c-keys/blob/master/index.ts
+const w3cKeys = {
+ enter: 'Enter',
+ tab: 'Tab',
+ delete: 'Delete',
+ esc: 'Esc',
+ escape: 'Escape',
+ space: ' ',
+ up: 'Up',
+ left: 'Left',
+ right: 'Right',
+ down: 'Down',
+ end: 'End',
+ home: 'Home',
+ backspace: 'Backspace',
+ insert: 'Insert',
+ pageup: 'PageUp',
+ pagedown: 'PageDown'
+}
+
+const codeToKeyNameMap = Object.entries(modifiers).reduce(
+ (acc, [key, value]) => Object.assign(acc, { [value]: w3cKeys[key] }),
+ {}
+)
+
function getOptions(eventParams) {
const { modifier, meta, options } = eventParams
const keyCode = modifiers[modifier] || options.keyCode || options.code
+ const key = codeToKeyNameMap[keyCode]
return {
...options, // What the user passed in as the second argument to #trigger
@@ -36,7 +62,8 @@ function getOptions(eventParams) {
// Any derived options should go here
keyCode,
- code: keyCode
+ code: keyCode,
+ key
}
}
diff --git a/test/specs/wrapper/trigger.spec.js b/test/specs/wrapper/trigger.spec.js
index <HASH>..<HASH> 100644
--- a/test/specs/wrapper/trigger.spec.js
+++ b/test/specs/wrapper/trigger.spec.js
@@ -43,23 +43,27 @@ describeWithShallowAndMount('trigger', mountingMethod => {
expect(keydownHandler).toHaveBeenCalled()
})
- describe('causes keydown handler to fire with the appropriate keyCode when wrapper.trigger("keydown", { keyCode: 65 }) is fired on a Component', async () => {
+ describe('causes keydown handler to fire with the appropriate keyCode when wrapper.trigger("keydown", { keyCode: 46 }) is fired on a Component', async () => {
const keydownHandler = jest.fn()
const wrapper = mountingMethod(ComponentWithEvents, {
propsData: { keydownHandler }
})
- await wrapper.find('.keydown').trigger('keydown', { keyCode: 65 })
+ await wrapper.find('.keydown').trigger('keydown', { keyCode: 46 })
const keyboardEvent = keydownHandler.mock.calls[0][0]
// Unfortunately, JSDom will give different types than PhantomJS for keyCodes (string vs number), so we have to use parseInt to normalize the types.
it('contains the keyCode', () => {
- expect(parseInt(keyboardEvent.keyCode, 10)).toEqual(65)
+ expect(parseInt(keyboardEvent.keyCode, 10)).toEqual(46)
+ })
+
+ it('contains the key', () => {
+ expect(keyboardEvent.key).toEqual('Delete')
})
itDoNotRunIf(isRunningChrome, 'contains the code', () => {
- expect(parseInt(keyboardEvent.code, 10)).toEqual(65)
+ expect(parseInt(keyboardEvent.code, 10)).toEqual(46)
})
})
@@ -73,7 +77,7 @@ describeWithShallowAndMount('trigger', mountingMethod => {
expect(keydownHandler).toHaveBeenCalled()
})
- it.skip('convert a registered key name to a key code', async () => {
+ it('convert a registered key name to a key code and key', async () => {
const modifiers = {
enter: 13,
esc: 27,
@@ -91,14 +95,47 @@ describeWithShallowAndMount('trigger', mountingMethod => {
pageup: 33,
pagedown: 34
}
+
+ // get from https://github.com/ashubham/w3c-keys/blob/master/index.ts
+ const w3cKeys = {
+ enter: 'Enter',
+ tab: 'Tab',
+ delete: 'Delete',
+ esc: 'Esc',
+ escape: 'Escape',
+ space: ' ',
+ up: 'Up',
+ left: 'Left',
+ right: 'Right',
+ down: 'Down',
+ end: 'End',
+ home: 'Home',
+ backspace: 'Backspace',
+ insert: 'Insert',
+ pageup: 'PageUp',
+ pagedown: 'PageDown'
+ }
+
+ const codeToKeyNameMap = Object.entries(modifiers).reduce(
+ (acc, [key, value]) => Object.assign(acc, { [value]: w3cKeys[key] }),
+ {}
+ )
+
+ const modifiersArray = Object.entries(modifiers)
+
const keyupHandler = jest.fn()
const wrapper = mountingMethod(ComponentWithEvents, {
propsData: { keyupHandler }
})
- for (const keyName in modifiers) {
- const keyCode = modifiers[keyName]
+
+ for (let index = 0; index < modifiersArray.length; index++) {
+ const [keyName, keyCode] = modifiersArray[index]
await wrapper.find('.keydown').trigger(`keyup.${keyName}`)
- expect(keyupHandler.mock.calls[0][0].keyCode).toEqual(keyCode)
+
+ expect(keyupHandler.mock.calls[index][0].keyCode).toEqual(keyCode)
+ expect(keyupHandler.mock.calls[index][0].key).toEqual(
+ codeToKeyNameMap[keyCode]
+ )
}
})
|
Fix VTU not returning the key property on the event when using trigger('eventName.key') (#<I>)
* Add key to events
When triggering 'keydown.tab', the key property does not exist. This affects the case where components use w3c-keys for comparison
* fix: pass key property to event
* refactor: remove console.log
* fix: test describe
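The keyCode-to-key mapping at the heart of the fix, reduced to a Python sketch for illustration (abridged tables):

modifiers = {'enter': 13, 'tab': 9, 'delete': 46}
w3c_keys = {'enter': 'Enter', 'tab': 'Tab', 'delete': 'Delete'}

# invert modifiers and join on the shared key names
code_to_key = {code: w3c_keys[name] for name, code in modifiers.items()}
assert code_to_key[46] == 'Delete'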
|
vuejs_vue-test-utils
|
train
|
3bb149f1dbf6ae2d48b2d5e0595a0a7927bff9b4
|
diff --git a/tests/classes/Utils.js b/tests/classes/Utils.js
index <HASH>..<HASH> 100644
--- a/tests/classes/Utils.js
+++ b/tests/classes/Utils.js
@@ -224,11 +224,33 @@ describe('Utils', () => {
beforeEach(() => {
serverless.init();
+ // create a new tmpDir for the serverlessPath
const tmpDirPath = testUtils.getTmpDirPath();
fse.mkdirsSync(tmpDirPath);
serverlessPath = tmpDirPath;
serverless.config.serverlessPath = tmpDirPath;
+
+ // add some mock data to the serverless service object
+ serverless.service.functions = {
+ foo: {
+ memorySize: 47,
+ timeout: 11,
+ events: [
+ {
+ http: 'GET foo',
+ },
+ ],
+ },
+ bar: {
+ events: [
+ {
+ http: 'GET foo',
+ s3: 'someBucketName',
+ },
+ ],
+ },
+ };
});
it('should create a new file with a tracking id if not found', () => {
|
Add some mock data to the serverless.service object in the track() tests
So that parts of the data retrieval are run.
|
serverless_serverless
|
train
|
37fd9f6fa8c11953dacfa5d193970bd3ea83ee16
|
diff --git a/lib/swag_dev/project/tasks/gem.rb b/lib/swag_dev/project/tasks/gem.rb
index <HASH>..<HASH> 100644
--- a/lib/swag_dev/project/tasks/gem.rb
+++ b/lib/swag_dev/project/tasks/gem.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
-[:gemspec, :package, :install].each do |req|
- require "swag_dev/project/tasks/gem/#{req}"
-end
+require 'swag_dev/project'
+[:gemspec, :package, :install]
+ .each { |req| require_relative "gem/#{req}" }
desc 'Build all the packages'
task gem: ['gem:gemspec', 'gem:package']
|
gem (tasks) minor syntactic changes
|
SwagDevOps_kamaze-project
|
train
|
2c42960baa6bf681b127b51171c512da14accd4e
|
diff --git a/webroot/js/lib/controller.js b/webroot/js/lib/controller.js
index <HASH>..<HASH> 100755
--- a/webroot/js/lib/controller.js
+++ b/webroot/js/lib/controller.js
@@ -146,6 +146,7 @@ Frontend.Controller = Class.extend({
onComplete: function(controller, response) {
if ($form.hasClass('dialog-ajax-form-close-on-success') && response.data.frontendData.jsonData.success) {
$('.modal').modal('hide').data('bs.modal', null);
+ App.Main.publishEvent('ajaxDialog.closed', response);
}
App.Main.UIBlocker.unblockElement(this._dom);
if (typeof callback === 'function') {
|
Publish an event when an AJAX dialog has been closed
|
scherersoftware_cake-frontend-bridge
|
train
|
7d6b721044d13d5712275fff75e87fc200686f10
|
diff --git a/test/integration/scheduler/local-pv-neg-affinity_test.go b/test/integration/scheduler/local-pv-neg-affinity_test.go
index <HASH>..<HASH> 100644
--- a/test/integration/scheduler/local-pv-neg-affinity_test.go
+++ b/test/integration/scheduler/local-pv-neg-affinity_test.go
@@ -19,7 +19,6 @@ package scheduler
// This file tests the VolumeScheduling feature.
import (
- "encoding/json"
"fmt"
"net/http"
"net/http/httptest"
@@ -311,13 +310,3 @@ func markNodeSelector(pod *v1.Pod, node string) {
}
pod.Spec.NodeSelector = ns
}
-
-func printIndentedJson(data interface{}) string {
- var indentedJSON []byte
-
- indentedJSON, err := json.MarshalIndent(data, "", "\t")
- if err != nil {
- return fmt.Sprintf("JSON parse error: %v", err)
- }
- return string(indentedJSON)
-}
diff --git a/test/integration/scheduler/util.go b/test/integration/scheduler/util.go
index <HASH>..<HASH> 100644
--- a/test/integration/scheduler/util.go
+++ b/test/integration/scheduler/util.go
@@ -381,15 +381,3 @@ func cleanupPods(cs clientset.Interface, t *testing.T, pods []*v1.Pod) {
}
}
}
-
-// printAllPods prints a list of all the pods and their node names. This is used
-// for debugging.
-func printAllPods(t *testing.T, cs clientset.Interface, nsName string) {
- podList, err := cs.CoreV1().Pods(nsName).List(metav1.ListOptions{})
- if err != nil {
- t.Logf("Error getting pods: %v", err)
- }
- for _, pod := range podList.Items {
- t.Logf("Pod:\n\tName:%v\n\tNamespace:%v\n\tNode Name:%v\n", pod.Name, pod.Namespace, pod.Spec.NodeName)
- }
-}
|
remove unused functions printIndentedJson and printAllPods in test/integration/scheduler
|
kubernetes_kubernetes
|
train
|
f1526f4f75f4d19ecdf24474991c3749c8cffa40
|
diff --git a/test/unit/lib/processes.specs.js b/test/unit/lib/processes.specs.js
index <HASH>..<HASH> 100644
--- a/test/unit/lib/processes.specs.js
+++ b/test/unit/lib/processes.specs.js
@@ -200,7 +200,7 @@ test.describe('processes', () => {
handler = new processes.Handler(fooProcess, fooSuiteData)
handler.on('close', () => {
test.expect(mocksSandbox.logs.stubs.serviceLog).to.have.been.calledWith({
- log: 'foo ',
+ log: 'foo',
service: 'fooService'
})
test.expect(mocksSandbox.logs.stubs.serviceLog).to.have.been.calledWith({
@@ -215,6 +215,32 @@ test.describe('processes', () => {
})
})
+ test.it('should console processes logs that belongs to test services', (done) => {
+ childProcessMock.stubs.spawn.stdout.on.returns('test-container_1 | foo \ntest-container_1 | foo 2 ')
+ handler = new processes.Handler(fooProcess, fooSuiteData)
+ handler.on('close', () => {
+ test.expect(console.log).to.have.been.calledWith(' foo')
+ test.expect(console.log).to.have.been.calledWith(' foo 2')
+ done()
+ })
+ })
+
+ test.it('should trace processes logs that belongs to test services, but have a [trace] identifier', (done) => {
+ childProcessMock.stubs.spawn.stdout.on.returns('test-container_1 |[Narval] [TRACE]foo \ntest-container_1 |[Narval] [TRACE] foo 2 ')
+ handler = new processes.Handler(fooProcess, fooSuiteData)
+ handler.on('close', () => {
+ test.expect(mocksSandbox.logs.stubs.serviceLog).to.have.been.calledWith({
+ log: 'foo',
+ service: 'fooService'
+ })
+ test.expect(mocksSandbox.logs.stubs.serviceLog).to.have.been.calledWith({
+ log: ' foo 2',
+ service: 'fooService'
+ })
+ done()
+ })
+ })
+
test.it('should write a file with the end code if the option close is received', function (done) {
this.timeout(5000)
childProcessMock.stubs.spawn.on.runOnRegister(false)
|
Add unit tests for test logs fix
|
javierbrea_narval
|
train
|
8f9783ac777f17e2ce4c86702e1dcda2b07ced38
|
diff --git a/src/IO/WebSocket/BinaryImageStream/index.js b/src/IO/WebSocket/BinaryImageStream/index.js
index <HASH>..<HASH> 100644
--- a/src/IO/WebSocket/BinaryImageStream/index.js
+++ b/src/IO/WebSocket/BinaryImageStream/index.js
@@ -16,6 +16,8 @@ export default class BinaryImageStream {
this.view_id = -1;
this.stillQuality = stillQuality;
this.interactiveQuality = interactiveQuality;
+
+ this.lastImageReadyEvent = null;
}
enableView(enabled) {
@@ -71,11 +73,13 @@ export default class BinaryImageStream {
this.fps = Math.floor(10000 / (time - this.lastTime)) / 10;
this.lastTime = time;
- this.emit(IMAGE_READY, {
+ this.lastImageReadyEvent = {
url: this.activeURL,
fps: this.fps,
metadata: this.metadata,
- });
+ };
+
+ this.emit(IMAGE_READY, this.lastImageReadyEvent);
}
this.textMode = !this.textMode;
};
@@ -93,6 +97,10 @@ export default class BinaryImageStream {
onImageReady(callback) {
return this.on(IMAGE_READY, callback);
}
+
+ getLastImageReadyEvent() {
+ return this.lastImageReadyEvent;
+ }
}
Monologue.mixInto(BinaryImageStream);
|
fix(BinaryImageStream): Cache last event notification so it can be retrieved using a get method
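The pattern, sketched in Python for illustration (the project is JavaScript; names mirror the diff): cache the payload at emit time so a consumer that subscribes late can still pull the most recent event through a getter.

class BinaryImageStream(object):
    def __init__(self):
        self._listeners = []
        self._last_image_ready_event = None  # cached at emit time

    def on_image_ready(self, callback):
        self._listeners.append(callback)

    def _emit_image_ready(self, event):
        self._last_image_ready_event = event
        for callback in self._listeners:
            callback(event)

    def get_last_image_ready_event(self):
        return self._last_image_ready_event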
|
Kitware_paraviewweb
|
train
|
75922da752f31752d2fc63d97c1a99b3ef44afbb
|
diff --git a/psamm/commands/vis.py b/psamm/commands/vis.py
index <HASH>..<HASH> 100644
--- a/psamm/commands/vis.py
+++ b/psamm/commands/vis.py
@@ -194,10 +194,11 @@ class VisualizationCommand(MetabolicMixin, ObjectiveMixin, SolverCommandMixin,
exchange_cpds = set()
for reaction in self._mm.reactions:
if self._mm.is_exchange(reaction):
- exchange_rxn = self._mm.get_reaction(reaction)
- g = add_exchange_rxns(g, reaction, exchange_rxn)
- for cpd, _ in exchange_rxn.compounds:
- exchange_cpds.add(text_type(cpd))
+ if reaction != self._model.biomass_reaction:
+ exchange_rxn = self._mm.get_reaction(reaction)
+ g = add_exchange_rxns(g, reaction, exchange_rxn)
+ for cpd, _ in exchange_rxn.compounds:
+ exchange_cpds.add(text_type(cpd))
for node in g.nodes:
if node.props['id'] in bio_cpds_sub:
|
fix bug where one reaction was shown twice, as both an exchange and a biomass reaction
|
zhanglab_psamm
|
train
|
66146797830e138898d525f0a4866b9a7043e2ed
|
diff --git a/pysat/instruments/timed_saber.py b/pysat/instruments/timed_saber.py
index <HASH>..<HASH> 100644
--- a/pysat/instruments/timed_saber.py
+++ b/pysat/instruments/timed_saber.py
@@ -134,7 +134,7 @@ load = cdw.load
# a dictionary needs to be created for each sat_id and tag
# combination along with the file format template
# outer dict keyed by sat_id, inner dict keyed by tag
-basic_tag = {'dir': '/pub/data/timed/saber/version2_0/level2a_cdf',
+basic_tag = {'dir': '/pub/data/timed/saber/level2a_v2_00_CDF',
'remote_fname': '{year:4d}/{month:02d}/' + fname,
'local_fname': fname}
supported_tags = {'': {'': basic_tag}}
|
fix remote directory for saber
|
rstoneback_pysat
|
train
|
6a2c10a89dd7a6d9e8e65dcb53224358776c2edd
|
diff --git a/lang/en/error.php b/lang/en/error.php
index <HASH>..<HASH> 100644
--- a/lang/en/error.php
+++ b/lang/en/error.php
@@ -144,6 +144,7 @@ $string['cannotsetupblock'] = 'Blocks tables could NOT be set up successfully!';
$string['cannotsetupcapformod'] = 'Could not set up the capabilities for {$a}';
$string['cannotsetupcapforplugin'] = 'Could not set up the capabilities for {$a}';
$string['cannotshowhidecoursesincategory'] = 'Cannot show/hide the courses in category {$a}.';
+$string['cannotsignup'] = 'You cannot create a new account because you are already logged in as {$a}.';
$string['cannotunassigncap'] = 'Could not unassign deprecated capability {$a->cap} from role {$a->role}';
$string['cannotunassignrolefrom'] = 'Cannot unassign this user from role id: {$a}';
$string['cannotunzipfile'] = 'Cannot unzip file';
diff --git a/login/signup.php b/login/signup.php
index <HASH>..<HASH> 100644
--- a/login/signup.php
+++ b/login/signup.php
@@ -48,6 +48,22 @@ $PAGE->https_required();
$PAGE->set_url('/login/signup.php');
$PAGE->set_context(context_system::instance());
+// Override wanted URL, we do not want to end up here again if user clicks "Login".
+$SESSION->wantsurl = $CFG->wwwroot . '/';
+
+if (isloggedin() and !isguestuser()) {
+ // Prevent signing up when already logged in.
+ echo $OUTPUT->header();
+ echo $OUTPUT->box_start();
+ $logout = new single_button(new moodle_url($CFG->httpswwwroot . '/login/logout.php',
+ array('sesskey' => sesskey(), 'loginpage' => 1)), get_string('logout'), 'post');
+ $continue = new single_button(new moodle_url('/'), get_string('cancel'), 'get');
+ echo $OUTPUT->confirm(get_string('cannotsignup', 'error', fullname($USER)), $logout, $continue);
+ echo $OUTPUT->box_end();
+ echo $OUTPUT->footer();
+ exit;
+}
+
$mform_signup = $authplugin->signup_form();
if ($mform_signup->is_cancelled()) {
|
MDL-<I> login: prevent signup when user is logged in
|
moodle_moodle
|
train
|
42510d5776ed1b2ab8948869eedbd866136d3e82
|
diff --git a/pymatgen/analysis/phase_diagram.py b/pymatgen/analysis/phase_diagram.py
index <HASH>..<HASH> 100644
--- a/pymatgen/analysis/phase_diagram.py
+++ b/pymatgen/analysis/phase_diagram.py
@@ -366,7 +366,7 @@ class PhaseDiagram(MSONable):
self.all_entries = computed_data["all_entries"]
self.qhull_data = computed_data["qhull_data"]
self.dim = computed_data["dim"]
- self.el_refs = computed_data["el_refs"]
+ self.el_refs = {Element(el): ref for el, ref in computed_data["el_refs"].items()}
self.qhull_entries = computed_data["qhull_entries"]
self.stable_entries = set(self.qhull_entries[i] for i in set(itertools.chain(*self.facets)))
@@ -461,7 +461,7 @@ class PhaseDiagram(MSONable):
all_entries=all_entries,
qhull_data=qhull_data,
dim=dim,
- el_refs=el_refs,
+ el_refs={str(el): ref for el, ref in el_refs.items()},
qhull_entries=qhull_entries,
)
|
Cannot serialize dictionaries with `Element` keys [...]
This is not an ideal fix, since it does a round-trip Element->str->Element, but works for now.
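The underlying constraint, shown with a toy Element class (illustrative only): JSON objects allow string keys only, so rich keys must be stringified on the way out and rebuilt on the way in.

import json

class Element(object):  # toy stand-in for pymatgen's Element
    def __init__(self, symbol):
        self.symbol = symbol
    def __str__(self):
        return self.symbol
    def __eq__(self, other):
        return self.symbol == other.symbol
    def __hash__(self):
        return hash(self.symbol)

el_refs = {Element('Fe'): -8.4}
payload = json.dumps({str(el): ref for el, ref in el_refs.items()})
restored = {Element(el): ref for el, ref in json.loads(payload).items()}
assert restored == el_refs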
|
materialsproject_pymatgen
|
train
|
ca1cfc4caa6bcb94aa2d99c7dbab5ced06fb91dd
|
diff --git a/lib/interfaces/common.js b/lib/interfaces/common.js
index <HASH>..<HASH> 100644
--- a/lib/interfaces/common.js
+++ b/lib/interfaces/common.js
@@ -94,6 +94,9 @@ module.exports = function(suites, context, mocha) {
* @returns {Suite}
*/
only: function only(opts) {
+ if (mocha.options.forbidOnly) {
+ throw createForbiddenExclusivityError(mocha);
+ }
opts.isOnly = true;
return this.create(opts);
},
@@ -127,15 +130,14 @@ module.exports = function(suites, context, mocha) {
suite.file = opts.file;
suites.unshift(suite);
if (opts.isOnly) {
- if (mocha.options.forbidOnly && shouldBeTested(suite)) {
- throw createForbiddenExclusivityError(mocha);
- }
- suite.parent.appendOnlySuite(suite);
+ suite.markOnly();
}
- if (suite.pending) {
- if (mocha.options.forbidPending && shouldBeTested(suite)) {
- throw createUnsupportedError('Pending test forbidden');
- }
+ if (
+ suite.pending &&
+ mocha.options.forbidPending &&
+ shouldBeTested(suite)
+ ) {
+ throw createUnsupportedError('Pending test forbidden');
}
if (typeof opts.fn === 'function') {
opts.fn.call(suite);
diff --git a/lib/suite.js b/lib/suite.js
index <HASH>..<HASH> 100644
--- a/lib/suite.js
+++ b/lib/suite.js
@@ -492,6 +492,15 @@ Suite.prototype.appendOnlySuite = function(suite) {
};
/**
+ * Marks a suite to be `only`.
+ *
+ * @private
+ */
+Suite.prototype.markOnly = function() {
+ this.parent && this.parent.appendOnlySuite(this);
+};
+
+/**
* Adds a test to the list of tests marked `only`.
*
* @private
diff --git a/test/integration/options/forbidOnly.spec.js b/test/integration/options/forbidOnly.spec.js
index <HASH>..<HASH> 100644
--- a/test/integration/options/forbidOnly.spec.js
+++ b/test/integration/options/forbidOnly.spec.js
@@ -92,32 +92,37 @@ describe('--forbid-only', function() {
);
});
- it('should succeed if suite marked only does not match grep', function(done) {
+ it('should fail if suite marked only does not match grep', function(done) {
var fixture = path.join('options', 'forbid-only', 'only-suite');
- runMochaJSON(fixture, args.concat('--fgrep', 'bumble bees'), function(
- err,
- res
- ) {
- if (err) {
- return done(err);
- }
- expect(res, 'to have passed');
- done();
- });
+ var spawnOpts = {stdio: 'pipe'};
+ runMocha(
+ fixture,
+ args.concat('--fgrep', 'bumble bees'),
+ function(err, res) {
+ if (err) {
+ return done(err);
+ }
+ expect(res, 'to have failed with output', new RegExp(onlyErrorMessage));
+ done();
+ },
+ spawnOpts
+ );
});
- it('should succeed if suite marked only does not match inverted grep', function(done) {
+ it('should fail if suite marked only does not match inverted grep', function(done) {
var fixture = path.join('options', 'forbid-only', 'only-suite');
- runMochaJSON(
+ var spawnOpts = {stdio: 'pipe'};
+ runMocha(
fixture,
args.concat('--fgrep', 'suite marked with only', '--invert'),
function(err, res) {
if (err) {
return done(err);
}
- expect(res, 'to have passed');
+ expect(res, 'to have failed with output', new RegExp(onlyErrorMessage));
done();
- }
+ },
+ spawnOpts
);
});
diff --git a/test/unit/suite.spec.js b/test/unit/suite.spec.js
index <HASH>..<HASH> 100644
--- a/test/unit/suite.spec.js
+++ b/test/unit/suite.spec.js
@@ -680,6 +680,21 @@ describe('Suite', function() {
});
});
});
+
+ describe('.markOnly()', function() {
+ it('should call appendOnlySuite on parent', function() {
+ var suite = new Suite('foo');
+ var spy = sandbox.spy();
+ suite.parent = {
+ appendOnlySuite: spy
+ };
+ suite.markOnly();
+
+ expect(spy, 'to have a call exhaustively satisfying', [suite]).and(
+ 'was called once'
+ );
+ });
+ });
});
describe('Test', function() {
|
Consistent handling of --forbid-only for suites and tests (#<I>)
* implement markOnly method for suite class
* add test cases to cover suites markOnly method
* make forbidOnly option throw exception even if suite is excluded by grep
* adapt test cases to changed forbidOnly logic
* reuse existing sandbox variable
* remove unnecessary check for forbid only option
* remove duplicate beforeEach afterEach defined for markOnly test suite
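The core behavioural change, condensed into a Python sketch (illustrative; mocha itself is JavaScript): the exclusivity check moves to suite-creation time, so it fires even when a grep would later exclude the suite.

class ForbiddenExclusivityError(Exception):
    pass

def only(create_suite, opts, forbid_only):
    # fail fast when the suite is defined, regardless of any grep filtering
    if forbid_only:
        raise ForbiddenExclusivityError('`.only` forbidden by --forbid-only')
    opts['is_only'] = True
    return create_suite(opts)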
|
mochajs_mocha
|
train
|
62afad0d70de93920504b241a1d5c064831582a4
|
diff --git a/lib/HttpRequest.js b/lib/HttpRequest.js
index <HASH>..<HASH> 100644
--- a/lib/HttpRequest.js
+++ b/lib/HttpRequest.js
@@ -12,6 +12,8 @@ util.inherits(HttpRequest, Message);
HttpRequest.prototype.isMessyHttpRequest = true;
+HttpRequest.metadataPropertyNames = ['encrypted', 'cert', 'key', 'ca'];
+
HttpRequest.prototype.populate = function (obj) {
if (obj && typeof obj === 'object' && (typeof Buffer === 'undefined' || !Buffer.isBuffer(obj))) {
this.populateFromObject(obj);
@@ -26,9 +28,11 @@ HttpRequest.prototype.populateFromObject = function (obj) {
if (typeof obj.requestLine !== 'undefined') {
this.requestLine.populate(obj.requestLine);
}
- if (typeof obj.encrypted !== 'undefined') {
- this.encrypted = obj.encrypted;
- }
+ HttpRequest.metadataPropertyNames.forEach(function (metadataPropertyName) {
+ if (typeof obj[metadataPropertyName] !== 'undefined') {
+ this[metadataPropertyName] = obj[metadataPropertyName];
+ }
+ }, this);
this.requestLine.populateFromObject(obj);
return this;
};
|
HttpRequest: Added support for more "metadata" properties alongside 'encrypted': 'cert', 'key', 'ca'.
|
papandreou_messy
|
train
|
e9c3bc0a6d4f6e1fb6f0828abf8339fc88936cd5
|
diff --git a/src/api/symboltable.py b/src/api/symboltable.py
index <HASH>..<HASH> 100644
--- a/src/api/symboltable.py
+++ b/src/api/symboltable.py
@@ -330,15 +330,16 @@ class SymbolTable:
return offset
- def leave_scope(self):
+ def leave_scope(self, show_warnings=True):
""" Ends a function body and pops current scope out of the symbol table.
"""
for v in self.table[self.current_scope].values(filter_by_opt=False):
if not v.accessed:
if v.scope == SCOPE.parameter:
kind = 'Parameter'
- v.accessed = True # HINT: Parameters must always be present even if not used!
- if not v.byref: # HINT: byref is always marked as used: it can be used to return a value
+ v.accessed = True # Parameters must always be present even if not used!
+ # byref is always marked as used: it can be used to return a value
+ if show_warnings and not v.byref:
warning_not_used(v.lineno, v.name, kind=kind)
for entry in self.table[self.current_scope].values(filter_by_opt=True): # Symbols of the current level
diff --git a/src/zxbc/zxbparser.py b/src/zxbc/zxbparser.py
index <HASH>..<HASH> 100755
--- a/src/zxbc/zxbparser.py
+++ b/src/zxbc/zxbparser.py
@@ -2904,7 +2904,7 @@ def p_funcdeclforward(p):
error(p.lineno(1), "duplicated declaration for function '%s'" % p[2].name)
p[2].entry.forwarded = True
- SYMBOL_TABLE.leave_scope()
+ SYMBOL_TABLE.leave_scope(show_warnings=False)
FUNCTION_LEVEL.pop()
diff --git a/tests/functional/test_errmsg.txt b/tests/functional/test_errmsg.txt
index <HASH>..<HASH> 100644
--- a/tests/functional/test_errmsg.txt
+++ b/tests/functional/test_errmsg.txt
@@ -29,7 +29,6 @@ builtin.bi:5: warning: [W520] missing whitespace after macro name
builtin.bi:16: warning: [W500] builtin macro "__FILE__" redefined
builtin.bi:17: warning: [W500] builtin macro "__LINE__" redefined
>>> process_file('param3.bas')
-param3.bas:3: warning: Parameter 's' is never used
param3.bas:5: error: Function 'test' (previously declared at 3) type mismatch
param3.bas:6: error: Type Error: Function must return a numeric value, not a string
>>> process_file('typecast1.bas')
|
Fix spurious warning on forward-declared functions
When using DECLARE on a function with parameters,
a warning about unused parameters was issued. Fixed.
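A condensed Python sketch of the mechanism (simplified symbol records; names are illustrative): the scope-exit routine gains a show_warnings flag, and the forward-declaration path passes False so parameters of a bare DECLARE are not reported as unused.

def leave_scope(symbols, show_warnings=True):
    for sym in symbols:
        if not sym['accessed'] and not sym['byref'] and show_warnings:
            print("warning: Parameter '%s' is never used" % sym['name'])

params = [{'name': 's', 'accessed': False, 'byref': False}]
leave_scope(params)                       # real function body: warns
leave_scope(params, show_warnings=False)  # forward DECLARE: silent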
|
boriel_zxbasic
|
train
|
8a9665f93f7d2776e87ff32926cd237cd41873c7
|
diff --git a/src/AnimeDb/Bundle/AnimeDbBundle/Command/UpdateCommand.php b/src/AnimeDb/Bundle/AnimeDbBundle/Command/UpdateCommand.php
index <HASH>..<HASH> 100644
--- a/src/AnimeDb/Bundle/AnimeDbBundle/Command/UpdateCommand.php
+++ b/src/AnimeDb/Bundle/AnimeDbBundle/Command/UpdateCommand.php
@@ -199,7 +199,7 @@ class UpdateCommand extends ContainerAwareCommand
->ignoreUnreadableDirs()
->in($this->getContainer()->getParameter('kernel.root_dir').'/../src')
->in($this->getContainer()->getParameter('kernel.root_dir'))
- ->notPath('Resources/'.$this->getContainer()->getParameter('database_path'))
+ ->notPath('app/Resources')
->notPath('DoctrineMigrations');
$fs->remove($finder);
} catch (\Exception $e) {}
|
save all files in app/Resources
|
anime-db_anime-db
|
train
|
dc4ae62b26fd0c6d91f6c5d49abdff84fbda253d
|
diff --git a/byte-buddy-dep/src/main/java/net/bytebuddy/description/type/TypeList.java b/byte-buddy-dep/src/main/java/net/bytebuddy/description/type/TypeList.java
index <HASH>..<HASH> 100644
--- a/byte-buddy-dep/src/main/java/net/bytebuddy/description/type/TypeList.java
+++ b/byte-buddy-dep/src/main/java/net/bytebuddy/description/type/TypeList.java
@@ -451,7 +451,7 @@ public interface TypeList extends FilterableList<TypeDescription, TypeList> {
* @param detachedTypes The detached types.
* @return A type list representing the detached types being attached to the provided type description.
*/
- public static Generic attach(TypeDescription typeDescription, List<? extends TypeDescription.Generic> detachedTypes) {
+ public static Generic attachLazy(TypeDescription typeDescription, List<? extends TypeDescription.Generic> detachedTypes) {
return new WithLazyResolution(detachedTypes, TypeDescription.Generic.Visitor.Substitutor.ForAttachment.of(typeDescription));
}
diff --git a/byte-buddy-dep/src/main/java/net/bytebuddy/dynamic/scaffold/InstrumentedType.java b/byte-buddy-dep/src/main/java/net/bytebuddy/dynamic/scaffold/InstrumentedType.java
index <HASH>..<HASH> 100644
--- a/byte-buddy-dep/src/main/java/net/bytebuddy/dynamic/scaffold/InstrumentedType.java
+++ b/byte-buddy-dep/src/main/java/net/bytebuddy/dynamic/scaffold/InstrumentedType.java
@@ -676,7 +676,7 @@ public interface InstrumentedType extends TypeDescription {
@Override
public TypeList.Generic getInterfaces() {
- return TypeList.Generic.ForDetachedTypes.attach(this, interfaceTypes);
+ return TypeList.Generic.ForDetachedTypes.attachLazy(this, interfaceTypes);
}
@Override
|
Changed naming to avoid incorrect use of API.
|
raphw_byte-buddy
|
train
|
335d7597b00804ed141baaadbeab74a09bb3cd42
|
diff --git a/lib/semlogr/formatters/json_formatter.rb b/lib/semlogr/formatters/json_formatter.rb
index <HASH>..<HASH> 100644
--- a/lib/semlogr/formatters/json_formatter.rb
+++ b/lib/semlogr/formatters/json_formatter.rb
@@ -27,7 +27,7 @@ module Semlogr
event[:error] = {
type: error.class,
message: error.message,
- backtrace: backtrace.join("\n")
+ backtrace: backtrace
}
end
|
For the default JSON formatter, it should probably not join the backtrace with newlines, as services like CloudWatch and Logentries don't display a joined string as well as they display an array.
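For comparison, the same idea in a Python JSON formatter (illustrative, standard library only): keep the backtrace as a list of frames and let the log viewer decide how to render it.

import json
import traceback

def format_error(exc):
    return json.dumps({'error': {
        'type': type(exc).__name__,
        'message': str(exc),
        'backtrace': traceback.format_tb(exc.__traceback__),  # list, not joined
    }})

try:
    raise ValueError('boom')
except ValueError as error:
    print(format_error(error))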
|
semlogr_semlogr
|
train
|
d352141e60b323bce3b51638d0ea58270b3a3997
|
diff --git a/course/report/progress/index.php b/course/report/progress/index.php
index <HASH>..<HASH> 100644
--- a/course/report/progress/index.php
+++ b/course/report/progress/index.php
@@ -210,7 +210,7 @@ foreach($progress as $user) {
$a->state=$describe;
$a->date=$date;
$a->user=fullname($user);
- $a->activity=$activity->name;
+ $a->activity=strip_tags($activity->name);
$fulldescribe=get_string('progress-title','completion',$a);
if($csv) {
|
MDL-<I>: Completion system: progress report had problems with HTML code in activity names causing invalid/broken html output
|
moodle_moodle
|
train
|
69d9bdcbbb90f3a6c0271f99883ed59bb0c0c44c
|
diff --git a/livereload/handlers.py b/livereload/handlers.py
index <HASH>..<HASH> 100644
--- a/livereload/handlers.py
+++ b/livereload/handlers.py
@@ -63,30 +63,35 @@ class LiveReloadHandler(WebSocketHandler):
return
if delay:
loop = ioloop.IOLoop.current()
- loop.call_later(delay, self.watch_tasks)
+ loop.call_later(delay, self.reload_waiters)
else:
- self.watch_tasks()
+ self.reload_waiters()
- def watch_tasks(self):
+ @classmethod
+ def reload_waiters(cls, path=None):
logger.info(
'Reload %s waiters: %s',
- len(self.waiters),
- self.watcher.filepath,
+ len(cls.waiters),
+ cls.watcher.filepath,
)
+ if path is None:
+ path = cls.watcher.filepath or '*'
+
msg = {
'command': 'reload',
- 'path': self.watcher.filepath or '*',
- 'liveCSS': True
+ 'path': path,
+ 'liveCSS': True,
+ 'liveImg': True,
}
- self._last_reload_time = time.time()
- for waiter in LiveReloadHandler.waiters:
+ cls._last_reload_time = time.time()
+ for waiter in cls.waiters:
try:
waiter.write_message(msg)
except:
logger.error('Error sending message', exc_info=True)
- LiveReloadHandler.waiters.remove(waiter)
+ cls.waiters.remove(waiter)
def on_message(self, message):
"""Handshake with livereload.js
@@ -130,18 +135,8 @@ class LiveReloadJSHandler(web.RequestHandler):
class ForceReloadHandler(web.RequestHandler):
def get(self):
- msg = {
- 'command': 'reload',
- 'path': self.get_argument('path', default=None) or '*',
- 'liveCSS': True,
- 'liveImg': True
- }
- for waiter in LiveReloadHandler.waiters:
- try:
- waiter.write_message(msg)
- except:
- logger.error('Error sending message', exc_info=True)
- LiveReloadHandler.waiters.remove(waiter)
+ path = self.get_argument('path', default=None) or '*'
+ LiveReloadHandler.reload_waiters(path)
self.write('ok')
|
refactor reload waiters
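The shape of the refactor, as a minimal Python sketch (no Tornado imports; names mirror the diff): the broadcast loop lives in one classmethod so the websocket handler and the HTTP force-reload endpoint share a single code path.

class LiveReloadHandler(object):
    waiters = set()

    @classmethod
    def reload_waiters(cls, path=None):
        msg = {'command': 'reload', 'path': path or '*',
               'liveCSS': True, 'liveImg': True}
        for waiter in list(cls.waiters):
            try:
                waiter.write_message(msg)
            except Exception:
                cls.waiters.discard(waiter)

class ForceReloadHandler(object):
    def get(self, path=None):
        LiveReloadHandler.reload_waiters(path)  # shared code path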
|
lepture_python-livereload
|
train
|
b89d66e84229f1948f13a2fc2d8d348df7b7c5c8
|
diff --git a/play-maven-plugin/src/main/java/com/google/code/play/AbstractArchivingMojo.java b/play-maven-plugin/src/main/java/com/google/code/play/AbstractArchivingMojo.java
index <HASH>..<HASH> 100644
--- a/play-maven-plugin/src/main/java/com/google/code/play/AbstractArchivingMojo.java
+++ b/play-maven-plugin/src/main/java/com/google/code/play/AbstractArchivingMojo.java
@@ -83,6 +83,10 @@ public abstract class AbstractArchivingMojo
{
case ArchiveEntry.DIRECTORY:
getLog().debug( "adding directory " + destFile.getAbsolutePath() );
+ if ( destFile.exists() )
+ {
+ getLog().warn( "directory already exists!!!" );
+ }
if ( !destFile.exists() && !destFile.mkdirs() )
{
throw new IOException( "Unable to create directory: " + destFile );
|
Add an additional, temporary debug message.
|
play1-maven-plugin_play1-maven-plugin
|
train
|
8bb7860ae48238340454d75f1723bd1ce9bd6f32
|
diff --git a/lib/cucumber/formatter/usage.rb b/lib/cucumber/formatter/usage.rb
index <HASH>..<HASH> 100644
--- a/lib/cucumber/formatter/usage.rb
+++ b/lib/cucumber/formatter/usage.rb
@@ -16,12 +16,16 @@ module Cucumber
@options = options
@stepdef_to_match = Hash.new { |h, stepdef_key| h[stepdef_key] = [] }
@total_duration = 0
+ @matches = {}
+ runtime.configuration.on_event :step_match do |event|
+ @matches[event.test_step.source] = event.step_match
+ end
end
def after_test_step(test_step, result)
return if HookQueryVisitor.new(test_step).hook?
- step_match = @runtime.step_match(test_step.source.last.name)
+ step_match = @matches[test_step.source]
step_definition = step_match.step_definition
stepdef_key = StepDefKey.new(step_definition.regexp_source, step_definition.location)
unless @stepdef_to_match[stepdef_key].map { |key| key[:location] }.include? test_step.location
diff --git a/lib/cucumber/runtime.rb b/lib/cucumber/runtime.rb
index <HASH>..<HASH> 100644
--- a/lib/cucumber/runtime.rb
+++ b/lib/cucumber/runtime.rb
@@ -87,10 +87,6 @@ module Cucumber
@results.steps(status)
end
- def step_match(step_name, name_to_report=nil) #:nodoc:
- @support_code.step_match(step_name, name_to_report)
- end
-
def unmatched_step_definitions
@support_code.unmatched_step_definitions
end
|
Simplify Runtime API by using EventBus for StepMatches
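The same idea in Python, as a hedged sketch (the event-bus API shown is an assumption, not Cucumber's actual Ruby interface): subscribe once, cache each match keyed by its test step, and read from the cache instead of re-matching later.

class UsageFormatter:
    def __init__(self, event_bus):
        self.matches = {}
        # Record every step match as it is announced, instead of asking
        # the runtime to re-match step names after the fact.
        event_bus.on('step_match', self._record_match)

    def _record_match(self, event):
        self.matches[event.test_step] = event.step_match

    def after_test_step(self, test_step):
        return self.matches.get(test_step)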
|
cucumber_cucumber-ruby
|
train
|
297e0cdb4d993954f6528e187c680bf912caa9a3
|
diff --git a/kite-maven-plugin/src/main/java/org/kitesdk/maven/plugins/CreateDatasetMojo.java b/kite-maven-plugin/src/main/java/org/kitesdk/maven/plugins/CreateDatasetMojo.java
index <HASH>..<HASH> 100644
--- a/kite-maven-plugin/src/main/java/org/kitesdk/maven/plugins/CreateDatasetMojo.java
+++ b/kite-maven-plugin/src/main/java/org/kitesdk/maven/plugins/CreateDatasetMojo.java
@@ -149,7 +149,7 @@ public class CreateDatasetMojo extends AbstractDatasetMojo {
Preconditions.checkArgument(datasetName != null,
"kite.datasetName is required if kite.uri is not used");
DatasetRepository repo = getDatasetRepository();
- repo.create(datasetNamespace, datasetName, descriptorBuilder.build());
+ repo.create(datasetName, descriptorBuilder.build());
}
}
diff --git a/kite-maven-plugin/src/main/java/org/kitesdk/maven/plugins/DeleteDatasetMojo.java b/kite-maven-plugin/src/main/java/org/kitesdk/maven/plugins/DeleteDatasetMojo.java
index <HASH>..<HASH> 100644
--- a/kite-maven-plugin/src/main/java/org/kitesdk/maven/plugins/DeleteDatasetMojo.java
+++ b/kite-maven-plugin/src/main/java/org/kitesdk/maven/plugins/DeleteDatasetMojo.java
@@ -55,7 +55,7 @@ public class DeleteDatasetMojo extends AbstractDatasetMojo {
Preconditions.checkArgument(datasetName != null,
"kite.datasetName is required if kite.uri is not used");
DatasetRepository repo = getDatasetRepository();
- repo.delete(datasetNamespace, datasetName);
+ repo.delete(datasetName);
}
}
}
diff --git a/kite-maven-plugin/src/main/java/org/kitesdk/maven/plugins/UpdateDatasetMojo.java b/kite-maven-plugin/src/main/java/org/kitesdk/maven/plugins/UpdateDatasetMojo.java
index <HASH>..<HASH> 100644
--- a/kite-maven-plugin/src/main/java/org/kitesdk/maven/plugins/UpdateDatasetMojo.java
+++ b/kite-maven-plugin/src/main/java/org/kitesdk/maven/plugins/UpdateDatasetMojo.java
@@ -90,7 +90,7 @@ public class UpdateDatasetMojo extends AbstractDatasetMojo {
Preconditions.checkArgument(datasetName != null,
"kite.datasetName is required if kite.uri is not used");
DatasetRepository repo = getDatasetRepository();
- descriptor = repo.load(datasetNamespace, datasetName).getDescriptor();
+ descriptor = repo.load(datasetName).getDescriptor();
}
DatasetDescriptor.Builder descriptorBuilder = new DatasetDescriptor.Builder(
@@ -117,7 +117,7 @@ public class UpdateDatasetMojo extends AbstractDatasetMojo {
} else {
// datasetName is checked above
DatasetRepository repo = getDatasetRepository();
- repo.update(datasetNamespace, datasetName, descriptorBuilder.build());
+ repo.update(datasetName, descriptorBuilder.build());
}
}
}
|
CDK-<I>: Remove namespaces from maven plugin calls.
This was cherry-picked from a version with namespaces.
|
kite-sdk_kite
|
train
|
57365822b8b78a224d63ab91c8541346ff480d10
|
diff --git a/pysc2/lib/remote_controller.py b/pysc2/lib/remote_controller.py
index <HASH>..<HASH> 100644
--- a/pysc2/lib/remote_controller.py
+++ b/pysc2/lib/remote_controller.py
@@ -268,6 +268,12 @@ class RemoteController(object):
else:
self._last_obs = obs
+ if FLAGS.sc2_log_actions and obs.actions:
+ sys.stderr.write(" Executed actions ".center(60, "<") + "\n")
+ for action in obs.actions:
+ sys.stderr.write(str(action))
+ sys.stderr.flush()
+
return obs
def available_maps(self):
@@ -286,10 +292,11 @@ class RemoteController(object):
@sw.decorate
def actions(self, req_action):
"""Send a `sc_pb.RequestAction`, which may include multiple actions."""
- if FLAGS.sc2_log_actions:
+ if FLAGS.sc2_log_actions and req_action.actions:
+ sys.stderr.write(" Sending actions ".center(60, ">") + "\n")
for action in req_action.actions:
sys.stderr.write(str(action))
- sys.stderr.flush()
+ sys.stderr.flush()
return self._client.send(action=req_action)
@@ -303,10 +310,11 @@ class RemoteController(object):
@sw.decorate
def observer_actions(self, req_observer_action):
"""Send a `sc_pb.RequestObserverAction`."""
- if FLAGS.sc2_log_actions:
+ if FLAGS.sc2_log_actions and req_observer_action.actions:
+ sys.stderr.write(" Sending observer actions ".center(60, ">") + "\n")
for action in req_observer_action.actions:
sys.stderr.write(str(action))
- sys.stderr.flush()
+ sys.stderr.flush()
return self._client.send(obs_action=req_observer_action)
|
Better logging of actions so it's easier to see how the game interpreted the actions.
PiperOrigin-RevId: <I>
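A small Python sketch of the banner-plus-flush logging shape used above; emitting the banner only when there are actions keeps empty separator blocks out of the log, and flushing once after the loop avoids per-action flushes:

import sys

def log_actions(actions, banner, fill='>'):
    # Skip the banner entirely when there is nothing to log.
    if not actions:
        return
    sys.stderr.write((' %s ' % banner).center(60, fill) + '\n')
    for action in actions:
        sys.stderr.write(str(action))
    sys.stderr.flush()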
|
deepmind_pysc2
|
train
|
40728f67b3a7c037eb05e45e3d208eea7afdc961
|
diff --git a/services/base.js b/services/base.js
index <HASH>..<HASH> 100644
--- a/services/base.js
+++ b/services/base.js
@@ -175,21 +175,24 @@ class BaseService {
let outPreviewUrl
let outPattern
if (namedParams) {
- outExampleUrl = this._makeFullUrlFromParams(pattern, namedParams)
outPreviewUrl = this._makeStaticExampleUrl(staticExample)
outPattern = `${this._dotSvg(this._makeFullUrl(pattern))}${suffix}`
+ outExampleUrl = `${this._makeFullUrlFromParams(
+ pattern,
+ namedParams
+ )}${suffix}`
} else if (staticExample) {
+ outPreviewUrl = this._makeStaticExampleUrl(staticExample)
+ outPattern = `${this._dotSvg(this._makeFullUrl(pattern))}${suffix}`
outExampleUrl = `${this._dotSvg(
this._makeFullUrl(exampleUrl)
)}${suffix}`
- outPreviewUrl = this._makeStaticExampleUrl(staticExample)
- outPattern = `${this._dotSvg(this._makeFullUrl(pattern))}${suffix}`
} else {
- outExampleUrl = undefined
outPreviewUrl = `${this._dotSvg(
this._makeFullUrl(previewUrl)
)}${suffix}`
outPattern = undefined
+ outExampleUrl = undefined
}
return {
diff --git a/services/base.spec.js b/services/base.spec.js
index <HASH>..<HASH> 100644
--- a/services/base.spec.js
+++ b/services/base.spec.js
@@ -54,6 +54,13 @@ class DummyService extends BaseService {
staticExample: this.render({ namedParamA: 'foo', queryParamA: 'bar' }),
keywords: ['hello'],
},
+ {
+ pattern: ':world',
+ namedParams: { world: 'World' },
+ query: { queryParamA: '!!!' },
+ staticExample: this.render({ namedParamA: 'foo', queryParamA: 'bar' }),
+ keywords: ['hello'],
+ },
]
}
static get route() {
@@ -475,6 +482,7 @@ describe('BaseService', function() {
third,
fourth,
fifth,
+ sixth,
] = DummyService.prepareExamples()
expect(first).to.deep.equal({
title: 'DummyService',
@@ -504,6 +512,15 @@ describe('BaseService', function() {
expect(third).to.deep.equal(preparedStaticExample)
expect(fourth).to.deep.equal(preparedStaticExample)
expect(fifth).to.deep.equal(preparedStaticExample)
+ expect(sixth).to.deep.equal({
+ title: 'DummyService',
+ exampleUrl: '/foo/World.svg?queryParamA=%21%21%21',
+ previewUrl:
+ '/badge/cat-Hello%20namedParamA%3A%20foo%20with%20queryParamA%3A%20bar-lightgrey.svg',
+ urlPattern: '/foo/:world.svg?queryParamA=%21%21%21',
+ documentation: undefined,
+ keywords: ['hello'],
+ })
})
})
|
Examples: Fix queryParams with namedParams (#<I>)
These tests clearly should be refactored, though I’d like to get this fix in, in advance of a more involved refactor that shifts most of the responsibility away from this function. Maybe we can even eliminate at least one of these cases in the meantime.
|
badges_shields
|
train
|
6af681bb38b73d054d30d7655b69ae18e6836953
|
diff --git a/lib/mongoid/document.rb b/lib/mongoid/document.rb
index <HASH>..<HASH> 100644
--- a/lib/mongoid/document.rb
+++ b/lib/mongoid/document.rb
@@ -262,7 +262,7 @@ module Mongoid
#
# @since 2.4.0
def model_key
- @model_cache_key ||= "#{self.class.model_name.cache_key}"
+ @model_cache_key ||= self.class.model_name.cache_key
end
# Implement this for calls to flatten on array.
|
self.class.model_name.cache_key is already a string; no need to convert it
|
mongodb_mongoid
|
train
|
c2ca84e4da68cecd35738db4701f642104a7f572
|
diff --git a/commands/command_migrate.go b/commands/command_migrate.go
index <HASH>..<HASH> 100644
--- a/commands/command_migrate.go
+++ b/commands/command_migrate.go
@@ -43,6 +43,10 @@ var (
// migrateCommitMessage is the message to use with the commit generated
// by the migrate command
migrateCommitMessage string
+
+ // exportRemote is the remote from which to download objects when
+ // performing an export
+ exportRemote string
)
// migrate takes the given command and arguments, *odb.ObjectDatabase, as well
@@ -300,6 +304,7 @@ func init() {
exportCmd := NewCommand("export", migrateExportCommand)
exportCmd.Flags().BoolVar(&migrateVerbose, "verbose", false, "Verbose logging")
exportCmd.Flags().StringVar(&objectMapFilePath, "object-map", "", "Object map file")
+ exportCmd.Flags().StringVar(&exportRemote, "remote", "", "Remote from which to download objects")
RegisterCommand("migrate", nil, func(cmd *cobra.Command) {
cmd.PersistentFlags().StringVarP(&includeArg, "include", "I", "", "Include a list of paths")
diff --git a/commands/command_migrate_export.go b/commands/command_migrate_export.go
index <HASH>..<HASH> 100644
--- a/commands/command_migrate_export.go
+++ b/commands/command_migrate_export.go
@@ -113,9 +113,18 @@ func migrateExportCommand(cmd *cobra.Command, args []string) {
ExitWithError(err)
}
+ remote := cfg.Remote()
+ if cmd.Flag("remote").Changed {
+ remote = exportRemote
+ }
+ remoteURL := getAPIClient().Endpoints.RemoteEndpoint("download", remote).Url
+ if remoteURL == "" && cmd.Flag("remote").Changed {
+ ExitWithError(errors.Errorf("fatal: invalid remote %s provided", remote))
+ }
+
// If we have a valid remote, pre-download all objects using the Transfer Queue
- if remoteURL := getAPIClient().Endpoints.RemoteEndpoint("download", cfg.Remote()).Url; remoteURL != "" {
- q := newDownloadQueue(getTransferManifestOperationRemote("Download", cfg.Remote()), cfg.Remote())
+ if remoteURL != "" {
+ q := newDownloadQueue(getTransferManifestOperationRemote("Download", remote), remote)
gs := lfs.NewGitScanner(func(p *lfs.WrappedPointer, err error) {
if err != nil {
return
diff --git a/test/test-migrate-export.sh b/test/test-migrate-export.sh
index <HASH>..<HASH> 100755
--- a/test/test-migrate-export.sh
+++ b/test/test-migrate-export.sh
@@ -361,3 +361,51 @@ begin_test "migrate export (--verbose)"
git lfs migrate export --everything --include="*" --verbose 2>&1 | grep -q "migrate: commit "
)
end_test
+
+begin_test "migrate export (--remote)"
+(
+ set -e
+
+ setup_single_remote_branch_tracked
+
+ git push origin master
+
+ md_oid="$(calc_oid "$(cat a.md)")"
+ txt_oid="$(calc_oid "$(cat a.txt)")"
+
+ assert_pointer "refs/heads/master" "a.md" "$md_oid" "50"
+ assert_pointer "refs/heads/master" "a.txt" "$txt_oid" "30"
+
+ # Flush the cache to ensure all objects have to be downloaded
+ rm -rf .git/lfs/objects
+
+ # Setup a new remote and invalidate the default
+ remote_url="$(git config --get remote.origin.url)"
+ git remote add zeta "$remote_url"
+ git remote set-url origin ""
+
+ git lfs migrate export --everything --remote="zeta" --include="*.md, *.txt"
+
+ [ ! $(assert_pointer "refs/heads/master" "a.md" "$md_oid" "50") ]
+ [ ! $(assert_pointer "refs/heads/master" "a.txt" "$txt_oid" "30") ]
+
+ refute_local_object "$md_oid" "50"
+ refute_local_object "$txt_oid" "30"
+)
+end_test
+
+begin_test "migrate export (invalid --remote)"
+(
+ set -e
+
+ setup_single_remote_branch_tracked
+
+ git lfs migrate export --include="*" --remote="zz" 2>&1 | tee migrate.log
+ if [ ${PIPESTATUS[0]} -eq 0 ]; then
+ echo >&2 "fatal: expected git lfs migrate export to fail, didn't"
+ exit 1
+ fi
+
+ grep "fatal: invalid remote zz provided" migrate.log
+)
+end_test
\ No newline at end of file
|
commands: add --remote flag to export
Add a --remote flag to the `migrate export` command allowing
specification of a remote from which to download objects
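The flag-falls-back-to-config shape, sketched in Python (names are illustrative; Git LFS itself is Go): override the configured remote only when --remote was passed explicitly, and fail fast when that override does not resolve to an endpoint.

import argparse

def resolve_remote(argv, configured_remote, endpoint_url_for):
    parser = argparse.ArgumentParser()
    parser.add_argument('--remote', default=None)
    args = parser.parse_args(argv)

    # Prefer the explicit flag; otherwise keep the configured default.
    remote = args.remote if args.remote is not None else configured_remote
    url = endpoint_url_for(remote)
    if not url and args.remote is not None:
        raise SystemExit('fatal: invalid remote %s provided' % remote)
    return remote, url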
|
git-lfs_git-lfs
|
train
|
6e834443d251b5ce1b8905f79940955830a68116
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -21,6 +21,7 @@ setup(
],
install_requires=[
'hashids',
+ 'psycopg2',
'requests',
],
)
|
Added psycopg2 as a requirement
|
xtream1101_cutil
|
train
|
445a57a95f29aff67241b321d7a72bb6cf2f3ea3
|
diff --git a/lib/autokey/scripting/engine.py b/lib/autokey/scripting/engine.py
index <HASH>..<HASH> 100644
--- a/lib/autokey/scripting/engine.py
+++ b/lib/autokey/scripting/engine.py
@@ -221,12 +221,13 @@ Folders created within temporary folders must themselves be set temporary")
if abbreviations and isinstance(abbreviations, str):
abbreviations = [abbreviations]
check_abbreviation_unique(self.configManager, abbreviations, window_filter)
+
if not replace_existing_hotkey:
check_hotkey_unique(self.configManager, hotkey, window_filter)
else:
- existing_item = self.get_item_with_hotkey(hotkey)
- if not isinstance(existing_item, configmanager.configmanager.GlobalHotkey):
- existing_item.unset_hotkey()
+ # XXX If something causes the phrase creation to fail after this,
+ # this will unset the hotkey without replacing it.
+ self.__clear_existing_hotkey(hotkey, window_filter)
@@ -256,6 +257,12 @@ Folders created within temporary folders must themselves be set temporary")
self.configManager.config_altered(False)
+ def __clear_existing_hotkey(self, hotkey, window_filter):
+ existing_item = self.get_item_with_hotkey(hotkey)
+ if existing_item and not isinstance(existing_item, configmanager.configmanager.GlobalHotkey):
+ if existing_item.filter_matches(window_filter):
+ existing_item.unset_hotkey()
+
def create_abbreviation(self, folder, description, abbr, contents):
"""
DEPRECATED. Use engine.create_phrase() with appropriate keyword arguments instead.
|
Engine: check the window filter matches if overriding a phrase
|
autokey_autokey
|
train
|
feaf7ba84e98bbb7e3127a4c11c01c3cbcd1db15
|
diff --git a/comparators/large-building.js b/comparators/large-building.js
index <HASH>..<HASH> 100644
--- a/comparators/large-building.js
+++ b/comparators/large-building.js
@@ -10,7 +10,7 @@ function largeBuilding(newVersion, oldVersion, callback) {
}
var area = turfArea(newVersion);
- if (area > 100000 && newVersion.properties.hasOwnProperty('building')) {
+ if (area > 1500 && newVersion.properties.hasOwnProperty('building')) {
result['result:large-building'] = area;
}
return callback(null, result);
|
updated threshold for acceptable area size to <I>
|
mapbox_osm-compare
|
train
|
1e252e0b45b41b2c3d216c1d2542d592380bf495
|
diff --git a/lib/jsdom/living/xhr-utils.js b/lib/jsdom/living/xhr-utils.js
index <HASH>..<HASH> 100644
--- a/lib/jsdom/living/xhr-utils.js
+++ b/lib/jsdom/living/xhr-utils.js
@@ -349,6 +349,8 @@ function createClient(xhr) {
preflightRequestHeaders["Access-Control-Request-Headers"] = nonSimpleHeaders.join(", ");
}
+ preflightRequestHeaders["User-Agent"] = requestHeaders["User-Agent"];
+
flag.preflight = true;
const preflightOptions = {
|
Add User-Agent header to CORS preflight requests
Fixes #<I>.
This mirrors the behavior of other browsers, and fixes broken XHR
requests to APIs that require a User-Agent header for all requests,
such as api.github.com.
|
jsdom_jsdom
|
train
|
0c9b8c3e8eb3c65a2f1b4fbee011579d3a1e322e
|
diff --git a/src/bosh-director/lib/bosh/director/disk_manager.rb b/src/bosh-director/lib/bosh/director/disk_manager.rb
index <HASH>..<HASH> 100644
--- a/src/bosh-director/lib/bosh/director/disk_manager.rb
+++ b/src/bosh-director/lib/bosh/director/disk_manager.rb
@@ -177,6 +177,7 @@ module Bosh::Director
# @todo[multi-disks] the rescue is duplicated with migrate_disk
def mount_disk(disk)
agent_client = agent_client(disk.instance)
+ agent_client.wait_until_ready
agent_client.mount_disk(disk.disk_cid)
rescue => e
@logger.debug("Failed to mount disk, deleting new disk. #{e.inspect}")
diff --git a/src/bosh-director/spec/unit/disk_manager_spec.rb b/src/bosh-director/spec/unit/disk_manager_spec.rb
index <HASH>..<HASH> 100644
--- a/src/bosh-director/spec/unit/disk_manager_spec.rb
+++ b/src/bosh-director/spec/unit/disk_manager_spec.rb
@@ -47,6 +47,7 @@ module Bosh::Director
allow(cloud_collection).to receive(:detach_disk)
allow(agent_client).to receive(:stop)
allow(agent_client).to receive(:mount_disk)
+ allow(agent_client).to receive(:wait_until_ready)
allow(agent_client).to receive(:migrate_disk)
allow(agent_client).to receive(:unmount_disk)
allow(agent_client).to receive(:update_settings)
@@ -59,6 +60,7 @@ module Bosh::Director
it 'attaches + mounts disk' do
expect(cloud_factory).to receive(:for_availability_zone).with(instance_model.availability_zone).once.and_return(cloud_collection)
expect(cloud_collection).to receive(:attach_disk).with('vm234', 'disk123')
+ expect(agent_client).to receive(:wait_until_ready)
expect(agent_client).to receive(:mount_disk).with('disk123')
disk_manager.attach_disk(persistent_disk)
end
diff --git a/src/bosh-director/spec/unit/jobs/attach_disk_spec.rb b/src/bosh-director/spec/unit/jobs/attach_disk_spec.rb
index <HASH>..<HASH> 100644
--- a/src/bosh-director/spec/unit/jobs/attach_disk_spec.rb
+++ b/src/bosh-director/spec/unit/jobs/attach_disk_spec.rb
@@ -237,6 +237,7 @@ module Bosh::Director
let(:agent_client) do
instance_double(AgentClient,
mount_disk: nil,
+ wait_until_ready: nil,
list_disk: ['original-disk-cid'],
stop: nil,
unmount_disk: nil
@@ -275,7 +276,7 @@ module Bosh::Director
let(:original_disk) { nil }
- let(:agent_client) { instance_double(AgentClient, mount_disk: nil) }
+ let(:agent_client) { instance_double(AgentClient, mount_disk: nil, wait_until_ready: nil) }
before do
allow(Config.cloud).to receive(:attach_disk)
allow(AgentClient).to receive(:with_vm_credentials_and_agent_id).and_return(agent_client)
|
Director should wait for the agent to be ready when mounting a persistent disk
There are CPIs that need to recreate a VM in order to attach a volume.
Specifically the Kubernetes CPI @ <URL>
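A generic Python sketch of the wait-until-ready step (the timeout and interval values are assumptions): poll the agent until a ping succeeds, then proceed to mount.

import time

def wait_until_ready(ping, timeout=300.0, interval=1.0):
    # `ping` is any callable that raises while the agent is unreachable,
    # e.g. right after a CPI recreated the VM to attach a volume.
    deadline = time.monotonic() + timeout
    while True:
        try:
            return ping()
        except Exception:
            if time.monotonic() >= deadline:
                raise
            time.sleep(interval)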
|
cloudfoundry_bosh
|
train
|
48d6af5a6c135cb8077887c43bbfaa84fcdc20a7
|
diff --git a/python3/cobs/cobs/_cobs_py.py b/python3/cobs/cobs/_cobs_py.py
index <HASH>..<HASH> 100644
--- a/python3/cobs/cobs/_cobs_py.py
+++ b/python3/cobs/cobs/_cobs_py.py
@@ -13,6 +13,10 @@ def _get_buffer_view(in_bytes):
mv = memoryview(in_bytes)
if mv.ndim > 1 or mv.itemsize > 1:
raise BufferError('object must be a single-dimension buffer of bytes.')
+ try:
+ mv = mv.cast('c')
+ except AttributeError:
+ pass
return mv
def encode(in_bytes):
diff --git a/python3/cobs/cobsr/_cobsr_py.py b/python3/cobs/cobsr/_cobsr_py.py
index <HASH>..<HASH> 100644
--- a/python3/cobs/cobsr/_cobsr_py.py
+++ b/python3/cobs/cobsr/_cobsr_py.py
@@ -13,6 +13,10 @@ def _get_buffer_view(in_bytes):
mv = memoryview(in_bytes)
if mv.ndim > 1 or mv.itemsize > 1:
raise BufferError('object must be a single-dimension buffer of bytes.')
+ try:
+ mv = mv.cast('c')
+ except AttributeError:
+ pass
return mv
def encode(in_bytes):
|
Fix pure Python implementation for Python <I> and later
Python <I> changed the type of byte memoryview indexed items:
memoryview(b'abc')[0] == b'abc'[0] == <I>
In Python <I> and earlier:
memoryview(b'abc')[0] == b'a'
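The behavior difference as a runnable Python 3 snippet; the except AttributeError in the patch simply keeps the cast optional on interpreters whose memoryview lacks .cast:

data = memoryview(b'abc')

# On Python 3, indexing a bytes-backed memoryview yields an int,
# matching bytes indexing:
assert data[0] == 97

# Casting to format 'c' restores one-byte bytes items, the behavior
# the pure-Python COBS code relied on:
assert data.cast('c')[0] == b'a'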
|
cmcqueen_cobs-python
|
train
|
25fce2da1a9989bc0707336fa63c6b07bae9ff35
|
diff --git a/tests/test_signature_parser.py b/tests/test_signature_parser.py
index <HASH>..<HASH> 100644
--- a/tests/test_signature_parser.py
+++ b/tests/test_signature_parser.py
@@ -15,6 +15,7 @@
Test signature parsing.
"""
+from os import environ
from os import sys
import string
import unittest
@@ -37,7 +38,7 @@ from into_dbus_python import IntoDPError
settings.register_profile(
"tracing", deadline=None, suppress_health_check=[HealthCheck.too_slow])
-if sys.gettrace() is not None:
+if sys.gettrace() is not None or environ.get('TRAVIS') is not None:
settings.load_profile("tracing")
# Omits h, unix fd, because it is unclear what are valid fds for dbus
|
Also use the tracing profile when running on Travis
Travis can be very slow at unanticipated times.
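The gating pattern generalizes beyond Travis; a sketch using the generic CI variable (most CI systems, Travis included, export CI=true — the patch itself keys off TRAVIS specifically):

import os
import sys

from hypothesis import HealthCheck, settings

settings.register_profile(
    'tracing', deadline=None, suppress_health_check=[HealthCheck.too_slow])

# Load the lenient profile under a debugger/tracer or on CI, where
# individual examples can run far slower than usual.
if sys.gettrace() is not None or os.environ.get('CI') is not None:
    settings.load_profile('tracing')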
|
stratis-storage_into-dbus-python
|
train
|
f333a1b1143a3591fd6c6ec8e6e329a1dbba17df
|
diff --git a/xdis/marsh.py b/xdis/marsh.py
index <HASH>..<HASH> 100644
--- a/xdis/marsh.py
+++ b/xdis/marsh.py
@@ -97,9 +97,9 @@ class _Marshaller:
self.python_version = python_version
def dump(self, x):
- if isinstance(x, types.CodeType) and PYTHON_VERSION != self.version:
+ if isinstance(x, types.CodeType) and PYTHON_VERSION != self.python_version:
raise RuntimeError("code type passed for version %s but we are running version %s" %
- (PYTHON_VERSION, self.version))
+ (PYTHON_VERSION, self.python_version))
try:
self.dispatch[type(x)](self, x)
except KeyError:
|
Fix incorrect variable name in marshaller dump
|
rocky_python-xdis
|
train
|
d0f7f98d22e1c9be6b4a00025e7bc6ebab42d4ea
|
diff --git a/transport/src/test/java/io/netty/channel/DefaultChannelHandlerInvokerTest.java b/transport/src/test/java/io/netty/channel/DefaultChannelHandlerInvokerTest.java
index <HASH>..<HASH> 100644
--- a/transport/src/test/java/io/netty/channel/DefaultChannelHandlerInvokerTest.java
+++ b/transport/src/test/java/io/netty/channel/DefaultChannelHandlerInvokerTest.java
@@ -22,6 +22,7 @@ import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
+import static org.junit.Assert.*;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@@ -38,19 +39,29 @@ public class DefaultChannelHandlerInvokerTest {
MockitoAnnotations.initMocks(this);
}
- @Test(expected = IllegalArgumentException.class)
+ @Test
public void writeWithInvalidPromiseStillReleasesMessage() {
when(promise.isDone()).thenReturn(true);
DefaultChannelHandlerInvoker invoker = new DefaultChannelHandlerInvoker(ImmediateEventExecutor.INSTANCE);
- invoker.invokeWrite(ctx, msg, promise);
- verify(msg).release();
+ try {
+ invoker.invokeWrite(ctx, msg, promise);
+ } catch (IllegalArgumentException e) {
+ verify(msg).release();
+ return;
+ }
+ fail();
}
- @Test(expected = NullPointerException.class)
+ @Test
public void writeWithNullPromiseStillReleasesMessage() {
when(promise.isDone()).thenReturn(true);
DefaultChannelHandlerInvoker invoker = new DefaultChannelHandlerInvoker(ImmediateEventExecutor.INSTANCE);
- invoker.invokeWrite(ctx, msg, null);
- verify(msg).release();
+ try {
+ invoker.invokeWrite(ctx, msg, null);
+ } catch (NullPointerException e) {
+ verify(msg).release();
+ return;
+ }
+ fail();
}
}
|
e2f<I> unit test cleanup
Motivation:
e2f<I> added unit tests which did not verify the buffer was released as intended.
Modification:
- Unit tests must verify release is called
Result:
Unit tests enforce that ByteBufs are released.
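The same assertion shape in Python, sketched with pytest and unittest.mock (the function under test here is a hypothetical stand-in): the release check runs after the expected exception, and the test fails if no exception occurs.

import pytest
from unittest import mock

def invoke_write(msg, promise):
    # Hypothetical stand-in for the code under test: releases the
    # message, then rejects an already-completed promise.
    msg.release()
    raise ValueError('promise already done')

def test_write_with_invalid_promise_still_releases_message():
    msg = mock.Mock()
    with pytest.raises(ValueError):
        invoke_write(msg, promise=mock.Mock())
    # Verified outside the raising call, so the check itself cannot be
    # skipped by the exception.
    msg.release.assert_called_once()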
|
netty_netty
|
train
|
6c9ac5dee4341179656526be38544cd6f9eab6fa
|
diff --git a/tests/server/public/index.php b/tests/server/public/index.php
index <HASH>..<HASH> 100644
--- a/tests/server/public/index.php
+++ b/tests/server/public/index.php
@@ -106,13 +106,13 @@ $app->router->post('/multi-part', function () {
], 200);
});
-$app->get('/set-cookie', function() {
+$app->router->get('/set-cookie', function() {
return response(null, 200)->withCookie(
new \Symfony\Component\HttpFoundation\Cookie('foo', 'bar')
);
});
-$app->get('/set-another-cookie', function() {
+$app->router->get('/set-another-cookie', function() {
return response(null, 200)->withCookie(
new \Symfony\Component\HttpFoundation\Cookie('baz', 'qux')
);
|
fix test server as done in 9c<I>e<I>f<I>c1f<I>a5df2aaf<I>b6d0a9ea8a
|
kitetail_zttp
|
train
|
d5685ab96a29c73e4ed1c3b9795a353b27c3a3c5
|
diff --git a/src/test/java/org/la4j/matrix/AbstractMatrixTest.java b/src/test/java/org/la4j/matrix/AbstractMatrixTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/org/la4j/matrix/AbstractMatrixTest.java
+++ b/src/test/java/org/la4j/matrix/AbstractMatrixTest.java
@@ -26,7 +26,6 @@
package org.la4j.matrix;
-import org.junit.Ignore;
import org.junit.Test;
import org.la4j.factory.Factory;
import org.la4j.vector.Vector;
@@ -2083,22 +2082,20 @@ public abstract class AbstractMatrixTest {
}
@Test(expected = IndexOutOfBoundsException.class)
- @Ignore
public void testSelect1() {
// Throw exception when row indices are invalid
Matrix a = matrixA();
- int[] rowInd = new int[]{3, 4, 10};
- int[] colInd = new int[]{0, 1, 2}; // all columns
+ int[] rowInd = new int[]{3, 4, 10}; // 10 is too large of a row index
+ int[] colInd = new int[]{0, 1, 2};
a.select(rowInd, colInd);
}
@Test(expected = IndexOutOfBoundsException.class)
- @Ignore
public void testSelect2() {
// Throw exception when column indices are invalid
Matrix a = matrixA();
int[] rowInd = new int[]{0, 1, 2};
- int[] colInd = new int[]{-1, 1, 2}; // all columns
+ int[] colInd = new int[]{-1, 1, 2}; // -1 is a negative column index
a.select(rowInd, colInd);
}
|
Fixed tests around index selection. Fixes #<I>.
|
vkostyukov_la4j
|
train
|
131f941d8fac0afb98fa44f41d96237aaf702957
|
diff --git a/src/Config.php b/src/Config.php
index <HASH>..<HASH> 100644
--- a/src/Config.php
+++ b/src/Config.php
@@ -861,7 +861,7 @@ class Config
/**
* Sanity check for slashes in in taxonomy slugs.
*/
- public function checkTaxonomy()
+ private function checkTaxonomy()
{
foreach ($this->data['taxonomy'] as $key => $taxonomy) {
if (empty($taxonomy['options']) || !is_array($taxonomy['options'])) {
@@ -1005,6 +1005,7 @@ class Config
}
}
}
+ $this->checkTaxonomy();
}
/**
diff --git a/src/Controller/Backend/BackendBase.php b/src/Controller/Backend/BackendBase.php
index <HASH>..<HASH> 100644
--- a/src/Controller/Backend/BackendBase.php
+++ b/src/Controller/Backend/BackendBase.php
@@ -71,7 +71,6 @@ abstract class BackendBase extends Base
// Sanity checks for doubles in in contenttypes. This has to be done
// here, because the 'translator' classes need to be initialised.
$app['config']->checkConfig();
- $app['config']->checkTaxonomy();
// If we had to reload the config earlier on because we detected a
// version change, display a notice.
|
Move checkTaxonomy to the end of checkConfig
|
bolt_bolt
|
train
|
4069eea28aa6b265627a45fd374d3a46bad8bcb4
|
diff --git a/context.go b/context.go
index <HASH>..<HASH> 100644
--- a/context.go
+++ b/context.go
@@ -4,6 +4,7 @@ import (
"fmt"
"net/http"
"net/url"
+ "strconv"
"golang.org/x/net/context"
)
@@ -151,17 +152,20 @@ func (r *ResponseData) Written() bool {
// encoders. It uses the default service encoder if no match is found.
func (r *ResponseData) Send(ctx context.Context, code int, body interface{}) error {
r.WriteHeader(code)
+ go IncrCounter([]string{"goa", "response", "manual", strconv.Itoa(code)}, 1.0)
return RequestService(ctx).EncodeResponse(ctx, body)
}
// BadRequest sends a HTTP response with status code 400 and the given error as body.
func (r *ResponseData) BadRequest(ctx context.Context, err *BadRequestError) error {
+ go IncrCounter([]string{"goa", "response", "400"}, 1.0)
return r.Send(ctx, 400, err.Error())
}
// Bug sends a HTTP response with status code 500 and the given body.
// The body can be set using a format and substituted values a la fmt.Printf.
func (r *ResponseData) Bug(ctx context.Context, format string, a ...interface{}) error {
+ go IncrCounter([]string{"goa", "response", "bug"}, 1.0)
body := fmt.Sprintf(format, a...)
return r.Send(ctx, 500, body)
}
|
add some basic incrementing counters in response
|
goadesign_goa
|
train
|
2260e3b06c1905e560f2c01b661e5745a19a7cef
|
diff --git a/gosu-core/src/main/java/gw/internal/gosu/parser/GosuParser.java b/gosu-core/src/main/java/gw/internal/gosu/parser/GosuParser.java
index <HASH>..<HASH> 100644
--- a/gosu-core/src/main/java/gw/internal/gosu/parser/GosuParser.java
+++ b/gosu-core/src/main/java/gw/internal/gosu/parser/GosuParser.java
@@ -13590,7 +13590,7 @@ public final class GosuParser extends ParserBase implements IGosuParser
if( !dfs.isOverride() )
{
boolean bIsConstructorName = gsClass != null && gsClass.getRelativeName().equals( dfs.getDisplayName() );
- warn( element, bIsConstructorName, Res.MSG_MISSING_OVERRIDE_MODIFIER, dfsExisting.getName(), dfsExisting.getScriptPart().getContainingTypeName() );
+ warn( element, bIsConstructorName || element instanceof VarStatement, Res.MSG_MISSING_OVERRIDE_MODIFIER, dfsExisting.getName(), dfsExisting.getScriptPart().getContainingTypeName() );
if( !bIsConstructorName )
{
// Set the override modifier when the modifier is missing
|
don't warn about missing override for shorthand property overrides
|
gosu-lang_gosu-lang
|
train
|
a722b42eed3148c47c5d9c65114c01817041a408
|
diff --git a/lib/coffee_script/command_line.rb b/lib/coffee_script/command_line.rb
index <HASH>..<HASH> 100644
--- a/lib/coffee_script/command_line.rb
+++ b/lib/coffee_script/command_line.rb
@@ -20,7 +20,8 @@ Usage:
WATCH_INTERVAL = 0.5
# Command to execute in Narwhal
- LAUNCHER = "narwhal -e 'require(\"coffee-script\").run(system.args);'"
+ PACKAGE = File.dirname(File.dirname(File.dirname(__FILE__)))
+ LAUNCHER = "narwhal -p #{PACKAGE} -e 'require(\"coffee-script\").run(system.args);'"
# Run the CommandLine off the contents of ARGV.
def initialize
|
Add the package path on the command line in case it's not installed in a Narwhal packages path.
|
gkovacs_livescript-async
|
train
|
d8cdd622dae24e76620901aa54de7a32c22cc01e
|
diff --git a/lib/lotus/views/default.rb b/lib/lotus/views/default.rb
index <HASH>..<HASH> 100644
--- a/lib/lotus/views/default.rb
+++ b/lib/lotus/views/default.rb
@@ -8,6 +8,7 @@ module Lotus
# @api private
class Default
include Lotus::View
+
configuration.reset!
layout nil
@@ -20,7 +21,7 @@ module Lotus
def self.render(root, template_name, context)
format = context[:format]
- template = DefaultTemplateFinder.new(root, template_name, format).find
+ template = DefaultTemplateFinder.new(self, root, template_name, format).find
if template
new(template, context).render
diff --git a/lib/lotus/views/default_template_finder.rb b/lib/lotus/views/default_template_finder.rb
index <HASH>..<HASH> 100644
--- a/lib/lotus/views/default_template_finder.rb
+++ b/lib/lotus/views/default_template_finder.rb
@@ -5,9 +5,10 @@ module Lotus
#
# @since 0.2.0
# @api private
- def initialize(root, template_name, format)
- @root = root
- @options = { template: template_name, format: format }
+ def initialize(view, root, template_name, format)
+ @view = view
+ @root = root
+ @options = { template: template_name, format: format }
end
private
|
Ensure the template finder is built with a view
The new version of lotus/view introduces encoding
configuration, which in turn requires the template
finder to always have a view
|
hanami_hanami
|
train
|
571a664ef4f3b4373f232173b2707b46d10b44ff
|
diff --git a/pandas/core/frame.py b/pandas/core/frame.py
index <HASH>..<HASH> 100644
--- a/pandas/core/frame.py
+++ b/pandas/core/frame.py
@@ -2317,6 +2317,11 @@ class DataFrame(NDFrame):
level_index = axis_index.levels[level]
+ if len(self) == 0:
+ return DataFrame(np.zeros((len(level_index),
+ len(self.columns)), dtype=int),
+ index=level_index, columns=self.columns)
+
n = len(level_index)
locs = axis_index.labels[level].searchsorted(np.arange(n))
diff --git a/pandas/core/series.py b/pandas/core/series.py
index <HASH>..<HASH> 100644
--- a/pandas/core/series.py
+++ b/pandas/core/series.py
@@ -493,6 +493,9 @@ copy : boolean, default False
level_index = obj.index.levels[level]
+ if len(self) == 0:
+ return Series(0, index=level_index)
+
n = len(level_index)
locs = obj.index.labels[level].searchsorted(np.arange(n))
diff --git a/pandas/tests/test_multilevel.py b/pandas/tests/test_multilevel.py
index <HASH>..<HASH> 100644
--- a/pandas/tests/test_multilevel.py
+++ b/pandas/tests/test_multilevel.py
@@ -252,6 +252,18 @@ class TestMultiLevel(unittest.TestCase):
df = tm.makeTimeDataFrame()
self.assertRaises(Exception, df.count, level=0)
+ def test_count_level_corner(self):
+ s = self.frame['A'][:0]
+ result = s.count(level=0)
+ expected = Series(0, index=s.index.levels[0])
+ assert_series_equal(result, expected)
+
+ df = self.frame[:0]
+ result = df.count(level=0)
+ expected = DataFrame({}, index=s.index.levels[0],
+ columns=df.columns).fillna(0).astype(int)
+ assert_frame_equal(result, expected)
+
def test_unstack(self):
# just check that it works for now
unstacked = self.ymd.unstack()
diff --git a/scripts/bench_join.py b/scripts/bench_join.py
index <HASH>..<HASH> 100644
--- a/scripts/bench_join.py
+++ b/scripts/bench_join.py
@@ -86,7 +86,6 @@ def do_outer_join_multi(a, b, av, bv):
_, bk = bv.shape
result_index, rindexer, lindexer = lib.outer_join_indexer(a, b)
result = np.empty((ak + bk, len(result_index)), dtype=np.float64)
-
lib.take_axis0(av, rindexer, out=result[:ak].T)
lib.take_axis0(bv, lindexer, out=result[ak:].T)
return result_index, result
|
BUG: count_level did not handle the zero-length data case, which caused a segfault with NumPy < <I> for some users.
Fixes GH #<I>
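A reproduction sketch using the era-appropriate API from the patch (Series.count(level=...) has since been removed from pandas, so this mirrors the old interface rather than current pandas):

import pandas as pd

idx = pd.MultiIndex.from_arrays(
    [['x', 'x', 'y'], [1, 2, 3]], names=('outer', 'inner'))
s = pd.Series([1.0, 2.0, 3.0], index=idx)

# A zero-length slice keeps the index levels around...
empty = s[:0]

# ...so counting per level should return 0 for each outer label
# instead of crashing on the empty data.
print(empty.count(level=0))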
|
pandas-dev_pandas
|
train
|
c38d33df45ef4b958d735e2d2f756e2f41f9838b
|
diff --git a/block_tag.go b/block_tag.go
index <HASH>..<HASH> 100644
--- a/block_tag.go
+++ b/block_tag.go
@@ -3,6 +3,7 @@ package tags
import (
"bytes"
"fmt"
+ "html/template"
)
type BlockTag struct {
@@ -22,6 +23,10 @@ func (b BlockTag) String() string {
return bb.String()
}
+func (b BlockTag) HTML() template.HTML {
+ return template.HTML(b.String())
+}
+
func NewBlockTag(name string, opts Options) *BlockTag {
tag := &BlockTag{
Tag: New(name, opts),
diff --git a/form/form.go b/form/form.go
index <HASH>..<HASH> 100644
--- a/form/form.go
+++ b/form/form.go
@@ -3,6 +3,7 @@ package form
import (
"bytes"
"fmt"
+ "html/template"
"strings"
"github.com/markbates/tags"
@@ -27,6 +28,10 @@ func (f Form) String() string {
return f.BlockTag.String()
}
+func (f Form) HTML() template.HTML {
+ return template.HTML(f.String())
+}
+
func New(opts tags.Options) *Form {
if opts["method"] == nil {
opts["method"] = "POST"
diff --git a/form/select_tag.go b/form/select_tag.go
index <HASH>..<HASH> 100644
--- a/form/select_tag.go
+++ b/form/select_tag.go
@@ -1,6 +1,7 @@
package form
import (
+ "html/template"
"reflect"
"strings"
@@ -23,6 +24,10 @@ func (s SelectTag) String() string {
return s.BlockTag.String()
}
+func (s SelectTag) HTML() template.HTML {
+ return template.HTML(s.String())
+}
+
func NewSelectTag(opts tags.Options) *SelectTag {
so := parseSelectOptions(opts)
selected := opts["selected"]
diff --git a/tag.go b/tag.go
index <HASH>..<HASH> 100644
--- a/tag.go
+++ b/tag.go
@@ -30,6 +30,10 @@ func (t Tag) String() string {
return bb.String()
}
+func (t Tag) HTML() template.HTML {
+ return template.HTML(t.String())
+}
+
func New(name string, opts Options) *Tag {
tag := &Tag{
Name: name,
|
added support for the velvet.HTMLer interface
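Python template engines use an analogous marker: objects exposing __html__ are treated as pre-escaped markup by MarkupSafe-aware engines such as Jinja2, much as velvet's HTMLer keys off an HTML() method. A sketch:

class Tag:
    def __init__(self, name):
        self.name = name

    def __str__(self):
        return '<%s></%s>' % (self.name, self.name)

    def __html__(self):
        # Signals that this string is already safe HTML and must not
        # be escaped again.
        return str(self)

print(Tag('form').__html__())  # <form></form>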
|
gobuffalo_tags
|
train
|
ac818454ab9bee4b762c035103718e0e95ca39c3
|
diff --git a/mythril/laser/ethereum/instructions.py b/mythril/laser/ethereum/instructions.py
index <HASH>..<HASH> 100644
--- a/mythril/laser/ethereum/instructions.py
+++ b/mythril/laser/ethereum/instructions.py
@@ -385,20 +385,20 @@ class Instruction:
dstart_sym = False
try:
dstart = util.get_concrete_int(op1)
- dstart_sym = True
# FIXME: broad exception catch
except:
logging.debug("Unsupported symbolic calldata offset in CALLDATACOPY")
dstart = simplify(op1)
+ dstart_sym = True
size_sym = False
try:
size = util.get_concrete_int(op2)
- size_sym = True
# FIXME: broad exception catch
except:
logging.debug("Unsupported symbolic size in CALLDATACOPY")
size = simplify(op2)
+ size_sym = True
if dstart_sym or size_sym:
state.mem_extend(mstart, 1)
|
Correcting the symbolic variable flag assignment for CALLDATACOPY
|
ConsenSys_mythril-classic
|
train
|
263fa3f9973b9ab999a79d6b66cd86880a3caa0a
|
diff --git a/src/Queryyetsimple/Di/Container.php b/src/Queryyetsimple/Di/Container.php
index <HASH>..<HASH> 100644
--- a/src/Queryyetsimple/Di/Container.php
+++ b/src/Queryyetsimple/Di/Container.php
@@ -213,7 +213,7 @@ class Container implements IContainer, ArrayAccess
}
$result = [];
- $instance = ( array ) $this->groups[$group];
+ $instance = (array) $this->groups[$group];
foreach ($instance as $item) {
$result[$item] = $this->make($item, $args);
}
diff --git a/src/Queryyetsimple/Http/RedirectResponse.php b/src/Queryyetsimple/Http/RedirectResponse.php
index <HASH>..<HASH> 100644
--- a/src/Queryyetsimple/Http/RedirectResponse.php
+++ b/src/Queryyetsimple/Http/RedirectResponse.php
@@ -101,7 +101,13 @@ class RedirectResponse extends Response
*/
public function with($key, $value = null)
{
- $key = is_array($key) ? $key : [$key => $value];
+ if ($this->checkTControl()) {
+ return $this;
+ }
+
+ $key = is_array($key) ? $key : [
+ $key => $value
+ ];
foreach ($key as $k => $v) {
$this->session->flash($k, $v);
@@ -111,6 +117,78 @@ class RedirectResponse extends Response
}
/**
+ * Flash input data
+ *
+ * @param array $input
+ * @return $this
+ */
+ public function withInput(array $input = null)
+ {
+ if ($this->checkTControl()) {
+ return $this;
+ }
+
+ $input = $input ?: $this->request->input();
+
+ $inputs = array_merge($this->session->getFlash('inputs', []), $input);
+
+ $this->session->flash('inputs', $inputs);
+
+ return $this;
+ }
+
+ /**
+ * Flash input data
+ *
+ * @return $this
+ */
+ public function onlyInput()
+ {
+ $args = func_get_args();
+ if (! $args) {
+ throw new InvalidArgumentException('Method onlyInput needs arguments.');
+ }
+
+ return $this->withInput($this->request->only($args));
+ }
+
+ /**
+ * Flash input data
+ *
+ * @return $this
+ */
+ public function exceptInput()
+ {
+ $args = func_get_args();
+ if (! $args) {
+ throw new InvalidArgumentException('Method exceptInput needs arguments.');
+ }
+
+ return $this->withInput($this->request->except($args));
+ }
+
+ /**
+ * Flash error messages
+ *
+ * @param mixed $value
+ * @param string $key
+ * @return $this
+ */
+ public function withErrors($value, string $key = 'default')
+ {
+ if ($this->checkTControl()) {
+ return $this;
+ }
+
+ $errors = $this->session->getFlash('errors', []);
+ $errors[$key] = $value;
+
+ $this->session->flash('errors', $errors);
+
+ return $this;
+ }
+
+ /**
* Get the target URL
*
* @return string
diff --git a/src/Queryyetsimple/Http/Response.php b/src/Queryyetsimple/Http/Response.php
index <HASH>..<HASH> 100644
--- a/src/Queryyetsimple/Http/Response.php
+++ b/src/Queryyetsimple/Http/Response.php
@@ -369,6 +369,19 @@ class Response implements IResponse
}
/**
+ * Alias for setting a COOKIE
+ *
+ * @param string $name
+ * @param string $value
+ * @param array $option
+ * @return $this
+ */
+ public function cookie($name, $value = '', array $option = [])
+ {
+ return $this->setCookie($name, $value, $option);
+ }
+
+ /**
* Set a COOKIE
*
* @param string $name
@@ -396,16 +409,17 @@ class Response implements IResponse
* Set COOKIEs in batch
*
* @param array $cookies
+ * @param array $option
* @return $this
*/
- public function withCookies(array $cookies)
+ public function withCookies(array $cookies, array $option = [])
{
if ($this->checkTControl()) {
return $this;
}
- foreach ($cookies as $value) {
- call_user_func_array([$this, 'setCookie'], $value);
+ foreach ($cookies as $key => $value) {
+ $this->setCookie($key, $value, $option);
}
return $this;
|
RedirectResponse is done
|
hunzhiwange_framework
|
train
|