hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
6d1f779e2d2265c3ecf2b7fed8187a4686dc196b
|
diff --git a/example/main.py b/example/main.py
index <HASH>..<HASH> 100644
--- a/example/main.py
+++ b/example/main.py
@@ -50,6 +50,7 @@ class Login(webapp2.RequestHandler):
result = self.adapter.login(provider_name,
callback=self.callback,
+ report_errors=False,
oi_identifier=self.request.params.get('id'))
if result:
diff --git a/simpleauth2/__init__.py b/simpleauth2/__init__.py
index <HASH>..<HASH> 100644
--- a/simpleauth2/__init__.py
+++ b/simpleauth2/__init__.py
@@ -44,6 +44,13 @@ def login(adapter, provider_name, callback=None, report_errors=True, scope=[], *
return provider.login(**kwargs)
+class Counter(object):
+ count = 0
+ def __call__(self):
+ self.count += 1
+ return self.count
+
+
def escape(s):
"""Escape a URL including any /."""
return urllib.quote(s.encode('utf-8'), safe='~')
@@ -153,8 +160,10 @@ class Credentials(object):
def serialize(self):
- # short_name is the first item by all providers
+ # short_name will be the first item by all providers
short_name = self.provider_short_name
+ if short_name is None:
+ raise exceptions.ConfigError('The provider config must have a "short_name" key set to a unique value to be able to serialize credentials!')
rest = self.get_provider_class().credentials_to_tuple(self)
result = (short_name, ) + rest
diff --git a/simpleauth2/providers/openid.py b/simpleauth2/providers/openid.py
index <HASH>..<HASH> 100644
--- a/simpleauth2/providers/openid.py
+++ b/simpleauth2/providers/openid.py
@@ -7,6 +7,12 @@ from simpleauth2.exceptions import FailureError, DeniedError, OpenIDError
import datetime
import logging
import simpleauth2
+from openid import oidutil
+
+
+# supress openid logging to stderr
+oidutil.log = lambda message, level=0: None
+
REALM_HTML = \
"""
|
Supressed python-openid logging to stderr.
|
authomatic_authomatic
|
train
|
74f6ccea201d778e020f8c758013d90ea8a5c8a1
|
diff --git a/activesupport/lib/active_support/notifications.rb b/activesupport/lib/active_support/notifications.rb
index <HASH>..<HASH> 100644
--- a/activesupport/lib/active_support/notifications.rb
+++ b/activesupport/lib/active_support/notifications.rb
@@ -44,11 +44,16 @@ module ActiveSupport
class << self
attr_writer :notifier
- delegate :publish, :subscribe, :instrument, :to => :notifier
+ delegate :publish, :subscribe, :to => :notifier
+ delegate :instrument, :to => :instrumenter
def notifier
@notifier ||= Notifier.new
end
+
+ def instrumenter
+ Thread.current[:"instrumentation_#{notifier.object_id}"] ||= Instrumenter.new(notifier)
+ end
end
class Notifier
@@ -67,13 +72,6 @@ module ActiveSupport
def wait
@queue.wait
end
-
- delegate :instrument, :to => :current_instrumenter
-
- private
- def current_instrumenter
- Thread.current[:"instrumentation_#{object_id}"] ||= Notifications::Instrumenter.new(self)
- end
end
end
end
diff --git a/activesupport/test/notifications_test.rb b/activesupport/test/notifications_test.rb
index <HASH>..<HASH> 100644
--- a/activesupport/test/notifications_test.rb
+++ b/activesupport/test/notifications_test.rb
@@ -5,7 +5,8 @@ module Notifications
def setup
Thread.abort_on_exception = true
- @notifier = ActiveSupport::Notifications::Notifier.new
+ ActiveSupport::Notifications.notifier = nil
+ @notifier = ActiveSupport::Notifications.notifier
@events = []
@notifier.subscribe { |*args| @events << event(*args) }
end
@@ -82,17 +83,19 @@ module Notifications
end
class InstrumentationTest < TestCase
+ delegate :instrument, :to => ActiveSupport::Notifications
+
def test_instrument_returns_block_result
- assert_equal 2, @notifier.instrument(:awesome) { 1 + 1 }
+ assert_equal 2, instrument(:awesome) { 1 + 1 }
end
def test_instrumenter_exposes_its_id
- assert_equal 20, ActiveSupport::Notifications::Instrumenter.new(@notifier).id.size
+ assert_equal 20, ActiveSupport::Notifications.instrumenter.id.size
end
def test_nested_events_can_be_instrumented
- @notifier.instrument(:awesome, :payload => "notifications") do
- @notifier.instrument(:wot, :payload => "child") do
+ instrument(:awesome, :payload => "notifications") do
+ instrument(:wot, :payload => "child") do
1 + 1
end
@@ -112,7 +115,7 @@ module Notifications
def test_instrument_publishes_when_exception_is_raised
begin
- @notifier.instrument(:awesome, :payload => "notifications") do
+ instrument(:awesome, :payload => "notifications") do
raise "OMG"
end
flunk
@@ -127,7 +130,7 @@ module Notifications
end
def test_event_is_pushed_even_without_block
- @notifier.instrument(:awesome, :payload => "notifications")
+ instrument(:awesome, :payload => "notifications")
drain
assert_equal 1, @events.size
|
instrumenter should be accessible from ActiveSupport::Notifications.
|
rails_rails
|
train
|
c1c107ffda62dd978aa483070ced76c8c959649b
|
diff --git a/lib/mongo/cluster.rb b/lib/mongo/cluster.rb
index <HASH>..<HASH> 100644
--- a/lib/mongo/cluster.rb
+++ b/lib/mongo/cluster.rb
@@ -168,14 +168,16 @@ module Mongo
if server_selection_timeout < 3
server_selection_timeout = 3
end
- deadline = Time.now + server_selection_timeout
+ start_time = Time.now
+ deadline = start_time + server_selection_timeout
# Wait for the first scan of each server to complete, for
# backwards compatibility.
- # If any servers are discovered during this SDAM round we do NOT
- # wait for newly discovered servers to be queried.
+ # If any servers are discovered during this SDAM round we are going to
+ # wait for these servers to also be queried, and so on, up to the
+ # server selection timeout or the 3 second minimum.
loop do
servers = servers_list.dup
- if servers.all? { |server| server.last_scan_completed_at }
+ if servers.all? { |server| server.description.last_update_time > start_time }
break
end
if (time_remaining = deadline - Time.now) <= 0
diff --git a/lib/mongo/server.rb b/lib/mongo/server.rb
index <HASH>..<HASH> 100644
--- a/lib/mongo/server.rb
+++ b/lib/mongo/server.rb
@@ -93,10 +93,6 @@ module Mongo
:compressor
alias :heartbeat_frequency_seconds :heartbeat_frequency
- # @api private
- def_delegators :monitor,
- :last_scan_completed_at
-
# Delegate convenience methods to the monitor description.
def_delegators :description,
:arbiter?,
diff --git a/lib/mongo/server/monitor.rb b/lib/mongo/server/monitor.rb
index <HASH>..<HASH> 100644
--- a/lib/mongo/server/monitor.rb
+++ b/lib/mongo/server/monitor.rb
@@ -87,13 +87,6 @@ module Mongo
# @since 2.4.0
attr_reader :last_scan
- # @return [ Time ] last_scan_completed_at The time when the last server
- # scan completed.
- #
- # @since 2.7.0
- # @api private
- attr_reader :last_scan_completed_at
-
# The compressor is determined during the handshake, so it must be an attribute
# of the connection.
def_delegators :connection, :compressor
@@ -153,7 +146,6 @@ module Mongo
def scan!
throttle_scan_frequency!
result = ismaster
- @last_scan_completed_at = Time.now
new_description = Description.new(description.address, result,
@round_trip_time_averager.average_round_trip_time)
publish(Event::DESCRIPTION_CHANGED, description, new_description)
|
RUBY-<I> Use server description's last update time when waiting for first sdam round (#<I>)
|
mongodb_mongo-ruby-driver
|
train
|
b0efd685a959265fa3fb7c9a2032bc01e2a494fb
|
diff --git a/acme/acme.go b/acme/acme.go
index <HASH>..<HASH> 100644
--- a/acme/acme.go
+++ b/acme/acme.go
@@ -119,11 +119,12 @@ func (a *ACME) CreateClusterConfig(leadership *cluster.Leadership, tlsConfig *tl
}
datastore, err := cluster.NewDataStore(
+ leadership.Pool.Ctx(),
staert.KvSource{
Store: leadership.Store,
Prefix: a.Storage,
},
- leadership.Pool.Ctx(), &Account{},
+ &Account{},
listener)
if err != nil {
return err
diff --git a/acme/localStore.go b/acme/localStore.go
index <HASH>..<HASH> 100644
--- a/acme/localStore.go
+++ b/acme/localStore.go
@@ -68,7 +68,7 @@ func (t *localTransaction) Commit(object cluster.Object) error {
t.LocalStore.account = object.(*Account)
defer t.storageLock.Unlock()
if t.dirty {
- return fmt.Errorf("Transaction already used. Please begin a new one.")
+ return fmt.Errorf("transaction already used, please begin a new one")
}
// write account to file
diff --git a/cluster/datastore.go b/cluster/datastore.go
index <HASH>..<HASH> 100644
--- a/cluster/datastore.go
+++ b/cluster/datastore.go
@@ -56,7 +56,7 @@ type Datastore struct {
}
// NewDataStore creates a Datastore
-func NewDataStore(kvSource staert.KvSource, ctx context.Context, object Object, listener Listener) (*Datastore, error) {
+func NewDataStore(ctx context.Context, kvSource staert.KvSource, object Object, listener Listener) (*Datastore, error) {
datastore := Datastore{
kv: kvSource,
ctx: ctx,
@@ -230,7 +230,7 @@ func (s *datastoreTransaction) Commit(object Object) error {
s.localLock.Lock()
defer s.localLock.Unlock()
if s.dirty {
- return fmt.Errorf("Transaction already used. Please begin a new one.")
+ return fmt.Errorf("transaction already used, please begin a new one")
}
s.Datastore.meta.object = object
err := s.Datastore.meta.Marshall()
diff --git a/cluster/leadership.go b/cluster/leadership.go
index <HASH>..<HASH> 100644
--- a/cluster/leadership.go
+++ b/cluster/leadership.go
@@ -39,7 +39,7 @@ func (l *Leadership) Participate(pool *safe.Pool) {
defer log.Debugf("Node %s no more running for election", l.Cluster.Node)
backOff := backoff.NewExponentialBackOff()
operation := func() error {
- return l.run(l.candidate, ctx)
+ return l.run(ctx, l.candidate)
}
notify := func(err error, time time.Duration) {
@@ -63,7 +63,7 @@ func (l *Leadership) Resign() {
log.Infof("Node %s resigned", l.Cluster.Node)
}
-func (l *Leadership) run(candidate *leadership.Candidate, ctx context.Context) error {
+func (l *Leadership) run(ctx context.Context, candidate *leadership.Candidate) error {
electedCh, errCh := candidate.RunForElection()
for {
select {
diff --git a/configuration.go b/configuration.go
index <HASH>..<HASH> 100644
--- a/configuration.go
+++ b/configuration.go
@@ -233,10 +233,10 @@ func (certs *Certificates) CreateTLSConfig() (*tls.Config, error) {
if errKey == nil {
isAPath = true
} else {
- return nil, fmt.Errorf("Bad TLS Certificate KeyFile format. Expected a path.")
+ return nil, fmt.Errorf("bad TLS Certificate KeyFile format, expected a path")
}
} else if errKey == nil {
- return nil, fmt.Errorf("Bad TLS Certificate KeyFile format. Expected a path.")
+ return nil, fmt.Errorf("bad TLS Certificate KeyFile format, expected a path")
}
cert := tls.Certificate{}
diff --git a/integration/consul_test.go b/integration/consul_test.go
index <HASH>..<HASH> 100644
--- a/integration/consul_test.go
+++ b/integration/consul_test.go
@@ -446,9 +446,9 @@ func (s *ConsulSuite) TestDatastore(c *check.C) {
c.Assert(err, checker.IsNil)
ctx := context.Background()
- datastore1, err := cluster.NewDataStore(*kvSource, ctx, &TestStruct{}, nil)
+ datastore1, err := cluster.NewDataStore(ctx, *kvSource, &TestStruct{}, nil)
c.Assert(err, checker.IsNil)
- datastore2, err := cluster.NewDataStore(*kvSource, ctx, &TestStruct{}, nil)
+ datastore2, err := cluster.NewDataStore(ctx, *kvSource, &TestStruct{}, nil)
c.Assert(err, checker.IsNil)
setter1, _, err := datastore1.Begin()
|
Fix golint
recent additions to golint mean that a number of files cause the
build to start failing if they are edited (we only run against changed
files)
This fixes all the errors in the repo so things don't unexpectedly start
failing for people making PRs
|
containous_traefik
|
train
|
d26466e8d811216ba1afc6d1802e889f32fb6cf4
|
diff --git a/lib/fluent/plugin/out_elasticsearch.rb b/lib/fluent/plugin/out_elasticsearch.rb
index <HASH>..<HASH> 100644
--- a/lib/fluent/plugin/out_elasticsearch.rb
+++ b/lib/fluent/plugin/out_elasticsearch.rb
@@ -68,7 +68,11 @@ class Fluent::ElasticsearchOutput < Fluent::BufferedOutput
chunk.msgpack_each do |tag, time, record|
if @logstash_format
- record.merge!({"@timestamp" => Time.at(time).to_datetime.to_s}) unless record.has_key?("@timestamp")
+ if record.has_key?("@timestamp")
+ time = Time.parse record["@timestamp"]
+ else
+ record.merge!({"@timestamp" => Time.at(time).to_datetime.to_s})
+ end
if @utc_index
target_index = "#{@logstash_prefix}-#{Time.at(time).getutc.strftime("#{@logstash_dateformat}")}"
else
|
use @timestamp for index
|
uken_fluent-plugin-elasticsearch
|
train
|
95ba631785316293607e80bd74ac834288c782ca
|
diff --git a/nodeconductor/jira/serializers.py b/nodeconductor/jira/serializers.py
index <HASH>..<HASH> 100644
--- a/nodeconductor/jira/serializers.py
+++ b/nodeconductor/jira/serializers.py
@@ -39,10 +39,12 @@ class IssueSerializer(serializers.Serializer):
class CommentSerializer(serializers.Serializer):
+ author = UserSerializer(read_only=True)
created = serializers.DateTimeField(read_only=True)
body = serializers.CharField()
- AUTHOR_RE = re.compile("Comment posted by user ([\w.@+-]+)")
+ AUTHOR_RE = re.compile("Comment posted by user ([\w.@+-]+) \(([0-9a-z]{32})\)")
+ AUTHOR_TEMPLATE = "Comment posted by user {username} ({uuid})\n{body}"
def save(self, issue):
self.issue = issue
@@ -52,22 +54,34 @@ class CommentSerializer(serializers.Serializer):
return JiraClient().comments.create(self.issue, self.serialize_body())
def to_representation(self, obj):
+ """
+ Try to extract injected author information.
+ Use original author otherwise.
+ """
data = super(CommentSerializer, self).to_representation(obj)
author, body = self.parse_body(data['body'])
- data['author'] = {'displayName': author}
+ data['author'] = author or data['author']
data['body'] = body
return data
def serialize_body(self):
+ """
+ Inject author's name and UUID into comment's body
+ """
body = self.validated_data['body']
- author = self.context['request'].user.username
- return "Comment posted by user {}\n{}".format(author, body)
+ user = self.context['request'].user
+ return self.AUTHOR_TEMPLATE.format(username=user.username, uuid=user.uuid.hex, body=body)
def parse_body(self, body):
+ """
+ Extract author's name and UUID from comment's body
+ """
match = re.match(self.AUTHOR_RE, body)
if match:
- author = match.group(1)
- body = body[match.end(1) + 1:]
+ username = match.group(1)
+ uuid = match.group(2)
+ body = body[match.end(2) + 2:]
+ author = {'displayName': username, 'uuid': uuid}
return author, body
else:
- return "User", body
+ return None, body
diff --git a/nodeconductor/jira/tests.py b/nodeconductor/jira/tests.py
index <HASH>..<HASH> 100644
--- a/nodeconductor/jira/tests.py
+++ b/nodeconductor/jira/tests.py
@@ -78,14 +78,16 @@ class JiraTest(test.APITransactionTestCase):
class JiraCommentAuthorSerializerTest(unittest.TestCase):
- def test_serialization(self):
- author = "Walter"
+ def test_parsing(self):
+ username = "Walter"
+ uuid = '1c3323fc4ae44120b57ec40dea1be6e6'
body = "Hello, world!"
- comment = {"body": "Comment posted by user {}\n{}".format(author, body)}
+ comment = {"body": "Comment posted by user {} ({})\n{}".format(username, uuid, body)}
expected = {
'author': {
- 'displayName': author
+ 'displayName': username,
+ 'uuid': uuid
},
'body': body
}
|
Inject author's name and UUID into comment's body (SAAS-<I>). Try to extract injected author information. Use original author otherwise.
|
opennode_waldur-core
|
train
|
22002f3b0c1419530b2d81f9ede6f70412b5b23d
|
diff --git a/juju/model.py b/juju/model.py
index <HASH>..<HASH> 100644
--- a/juju/model.py
+++ b/juju/model.py
@@ -304,8 +304,8 @@ class ModelEntity:
# Allow the overriding of entity names from the type instead of from
# the class name. Useful because Model and ModelInfo clash and we really
# want ModelInfo to be called Model.
- if hasattr(self.__class__, "entity_name") and callable(self.__class__.entity_name):
- return self.__class__.entity_name()
+ if hasattr(self.__class__, "type_name_override") and callable(self.__class__.type_name_override):
+ return self.__class__.type_name_override()
def first_lower(s):
if len(s) == 0:
@@ -2300,5 +2300,5 @@ class ModelInfo(ModelEntity):
return tag.model(self.uuid)
@staticmethod
- def entity_name():
+ def type_name_override():
return "model"
|
Type name override
Improve the name of the method that allows us to override the type name.
Offically we would like to call ModelInfo - Model, but Model already
exists and I don't want Model to also become a type delta. Model is
both a entity for state and an entity for connections and to become an
entity for deltas seems like a bit too much.
|
juju_python-libjuju
|
train
|
ba2c558b728ebc8a79acbf61fe344569f7225eb2
|
diff --git a/src/ol/format/IIIFInfo.js b/src/ol/format/IIIFInfo.js
index <HASH>..<HASH> 100644
--- a/src/ol/format/IIIFInfo.js
+++ b/src/ol/format/IIIFInfo.js
@@ -22,6 +22,81 @@ import {assert} from '../asserts.js';
*/
/**
+ * @typedef {Object} ImageInformationResponse1_0
+ * @property {string} identifier
+ * @property {number} width
+ * @property {number} height
+ * @property {Array<number>} [scale_factors] Resolution scaling factors.
+ * @property {number} [tile_width]
+ * @property {number} [tile_height]
+ * @property {Array<string>} [formats] Supported image formats.
+ * @property {string} [profile] Compliance level URI.
+ */
+
+/**
+ * @typedef {Object} ImageInformationResponse1_1
+ * @property {string} "@id" The base URI of the image service.
+ * @property {string} "@context" JSON-LD context URI.
+ * @property {number} width Full image width.
+ * @property {number} height Full image height.
+ * @property {Array<number>} [scale_factors] Resolution scaling factors.
+ * @property {number} [tile_width]
+ * @property {number} [tile_height]
+ * @property {Array<string>} [formats] Supported image formats.
+ * @property {string} [profile] Compliance level URI.
+ */
+
+/**
+ * @typedef {Object} TileInfo
+ * @property {Array<number>} scaleFactors Supported resolution scaling factors.
+ * @property {number} width Tile width in pixels.
+ * @property {number} [height] Tile height in pixels. Same as tile width if height is
+ * not given.
+ */
+
+/**
+ * @typedef {Object} IiifProfile
+ * @property {Array<string>} [formats] Supported image formats for the image service.
+ * @property {Array<string>} [qualities] Supported IIIF image qualities.
+ * @property {Array<string>} [supports] Supported features.
+ * @property {number} [maxArea] Maximum area (pixels) available for this image service.
+ * @property {number} [maxHeight] Maximum height.
+ * @property {number} [maxWidth] Maximum width.
+ */
+
+/**
+ * @typedef {Object} ImageInformationResponse2
+ * @property {string} "@id" The base URI of the image service.
+ * @property {string} "@context" JSON-LD context IRI
+ * @property {number} width Full image width.
+ * @property {number} height Full image height.
+ * @property {Array<string|IiifProfile>} profile Additional informations about the image
+ * service's capabilities.
+ * @property {Array<Object<string, number>>} [sizes] Supported full image dimensions.
+ * @property {Array<TileInfo>} [tiles] Supported tile sizes and resolution scaling factors.
+ */
+
+/**
+ * @typedef {Object} ImageInformationResponse3
+ * @property {string} id The base URI of the image service.
+ * @property {string} "@context" JSON-LD context IRI
+ * @property {number} width Full image width.
+ * @property {number} height Full image height.
+ * @property {string} profile Compliance level, one of 'level0', 'level1' or 'level2'
+ * @property {Array<Object<string, number>>} [sizes] Supported full image dimensions.
+ * @property {Array<TileInfo>} [tiles] Supported tile sizes and resolution scaling factors.
+ * @property {number} [maxArea] Maximum area (pixels) available for this image service.
+ * @property {number} [maxHeight] Maximum height.
+ * @property {number} [maxWidth] Maximum width.
+ * @property {Array<string>} [extraQualities] IIIF image qualities supported by the
+ * image service additional to the ones indicated by the compliance level.
+ * @property {Array<string>} [extraFormats] Image formats supported by the
+ * image service additional to the ones indicated by the compliance level.
+ * @property {Array<string>} [extraFeatures] Additional supported features whose support
+ * is not indicated by the compliance level.
+ */
+
+/**
* @enum {string}
*/
export const Versions = {
@@ -200,8 +275,8 @@ versionFunctions[Versions.VERSION3] = generateVersion3Options;
class IIIFInfo {
/**
- * @param {Object|string} imageInfo Deserialized image information JSON response
- * object or JSON response as string
+ * @param {ImageInformationResponse1_0|ImageInformationResponse1_1|ImageInformationResponse2|ImageInformationResponse3|string} imageInfo
+ * Deserialized image information JSON response object or JSON response as string
*/
constructor(imageInfo) {
this.setImageInfo(imageInfo);
|
Add typedefs for IIIF info.json responses
|
openlayers_openlayers
|
train
|
b04916b691ac1a9621eb998adf4de5cf78cc4b41
|
diff --git a/fireplace/cards/game/all.py b/fireplace/cards/game/all.py
index <HASH>..<HASH> 100644
--- a/fireplace/cards/game/all.py
+++ b/fireplace/cards/game/all.py
@@ -4,6 +4,17 @@ GAME set and other special cards
from ..utils import *
+# Luck of the Coin
+GAME_001 = buff(health=3)
+
+
+# Coin's Vengeance
+class GAME_003:
+ events = Play(CONTROLLER, MINION).on(Buff(Play.CARD, "GAME_003e"), Destroy(SELF))
+
+GAME_003e = buff(+1, +1)
+
+
# AFK
class GAME_004:
update = Refresh(CONTROLLER, {GameTag.TIMEOUT: 10})
|
Implement Luck of the Coin and Coin's Vengeance
Who the hell knows.
|
jleclanche_fireplace
|
train
|
de128d15dc954a0261227d3a7c8919686ff67d05
|
diff --git a/src/sync.js b/src/sync.js
index <HASH>..<HASH> 100644
--- a/src/sync.js
+++ b/src/sync.js
@@ -116,7 +116,7 @@ function handleError(m, key, error) {
// TODO: this should be configurable for each sync
if (key !== "sso") {
- const stopError = new Error("An error ocurred when fetching data.");
+ const stopError = new Error("An error occurred when fetching data.");
stopError.code = "sync";
stopError.origin = error;
result = l.stop(result, stopError);
|
Fix spelling of occurred (#<I>)
Just bugged me. ;)
|
auth0_lock
|
train
|
b7cb10b8006d28063f268574cb19b44a1a50331a
|
diff --git a/rxjava-core/src/main/java/rx/Scheduler.java b/rxjava-core/src/main/java/rx/Scheduler.java
index <HASH>..<HASH> 100644
--- a/rxjava-core/src/main/java/rx/Scheduler.java
+++ b/rxjava-core/src/main/java/rx/Scheduler.java
@@ -17,6 +17,7 @@ package rx;
import java.util.Date;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
import rx.subscriptions.Subscriptions;
import rx.util.functions.Action0;
@@ -73,6 +74,8 @@ public abstract class Scheduler {
/**
* Schedules a cancelable action to be executed periodically.
+ * This default implementation schedules recursively and waits for actions to complete (instead of potentially executing
+ * long-running actions concurrently). Each scheduler that can do periodic scheduling in a better way should override this.
*
* @param state State to pass into the action.
* @param action The action to execute periodically.
@@ -81,7 +84,38 @@ public abstract class Scheduler {
* @param unit The time unit the interval above is given in.
* @return A subscription to be able to unsubscribe from action.
*/
- public abstract <T> Subscription schedulePeriodically(T state, Func2<Scheduler, T, Subscription> action, long initialDelay, long period, TimeUnit unit);
+ public <T> Subscription schedulePeriodically(T state, final Func2<Scheduler, T, Subscription> action, long initialDelay, long period, TimeUnit unit) {
+ final long periodInNanos = unit.toNanos(period);
+ final AtomicBoolean complete = new AtomicBoolean();
+
+ final Func2<Scheduler, T, Subscription> recursiveAction = new Func2<Scheduler, T, Subscription>() {
+ @Override
+ public Subscription call(Scheduler scheduler, T state0) {
+ if (! complete.get()) {
+ long startedAt = System.nanoTime();
+ final Subscription sub1 = action.call(scheduler, state0);
+ long timeTakenByActionInNanos = System.nanoTime() - startedAt;
+ final Subscription sub2 = schedule(state0, this, periodInNanos - timeTakenByActionInNanos, TimeUnit.NANOSECONDS);
+ return Subscriptions.create(new Action0() {
+ @Override
+ public void call() {
+ sub1.unsubscribe();
+ sub2.unsubscribe();
+ }
+ });
+ }
+ return Subscriptions.empty();
+ }
+ };
+ final Subscription sub = schedule(state, recursiveAction, initialDelay, unit);
+ return Subscriptions.create(new Action0() {
+ @Override
+ public void call() {
+ complete.set(true);
+ sub.unsubscribe();
+ }
+ });
+ }
/**
* Schedules a cancelable action to be executed at dueTime.
|
taken over default recursive implementation
|
ReactiveX_RxJava
|
train
|
518e310015fb73168284c5fa707747b1f051fc82
|
diff --git a/packages/browser/rollup.config.js b/packages/browser/rollup.config.js
index <HASH>..<HASH> 100644
--- a/packages/browser/rollup.config.js
+++ b/packages/browser/rollup.config.js
@@ -54,6 +54,7 @@ const bundleConfig = {
format: 'iife',
name: 'Sentry',
sourcemap: true,
+ strict: false,
},
context: 'window',
plugins: [
diff --git a/packages/browser/src/tracekit.ts b/packages/browser/src/tracekit.ts
index <HASH>..<HASH> 100644
--- a/packages/browser/src/tracekit.ts
+++ b/packages/browser/src/tracekit.ts
@@ -1,6 +1,6 @@
// tslint:disable
-import { getGlobalObject, isError, isErrorEvent } from '@sentry/utils';
+import { getGlobalObject, isError, isErrorEvent, normalize } from '@sentry/utils';
/**
* @hidden
@@ -274,6 +274,9 @@ TraceKit._report = (function reportModuleWrapper() {
var err = (e && (e.detail ? e.detail.reason : e.reason)) || e;
var stack = TraceKit._computeStackTrace(err);
stack.mechanism = 'onunhandledrejection';
+ if (!stack.message) {
+ stack.message = JSON.stringify(normalize(err))
+ }
_notifyHandlers(stack, true, err);
}
diff --git a/packages/browser/test/integration/test.js b/packages/browser/test/integration/test.js
index <HASH>..<HASH> 100644
--- a/packages/browser/test/integration/test.js
+++ b/packages/browser/test/integration/test.js
@@ -667,9 +667,9 @@ for (var idx in frames) {
},
function(sentryData) {
if (debounceAssertEventCount(sentryData, 1, done)) {
+ // non-error rejections doesnt provide stacktraces so we can skip the assertion
assert.equal(sentryData[0].exception.values[0].value, '"test"');
assert.equal(sentryData[0].exception.values[0].type, 'UnhandledRejection');
- assert.equal(sentryData[0].exception.values[0].stacktrace, undefined);
assert.equal(sentryData[0].exception.values[0].mechanism.handled, false);
assert.equal(sentryData[0].exception.values[0].mechanism.type, 'onunhandledrejection');
done();
@@ -696,9 +696,9 @@ for (var idx in frames) {
},
function(sentryData) {
if (debounceAssertEventCount(sentryData, 1, done)) {
- assert.isAtMost(sentryData[0].exception.values[0].value.length, 303);
+ // non-error rejections doesnt provide stacktraces so we can skip the assertion
+ assert.equal(sentryData[0].exception.values[0].value.length, 253);
assert.equal(sentryData[0].exception.values[0].type, 'UnhandledRejection');
- assert.equal(sentryData[0].exception.values[0].stacktrace, undefined);
assert.equal(sentryData[0].exception.values[0].mechanism.handled, false);
assert.equal(sentryData[0].exception.values[0].mechanism.type, 'onunhandledrejection');
done();
@@ -725,9 +725,9 @@ for (var idx in frames) {
},
function(sentryData) {
if (debounceAssertEventCount(sentryData, 1, done)) {
+ // non-error rejections doesnt provide stacktraces so we can skip the assertion
assert.equal(sentryData[0].exception.values[0].value, '{"a":"b"}');
assert.equal(sentryData[0].exception.values[0].type, 'UnhandledRejection');
- assert.equal(sentryData[0].exception.values[0].stacktrace, undefined);
assert.equal(sentryData[0].exception.values[0].mechanism.handled, false);
assert.equal(sentryData[0].exception.values[0].mechanism.type, 'onunhandledrejection');
done();
@@ -761,9 +761,9 @@ for (var idx in frames) {
},
function(sentryData) {
if (debounceAssertEventCount(sentryData, 1, done)) {
- assert.isAtMost(sentryData[0].exception.values[0].value.length, 303);
+ // non-error rejections doesnt provide stacktraces so we can skip the assertion
+ assert.equal(sentryData[0].exception.values[0].value.length, 253);
assert.equal(sentryData[0].exception.values[0].type, 'UnhandledRejection');
- assert.equal(sentryData[0].exception.values[0].stacktrace, undefined);
assert.equal(sentryData[0].exception.values[0].mechanism.handled, false);
assert.equal(sentryData[0].exception.values[0].mechanism.type, 'onunhandledrejection');
done();
diff --git a/packages/integrations/rollup.config.js b/packages/integrations/rollup.config.js
index <HASH>..<HASH> 100644
--- a/packages/integrations/rollup.config.js
+++ b/packages/integrations/rollup.config.js
@@ -76,6 +76,7 @@ function loadAllIntegrations() {
file: `build/${file.replace('.ts', build.extension)}`,
format: 'cjs',
sourcemap: true,
+ strict: false,
},
plugins: build.plugins,
})),
diff --git a/packages/typescript/tsconfig.json b/packages/typescript/tsconfig.json
index <HASH>..<HASH> 100644
--- a/packages/typescript/tsconfig.json
+++ b/packages/typescript/tsconfig.json
@@ -1,5 +1,6 @@
{
"compilerOptions": {
+ "alwaysStrict": false,
"declaration": true,
"declarationMap": true,
"downlevelIteration": true,
@@ -12,6 +13,7 @@
"noFallthroughCasesInSwitch": true,
"noImplicitAny": true,
"noImplicitReturns": true,
+ "noImplicitUseStrict": true,
"noImplicitThis": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
|
ref: Remove 'use strict' from the bundle (#<I>)
* ref: Remove 'use strict' from the bundle
* test: Fix unhandledrejection handling with non-error input
|
getsentry_sentry-javascript
|
train
|
b5497d67554196c02e58b6a44befb45bcb27fc12
|
diff --git a/pmxbot/saysomething.py b/pmxbot/saysomething.py
index <HASH>..<HASH> 100755
--- a/pmxbot/saysomething.py
+++ b/pmxbot/saysomething.py
@@ -6,10 +6,12 @@ import threading
import random
import logging
import time
+import datetime
from itertools import chain
import pmxbot.core
import pmxbot.logging
+import pmxbot.quotes
import pmxbot.timing
log = logging.getLogger(__name__)
@@ -81,9 +83,9 @@ class FastSayer(object):
@classmethod
def init_class(cls):
- time.sleep(30)
log.info("Initializing FastSayer...")
timer = pmxbot.timing.Stopwatch()
+ cls._wait_for_stores(timer)
words = words_from_logger_and_quotes(
pmxbot.logging.Logger.store,
pmxbot.quotes.Quotes.store,
@@ -95,6 +97,19 @@ class FastSayer(object):
return paragraph_from_words(words_from_markov_data(self.markov_data,
initial_word))
+ @classmethod
+ def _wait_for_stores(cls, timer):
+ while timer.elapsed < datetime.timedelta(seconds=30):
+ stores_initialized = (
+ hasattr(pmxbot.logging.Logger, 'store') and
+ hasattr(pmxbot.quotes.Quotes, 'store')
+ )
+ if stores_initialized:
+ break
+ time.sleep(0.1)
+ else:
+ raise RuntimeError("Timeout waiting for stores to be initialized")
+
@pmxbot.core.command("saysomething", aliases=(),
doc="Generate a Markov Chain response based on past logs. Seed it with "
"a starting word by adding that to the end, eg "
|
Don't set an arbitrary delay, but instead wait for the necessary stores to be initialized.
|
yougov_pmxbot
|
train
|
ac35710218492e38e8a6f524e675e90f94b88012
|
diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/partitioner/BroadcastPartitioner.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/partitioner/BroadcastPartitioner.java
index <HASH>..<HASH> 100644
--- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/partitioner/BroadcastPartitioner.java
+++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/partitioner/BroadcastPartitioner.java
@@ -30,22 +30,18 @@ import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
public class BroadcastPartitioner<T> extends StreamPartitioner<T> {
private static final long serialVersionUID = 1L;
- int[] returnArray;
- boolean set;
- int setNumber;
+ private int[] returnArray;
@Override
public int[] selectChannels(SerializationDelegate<StreamRecord<T>> record,
int numberOfOutputChannels) {
- if (set && setNumber == numberOfOutputChannels) {
+ if (returnArray != null && returnArray.length == numberOfOutputChannels) {
return returnArray;
} else {
this.returnArray = new int[numberOfOutputChannels];
for (int i = 0; i < numberOfOutputChannels; i++) {
returnArray[i] = i;
}
- set = true;
- setNumber = numberOfOutputChannels;
return returnArray;
}
}
|
[FLINK-<I>][network] Make the condition of broadcast partitioner simple (#<I>)
|
apache_flink
|
train
|
09e136af1179f6b79ad1ecc5595302e9a44140ea
|
diff --git a/functional/basic_auth_tests.js b/functional/basic_auth_tests.js
index <HASH>..<HASH> 100644
--- a/functional/basic_auth_tests.js
+++ b/functional/basic_auth_tests.js
@@ -46,14 +46,10 @@ exports['Simple authentication test for single server'] = {
}, function(err, r) {
test.equal(null, err);
test.equal(1, r.result.ok);
-
// Grab the connection
var connection = r.connection;
- console.log("--------------------------------------------- TEST 0 :: " + configuration.db)
// Authenticate
_server.auth('mongocr', configuration.db, 'test', 'test', function(err, session) {
- console.log("--------------------------------------------- TEST 1")
- console.dir(err)
test.equal(null, err);
test.ok(session != null);
// Reconnect message
|
fix in timing of _execute method
|
mongodb_node-mongodb-native
|
train
|
cb0731c98d8ab3ef7d0a04b775ec831901a2505b
|
diff --git a/rtpipe/interactive.py b/rtpipe/interactive.py
index <HASH>..<HASH> 100644
--- a/rtpipe/interactive.py
+++ b/rtpipe/interactive.py
@@ -1,13 +1,43 @@
-from scipy.special import erfinv
-import numpy as np
-import logging, pickle, os
+import logging, pickle, os, glob
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
+
+import numpy as np
+from scipy.special import erfinv
from bokeh.plotting import ColumnDataSource, Figure, save, output_file, show
from bokeh.models import HoverTool, TapTool, OpenURL
from bokeh.models.layouts import HBox, VBox
from collections import OrderedDict
from rtpipe.parsecands import read_noise, read_candidates
+from time import asctime
+import activegit, sys
+
+def initializenb():
+ """ Find input files and log initialization info """
+
+ print('Working directory: {0}'.format(os.getcwd()))
+ print('Run on {0}'.format(asctime()))
+ try:
+ fileroot = os.environ['fileroot']
+ print('Setting fileroot to {0} from environment variable.\n'.format(fileroot))
+ candsfile = 'cands_{0}_merge.pkl'.format(fileroot)
+ noisefile = 'noise_{0}_merge.pkl'.format(fileroot)
+ except KeyError:
+ sdmdir = os.getcwd()
+ print('Setting sdmdir to current directory {0}\n'.format(os.path.abspath(sdmdir)))
+ candsfiles = glob.glob('cands_*_merge.pkl')
+ noisefiles = glob.glob('noise_*_merge.pkl')
+ if len(candsfiles) == 1 and len(noisefiles) == 1:
+ print('Found one cands/merge file set')
+ else:
+ print('Found multiple cands/noise file sets. Taking first.')
+
+ candsfile = candsfiles[0]
+ noisefile = noisefiles[0]
+ fileroot = candsfile.rstrip('_merge.pkl').lstrip('cands_')
+ print('Set: \n\t candsfile {} \n\t noisefile {} \n\t fileroot {} '.format(candsfile, noisefile, fileroot))
+ return (candsfile, noisefile, fileroot)
+
def plot_interactive(mergepkl, noisepkl=None, thresh=6.0, thresh_link=7.0, ignoret=None, savehtml=True, url_path='plots'):
""" Backwards compatible function for making interactive candidate summary plot """
@@ -336,7 +366,8 @@ def plotnoise(noisepkl, mergepkl, plot_width=950, plot_height=400):
logger.info('Median image noise is {0:.3} Jy.'.format(fluxscale*imstd))
ncum = plotnoisecum(noisepkl, fluxscale=fluxscale, plot_width=plot_width/2, plot_height=plot_height)
- return HBox(ndist, ncum, width=plot_width)
+ hndl = show(HBox(ndist, ncum, width=plot_width, height=plot_height))
+ return hndl
def plotnoisecum(noisepkl, fluxscale=1, plot_width=450, plot_height=400):
@@ -734,3 +765,24 @@ def displayplot(data, plinds, plottype, scaling, fileroot, url_path='http://www.
edgeinds=plinds['edg'], url_path=url_path,
fileroot=fileroot)
hdl = show(pl)
+
+
+def addclassifications(agdir, prop, version=None, statfeats = [0,4,5,6,7,8]):
+ """ Calculates real score probability of prop from an activegit repo.
+
+ version is string name of activegit tag.
+ Default agdir initialization will have latest tag, so version is optional.
+ statfeats set to work with alnotebook naming.
+ """
+
+ try:
+ ag = activegit.ActiveGit(agdir)
+ if version:
+ ag.set_version(version)
+ clf = ag.read_classifier()
+
+ score = clf.predict_proba((np.nan_to_num(prop[:,statfeats])))[:,1] # take real score
+ except:
+ logger.info('Failure when parsing activegit repo or applying classification.\n{0}'.format(sys.exc_info()[0]))
+
+ return score
diff --git a/rtpipe/nbpipeline.py b/rtpipe/nbpipeline.py
index <HASH>..<HASH> 100644
--- a/rtpipe/nbpipeline.py
+++ b/rtpipe/nbpipeline.py
@@ -1,4 +1,4 @@
-from ipywidgets import interact, FloatSlider, Text, Dropdown, Button, fixed
+from ipywidgets import interact, FloatSlider, Text, Dropdown, Button, Output, VBox, fixed
import pickle, os
from IPython.display import display, Javascript
@@ -83,16 +83,9 @@ def setDropdown(label, default=None, options=[], description='Set Dropdown', for
hndl = interact(save, obj=dropdownw, label=fixed(label), format=fixed(format), statedir=fixed(statedir))
-def setButton(function, description=''):
- """ Create button for clicking to run function """
-
- def function2(b):
- function()
+def getnbname():
+ """ Runs javascript to get name of notebook. Saved as python obj 'nbname' """
- button = Button(description=description, value=False)
- display(button)
- button.on_click(function2)
+ # can this be wrapped to return nbname after js call?
-def setnbname():
- """ Runs javascript to get name of notebook. Saved as python obj 'nbname' """
- display(Javascript("""IPython.notebook.kernel.execute("nbname = " + "\'"+IPython.notebook.notebook_name+"\'");"""))
+ display(Javascript("""IPython.notebook.kernel.execute("nbname = " + "\'"+IPython.notebook.notebook_name+"\'");"""))
|
pulling functions in from baseinteract2.ipynb. tested and working with latest version of notebook
|
caseyjlaw_rtpipe
|
train
|
d17a2958a60683c7917c2602f59aadc9b7200851
|
diff --git a/dht/dht.go b/dht/dht.go
index <HASH>..<HASH> 100644
--- a/dht/dht.go
+++ b/dht/dht.go
@@ -483,6 +483,8 @@ func (cni *NodeInfo) UnmarshalCompact(b []byte) error {
}
func (s *Server) Ping(node *net.UDPAddr) (*transaction, error) {
+ s.mu.Lock()
+ defer s.mu.Unlock()
return s.query(node, "ping", nil)
}
|
dht.Server.Ping didn't lock structure
|
anacrolix_torrent
|
train
|
9a18eac1178820a2942cb235231737b883cfc629
|
diff --git a/src/server/pps/server/api_server.go b/src/server/pps/server/api_server.go
index <HASH>..<HASH> 100644
--- a/src/server/pps/server/api_server.go
+++ b/src/server/pps/server/api_server.go
@@ -5,6 +5,7 @@ import (
"bytes"
"fmt"
"io"
+ "io/ioutil"
"path/filepath"
"sort"
"strconv"
@@ -494,10 +495,12 @@ func (a *apiServer) ListDatum(ctx context.Context, request *pps.ListDatumRequest
if err != nil {
return nil, err
}
- var datumInfos []*pps.DatumInfo
datums := make(map[string]*pps.DatumInfo)
+ fmt.Printf("all datums: %v, %v\n", len(allFileInfos.FileInfo), allFileInfos.FileInfo)
for _, fileInfo := range allFileInfos.FileInfo {
+ fileInfo := fileInfo
_, datumHash := filepath.Split(fileInfo.File.Path)
+ fmt.Printf("populating datums w key: %v\n", datumHash)
datums[datumHash] = &pps.DatumInfo{
Hash: []byte(datumHash),
State: pps.DatumState_DATUM_SKIPPED,
@@ -507,7 +510,8 @@ func (a *apiServer) ListDatum(ctx context.Context, request *pps.ListDatumRequest
// Diff the files under /parentJobID and /jobID to get non-skipped datums
newFile := &pfs.File{
Commit: statsBranch.Head,
- Path: fmt.Sprintf("/%v", request.JobId),
+ //Path: fmt.Sprintf("/%v", request.JobId),
+ Path: "/",
}
commitInfo, err := pfsClient.InspectCommit(
ctx,
@@ -525,18 +529,39 @@ func (a *apiServer) ListDatum(ctx context.Context, request *pps.ListDatumRequest
}
oldFile := &pfs.File{
Commit: commitInfo.ParentCommit,
- Path: fmt.Sprintf("/%v", request.JobId),
+ // Path: fmt.Sprintf("/%v", request.JobId),
+ Path: "/",
}
resp, err := pfsClient.DiffFile(ctx, &pfs.DiffFileRequest{newFile, oldFile})
if err != nil {
return nil, err
}
fmt.Printf("# new %v, # old %v\n", len(resp.NewFiles), len(resp.OldFiles))
-
+ fmt.Printf("new files: %v\n", resp.NewFiles)
// The newFileInfos contain datums that were new in this job
// For these datums, populate the status and stats
+ fmt.Printf("datums map: %v\n", datums)
+ pathToDatumHash := func(path string) (string, error) {
+ tokens := strings.Split(path, "/")
+ fmt.Printf("tokens: %v\n", tokens)
+ if len(tokens) < 3 {
+ return "", fmt.Errorf("invalid datum path %v", path)
+ }
+ // Stats path is /jobID/datumHash/...
+ return tokens[2], nil
+ }
+ newDatums := make(map[string]bool)
for _, fileInfo := range resp.NewFiles {
- _, datumHash := filepath.Split(fileInfo.File.Path)
+ fileInfo := fileInfo
+ datumHash, err := pathToDatumHash(fileInfo.File.Path)
+ if err != nil {
+ return nil, err
+ }
+ _, ok := newDatums[datumHash]
+ if ok {
+ continue
+ }
+ fmt.Printf("walking over this new file %v with hash: %v\n", fileInfo, datumHash)
// Populate status
state := pps.DatumState_DATUM_FAILED
stateFile := &pfs.File{
@@ -563,15 +588,39 @@ func (a *apiServer) ListDatum(ctx context.Context, request *pps.ListDatumRequest
return nil, err
}
r, w := io.Pipe()
- defer w.Close()
- if err := grpcutil.WriteFromStreamingBytesClient(getFileClient, w); err != nil {
- return nil, err
- }
- var stats *pps.ProcessStats
+ fmt.Printf("going to write from getfile server to writer\n")
+ go func() error {
+ if err := grpcutil.WriteFromStreamingBytesClient(getFileClient, w); err != nil {
+ fmt.Printf("done writing %v\n", err)
+ return err
+ }
+ fmt.Printf("done writing\n")
+ w.Close()
+ fmt.Printf("closed writer\n")
+ return nil
+ }()
+ // var stats *pps.ProcessStats
+ stats := &pps.ProcessStats{}
// decoder := json.NewDecoder(r)
// jsonpb.Unmarshal(decoder, stats)
- jsonpb.Unmarshal(r, stats)
+
+ // debug what's getting read
+ raw, err := ioutil.ReadAll(r)
+ if err != nil {
+ return nil, err
+ }
+ fmt.Printf("raw json: %v\n", string(raw))
+ newR := bytes.NewReader(raw)
+ fmt.Printf("unmarhsalling ...\n")
+ jsonpb.Unmarshal(newR, stats)
+ fmt.Printf("done unmarhsalling ...\n")
datums[datumHash].Stats = stats
+ newDatums[datumHash] = true
+ }
+
+ var datumInfos []*pps.DatumInfo
+ for _, datum := range datums {
+ datumInfos = append(datumInfos, datum)
}
// Sort results (failed first, slow first)
|
Populate DatumInfo stats and state in pps ListDatum
|
pachyderm_pachyderm
|
train
|
3b318ba87062c2790635e16cc95f8628ab81eacb
|
diff --git a/guacamole/src/main/java/org/glyptodon/guacamole/net/basic/rest/connectiongroup/ConnectionGroupRESTService.java b/guacamole/src/main/java/org/glyptodon/guacamole/net/basic/rest/connectiongroup/ConnectionGroupRESTService.java
index <HASH>..<HASH> 100644
--- a/guacamole/src/main/java/org/glyptodon/guacamole/net/basic/rest/connectiongroup/ConnectionGroupRESTService.java
+++ b/guacamole/src/main/java/org/glyptodon/guacamole/net/basic/rest/connectiongroup/ConnectionGroupRESTService.java
@@ -35,7 +35,6 @@ import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response.Status;
import org.glyptodon.guacamole.GuacamoleClientException;
import org.glyptodon.guacamole.GuacamoleException;
import org.glyptodon.guacamole.GuacamoleResourceNotFoundException;
@@ -47,7 +46,6 @@ import org.glyptodon.guacamole.net.auth.UserContext;
import org.glyptodon.guacamole.net.auth.permission.ConnectionPermission;
import org.glyptodon.guacamole.net.auth.permission.ObjectPermission;
import org.glyptodon.guacamole.net.basic.rest.AuthProviderRESTExposure;
-import org.glyptodon.guacamole.net.basic.rest.HTTPException;
import org.glyptodon.guacamole.net.basic.rest.auth.AuthenticationService;
import org.glyptodon.guacamole.net.basic.rest.connection.APIConnection;
import org.slf4j.Logger;
@@ -222,10 +220,10 @@ public class ConnectionGroupRESTService {
* The ID of the connection group to retrieve.
*
* @param permission
- * If specified, limit the returned list to only those connections and
- * connection groups for which the current user has the given
- * permission. Otherwise, all visible connections and connection groups
- * are returned.
+ * If specified, limit the returned list to only those connections for
+ * which the current user has the given permission. Otherwise, all
+ * visible connections are returned. Connection groups are unaffected
+ * by this parameter.
*
* @return
* The requested connection group, including all descendants.
|
GUAC-<I>: Fix documented connection group filtering semantics.
|
glyptodon_guacamole-client
|
train
|
41309e64662479c4980a434fd52b539057e68686
|
diff --git a/graylog2-server/src/main/java/org/graylog2/web/resources/WebInterfaceAssetsResource.java b/graylog2-server/src/main/java/org/graylog2/web/resources/WebInterfaceAssetsResource.java
index <HASH>..<HASH> 100644
--- a/graylog2-server/src/main/java/org/graylog2/web/resources/WebInterfaceAssetsResource.java
+++ b/graylog2-server/src/main/java/org/graylog2/web/resources/WebInterfaceAssetsResource.java
@@ -30,6 +30,7 @@ import org.graylog2.web.PluginAssets;
import javax.activation.MimetypesFileTypeMap;
import javax.annotation.Nonnull;
import javax.inject.Inject;
+import javax.inject.Singleton;
import javax.ws.rs.GET;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.Path;
@@ -62,6 +63,7 @@ import java.util.concurrent.TimeUnit;
import static com.google.common.base.MoreObjects.firstNonNull;
+@Singleton
@Path("/")
public class WebInterfaceAssetsResource {
private static final MimetypesFileTypeMap MIME_TYPES = new MimetypesFileTypeMap();
|
Make WebInterfaceAssetsResource singleton to make caching work
Unless the class is being marked as singleton, it's being instantiated for every
request, making the filesystem cache useless.
|
Graylog2_graylog2-server
|
train
|
8d8e6d05f49918ba08fb60e8447f000c1ed039c9
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,11 +1,17 @@
#!/usr/bin/env python
+from codecs import open
+
from setuptools import find_packages, setup
+with open('README.rst', 'r', 'utf-8') as f:
+ readme = f.read()
+
+
setup(
name='blanc-basic-assets',
version='0.3.2',
description='Blanc Basic Assets for Django',
- long_description=open('README.rst').read(),
+ long_description=readme,
url='https://github.com/developersociety/blanc-basic-assets',
maintainer='Developer Society',
maintainer_email='studio@dev.ngo',
|
Ensure README works with Python 2
Defensive measure incase any unicode appears in README
|
developersociety_blanc-basic-assets
|
train
|
29801402e59160f7e1dae70967d3cfea3e7c39ac
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,12 @@ All notable changes to this project will be documented in this file.
This file should follow the standards specified on [http://keepachangelog.com/]
This project adheres to [Semantic Versioning](http://semver.org/).
+## [6.0.0.alpha.2] - 10-14-2015
+
+### Added
+
+- Look for ENV variables for Neo4j URL / path for Rails apps
+
## [6.0.0.alpha.1] - 10-12-2015
### Changed
diff --git a/lib/neo4j/railtie.rb b/lib/neo4j/railtie.rb
index <HASH>..<HASH> 100644
--- a/lib/neo4j/railtie.rb
+++ b/lib/neo4j/railtie.rb
@@ -34,8 +34,8 @@ module Neo4j
end
def setup_config_defaults!(cfg)
- cfg.session_type ||= :server_db
- cfg.session_path ||= 'http://localhost:7474'
+ cfg.session_type ||= ENV['NEO4J_PATH'] ? :embedded_db : :server_db
+ cfg.session_path ||= ENV['NEO4J_URL'] || ENV['NEO4J_PATH'] || 'http://localhost:7474'
cfg.session_options ||= {}
cfg.sessions ||= []
diff --git a/lib/neo4j/version.rb b/lib/neo4j/version.rb
index <HASH>..<HASH> 100644
--- a/lib/neo4j/version.rb
+++ b/lib/neo4j/version.rb
@@ -1,3 +1,3 @@
module Neo4j
- VERSION = '6.0.0.alpha.1'
+ VERSION = '6.0.0.alpha.2'
end
|
Look for ENV variables for Neo4j URL / path for Rails apps
|
neo4jrb_neo4j
|
train
|
22aa775ab27e5456a9df549e56ed86ba1478516c
|
diff --git a/lib/filelib.php b/lib/filelib.php
index <HASH>..<HASH> 100644
--- a/lib/filelib.php
+++ b/lib/filelib.php
@@ -59,6 +59,36 @@ function get_file_url($path, $options=null, $type='coursefile') {
}
/**
+ * Finds occurences of a link to "draftfile.php" in text and replaces the
+ * address based on passed information. Matching is performed using the given
+ * current itemid, contextid and filearea and $CFG->wwwroot. This function
+ * replaces all the urls for one file. If more than one files were sent, it
+ * must be called once for each file.
+ *
+ * @uses $CFG
+ *
+ * @param $text string text to modify
+ * @param $contextid int context that the file should be assigned to
+ * @param $filepath string filepath under which the file should be saved
+ * @param $filearea string filearea into which the file should be saved
+ * @param $itemid int the itemid of the file
+ * @param $currentcontextid int the current contextid of the file
+ * @param $currentfilearea string the current filearea of the file (defaults
+ * to "user_draft")
+ * @return string modified $text, or null if an error occured.
+ */
+function file_rewrite_urls($text, $contextid, $filepath, $filearea, $itemid, $currentcontextid, $currentfilearea = 'userdraft') {
+ global $CFG;
+
+ //TODO: complete, test and adjust if the filearea is removed from the url.
+
+ $re = $CFG->wwwroot .'\/draftfile.php\/'. $currentcontextid .'\/'. $currentfilearea .'\/'. $itemid .'\/(?[A-Fa-f0-9]+)\/([.]+)/';
+ $newurl = $CFG->wwwroot .'/userfile.php/'. $contextid .'/'. $filearea .'/'. $itemid .'/'. $filepath .'/\0';
+
+ return preg_replace($re, $newurl, $text);
+}
+
+/**
* Fetches content of file from Internet (using proxy if defined). Uses cURL extension if present.
* Due to security concerns only downloads from http(s) sources are supported.
*
|
MDL-<I>: first version of post-upload content rewriting function
|
moodle_moodle
|
train
|
6e9485c9ac91026cec99175f356088063fea3bbf
|
diff --git a/Lib/fontParts/base/layer.py b/Lib/fontParts/base/layer.py
index <HASH>..<HASH> 100644
--- a/Lib/fontParts/base/layer.py
+++ b/Lib/fontParts/base/layer.py
@@ -66,7 +66,7 @@ class _BaseGlyphVendor(BaseObject):
Subclasses may override this method.
"""
- names = self.keys()
+ names = list(self.keys())
while names:
name = names[0]
yield self[name]
|
keys must be editable list but not changing the actual keys set
|
robotools_fontParts
|
train
|
8f04a8e7145ada46e89dc456c1f448272c4caf8e
|
diff --git a/pandas/core/arrays/arrow/array.py b/pandas/core/arrays/arrow/array.py
index <HASH>..<HASH> 100644
--- a/pandas/core/arrays/arrow/array.py
+++ b/pandas/core/arrays/arrow/array.py
@@ -18,6 +18,7 @@ from pandas._typing import (
from pandas.compat import (
pa_version_under1p01,
pa_version_under2p0,
+ pa_version_under3p0,
pa_version_under4p0,
pa_version_under5p0,
pa_version_under6p0,
@@ -402,6 +403,10 @@ class ArrowExtensionArray(OpsMixin, ExtensionArray):
"""
return len(self._data)
+ @property
+ def _hasna(self) -> bool:
+ return self._data.null_count > 0
+
def isna(self) -> npt.NDArray[np.bool_]:
"""
Boolean NumPy array indicating if each value is missing.
@@ -439,6 +444,49 @@ class ArrowExtensionArray(OpsMixin, ExtensionArray):
else:
return type(self)(pc.drop_null(self._data))
+ def isin(self: ArrowExtensionArrayT, values) -> npt.NDArray[np.bool_]:
+ if pa_version_under2p0:
+ fallback_performancewarning(version="2")
+ return super().isin(values)
+
+ # for an empty value_set pyarrow 3.0.0 segfaults and pyarrow 2.0.0 returns True
+ # for null values, so we short-circuit to return all False array.
+ if not len(values):
+ return np.zeros(len(self), dtype=bool)
+
+ kwargs = {}
+ if pa_version_under3p0:
+ # in pyarrow 2.0.0 skip_null is ignored but is a required keyword and raises
+ # with unexpected keyword argument in pyarrow 3.0.0+
+ kwargs["skip_null"] = True
+
+ result = pc.is_in(
+ self._data, value_set=pa.array(values, from_pandas=True), **kwargs
+ )
+ # pyarrow 2.0.0 returned nulls, so we explicitly specify dtype to convert nulls
+ # to False
+ return np.array(result, dtype=np.bool_)
+
+ def _values_for_factorize(self) -> tuple[np.ndarray, Any]:
+ """
+ Return an array and missing value suitable for factorization.
+
+ Returns
+ -------
+ values : ndarray
+ na_value : pd.NA
+
+ Notes
+ -----
+ The values returned by this method are also used in
+ :func:`pandas.util.hash_pandas_object`.
+ """
+ if pa_version_under2p0:
+ values = self._data.to_pandas().values
+ else:
+ values = self._data.to_numpy()
+ return values, self.dtype.na_value
+
@doc(ExtensionArray.factorize)
def factorize(
self,
@@ -636,8 +684,6 @@ class ArrowExtensionArray(OpsMixin, ExtensionArray):
-------
ArrowExtensionArray
"""
- import pyarrow as pa
-
chunks = [array for ea in to_concat for array in ea._data.iterchunks()]
arr = pa.chunked_array(chunks)
return cls(arr)
|
ENH/TST: Add isin, _hasna for ArrowExtensionArray (#<I>)
|
pandas-dev_pandas
|
train
|
8dcc4528fca396110e9921840a9a1652428eed8f
|
diff --git a/terraform/context.go b/terraform/context.go
index <HASH>..<HASH> 100644
--- a/terraform/context.go
+++ b/terraform/context.go
@@ -16,7 +16,7 @@ import (
// This is a function type used to implement a walker for the resource
// tree internally on the Terraform structure.
-type genericWalkFunc func(*Resource) error
+type genericWalkFunc func(*walkContext, *Resource) error
// Context represents all the context that Terraform needs in order to
// perform operations on infrastructure. This structure is built using
@@ -498,7 +498,7 @@ func (c *walkContext) Refresh() error {
}
func (c *walkContext) applyWalkFn() depgraph.WalkFunc {
- cb := func(r *Resource) error {
+ cb := func(c *walkContext, r *Resource) error {
var err error
diff := r.Diff
@@ -649,7 +649,7 @@ func (c *walkContext) planWalkFn(result *Plan) depgraph.WalkFunc {
// Initialize the result
result.init()
- cb := func(r *Resource) error {
+ cb := func(c *walkContext, r *Resource) error {
if r.Flags&FlagTainted != 0 {
// We don't diff tainted resources.
return nil
@@ -749,7 +749,7 @@ func (c *walkContext) planWalkFn(result *Plan) depgraph.WalkFunc {
}
func (c *walkContext) refreshWalkFn() depgraph.WalkFunc {
- cb := func(r *Resource) error {
+ cb := func(c *walkContext, r *Resource) error {
is := r.State
if is == nil || is.ID == "" {
@@ -807,8 +807,9 @@ func (c *walkContext) genericWalkFn(cb genericWalkFunc) depgraph.WalkFunc {
switch m := n.Meta.(type) {
case *GraphNodeModule:
- // TODO
- return nil
+ // Build another walkContext for this module and walk it.
+ wc := c.Context.walkContext(m.Path)
+ return m.Graph.Walk(wc.genericWalkFn(cb))
case *GraphNodeResource:
// Continue, we care about this the most
case *GraphNodeResourceMeta:
@@ -859,10 +860,11 @@ func (c *walkContext) genericWalkFn(cb genericWalkFunc) depgraph.WalkFunc {
// Call the callack
log.Printf(
- "[INFO] Walking: %s (Graph node: %s)",
+ "[INFO] Module %s walking: %s (Graph node: %s)",
+ strings.Join(c.Path, "."),
rn.Resource.Id,
n.Name)
- if err := cb(rn.Resource); err != nil {
+ if err := cb(c, rn.Resource); err != nil {
log.Printf("[ERROR] Error walking '%s': %s", rn.Resource.Id, err)
return err
}
diff --git a/terraform/context_test.go b/terraform/context_test.go
index <HASH>..<HASH> 100644
--- a/terraform/context_test.go
+++ b/terraform/context_test.go
@@ -2180,9 +2180,6 @@ func TestContextRefresh_hook(t *testing.T) {
}
func TestContextRefresh_modules(t *testing.T) {
- // TODO: uncomment when we get it going
- t.Skip()
-
p := testProvider("aws")
m := testModule(t, "refresh-modules")
state := &State{
@@ -2543,7 +2540,7 @@ root
const testContextRefreshModuleStr = `
aws_instance.web: (1 tainted)
ID = <not created>
- Tainted ID 1 = foo
+ Tainted ID 1 = bar
module.child:
aws_instance.web:
diff --git a/terraform/graph.go b/terraform/graph.go
index <HASH>..<HASH> 100644
--- a/terraform/graph.go
+++ b/terraform/graph.go
@@ -142,7 +142,7 @@ func Graph(opts *GraphOpts) (*depgraph.Graph, error) {
modState = opts.State.ModuleByPath(opts.ModulePath)
}
- log.Printf("[DEBUG] Creating graph...")
+ log.Printf("[DEBUG] Creating graph for path: %v", opts.ModulePath)
g := new(depgraph.Graph)
@@ -214,7 +214,8 @@ func Graph(opts *GraphOpts) (*depgraph.Graph, error) {
}
log.Printf(
- "[DEBUG] Graph created and valid. %d nouns.",
+ "[DEBUG] Graph %v created and valid. %d nouns.",
+ opts.ModulePath,
len(g.Nouns))
return g, nil
|
terraform: basic sub-module walks work
Lots broken still, but its a start.
|
hashicorp_terraform
|
train
|
d0864f43676a137a3b0fc706becc36f10a064450
|
diff --git a/dropwizard-example/src/main/java/com/example/helloworld/HelloWorldApplication.java b/dropwizard-example/src/main/java/com/example/helloworld/HelloWorldApplication.java
index <HASH>..<HASH> 100644
--- a/dropwizard-example/src/main/java/com/example/helloworld/HelloWorldApplication.java
+++ b/dropwizard-example/src/main/java/com/example/helloworld/HelloWorldApplication.java
@@ -1,6 +1,8 @@
package com.example.helloworld;
+
import com.example.helloworld.auth.ExampleAuthorizer;
import io.dropwizard.auth.AuthValueFactoryProvider;
+import com.codahale.metrics.ScheduledReporter;
import com.example.helloworld.auth.ExampleAuthenticator;
import com.example.helloworld.cli.RenderCommand;
import com.example.helloworld.core.Person;
@@ -23,9 +25,11 @@ import io.dropwizard.configuration.EnvironmentVariableSubstitutor;
import io.dropwizard.configuration.SubstitutingSourceProvider;
import io.dropwizard.db.DataSourceFactory;
import io.dropwizard.hibernate.HibernateBundle;
+import io.dropwizard.metrics.graphite.GraphiteReporterFactory;
import io.dropwizard.migrations.MigrationsBundle;
import io.dropwizard.setup.Bootstrap;
import io.dropwizard.setup.Environment;
+import io.dropwizard.util.Duration;
import io.dropwizard.views.ViewBundle;
import org.glassfish.jersey.server.filter.RolesAllowedDynamicFeature;
import java.util.Map;
@@ -89,11 +93,19 @@ public class HelloWorldApplication extends Application<HelloWorldConfiguration>
.buildAuthFilter()));
environment.jersey().register(new AuthValueFactoryProvider.Binder<>(User.class));
environment.jersey().register(RolesAllowedDynamicFeature.class);
+
+ // resources
environment.jersey().register(new HelloWorldResource(template));
environment.jersey().register(new ViewResource());
environment.jersey().register(new ProtectedResource());
environment.jersey().register(new PeopleResource(dao));
environment.jersey().register(new PersonResource(dao));
environment.jersey().register(new FilteredResource());
+
+ // metrics
+ final GraphiteReporterFactory graphite = configuration.getGraphiteReporterFactory();
+ final Duration frequency = graphite.getFrequency().orElse(Duration.minutes(1));
+ final ScheduledReporter reporter = graphite.build(environment.metrics());
+ reporter.start(frequency.getQuantity(), frequency.getUnit());
}
}
diff --git a/dropwizard-metrics-graphite/src/main/java/io/dropwizard/metrics/graphite/GraphiteReporterFactory.java b/dropwizard-metrics-graphite/src/main/java/io/dropwizard/metrics/graphite/GraphiteReporterFactory.java
index <HASH>..<HASH> 100644
--- a/dropwizard-metrics-graphite/src/main/java/io/dropwizard/metrics/graphite/GraphiteReporterFactory.java
+++ b/dropwizard-metrics-graphite/src/main/java/io/dropwizard/metrics/graphite/GraphiteReporterFactory.java
@@ -10,8 +10,8 @@ import com.fasterxml.jackson.annotation.JsonTypeName;
import com.google.common.annotations.VisibleForTesting;
import io.dropwizard.metrics.BaseReporterFactory;
import io.dropwizard.validation.OneOf;
+import io.dropwizard.validation.PortRange;
import org.hibernate.validator.constraints.NotEmpty;
-import org.hibernate.validator.constraints.Range;
import javax.validation.constraints.NotNull;
@@ -32,7 +32,7 @@ import javax.validation.constraints.NotNull;
* </tr>
* <tr>
* <td>port</td>
- * <td>8080</td>
+ * <td>2003</td>
* <td>The port of the Graphite server to report to.</td>
* </tr>
* <tr>
@@ -53,8 +53,8 @@ public class GraphiteReporterFactory extends BaseReporterFactory {
@NotEmpty
private String host = "localhost";
- @Range(min = 0, max = 49151)
- private int port = 8080;
+ @PortRange
+ private int port = 2003;
@NotNull
private String prefix = "";
|
Add an example of using the GraphiteReporter to the example application
|
dropwizard_dropwizard
|
train
|
622cd09d5937cdee4a453d8aad0151797c18c058
|
diff --git a/lib/rambling/trie/branches.rb b/lib/rambling/trie/branches.rb
index <HASH>..<HASH> 100644
--- a/lib/rambling/trie/branches.rb
+++ b/lib/rambling/trie/branches.rb
@@ -31,7 +31,7 @@ module Rambling
protected
def branch_when_uncompressed?(chars)
- chars.empty? or fulfills_uncompressed_condition?(:branch_when_uncompressed?, chars)
+ chars.empty? || fulfills_uncompressed_condition?(:branch_when_uncompressed?, chars)
end
def branch_when_compressed?(chars)
@@ -58,11 +58,11 @@ module Rambling
end
def word_when_uncompressed?(chars)
- (chars.empty? and terminal?) or fulfills_uncompressed_condition?(:word_when_uncompressed?, chars)
+ (chars.empty? && terminal?) || fulfills_uncompressed_condition?(:word_when_uncompressed?, chars)
end
def word_when_compressed?(chars)
- return true if chars.empty? and terminal?
+ return true if chars.empty? && terminal?
first_letter = ''
while not chars.empty?
diff --git a/lib/rambling/trie/compressor.rb b/lib/rambling/trie/compressor.rb
index <HASH>..<HASH> 100644
--- a/lib/rambling/trie/compressor.rb
+++ b/lib/rambling/trie/compressor.rb
@@ -5,13 +5,13 @@ module Rambling
# Flag for compressed tries.
# @return [Boolean] `true` for compressed tries, `false` otherwise.
def compressed?
- parent and parent.compressed?
+ parent && parent.compressed?
end
# Compress the current node using redundant node elimination.
# @return [Root, Node] the compressed node.
def compress_tree!
- if children.size == 1 and not terminal? and letter
+ if children.size == 1 && !terminal? && letter
merge_with! children.values.first
compress_tree!
end
diff --git a/lib/rambling/trie/inspector.rb b/lib/rambling/trie/inspector.rb
index <HASH>..<HASH> 100644
--- a/lib/rambling/trie/inspector.rb
+++ b/lib/rambling/trie/inspector.rb
@@ -4,7 +4,7 @@ module Rambling
module Inspector
# @return [String] a string representation of the current node.
def inspect
- "#<#{self.class.name} letter: #{letter.inspect or 'nil'}, children: #{children.keys}>"
+ "#<#{self.class.name} letter: #{letter.inspect || 'nil'}, children: #{children.keys}>"
end
end
end
diff --git a/lib/rambling/trie/node.rb b/lib/rambling/trie/node.rb
index <HASH>..<HASH> 100644
--- a/lib/rambling/trie/node.rb
+++ b/lib/rambling/trie/node.rb
@@ -27,7 +27,7 @@ module Rambling
self.parent = parent
self.children = {}
- unless word.nil? or word.empty?
+ unless word.nil? || word.empty?
letter = word.slice! 0
self.letter = letter.to_sym if letter
self.terminal = word.empty?
@@ -45,7 +45,7 @@ module Rambling
# @return [String] the string representation of the current node.
# @raise [InvalidOperation] if node is not terminal or is root.
def as_word
- raise InvalidOperation, 'Cannot represent branch as a word' unless letter.nil? or terminal?
+ raise InvalidOperation, 'Cannot represent branch as a word' unless letter.nil? || terminal?
letter_string
end
diff --git a/lib/rambling/trie/root.rb b/lib/rambling/trie/root.rb
index <HASH>..<HASH> 100644
--- a/lib/rambling/trie/root.rb
+++ b/lib/rambling/trie/root.rb
@@ -13,7 +13,7 @@ module Rambling
# Compresses the existing tree using redundant node elimination. Flags the trie as compressed.
# @return [Root] self
def compress!
- self.compressed = (compressed? or !!compress_tree!)
+ self.compressed = (compressed? || !!compress_tree!)
self
end
|
Changed 'and' and 'or' to '&&' and '||'.
|
gonzedge_rambling-trie
|
train
|
bb5540cb4ab38bbc95e415d5321087b5131db3b2
|
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index <HASH>..<HASH> 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -98,7 +98,7 @@ class _Zypper(object):
# Call config
self.__xml = False
self.__no_lock = False
- self.__no_raise = True
+ self.__no_raise = False
self.__refresh = False
def __getattr__(self, item):
@@ -118,7 +118,7 @@ class _Zypper(object):
elif item == 'nolock':
self.__no_lock = True
elif item == 'noraise':
- self.__no_raise = False
+ self.__no_raise = True
elif item == 'refreshable':
self.__refresh = True
else:
|
Bugfix: inverted logic on raising (or not) exceptions
|
saltstack_salt
|
train
|
b73b4c73108ed9718f84ff901ff6dfa49ed24cb9
|
diff --git a/src/main/java/org/graylog2/indexer/Indexer.java b/src/main/java/org/graylog2/indexer/Indexer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/graylog2/indexer/Indexer.java
+++ b/src/main/java/org/graylog2/indexer/Indexer.java
@@ -60,11 +60,12 @@ public class Indexer {
URL url = new URL(Indexer.buildIndexURL());
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("HEAD");
- if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
+ // Older versions of ElasticSearch return 400 Bad Request in cse of an existing index.
+ if (conn.getResponseCode() == HttpURLConnection.HTTP_OK || conn.getResponseCode() == HttpURLConnection.HTTP_BAD_REQUEST) {
return true;
} else {
if (conn.getResponseCode() != HttpURLConnection.HTTP_NOT_FOUND) {
- LOG.warn("Indexer response code was not 200 or 404, but " + conn.getResponseCode());
+ LOG.warn("Indexer response code was not (200 or 400) or 404, but " + conn.getResponseCode());
}
return false;
|
HTTP <I> as ES answer for existing index is okay
strange, but okay. happens in older versions. fixes #SERVER-<I>
|
Graylog2_graylog2-server
|
train
|
1195db6a293613595987a5139fafad1b5321632c
|
diff --git a/lib/coverband/reporter.rb b/lib/coverband/reporter.rb
index <HASH>..<HASH> 100644
--- a/lib/coverband/reporter.rb
+++ b/lib/coverband/reporter.rb
@@ -146,7 +146,9 @@ module Coverband
Coverband.configuration.logger.info scov_style_report.inspect
end
- SimpleCov::Result.new(scov_style_report).format!
+ # add in files never hit in coverband
+ SimpleCov.track_files "#{current_root}/{app,lib,config}/**/*.{rb,haml,erb,slim}"
+ SimpleCov::Result.new(SimpleCov.add_not_loaded_files(scov_style_report)).format!
if open_report
`open #{SimpleCov.coverage_dir}/index.html`
else
diff --git a/lib/coverband/version.rb b/lib/coverband/version.rb
index <HASH>..<HASH> 100644
--- a/lib/coverband/version.rb
+++ b/lib/coverband/version.rb
@@ -1,3 +1,3 @@
module Coverband
- VERSION = "1.3.0"
+ VERSION = "1.3.1"
end
diff --git a/test/unit/reporter_test.rb b/test/unit/reporter_test.rb
index <HASH>..<HASH> 100644
--- a/test/unit/reporter_test.rb
+++ b/test/unit/reporter_test.rb
@@ -37,6 +37,26 @@ class ReporterTest < Test::Unit::TestCase
Coverband::Reporter.report
end
+ test "report data with scov" do
+ Coverband.configure do |config|
+ config.redis = fake_redis
+ config.reporter = 'scov'
+ config.s3_bucket = nil
+ end
+
+ Coverband::Reporter.expects(:current_root).at_least_once.returns('/root_dir')
+ fake_redis.expects(:smembers).with('coverband').returns(fake_coverband_members)
+
+ fake_coverband_members.each do |key|
+ File.expects(:exists?).with(key).returns(true)
+ File.expects(:foreach).with(key).returns(['a','b','c'])
+ fake_redis.expects(:smembers).with("coverband.#{key}").returns(["54", "55"])
+ end
+
+ Coverband.configuration.logger.stubs('info')
+
+ Coverband::Reporter.report(open_report: false)
+ end
####
# TODO
|
show files not touched by coverband but exist in project... add tests on scov output path
|
danmayer_coverband
|
train
|
3f3712c729266d536b0f32129248c88187799c7d
|
diff --git a/test/spec/Menu-test.js b/test/spec/Menu-test.js
index <HASH>..<HASH> 100644
--- a/test/spec/Menu-test.js
+++ b/test/spec/Menu-test.js
@@ -36,17 +36,15 @@ define(function (require, exports, module) {
KeyEvent = require("utils/KeyEvent");
- describe("Menus", function () {
+ describe("Menus (Native Shell)", function () {
this.category = "integration";
var testWindow;
beforeFirst(function () {
- // Create a new window that will be shared by ALL tests in this spec. (We need the tests to
- // run in a real Brackets window since HTMLCodeHints requires various core modules (it can't
- // run 100% in isolation), but popping a new window per testcase is unneeded overhead).
- SpecRunnerUtils.createTestWindowAndRun(this, function (w) {
+ // Create a new native menu window that will be shared by ALL tests in this spec.
+ SpecRunnerUtils.createNativeTestWindowAndRun(this, function (w) {
testWindow = w;
// Load module instances from brackets.test
@@ -210,11 +208,37 @@ define(function (require, exports, module) {
expect($menus.length).toBe(0);
});
});
+ });
- if (!brackets.inBrowser) {
- return;
- }
+
+ describe("Menus (HTML)", function () {
+
+ this.category = "integration";
+ var testWindow;
+
+ beforeFirst(function () {
+ // Create a new HTML menu window that will be shared by ALL tests in this spec.
+ SpecRunnerUtils.createHTMLTestWindowAndRun(this, function (w) {
+ testWindow = w;
+
+ // Load module instances from brackets.test
+ CommandManager = testWindow.brackets.test.CommandManager;
+ Commands = testWindow.brackets.test.Commands;
+ KeyBindingManager = testWindow.brackets.test.KeyBindingManager;
+ Menus = testWindow.brackets.test.Menus;
+ });
+ });
+
+ afterLast(function () {
+ testWindow = null;
+ CommandManager = null;
+ Commands = null;
+ KeyBindingManager = null;
+ Menus = null;
+ SpecRunnerUtils.closeTestWindow();
+ });
+
describe("Add Menus", function () {
function getTopMenus() {
diff --git a/test/spec/SpecRunnerUtils.js b/test/spec/SpecRunnerUtils.js
index <HASH>..<HASH> 100644
--- a/test/spec/SpecRunnerUtils.js
+++ b/test/spec/SpecRunnerUtils.js
@@ -476,8 +476,7 @@ define(function (require, exports, module) {
waitsForDone(promise, "dismiss dialog");
}
-
- function createTestWindowAndRun(spec, callback) {
+ function _createTestWindowAndRun(spec, hasNativeMenus, callback) {
runs(function () {
// Position popup windows in the lower right so they're out of the way
var testWindowWid = 1000,
@@ -503,6 +502,9 @@ define(function (require, exports, module) {
// disable initial dialog for live development
params.put("skipLiveDevelopmentInfo", true);
+ // determines if test window should have native or html menus
+ params.put("hasNativeMenus", hasNativeMenus);
+
_testWindow = window.open(getBracketsSourceRoot() + "/index.html?" + params.toString(), "_blank", optionsStr);
_testWindow.isBracketsTestWindow = true;
@@ -524,7 +526,7 @@ define(function (require, exports, module) {
});
};
});
-
+
// FIXME (issue #249): Need an event or something a little more reliable...
waitsFor(
function isBracketsDoneLoading() {
@@ -540,6 +542,18 @@ define(function (require, exports, module) {
});
}
+ function createTestWindowAndRun(spec, callback) {
+ _createTestWindowAndRun(spec, brackets.nativeMenus, callback);
+ }
+
+ function createHTMLTestWindowAndRun(spec, callback) {
+ _createTestWindowAndRun(spec, false, callback);
+ }
+
+ function createNativeTestWindowAndRun(spec, callback) {
+ _createTestWindowAndRun(spec, true, callback);
+ }
+
function closeTestWindow() {
// debug-only to see testWindow state before closing
// waits(500);
@@ -1268,6 +1282,8 @@ define(function (require, exports, module) {
exports.createMockEditorForDocument = createMockEditorForDocument;
exports.createMockEditor = createMockEditor;
exports.createTestWindowAndRun = createTestWindowAndRun;
+ exports.createHTMLTestWindowAndRun = createHTMLTestWindowAndRun;
+ exports.createNativeTestWindowAndRun = createNativeTestWindowAndRun;
exports.closeTestWindow = closeTestWindow;
exports.clickDialogButton = clickDialogButton;
exports.destroyMockEditor = destroyMockEditor;
|
Split menu specs into HTML and Native. Add HTML and Native versions of createTestWindowAndRun.
|
adobe_brackets
|
train
|
008100893f8e9955e8d6b364616abedda22e83d6
|
diff --git a/example/Gulpfile.js b/example/Gulpfile.js
index <HASH>..<HASH> 100644
--- a/example/Gulpfile.js
+++ b/example/Gulpfile.js
@@ -1,4 +1,4 @@
-var NwBuilder = require('../');
+var NwBuilder = require('nw-builder');
var gulp = require('gulp');
var gutil = require('gulp-util');
|
<I> - Fix for cache check of some legacy versions
|
nwjs-community_nw-builder
|
train
|
b978db0ababe7222def88875f7d07d303b77dce9
|
diff --git a/builtin/logical/ssh/backend_test.go b/builtin/logical/ssh/backend_test.go
index <HASH>..<HASH> 100644
--- a/builtin/logical/ssh/backend_test.go
+++ b/builtin/logical/ssh/backend_test.go
@@ -2,7 +2,6 @@ package ssh
import (
"fmt"
- "log"
"os/user"
"reflect"
"strconv"
@@ -102,19 +101,23 @@ func TestSSHBackend_Lookup(t *testing.T) {
data := map[string]interface{}{
"ip": testIP,
}
+ resp1 := []string(nil)
+ resp2 := []string{testOTPRoleName}
+ resp3 := []string{testDynamicRoleName, testOTPRoleName}
+ resp4 := []string{testDynamicRoleName}
logicaltest.Test(t, logicaltest.TestCase{
Factory: Factory,
Steps: []logicaltest.TestStep{
- testLookupRead(t, data, 0),
+ testLookupRead(t, data, resp1),
testRoleWrite(t, testOTPRoleName, testOTPRoleData),
- testLookupRead(t, data, 1),
+ testLookupRead(t, data, resp2),
testNamedKeysWrite(t),
testRoleWrite(t, testDynamicRoleName, testDynamicRoleData),
- testLookupRead(t, data, 2),
+ testLookupRead(t, data, resp3),
testRoleDelete(t, testOTPRoleName),
- testLookupRead(t, data, 1),
+ testLookupRead(t, data, resp4),
testRoleDelete(t, testDynamicRoleName),
- testLookupRead(t, data, 0),
+ testLookupRead(t, data, resp1),
},
})
}
@@ -352,18 +355,17 @@ func testNamedKeysDelete(t *testing.T) logicaltest.TestStep {
}
}
-func testLookupRead(t *testing.T, data map[string]interface{}, length int) logicaltest.TestStep {
+func testLookupRead(t *testing.T, data map[string]interface{}, expected []string) logicaltest.TestStep {
return logicaltest.TestStep{
Operation: logical.WriteOperation,
Path: "lookup",
Data: data,
Check: func(resp *logical.Response) error {
- log.Printf("Lookup Read Response: %#v", resp)
if resp.Data == nil || resp.Data["roles"] == nil {
return fmt.Errorf("Missing roles information")
}
- if len(resp.Data["roles"].([]string)) != length {
- return fmt.Errorf("Role information incorrect")
+ if !reflect.DeepEqual(resp.Data["roles"].([]string), expected) {
+ return fmt.Errorf("Invalid response: \nactual:%#v\nexpected:%#v", resp.Data["roles"].([]string), expected)
}
return nil
},
|
Vault SSH: Refactor lookup test case
|
hashicorp_vault
|
train
|
b1fee1961ca744db6c477c481a6738b43b5d003a
|
diff --git a/admin/jqadm/templates/attribute/item-property-default.php b/admin/jqadm/templates/attribute/item-property-default.php
index <HASH>..<HASH> 100644
--- a/admin/jqadm/templates/attribute/item-property-default.php
+++ b/admin/jqadm/templates/attribute/item-property-default.php
@@ -99,15 +99,15 @@ $enc = $this->encoder();
</select>
</td>
<td class="property-language">
- <select class="custom-select item-languageid" tabindex="<?= $this->get( 'tabindex' ); ?>" disabled="disabled"
+ <select class="form-control custom-select item-languageid" tabindex="<?= $this->get( 'tabindex' ); ?>" disabled="disabled"
name="<?= $enc->attr( $this->formparam( array( 'property', 'attribute.property.languageid', '' ) ) ); ?>">
<option value="">
<?= $enc->html( $this->translate( 'admin', 'All' ) ); ?>
</option>
- <?php foreach( $this->get( 'pageLanguages', [] ) as $langId => $langItem ) : ?>
+ <?php foreach( $this->get( 'pageLangItems', [] ) as $langId => $langItem ) : ?>
<option value="<?= $enc->attr( $langId ); ?>">
- <?= $enc->html( $langId ); ?>
+ <?= $enc->html( $langItem->getLabel() ); ?>
</option>
<?php endforeach; ?>
</select>
|
Fixes language selector in product property panel
|
aimeos_ai-admin-jqadm
|
train
|
2cbb9b7c09ee6644e318ec3b0e6d4623c7cabd5a
|
diff --git a/km3pipe/io/__init__.py b/km3pipe/io/__init__.py
index <HASH>..<HASH> 100644
--- a/km3pipe/io/__init__.py
+++ b/km3pipe/io/__init__.py
@@ -86,7 +86,7 @@ def read_calibration(detx=None, det_id=None, from_file=False,
"""Retrive calibration from file, the DB."""
from km3pipe.calib import Calibration # noqa
- if not detx or det_id or from_file:
+ if not (detx or det_id or from_file):
return None
if detx is not None:
return Calibration(filename=detx)
@@ -97,13 +97,11 @@ def read_calibration(detx=None, det_id=None, from_file=False,
det_id = det_ids[0]
if det_id is not None:
if det_id < 0:
- log.warning("Negative detector ID found ({0}), skipping..."
+ log.warning("Negative detector ID found ({0}). This is a MC "
+ "detector and cannot be retrieved from the DB."
.format(det_id))
return None
- try:
- return Calibration(det_id=det_id)
- except ValueError:
- log.warning("Could not retrieve the calibration information.")
+ return Calibration(det_id=det_id)
return None
|
Cleanup of read_calibration
|
tamasgal_km3pipe
|
train
|
588b39e1cdf09dddf3a02ff4b062d5a09a0911e9
|
diff --git a/activejob/test/jobs/callback_job.rb b/activejob/test/jobs/callback_job.rb
index <HASH>..<HASH> 100644
--- a/activejob/test/jobs/callback_job.rb
+++ b/activejob/test/jobs/callback_job.rb
@@ -1,12 +1,21 @@
class CallbackJob < ActiveJob::Base
before_perform ->(job) { job.history << "CallbackJob ran before_perform" }
- after_perform ->(job) { job.history << "CallbackJob ran after_perform" }
+ after_perform ->(job) { job.history << "CallbackJob ran after_perform" }
before_enqueue ->(job) { job.history << "CallbackJob ran before_enqueue" }
- after_enqueue ->(job) { job.history << "CallbackJob ran after_enqueue" }
+ after_enqueue ->(job) { job.history << "CallbackJob ran after_enqueue" }
- around_perform :around_perform
- around_enqueue :around_enqueue
+ around_perform do |job, block|
+ job.history << "CallbackJob ran around_perform_start"
+ block.call
+ job.history << "CallbackJob ran around_perform_stop"
+ end
+
+ around_enqueue do |job, block|
+ job.history << "CallbackJob ran around_enqueue_start"
+ block.call
+ job.history << "CallbackJob ran around_enqueue_stop"
+ end
def perform(person = "david")
@@ -17,16 +26,4 @@ class CallbackJob < ActiveJob::Base
@history ||= []
end
- # FIXME: Not sure why these can't be declared inline like before/after
- def around_perform
- history << "CallbackJob ran around_perform_start"
- yield
- history << "CallbackJob ran around_perform_stop"
- end
-
- def around_enqueue
- history << "CallbackJob ran around_enqueue_start"
- yield
- history << "CallbackJob ran around_enqueue_stop"
- end
end
|
- Inline AJ around_perform and around_enqueue in CallbackJob used for tests.
|
rails_rails
|
train
|
77faaaf09e83a3979c1ca80f6afd4ec30948a3ed
|
diff --git a/lib/SmtpValidatorEmail/Helper/TransportHelper.php b/lib/SmtpValidatorEmail/Helper/TransportHelper.php
index <HASH>..<HASH> 100644
--- a/lib/SmtpValidatorEmail/Helper/TransportHelper.php
+++ b/lib/SmtpValidatorEmail/Helper/TransportHelper.php
@@ -3,7 +3,6 @@
namespace SmtpValidatorEmail\Helper;
-use SmtpValidatorEmail\Exception\ExceptionNoConnection;
use SmtpValidatorEmail\Helper\Interfaces\TransportInterface;
use SmtpValidatorEmail\Service\StatusManager;
use SmtpValidatorEmail\Smtp\Smtp;
diff --git a/lib/SmtpValidatorEmail/Smtp/Smtp.php b/lib/SmtpValidatorEmail/Smtp/Smtp.php
index <HASH>..<HASH> 100644
--- a/lib/SmtpValidatorEmail/Smtp/Smtp.php
+++ b/lib/SmtpValidatorEmail/Smtp/Smtp.php
@@ -227,10 +227,9 @@ class Smtp
if (!$this->state['helo']) {
throw new Exception\ExceptionNoHelo('Need HELO before MAIL FROM');
}
- // issue MAIL FROM, 5 minute timeout
- $this->send('MAIL FROM:<' . $from . '>');
try {
-
+ // issue MAIL FROM, 5 minute timeout
+ $this->send('MAIL FROM:<' . $from . '>');
$this->expect($this->config['responseCodes']['SMTP_GENERIC_SUCCESS'], $this->config['commandTimeouts']['mail']);
// set state flags
$this->state['mail'] = true;
@@ -352,25 +351,14 @@ class Smtp
{
// must be connected
if (!$this->isConnect()) {
- // TODO: Change to log to statusManager
-// if($this->logPath){
-// $this->writeLog('No connection');
-// }
throw new Exception\ExceptionNoConnection('No connection');
}
-
// write the cmd to the connection stream
$result = fwrite($this->socket, $cmd . self::CRLF);
// did the send work?
if ($result === false) {
- // TODO: Change to log to statusManager
-// if($this->logPath){
-// $this->writeLog('Send failed on: ' . $this->host);
-// }
throw new Exception\ExceptionSendFailed('Send failed on: '. $this->host );
}
- //TODO: Send cmd , put to log
- // dump('Send: '.$cmd);
return $result;
}
|
removed exception thrower , instead returns message for the manager
|
ddtraceweb_smtp-validator-email
|
train
|
25d87b18f8e29de40ab27cb18b74e741f38bdbc1
|
diff --git a/library/CM/Http/Response/RPC.php b/library/CM/Http/Response/RPC.php
index <HASH>..<HASH> 100644
--- a/library/CM/Http/Response/RPC.php
+++ b/library/CM/Http/Response/RPC.php
@@ -20,6 +20,7 @@ class CM_Http_Response_RPC extends CM_Http_Response_Abstract {
$output['error'] = array('type' => get_class($e), 'msg' => $e->getMessagePublic($this->getRender()), 'isPublic' => $e->isPublic());
});
+ $output['deployVersion'] = CM_App::getInstance()->getDeployVersion();
$this->setHeader('Content-Type', 'application/json');
$this->_setContent(json_encode($output));
}
diff --git a/tests/library/CM/Http/Response/RPCTest.php b/tests/library/CM/Http/Response/RPCTest.php
index <HASH>..<HASH> 100644
--- a/tests/library/CM/Http/Response/RPCTest.php
+++ b/tests/library/CM/Http/Response/RPCTest.php
@@ -99,6 +99,21 @@ class CM_Http_Response_RPCTest extends CMTest_TestCase {
], $responseData);
}
+ public function testProcessReturnDeployVersion() {
+ $body = CM_Params::jsonEncode([
+ 'method' => 'CM_Http_Response_RPCTest.add',
+ 'params' => [2, 3],
+ ]);
+ $site = $this->getMockSite();
+ $request = new CM_Http_Request_Post('/rpc', null, null, $body);
+ $response = CM_Http_Response_RPC::createFromRequest($request, $site, $this->getServiceManager());
+ $response->process();
+
+ $responseDecoded = CM_Params::jsonDecode($response->getContent());
+ $this->assertArrayHasKey('deployVersion', $responseDecoded);
+ $this->assertSame(CM_App::getInstance()->getDeployVersion(), $responseDecoded['deployVersion']);
+ }
+
public static function rpc_add($foo, $bar) {
return $foo + $bar;
}
|
Add deployVersion to RPC response
|
cargomedia_cm
|
train
|
50baeba59e2032a710060116e95055502f30e5b0
|
diff --git a/xml/node_test.go b/xml/node_test.go
index <HASH>..<HASH> 100644
--- a/xml/node_test.go
+++ b/xml/node_test.go
@@ -103,18 +103,12 @@ func TestSetChildren(t *testing.T) {
RunTest(t, "node", "set_children", testLogic)
}
-func _BenchmarkSetChildren(b *testing.B) {
+func BenchmarkSetChildren(b *testing.B) {
benchmarkLogic := func(b *testing.B, doc *XmlDocument) {
- println("a")
root := doc.Root()
- println("b")
-// for i := 0; i < b.N; i++ {
-// for i := 0; i < 1000; i++ {
-// for i := 0; i < 200; i++ {
- for i := 0; i < 2; i++ {
+ for i := 0; i < b.N; i++ {
root.SetChildren("<fun></fun>")
}
- println("c")
}
RunBenchmark(b, "node", "set_children", benchmarkLogic)
@@ -137,13 +131,13 @@ func TestReplace(t *testing.T) {
RunTest(t, "node", "replace", testLogic, rootAssertion)
}
-func _BenchmarkReplace(b *testing.B) {
+func BenchmarkReplace(b *testing.B) {
benchmarkLogic := func(b *testing.B, doc *XmlDocument) {
root := doc.Root()
-// for i := 0; i < b.N; i++ {
- for i := 0; i < 2; i++ {
- root.Replace("<fun></fun><cool/>")
+ for i := 0; i < b.N; i++ {
+ root.Replace("<fun></fun>")
+ root = doc.Root() //once the node has been replaced, we need to get a new node
}
}
|
restore the tests; change the replace test because it needs to get the new node once being replaced
|
moovweb_gokogiri
|
train
|
227739c68f95924f7c192ab1178781d996b9139d
|
diff --git a/runtime-module.js b/runtime-module.js
index <HASH>..<HASH> 100644
--- a/runtime-module.js
+++ b/runtime-module.js
@@ -14,11 +14,7 @@ var hadRuntime = g.regeneratorRuntime &&
var oldRuntime = hadRuntime && g.regeneratorRuntime;
// Force reevalutation of runtime.js.
-try {
- delete g.regeneratorRuntime;
-} catch(e) {
- g.regeneratorRuntime = undefined;
-}
+g.regeneratorRuntime = undefined;
module.exports = require("./runtime");
|
Just set `global.regeneratorRuntime = undefined` unconditionally.
|
facebook_regenerator
|
train
|
a15e63db8ff0d99320f6dca907c02c9841cf262d
|
diff --git a/src/main/php/CatchedException.php b/src/main/php/CatchedException.php
index <HASH>..<HASH> 100644
--- a/src/main/php/CatchedException.php
+++ b/src/main/php/CatchedException.php
@@ -80,11 +80,12 @@ class CatchedException
*
* @api
* @param \bovigo\assert\predicate\Predicate|callable $predicate
+ * @param string $description optional additional description for failure message
* @return \bovigo\assert\CatchedException
*/
- public function with($predicate)
+ public function with($predicate, $description = null)
{
- assert($this->actualException, $predicate);
+ assert($this->actualException, $predicate, $description);
return $this;
}
diff --git a/src/test/php/CatchedExceptionTest.php b/src/test/php/CatchedExceptionTest.php
index <HASH>..<HASH> 100644
--- a/src/test/php/CatchedExceptionTest.php
+++ b/src/test/php/CatchedExceptionTest.php
@@ -9,6 +9,7 @@ namespace bovigo\assert;
use function bovigo\assert\predicate\contains;
use function bovigo\assert\predicate\isInstanceOf;
use function bovigo\assert\predicate\isSameAs;
+use function bovigo\assert\predicate\isNotSameAs;
/**
* Tests for bovigo\assert\CatchedException.
*
@@ -120,12 +121,7 @@ class CatchedExceptionTest extends \PHPUnit_Framework_TestCase
*/
public function withAppliesPredicateToException()
{
- $this->catchedException->with(
- function(\Exception $e)
- {
- return assert($e, isSameAs($this->exception));
- }
- );
+ $this->catchedException->with(isSameAs($this->exception));
}
/**
@@ -142,6 +138,24 @@ class CatchedExceptionTest extends \PHPUnit_Framework_TestCase
/**
* @test
*/
+ public function withThrowsAssertionFailureWhenPredicateFails()
+ {
+ expect(function() {
+ $this->catchedException->with(
+ isNotSameAs($this->exception),
+ 'additional info'
+ );
+ })
+ ->throws(AssertionFailure::class)
+ ->withMessage(
+ 'Failed asserting that object of type "BadFunctionCallException" is not identical to object of type "BadFunctionCallException".
+additional info'
+ );
+ }
+
+ /**
+ * @test
+ */
public function afterExecutesGivenPredicateWithGivenValue()
{
$this->catchedException->after(
|
allow additional description when additional assertion on exception is performed
|
bovigo_assert
|
train
|
0dfe23d0fe800eeffc4c4f4031e20a89612d42a0
|
diff --git a/openquake/engine/engine.py b/openquake/engine/engine.py
index <HASH>..<HASH> 100644
--- a/openquake/engine/engine.py
+++ b/openquake/engine/engine.py
@@ -312,7 +312,7 @@ def run_calc(job_id, oqparam, exports, hazard_calculation_id=None, **kw):
if len(executing_jobs) + len(submitted_jobs) > max_concurrent_jobs:
logs.LOG.warn('Wait for other jobs in queue to finish ...')
- while (len(executing_jobs) > max_concurrent_jobs - 1 or
+ while (len(executing_jobs) >= max_concurrent_jobs or
(min(submitted_jobs) < job_id
if submitted_jobs else False)):
time.sleep(15)
|
Small improvement in queue logic
Former-commit-id: e<I>d<I>e<I>a<I>a1e<I>f<I>c4
|
gem_oq-engine
|
train
|
7f41f08a293c04aa8f04b7b3d1d7098ccd645f20
|
diff --git a/internal/service/ce/anomaly_subscription_test.go b/internal/service/ce/anomaly_subscription_test.go
index <HASH>..<HASH> 100644
--- a/internal/service/ce/anomaly_subscription_test.go
+++ b/internal/service/ce/anomaly_subscription_test.go
@@ -53,6 +53,31 @@ func TestAccCEAnomalySubscription_basic(t *testing.T) {
})
}
+func TestAccCEAnomalySubscription_disappears(t *testing.T) {
+ var subscription costexplorer.AnomalySubscription
+ resourceName := "aws_ce_anomaly_subscription.test"
+ rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix)
+ domain := acctest.RandomDomainName()
+ address := acctest.RandomEmailAddress(domain)
+
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() { acctest.PreCheck(t) },
+ ProviderFactories: acctest.ProviderFactories,
+ CheckDestroy: testAccCheckAnomalySubscriptionDestroy,
+ ErrorCheck: acctest.ErrorCheck(t, costexplorer.EndpointsID),
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAnomalySubscriptionConfig_basic(rName, address),
+ Check: resource.ComposeTestCheckFunc(
+ testAccCheckAnomalySubscriptionExists(resourceName, &subscription),
+ acctest.CheckResourceDisappears(acctest.Provider, tfce.ResourceAnomalySubscription(), resourceName),
+ ),
+ ExpectNonEmptyPlan: true,
+ },
+ },
+ })
+}
+
func TestAccCEAnomalySubscription_Frequency(t *testing.T) {
var subscription costexplorer.AnomalySubscription
resourceName := "aws_ce_anomaly_subscription.test"
|
Add disappearance acceptance test for anomaly subscription
|
terraform-providers_terraform-provider-aws
|
train
|
19c58f9af00cf699b1c424484c55d54d92d657c3
|
diff --git a/presto-main/src/main/java/com/facebook/presto/sql/planner/optimizations/DesugaringOptimizer.java b/presto-main/src/main/java/com/facebook/presto/sql/planner/optimizations/DesugaringOptimizer.java
index <HASH>..<HASH> 100644
--- a/presto-main/src/main/java/com/facebook/presto/sql/planner/optimizations/DesugaringOptimizer.java
+++ b/presto-main/src/main/java/com/facebook/presto/sql/planner/optimizations/DesugaringOptimizer.java
@@ -24,6 +24,7 @@ import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.SymbolAllocator;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.AggregationNode.Aggregation;
+import com.facebook.presto.sql.planner.plan.ApplyNode;
import com.facebook.presto.sql.planner.plan.Assignments;
import com.facebook.presto.sql.planner.plan.FilterNode;
import com.facebook.presto.sql.planner.plan.JoinNode;
@@ -42,6 +43,8 @@ import java.util.Map;
import java.util.Optional;
import static com.facebook.presto.sql.analyzer.ExpressionAnalyzer.getExpressionTypes;
+import static com.facebook.presto.sql.planner.ExpressionExtractor.extractExpressionsNonRecursive;
+import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static java.util.Collections.emptyList;
@@ -89,6 +92,13 @@ public class DesugaringOptimizer
}
@Override
+ public PlanNode visitPlan(PlanNode node, RewriteContext<Void> context)
+ {
+ checkState(extractExpressionsNonRecursive(node).isEmpty(), "Unhandled plan node with expressions");
+ return super.visitPlan(node, context);
+ }
+
+ @Override
public PlanNode visitAggregation(AggregationNode node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getSource());
@@ -172,6 +182,16 @@ public class DesugaringOptimizer
.collect(toImmutableList()));
}
+ @Override
+ public PlanNode visitApply(ApplyNode node, RewriteContext<Void> context)
+ {
+ PlanNode input = context.rewrite(node.getInput());
+ PlanNode subquery = context.rewrite(node.getSubquery());
+ // ApplyNode.Assignments are synthetic expressions which are meaningful for ApplyNode transformations.
+ // They cannot contain any lambda or "sugared" expression
+ return new ApplyNode(node.getId(), input, subquery, node.getSubqueryAssignments(), node.getCorrelation());
+ }
+
private Expression desugar(Expression expression)
{
if (expression instanceof SymbolReference) {
|
Add ApplyNode support to DesugaringOptimizer
ApplyNode.Assignments are synthetic expressions which are meaningful
for ApplyNode transformations. They cannot contain any lambda or "sugared" expression.
|
prestodb_presto
|
train
|
5ffe6a2dc7304d0f05396d157f7c31c2ae176f22
|
diff --git a/src/processors/FromProcessor.php b/src/processors/FromProcessor.php
index <HASH>..<HASH> 100644
--- a/src/processors/FromProcessor.php
+++ b/src/processors/FromProcessor.php
@@ -122,10 +122,11 @@ class FromProcessor extends AbstractProcessor {
$parseInfo = $this->initParseInfo();
$expr = array();
$token_category = '';
-
+ $prevToken = '';
+
$skip_next = false;
$i = 0;
-
+
foreach ($tokens as $token) {
$upper = strtoupper(trim($token));
@@ -140,18 +141,42 @@ class FromProcessor extends AbstractProcessor {
}
switch ($upper) {
- case 'OUTER':
- case 'LEFT':
- case 'RIGHT':
case 'NATURAL':
case 'CROSS':
case ',':
- case 'JOIN':
case 'INNER':
case 'STRAIGHT_JOIN':
break;
+ case 'OUTER':
+ case 'JOIN':
+ if ($token_category === 'LEFT' || $token_category === 'RIGHT') {
+ $token_category = '';
+ $parseInfo['next_join_type'] = strtoupper(trim($prevToken)); // it seems to be a join
+ }
+ break;
+
+ case 'LEFT':
+ case 'RIGHT':
+ $token_category = $upper;
+ $prevToken = $token;
+ $i++;
+ continue 2;
+
default:
+ if ($token_category === 'LEFT' || $token_category === 'RIGHT') {
+ if ($upper === '') {
+ $prevToken .= $token;
+ break;
+ } else {
+ $token_category = ''; // it seems to be a function
+ $parseInfo['expression'] .= $prevToken;
+ if ($parseInfo['ref_type'] !== false) { // all after ON / USING
+ $parseInfo['ref_expr'] .= $prevToken;
+ }
+ $prevToken = '';
+ }
+ }
$parseInfo['expression'] .= $token;
if ($parseInfo['ref_type'] !== false) { // all after ON / USING
$parseInfo['ref_expr'] .= $token;
@@ -163,7 +188,7 @@ class FromProcessor extends AbstractProcessor {
$i++;
continue;
}
-
+
switch ($upper) {
case 'AS':
$parseInfo['alias'] = array('as' => true, 'name' => "", 'base_expr' => $token);
@@ -190,7 +215,7 @@ class FromProcessor extends AbstractProcessor {
case 'KEY':
case 'INDEX':
if ($token_category === 'CREATE') {
- $token_category = $upper; // TODO: what is it for a statement?
+ $token_category = $upper; // TODO: what is it for a statement?
continue 2;
}
if ($token_category === 'IDX_HINT') {
@@ -216,11 +241,6 @@ class FromProcessor extends AbstractProcessor {
$skip_next = true;
continue;
- case 'LEFT':
- case 'RIGHT':
- $parseInfo['next_join_type'] = $upper;
- break;
-
case 'STRAIGHT_JOIN':
$parseInfo['next_join_type'] = "STRAIGHT_JOIN";
if ($parseInfo['subquery']) {
@@ -246,8 +266,8 @@ class FromProcessor extends AbstractProcessor {
break;
default:
- // TODO: enhance it, so we can have base_expr to calculate the position of the keywords
- // build a subtree under "hints"
+ // TODO: enhance it, so we can have base_expr to calculate the position of the keywords
+ // build a subtree under "hints"
if ($token_category === 'IDX_HINT') {
$token_category = '';
$cur_hint = (count($parseInfo['hints']) - 1);
|
CHG: a quick and dirty hack for issue <I> has been added, thanks to Justin for the idea
FIX: issue <I>
git-svn-id: <URL>
|
greenlion_PHP-SQL-Parser
|
train
|
aa10e39e4071a99a9717b125b25d70221d6f6d9b
|
diff --git a/saltapi/netapi/rest_flask.py b/saltapi/netapi/rest_flask.py
index <HASH>..<HASH> 100644
--- a/saltapi/netapi/rest_flask.py
+++ b/saltapi/netapi/rest_flask.py
@@ -212,16 +212,16 @@ def start():
debug = apiopts.get('debug', False)
port = apiopts.get('port', 8080)
- cert = apiopts.get('cert', '')
- cert_priv = apiopts.get('cert_priv', '')
- verify_certs(cert, cert_priv)
+ ssl_crt = apiopts.get('ssl_crt', '')
+ ssl_key = apiopts.get('ssl_key', '')
+ verify_certs(ssl_crt, ssl_key)
app = build_app()
if debug:
app.run(host='0.0.0.0', port=port, debug=True)
else:
- ssl_a = cheroot.ssllib.ssl_builtin.BuiltinSSLAdapter(cert, cert_priv)
+ ssl_a = cheroot.ssllib.ssl_builtin.BuiltinSSLAdapter(ssl_crt, ssl_key)
wsgi_d = cheroot.wsgi.WSGIPathInfoDispatcher({'/': app})
server = cheroot.wsgi.WSGIServer(('0.0.0.0', port),
wsgi_app=wsgi_d,
|
Updated config names for the SSL cert/key
|
saltstack_salt
|
train
|
ae8b1cbd0dcf0b58e24328b3c99ad446e0ceaf34
|
diff --git a/ctk_cli/module.py b/ctk_cli/module.py
index <HASH>..<HASH> 100644
--- a/ctk_cli/module.py
+++ b/ctk_cli/module.py
@@ -75,22 +75,49 @@ class CLIModule(list):
__slots__ = ('path', ) + tuple(map(_tagToIdentifier, REQUIRED_ELEMENTS + OPTIONAL_ELEMENTS))
- def __init__(self, path, env = None):
+ def __init__(self, path = None, env = None, stream = None):
+ """
+ Parse a CLI specification from an XML document. This class can be
+ instantiated in three different modes:
+
+ 1. Pass the ``path`` parameter that points to a CLI executable. This
+ mode will run the executable in a subprocess, passing "--xml"
+ and parsing the output.
+ 2. Pass the ``path`` parameter representing the path to an XML
+ file on disk that contains the CLI description.
+ 3. Pass a file-like object in the ``stream`` kwarg that contains
+ the XML document to be parsed.
+
+ :param path: The path on the filesystem of either the CLI to
+ be interrogated, or a CLI XML description document.
+ :param env: If using mode 1 described above, setting this parameter
+ causes "env=<value>" to be passed as an additional command line argument
+ when invoking the subprocess to describe the CLI.
+ :param stream: An open file-like object that will stream the CLI
+ XML description document.
+ """
self.path = path
- if isCLIExecutable(path):
+ if path and isCLIExecutable(path):
elementTree = getXMLDescription(path, env = env)
- else:
+ elif path:
with file(path) as f:
elementTree = ET.parse(f)
+ elif stream is not None:
+ elementTree = ET.parse(stream)
+ else:
+ raise RuntimeError('You must pass either a path or stream when instantiating CLIModule.')
self._parse(elementTree.getroot())
def __repr__(self):
return '<CLIModule %r>' % (self.name, )
-
+
@property
def name(self):
+ if self.path is None:
+ return None
+
result = os.path.basename(self.path)
base, ext = os.path.splitext(result)
if ext in ('.exe', '.xml', '.py'):
@@ -154,9 +181,9 @@ class CLIParameters(list):
@classmethod
def parse(cls, elementTree):
self = cls()
-
+
childNodes = _parseElements(self, elementTree, 'parameters')
-
+
self.advanced = _parseBool(elementTree.get('advanced', 'false'))
for pnode in childNodes:
@@ -206,7 +233,7 @@ class CLIParameter(object):
OPTIONAL_ELEMENTS = (# either 'index' or at least one of 'flag' or 'longflag' is required
'flag', 'longflag', 'index',
'default', 'channel')
-
+
__slots__ = ("typ", "hidden", "_pythonType") + REQUIRED_ELEMENTS + OPTIONAL_ELEMENTS + (
"constraints", # scalarVectorType, scalarType
"multiple", # multipleType
@@ -219,10 +246,10 @@ class CLIParameter(object):
def __str__(self):
return "%s parameter '%s'" % (self.typ, self.identifier())
-
+
def __repr__(self):
return '<CLIParameter %r of type %s>' % (self.identifier(), self.typ)
-
+
def identifier(self):
result = self.name if self.name else self.longflag.lstrip('-')
if not result:
@@ -368,7 +395,7 @@ class CLIConstraints(object):
REQUIRED_ELEMENTS = ('step', )
OPTIONAL_ELEMENTS = ('minimum', 'maximum')
-
+
__slots__ = REQUIRED_ELEMENTS + OPTIONAL_ELEMENTS
@classmethod
|
Closes #<I>. Allow CLIModule to be invoked on a stream
|
commontk_ctk-cli
|
train
|
3a9cc101ff50ac3c3f882e978bda119afdf31f17
|
diff --git a/examples/src/java/com/twitter/heron/examples/streamlet/TransformsTopology.java b/examples/src/java/com/twitter/heron/examples/streamlet/TransformsTopology.java
index <HASH>..<HASH> 100644
--- a/examples/src/java/com/twitter/heron/examples/streamlet/TransformsTopology.java
+++ b/examples/src/java/com/twitter/heron/examples/streamlet/TransformsTopology.java
@@ -65,12 +65,14 @@ public final class TransformsTopology {
private static class IncrementTransformer implements SerializableTransformer<Integer, Integer> {
private static final long serialVersionUID = -3198491688219997702L;
private int increment;
+ private int total;
IncrementTransformer(int increment) {
this.increment = increment;
}
public void setup(Context context) {
+ context.registerMetric("InCrementMetric", 30, () -> total);
}
/**
@@ -79,6 +81,7 @@ public final class TransformsTopology {
*/
public void transform(Integer in, Consumer<Integer> consumer) {
int incrementedValue = in + increment;
+ total += increment;
consumer.accept(incrementedValue);
}
diff --git a/heron/api/src/java/com/twitter/heron/streamlet/Context.java b/heron/api/src/java/com/twitter/heron/streamlet/Context.java
index <HASH>..<HASH> 100644
--- a/heron/api/src/java/com/twitter/heron/streamlet/Context.java
+++ b/heron/api/src/java/com/twitter/heron/streamlet/Context.java
@@ -16,6 +16,7 @@ package com.twitter.heron.streamlet;
import java.io.Serializable;
import java.util.Map;
+import java.util.function.Supplier;
import com.twitter.heron.api.state.State;
@@ -50,6 +51,14 @@ public interface Context {
int getStreamPartition();
/**
+ * Register a metric function. This function will be called
+ * by the system every collectionInterval seconds and the resulting value
+ * will be collected
+ */
+ <T> void registerMetric(String metricName, int collectionInterval,
+ Supplier<T> metricFn);
+
+ /**
* The state where components can store any of their local state
* @return The state interface where users can store their local state
*/
diff --git a/heron/api/src/java/com/twitter/heron/streamlet/impl/ContextImpl.java b/heron/api/src/java/com/twitter/heron/streamlet/impl/ContextImpl.java
index <HASH>..<HASH> 100644
--- a/heron/api/src/java/com/twitter/heron/streamlet/impl/ContextImpl.java
+++ b/heron/api/src/java/com/twitter/heron/streamlet/impl/ContextImpl.java
@@ -16,7 +16,9 @@ package com.twitter.heron.streamlet.impl;
import java.io.Serializable;
import java.util.Map;
+import java.util.function.Supplier;
+import com.twitter.heron.api.metric.IMetric;
import com.twitter.heron.api.state.State;
import com.twitter.heron.api.topology.TopologyContext;
import com.twitter.heron.streamlet.Context;
@@ -59,7 +61,26 @@ public class ContextImpl implements Context {
}
@Override
+ public <T> void registerMetric(String metricName, int collectionInterval,
+ Supplier<T> metricFn) {
+ topologyContext.registerMetric(metricName, new StreamletMetric<T>(metricFn),
+ collectionInterval);
+ }
+
+ @Override
public State<Serializable, Serializable> getState() {
return state;
}
+
+ private class StreamletMetric<T> implements IMetric<T> {
+ private Supplier<T> metricFn;
+ StreamletMetric(Supplier<T> metricFn) {
+ this.metricFn = metricFn;
+ }
+
+ @Override
+ public T getValueAndReset() {
+ return metricFn.get();
+ }
+ }
}
|
Used for example (#<I>)
|
apache_incubator-heron
|
train
|
5ab1c42be430a2af75dce95c7fc327e61a812cd1
|
diff --git a/lib/simple_enum/version.rb b/lib/simple_enum/version.rb
index <HASH>..<HASH> 100644
--- a/lib/simple_enum/version.rb
+++ b/lib/simple_enum/version.rb
@@ -1,5 +1,5 @@
module SimpleEnum
# The current `SimpleEnum` version.
- VERSION = "2.0.0.rc3"
+ VERSION = "2.0.0"
end
|
release <I> (finally)
|
lwe_simple_enum
|
train
|
071df7d625ee18be64d225e78abb445fdbf17462
|
diff --git a/lib/patterns.rb b/lib/patterns.rb
index <HASH>..<HASH> 100644
--- a/lib/patterns.rb
+++ b/lib/patterns.rb
@@ -18,7 +18,6 @@ module Patterns
continuation
coverage
csv
- curses
date
delegate
digest
|
remove curses from gem name blacklist
|
rubygems_rubygems.org
|
train
|
4bc8d13b7f7d8f0e55c8da09982ee1da4cfc70b3
|
diff --git a/lib/editor.js b/lib/editor.js
index <HASH>..<HASH> 100644
--- a/lib/editor.js
+++ b/lib/editor.js
@@ -36,8 +36,8 @@ Editor.prototype._initHandlers = function () {
self.on('keypress', function (ch, key) {
var direction = {
- up: -1, down: 1,
left: -1, right: 1,
+ up: -1, down: 1,
pageup: -1, pagedown: 1,
home: -1, end: 1,
backspace: -1, 'delete': 1
@@ -53,19 +53,20 @@ Editor.prototype._initHandlers = function () {
}
var prevSelection = self.startSelection();
- if (!key.shift && !self.data.mouseDown) {
- if (prevSelection && coordinate.linear.cmp(prevSelection, self.cursor()) === direction) {
- self.cursor(prevSelection);
- }
+ if (!key.shift) {
self.startSelection(null);
} else if (!prevSelection) {
self.startSelection(self.cursor());
}
- if (key.name === 'up' || key.name === 'down') {
+ if (key.name === 'left' || key.name === 'right') {
+ if (!key.shift && prevSelection && coordinate.linear.cmp(prevSelection, self.cursor()) === direction) {
+ self.cursor(prevSelection).preferredCursorX(true);
+ } else {
+ self.moveCursorHorizontal(direction, key.ctrl);
+ }
+ } else if (key.name === 'up' || key.name === 'down') {
self.moveCursorVertical(direction, key.ctrl);
- } else if (key.name === 'left' || key.name === 'right') {
- self.moveCursorHorizontal(direction, key.ctrl);
} else if (key.name === 'pageup' || key.name === 'pagedown') {
self.moveCursorVertical(direction * self.options.pageLines);
} else if (key.name === 'home') {
|
Improves left/right keyboard cursor movement
|
slap-editor_slap
|
train
|
88149b19259ccfd67187d252905fc6fb74bdd1cd
|
diff --git a/src/main/com/mongodb/DB.java b/src/main/com/mongodb/DB.java
index <HASH>..<HASH> 100644
--- a/src/main/com/mongodb/DB.java
+++ b/src/main/com/mongodb/DB.java
@@ -790,6 +790,8 @@ public abstract class DB {
* @return the result of executing this operation
* @throws MongoException
* @mongodb.driver.manual administration/security-access-control/ Access Control
+ * @mongodb.driver.manual reference/command/createUser createUser
+ * @mongodb.driver.manual reference/command/updateUser updateUser
* @deprecated Use {@code DB.command} to call either the createUser or updateUser command
*/
@Deprecated
@@ -806,6 +808,8 @@ public abstract class DB {
* @return the result of executing this operation
* @throws MongoException
* @mongodb.driver.manual administration/security-access-control/ Access Control
+ * @mongodb.driver.manual reference/command/createUser createUser
+ * @mongodb.driver.manual reference/command/updateUser updateUser
* @deprecated Use {@code DB.command} to call either the createUser or updateUser command
*/
@Deprecated
@@ -826,6 +830,7 @@ public abstract class DB {
* @return the result of executing this operation
* @throws MongoException
* @mongodb.driver.manual administration/security-access-control/ Access Control
+ * @mongodb.driver.manual reference/command/dropUser dropUser
* @deprecated Use {@code DB.command} to call the dropUser command
*/
@Deprecated
|
Added reference links to deprecated auth doctags
JAVA-<I>
|
mongodb_mongo-java-driver
|
train
|
5b7ed79b6a787ca38ed1a0a8dceb836eee3a0c82
|
diff --git a/core/src/main/java/org/acegisecurity/context/HttpSessionContextIntegrationFilter.java b/core/src/main/java/org/acegisecurity/context/HttpSessionContextIntegrationFilter.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/acegisecurity/context/HttpSessionContextIntegrationFilter.java
+++ b/core/src/main/java/org/acegisecurity/context/HttpSessionContextIntegrationFilter.java
@@ -93,12 +93,10 @@ import org.springframework.util.ReflectionUtils;
*
* @author Ben Alex
* @author Patrick Burleson
- * @version $Id: HttpSessionContextIntegrationFilter.java 1784 2007-02-24
- * 21:00:24Z luke_t $
+ * @version $Id$
*/
public class HttpSessionContextIntegrationFilter implements InitializingBean, Filter {
- // ~ Static fields/initializers
- // =====================================================================================
+ //~ Static fields/initializers =====================================================================================
protected static final Log logger = LogFactory.getLog(HttpSessionContextIntegrationFilter.class);
@@ -106,8 +104,7 @@ public class HttpSessionContextIntegrationFilter implements InitializingBean, Fi
public static final String ACEGI_SECURITY_CONTEXT_KEY = "ACEGI_SECURITY_CONTEXT";
- // ~ Instance fields
- // ================================================================================================
+ //~ Instance fields ================================================================================================
private Class context = SecurityContextImpl.class;
@@ -168,8 +165,7 @@ public class HttpSessionContextIntegrationFilter implements InitializingBean, Fi
this.contextObject = generateNewContext();
}
- // ~ Methods
- // ========================================================================================================
+ //~ Methods ========================================================================================================
public void afterPropertiesSet() throws Exception {
if ((this.context == null) || (!SecurityContext.class.isAssignableFrom(this.context))) {
@@ -178,7 +174,7 @@ public class HttpSessionContextIntegrationFilter implements InitializingBean, Fi
+ ")");
}
- if ((forceEagerSessionCreation == true) && (allowSessionCreation == false)) {
+ if (forceEagerSessionCreation && !allowSessionCreation) {
throw new IllegalArgumentException(
"If using forceEagerSessionCreation, you must set allowSessionCreation to also be true");
}
|
SEC-<I>: Reformatted "divider" comments (//~ Methods=== etc). Simplified boolean expression in afterPropertiesSet.
|
spring-projects_spring-security
|
train
|
13bbdc09ece4f48e9b2537092a4505a3942e373b
|
diff --git a/railties/test/generators/app_generator_test.rb b/railties/test/generators/app_generator_test.rb
index <HASH>..<HASH> 100644
--- a/railties/test/generators/app_generator_test.rb
+++ b/railties/test/generators/app_generator_test.rb
@@ -214,14 +214,14 @@ class AppGeneratorTest < Rails::Generators::TestCase
def test_new_application_load_defaults
app_root = File.join(destination_root, "myfirstapp")
- run_generator [app_root, "--no-skip-javascript"]
+ run_generator [app_root]
output = nil
assert_file "#{app_root}/config/application.rb", /\s+config\.load_defaults #{Rails::VERSION::STRING.to_f}/
Dir.chdir(app_root) do
- output = `./bin/rails r "puts Rails.application.config.assets.unknown_asset_fallback"`
+ output = `SKIP_REQUIRE_WEBPACKER=true ./bin/rails r "puts Rails.application.config.assets.unknown_asset_fallback"`
end
assert_equal "false\n", output
|
Avoid `webpacker:install` if unnecessary
Since this is a test to check the behavior of `load_defaults`,
webpacker is unnecessary.
|
rails_rails
|
train
|
14d6c233fba6adf4dd18b0ce297dbbc8dc7cd27b
|
diff --git a/lib/moodlelib.php b/lib/moodlelib.php
index <HASH>..<HASH> 100644
--- a/lib/moodlelib.php
+++ b/lib/moodlelib.php
@@ -104,6 +104,7 @@ define('PARAM_PATH', 0x0020);
define('PARAM_HOST', 0x0040); // FQDN or IPv4 dotted quad
define('PARAM_URL', 0x0080);
define('PARAM_LOCALURL',0x0180); // NOT orthogonal to the others! Implies PARAM_URL!
+define('PARAM_CLEANFILE',0x0200);
/// PARAMETER HANDLING ////////////////////////////////////////////////////
@@ -198,22 +199,20 @@ function clean_param($param, $options) {
$param = strip_tags($param);
}
+ if ($options & PARAM_CLEANFILE) { // allow only safe characters
+ $param = clean_filename($param);
+ }
+
if ($options & PARAM_FILE) { // Strip all suspicious characters from filename
- $param = clean_param($param, PARAM_PATH);
- $pos = strrpos($param,'/');
- if ($pos !== FALSE) {
- $param = substr($param, $pos+1);
- }
- if ($param === '.' or $param === ' ') {
- $param = '';
- }
+ $param = ereg_replace('[[:cntrl:]]|[<>"`\|\':\\/]', '', $param);
+ $param = ereg_replace('\.\.+', '', $param);
}
if ($options & PARAM_PATH) { // Strip all suspicious characters from file path
$param = str_replace('\\\'', '\'', $param);
$param = str_replace('\\"', '"', $param);
$param = str_replace('\\', '/', $param);
- $param = ereg_replace('[[:cntrl:]]|[<>"`\|\']', '', $param);
+ $param = ereg_replace('[[:cntrl:]]|[<>"`\|\':]', '', $param);
$param = ereg_replace('\.\.+', '', $param);
$param = ereg_replace('//+', '/', $param);
}
|
changes in file/path cleaning SC#<I>, merged from MOODLE_<I>_STABLE
|
moodle_moodle
|
train
|
58e453f8206ccd6a87c02f7374c34ff3decb3171
|
diff --git a/endpoint.go b/endpoint.go
index <HASH>..<HASH> 100644
--- a/endpoint.go
+++ b/endpoint.go
@@ -561,7 +561,7 @@ func (ep *endpoint) sbLeave(sbox Sandbox, options ...EndpointOption) error {
sb.deleteHostsEntries(n.getSvcRecords(ep))
- if sb.needDefaultGW() {
+ if !sb.inDelete && sb.needDefaultGW() {
ep := sb.getEPwithoutGateway()
if ep == nil {
return fmt.Errorf("endpoint without GW expected, but not found")
|
Skip defaultGw check if sandbox is being deleted
- On Sandbox deletion, during Leave of each
connected endpoint, avoid the default gw
check, which may create an unnecessary
connection to the default gateway network.
|
docker_libnetwork
|
train
|
e7a6ef5484f705136641b74a817d7ea73b4128c4
|
diff --git a/tasks_list.go b/tasks_list.go
index <HASH>..<HASH> 100644
--- a/tasks_list.go
+++ b/tasks_list.go
@@ -24,6 +24,7 @@ type TasksListService struct {
taskId []string
actions []string
detailed *bool
+ human *bool
nodeId []string
parentNode string
parentTaskId *string
@@ -56,6 +57,12 @@ func (s *TasksListService) Detailed(detailed bool) *TasksListService {
return s
}
+// Human indicates whether to return time and byte values in human-readable format.
+func (s *TasksListService) Human(human bool) *TasksListService {
+ s.human = &human
+ return s
+}
+
// NodeId is a list of node IDs or names to limit the returned information;
// use `_local` to return information from the node you're connecting to,
// leave empty to get information from all nodes.
@@ -123,6 +130,9 @@ func (s *TasksListService) buildURL() (string, url.Values, error) {
if s.detailed != nil {
params.Set("detailed", fmt.Sprintf("%v", *s.detailed))
}
+ if s.human != nil {
+ params.Set("human", fmt.Sprintf("%v", *s.human))
+ }
if len(s.nodeId) > 0 {
params.Set("node_id", strings.Join(s.nodeId, ","))
}
diff --git a/tasks_list_test.go b/tasks_list_test.go
index <HASH>..<HASH> 100644
--- a/tasks_list_test.go
+++ b/tasks_list_test.go
@@ -44,15 +44,7 @@ func TestTasksListBuildURL(t *testing.T) {
func TestTasksList(t *testing.T) {
client := setupTestClientAndCreateIndexAndAddDocs(t) //, SetTraceLog(log.New(os.Stdout, "", 0)))
- esversion, err := client.ElasticsearchVersion(DefaultURL)
- if err != nil {
- t.Fatal(err)
- }
- if esversion < "2.3.0" {
- t.Skipf("Elasticsearch %v does not support Tasks Management API yet", esversion)
- }
-
- res, err := client.TasksList().Pretty(true).Do(context.TODO())
+ res, err := client.TasksList().Pretty(true).Human(true).Do(context.TODO())
if err != nil {
t.Fatal(err)
}
|
Add human property to Task List API
See #<I>
|
olivere_elastic
|
train
|
1f86b3197683ee7bbe74a2172dd59f19c2682cea
|
diff --git a/components/messages.js b/components/messages.js
index <HASH>..<HASH> 100644
--- a/components/messages.js
+++ b/components/messages.js
@@ -50,6 +50,12 @@ protobufs['GameServers.GetServerIPsBySteamID#1_Response'] = Schema.CGameServers_
ByteBuffer.DEFAULT_ENDIAN = ByteBuffer.LITTLE_ENDIAN;
SteamUser.prototype._send = function(emsg, body, callback) {
+ if((!this.steamID || !this.client.connected) && [Steam.EMsg.ChannelEncryptRequest, Steam.EMsg.ChannelEncryptResponse, Steam.EMsg.ChannelEncryptResult, Steam.EMsg.ClientLogon].indexOf(emsg) == -1) {
+ // We're disconnected, drop it
+ this.emit('debug', 'Dropping message ' + emsg + ' because we\'re not logged on.');
+ return;
+ }
+
var header = {
"msg": emsg
};
|
Don't attempt to send messages when logged off
|
DoctorMcKay_node-steam-user
|
train
|
5709c63498420d5ba77c1bb4fda92c974a55722e
|
diff --git a/email/libraries/Emailer.php b/email/libraries/Emailer.php
index <HASH>..<HASH> 100644
--- a/email/libraries/Emailer.php
+++ b/email/libraries/Emailer.php
@@ -609,11 +609,28 @@ class Emailer
foreach ( $_send->data['attachments'] AS $file ) :
- if ( ! $this->_add_attachment( $file ) ) :
+ // Has a "new name" beens et for the file?
+ // This requires the Email library be extended, suppossedly fixed in v3.0 CI branch
+ // Wodner if it'll ever be released.
+ // TODO: Make sure this still works if CI is ever updated.
+
+ if ( is_array( $file ) ) :
+
+ $_file = isset( $file[0] ) ? $file[0] : NULL;
+ $_filename = isset( $file[1] ) ? $file[1] : NULL;
+
+ else :
+
+ $_file = $file;
+ $_filename = NULL;
+
+ endif;
+
+ if ( ! $this->_add_attachment( $_file, $_filename ) ) :
if ( ! $graceful ) :
- show_error( 'EMAILER: Failed to add attachment: ' . $file );
+ show_error( 'EMAILER: Failed to add attachment: ' . $_file );
else :
@@ -859,7 +876,7 @@ class Emailer
* @param string file to add
* @return boolean
**/
- private function _add_attachment( $file )
+ private function _add_attachment( $file, $filename = NULL )
{
if ( ! file_exists( $file ) ) :
@@ -867,7 +884,7 @@ class Emailer
endif;
- if ( ! $this->ci->email->attach( $file ) ) :
+ if ( ! $this->ci->email->attach( $file, 'attachment', $filename ) ) :
return FALSE;
|
Allowing for attachments to be given custom names
|
nails_module-email
|
train
|
7dcb9b0616a33a1fca5a04fe5117a2f2669a82de
|
diff --git a/report/questioninstances/index.php b/report/questioninstances/index.php
index <HASH>..<HASH> 100644
--- a/report/questioninstances/index.php
+++ b/report/questioninstances/index.php
@@ -94,6 +94,7 @@ if ($requestedqtype) {
JOIN {question_categories} qc ON q.category = qc.id
JOIN {context} con ON con.id = qc.contextid
$sqlqtypetest
+ AND (q.parent = 0 OR q.parent = q.id)
GROUP BY qc.contextid, $ctxgroupby
ORDER BY numquestions DESC, numhidden ASC, con.contextlevel ASC, con.id ASC", $params);
|
MDL-<I> question instances report: ignore subquestions
We should not count subquestions of cloze questions (parent != 0) but we
do want to count random questions (parent = id).
|
moodle_moodle
|
train
|
b2f0e59592cf0e9334b532ebd21d3bd3bd1175df
|
diff --git a/src/Application.php b/src/Application.php
index <HASH>..<HASH> 100644
--- a/src/Application.php
+++ b/src/Application.php
@@ -137,6 +137,11 @@ class Application implements RequestHandler
}
}
+ public function handleWithoutCatchingExceptions(Request $request): Response
+ {
+ return $this->handleRequest($request);
+ }
+
/**
* Converts an HttpException to a Response.
*
@@ -235,8 +240,6 @@ class Application implements RequestHandler
throw $this->invalidReturnValue('controller', Response::class, $result);
}
}
- } catch (HttpException $e) {
- $response = $this->handleHttpException($e, $request);
} finally {
$event = new ControllerInvocatedEvent($request, $match, $instance);
$this->eventDispatcher->dispatch(ControllerInvocatedEvent::class, $event);
diff --git a/src/Event/ResponseReceivedEvent.php b/src/Event/ResponseReceivedEvent.php
index <HASH>..<HASH> 100644
--- a/src/Event/ResponseReceivedEvent.php
+++ b/src/Event/ResponseReceivedEvent.php
@@ -10,11 +10,6 @@ use Brick\App\RouteMatch;
/**
* Event dispatched after the controller response has been received.
- *
- * If an HttpException is caught during the controller method invocation,
- * the exception it is converted to a Response, and this event is dispatched as well.
- *
- * Other exceptions break the application flow and don't trigger this event.
*/
final class ResponseReceivedEvent
{
|
Add Application::handleWithoutCatchingExceptions()
Useful when re-routing a request that already triggered an exception.
ResponseReceivedEvent is not triggered anymore after an HttpException.
|
brick_app
|
train
|
74c9e23eeb6b48d169f70757ae3cdb72b24929e4
|
diff --git a/circle.yml b/circle.yml
index <HASH>..<HASH> 100644
--- a/circle.yml
+++ b/circle.yml
@@ -1,6 +1,6 @@
machine:
node:
- version: 5.5.0
+ version: 5.7.1
dependencies:
pre:
- sudo apt-get update && sudo apt-get install --only-upgrade google-chrome-stable
diff --git a/main.js b/main.js
index <HASH>..<HASH> 100755
--- a/main.js
+++ b/main.js
@@ -450,7 +450,11 @@ function getProp(){
var obj = arguments[0],
i;
- for(i = 1;i < arguments.length;i++) obj = obj[arguments[i]];
+ for(i = 1;i < arguments.length;i++){
+ if(obj == null) obj = {};
+ obj = obj[arguments[i]];
+ }
+
return obj;
}
diff --git a/test/main.nd.js b/test/main.nd.js
index <HASH>..<HASH> 100644
--- a/test/main.nd.js
+++ b/test/main.nd.js
@@ -230,6 +230,11 @@ t('\'get\' works',function(){
setter.value = {foo: {foo: 'foo'}};
assert.strictEqual(yd.done,true);
assert.strictEqual(foo.value,'foo');
+
+ setter.value = {foo: 5};
+ assert.strictEqual(foo.value,undefined);
+ setter.value = {bar: 5};
+ assert.strictEqual(foo.value,undefined);
});
t('\'watch\' works',function*(){
|
Fix getter.get() when properties don't exist
|
manvalls_y-setter
|
train
|
c74338f4091f9c2927d67ec299a831fdbc44a8ab
|
diff --git a/lib/upcoming/version.rb b/lib/upcoming/version.rb
index <HASH>..<HASH> 100644
--- a/lib/upcoming/version.rb
+++ b/lib/upcoming/version.rb
@@ -1,3 +1,3 @@
module Upcoming
- VERSION = '0.0.1'
+ VERSION = '0.1.0'
end
|
Version up to `<I>`
|
sldblog_upcoming
|
train
|
9fa06293d7649dceef261f4c5355feba202321f3
|
diff --git a/packages/net/env/browser/socketio.js b/packages/net/env/browser/socketio.js
index <HASH>..<HASH> 100644
--- a/packages/net/env/browser/socketio.js
+++ b/packages/net/env/browser/socketio.js
@@ -36,10 +36,15 @@ exports.Connector = Class(net.interfaces.Connector, function() {
this._connect = function (io) {
logger.debug('opening the connection');
var socket = io(this._opts.namespace);
- socket.on('connect', bind(this, function () {
- this.onConnect(new Transport(socket));
- }));
+ var transport = new Transport(socket);
+ var onConnect = bind(this, 'onConnect', transport);
+
socket.on('disconnect', bind(this, 'onDisconnect'));
+ if (socket.connected) {
+ onConnect();
+ } else {
+ socket.on('connect', onConnect);
+ }
}
});
|
when multiplexing, a socket may come back already connected
|
gameclosure_js.io
|
train
|
eda717abd8515cbfa257151f43b47348e732a7f6
|
diff --git a/docs/src/components/page-parts/meet-the-team/team.js b/docs/src/components/page-parts/meet-the-team/team.js
index <HASH>..<HASH> 100644
--- a/docs/src/components/page-parts/meet-the-team/team.js
+++ b/docs/src/components/page-parts/meet-the-team/team.js
@@ -85,6 +85,14 @@ export default [
name: 'Honorable mentions',
members: [
{
+ name: 'Allan Gaunt',
+ role: 'Senior Developer',
+ avatar: 'allan_gaunt.png',
+ github: 'webnoob',
+ desc: 'Wrote the initial BEX mode and much more'
+ },
+
+ {
name: 'Tobias Mesquita',
role: 'Senior Developer',
avatar: 'tobias_mesquita.jpg',
@@ -105,7 +113,7 @@ export default [
role: 'Developer / Community Staff',
avatar: 'noah_klayman.jpeg',
github: 'nklayman',
- desc: 'Capacitor App CLI mode'
+ desc: 'Wrote the initial Capacitor mode'
},
{
|
feat(docs): update meet the team
|
quasarframework_quasar
|
train
|
7e465f9491610c0a553cdc2e88d14c38c8692abf
|
diff --git a/lib/reporters/default.js b/lib/reporters/default.js
index <HASH>..<HASH> 100644
--- a/lib/reporters/default.js
+++ b/lib/reporters/default.js
@@ -84,8 +84,12 @@ module.exports = function (componentHint) {
}
// Final report
- process.stdout.write('\n');
- process.stdout.write('Total Warnings: ' + totalWarnings + '\n');
- process.stdout.write('Total Errors: ' + totalErrors + '\n');
+ if (totalWarnings > 0 || totalErrors > 0) {
+ process.stdout.write('\n');
+ process.stdout.write('Total Warnings: ' + totalWarnings + '\n');
+ process.stdout.write('Total Errors: ' + totalErrors + '\n');
+ } else {
+ process.stdout.write('No errors found!\n');
+ }
});
};
\ No newline at end of file
|
modified final report to show appropriate exit sentence when no errors are found
|
Wizcorp_component-hint
|
train
|
7cfbd05b8a6551d4b88780601711b334566f9436
|
diff --git a/structr-ui/src/test/java/org/structr/web/common/UiTest.java b/structr-ui/src/test/java/org/structr/web/common/UiTest.java
index <HASH>..<HASH> 100644
--- a/structr-ui/src/test/java/org/structr/web/common/UiTest.java
+++ b/structr-ui/src/test/java/org/structr/web/common/UiTest.java
@@ -66,7 +66,7 @@ public class UiTest extends StructrUiTest {
assertNotNull(tn);
assertEquals(new Integer(200), tn.getWidth());
assertEquals(new Integer(48), tn.getHeight()); // cropToFit = false
- assertEquals("image/" + Thumbnail.Format.png, tn.getContentType());
+ assertEquals("image/" + Thumbnail.Format.jpeg, tn.getContentType());
tx.success();
|
Fixes test: Default format for thumbnail is jpeg now.
|
structr_structr
|
train
|
b70e141699b1d7d56838bb90fdb4835d2d2751fd
|
diff --git a/lib/metar/station.rb b/lib/metar/station.rb
index <HASH>..<HASH> 100644
--- a/lib/metar/station.rb
+++ b/lib/metar/station.rb
@@ -12,7 +12,7 @@ module Metar
class << self
- @nsd_cccc = nil
+ @nsd_cccc = nil # Contains the text of the station list
attr_accessor :nsd_cccc # Allow tests to run from local file
def download_local
@@ -60,19 +60,21 @@ module Metar
end
attr_reader :cccc, :loaded
+ alias :code :cccc
# loaded? indicates whether the data has been collected from the Web
alias :loaded? :loaded
# No check is made on the existence of the station
def initialize(cccc, options = {})
- raise "Station identifier cannot be nil" if cccc.nil?
- raise "Station identifier must be a string" if not cccc.respond_to?('chars')
+ raise "Station identifier must not be nil" if cccc.nil?
+ raise "Station identifier must be a text" if not cccc.respond_to?('to_s')
@cccc = cccc
@name = options[:name]
@state = options[:state]
@country = options[:country]
@longitude = options[:longitude]
@latitude = options[:latitude]
+ @raw = options[:raw]
@loaded = false
end
@@ -103,6 +105,11 @@ module Metar
@longitude
end
+ def raw
+ load! if not @loaded
+ @raw
+ end
+
def exist?
Station.exist?(@cccc)
end
@@ -137,6 +144,7 @@ module Metar
:country => fields[5],
:latitude => fields[7],
:longitude => fields[8],
+ :raw => station.clone
}
end
@@ -156,8 +164,9 @@ module Metar
@name = noaa_data[:name]
@state = noaa_data[:state]
@country = noaa_data[:country]
- @longitude = noaa_data[:longitude]
- @latitude = noaa_data[:latitude]
+ @longitude = Station.to_longitude(noaa_data[:longitude])
+ @latitude = Station.to_latitude(noaa_data[:latitude])
+ @raw = noaa_data[:raw]
@loaded = true
self
end
diff --git a/test/metar_test_helper.rb b/test/metar_test_helper.rb
index <HASH>..<HASH> 100644
--- a/test/metar_test_helper.rb
+++ b/test/metar_test_helper.rb
@@ -12,4 +12,3 @@ Metar::Raw.instance_eval do
end
require 'test/unit'
-
|
Parse stations's long. and lat.
|
joeyates_metar-parser
|
train
|
53dc3a05220f67f8c178e074ca6af7d11863d04e
|
diff --git a/lib/puppet/network/http/pool.rb b/lib/puppet/network/http/pool.rb
index <HASH>..<HASH> 100644
--- a/lib/puppet/network/http/pool.rb
+++ b/lib/puppet/network/http/pool.rb
@@ -90,6 +90,8 @@ class Puppet::Network::HTTP::Pool
#
# @api private
def setsockopts(netio)
+ return unless netio
+
socket = netio.io
socket.setsockopt(Socket::SOL_SOCKET, Socket::SO_KEEPALIVE, true)
end
|
(maint) Account for stubbed Net::HTTP
When using webmock, the Net::HTTP object does not have a socket instance
variable, so don't fail in that case.
Cherry-pick of 6c5bbae<I>a<I>b8c<I>a<I>a<I>dc5d2c8db
|
puppetlabs_puppet
|
train
|
0763cf14a3a862cb1631b604e46d2660142ec3f8
|
diff --git a/js/models/conversations.js b/js/models/conversations.js
index <HASH>..<HASH> 100644
--- a/js/models/conversations.js
+++ b/js/models/conversations.js
@@ -403,7 +403,8 @@
message : message.getNotificationText(),
iconUrl : iconUrl,
imageUrl : message.getImageUrl(),
- conversationId : conversationId
+ conversationId : conversationId,
+ messageId : message.id
});
});
});
diff --git a/js/models/messages.js b/js/models/messages.js
index <HASH>..<HASH> 100644
--- a/js/models/messages.js
+++ b/js/models/messages.js
@@ -367,6 +367,9 @@
},
markRead: function(sync) {
this.unset('unread');
+ Whisper.Notifications.remove(Whisper.Notifications.where({
+ messageId: this.id
+ }));
return this.save();
}
|
Remove messages from notifications when read
Remove individual messages from Notifications when marked read.
Previously this was only done from the conversation model when marking
the entire conversation as read.
Fixes #<I>
// FREEBIE
|
ForstaLabs_librelay-node
|
train
|
d003d98f57d2ead4d8e261f00e66e28e5b2dcc8a
|
diff --git a/basescript/basescript.py b/basescript/basescript.py
index <HASH>..<HASH> 100644
--- a/basescript/basescript.py
+++ b/basescript/basescript.py
@@ -41,6 +41,9 @@ class BaseScript(object):
self.args.log_level = 'info'
self.args.metric_grouping_interval = self.METRIC_GROUPING_INTERVAL
+ if self.args.metric_grouping_interval is None:
+ self.args.metric_grouping_interval = self.METRIC_GROUPING_INTERVAL
+
self.log = init_logger(
fmt=self.args.log_format,
quiet=self.args.quiet,
diff --git a/basescript/log.py b/basescript/log.py
index <HASH>..<HASH> 100644
--- a/basescript/log.py
+++ b/basescript/log.py
@@ -16,7 +16,6 @@ import structlog
_GLOBAL_LOG_CONFIGURED = False
HOSTNAME = socket.gethostname()
-METRIC_GROUPING_INTERVAL = 1 # one second
METRICS_STATE = {}
METRICS_STATE_LOCK = Lock()
@@ -442,9 +441,6 @@ def init_logger(
metric_grouping_interval=None
):
- if metric_grouping_interval is None and not level == 'debug':
- metric_grouping_interval = METRIC_GROUPING_INTERVAL
-
global LOG
if LOG is not None:
return LOG
|
#<I> resolve the debug mode issue
|
deep-compute_basescript
|
train
|
85530e3f26526e7b3a06eb5c380661bbca95eff9
|
diff --git a/ipuz/puzzlekinds/wordsearch.py b/ipuz/puzzlekinds/wordsearch.py
index <HASH>..<HASH> 100644
--- a/ipuz/puzzlekinds/wordsearch.py
+++ b/ipuz/puzzlekinds/wordsearch.py
@@ -1,5 +1,9 @@
from ipuz.exceptions import IPUZException
-from ipuz.validators import validate_bool, validate_dict_of_strings
+from ipuz.validators import (
+ validate_bool,
+ validate_dict_of_strings,
+ validate_non_negative_int,
+)
def validate_dictionary(field_name, field_data):
@@ -12,11 +16,6 @@ def validate_showanswers(field_name, field_data):
raise IPUZException("Invalid {} value found".format(field_name))
-def validate_time(field_name, field_data):
- if type(field_data) is not int or field_data < 0:
- raise IPUZException("Invalid {} value found".format(field_name))
-
-
def validate_points(field_name, field_data):
if field_data not in ["linear", "log", None]:
raise IPUZException("Invalid {} value found".format(field_name))
@@ -25,7 +24,7 @@ def validate_points(field_name, field_data):
IPUZ_WORDSEARCH_VALIDATORS = {
"dictionary": validate_dictionary,
"showanswers": validate_showanswers,
- "time": validate_time,
+ "time": validate_non_negative_int,
"points": validate_points,
"zigzag": validate_bool,
"retrace": validate_bool,
diff --git a/ipuz/validators.py b/ipuz/validators.py
index <HASH>..<HASH> 100644
--- a/ipuz/validators.py
+++ b/ipuz/validators.py
@@ -14,6 +14,11 @@ def validate_int(field_name, field_data):
raise IPUZException("Invalid {} value found".format(field_name))
+def validate_non_negative_int(field_name, field_data):
+ if type(field_data) is not int or field_data < 0:
+ raise IPUZException("Invalid {} value found".format(field_name))
+
+
def validate_dict_of_strings(field_name, field_data):
if type(field_data) is not dict:
raise IPUZException("Invalid {} value found".format(field_name))
|
Refactored time validator to be a generic validator
|
svisser_ipuz
|
train
|
a64319d243d8535795e3ee070ba79222df2df4b3
|
diff --git a/src/Concerns/Cash.php b/src/Concerns/Cash.php
index <HASH>..<HASH> 100644
--- a/src/Concerns/Cash.php
+++ b/src/Concerns/Cash.php
@@ -7,7 +7,7 @@ use Money\Number;
trait Cash
{
/**
- * Get absolute value.
+ * Get formatted amount.
*
* @return string
*/
@@ -17,6 +17,18 @@ trait Cash
}
/**
+ * Get formatted cash.
+ *
+ * @return string
+ */
+ public function cash()
+ {
+ return $this->getFormatter()->format(
+ $this->newInstance($this->getCashAmount())
+ );
+ }
+
+ /**
* Get amount for cash.
*
* @return string
diff --git a/src/Concerns/Vat.php b/src/Concerns/Vat.php
index <HASH>..<HASH> 100644
--- a/src/Concerns/Vat.php
+++ b/src/Concerns/Vat.php
@@ -52,6 +52,30 @@ trait Vat
}
/**
+ * Get formatted amount with GST/VAT.
+ *
+ * @return string
+ */
+ public function amountWithVat()
+ {
+ return $this->getFormatter()->format(
+ static::asMoney($this->getAmountWithVat())
+ );
+ }
+
+ /**
+ * Get formatted cash with GST/VAT.
+ *
+ * @return string
+ */
+ public function cashWithVat()
+ {
+ return $this->getFormatter()->format(
+ $this->newInstance($this->getCashAmount())
+ );
+ }
+
+ /**
* Enable GST/VAT for calculation.
*
* @return $this
@@ -156,16 +180,6 @@ trait Vat
}
/**
- * Get absolute value.
- *
- * @return string
- */
- public function amountWithVat()
- {
- return $this->getFormatter()->format(static::asMoney($this->getAmountWithVat()));
- }
-
- /**
* Get the money object.
*
* @return \Money\Money
|
Update cash and cashWithVat method.
|
jomweb_ringgit
|
train
|
3389e740c39b63d178289e6aaf0130061108995a
|
diff --git a/javalite-async/src/main/java/org/javalite/async/Async.java b/javalite-async/src/main/java/org/javalite/async/Async.java
index <HASH>..<HASH> 100644
--- a/javalite-async/src/main/java/org/javalite/async/Async.java
+++ b/javalite-async/src/main/java/org/javalite/async/Async.java
@@ -204,12 +204,13 @@ public class Async {
private void configureListeners(Injector injector, List<QueueConfig> queueConfigs) throws JMSException, IllegalAccessException, InstantiationException {
for (QueueConfig queueConfig : queueConfigs) {
Queue queue = (Queue) jmsServer.lookup(QUEUE_NAMESPACE + queueConfig.getName());
+ CommandListener listener = (CommandListener) queueConfig.getCommandListenerClass().newInstance();
+ listener.setInjector(injector);
+ if(injector != null){
+ injector.injectMembers(listener);
+ }
+
for (int i = 0; i < queueConfig.getListenerCount(); i++) {
- CommandListener listener = (CommandListener) queueConfig.getCommandListenerClass().newInstance();
- listener.setInjector(injector);
- if(injector != null){
- injector.injectMembers(listener);
- }
Session session = consumerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
MessageConsumer consumer = session.createConsumer(queue);
consumer.setMessageListener(listener);
diff --git a/javalite-async/src/main/java/org/javalite/async/QueueConfig.java b/javalite-async/src/main/java/org/javalite/async/QueueConfig.java
index <HASH>..<HASH> 100644
--- a/javalite-async/src/main/java/org/javalite/async/QueueConfig.java
+++ b/javalite-async/src/main/java/org/javalite/async/QueueConfig.java
@@ -31,7 +31,7 @@ public class QueueConfig {
* Creates a specification of a queue for Async. By default the queue is durable (can save messages to hard drive).
*
* @param name human readable name of queue
- * @param commandListenerClass CommandListener class
+ * @param commandListenerClass CommandListener class. Listeners must be thread safe.
* @param listenerCount number of listeners to attach to a queue. Effectively this
* is a number of processing threads.
*/
@@ -45,7 +45,7 @@ public class QueueConfig {
* Creates a specification of a queue for Async
*
* @param name human readable name of queue
- * @param commandListenerClass CommandListener class
+ * @param commandListenerClass CommandListener class. Listeners must be thread safe.
* @param listenerCount number of listeners to attach to a queue. Effectively this
* is a number of processing threads.
* @param durable true to enable to save messages to hard drive, false otherwise.
|
#<I> Implement passing zipped messages as binary data in Async - small code optimization
|
javalite_activeweb
|
train
|
67583970196a8ac4d91dade05a364e92018b7536
|
diff --git a/activesupport/lib/active_support/testing/assertions.rb b/activesupport/lib/active_support/testing/assertions.rb
index <HASH>..<HASH> 100644
--- a/activesupport/lib/active_support/testing/assertions.rb
+++ b/activesupport/lib/active_support/testing/assertions.rb
@@ -31,7 +31,7 @@ module ActiveSupport
# perform_service(param: 'no_exception')
# end
def assert_nothing_raised
- yield
+ yield.tap { assert(true) }
rescue => error
raise Minitest::UnexpectedError.new(error)
end
diff --git a/railties/test/application/rake_test.rb b/railties/test/application/rake_test.rb
index <HASH>..<HASH> 100644
--- a/railties/test/application/rake_test.rb
+++ b/railties/test/application/rake_test.rb
@@ -229,7 +229,7 @@ module ApplicationTests
end
output = rails("test")
- assert_match(/7 runs, 9 assertions, 0 failures, 0 errors/, output)
+ assert_match(/7 runs, 11 assertions, 0 failures, 0 errors/, output)
assert_no_match(/Errors running/, output)
end
@@ -247,7 +247,7 @@ module ApplicationTests
with_rails_env("test") { rails("db:migrate") }
output = rails("test")
- assert_match(/5 runs, 7 assertions, 0 failures, 0 errors/, output)
+ assert_match(/5 runs, 9 assertions, 0 failures, 0 errors/, output)
assert_no_match(/Errors running/, output)
end
@@ -260,7 +260,7 @@ module ApplicationTests
end
output = rails("test")
- assert_match(/7 runs, 9 assertions, 0 failures, 0 errors/, output)
+ assert_match(/7 runs, 11 assertions, 0 failures, 0 errors/, output)
assert_no_match(/Errors running/, output)
end
diff --git a/railties/test/generators/scaffold_generator_test.rb b/railties/test/generators/scaffold_generator_test.rb
index <HASH>..<HASH> 100644
--- a/railties/test/generators/scaffold_generator_test.rb
+++ b/railties/test/generators/scaffold_generator_test.rb
@@ -573,7 +573,7 @@ class ScaffoldGeneratorTest < Rails::Generators::TestCase
`bin/rails g scaffold User name:string age:integer;
bin/rails db:migrate`
end
- assert_match(/8 runs, 10 assertions, 0 failures, 0 errors/, `bin/rails test 2>&1`)
+ assert_match(/8 runs, 12 assertions, 0 failures, 0 errors/, `bin/rails test 2>&1`)
end
end
@@ -593,7 +593,7 @@ class ScaffoldGeneratorTest < Rails::Generators::TestCase
assert_match(/class UsersController < ApplicationController/, content)
end
- assert_match(/8 runs, 10 assertions, 0 failures, 0 errors/, `bin/rails test 2>&1`)
+ assert_match(/8 runs, 12 assertions, 0 failures, 0 errors/, `bin/rails test 2>&1`)
end
end
@@ -607,7 +607,7 @@ class ScaffoldGeneratorTest < Rails::Generators::TestCase
`bin/rails g scaffold User name:string age:integer;
bin/rails db:migrate`
end
- assert_match(/8 runs, 10 assertions, 0 failures, 0 errors/, `bin/rails test 2>&1`)
+ assert_match(/8 runs, 12 assertions, 0 failures, 0 errors/, `bin/rails test 2>&1`)
end
end
@@ -621,7 +621,7 @@ class ScaffoldGeneratorTest < Rails::Generators::TestCase
`bin/rails g scaffold User name:string age:integer;
bin/rails db:migrate`
end
- assert_match(/6 runs, 8 assertions, 0 failures, 0 errors/, `bin/rails test 2>&1`)
+ assert_match(/6 runs, 10 assertions, 0 failures, 0 errors/, `bin/rails test 2>&1`)
end
end
@@ -635,7 +635,7 @@ class ScaffoldGeneratorTest < Rails::Generators::TestCase
`bin/rails g scaffold User name:string age:integer;
bin/rails db:migrate`
end
- assert_match(/6 runs, 8 assertions, 0 failures, 0 errors/, `bin/rails test 2>&1`)
+ assert_match(/6 runs, 10 assertions, 0 failures, 0 errors/, `bin/rails test 2>&1`)
end
end
|
Increment assertions count on assert_nothing_raised
|
rails_rails
|
train
|
c3e06ab38cb5cca8affe6d74292a676ba421b9c5
|
diff --git a/docs/README.md b/docs/README.md
index <HASH>..<HASH> 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -113,6 +113,12 @@ import ReactModal from 'react-modal';
describedby: "full_description"
}}
/*
+ Additional data attributes (optional).
+ */
+ data={{
+ background: "green"
+ }}
+ /*
Overlay ref callback.
*/
overlayRef={setOverlayRef}
diff --git a/specs/Modal.spec.js b/specs/Modal.spec.js
index <HASH>..<HASH> 100644
--- a/specs/Modal.spec.js
+++ b/specs/Modal.spec.js
@@ -393,6 +393,17 @@ export default () => {
unmountModal();
});
+ it("additional data attributes", () => {
+ const modal = renderModal(
+ { isOpen: true, data: { background: "green" } },
+ "hello"
+ );
+ mcontent(modal)
+ .getAttribute("data-background")
+ .should.be.eql("green");
+ unmountModal();
+ });
+
it("raises an exception if the appElement selector does not match", () => {
should(() => ariaAppHider.setElement(".test")).throw();
});
diff --git a/src/components/Modal.js b/src/components/Modal.js
index <HASH>..<HASH> 100644
--- a/src/components/Modal.js
+++ b/src/components/Modal.js
@@ -60,6 +60,7 @@ class Modal extends Component {
shouldReturnFocusAfterClose: PropTypes.bool,
parentSelector: PropTypes.func,
aria: PropTypes.object,
+ data: PropTypes.object,
role: PropTypes.string,
contentLabel: PropTypes.string,
shouldCloseOnEsc: PropTypes.bool,
diff --git a/src/components/ModalPortal.js b/src/components/ModalPortal.js
index <HASH>..<HASH> 100644
--- a/src/components/ModalPortal.js
+++ b/src/components/ModalPortal.js
@@ -51,6 +51,7 @@ export default class ModalPortal extends Component {
role: PropTypes.string,
contentLabel: PropTypes.string,
aria: PropTypes.object,
+ data: PropTypes.object,
children: PropTypes.node,
shouldCloseOnEsc: PropTypes.bool,
overlayRef: PropTypes.func,
@@ -315,9 +316,9 @@ export default class ModalPortal extends Component {
: className;
};
- ariaAttributes = items =>
+ attributesFromObject = (prefix, items) =>
Object.keys(items).reduce((acc, name) => {
- acc[`aria-${name}`] = items[name];
+ acc[`${prefix}-${name}`] = items[name];
return acc;
}, {});
@@ -346,8 +347,8 @@ export default class ModalPortal extends Component {
onClick={this.handleContentOnClick}
role={this.props.role}
aria-label={this.props.contentLabel}
- {...this.ariaAttributes(this.props.aria || {})}
- data-testid={this.props.testId}
+ {...this.attributesFromObject("aria", this.props.aria || {})}
+ {...this.attributesFromObject("data", this.props.data || {})}
>
{this.props.children}
</div>
|
[added] additional data attributes.
|
reactjs_react-modal
|
train
|
305c86eee569270f89f4418b7f41aeedbedb1d82
|
diff --git a/mapillary_tools/geotag/camm_builder.py b/mapillary_tools/geotag/camm_builder.py
index <HASH>..<HASH> 100644
--- a/mapillary_tools/geotag/camm_builder.py
+++ b/mapillary_tools/geotag/camm_builder.py
@@ -1,5 +1,4 @@
import dataclasses
-import time
import typing as T
from .. import geo
@@ -160,8 +159,6 @@ def create_camm_trak(
},
}
- now = int(time.time())
-
media_duration = sum(s.timedelta for s in raw_samples)
assert media_timescale <= builder.UINT64_MAX
@@ -171,8 +168,11 @@ def create_camm_trak(
"data": {
# use 64-bit version
"version": 1,
- "creation_time": now,
- "modification_time": now,
+ # TODO: find timestamps from mvhd?
+ # do not set dynamic timestamps (e.g. time.time()) here because we'd like to
+ # make sure the md5 of the new mp4 file unchanged
+ "creation_time": 0,
+ "modification_time": 0,
"timescale": media_timescale,
"duration": media_duration,
"language": 21956,
@@ -197,8 +197,11 @@ def create_camm_trak(
"data": {
# use 32-bit version of the box
"version": 0,
- "creation_time": now,
- "modification_time": now,
+ # TODO: find timestamps from mvhd?
+ # do not set dynamic timestamps (e.g. time.time()) here because we'd like to
+ # make sure the md5 of the new mp4 file unchanged
+ "creation_time": 0,
+ "modification_time": 0,
# will update the track ID later
"track_ID": 0,
# If the duration of this track cannot be determined then duration is set to all 1s (32-bit maxint).
|
fix: do not write dynamic timestamps which makes mp4 content change (#<I>)
|
mapillary_mapillary_tools
|
train
|
7914e55c9cc2b935e6844ee78e9eb509a8c2d8ba
|
diff --git a/ezp/Persistence/Storage/Legacy/Content/FieldValue/Converter/BinaryFileStorage.php b/ezp/Persistence/Storage/Legacy/Content/FieldValue/Converter/BinaryFileStorage.php
index <HASH>..<HASH> 100644
--- a/ezp/Persistence/Storage/Legacy/Content/FieldValue/Converter/BinaryFileStorage.php
+++ b/ezp/Persistence/Storage/Legacy/Content/FieldValue/Converter/BinaryFileStorage.php
@@ -39,7 +39,7 @@ class BinaryFileStorage implements Storage
public function storeFieldData( $fieldId, FieldValue $value, array $context )
{
$dbHandler = $context['connection'];
- $file = $value['file'];
+ $file = $value->data->file;
$q = $dbHandler->createInsertQuery();
$q->insertInto(
@@ -53,13 +53,13 @@ class BinaryFileStorage implements Storage
$q->bindValue( 0, null, \PDO::PARAM_INT )
)->set(
$dbHandler->quoteColumn( 'filename' ),
- $q->bindValue( basename( $value['file']->path ) )
+ $q->bindValue( basename( $file->path ) )
)->set(
$dbHandler->quoteColumn( 'mime_type' ),
- $q->bindValue( (string)$value['file']->contentType )
+ $q->bindValue( (string)$file->contentType )
)->set(
$dbHandler->quoteColumn( 'original_filename' ),
- $q->bindValue( $value['originalFilename'] )
+ $q->bindValue( $value->data->originalFilename )
)->set(
// @todo: How should I get the version number here ?
$dbHandler->quoteColumn( 'version' ),
|
Updated BinaryFile storage converter to use $value->data as field type value
|
ezsystems_ezpublish-kernel
|
train
|
9903e9a1167d8678a8fa5209f592e29b455ffa1c
|
diff --git a/pyrc/bots.py b/pyrc/bots.py
index <HASH>..<HASH> 100644
--- a/pyrc/bots.py
+++ b/pyrc/bots.py
@@ -30,6 +30,7 @@ class Bot(object):
self._inbuffer = ""
self._commands = []
+ self._privmsgs = []
self._threads = []
self.socket = None
self.initialized = False
@@ -96,25 +97,32 @@ class Bot(object):
if callable(func) and hasattr(func, '_type'):
if func._type == 'COMMAND':
self._commands.append(func)
- elif func._type == "REPEAT":
+ elif func._type == 'PRIVMSG':
+ self._privmsgs.append(func)
+ elif func._type == 'REPEAT':
thread = threads.JobThread(func, self)
self._threads.append(thread)
else:
raise "This is not a type I've ever heard of."
def receivemessage(self, channel, sender, message):
- self.parsecommand(channel, sender, message)
+ to_continue = self.parsecommand(channel, sender, message)
+ if not to_continue:
+ return
+
+ to_continue = self.parseprivmsg(channel, sender, message)
def parsecommand(self, channel, sender, message):
command = self.bot_called(message)
if not command:
- return
+ return False
for command_func in self._commands:
match = command_func._matcher.search(command)
if match:
group_dict = match.groupdict()
groups = match.groups()
+
if group_dict and (len(groups) > len(group_dict)):
# match.groups() also returns named parameters
raise "You cannot use both named and unnamed parameters"
@@ -123,7 +131,26 @@ class Bot(object):
else:
command_func(self, channel, sender, *groups)
- if self.config['break_on_match']: break
+ if self.config['break_on_match']: return True
+ return False
+
+ def parseprivmsg(self, channel, sender, message):
+ for privmsg_func in self._privmsgs:
+ match = privmsg_func._matcher.search(message)
+ if match:
+ group_dict = match.groupdict()
+ groups = match.groups()
+
+ if group_dict and (len(groups) > len(group_dict)):
+ # match.groups() also returns named parameters
+ raise "You cannot use both named and unnamed parameters"
+ elif group_dict:
+ privmsg_func(self, channel, sender, **group_dict)
+ else:
+ privmsg_func(self, channel, sender, *groups)
+
+ if self.config['break_on_match']: return True
+ return False
def bot_called(self, message):
"""
diff --git a/pyrc/utils/hooks.py b/pyrc/utils/hooks.py
index <HASH>..<HASH> 100644
--- a/pyrc/utils/hooks.py
+++ b/pyrc/utils/hooks.py
@@ -22,6 +22,21 @@ class command(object):
wrapped_command._matcher = matcher
return wrapped_command
+class privmsg(object):
+ def __init__(self, matcher=None):
+ self._matcher = matcher
+
+ def __call__(self, func):
+ # convert matcher to regular expression
+ matcher = re.compile(self._matcher)
+
+ @functools.wraps(func)
+ def wrapped_command(*args, **kwargs):
+ return func(*args, **kwargs)
+ wrapped_command._type = "PRIVMSG"
+ wrapped_command._matcher = matcher
+ return wrapped_command
+
def interval(milliseconds):
def wrapped(func):
@functools.wraps(func)
|
Add privmsg hook which runs when a PRIVMSG is sent.
|
sarenji_pyrc
|
train
|
5c1eb6d80cf1bbea1002f771a307b69682e37c10
|
diff --git a/public/js/actions/pause.js b/public/js/actions/pause.js
index <HASH>..<HASH> 100644
--- a/public/js/actions/pause.js
+++ b/public/js/actions/pause.js
@@ -46,9 +46,12 @@ function paused(pauseInfo) {
}
function loadedFrames(frames) {
- return {
- type: constants.LOADED_FRAMES,
- frames: frames
+ return ({ dispatch, getState, client }) => {
+ frames = frames.map(f => _updateFrame(getState(), f));
+ dispatch({
+ type: constants.LOADED_FRAMES,
+ frames: frames
+ });
};
}
|
Translate call stack frames (#<I>)
|
firefox-devtools_debugger
|
train
|
1cf25e9e9952c07c1864f879ec886750aab68cd5
|
diff --git a/lib/pg_charmer.rb b/lib/pg_charmer.rb
index <HASH>..<HASH> 100644
--- a/lib/pg_charmer.rb
+++ b/lib/pg_charmer.rb
@@ -62,11 +62,14 @@ end
ActionController::Base.instance_eval do
def use_db_connection(connection, args)
- klasses = args.delete(:for).map { |klass| if klass.is_a? String then klass else klass.name end }
+ default_connections = {}
+ klass_names = args.delete(:for)
+ klass_names.each do |klass_name|
+ default_connections[klass_name] = connection
+ end
+
before_filter(args) do |controller, &block|
- klasses.each do |klass|
- PgCharmer.overwritten_default_connections[klass] = connection
- end
+ PgCharmer.overwritten_default_connections.merge!(default_connections)
end
end
|
Premature optimization never hurt :)
|
sauspiel_postamt
|
train
|
816ab26bd096cb46142321d8e9531d1119f9bc2f
|
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index <HASH>..<HASH> 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -2,6 +2,10 @@
CHANGELOG
=========
+Unreleased
+==========
+* improvement: Added support for IGNORE_ERROR option when context is missing. `PR338 <https://github.com/aws/aws-xray-sdk-python/pull/338>`_.
+
2.9.0
==========
* bugfix: Change logging behavior to avoid overflow. `PR302 <https://github.com/aws/aws-xray-sdk-python/pull/302>`_.
diff --git a/aws_xray_sdk/core/context.py b/aws_xray_sdk/core/context.py
index <HASH>..<HASH> 100644
--- a/aws_xray_sdk/core/context.py
+++ b/aws_xray_sdk/core/context.py
@@ -10,7 +10,7 @@ from aws_xray_sdk import global_sdk_config
log = logging.getLogger(__name__)
MISSING_SEGMENT_MSG = 'cannot find the current segment/subsegment, please make sure you have a segment open'
-SUPPORTED_CONTEXT_MISSING = ('RUNTIME_ERROR', 'LOG_ERROR')
+SUPPORTED_CONTEXT_MISSING = ('RUNTIME_ERROR', 'LOG_ERROR', 'IGNORE_ERROR')
CXT_MISSING_STRATEGY_KEY = 'AWS_XRAY_CONTEXT_MISSING'
@@ -121,7 +121,7 @@ class Context(object):
"""
if self.context_missing == 'RUNTIME_ERROR':
raise SegmentNotFoundException(MISSING_SEGMENT_MSG)
- else:
+ elif self.context_missing == 'LOG_ERROR':
log.error(MISSING_SEGMENT_MSG)
def _is_subsegment(self, entity):
diff --git a/aws_xray_sdk/core/recorder.py b/aws_xray_sdk/core/recorder.py
index <HASH>..<HASH> 100644
--- a/aws_xray_sdk/core/recorder.py
+++ b/aws_xray_sdk/core/recorder.py
@@ -112,6 +112,7 @@ class AWSXRayRecorder(object):
RUNTIME_ERROR means the recorder will raise an exception.
LOG_ERROR means the recorder will only log the error and
do nothing.
+ IGNORE_ERROR means the recorder will do nothing
:param str daemon_address: The X-Ray daemon address where the recorder
sends data to.
:param str service: default segment name if creating a segment without
diff --git a/docs/configurations.rst b/docs/configurations.rst
index <HASH>..<HASH> 100644
--- a/docs/configurations.rst
+++ b/docs/configurations.rst
@@ -92,6 +92,7 @@ Supported strategies are:
* RUNTIME_ERROR: throw an SegmentNotFoundException
* LOG_ERROR: log an error and continue
+* IGNORE_ERROR: do nothing
Segment Dynamic Naming
----------------------
diff --git a/tests/ext/aiohttp/test_client.py b/tests/ext/aiohttp/test_client.py
index <HASH>..<HASH> 100644
--- a/tests/ext/aiohttp/test_client.py
+++ b/tests/ext/aiohttp/test_client.py
@@ -1,8 +1,11 @@
+import logging
+
import pytest
from aiohttp import ClientSession
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.async_context import AsyncContext
+from aws_xray_sdk.core.context import MISSING_SEGMENT_MSG
from aws_xray_sdk.core.exceptions.exceptions import SegmentNotFoundException
from aws_xray_sdk.ext.util import strip_url, get_hostname
from aws_xray_sdk.ext.aiohttp.client import aws_xray_trace_config
@@ -144,7 +147,8 @@ async def test_no_segment_raise(loop, recorder):
pass
-async def test_no_segment_not_raise(loop, recorder):
+async def test_no_segment_log_error(loop, recorder, caplog):
+ caplog.set_level(logging.ERROR)
xray_recorder.configure(context_missing='LOG_ERROR')
trace_config = aws_xray_trace_config()
status_code = 200
@@ -155,3 +159,19 @@ async def test_no_segment_not_raise(loop, recorder):
# Just check that the request was done correctly
assert status_received == status_code
+ assert MISSING_SEGMENT_MSG in [rec.message for rec in caplog.records]
+
+
+async def test_no_segment_ignore_error(loop, recorder, caplog):
+ caplog.set_level(logging.ERROR)
+ xray_recorder.configure(context_missing='IGNORE_ERROR')
+ trace_config = aws_xray_trace_config()
+ status_code = 200
+ url = 'http://{}/status/{}?foo=bar'.format(BASE_URL, status_code)
+ async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:
+ async with session.get(url) as resp:
+ status_received = resp.status
+
+ # Just check that the request was done correctly
+ assert status_received == status_code
+ assert MISSING_SEGMENT_MSG not in [rec.message for rec in caplog.records]
|
Added the option to ignore missing segments (#<I>)
* Add `IGNORE_ERROR` option for `AWS_XRAY_CONTEXT_MISSING`
|
aws_aws-xray-sdk-python
|
train
|
045e869b5482d48bbe115185c24d4174876e7741
|
diff --git a/bob/bio/spear/test/test_extractors.py b/bob/bio/spear/test/test_extractors.py
index <HASH>..<HASH> 100644
--- a/bob/bio/spear/test/test_extractors.py
+++ b/bob/bio/spear/test/test_extractors.py
@@ -19,12 +19,14 @@
import numpy
-import pkg_resources
import bob.bio.base
import bob.bio.spear
-from .test_annotators import _wav
+# from .test_annotators import _wav
+
+# import pkg_resources
+
regenerate_refs = False
@@ -41,6 +43,7 @@ def _compare(
assert numpy.allclose(data, reference, atol=1e-5)
+"""
def test_mfcc():
# read input wave file
rate, wav = _wav()
@@ -59,7 +62,7 @@ def test_mfcc():
bob.bio.base.save,
bob.bio.base.load,
)
-
+"""
# def test_lfcc():
# # read input wave file
|
Removing test case. This resource doesn't exist
|
bioidiap_bob.bio.spear
|
train
|
4813863215bab65e493147fc6c9fd34f0a78023f
|
diff --git a/client/state/state_database.go b/client/state/state_database.go
index <HASH>..<HASH> 100644
--- a/client/state/state_database.go
+++ b/client/state/state_database.go
@@ -209,7 +209,6 @@ func (s *BoltStateDB) getAllAllocations(tx *boltdd.Tx) ([]*structs.Allocation, m
// Handle upgrade path
ae.Alloc.Canonicalize()
- ae.Alloc.Job.Canonicalize()
allocs = append(allocs, ae.Alloc)
}
diff --git a/nomad/structs/structs.go b/nomad/structs/structs.go
index <HASH>..<HASH> 100644
--- a/nomad/structs/structs.go
+++ b/nomad/structs/structs.go
@@ -7661,9 +7661,7 @@ func (a *Allocation) Canonicalize() {
a.AllocatedResources = &ar
}
- // TODO: Investigate if we should canonicalize the job
- // it may be out of sync with respect to the original job
- // a.Job.Canonicalize()
+ a.Job.Canonicalize()
}
func (a *Allocation) copyImpl(job bool) *Allocation {
|
actually always canonicalize alloc.Job
alloc.Job may be stale as well and need to migrate it. It does cost
extra cycles but should be negligible.
|
hashicorp_nomad
|
train
|
b55a3a93982b1243c41fb2c50b7803538e61fc56
|
diff --git a/Classes/TrackingFrontend.php b/Classes/TrackingFrontend.php
index <HASH>..<HASH> 100644
--- a/Classes/TrackingFrontend.php
+++ b/Classes/TrackingFrontend.php
@@ -183,7 +183,8 @@ class TrackingFrontend extends \Frontend
$child = $event->getLayerData();
$level = $child['pid'];
$stringClass = $GLOBALS['con4gis']['stringClass'];
-
+ $objMap = C4gMapsModel::findById($child['id']);
+ $child['raw'] = $objMap;
if (in_array($child['type'], $this->arrAllowedLocationTypes)) {
$arrData = [];
$arrData['excludeFromSingleLayer'] = true;
@@ -311,7 +312,7 @@ class TrackingFrontend extends \Frontend
'settings' => [
'loadAsync' => true,
'refresh' => true,
- // "interval" => getTrackingConfig -> getHTTPInterval
+ 'interval' => 60000,
'crossOrigin' => false,
],
],
diff --git a/Classes/TrackingService.php b/Classes/TrackingService.php
index <HASH>..<HASH> 100644
--- a/Classes/TrackingService.php
+++ b/Classes/TrackingService.php
@@ -86,6 +86,9 @@ class TrackingService extends \Controller
$intGroupId = \Input::get('useGroup');
$objPositions = $this->Database->prepare('SELECT tl_c4g_tracking_devices.name, tl_c4g_tracking_positions.* FROM tl_c4g_tracking_devices LEFT JOIN tl_c4g_tracking_positions ON tl_c4g_tracking_devices.lastPositionId=tl_c4g_tracking_positions.id WHERE tl_c4g_tracking_devices.lastPositionId>0 AND tl_c4g_tracking_devices.groupId=?')
->execute($intGroupId);
+ } else if ($intMapsItem) {
+ $objPositions = $this->Database->prepare('SELECT tl_c4g_tracking_devices.name, tl_c4g_tracking_positions.* FROM tl_c4g_tracking_devices LEFT JOIN tl_c4g_tracking_positions ON tl_c4g_tracking_devices.lastPositionId=tl_c4g_tracking_positions.id WHERE tl_c4g_tracking_devices.lastPositionId>0 AND tl_c4g_tracking_devices.mapStructureId=?')
+ ->execute($intMapsItem);
} else {
// Fallback: keine weiteren Einstellungen -> alle Geräte mit Positionsdaten
$objPositions = $this->Database->prepare('SELECT tl_c4g_tracking_devices.name, tl_c4g_tracking_positions.* FROM tl_c4g_tracking_devices LEFT JOIN tl_c4g_tracking_positions ON tl_c4g_tracking_devices.lastPositionId=tl_c4g_tracking_positions.id WHERE tl_c4g_tracking_devices.lastPositionId>0')
|
only deliver elements for mapStructure
|
Kuestenschmiede_TrackingBundle
|
train
|
d320c43c7b350e3536c3f1dc5ea7e54e21ee50d4
|
diff --git a/lib/scheman/diff.rb b/lib/scheman/diff.rb
index <HASH>..<HASH> 100644
--- a/lib/scheman/diff.rb
+++ b/lib/scheman/diff.rb
@@ -41,7 +41,7 @@ module Scheman
# @return [Array<Hash>] ALTER TABLE statements for adding new fields
def add_fields
- after_schema.tables.inject([]) do |result, after_table|
+ after_schema.tables.each_with_object([]) do |after_table, result|
if before_table = before_schema.tables_indexed_by_name[after_table.name]
after_table.fields.each do |after_field|
unless before_table.fields_indexed_by_name[after_field.name]
@@ -51,13 +51,12 @@ module Scheman
end
end
end
- result
end
end
# @return [Array<Hash>] ALTER TABLE statements for dropping fields
def drop_fields
- after_schema.tables.inject([]) do |result, after_table|
+ after_schema.tables.each_with_object([]) do |after_table, result|
if before_table = before_schema.tables_indexed_by_name[after_table.name]
before_table.fields.each do |before_field|
unless after_table.fields_indexed_by_name[before_field.name]
@@ -67,7 +66,6 @@ module Scheman
end
end
end
- result
end
end
|
Refactor: reduce the last lines in blocks by using .each_with_object method
|
r7kamura_scheman
|
train
|
6292ae75ffa29f371f57103565f86c5e786dc06a
|
diff --git a/src/girocheckout-sdk/GiroCheckout_SDK_Request.php b/src/girocheckout-sdk/GiroCheckout_SDK_Request.php
index <HASH>..<HASH> 100644
--- a/src/girocheckout-sdk/GiroCheckout_SDK_Request.php
+++ b/src/girocheckout-sdk/GiroCheckout_SDK_Request.php
@@ -1,6 +1,7 @@
<?php
namespace girosolution\GiroCheckout_SDK;
+use girosolution\GiroCheckout_SDK\api\GiroCheckout_SDK_AbstractApi;
use girosolution\GiroCheckout_SDK\helper\GiroCheckout_SDK_Debug_helper;
use girosolution\GiroCheckout_SDK\helper\GiroCheckout_SDK_Curl_helper;
use girosolution\GiroCheckout_SDK\helper\GiroCheckout_SDK_TransactionType_helper;
@@ -43,6 +44,7 @@ class GiroCheckout_SDK_Request {
/**
* Stores the api call request method object
+ * @var GiroCheckout_SDK_AbstractApi $requestMethod
*/
private $requestMethod;
@@ -194,6 +196,16 @@ class GiroCheckout_SDK_Request {
}
/**
+ * Set URL to post requests to dev.girosolution.de.
+ * This can be used when the method of changing the apache environment variable
+ * GIROCHECKOUT_SERVER isn't applicable.
+ * Call before submit.
+ */
+ public function setDevServer() {
+ $this->requestMethod->setDevServer();
+ }
+
+ /**
* Submits the request to the GiroCheckout API by using the given request method. Uses all given and needed
* params in the correct order.
*
|
V. <I>, setDevSever method
|
girosolution_girocheckout_sdk
|
train
|
2277a9c479bc6e8551e9fee4cff18a3cc6e1e016
|
diff --git a/src/backends/github/implementation.js b/src/backends/github/implementation.js
index <HASH>..<HASH> 100644
--- a/src/backends/github/implementation.js
+++ b/src/backends/github/implementation.js
@@ -8,7 +8,7 @@ class API {
this.token = token;
this.repo = repo;
this.branch = branch;
- this.baseURL = API_ROOT + `/repos/${this.repo}`;
+ this.repoURL = `/repos/${this.repo}`;
}
user() {
@@ -20,9 +20,9 @@ class API {
return cache.then((cached) => {
if (cached) { return cached; }
- return this.request(`/contents/${path}`, {
- headers: {Accept: 'application/vnd.github.VERSION.raw'},
- data: {ref: this.branch},
+ return this.request(`${this.repoURL}/contents/${path}`, {
+ headers: { Accept: 'application/vnd.github.VERSION.raw' },
+ data: { ref: this.branch },
cache: false
}).then((result) => {
if (sha) {
@@ -35,8 +35,8 @@ class API {
}
listFiles(path) {
- return this.request(`/contents/${path}`, {
- data: {ref: this.branch}
+ return this.request(`${this.repoURL}/contents/${path}`, {
+ data: { ref: this.branch }
});
}
@@ -60,7 +60,7 @@ class API {
request(path, options = {}) {
const headers = this.requestHeaders(options.headers || {});
- return fetch(this.baseURL + path, {...options, headers: headers}).then((response) => {
+ return fetch(API_ROOT + path, { ...options, headers: headers }).then((response) => {
if (response.headers.get('Content-Type').match(/json/)) {
return this.parseJsonResponse(response);
}
|
bugfix: request cannot be bound to repo url
|
netlify_netlify-cms
|
train
|
7bd947dfbe2ffc44f786723267133fd31420bab9
|
diff --git a/source/core/oxserversmanager.php b/source/core/oxserversmanager.php
index <HASH>..<HASH> 100644
--- a/source/core/oxserversmanager.php
+++ b/source/core/oxserversmanager.php
@@ -64,8 +64,9 @@ class oxServersManager
{
$aServersData = $this->_getServersData();
$aServersData[$oNode->getId()] = array(
+ 'id' => $oNode->getId(),
'timestamp' => $oNode->getTimestamp(),
- 'serverIp' => $oNode->getIp(),
+ 'ip' => $oNode->getIp(),
'lastFrontendUsage' => $oNode->getLastFrontendUsage(),
'lastAdminUsage' => $oNode->getLastAdminUsage(),
);
diff --git a/tests/unit/core/oxserversmanagerTest.php b/tests/unit/core/oxserversmanagerTest.php
index <HASH>..<HASH> 100644
--- a/tests/unit/core/oxserversmanagerTest.php
+++ b/tests/unit/core/oxserversmanagerTest.php
@@ -83,8 +83,9 @@ class Unit_Core_oxServersManagerTest extends OxidTestCase
$aExpectedServerData = array(
'serverNameHash1' => array(
+ 'id' => 'serverNameHash1',
'timestamp' => 'timestamp',
- 'serverIp' => '127.0.0.1',
+ 'ip' => '127.0.0.1',
'lastFrontendUsage' => 'frontendUsageTimestamp',
'lastAdminUsage' => 'adminUsageTimestamp',
),
@@ -97,8 +98,9 @@ class Unit_Core_oxServersManagerTest extends OxidTestCase
oxRegistry::getConfig()->setConfigParam('aServersData', array(
'serverNameHash1' => array(),
'serverNameHash2' => array(
+ 'id' => 'serverNameHash2',
'timestamp' => 'timestamp',
- 'serverIp' => '127.0.0.1',
+ 'ip' => '127.0.0.1',
'lastFrontendUsage' => 'frontendUsageTimestamp',
'lastAdminUsage' => 'adminUsageTimestamp',
),
@@ -118,8 +120,9 @@ class Unit_Core_oxServersManagerTest extends OxidTestCase
$aExpectedServerData = array(
'serverNameHash1' => array(),
'serverNameHash2' => array(
+ 'id' => 'serverNameHash2',
'timestamp' => 'timestampUpdated',
- 'serverIp' => '127.0.0.255',
+ 'ip' => '127.0.0.255',
'lastFrontendUsage' => 'frontendUsageTimestampUpdated',
'lastAdminUsage' => 'adminUsageTimestampUpdated',
),
@@ -146,8 +149,9 @@ class Unit_Core_oxServersManagerTest extends OxidTestCase
$aExpectedServerData = array(
'serverNameHash1' => array(
+ 'id' => 'serverNameHash1',
'timestamp' => 'timestampUpdated',
- 'serverIp' => '127.0.0.1',
+ 'ip' => '127.0.0.1',
'lastFrontendUsage' => 'frontendUsageTimestampUpdated',
'lastAdminUsage' => 'adminUsageTimestampUpdated',
),
|
ESDEV-<I> Save information about frontend servers so it could be used for license check.
Updating servers manager so that it would save servers information to configuration with format used for sending xml to server.
|
OXID-eSales_oxideshop_ce
|
train
|
cfd3b34ac258d732488dffcf7d168841fdad36b7
|
diff --git a/spec/kopflos/xvfb_spec.rb b/spec/kopflos/xvfb_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/kopflos/xvfb_spec.rb
+++ b/spec/kopflos/xvfb_spec.rb
@@ -16,6 +16,15 @@ module Kopflos
Xvfb.stub!(:determine_font_path).and_return('/usr/share/fonts/X11/misc')
@xvfb = Xvfb.new
end
+
+ after :each do
+ @xvfb.stop
+ end
+
+ it "should authorize itself on startup" do
+ @xvfb.should_receive(:authorize).and_return(true)
+ @xvfb.start
+ end
end
end
|
Xvfb authorize itself on startup
|
niklas_kopflos
|
train
|
2ab363ab8228d11a2da87e76eaee28c8c1e8403a
|
diff --git a/tests/Doctrine/Tests/ORM/Functional/Ticket/DDC6303Test.php b/tests/Doctrine/Tests/ORM/Functional/Ticket/DDC6303Test.php
index <HASH>..<HASH> 100644
--- a/tests/Doctrine/Tests/ORM/Functional/Ticket/DDC6303Test.php
+++ b/tests/Doctrine/Tests/ORM/Functional/Ticket/DDC6303Test.php
@@ -2,7 +2,6 @@
namespace Doctrine\Tests\ORM\Functional\Ticket;
-use Doctrine\DBAL\Schema\SchemaException;
use Doctrine\ORM\Tools\ToolsException;
use Doctrine\Tests\OrmFunctionalTestCase;
@@ -11,7 +10,7 @@ use Doctrine\Tests\OrmFunctionalTestCase;
*/
class DDC6303Test extends OrmFunctionalTestCase
{
- public function setUp()
+ public function setUp() : void
{
parent::setUp();
@@ -25,7 +24,7 @@ class DDC6303Test extends OrmFunctionalTestCase
}
}
- public function testMixedTypeHydratedCorrectlyInJoinedInheritance()
+ public function testMixedTypeHydratedCorrectlyInJoinedInheritance() : void
{
$a = new DDC6303ChildA();
$b = new DDC6303ChildB();
@@ -71,7 +70,7 @@ class DDC6303Test extends OrmFunctionalTestCase
}
}
- public function testEmptyValuesInJoinedInheritance()
+ public function testEmptyValuesInJoinedInheritance() : void
{
$stringEmptyData = '';
$stringZeroData = 0;
|
#<I> #<I> adding `void` hints where applicable
|
doctrine_orm
|
train
|
ccf4baaff099e50c877b18999fe53e11e6005542
|
diff --git a/performanceplatform/collector/__init__.py b/performanceplatform/collector/__init__.py
index <HASH>..<HASH> 100644
--- a/performanceplatform/collector/__init__.py
+++ b/performanceplatform/collector/__init__.py
@@ -2,6 +2,6 @@
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
-__VERSION__ = "1.0.0"
-__AUTHOR__ = "GDS Developers"
-__AUTHOR_EMAIL__ = ""
+__version__ = "1.0.0"
+__author__ = "GDS Developers"
+__author_email__ = "performance@digital.cabinet-office.gov.uk"
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -31,7 +31,7 @@ class Setup(object):
'performanceplatform/collector/__init__.py'
)
version = re.search(
- r"^__VERSION__ = ['\"]([^'\"]*)['\"]",
+ r"^__version__ = ['\"]([^'\"]*)['\"]",
data,
re.M | re.I
).group(1).strip()
@@ -58,8 +58,8 @@ setup(
packages=find_packages(exclude=['test*']),
# metadata for upload to PyPI
- author=collector.__AUTHOR__,
- author_email=collector.__AUTHOR_EMAIL__,
+ author=collector.__author__,
+ author_email=collector.__author_email__,
maintainer='Government Digital Service',
url='https://github.com/alphagov/performanceplatform-collector',
|
Update collector package metadata
- The convention for 'Version Bookkeeping' in PEP8 (which we know and love) [is lowercase](<URL>)
- <URL>
|
alphagov_performanceplatform-collector
|
train
|
4a5d0698957444ca92da26fdf295f177a91832fe
|
diff --git a/spock/mcp/mcpacket_extensions.py b/spock/mcp/mcpacket_extensions.py
index <HASH>..<HASH> 100644
--- a/spock/mcp/mcpacket_extensions.py
+++ b/spock/mcp/mcpacket_extensions.py
@@ -427,7 +427,7 @@ class ExtensionPSTC38:
o += datautils.pack(MC_VARINT, item['gamemode'])
if act in [mcdata.PL_ADD_PLAYER, mcdata.PL_UPDATE_LATENCY]:
o += datautils.pack(MC_VARINT, item['ping'])
- if act in [mcdata.PL_ADD_PLAYER,mcdata.PL_UPDATE_DISPLAY]:
+ if act in [mcdata.PL_ADD_PLAYER, mcdata.PL_UPDATE_DISPLAY]:
o += datautils.pack(MC_BOOL, item['has_display'])
if item['has_display']:
o += datautils.pack(MC_CHAT, item['display_name'])
|
OH GOD I BROKE THE BUILD
|
SpockBotMC_SpockBot
|
train
|
d1fe06e26e3e435a6fae00c763f79ad15129a559
|
diff --git a/src/java/voldemort/client/protocol/RequestFormatFactory.java b/src/java/voldemort/client/protocol/RequestFormatFactory.java
index <HASH>..<HASH> 100644
--- a/src/java/voldemort/client/protocol/RequestFormatFactory.java
+++ b/src/java/voldemort/client/protocol/RequestFormatFactory.java
@@ -16,9 +16,6 @@
package voldemort.client.protocol;
-import java.util.EnumMap;
-
-import voldemort.VoldemortException;
import voldemort.client.protocol.pb.ProtoBuffClientRequestFormat;
import voldemort.client.protocol.vold.VoldemortNativeClientRequestFormat;
@@ -31,21 +28,17 @@ import voldemort.client.protocol.vold.VoldemortNativeClientRequestFormat;
*/
public class RequestFormatFactory {
- private EnumMap<RequestFormatType, RequestFormat> typeToInstance;
-
- public RequestFormatFactory() {
- this.typeToInstance = new EnumMap<RequestFormatType, RequestFormat>(RequestFormatType.class);
- this.typeToInstance.put(RequestFormatType.VOLDEMORT_V1,
- new VoldemortNativeClientRequestFormat());
- this.typeToInstance.put(RequestFormatType.PROTOCOL_BUFFERS,
- new ProtoBuffClientRequestFormat());
- }
-
- public RequestFormat getRequestFormat(RequestFormatType type) {
- RequestFormat format = this.typeToInstance.get(type);
- if(type == null)
- throw new VoldemortException("Unknown wire format " + type);
- return format;
+ public RequestFormatFactory() {}
+
+ public synchronized RequestFormat getRequestFormat(RequestFormatType type) {
+ switch(type) {
+ case VOLDEMORT_V1:
+ return new VoldemortNativeClientRequestFormat();
+ case PROTOCOL_BUFFERS:
+ return new ProtoBuffClientRequestFormat();
+ default:
+ throw new IllegalArgumentException("Unknown request format type: " + type);
+ }
}
}
|
Remove hard client dependency on protocol buffers.
|
voldemort_voldemort
|
train
|
6498df7b0290139df57629568d824dfa242900cc
|
diff --git a/accounts/account_manager.go b/accounts/account_manager.go
index <HASH>..<HASH> 100644
--- a/accounts/account_manager.go
+++ b/accounts/account_manager.go
@@ -164,14 +164,15 @@ func (am *Manager) Lock(addr common.Address) error {
return nil
}
-// TimedUnlock unlocks the given account with. The account
+// TimedUnlock unlocks the given account with the passphrase. The account
// stays unlocked for the duration of timeout. A timeout of 0 unlocks the account
-// until the program exits. The account must match a unique key.
+// until the program exits. The account must match a unique key file.
//
-// If the accout is already unlocked, TimedUnlock extends or shortens
-// the active unlock timeout.
-func (am *Manager) TimedUnlock(a Account, keyAuth string, timeout time.Duration) error {
- _, key, err := am.getDecryptedKey(a, keyAuth)
+// If the account address is already unlocked for a duration, TimedUnlock extends or
+// shortens the active unlock timeout. If the address was previously unlocked
+// indefinitely the timeout is not altered.
+func (am *Manager) TimedUnlock(a Account, passphrase string, timeout time.Duration) error {
+ a, key, err := am.getDecryptedKey(a, passphrase)
if err != nil {
return err
}
@@ -180,8 +181,13 @@ func (am *Manager) TimedUnlock(a Account, keyAuth string, timeout time.Duration)
defer am.mu.Unlock()
u, found := am.unlocked[a.Address]
if found {
- // terminate dropLater for this key to avoid unexpected drops.
- if u.abort != nil {
+ if u.abort == nil {
+ // The address was unlocked indefinitely, so unlocking
+ // it with a timeout would be confusing.
+ zeroKey(key.PrivateKey)
+ return nil
+ } else {
+ // Terminate the expire goroutine and replace it below.
close(u.abort)
}
}
diff --git a/accounts/accounts_test.go b/accounts/accounts_test.go
index <HASH>..<HASH> 100644
--- a/accounts/accounts_test.go
+++ b/accounts/accounts_test.go
@@ -120,8 +120,8 @@ func TestOverrideUnlock(t *testing.T) {
pass := "foo"
a1, err := am.NewAccount(pass)
- // Unlock indefinitely
- if err = am.Unlock(a1, pass); err != nil {
+ // Unlock indefinitely.
+ if err = am.TimedUnlock(a1, pass, 5*time.Minute); err != nil {
t.Fatal(err)
}
|
accounts: ensure TimedUnlock does not override indefinite unlock timeout
|
ethereum_go-ethereum
|
train
|
a76dfc7b37800de53b5fc00b526cba243e3644b3
|
diff --git a/plugins/provisioners/ansible/provisioner/host.rb b/plugins/provisioners/ansible/provisioner/host.rb
index <HASH>..<HASH> 100644
--- a/plugins/provisioners/ansible/provisioner/host.rb
+++ b/plugins/provisioners/ansible/provisioner/host.rb
@@ -255,7 +255,7 @@ module VagrantPlugins
# Multiple Private Keys
unless !config.inventory_path && @ssh_info[:private_key_path].size == 1
@ssh_info[:private_key_path].each do |key|
- ssh_options << "-o IdentityFile=%s" % [ key.gsub('%', '%%') ]
+ ssh_options << "-o 'IdentityFile=%s'" % [ key.gsub('%', '%%') ]
end
end
|
as `key` is a file path, it must be quoted
|
hashicorp_vagrant
|
train
|
39c3f5658e215076a34a8706619004eef836ce60
|
diff --git a/searx/webapp.py b/searx/webapp.py
index <HASH>..<HASH> 100644
--- a/searx/webapp.py
+++ b/searx/webapp.py
@@ -71,8 +71,6 @@ app = Flask(
app.secret_key = settings['server']['secret_key']
-app.logger.addHandler(logger)
-
babel = Babel(app)
global_favicons = []
|
[fix] endless logging recursion
|
asciimoo_searx
|
train
|
73ea72d814cb868e138fbc834b97acb0023d7eab
|
diff --git a/azurerm/internal/services/sentinel/sentinel_data_connector.go b/azurerm/internal/services/sentinel/sentinel_data_connector.go
index <HASH>..<HASH> 100644
--- a/azurerm/internal/services/sentinel/sentinel_data_connector.go
+++ b/azurerm/internal/services/sentinel/sentinel_data_connector.go
@@ -10,10 +10,6 @@ import (
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts"
)
-// TODO: remove once one of the PR's has been merged
-var _ = importSentinelDataConnector(securityinsight.DataConnectorKindAmazonWebServicesCloudTrail)
-var _ = assertDataConnectorKind(securityinsight.AADDataConnector{}, securityinsight.DataConnectorKindAmazonWebServicesCloudTrail)
-
func importSentinelDataConnector(expectKind securityinsight.DataConnectorKind) func(d *schema.ResourceData, meta interface{}) (data []*schema.ResourceData, err error) {
return func(d *schema.ResourceData, meta interface{}) (data []*schema.ResourceData, err error) {
id, err := parse.DataConnectorID(d.Id())
diff --git a/azurerm/internal/services/sentinel/sentinel_data_connector_office_365.go b/azurerm/internal/services/sentinel/sentinel_data_connector_office_365.go
index <HASH>..<HASH> 100644
--- a/azurerm/internal/services/sentinel/sentinel_data_connector_office_365.go
+++ b/azurerm/internal/services/sentinel/sentinel_data_connector_office_365.go
@@ -198,10 +198,10 @@ func resourceSentinelDataConnectorOffice365Read(d *schema.ResourceData, meta int
return fmt.Errorf("retrieving %s: %+v", id, err)
}
- if err := assertDataConnectorKind(resp.Value, securityinsight.DataConnectorKindOffice365); err != nil {
- return fmt.Errorf("asserting %s: %+v", id, err)
+ dc, ok := resp.Value.(securityinsight.OfficeDataConnector)
+ if !ok {
+ return fmt.Errorf("%s was not an Office 365 Data Connector", id)
}
- dc := resp.Value.(securityinsight.OfficeDataConnector)
d.Set("name", id.Name)
d.Set("log_analytics_workspace_id", workspaceId.ID())
|
use type assertion rather than artificial assert function
|
terraform-providers_terraform-provider-azurerm
|
train
|
4c3394b3ee99bc04a27c159313e4dc7b635988c7
|
diff --git a/src/org/jgroups/protocols/TP.java b/src/org/jgroups/protocols/TP.java
index <HASH>..<HASH> 100644
--- a/src/org/jgroups/protocols/TP.java
+++ b/src/org/jgroups/protocols/TP.java
@@ -46,7 +46,7 @@ import java.util.concurrent.locks.ReentrantLock;
* The {@link #receive(Address, Address, byte[], int, int)} method must
* be called by subclasses when a unicast or multicast message has been received.
 * @author Bela Ban
- * @version $Id: TP.java,v 1.212 2008/06/03 15:44:14 belaban Exp $
+ * @version $Id: TP.java,v 1.213 2008/06/05 07:37:20 belaban Exp $
*/
@MBean(description="Transport protocol")
@DeprecatedProperty(names={"bind_to_all_interfaces", "use_outgoing_packet_handler"})
@@ -304,9 +304,9 @@ public abstract class TP extends Protocol {
protected ThreadFactory timer_thread_factory;
- @ManagedAttribute(name="timer.num_threads", description="Max number of threads to be used by the timer thread pool")
- @Property(name="timer.max_threads")
- int max_timer_threads=4;
+ @ManagedAttribute(name="timer.num_threads", description="Number of threads to be used by the timer thread pool")
+ @Property(name="timer.num_threads")
+ int num_timer_threads=4;
public ThreadFactory getTimerThreadFactory() {
return timer_thread_factory;
@@ -770,7 +770,7 @@ public abstract class TP extends Protocol {
setInAllThreadFactories(channel_name, local_addr, thread_naming_pattern);
- timer=new TimeScheduler(timer_thread_factory, max_timer_threads);
+ timer=new TimeScheduler(timer_thread_factory, num_timer_threads);
verifyRejectionPolicy(oob_thread_pool_rejection_policy);
verifyRejectionPolicy(thread_pool_rejection_policy);
|
changed timer.max_threads to timer.num_threads
|
belaban_JGroups
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.