| hash | diff | message | project | split |
|---|---|---|---|---|
2fd88aba418c3b98fb17bb63d1dfbb9a9093725b
|
diff --git a/pydba/postgres.py b/pydba/postgres.py
index <HASH>..<HASH> 100644
--- a/pydba/postgres.py
+++ b/pydba/postgres.py
@@ -1,10 +1,12 @@
"""Support for PostgreSQL database interactions."""
+import getpass
import os
import socket
import logging
import subprocess
from collections import namedtuple
+import pexpect
import psycopg2
from pydba.exc import DatabaseError
@@ -54,6 +56,9 @@ class PostgresDB(object):
application_name: str, optional
allow user to specify the app name in the connection
"""
+ if user is None:
+ user = getpass.getuser()
+
self._connect_args = dict(
application_name=application_name,
database=database, user=user, password=password,
@@ -186,3 +191,34 @@ class PostgresDB(object):
log.warn('overwriting contents of database %s' % name)
log.info('restoring %s from %s' % (name, filename))
self._run_cmd('pg_restore', '--verbose', '--dbname=%s' % name, filename)
+
+ def shell(self, expect=pexpect):
+ """
+ Connects the database client shell to the database.
+
+ Parameters
+ ----------
+ expect: module, optional
+ the pexpect-compatible module used to spawn the client shell.
+ """
+ options = [
+ ('dbname', self._connect_args['database']),
+ ('user', self._connect_args['user']),
+ ('host', self._connect_args['host']),
+ ('port', self._connect_args['port']),
+ ]
+
+ if self._connect_args['sslmode'] is not None:
+ options = options + [
+ ('sslmode', self._connect_args['sslmode']),
+ ('sslcert', os.path.expanduser(self._connect_args['sslcert'])),
+ ('sslkey', os.path.expanduser(self._connect_args['sslkey'])),
+ ]
+
+ dsn = ' '.join("%s=%s" % (param, value) for param, value in options)
+
+ child = expect.spawn('psql "%s"' % dsn)
+ if self._connect_args['password'] is not None:
+ child.expect('Password: ')
+ child.sendline(self._connect_args['password'])
+ child.interact()
diff --git a/requirements.txt b/requirements.txt
index <HASH>..<HASH> 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1 +1,2 @@
psycopg2>=2.6
+pexpect>=3.3
diff --git a/tests/pydba/test_postgres.py b/tests/pydba/test_postgres.py
index <HASH>..<HASH> 100644
--- a/tests/pydba/test_postgres.py
+++ b/tests/pydba/test_postgres.py
@@ -1,3 +1,4 @@
+import getpass
import tempfile
import pytest
@@ -67,3 +68,52 @@ def test_backup_and_restore(pg):
fp.seek(0)
pg.restore(db_name, fp.name)
assert pg.exists(db_name)
+
+
+class FakeChild(object):
+ def __init__(self, cmd):
+ self.cmd = cmd
+ self.password = None
+
+ def expect(self, data):
+ pass
+
+ def sendline(self, line):
+ self.password = line
+
+ def interact(self):
+ pass
+
+
+class ExpectModule(object):
+ def __init__(self):
+ self.expect = None
+
+ def spawn(self, cmd):
+ self.expect = FakeChild(cmd)
+ return self.expect
+
+
+def test_shell(pg):
+ fake_expect = ExpectModule()
+
+ pg.shell(fake_expect)
+
+ assert fake_expect.expect.cmd == 'psql ' \
+ '"dbname=postgres user=' + getpass.getuser() + ' host=localhost port=5432"'
+
+
+def test_shell_with_ssl_and_password_prompt():
+ fake_expect = ExpectModule()
+
+ the_password = 'password1234'
+
+ pg = PostgresDB(user='foo', password=the_password,
+ sslmode='require', sslcert='test.pem', sslkey='test.key')
+ pg.shell(fake_expect)
+
+ assert fake_expect.expect.password == the_password
+
+ assert fake_expect.expect.cmd == 'psql ' \
+ '"dbname=postgres user=foo host=localhost port=5432 ' \
+ 'sslmode=require sslcert=test.pem sslkey=test.key"'
|
- added method that drops user into native DB client shell
|
drkjam_pydba
|
train
|
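
The pydba record above adds a `shell()` method that assembles a libpq-style DSN and hands the terminal to `psql` via `pexpect`, with the expect module injectable for testing. A minimal standalone sketch of the same pattern, assuming only that `pexpect` is installed (the function and dict keys here are illustrative, not pydba's API):

```python
import pexpect


def open_psql_shell(connect_args, expect=pexpect):
    """Spawn an interactive psql shell for the given connection settings.

    The expect module is a parameter so tests can pass a stub exposing
    spawn/expect/sendline/interact instead of real pexpect.
    """
    options = [(key, connect_args[key]) for key in ("dbname", "user", "host", "port")]
    dsn = " ".join("%s=%s" % (param, value) for param, value in options)

    child = expect.spawn('psql "%s"' % dsn)
    if connect_args.get("password") is not None:
        child.expect("Password: ")            # wait for psql's password prompt
        child.sendline(connect_args["password"])
    child.interact()                          # hand the live session to the user
```

Injecting the expect module is what lets the commit's FakeChild/ExpectModule test doubles verify the generated command line without launching a real psql process.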
967a9ca7de312d9ef1de3cdf164274e2d9bab285
|
diff --git a/lnwallet/script_utils.go b/lnwallet/script_utils.go
index <HASH>..<HASH> 100644
--- a/lnwallet/script_utils.go
+++ b/lnwallet/script_utils.go
@@ -817,6 +817,31 @@ func htlcSpendSuccess(signer Signer, signDesc *SignDescriptor,
return witnessStack, nil
}
+// htlcSpendRevoke spends a second-level HTLC output. This function is to be
+// used by the sender or receiver of an HTLC to claim the HTLC after a revoked
+// commitment transaction was broadcast.
+func htlcSpendRevoke(signer Signer, signDesc *SignDescriptor,
+ revokeTx *wire.MsgTx) (wire.TxWitness, error) {
+
+ // We don't need any special modifications to the transaction as this
+ // is just sweeping a revoked HTLC output. So we'll generate a regular
+ // witness signature.
+ sweepSig, err := signer.SignOutputRaw(revokeTx, signDesc)
+ if err != nil {
+ return nil, err
+ }
+
+ // We set a one as the first element of the witness stack (ignoring the
+ // witness script), in order to force execution to the revocation
+ // clause in the second level HTLC script.
+ witnessStack := wire.TxWitness(make([][]byte, 3))
+ witnessStack[0] = append(sweepSig, byte(signDesc.HashType))
+ witnessStack[1] = []byte{1}
+ witnessStack[2] = signDesc.WitnessScript
+
+ return witnessStack, nil
+}
+
// HtlcSecondLevelSpend exposes the public witness generation function for
// spending an HTLC success transaction, either due to an expiring time lock or
// having had the payment preimage. This method is able to spend any
@@ -848,31 +873,6 @@ func HtlcSecondLevelSpend(signer Signer, signDesc *SignDescriptor,
return witnessStack, nil
}
-// htlcTimeoutRevoke spends a second-level HTLC output. This function is to be
-// used by the sender or receiver of an HTLC to claim the HTLC after a revoked
-// commitment transaction was broadcast.
-func htlcSpendRevoke(signer Signer, signDesc *SignDescriptor,
- revokeTx *wire.MsgTx) (wire.TxWitness, error) {
-
- // We don't need any special modifications to the transaction as this
- // is just sweeping a revoked HTLC output. So we'll generate a regular
- // witness signature.
- sweepSig, err := signer.SignOutputRaw(revokeTx, signDesc)
- if err != nil {
- return nil, err
- }
-
- // We set a one as the first element of the witness stack (ignoring the
- // witness script), in order to force execution to the revocation
- // clause in the second level HTLC script.
- witnessStack := wire.TxWitness(make([][]byte, 3))
- witnessStack[0] = append(sweepSig, byte(signDesc.HashType))
- witnessStack[1] = []byte{1}
- witnessStack[2] = signDesc.WitnessScript
-
- return witnessStack, nil
-}
-
// lockTimeToSequence converts the passed relative locktime to a sequence
// number in accordance to BIP-68.
// See: https://github.com/bitcoin/bips/blob/master/bip-0068.mediawiki
diff --git a/lnwallet/witnessgen.go b/lnwallet/witnessgen.go
index <HASH>..<HASH> 100644
--- a/lnwallet/witnessgen.go
+++ b/lnwallet/witnessgen.go
@@ -63,6 +63,12 @@ const (
// pre-image to the HTLC. We can sweep this without any additional
// timeout.
HtlcAcceptedRemoteSuccess WitnessType = 8
+
+ // HtlcSecondLevelRevoke is a witness that allows us to sweep an HTLC
+ // from the remote party's commitment transaction in the case that the
+ // broadcast a revoked commitment, but then also immediately attempt to
+ // go to the second level to claim the HTLC.
+ HtlcSecondLevelRevoke WitnessType = 9
)
// WitnessGenerator represents a function which is able to generate the final
@@ -111,6 +117,9 @@ func (wt WitnessType) GenWitnessFunc(signer Signer,
// value.
return receiverHtlcSpendTimeout(signer, desc, tx, -1)
+ case HtlcSecondLevelRevoke:
+ return htlcSpendRevoke(signer, desc, tx)
+
default:
return nil, fmt.Errorf("unknown witness type: %v", wt)
}
|
lnwallet: add new HtlcSecondLevelRevoke witness type
In this commit, we add a new witness type to the set of known types.
This new type will be used when we need to sweep an HTLC that the
remote party has taken to the second level.
|
lightningnetwork_lnd
|
train
|
6dbd7cab3428effedd72b2c778e068b7512d8ff8
|
diff --git a/nanopq/convert_faiss.py b/nanopq/convert_faiss.py
index <HASH>..<HASH> 100644
--- a/nanopq/convert_faiss.py
+++ b/nanopq/convert_faiss.py
@@ -2,7 +2,7 @@
import importlib
spec = importlib.util.find_spec("faiss")
if spec is None:
- raise ImportError("Cannot find the faiss module.")
+ pass # faiss isn't installed; just skip the import
else:
import faiss
@@ -12,6 +12,7 @@ import numpy as np
def nanopq_to_faiss(pq_nanopq):
"""Convert a :class:`nanopq.PQ` instance to `faiss.IndexPQ <https://github.com/facebookresearch/faiss/blob/master/IndexPQ.h>`_.
+ To use this function, `faiss module needs to be installed <https://github.com/facebookresearch/faiss/blob/master/INSTALL.md>`_.
Args:
pq_nanopq (nanopq.PQ): An input PQ instance.
@@ -44,6 +45,7 @@ def nanopq_to_faiss(pq_nanopq):
def faiss_to_nanopq(pq_faiss):
"""Convert a `faiss.IndexPQ <https://github.com/facebookresearch/faiss/blob/master/IndexPQ.h>`_ instance to :class:`nanopq.PQ`.
+ To use this function, `faiss module needs to be installed <https://github.com/facebookresearch/faiss/blob/master/INSTALL.md>`_.
Args:
pq_faiss (faiss.IndexPQ): An input PQ instance.
|
just skip if faiss hasn't been installed
|
matsui528_nanopq
|
train
|
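
The nanopq change above downgrades a missing `faiss` from an import-time `ImportError` to a silent skip. A common variant of this optional-dependency pattern defers the failure until the dependency is actually used; a sketch under that assumption (the `require_faiss` helper is hypothetical, not nanopq's API):

```python
import importlib.util

# Probe for the optional dependency once, at import time.
if importlib.util.find_spec("faiss") is not None:
    import faiss
else:
    faiss = None  # absent: fail later, only if a converter is called


def require_faiss():
    """Return the faiss module, or raise a helpful error if it is missing."""
    if faiss is None:
        raise ImportError(
            "faiss is required for this conversion; see "
            "https://github.com/facebookresearch/faiss/blob/master/INSTALL.md"
        )
    return faiss
```

This keeps the package importable without faiss while still producing a clear error at the point of use.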
493b56015e73f18131d6eb60fe247ab9bede3724
|
diff --git a/CHANGES.md b/CHANGES.md
index <HASH>..<HASH> 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,6 +1,12 @@
Nosey Changelog
===============
+Version 0.1.2 (2013-09-07)
+--------------------------
+
+- Upgrade docopt to version 0.6.1.
+
+
Version 0.1.1 (2013-01-05)
--------------------------
diff --git a/nosey/__init__.py b/nosey/__init__.py
index <HASH>..<HASH> 100644
--- a/nosey/__init__.py
+++ b/nosey/__init__.py
@@ -8,7 +8,7 @@ Local continuous test runner with nose and watchdog.
:license: MIT, see LICENSE for more details.
"""
-__version__ = '0.1.1'
+__version__ = '0.1.2'
from . import command
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -39,7 +39,7 @@ def read(fname):
setup(
name='nosey',
- version='0.1.1',
+ version='0.1.2',
description='Local continuous test runner with nose and watchdog.',
long_description=__doc__,
author='Joe Esposito',
|
Set version to <I>.
|
joeyespo_nosey
|
train
|
ca40e9478735167705816bffe7ef1435767cf15b
|
diff --git a/robber/matchers/called.py b/robber/matchers/called.py
index <HASH>..<HASH> 100644
--- a/robber/matchers/called.py
+++ b/robber/matchers/called.py
@@ -14,7 +14,9 @@ class Called(Base):
raise TypeError('{actual} is not a mock'.format(actual=self.actual))
def failure_message(self):
- return 'Expected {actual} to be called'.format(actual=self.actual)
+ return 'Expected {actual}{negated_message} to be called'.format(
+ actual=self.actual, negated_message=self.negated_message
+ )
expect.register('called', Called)
diff --git a/tests/integrations/test_callable.py b/tests/integrations/test_callable.py
index <HASH>..<HASH> 100644
--- a/tests/integrations/test_callable.py
+++ b/tests/integrations/test_callable.py
@@ -15,12 +15,12 @@ class TestCallableIntegrations(TestCase):
expect("a").to.be.callable()
expect(1).to.be.callable()
- def test_not_to_callable_success(self):
+ def test_not_to_be_callable_success(self):
expect("a").not_to.be.callable()
expect(1).not_to.be.callable()
@must_fail
- def test_not_to_callable_failure(self):
+ def test_not_to_be_callable_failure(self):
def a():
pass
expect(a).not_to.be.callable()
diff --git a/tests/integrations/test_called.py b/tests/integrations/test_called.py
index <HASH>..<HASH> 100644
--- a/tests/integrations/test_called.py
+++ b/tests/integrations/test_called.py
@@ -1,6 +1,6 @@
from unittest import TestCase
-from mock.mock import Mock
+from mock import Mock
from robber import expect
from tests import must_fail
@@ -20,3 +20,14 @@ class TestCalledIntegrations(TestCase):
def test_called_not_a_mock(self):
self.assertRaises(TypeError, expect('a').to.be.called)
self.assertRaises(TypeError, expect(1).to.be.called)
+
+ def test_not_to_be_called_success(self):
+ mock = Mock()
+ expect(mock).not_to.be.called()
+
+ @must_fail
+ def test_not_to_be_called_failure(self):
+ mock = Mock()
+ mock()
+
+ expect(mock).not_to.be.called()
diff --git a/tests/matchers/test_called.py b/tests/matchers/test_called.py
index <HASH>..<HASH> 100644
--- a/tests/matchers/test_called.py
+++ b/tests/matchers/test_called.py
@@ -16,7 +16,16 @@ class TestCalled(TestCase):
mock = Mock()
called = Called(mock)
message = called.failure_message()
- expect(message) == 'Expected {function} to be called'.format(function=mock)
+ expect(message) == 'Expected {mock} to be called'.format(mock=mock)
+
+ def test_failure_message_with_not_to(self):
+ mock = Mock()
+ called = Called(mock)
+ message = called.failure_message()
+
+ mock()
+
+ expect(message) == 'Expected {mock} to be called'.format(mock=mock)
def test_register(self):
expect(expect.matcher('called')) == Called
|
[f] Add support for not_to.be.called
|
vesln_robber.py
|
train
|
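
The robber.py change above threads a `negated_message` into the failure message so one template serves both `to` and `not_to` expectations. A minimal sketch of that pattern, independent of robber's `Base` class (the class and attribute names here are stand-ins):

```python
class CalledMatcher:
    """Toy matcher whose failure message adapts to negation."""

    def __init__(self, actual, is_negative=False):
        self.actual = actual
        self.is_negative = is_negative

    @property
    def negated_message(self):
        # Inserted only for the not_to form of the expectation.
        return " not" if self.is_negative else ""

    def failure_message(self):
        return "Expected {actual}{negated_message} to be called".format(
            actual=self.actual, negated_message=self.negated_message
        )


assert CalledMatcher("f").failure_message() == "Expected f to be called"
assert CalledMatcher("f", is_negative=True).failure_message() == "Expected f not to be called"
```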
ada1fe09145c3ae1f05b9e57ac40a4e74ed407be
|
diff --git a/scrapple/cmd.py b/scrapple/cmd.py
index <HASH>..<HASH> 100644
--- a/scrapple/cmd.py
+++ b/scrapple/cmd.py
@@ -44,7 +44,7 @@ def runCLI():
"""
args = docopt(__doc__, version='0.3.0')
try:
- handle_exceptions(args)
+ check_arguments(args)
command_list = ['genconfig', 'run', 'generate']
select = itemgetter('genconfig', 'run', 'generate')
selectedCommand = command_list[select(args).index(True)]
|
Rename 'handle_exceptions' as 'check_arguments' in cmd module
|
AlexMathew_scrapple
|
train
|
4a363a9eadb934ca924896a6b10939a43654e69d
|
diff --git a/app/controllers/s3_relay/uploads_controller.rb b/app/controllers/s3_relay/uploads_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/s3_relay/uploads_controller.rb
+++ b/app/controllers/s3_relay/uploads_controller.rb
@@ -43,7 +43,11 @@ class S3Relay::UploadsController < ApplicationController
end
def user_attrs
- respond_to?(:current_user) ? { user_id: current_user.id } : {}
+ if respond_to?(:current_user) && (id = current_user.try(:id))
+ { user_id: id }
+ else
+ {}
+ end
end
end
|
Only use current user id if present.
|
kjohnston_s3_relay
|
train
|
86540ab3a88a976b3e7ac8b2c83e62408727dc4b
|
diff --git a/openvidu-server/src/main/java/io/openvidu/server/recording/service/ComposedRecordingService.java b/openvidu-server/src/main/java/io/openvidu/server/recording/service/ComposedRecordingService.java
index <HASH>..<HASH> 100644
--- a/openvidu-server/src/main/java/io/openvidu/server/recording/service/ComposedRecordingService.java
+++ b/openvidu-server/src/main/java/io/openvidu/server/recording/service/ComposedRecordingService.java
@@ -325,6 +325,10 @@ public class ComposedRecordingService extends RecordingService {
"There was an error generating the metadata report file for the recording");
}
+ String filesPath = this.openviduConfig.getOpenViduRecordingPath() + recording.getId() + "/";
+ recording = this.sealRecordingMetadataFileAsReady(recording, recording.getSize(), recording.getDuration(),
+ filesPath + RecordingManager.RECORDING_ENTITY_FILE + recording.getId());
+
final long timestamp = System.currentTimeMillis();
this.cdr.recordRecordingStopped(recording, reason, timestamp);
this.cdr.recordRecordingStatusChanged(recording, reason, timestamp, recording.getStatus());
diff --git a/openvidu-test-e2e/src/test/java/io/openvidu/test/e2e/OpenViduTestAppE2eTest.java b/openvidu-test-e2e/src/test/java/io/openvidu/test/e2e/OpenViduTestAppE2eTest.java
index <HASH>..<HASH> 100644
--- a/openvidu-test-e2e/src/test/java/io/openvidu/test/e2e/OpenViduTestAppE2eTest.java
+++ b/openvidu-test-e2e/src/test/java/io/openvidu/test/e2e/OpenViduTestAppE2eTest.java
@@ -2582,7 +2582,7 @@ public class OpenViduTestAppE2eTest {
restClient.rest(HttpMethod.GET, "/config", null, HttpStatus.SC_OK, true,
"{'version':'STR','openviduPublicurl':'STR','openviduCdr':false,'maxRecvBandwidth':0,'minRecvBandwidth':0,'maxSendBandwidth':0,'minSendBandwidth':0,'openviduRecording':false,"
+ "'openviduRecordingVersion':'STR','openviduRecordingPath':'STR','openviduRecordingPublicAccess':false,'openviduRecordingNotification':'STR',"
- + "'openviduRecordingCustomLayout':'STR','openviduRecordingAutostopTimeout':0,'openviduWebhook':false,'openviduWebhookEndpoint':'STR','openviduWebhookHeaders':'STR','openviduWebhookEvents':'STR',}");
+ + "'openviduRecordingCustomLayout':'STR','openviduRecordingAutostopTimeout':0,'openviduWebhook':false,'openviduWebhookEndpoint':'STR','openviduWebhookHeaders':[],'openviduWebhookEvents':[],}");
}
@Test
diff --git a/openvidu-test-e2e/src/test/java/io/openvidu/test/e2e/utils/CustomHttpClient.java b/openvidu-test-e2e/src/test/java/io/openvidu/test/e2e/utils/CustomHttpClient.java
index <HASH>..<HASH> 100644
--- a/openvidu-test-e2e/src/test/java/io/openvidu/test/e2e/utils/CustomHttpClient.java
+++ b/openvidu-test-e2e/src/test/java/io/openvidu/test/e2e/utils/CustomHttpClient.java
@@ -110,8 +110,13 @@ public class CustomHttpClient {
jsonObjExpected.length(), json.length());
}
for (String key : jsonObjExpected.keySet()) {
- Assert.assertTrue("Wrong class of property " + key,
- jsonObjExpected.get(key).getClass().equals(json.get(key).getClass()));
+ Class<?> c1 = jsonObjExpected.get(key).getClass();
+ Class<?> c2 = json.get(key).getClass();
+
+ c1 = unifyNumberType(c1);
+ c2 = unifyNumberType(c2);
+
+ Assert.assertTrue("Wrong class of property " + key, c1.equals(c2));
}
return json;
}
@@ -217,4 +222,12 @@ public class CustomHttpClient {
Assert.assertEquals(path + " expected to return status " + status, status, jsonResponse.getStatus());
return json;
}
+
+ private Class<?> unifyNumberType(Class<?> myClass) {
+ if (Number.class.isAssignableFrom(myClass)) {
+ return Number.class;
+ }
+ return myClass;
+ }
+
}
|
openvidu-server: set ready state to COMPOSED video recordings
|
OpenVidu_openvidu
|
train
|
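
The `unifyNumberType` helper in the record above collapses every numeric class to a common supertype before comparing expected and actual JSON property types, so an expected `0` matches an actual `0.5`. The same idea in a short Python sketch (illustrative only; the Java original uses `Number.class.isAssignableFrom`):

```python
import numbers


def unify_number_type(cls):
    """Map every numeric type (int, float, ...) to numbers.Number."""
    return numbers.Number if issubclass(cls, numbers.Number) else cls


# 0 and 0.5 now compare as the same "kind" of JSON value; strings are untouched.
assert unify_number_type(type(0)) == unify_number_type(type(0.5))
assert unify_number_type(type("s")) is str
```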
8b1e3e105e15dd70e571de3e191b669eeb2a1986
|
diff --git a/config/default/testRunner.conf.php b/config/default/testRunner.conf.php
index <HASH>..<HASH> 100644
--- a/config/default/testRunner.conf.php
+++ b/config/default/testRunner.conf.php
@@ -389,6 +389,12 @@ return [
*/
'contentNavigatorType' => 'default'
],
+ /**
+ * The plugin renders review panel for test taker
+ */
+ 'review' => [
+ 'reviewLayout' => 'fizzy'
+ ]
],
/**
|
feat: enhance config by new property for review plugin
|
oat-sa_extension-tao-testqti
|
train
|
83fcfe1bb0a2a9cc82cf1651c481763781fd12cd
|
diff --git a/src/saml2/utils.py b/src/saml2/utils.py
index <HASH>..<HASH> 100644
--- a/src/saml2/utils.py
+++ b/src/saml2/utils.py
@@ -175,7 +175,8 @@ def make_instance(klass, spec):
def parse_attribute_map(filenames):
"""
Expects a file with each line being composed of the oid for the attribute
- exactly one space and then a user friendly name of the attribute
+ exactly one space, a user friendly name of the attribute and then
+ the type specification of the name.
:param filename: List of filenames on mapfiles.
:return: A 2-tuple, one dictionary with the oid as keys and the friendly
@@ -185,9 +186,9 @@ def parse_attribute_map(filenames):
backward = {}
for filename in filenames:
for line in open(filename).readlines():
- (name, friendly_name) = line.strip().split(" ")
- forward[name] = friendly_name
- backward[friendly_name] = name
+ (name, friendly_name, name_format) = line.strip().split()
+ forward[(name, name_format)] = friendly_name
+ backward[friendly_name] = (name, name_format)
return (forward, backward)
@@ -227,7 +228,7 @@ def identity_attribute(form, attribute, forward_map=None):
return attribute.friendly_name
elif forward_map:
try:
- return forward_map[attribute.name]
+ return forward_map[(attribute.name, attribute.name_format)]
except KeyError:
return attribute.name
# default is name
@@ -430,15 +431,14 @@ def _attrval(val):
def ava_to_attributes(ava, bmap):
attrs = []
- for key, val in ava.items():
+ for friendly_name, val in ava.items():
dic = {}
attrval = _attrval(val)
if attrval:
dic["attribute_value"] = attrval
- dic["friendly_name"] = key
- dic["name"] = bmap[key]
- dic["name_format"] = NAME_FORMAT_URI
+ dic["friendly_name"] = friendly_name
+ (dic["name"], dic["name_format"]) = bmap[friendly_name]
attrs.append(attribute_factory(**dic))
return attrs
|
Format of the map file changed a bit so some code pieces had to be updated
|
IdentityPython_pysaml2
|
train
|
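
The new map-file format in the pysaml2 record above carries three whitespace-separated fields per line: the oid-style name, the friendly name, and the name format, with the forward map keyed on the (name, name_format) pair. A self-contained sketch of the parsing (the sample lines are illustrative, not an official attribute map):

```python
# Each line of the new format: <name> <friendly_name> <name_format>.
sample = """\
urn:oid:2.5.4.4 sn urn:oasis:names:tc:SAML:2.0:attrname-format:uri
urn:oid:2.5.4.42 givenName urn:oasis:names:tc:SAML:2.0:attrname-format:uri
"""

forward, backward = {}, {}
for line in sample.splitlines():
    name, friendly_name, name_format = line.strip().split()
    forward[(name, name_format)] = friendly_name   # keyed on (name, format)
    backward[friendly_name] = (name, name_format)  # reverse lookup

assert forward[("urn:oid:2.5.4.4",
                "urn:oasis:names:tc:SAML:2.0:attrname-format:uri")] == "sn"
```

Keying the forward map on the pair is what makes the commit's later `forward_map[(attribute.name, attribute.name_format)]` lookup work.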
a00bd52a51adbf6e574522c3359ac22caa02d63c
|
diff --git a/test/kg/apc/emulators/TestJMeterUtils.java b/test/kg/apc/emulators/TestJMeterUtils.java
index <HASH>..<HASH> 100644
--- a/test/kg/apc/emulators/TestJMeterUtils.java
+++ b/test/kg/apc/emulators/TestJMeterUtils.java
@@ -1,25 +1,23 @@
package kg.apc.emulators;
-import java.io.*;
-import java.util.Locale;
-import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import kg.apc.jmeter.DirectoryAnchor;
+import org.apache.commons.lang3.RandomStringUtils;
import org.apache.jmeter.control.LoopController;
import org.apache.jmeter.engine.StandardJMeterEngine;
import org.apache.jmeter.gui.GuiPackage;
import org.apache.jmeter.gui.tree.JMeterTreeListener;
import org.apache.jmeter.gui.tree.JMeterTreeModel;
-import org.apache.jmeter.threads.JMeterContextService;
-import org.apache.jmeter.threads.JMeterThread;
-import org.apache.jmeter.threads.JMeterThreadMonitor;
-import org.apache.jmeter.threads.JMeterVariables;
+import org.apache.jmeter.threads.*;
import org.apache.jmeter.threads.ThreadGroup;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.collections.HashTree;
import org.junit.Test;
+import java.io.*;
+import java.util.Locale;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
/**
*
* @author apc
@@ -77,10 +75,7 @@ public abstract class TestJMeterUtils {
}
public static String getTestData(int i) {
- byte[] bytes = new byte[i];
- Random r = new Random();
- r.nextBytes(bytes);
- return new String(bytes);
+ return RandomStringUtils.randomAlphanumeric(i);
}
/**
diff --git a/test/kg/apc/jmeter/samplers/HTTPRawSamplerTest.java b/test/kg/apc/jmeter/samplers/HTTPRawSamplerTest.java
index <HASH>..<HASH> 100644
--- a/test/kg/apc/jmeter/samplers/HTTPRawSamplerTest.java
+++ b/test/kg/apc/jmeter/samplers/HTTPRawSamplerTest.java
@@ -100,7 +100,7 @@ public class HTTPRawSamplerTest {
public void testSample_hugeparse() throws MalformedURLException, IOException {
System.out.println("sample");
String req = "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n";
- String resp = "HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n" + TestJMeterUtils.getTestData(10000000);
+ String resp = "HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n" + TestJMeterUtils.getTestData(1000000);
instance.setRequestData(req);
instance.setParseResult(true);
|
Fixed generated testData to be alphanumeric to ensure that there are no \r\n characters (not sure if this is a problem in HTTPRawSampler.parseResponse method). Also reduced the amount of data generated in the test to 1M instead of <I>M (causes OOM errors)
|
undera_jmeter-plugins
|
train
|
ab6eca563bdff2b2561283104a878f18bdfb980e
|
diff --git a/requirements.txt b/requirements.txt
index <HASH>..<HASH> 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,4 @@
requests>=2.9.1
tqdm>=3.7.1
arrow
+tabulate
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -40,7 +40,7 @@ classifiers = [
setup(name="tfatool",
version=tfatool._version.__version__,
scripts=["flashair-util", "flashair-config"],
- install_requires=["requests>=2.9.0", "tqdm>=3.7.1", "arrow"],
+ install_requires=["requests>=2.9.0", "tqdm>=3.7.1", "arrow", "tabulate"],
licence="MIT",
packages=["tfatool"],
description=description,
diff --git a/tfatool/_version.py b/tfatool/_version.py
index <HASH>..<HASH> 100644
--- a/tfatool/_version.py
+++ b/tfatool/_version.py
@@ -1,2 +1,2 @@
-__version__ = "v2.1.0"
+__version__ = "v2.2.0"
|
minor version bump for file tabulate feature
|
TadLeonard_tfatool
|
train
|
2a48d7d74076268f113dc9adc910da42731c136f
|
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -33,6 +33,23 @@ function getPattern (path, source) {
return pattern
}
+function describeFile (src) {
+ const contents = readFileSync(src)
+ const contentLength = contents.length
+ const tag = md5Hex(contents)
+
+ const mimeType = mime.lookup(src) || 'application/octet-stream'
+ const contentType = mime.contentType(mimeType)
+
+ return {
+ contentLength,
+ contentType,
+ mimeType,
+ src,
+ tag
+ }
+}
+
export default function ({ types: t }) {
return {
visitor: {
@@ -53,35 +70,23 @@ export default function ({ types: t }) {
})
const prefix = commonPathPrefix(matches)
- const details = Object.create(null)
+ const descriptions = Object.create(null)
const files = []
for (const filepath of matches) {
const src = resolve(fromDir, filepath)
- const mimeType = mime.lookup(src) || 'application/octet-stream'
- const contentType = mime.contentType(mimeType)
-
- const contents = readFileSync(src)
- const contentLength = contents.length
- const tag = md5Hex(contents)
-
+ const desc = describeFile(src)
const relpath = filepath.slice(prefix.length)
files.push(relpath)
- details[relpath] = {
- contentLength,
- contentType,
- mimeType,
- src,
- tag
- }
+ descriptions[relpath] = desc
}
- const makeDetail = (detail) => {
+ const makeDescription = (desc) => {
return t.objectExpression([
- t.objectProperty(t.identifier('contentLength'), t.numericLiteral(detail.contentLength)),
- t.objectProperty(t.identifier('contentType'), t.stringLiteral(detail.contentType)),
- t.objectProperty(t.identifier('mimeType'), t.stringLiteral(detail.mimeType)),
- t.objectProperty(t.identifier('src'), t.stringLiteral(detail.src)),
- t.objectProperty(t.identifier('tag'), t.stringLiteral(detail.tag))
+ t.objectProperty(t.identifier('contentLength'), t.numericLiteral(desc.contentLength)),
+ t.objectProperty(t.identifier('contentType'), t.stringLiteral(desc.contentType)),
+ t.objectProperty(t.identifier('mimeType'), t.stringLiteral(desc.mimeType)),
+ t.objectProperty(t.identifier('src'), t.stringLiteral(desc.src)),
+ t.objectProperty(t.identifier('tag'), t.stringLiteral(desc.tag))
])
}
@@ -92,7 +97,7 @@ export default function ({ types: t }) {
t.variableDeclarator(
t.identifier(localName),
t.objectExpression(
- files.map((relpath) => t.objectProperty(t.stringLiteral(relpath), makeDetail(details[relpath])))
+ files.map((relpath) => t.objectProperty(t.stringLiteral(relpath), makeDescription(descriptions[relpath])))
)
)
])
|
refactor how files are described
Use 'description' rather than 'detail'. Extract into a function.
|
novemberborn_babel-plugin-files
|
train
|
8a6fbd2fb96a193184fa18de211d6b313b888648
|
diff --git a/baron/render.py b/baron/render.py
index <HASH>..<HASH> 100644
--- a/baron/render.py
+++ b/baron/render.py
@@ -373,7 +373,7 @@ nodes_rendering_order = {
("formatting", "third_formatting", True),
("constant", "in", True),
("formatting", "fourth_formatting", True),
- ("key", "target", True),
+ ("list", "target", True),
("list", "ifs", True),
],
"comprehension_if": [
|
[fix] list declared as a key in nodes_rendering_order
|
PyCQA_baron
|
train
|
6dbe84c18508a8f36edb5a06f7f9ad1e1bbbde08
|
diff --git a/lib/lwm2m-common.js b/lib/lwm2m-common.js
index <HASH>..<HASH> 100644
--- a/lib/lwm2m-common.js
+++ b/lib/lwm2m-common.js
@@ -648,8 +648,8 @@ export class ResourceRepositoryBuilder {
if (opts.enableDTLS) {
uriPrefix = 'coaps://';
repo[`${securityUriPrefix}/2`].value = 0; // PSK
- repo[`${securityUriPrefix}/3`] = Resource.build({type: 'OPAQUE', value: opts.pskIdentity});
- repo[`${securityUriPrefix}/5`] = Resource.build({type: 'OPAQUE', value: `hex:${opts.presharedKey}`});
+ repo[`${securityUriPrefix}/3`] = Resource.build({type: 'OPAQUE', acl: 'RW', value: opts.pskIdentity});
+ repo[`${securityUriPrefix}/5`] = Resource.build({type: 'OPAQUE', acl: 'RW', value: `hex:${opts.presharedKey}`});
} else {
repo[`${securityUriPrefix}/2`].value = 3; // NONE
}
|
Fix an issue where ACL was missing for PSK resources
|
CANDY-LINE_node-red-contrib-lwm2m
|
train
|
31ec39310d68521985436b62d7ed3a6753d92b25
|
diff --git a/tests/aws/requests/compute/tag_tests.rb b/tests/aws/requests/compute/tag_tests.rb
index <HASH>..<HASH> 100644
--- a/tests/aws/requests/compute/tag_tests.rb
+++ b/tests/aws/requests/compute/tag_tests.rb
@@ -1,4 +1,6 @@
Shindo.tests('Fog::Compute[:aws] | tag requests', ['aws']) do
+ Fog::Compute::AWS::Mock.reset if Fog.mocking?
+
@tags_format = {
'tagSet' => [{
'key' => String,
|
[AWS|Core] Stop flapping test by clearing out mock data
|
fog_fog
|
train
|
7bb00425c102699c52817458265034e460e92aac
|
diff --git a/pandas/tests/test_frame.py b/pandas/tests/test_frame.py
index <HASH>..<HASH> 100644
--- a/pandas/tests/test_frame.py
+++ b/pandas/tests/test_frame.py
@@ -6,6 +6,7 @@ from StringIO import StringIO
import cPickle as pickle
import operator
import os
+import sys
import unittest
from numpy import random, nan
@@ -1810,10 +1811,17 @@ class TestDataFrame(unittest.TestCase, CheckIndexing,
df_s = df.to_string()
- expected = (' x \n0 0.000000\n1 0.250000\n'
- '2 3456.000\n3 1.20e+46\n4 1.64e+06\n'
- '5 1.70e+08\n6 1.253456\n7 3.141593\n'
- '8 -1.00e+06')
+ # Python 2.5 just wants me to be sad
+ if sys.version_info[0] == 2 and sys.version_info[1] < 6:
+ expected = (' x \n0 0.0000000\n1 0.2500000\n'
+ '2 3456.0000\n3 1.20e+046\n4 1.64e+006\n'
+ '5 1.70e+008\n6 1.2534560\n7 3.1415927\n'
+ '8 -1.00e+006')
+ else:
+ expected = (' x \n0 0.000000\n1 0.250000\n'
+ '2 3456.000\n3 1.20e+46\n4 1.64e+06\n'
+ '5 1.70e+08\n6 1.253456\n7 3.141593\n'
+ '8 -1.00e+06')
assert(df_s == expected)
df = DataFrame({'x' : [3234, 0.253]})
@@ -1823,6 +1831,7 @@ class TestDataFrame(unittest.TestCase, CheckIndexing,
assert(df_s == expected)
com.reset_printoptions()
+ self.assertEqual(com.print_config.precision, 4)
df = DataFrame({'x': [1e9, 0.2512]})
df_s = df.to_string()
|
TST: Python <I> float formatting changed
|
pandas-dev_pandas
|
train
|
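
The pandas fix above branches the test's expected `to_string()` output on the interpreter version, because the expected exponent formatting differs on interpreters older than 2.6 (`1.20e+046` versus `1.20e+46` in the diff). The guard itself is just a `sys.version_info` comparison; a tiny illustration:

```python
import sys

# Choose the version-appropriate expected output, as the pandas test does.
if sys.version_info[:2] < (2, 6):
    expected = "1.20e+046"  # wider exponent on old interpreters
else:
    expected = "1.20e+46"   # modern two-digit exponent

print("expecting:", expected)
```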
9980ad14d8d8907c64acf691fdcb8ab72612236f
|
diff --git a/js/gdax.js b/js/gdax.js
index <HASH>..<HASH> 100644
--- a/js/gdax.js
+++ b/js/gdax.js
@@ -83,6 +83,7 @@ module.exports = class gdax extends Exchange {
'post': [
'deposits/coinbase-account',
'deposits/payment-method',
+ 'coinbase-accounts/{id}/addresses',
'funding/repay',
'orders',
'position/close',
|
Added url endpoint for gdax to retrieve deposit address
|
ccxt_ccxt
|
train
|
f27253e0cd590a60de2dc389c89be661d22d0f92
|
diff --git a/src/main/java/com/amazon/carbonado/repo/jdbc/LoggingCallableStatement.java b/src/main/java/com/amazon/carbonado/repo/jdbc/LoggingCallableStatement.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/amazon/carbonado/repo/jdbc/LoggingCallableStatement.java
+++ b/src/main/java/com/amazon/carbonado/repo/jdbc/LoggingCallableStatement.java
@@ -638,6 +638,14 @@ class LoggingCallableStatement extends LoggingPreparedStatement implements Calla
cs().setNClob(parameterName, reader);
}
+ public <T> T getObject(int parameterIndex, Class<T> type) throws SQLException {
+ return cs().getObject(parameterIndex, type);
+ }
+
+ public <T> T getObject(String parameterName, Class<T> type) throws SQLException {
+ return cs().getObject(parameterName, type);
+ }
+
private CallableStatement cs() {
return (CallableStatement) mStatement;
}
diff --git a/src/main/java/com/amazon/carbonado/repo/jdbc/LoggingConnection.java b/src/main/java/com/amazon/carbonado/repo/jdbc/LoggingConnection.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/amazon/carbonado/repo/jdbc/LoggingConnection.java
+++ b/src/main/java/com/amazon/carbonado/repo/jdbc/LoggingConnection.java
@@ -339,4 +339,26 @@ class LoggingConnection implements Connection {
public boolean isWrapperFor(Class<?> iface) throws SQLException {
throw new UnsupportedOperationException();
}
+
+ public void setSchema(String schema) throws SQLException {
+ mCon.setSchema(schema);
+ }
+
+ public String getSchema() throws SQLException {
+ return mCon.getSchema();
+ }
+
+ public void abort(java.util.concurrent.Executor executor) throws SQLException {
+ mCon.abort(executor);
+ }
+
+ public void setNetworkTimeout(java.util.concurrent.Executor executor, int milliseconds)
+ throws SQLException
+ {
+ mCon.setNetworkTimeout(executor, milliseconds);
+ }
+
+ public int getNetworkTimeout() throws SQLException {
+ return mCon.getNetworkTimeout();
+ }
}
diff --git a/src/main/java/com/amazon/carbonado/repo/jdbc/LoggingDataSource.java b/src/main/java/com/amazon/carbonado/repo/jdbc/LoggingDataSource.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/amazon/carbonado/repo/jdbc/LoggingDataSource.java
+++ b/src/main/java/com/amazon/carbonado/repo/jdbc/LoggingDataSource.java
@@ -20,6 +20,7 @@ package com.amazon.carbonado.repo.jdbc;
import java.sql.Connection;
import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
import java.io.PrintWriter;
import javax.sql.DataSource;
@@ -115,4 +116,8 @@ public class LoggingDataSource implements DataSource {
public boolean isWrapperFor(Class<?> iface) throws SQLException {
throw new UnsupportedOperationException();
}
+
+ public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException {
+ return mDataSource.getParentLogger();
+ }
}
diff --git a/src/main/java/com/amazon/carbonado/repo/jdbc/LoggingStatement.java b/src/main/java/com/amazon/carbonado/repo/jdbc/LoggingStatement.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/amazon/carbonado/repo/jdbc/LoggingStatement.java
+++ b/src/main/java/com/amazon/carbonado/repo/jdbc/LoggingStatement.java
@@ -232,4 +232,12 @@ class LoggingStatement implements Statement {
public boolean isWrapperFor(Class<?> iface) throws SQLException {
throw new UnsupportedOperationException();
}
+
+ public void closeOnCompletion() throws SQLException {
+ mStatement.closeOnCompletion();
+ }
+
+ public boolean isCloseOnCompletion() throws SQLException {
+ return mStatement.isCloseOnCompletion();
+ }
}
diff --git a/src/main/java/com/amazon/carbonado/repo/jdbc/SimpleDataSource.java b/src/main/java/com/amazon/carbonado/repo/jdbc/SimpleDataSource.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/amazon/carbonado/repo/jdbc/SimpleDataSource.java
+++ b/src/main/java/com/amazon/carbonado/repo/jdbc/SimpleDataSource.java
@@ -21,6 +21,7 @@ package com.amazon.carbonado.repo.jdbc;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
import java.io.PrintWriter;
import java.util.Properties;
@@ -151,4 +152,8 @@ public class SimpleDataSource implements DataSource {
public boolean isWrapperFor(Class<?> iface) throws SQLException {
throw new UnsupportedOperationException();
}
+
+ public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException {
+ throw new SQLFeatureNotSupportedException();
+ }
}
|
Support compilation with JDK <I>.
|
Carbonado_Carbonado
|
train
|
d8fb1b2d876df4013ac086dcef266a71902237c6
|
diff --git a/lib/gcli/commands/demo/demo.js b/lib/gcli/commands/demo/demo.js
index <HASH>..<HASH> 100644
--- a/lib/gcli/commands/demo/demo.js
+++ b/lib/gcli/commands/demo/demo.js
@@ -190,124 +190,4 @@ exports.items = [
return 'date1=' + args.date1 + ' date2=' + args.date2;
}
},
- {
- // 'gcli file' command
- item: 'command',
- name: 'gcli open',
- description: 'a file param in open mode',
- params: [
- {
- name: 'p1',
- type: {
- name: 'file',
- filetype: 'file',
- existing: 'yes'
- },
- description: 'open param'
- }
- ],
- exec: function(args, context) {
- return 'p1=\'' + args.p1 + '\'';
- }
- },
- {
- // 'gcli saveas' command
- item: 'command',
- name: 'gcli saveas',
- description: 'a file param in saveas mode',
- params: [
- {
- name: 'p1',
- type: {
- name: 'file',
- filetype: 'file',
- existing: 'no'
- },
- description: 'open param'
- }
- ],
- exec: function(args, context) {
- return 'p1=\'' + args.p1 + '\'';
- }
- },
- {
- // 'gcli save' command
- item: 'command',
- name: 'gcli save',
- description: 'a file param in save mode',
- params: [
- {
- name: 'p1',
- type: {
- name: 'file',
- filetype: 'file',
- existing: 'maybe'
- },
- description: 'open param'
- }
- ],
- exec: function(args, context) {
- return 'p1=\'' + args.p1 + '\'';
- }
- },
- {
- // 'gcli cd' command
- item: 'command',
- name: 'gcli cd',
- description: 'a file param in cd mode',
- params: [
- {
- name: 'p1',
- type: {
- name: 'file',
- filetype: 'directory',
- existing: 'yes'
- },
- description: 'open param'
- }
- ],
- exec: function(args, context) {
- return 'p1=\'' + args.p1 + '\'';
- }
- },
- {
- // 'gcli mkdir' command
- item: 'command',
- name: 'gcli mkdir',
- description: 'a file param in mkdir mode',
- params: [
- {
- name: 'p1',
- type: {
- name: 'file',
- filetype: 'directory',
- existing: 'no'
- },
- description: 'open param'
- }
- ],
- exec: function(args, context) {
- return 'p1=\'' + args.p1 + '\'';
- }
- },
- {
- // 'gcli rm' command
- item: 'command',
- name: 'gcli rm',
- description: 'a file param in rm mode',
- params: [
- {
- name: 'p1',
- type: {
- name: 'file',
- filetype: 'any',
- existing: 'yes'
- },
- description: 'open param'
- }
- ],
- exec: function(args, context) {
- return 'p1=\'' + args.p1 + '\'';
- }
- }
];
diff --git a/lib/gcli/items/basic.js b/lib/gcli/items/basic.js
index <HASH>..<HASH> 100644
--- a/lib/gcli/items/basic.js
+++ b/lib/gcli/items/basic.js
@@ -32,7 +32,6 @@ exports.items = [
require('../types/boolean').items,
require('../types/command').items,
require('../types/date').items,
- require('../types/file').items,
require('../types/javascript').items,
require('../types/node').items,
require('../types/number').items,
diff --git a/lib/gcli/items/server.js b/lib/gcli/items/server.js
index <HASH>..<HASH> 100644
--- a/lib/gcli/items/server.js
+++ b/lib/gcli/items/server.js
@@ -22,6 +22,8 @@
* Keeping this module small helps reduce bringing in unwanted dependencies.
*/
exports.items = [
+ require('../types/file').items,
+
require('../commands/server/exit').items,
require('../commands/server/firefox').items,
require('../commands/server/orion').items,
diff --git a/lib/gcli/types/file.js b/lib/gcli/types/file.js
index <HASH>..<HASH> 100644
--- a/lib/gcli/types/file.js
+++ b/lib/gcli/types/file.js
@@ -63,15 +63,11 @@ exports.items = [
}
},
- getSpec: function() {
- var matches = (typeof this.matches === 'string' || this.matches == null) ?
- this.matches :
- this.matches.source; // Assume RegExp
+ getSpec: function(commandName, paramName) {
return {
- name: 'file',
- filetype: this.filetype,
- existing: this.existing,
- matches: matches
+ name: 'remote',
+ commandName: commandName,
+ paramName: paramName
};
},
|
sync-<I>: Remove 'file' from the set of browser types
Means removing some demo commands, and registering the 'file' type in
server-only contexts.
|
joewalker_gcli
|
train
|
84fff89d3f0a67c7c839593db4d79d1b738d63c7
|
diff --git a/normalize/selector.py b/normalize/selector.py
index <HASH>..<HASH> 100644
--- a/normalize/selector.py
+++ b/normalize/selector.py
@@ -431,6 +431,15 @@ class MultiFieldSelector(object):
for x in tail:
yield head_selector + x
+ def __getitem__(self, index):
+ """Returns the MultiFieldSelector that applies to the specified
+ field/key/index"""
+ tail = self.heads[None] if self.has_none else self.heads[index]
+ return type(self)([None]) if tail == all else tail
+
+ def __contains__(self, index):
+ return self.has_none or index in self.heads
+
def __repr__(self):
return "MultiFieldSelector%r" % (tuple(x.selectors for x in self),)
diff --git a/tests/test_selector.py b/tests/test_selector.py
index <HASH>..<HASH> 100644
--- a/tests/test_selector.py
+++ b/tests/test_selector.py
@@ -319,6 +319,17 @@ class TestStructableFieldSelector(unittest.TestCase):
emitted = set(tuple(x.selectors) for x in mfs_dupe)
self.assertEqual(emitted, selectors)
+ # test various dict-like functions
+ self.assertIn("foo", mfs)
+ self.assertIn("bar", mfs)
+ self.assertNotIn("baz", mfs)
+ self.assertIn('bar', mfs['foo'])
+ self.assertIn(0, mfs['foo']['bar'])
+ self.assertIn('hiss', mfs['foo']['bar'][0])
+ self.assertNotIn('miss', mfs['foo']['bar'][0])
+ self.assertIn('baz', mfs['bar'])
+ self.assertIn('baz', mfs['bar']['frop']['quux']['fred'])
+
# if you add a higher level selector, then more specific paths
# disappear from the MFS
mfs2 = MultiFieldSelector(mfs, ["foo", "bar"])
|
MultiFieldSelector: allow traversing using dict indexing
Allow a MultiFieldSelector to be more easily passed down into a visitor
function by supporting __getitem__ and __contains__
|
hearsaycorp_normalize
|
train
|
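
The normalize commit above gives `MultiFieldSelector` dict-like traversal through `__getitem__` and `__contains__`. A toy stand-in showing just that protocol (this class is illustrative and assumes nothing about normalize's internals beyond what the diff shows):

```python
class NestedSelector:
    """Minimal nested selector supporting `in` checks and indexing."""

    def __init__(self, heads):
        self.heads = heads  # field name -> NestedSelector for that subtree

    def __contains__(self, index):
        return index in self.heads

    def __getitem__(self, index):
        return self.heads[index]


sel = NestedSelector({"foo": NestedSelector({"bar": NestedSelector({})})})
assert "foo" in sel
assert "bar" in sel["foo"]
assert "baz" not in sel
```

Supporting the indexing protocol lets a visitor function be handed `selector[field]` for each field it descends into, which is the use case the commit message describes.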
2456aaa5f425e46abeace7a005962058a422fbdd
|
diff --git a/doc/_ext/saltdomain.py b/doc/_ext/saltdomain.py
index <HASH>..<HASH> 100644
--- a/doc/_ext/saltdomain.py
+++ b/doc/_ext/saltdomain.py
@@ -290,7 +290,7 @@ class SaltDomain(python_domain.PythonDomain):
type, target, node, contnode)
# Monkey-patch the Python domain remove the python module index
-python_domain.PythonDomain.indices = []
+python_domain.PythonDomain.indices = [SaltModuleIndex]
def setup(app):
|
Porting PR #<I> to <I>
|
saltstack_salt
|
train
|
885c7994f3f4f2d76de8f28b046a4bd9f697afd2
|
diff --git a/src/frontend/org/voltdb/dbmonitor/js/voltdb.admin.ui.js b/src/frontend/org/voltdb/dbmonitor/js/voltdb.admin.ui.js
index <HASH>..<HASH> 100644
--- a/src/frontend/org/voltdb/dbmonitor/js/voltdb.admin.ui.js
+++ b/src/frontend/org/voltdb/dbmonitor/js/voltdb.admin.ui.js
@@ -8,6 +8,7 @@ var editStates = {
ShowLoading: 2
};
var INT_MAX_VALUE = 2147483647;
+var client_port = 0;
function getListOfRoles() {
// Got to figure out what roles are available.
@@ -5108,12 +5109,13 @@ function loadAdminPage() {
this.exportTypes = [];
this.isSaveSnapshot = false;
- this.server = function (hostIdvalue, serverNameValue, serverStateValue, ipAddress, HTTPPORT) {
+ this.server = function (hostIdvalue, serverNameValue, serverStateValue, ipAddress, HTTPPORT, ClientPort) {
this.hostId = hostIdvalue;
this.serverName = serverNameValue;
this.serverState = serverStateValue;
this.ipAddress = ipAddress;
this.httpPort = HTTPPORT;
+ this.clientPort = ClientPort;
};
this.stoppedServer = function (hostIdvalue, serverNameValue) {
@@ -5139,6 +5141,7 @@ function loadAdminPage() {
this.displayPortAndRefreshClusterState = function (portAndClusterValues, serverSettings) {
if (portAndClusterValues != undefined && VoltDbAdminConfig.isAdmin) {
+ client_port = portAndClusterValues.clientPort;
configurePortAndOverviewValues(portAndClusterValues, serverSettings);
refreshClusterValues(portAndClusterValues);
configurePromoteAction(portAndClusterValues);
diff --git a/src/frontend/org/voltdb/dbmonitor/js/voltdb.render-10.js b/src/frontend/org/voltdb/dbmonitor/js/voltdb.render-10.js
index <HASH>..<HASH> 100644
--- a/src/frontend/org/voltdb/dbmonitor/js/voltdb.render-10.js
+++ b/src/frontend/org/voltdb/dbmonitor/js/voltdb.render-10.js
@@ -3263,7 +3263,7 @@ function set_kubernetes(server,port){
VoltDbAdminConfig.servers.length > 0 &&
check_hostid(hostId)
) {
- VoltDbAdminConfig.servers.sort(sortByHostId)
+ VoltDbAdminConfig.servers.sort(sortByHostId);
$.each(VoltDbAdminConfig.servers, function (id, value) {
{
if (
@@ -3275,7 +3275,8 @@ function set_kubernetes(server,port){
serverInfo["HOSTNAME"],
serverInfo["CLUSTERSTATE"],
serverInfo["IPADDRESS"],
- serverInfo["HTTPPORT"]
+ serverInfo["HTTPPORT"],
+ serverInfo["CLIENTPORT"]
);
VoltDbAdminConfig.servers[iteratorCount] = serverDetails;
@@ -3304,10 +3305,11 @@ function set_kubernetes(server,port){
hostname,
serverInfo["CLUSTERSTATE"],
serverInfo["IPADDRESS"],
- serverInfo["HTTPPORT"]
+ serverInfo["HTTPPORT"],
+ serverInfo["CLIENTPORT"]
);
VoltDbAdminConfig.servers.push(serverDetails);
- VoltDbAdminConfig.servers.sort(sortByHostId)
+ VoltDbAdminConfig.servers.sort(sortByHostId);
count++;
}
};
@@ -3401,9 +3403,11 @@ function set_kubernetes(server,port){
conn = false;
}
if (location.port == val.httpPort){
- conn = false;
- className = "disableServer";
- currentServerColumnClass = "shutdownServer stopDisable";
+ if (client_port == val.clientPort){
+ conn = false;
+ className = "disableServer";
+ currentServerColumnClass = "shutdownServer stopDisable";
+ }
}
currentServerColumnClass =
voltDbRenderer.currentHost == val.serverName
|
VDBMC-<I>: Fixed stop button for multiple machine
|
VoltDB_voltdb
|
train
|
5257f13fa45ea8908a3214492d03353bec562fc7
|
diff --git a/src/mako/application/cli/commands/app/ListRoutes.php b/src/mako/application/cli/commands/app/ListRoutes.php
index <HASH>..<HASH> 100644
--- a/src/mako/application/cli/commands/app/ListRoutes.php
+++ b/src/mako/application/cli/commands/app/ListRoutes.php
@@ -61,6 +61,7 @@ class ListRoutes extends Command
implode(', ', $route->getMethods()),
$action,
implode(', ', $route->getMiddleware()),
+ implode(', ', $route->getConstraints()),
(string) $route->getName(),
];
}
@@ -72,10 +73,14 @@ class ListRoutes extends Command
'<green>Route</green>',
'<green>Allowed methods</green>',
'<green>Action</green>',
- '<green>Middleware</green>',
+ '<green>Middleware *</green>',
+ '<green>Constraints **</green>',
'<green>Name</green>',
];
$this->table($headers, $routeCollection);
+
+ $this->write('<green>*</green> <faded>Global middleware is not listed.</faded>');
+ $this->write('<green>**</green> <faded>Global constraints are not listed.</faded>');
}
}
|
The app.routes command now lists route constraints
|
mako-framework_framework
|
train
|
044ee2a9f54d70ead162b16b475188c2753dbbb1
|
diff --git a/code_generation/code_generator_online.py b/code_generation/code_generator_online.py
index <HASH>..<HASH> 100644
--- a/code_generation/code_generator_online.py
+++ b/code_generation/code_generator_online.py
@@ -325,6 +325,7 @@ def output(folder, results, html_content=None):
# end if
# write crawled data
+ mkdir_p(folder)
with open(path_join(folder, "api.py"), "w") as f:
f.write("[\n ")
f.write(",\n ".join([repr(result) for result in results]))
@@ -422,14 +423,15 @@ def safe_to_file(folder, results):
assert isinstance(result, Function)
import_path = "pytgbot.bot."
file_path = calc_path_and_create_folders(folder, import_path)
- result.filepath = [file_path, None]
+ result.filepath = file_path
functions.append(result)
if result.name.startswith('send_'):
import_path = "teleflask_messages."
file_path = calc_path_and_create_folders(folder, import_path)
- result = safe_eval(repr(result), SAVE_VALUES) # serialize + unserialize = deepcopy
- result.filepath = file_path
+ result2 = safe_eval(repr(result), SAVE_VALUES) # serialize + unserialize = deepcopy
+ result2.filepath = file_path
+ message_send_functions.append(result2)
# end if
# end if
# end for
@@ -463,6 +465,12 @@ def safe_to_file(folder, results):
f.write(txt)
# end with
# end if
+ if message_send_functions:
+ txt = teleflask_messages_template.render(functions=message_send_functions)
+ with open(message_send_functions[0].filepath, "w") as f:
+ f.write(txt)
+ # end with
+ # end if
# end def
diff --git a/code_generation/code_generator_template.py b/code_generation/code_generator_template.py
index <HASH>..<HASH> 100644
--- a/code_generation/code_generator_template.py
+++ b/code_generation/code_generator_template.py
@@ -304,8 +304,8 @@ class Variable(dict):
name: str = None,
types: List['Type'] = None,
optional: bool = None,
- default: Optional[str, None] = None,
- description: Optional[str]=None
+ default: Optional[str] = None,
+ description: Optional[str] = None
):
"""
:param api_name: Name the telegram api uses.
diff --git a/code_generation/templates/macros.template b/code_generation/templates/macros.template
index <HASH>..<HASH> 100644
--- a/code_generation/templates/macros.template
+++ b/code_generation/templates/macros.template
@@ -9,7 +9,7 @@
{%- macro for_args_keys(variables) -%}{%- for variable in variables %}"{{ variable.name }}"{% if not loop.last %}, {% endif -%}{%- endfor %}{% endmacro %}
{%- macro for_args_set(variables) -%}{%- for variable in variables %}{{ variable.name }}={{ variable.name }}{% if not loop.last %}, {% endif -%}{%- endfor %}{% endmacro %}
{%- macro for_args_set_self(variables) -%}{%- for variable in variables %}{{ variable.name }}=self.{{ variable.name }}{% if not loop.last %}, {% endif -%}{%- endfor %}{% endmacro %}
-{%- macro for_args_none(variables) -%}{%- for variable in variables %}{{ variable.name }}{% if variable.optional %}={% if variable.default is None %}None{% else %}{{ variable.default }}{% endif %}{% endif %}{% if not loop.last %}, {% endif -%}{%- endfor %}{% endmacro %}
+{%- macro for_args_none(variables) -%}{%- for variable in variables %}{{ variable.name }}{% if variable.optional %}={% if variable.default == None %}None{% else %}{{ variable.default }}{% endif %}{% endif %}{% if not loop.last %}, {% endif -%}{%- endfor %}{% endmacro %}
{%- macro for_args_format_str(variables) -%}{%- for variable in variables %}{{ variable.name }}={{ "{" }}self.{{ variable.name }}{{ "}" }}{% if not loop.last %}, {% endif -%}{%- endfor %}{% endmacro %}
{%- macro for_args_format_repr(variables) -%}{%- for variable in variables %}{{ variable.name }}={{ "{" }}self.{{ variable.name }}{{ "!r}" }}{% if not loop.last %}, {% endif -%}{%- endfor %}{% endmacro %}
{%- macro types_as_assert_tuple(variable) -%}{%- if variable.types|length > 1 %}({{ for_type_or_list(variable) }}){% else -%}{{ for_type_or_list(variable) }}{% endif -%}{%- endmacro -%}
|
[code_generation] Fixed some WIP bugs for that new template
|
luckydonald_pytgbot
|
train
|
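
One of the fixes in the pytgbot record above copies a Function by round-tripping it through `repr` and `safe_eval` before assigning a second filepath, so the original entry is not mutated. In plain Python the standard-library equivalent of that serialize-and-reparse deep copy is `copy.deepcopy`; a sketch with hypothetical data:

```python
import copy

original = {"name": "send_message", "filepath": "pytgbot/bot.py"}

clone = copy.deepcopy(original)           # independent copy, like repr + eval
clone["filepath"] = "teleflask/messages.py"

assert original["filepath"] == "pytgbot/bot.py"  # original left untouched
```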
c946e1441924cf6423a58efb10167a04ee1d9aaf
|
diff --git a/lib/genealogy/alter_methods.rb b/lib/genealogy/alter_methods.rb
index <HASH>..<HASH> 100644
--- a/lib/genealogy/alter_methods.rb
+++ b/lib/genealogy/alter_methods.rb
@@ -8,7 +8,7 @@ module Genealogy
# add method
define_method "add_#{parent}" do |relative|
unless relative.nil?
- raise IncompatibleObjectException, "Linked objects must be instances of the same class: got #{relative.class} for #{self.class}" unless relative.is_a? self.class
+ raise IncompatibleObjectException, "Linked objects must be instances of the same class: got #{relative.class} for #{self.class}" unless relative.class.respond_to?(:genealogy_enabled)
incompatible_parents = self.offspring | self.siblings.to_a | [self]
raise IncompatibleRelationshipException, "#{relative} can't be #{parent} of #{self}" if incompatible_parents.include? relative
raise WrongSexException, "Can't add a #{relative.sex} #{parent}" unless (parent == :father and relative.is_male?) or (parent == :mother and relative.is_female?)
diff --git a/lib/genealogy/genealogy.rb b/lib/genealogy/genealogy.rb
index <HASH>..<HASH> 100644
--- a/lib/genealogy/genealogy.rb
+++ b/lib/genealogy/genealogy.rb
@@ -26,7 +26,8 @@ module Genealogy
end
end
- class_attribute :spouse_enabled
+ class_attribute :genealogy_enabled, :spouse_enabled
+ self.genealogy_enabled = true
self.spouse_enabled = options[:spouse].try(:==,true) || false
tracked_parents = [:father, :mother]
tracked_parents << :spouse if spouse_enabled
|
check on compatibility among linked objects changed
|
masciugo_genealogy
|
train
|
341e79c469fe9aa9bbce5717a8093e14f0fb77a4
|
diff --git a/tests/TestCase/View/Helper/BreadcrumbsHelperTest.php b/tests/TestCase/View/Helper/BreadcrumbsHelperTest.php
index <HASH>..<HASH> 100644
--- a/tests/TestCase/View/Helper/BreadcrumbsHelperTest.php
+++ b/tests/TestCase/View/Helper/BreadcrumbsHelperTest.php
@@ -58,7 +58,7 @@ class BreadcrumbsHelperTest extends TestCase
$result = $this->Breadcrumbs
->add('joe', null, $attributes['itemWithoutLink'])
- ->add('black', ['controller' => 'foo', 'action' => 'bar'], $attributes['itemWithoutLink'])
+ ->add('black', '/foo/bar', $attributes['itemWithoutLink'])
->render($attributes['wrapper'], $attributes['separator']);
$expected = '<ol class="wrapper-class"><li class="itemWithoutLink-class"><span class="itemWithoutLink-inner-class">joe</span></li><li class="separator-class"><span class="separator-inner-class"></span></li><li class="itemWithoutLink-class"><a href="/foo/bar" class="itemWithoutLink-inner-class">black</a></li></ol>';
|
Fix testing requiring route config file
|
FriendsOfCake_bootstrap-ui
|
train
|
205eec54d0f1ddadab30070be472e2b467504cf9
|
diff --git a/kafka_utils/util/ssh.py b/kafka_utils/util/ssh.py
index <HASH>..<HASH> 100644
--- a/kafka_utils/util/ssh.py
+++ b/kafka_utils/util/ssh.py
@@ -51,7 +51,7 @@ class Connection:
:raises SSHException: if the server fails to execute the command
"""
- new_command = "sudo -S -p 'sudo password:' {0}".format(command)
+ new_command = "sudo {0}".format(command)
return self.exec_command(new_command, bufsize)
def exec_command(self, command, bufsize=-1, check_status=True):
|
PR changes to remove -S -p from sudo command.
|
Yelp_kafka-utils
|
train
|
aec340f02cc33c38f47f0172915fc7b1d904d158
|
diff --git a/go/vt/vttablet/tabletmanager/vreplication/vplayer_flaky_test.go b/go/vt/vttablet/tabletmanager/vreplication/vplayer_flaky_test.go
index <HASH>..<HASH> 100644
--- a/go/vt/vttablet/tabletmanager/vreplication/vplayer_flaky_test.go
+++ b/go/vt/vttablet/tabletmanager/vreplication/vplayer_flaky_test.go
@@ -1271,16 +1271,9 @@ func TestPlayerRowMove(t *testing.T) {
func TestPlayerTypes(t *testing.T) {
log.Errorf("TestPlayerTypes: flavor is %s", env.Flavor)
enableJSONColumnTesting := true
- // some unit test
- supportedFlavorsForJSONColumnTesting := []string{"mysql57", "mysql80"}
- for _, flavor := range supportedFlavorsForJSONColumnTesting {
- if strings.EqualFold(env.Flavor, flavor) {
- log.Warningf("Enabling testing JSON columns for flavor: %s", env.Flavor)
- enableJSONColumnTesting = true
- break
- } else {
- log.Warningf("Not testing JSON columns for flavor: %s", env.Flavor)
- }
+ flavor := strings.ToLower(env.Flavor)
+ if strings.Contains(flavor, "percona") || strings.Contains(flavor, "mariadb") {
+ enableJSONColumnTesting = false
}
defer deleteTablet(addTablet(100))
|
Don't run json test in CI for percona/mariadb flavors, which either don't have json support or have not yet been enabled
|
vitessio_vitess
|
train
|
bce520c6111c2afa690a1aa6dc99d63c6631d6c2
|
diff --git a/src/views/master.blade.php b/src/views/master.blade.php
index <HASH>..<HASH> 100644
--- a/src/views/master.blade.php
+++ b/src/views/master.blade.php
@@ -6,8 +6,8 @@
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="csrf-token" content="<?= csrf_token() ?>" />
<!-- Fonts -->
- <link href='http://fonts.googleapis.com/css?family=Roboto+Condensed:300,400' rel='stylesheet' type='text/css'>
- <link href='http://fonts.googleapis.com/css?family=Lato:300,400,700,900' rel='stylesheet' type='text/css'>
+ <link href='https://fonts.googleapis.com/css?family=Roboto+Condensed:300,400|Lato:300,400,700,900' rel='stylesheet' type='text/css'>
+
<!-- CSS Libs -->
<link rel="stylesheet" type="text/css" href="{{ config('voyager.assets_path') }}/lib/css/bootstrap.min.css">
<link rel="stylesheet" type="text/css" href="{{ config('voyager.assets_path') }}/lib/css/animate.min.css">
|
Use https and get the fonts using only 1 request
|
the-control-group_voyager
|
train
|
b51000fc984a709bbb6681be1f0c5cea56eeaf5a
|
diff --git a/inc/covergenerator/class-docraptorpdf.php b/inc/covergenerator/class-docraptorpdf.php
index <HASH>..<HASH> 100644
--- a/inc/covergenerator/class-docraptorpdf.php
+++ b/inc/covergenerator/class-docraptorpdf.php
@@ -76,7 +76,7 @@ class DocraptorPdf extends Generator {
*/
public function __construct( Input $input ) {
$this->pdfProfile = apply_filters( 'pb_pdf_for_print_profile', 'PDF/X-1a:2003' );
- $this->pdfOutputIntent = plugins_url( 'pressbooks-docraptor/assets/icc/USWebCoatedSWOP.icc' );
+ $this->pdfOutputIntent = PB_PLUGIN_URL . 'assets/icc/USWebCoatedSWOP.icc';
parent::__construct( $input );
}
diff --git a/inc/modules/export/prince/class-docraptor.php b/inc/modules/export/prince/class-docraptor.php
index <HASH>..<HASH> 100644
--- a/inc/modules/export/prince/class-docraptor.php
+++ b/inc/modules/export/prince/class-docraptor.php
@@ -105,7 +105,7 @@ class Docraptor extends Pdf {
}
$result = \download_url( $status_response->getDownloadUrl() );
if ( is_wp_error( $result ) ) {
- $_SESSION['pb_errors'][] = __( 'Your PDF could not be retrieved.', 'pressbooks-docraptor' );
+ $_SESSION['pb_errors'][] = __( 'Your PDF could not be retrieved.', 'pressbooks' );
} else {
copy( $result, $this->outputPath );
unlink( $result );
diff --git a/inc/modules/export/prince/class-filters.php b/inc/modules/export/prince/class-filters.php
index <HASH>..<HASH> 100644
--- a/inc/modules/export/prince/class-filters.php
+++ b/inc/modules/export/prince/class-filters.php
@@ -71,8 +71,8 @@ class Filters {
public function addToFormats( $formats ) {
$formats['standard'] =
[
- 'docraptor_print' => __( 'PDF (for print)', 'pressbooks-docraptor' ),
- 'docraptor' => __( 'PDF (for digital distribution)', 'pressbooks-docraptor' ),
+ 'docraptor_print' => __( 'PDF (for print)', 'pressbooks' ),
+ 'docraptor' => __( 'PDF (for digital distribution)', 'pressbooks' ),
] + $formats['standard'];
unset( $formats['standard']['pdf'] );
|
Fix path to USWebCoatedSWOP.icc (#<I>)
Bad copy/paste from pressbooks-docraptor
|
pressbooks_pressbooks
|
train
|
c0a558a309c4eae4f7ca62a02e8b47814a51d694
|
diff --git a/src/JMS/Serializer/EventDispatcher/Events.php b/src/JMS/Serializer/EventDispatcher/Events.php
index <HASH>..<HASH> 100644
--- a/src/JMS/Serializer/EventDispatcher/Events.php
+++ b/src/JMS/Serializer/EventDispatcher/Events.php
@@ -22,6 +22,7 @@ abstract class Events
{
const PRE_SERIALIZE = 'serializer.pre_serialize';
const POST_SERIALIZE = 'serializer.post_serialize';
+ const PRE_DESERIALIZE = 'serializer.pre_deserialize';
const POST_DESERIALIZE = 'serializer.post_deserialize';
final private function __construct() { }
|
Add the PRE_DESERIALIZE event to the Events class
|
schmittjoh_serializer
|
train
|
41136f1a3f08c99e065d56b6315cec11460b6899
|
diff --git a/src/plone/app/mosaic/browser/static/js/mosaic.core.js b/src/plone/app/mosaic/browser/static/js/mosaic.core.js
index <HASH>..<HASH> 100755
--- a/src/plone/app/mosaic/browser/static/js/mosaic.core.js
+++ b/src/plone/app/mosaic/browser/static/js/mosaic.core.js
@@ -270,6 +270,7 @@ define([
break;
case "plone.app.z3cform.wysiwyg.widget.WysiwygWidget":
case "plone.app.z3cform.wysiwyg.widget.WysiwygFieldWidget":
+ case "plone.app.widgets.dx.RichTextWidget":
fieldhtml = $("#" + tile_config.id)
.find('textarea').attr('value');
break;
diff --git a/src/plone/app/mosaic/browser/static/js/mosaic.overlay.js b/src/plone/app/mosaic/browser/static/js/mosaic.overlay.js
index <HASH>..<HASH> 100644
--- a/src/plone/app/mosaic/browser/static/js/mosaic.overlay.js
+++ b/src/plone/app/mosaic/browser/static/js/mosaic.overlay.js
@@ -170,7 +170,7 @@ define([
visible_tabs = formtabs.children(':not(.mosaic-hidden)');
// Select first tab
- visible_tabs.eq(0).children('a').addClass('active');
+ visible_tabs.eq(0).addClass('active');
form.find('#fieldset-' +
visible_tabs.eq(0).attr('href').split('-')[1])
.addClass('active');
|
Fix setting the active tab on page properties.
Fix setting the rich text field.
|
plone_plone.app.mosaic
|
train
|
f4642c1a9ce2e2681d136ffbdb012c72a4ead386
|
diff --git a/lib/helpers/iterate.js b/lib/helpers/iterate.js
index <HASH>..<HASH> 100644
--- a/lib/helpers/iterate.js
+++ b/lib/helpers/iterate.js
@@ -15,7 +15,7 @@ import { isArrayLike, isUndefined } from './checkTypes';
/**
* @function iterate
- * @param {(Object|Array|null)} object - Value to iterate over.
+ * @param {(Object|Array|null|undefined)} object - Value to iterate over.
* @param {IterateCallback} callback - Callback that is called on every iteration.
* @returns {*} If callback returns not undefined then iterate returns this value.
* @description Function for iterating over all types of values.
|
helpers.iterate: iterate over undefined.
|
dwaynejs_dwayne
|
train
|
98c0afa781250515378c66bec09c2b70dbf74e70
|
diff --git a/girder/api/v1/folder.py b/girder/api/v1/folder.py
index <HASH>..<HASH> 100644
--- a/girder/api/v1/folder.py
+++ b/girder/api/v1/folder.py
@@ -109,13 +109,13 @@ class Folder(Resource):
.errorResponse('Read access was denied on the parent resource.', 403)
)
def findPosition(self, folder, parentType, parentId, text, name, limit, offset, sort):
- filters = {}
if len(sort) != 1 or sort[0][0] not in folder:
raise RestException('Invalid sort mode.')
- dir = '$lt' if sort[0][1] == SortDir.ASCENDING else '$gt'
+ sortField, sortDir = sort[0]
+ dir = '$lt' if sortDir == SortDir.ASCENDING else '$gt'
filters = {'$or': [
- {sort[0][0]: {dir: folder.get(sort[0][0])}},
- {sort[0][0]: folder.get(sort[0][0]), '_id': {dir: folder['_id']}}
+ {sortField: {dir: folder.get(sortField)}},
+ {sortField: folder.get(sortField), '_id': {dir: folder['_id']}}
]}
# limit and offset are actually ignored
cursor = self._find(parentType, parentId, text, name, limit, offset, sort, filters)
diff --git a/girder/api/v1/item.py b/girder/api/v1/item.py
index <HASH>..<HASH> 100644
--- a/girder/api/v1/item.py
+++ b/girder/api/v1/item.py
@@ -104,13 +104,13 @@ class Item(Resource):
.errorResponse('Read access was denied on the parent folder.', 403)
)
def findPosition(self, item, folderId, text, name, limit, offset, sort):
- filters = {}
if len(sort) != 1 or sort[0][0] not in item:
raise RestException('Invalid sort mode.')
- dir = '$lt' if sort[0][1] == SortDir.ASCENDING else '$gt'
+ sortField, sortDir = sort[0]
+ dir = '$lt' if sortDir == SortDir.ASCENDING else '$gt'
filters = {'$or': [
- {sort[0][0]: {dir: item.get(sort[0][0])}},
- {sort[0][0]: item.get(sort[0][0]), '_id': {dir: item['_id']}}
+ {sortField: {dir: item.get(sortField)}},
+ {sortField: item.get(sortField), '_id': {dir: item['_id']}}
]}
# limit and offset are actually ignored
cursor = self._find(folderId, text, name, limit, offset, sort, filters)
|
Make some code clearer.
Remove useless lines.
|
girder_girder
|
train
|
b8a654cb195b8f7db38a501e6d7178d2b0b070da
|
diff --git a/tests/bootstrap.php b/tests/bootstrap.php
index <HASH>..<HASH> 100644
--- a/tests/bootstrap.php
+++ b/tests/bootstrap.php
@@ -26,6 +26,6 @@ if (
}
/** @var Composer\Autoload\ClassLoader $loader */
-$loader->addPsr4('Contao\\Composer\\Test\\', __DIR__);
+$loader->addPsr4('Contao\Composer\Test\\', __DIR__);
return $loader;
|
Correctly escape back slashes in single quote strings.
|
contao-components_installer
|
train
|
e42443ca9ec849fb4ecaf47a90e7922081a08d18
|
diff --git a/lib/ok_computer/built_in_checks/cache_check.rb b/lib/ok_computer/built_in_checks/cache_check.rb
index <HASH>..<HASH> 100644
--- a/lib/ok_computer/built_in_checks/cache_check.rb
+++ b/lib/ok_computer/built_in_checks/cache_check.rb
@@ -1,7 +1,12 @@
module OkComputer
+ # Verifies that the Rails cache is set up and can speak with Memcached
+ # running on the given host (defaults to local).
class CacheCheck < Check
+ attr_accessor :host
- ConnectionFailed = Class.new(StandardError)
+ def initialize(host=Socket.gethostname)
+ self.host = host
+ end
# Public: Check whether the cache is active
def check
@@ -14,9 +19,9 @@ module OkComputer
# Public: Outputs stats string for cache
def stats
stats = Rails.cache.stats
- host = stats.select{|k,v| k =~ Regexp.new(Socket.gethostname) }.values[0]
- mem_used = to_megabytes host['bytes']
- mem_max = to_megabytes host['limit_maxbytes']
+ values = stats.select{|k,v| k =~ Regexp.new(host) }.values[0]
+ mem_used = to_megabytes values['bytes']
+ mem_max = to_megabytes values['limit_maxbytes']
return "#{mem_used} / #{mem_max} MB, #{stats.count - 1} peers"
rescue => e
raise ConnectionFailed, e
@@ -28,5 +33,7 @@ module OkComputer
def to_megabytes(bytes)
bytes.to_i / (1024 * 1024)
end
+
+ ConnectionFailed = Class.new(StandardError)
end
end
diff --git a/spec/ok_computer/built_in_checks/cache_check_spec.rb b/spec/ok_computer/built_in_checks/cache_check_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/ok_computer/built_in_checks/cache_check_spec.rb
+++ b/spec/ok_computer/built_in_checks/cache_check_spec.rb
@@ -12,6 +12,18 @@ module OkComputer
subject.should be_a Check
end
+ context "new(host)" do
+ it "remembers the host given to it" do
+ subject = CacheCheck.new("example.com")
+ expect(subject.host).to eq("example.com")
+ end
+
+ it "defaults to the machine's name" do
+ subject = CacheCheck.new
+ expect(subject.host).to eq(Socket.gethostname)
+ end
+ end
+
context "#check" do
let(:stats) { "foo" }
let(:error_message) { "Error message" }
|
Make host configurable in CacheCheck
|
sportngin_okcomputer
|
train
|
7abc3250668fac17c03d43f17007cd1cd20f2423
|
diff --git a/src/main/java/pl/pkk82/filehierarchygenerator/FileHierarchyGenerator.java b/src/main/java/pl/pkk82/filehierarchygenerator/FileHierarchyGenerator.java
index <HASH>..<HASH> 100644
--- a/src/main/java/pl/pkk82/filehierarchygenerator/FileHierarchyGenerator.java
+++ b/src/main/java/pl/pkk82/filehierarchygenerator/FileHierarchyGenerator.java
@@ -73,10 +73,11 @@ public class FileHierarchyGenerator {
}
public FileHierarchyGenerator directory(String directoryName) {
- Path newCurrentDirectory = currentDirectory.resolve(directoryName);
+ Path directoryPath = Paths.get(directoryName);
+ Path newCurrentDirectory = currentDirectory.resolve(directoryPath);
directoriesToCreate.add(newCurrentDirectory);
currentDirectory = newCurrentDirectory;
- level++;
+ level += directoryPath.getNameCount();
return this;
}
@@ -191,7 +192,8 @@ public class FileHierarchyGenerator {
private void validateLevel() {
if (level - 1 < 0) {
- throw new IllegalInvocationException("up method should not be invoked in current context (root directory)");
+ throw new IllegalInvocationException("up method should not be invoked in current context (root " +
+ "directory)");
}
}
diff --git a/src/test/java/pl/pkk82/filehierarchygenerator/FileHierarchyGeneratorTest.java b/src/test/java/pl/pkk82/filehierarchygenerator/FileHierarchyGeneratorTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/pl/pkk82/filehierarchygenerator/FileHierarchyGeneratorTest.java
+++ b/src/test/java/pl/pkk82/filehierarchygenerator/FileHierarchyGeneratorTest.java
@@ -56,8 +56,7 @@ public class FileHierarchyGeneratorTest {
}
@Test
- public void shouldCreateDirectoryHierarchy() {
- // when
+ public void shouldCreateDirectories() {
givenFileHierarchyGenerator("workspace")
.directory("book")
.directory("spring-in-action-2011");
@@ -66,6 +65,18 @@ public class FileHierarchyGeneratorTest {
}
@Test
+ public void shouldCreateDirectoriesWithSeparator() {
+ givenFileHierarchyGenerator("workspace")
+ .directory("book/spring-in-action-2011")
+ .up().up()
+ .directory("book/spring-in-action-2007");
+ whenGenerateFileHierarchy();
+ thenFileHierarchy().hasCountOfSubdirs(3)
+ .containsSubdir("spring-in-action-2011", "book")
+ .containsSubdir("spring-in-action-2007", "book");
+ }
+
+ @Test
public void shouldCreateTwoSubdirectories() {
givenFileHierarchyGenerator("workspace")
.directory("book")
|
Correct behaviour for directories with separator
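A hypothetical Python sketch of the underlying fix — resolve the multi-segment name as a path and advance the nesting level by its segment count rather than by one (pathlib-based; class and method names are illustrative):

from pathlib import PurePath

class HierarchyBuilder:
    def __init__(self):
        self.current = PurePath(".")
        self.level = 0

    def directory(self, name: str) -> "HierarchyBuilder":
        path = PurePath(name)
        self.current = self.current / path
        # "book/spring-in-action-2011" adds two levels, not one.
        self.level += len(path.parts)
        return self

builder = HierarchyBuilder().directory("book/spring-in-action-2011")
assert builder.level == 2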
|
pkk82_file-hierarchy-generator
|
train
|
3d300f5417f026943a50452ba23e54ba15e3cf3f
|
diff --git a/lib/health_inspector/checklists/base.rb b/lib/health_inspector/checklists/base.rb
index <HASH>..<HASH> 100644
--- a/lib/health_inspector/checklists/base.rb
+++ b/lib/health_inspector/checklists/base.rb
@@ -26,16 +26,22 @@ module HealthInspector
@context.knife.ui
end
+ def server_items
+ raise NotImplementedError, "You must implement this method in a subclass"
+ end
+
+ def local_items
+ raise NotImplementedError, "You must implement this method in a subclass"
+ end
+
def all_item_names
( server_items + local_items ).uniq.sort
end
- # Subclasses should collect all items from the server and the local repo,
- # and for each item pair, yield an object that contains a reference to
- # the server item, and the local repo item. A reference can be nil if it does
- # not exist in one of the locations.
def each_item
- raise NotImplementedError, "You must implement this method in a subclass"
+ all_item_names.each do |name|
+ yield load_item(name)
+ end
end
def run
diff --git a/lib/health_inspector/checklists/cookbooks.rb b/lib/health_inspector/checklists/cookbooks.rb
index <HASH>..<HASH> 100644
--- a/lib/health_inspector/checklists/cookbooks.rb
+++ b/lib/health_inspector/checklists/cookbooks.rb
@@ -91,30 +91,22 @@ module HealthInspector
title "cookbooks"
- def each_item
- all_cookbook_names = ( server_cookbooks.keys + local_cookbooks.keys ).uniq.sort
-
- all_cookbook_names.each do |name|
- yield load_item(name)
- end
- end
-
def load_item(name)
Cookbook.new(@context,
:name => name,
- :server => server_cookbooks[name],
- :local => local_cookbooks[name]
+ :server => server_items[name],
+ :local => local_items[name]
)
end
- def server_cookbooks
+ def server_items
@context.rest.get_rest("/cookbooks").inject({}) do |hsh, (name,version)|
hsh[name] = Chef::Version.new(version["versions"].first["version"])
hsh
end
end
- def local_cookbooks
+ def local_items
@context.cookbook_path.
map { |path| Dir["#{path}/*"] }.
flatten.
@@ -129,6 +121,10 @@ module HealthInspector
end
end
+ def all_item_names
+ (server_items.keys + local_items.keys).uniq.sort
+ end
+
end
end
end
diff --git a/lib/health_inspector/checklists/data_bag_items.rb b/lib/health_inspector/checklists/data_bag_items.rb
index <HASH>..<HASH> 100644
--- a/lib/health_inspector/checklists/data_bag_items.rb
+++ b/lib/health_inspector/checklists/data_bag_items.rb
@@ -10,12 +10,6 @@ module HealthInspector
class DataBagItems < Base
title "data bag items"
- def each_item
- all_item_names.each do |name|
- yield load_item(name)
- end
- end
-
def load_item(name)
DataBagItem.new(@context,
:name => name,
diff --git a/lib/health_inspector/checklists/data_bags.rb b/lib/health_inspector/checklists/data_bags.rb
index <HASH>..<HASH> 100644
--- a/lib/health_inspector/checklists/data_bags.rb
+++ b/lib/health_inspector/checklists/data_bags.rb
@@ -9,12 +9,6 @@ module HealthInspector
class DataBags < Base
title "data bags"
- def each_item
- all_item_names.each do |name|
- yield load_item(name)
- end
- end
-
def load_item(name)
DataBag.new(@context,
:name => name,
diff --git a/lib/health_inspector/checklists/environments.rb b/lib/health_inspector/checklists/environments.rb
index <HASH>..<HASH> 100644
--- a/lib/health_inspector/checklists/environments.rb
+++ b/lib/health_inspector/checklists/environments.rb
@@ -16,12 +16,6 @@ module HealthInspector
class Environments < Base
title "environments"
- def each_item
- all_item_names.each do |name|
- yield load_item(name)
- end
- end
-
def load_item(name)
Environment.new(@context,
:name => name,
diff --git a/lib/health_inspector/checklists/roles.rb b/lib/health_inspector/checklists/roles.rb
index <HASH>..<HASH> 100644
--- a/lib/health_inspector/checklists/roles.rb
+++ b/lib/health_inspector/checklists/roles.rb
@@ -11,12 +11,6 @@ module HealthInspector
class Roles < Base
title "roles"
- def each_item
- all_item_names.each do |name|
- yield load_item(name)
- end
- end
-
def load_item(name)
Role.new(@context,
:name => name,
|
Remove duplicate code from checklists
Conflicts:
lib/health_inspector/checklists/cookbooks.rb
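The commit hoists the shared iteration into the base class; a minimal Python sketch of that template-method shape (class and item names are illustrative, not from this repo):

class BaseChecklist:
    def server_items(self):
        raise NotImplementedError("You must implement this method in a subclass")

    def local_items(self):
        raise NotImplementedError("You must implement this method in a subclass")

    def all_item_names(self):
        return sorted(set(self.server_items()) | set(self.local_items()))

    def each_item(self):
        # Defined once here instead of being repeated in every subclass.
        for name in self.all_item_names():
            yield self.load_item(name)

class Roles(BaseChecklist):
    def server_items(self):
        return ["admin", "web"]

    def local_items(self):
        return ["web", "db"]

    def load_item(self, name):
        return {"name": name}

assert Roles().all_item_names() == ["admin", "db", "web"]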
|
bmarini_knife-inspect
|
train
|
3cb0fb4c192b946a318a2b89d465b72b8fce3957
|
diff --git a/test/sizzle/selector.js b/test/sizzle/selector.js
index <HASH>..<HASH> 100755
--- a/test/sizzle/selector.js
+++ b/test/sizzle/selector.js
@@ -1,4 +1,4 @@
-var DomUtils = require("htmlparser2").DomUtils,
+var DomUtils = require("domutils"),
helper = require("../tools/helper.js"),
CSSselect = helper.CSSselect,
assert = require("assert"),
|
Fix unit tests
The unit tests rely on a version of the `DomUtils` module that is newer
than the one currently packaged with `HtmlParser2` [1]. This project
already lists a compatible version `DomUtils` as a dependency, so use
the module directly.
[1]
<URL>
|
fb55_css-select
|
train
|
f845943c47f41445e6f865cb483eb9a3aaf068f4
|
diff --git a/system/src/Grav/Common/Page/Collection.php b/system/src/Grav/Common/Page/Collection.php
index <HASH>..<HASH> 100644
--- a/system/src/Grav/Common/Page/Collection.php
+++ b/system/src/Grav/Common/Page/Collection.php
@@ -273,32 +273,18 @@ class Collection extends Iterator
$end = $endDate ? Utils::date2timestamp($endDate) : false;
$date_range = [];
+ foreach ($this->items as $path => $slug) {
+ $page = $this->pages->get($path);
+ if ($page !== null) {
+ $date = $field ? strtotime($page->value($field)) : $page->date();
- if ($end) {
- foreach ($this->items as $path => $slug) {
- $page = $this->pages->get($path);
- if ($page !== null) {
- $date = $field ? strtotime($page->value($field)) : $page->date();
-
- if ($date >= $start && $date <= $end) {
- $date_range[$path] = $slug;
- }
- }
- }
- } else {
- foreach ($this->items as $path => $slug) {
- $page = $this->pages->get($path);
- if ($page !== null) {
- $date = $field ? strtotime($page->value($field)) : $page->date();
-
- if ($date >= $start) {
- $date_range[$path] = $slug;
- }
+ if ($date >= $start && (!$end || $date <= $end)) {
+ $date_range[$path] = $slug;
}
}
}
+
$this->items = $date_range;
-
return $this;
}
|
Optimize PR #<I> (#<I>)
|
getgrav_grav
|
train
|
4c34442de00f8ce06636dcb76869b4694d28fcea
|
diff --git a/src/Controller/DefaultController.php b/src/Controller/DefaultController.php
index <HASH>..<HASH> 100644
--- a/src/Controller/DefaultController.php
+++ b/src/Controller/DefaultController.php
@@ -203,7 +203,13 @@ class DefaultController extends Base
*/
public function postRemap()
{
- $oUri = Factory::service('Uri');
+ $oUri = Factory::service('Uri');
+ // Test that there's not an explicit method defined for this
+ $sMethod = 'post' . ucfirst($oUri->segment(4));
+ if (method_exists($this, $sMethod)) {
+ return $this->$sMethod();
+ }
+
$oInput = Factory::service('Input');
$oHttpCodes = Factory::service('HttpCodes');
$oItemModel = Factory::model(
|
Allowing child controllers to override specific methods
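A hedged Python analogue of the dispatch trick — check for an explicitly defined handler before falling back to the generic remap (names are illustrative, not the module's API):

class DefaultController:
    def post_remap(self, segment: str):
        # Prefer an explicit post<Segment> handler if a child defines one.
        handler = getattr(self, "post_" + segment.capitalize(), None)
        if callable(handler):
            return handler()
        return self.generic_post(segment)

    def generic_post(self, segment):
        return "generic handling for " + segment

class ChildController(DefaultController):
    def post_Avatar(self):
        return "custom avatar handling"

assert ChildController().post_remap("avatar") == "custom avatar handling"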
|
nails_module-api
|
train
|
9688dacb196b2822393b856cb29d28a413aeb49b
|
diff --git a/tests/test_main.py b/tests/test_main.py
index <HASH>..<HASH> 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -13,6 +13,7 @@ import os
import sys
from tempfile import mkdtemp, TemporaryFile, NamedTemporaryFile
from shutil import rmtree
+import subprocess
import scuba.__main__ as main
import scuba.constants
@@ -110,6 +111,29 @@ class TestMain(TestCase):
assert_str_equalish('my output', out)
+ def test_no_cmd(self):
+ '''Verify scuba works with no given command'''
+
+ with open('.scuba.yml', 'w') as f:
+ f.write('image: {0}\n'.format(DOCKER_IMAGE))
+
+ with TemporaryFile(prefix='scubatest-stdin', mode='w+t') as stdin:
+ stdin.write('echo okay')
+ stdin.seek(0)
+
+ # This mock exists to pass an extra stdin= arg
+ real_subprocess_call = subprocess.call
+ def mocked_subprocess_call(*args, **kw):
+ assert_false('stdin' in kw)
+ kw['stdin'] = stdin
+ return real_subprocess_call(*args, **kw)
+
+ with mock.patch('subprocess.call', side_effect=mocked_subprocess_call):
+ args = []
+ out, _ = self.run_scuba(args)
+
+ assert_str_equalish('okay', out)
+
def test_config_error(self):
'''Verify config errors are handled gracefully'''
|
scuba: Add unit test for running scuba with no argument
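The core mocking technique, as a stripped-down standalone Python sketch — wrap subprocess.call so an extra stdin= argument can be injected (standard-library only; assumes a POSIX sh is available):

import subprocess
import tempfile
from unittest import mock

real_call = subprocess.call

with tempfile.TemporaryFile(mode="w+t") as fake_stdin:
    fake_stdin.write("echo okay")
    fake_stdin.seek(0)

    def call_with_stdin(*args, **kw):
        # The code under test never passes stdin=, so inject our own.
        assert "stdin" not in kw
        kw["stdin"] = fake_stdin
        return real_call(*args, **kw)

    with mock.patch("subprocess.call", side_effect=call_with_stdin):
        subprocess.call(["sh"])  # sh reads "echo okay" from the fake stdin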
|
JonathonReinhart_scuba
|
train
|
e52bbc24f7e2ee196282f7c726c084fe1aa7ecef
|
diff --git a/gdx-ai/src/com/badlogic/gdx/ai/steer/limiters/NullLimiter.java b/gdx-ai/src/com/badlogic/gdx/ai/steer/limiters/NullLimiter.java
index <HASH>..<HASH> 100644
--- a/gdx-ai/src/com/badlogic/gdx/ai/steer/limiters/NullLimiter.java
+++ b/gdx-ai/src/com/badlogic/gdx/ai/steer/limiters/NullLimiter.java
@@ -48,11 +48,6 @@ public class NullLimiter implements Limiter {
return Float.POSITIVE_INFINITY;
}
- @Override
- public float getZeroLinearSpeedThreshold () {
- return 0.001f;
- }
-
};
/** Creates a {@code NullLimiter}. */
@@ -115,11 +110,9 @@ public class NullLimiter implements Limiter {
throw new UnsupportedOperationException();
}
- /** Guaranteed to throw UnsupportedOperationException.
- * @throws UnsupportedOperationException always */
@Override
public float getZeroLinearSpeedThreshold () {
- throw new UnsupportedOperationException();
+ return 0.001f;
}
/** Guaranteed to throw UnsupportedOperationException.
|
Now NullLimiter supports getZeroLinearSpeedThreshold.
|
libgdx_gdx-ai
|
train
|
8498a56fe2b02ec0bc1c2d7cf133738e1da8c4a8
|
diff --git a/python/ray/tests/test_metrics.py b/python/ray/tests/test_metrics.py
index <HASH>..<HASH> 100644
--- a/python/ray/tests/test_metrics.py
+++ b/python/ray/tests/test_metrics.py
@@ -1,6 +1,5 @@
import os
import platform
-import time
import grpc
import psutil # We must import psutil after ray because we bundle it with ray.
@@ -9,7 +8,7 @@ import requests
import ray
from ray._private.test_utils import (
- RayTestTimeoutException,
+ wait_for_condition,
wait_until_succeeded_without_exception,
)
from ray._private.utils import init_grpc_channel
@@ -20,9 +19,8 @@ _WIN32 = os.name == "nt"
@pytest.mark.skipif(platform.system() == "Windows", reason="Hangs on Windows.")
def test_worker_stats(shutdown_only):
- ray.init(num_cpus=1, include_dashboard=True)
+ ray.init(num_cpus=2, include_dashboard=True)
raylet = ray.nodes()[0]
- num_cpus = raylet["Resources"]["CPU"]
raylet_address = "{}:{}".format(
raylet["NodeManagerAddress"], ray.nodes()[0]["NodeManagerPort"]
)
@@ -91,26 +89,14 @@ def test_worker_stats(shutdown_only):
assert stats.webui_display[""] == "" # Empty proto
assert target_worker_present
- if _WIN32:
- timeout_seconds = 40
- else:
- timeout_seconds = 20
- start_time = time.time()
- while True:
- if time.time() - start_time > timeout_seconds:
- raise RayTestTimeoutException(
- "Timed out while waiting for worker processes"
- )
-
- # Wait for the workers to start.
- if len(reply.core_workers_stats) < num_cpus + 2:
- time.sleep(1)
- reply = try_get_node_stats()
- print(reply)
- continue
+ # 1 actor + 1 worker for task + 1 driver
+ num_workers = 3
+ def verify():
+ reply = try_get_node_stats()
# Check that the rest of the processes are workers, 1 for each CPU.
- assert len(reply.core_workers_stats) == num_cpus + 2
+
+ assert len(reply.core_workers_stats) == num_workers
# Check that all processes are Python.
pids = [worker.pid for worker in reply.core_workers_stats]
processes = [
@@ -129,7 +115,10 @@ def test_worker_stats(shutdown_only):
or "pytest" in process
or "ray" in process
), process
- break
+
+ return True
+
+ wait_for_condition(verify)
def test_multi_node_metrics_export_port_discovery(ray_start_cluster):
|
[Core][fix] Increasing timeout on non-windows for test_metrics (#<I>)
The test was timing out.
A normal pass was ~<I> secs.
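The fix replaces a hand-rolled timeout loop with a condition-polling helper; a generic Python version of that idea (ray's actual wait_for_condition may differ in signature and behaviour):

import time

def wait_for_condition(predicate, timeout=20, interval=0.5):
    """Poll predicate until it returns truthy or the timeout elapses."""
    deadline = time.monotonic() + timeout
    last_exc = None
    while time.monotonic() < deadline:
        try:
            if predicate():
                return True
        except AssertionError as exc:
            # Assertions inside the predicate count as "not ready yet".
            last_exc = exc
        time.sleep(interval)
    raise RuntimeError("condition not met within %ss: %s" % (timeout, last_exc))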
|
ray-project_ray
|
train
|
5c816dabca64bf2815a1a8a78a3c3bc7b3f39526
|
diff --git a/middleware.go b/middleware.go
index <HASH>..<HASH> 100644
--- a/middleware.go
+++ b/middleware.go
@@ -7,7 +7,9 @@ package gojiutil
import (
"crypto/rand"
"encoding/base64"
+ "encoding/json"
"fmt"
+ "io"
"net/http"
"runtime"
"strconv"
@@ -225,3 +227,61 @@ func ContextLogger(c *web.C, h http.Handler) http.Handler {
h.ServeHTTP(rw, r)
})
}
+
+// GetJSONBody is a middleware to read and parse an application/json body and store it in
+// c.Env["json"] as a map[string]interface{}, which can be easily mapped to a proper struct
+// using github.com/mitchellh/mapstructure.
+// This middleware is pretty permissive: it allows for having no content-length and no
+// content-type as long as either there's no body or the body parses as json.
+func GetJSONBody(c *web.C, h http.Handler) http.Handler {
+ return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
+ var err error
+
+ // parse content-length header
+ cl := 0
+ if clh := r.Header.Get("content-length"); clh != "" {
+ if cl, err = strconv.Atoi(clh); err != nil {
+ ErrorString(*c, rw, 400, "Invalid content-length: "+err.Error())
+ return
+ }
+ }
+
+ // parse content-type header
+ if ct := r.Header.Get("content-type"); ct != "" && ct != "application/json" {
+ ErrorString(*c, rw, 400,
+ "Invalid content-type '"+ct+"', application/json expected")
+ return
+ }
+
+ /*
+ // get the context logger, if available
+ log, _ := c.Env[ContextLog].(log15.Logger)
+ if log == nil {
+ log = log15.Root()
+ }
+ */
+
+ // try to read body
+ var js map[string]interface{}
+ err = json.NewDecoder(r.Body).Decode(&js)
+ switch err {
+ case io.EOF:
+ if cl != 0 {
+ ErrorString(*c, rw, 400, "Premature EOF reading post body")
+ return
+ }
+ //log.Debug("HTTP no request body")
+ // got no body, so we're OK
+ case nil:
+ //log.Debug("HTTP Context", "body", js)
+ // great!
+ default:
+ ErrorString(*c, rw, 400, "Cannot parse JSON request body: "+
+ err.Error())
+ return
+ }
+
+ c.Env["json"] = js
+ h.ServeHTTP(rw, r)
+ })
+}
|
added GetJSONBody middleware
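A rough Python (WSGI-style) sketch of the same permissive pattern — accept a missing body or content-type, reject a bad content-length or unparsable JSON (function and key names are illustrative):

import json

def get_json_body(environ):
    """Return (parsed_json_or_None, error_message_or_None)."""
    ctype = environ.get("CONTENT_TYPE", "")
    if ctype and ctype != "application/json":
        return None, "Invalid content-type '%s', application/json expected" % ctype
    try:
        length = int(environ.get("CONTENT_LENGTH") or 0)
    except ValueError as exc:
        return None, "Invalid content-length: %s" % exc
    raw = environ["wsgi.input"].read(length) if length else b""
    if not raw:
        # No body at all is fine, mirroring the permissive middleware.
        return None, None
    try:
        return json.loads(raw), None
    except ValueError as exc:
        return None, "Cannot parse JSON request body: %s" % exc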
|
rightscale_gojiutil
|
train
|
8c8bc9c4ca2ef9c7113012b942facb8bc5670036
|
diff --git a/.travis.yml b/.travis.yml
index <HASH>..<HASH> 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -4,7 +4,6 @@ php:
- 5.4
- 5.5
- 5.6
- - 7.0
- hhvm
install: composer install --dev
diff --git a/src/Post.php b/src/Post.php
index <HASH>..<HASH> 100644
--- a/src/Post.php
+++ b/src/Post.php
@@ -12,7 +12,7 @@ class Post extends RestMethod {
*/
public function __construct($url){
parent::__construct($url);
- $this->parameters = array();
+ $this->parameters = [];
}
/**
diff --git a/src/RestMethod.php b/src/RestMethod.php
index <HASH>..<HASH> 100644
--- a/src/RestMethod.php
+++ b/src/RestMethod.php
@@ -149,13 +149,13 @@ abstract class RestMethod {
* @return array
*/
protected function getResult(){
- return array(
+ return [
'status' => $this->status,
'time' => $this->time,
'header' => $this->header,
'body' => $this->body,
'error' => $this->error
- );
+ ];
}
private function applyCurlOptions(){
diff --git a/tests/cases/PostTest.php b/tests/cases/PostTest.php
index <HASH>..<HASH> 100644
--- a/tests/cases/PostTest.php
+++ b/tests/cases/PostTest.php
@@ -6,10 +6,10 @@ class PostTest extends TestCase {
$url = 'http://www.slugifier.com/api/generate-slug';
$postMethod = new Post($url);
$postMethod->setParameter('text', 'Read these tips to improve your résumé and get a great job!');
- $postMethod->setParameter('rules', array('improve' => 'improvement'));
+ $postMethod->setParameter('rules', ['improve' => 'improvement']);
$postMethod->setParameter('separator', '_');
$postMethod->setParameter('exclude_stop_words', true);
- $postMethod->setParameter('words_to_exclude', array('read', 'great'));
+ $postMethod->setParameter('words_to_exclude', ['read', 'great']);
$result = $postMethod->execute();
diff --git a/tests/cases/PutTest.php b/tests/cases/PutTest.php
index <HASH>..<HASH> 100644
--- a/tests/cases/PutTest.php
+++ b/tests/cases/PutTest.php
@@ -20,7 +20,7 @@ class PutTest extends TestCase {
$fileName = 'hello_world.html';
$url = 'http://www.httpmirror.com/put/' . $fileName;
$putMethod = new Put($url);
- $data = array('abc' => 'def', 'john' => 'doe');
+ $data = ['abc' => 'def', 'john' => 'doe'];
$result = $putMethod->execute($data);
$this->assertContains('HTTP/1.1 200 OK', $result['header']);
|
Removing PHP 7 environment and changing arrays to bracket arrays.
|
softiciel_php-rest-client
|
train
|
fd7349cf1d9d48ba444a836ccb2565fdab5560a4
|
diff --git a/internal/service/outposts/outpost_instance_types_data_source_test.go b/internal/service/outposts/outpost_instance_types_data_source_test.go
index <HASH>..<HASH> 100644
--- a/internal/service/outposts/outpost_instance_types_data_source_test.go
+++ b/internal/service/outposts/outpost_instance_types_data_source_test.go
@@ -22,14 +22,14 @@ func TestAccOutpostsOutpostInstanceTypesDataSource_basic(t *testing.T) {
{
Config: testAccOutpostInstanceTypesDataSourceConfig(),
Check: resource.ComposeTestCheckFunc(
- testAccCheckOutpostsOutpostInstanceTypesAttributes(dataSourceName),
+ testAccCheckOutpostInstanceTypesAttributes(dataSourceName),
),
},
},
})
}
-func testAccCheckOutpostsOutpostInstanceTypesAttributes(dataSourceName string) resource.TestCheckFunc {
+func testAccCheckOutpostInstanceTypesAttributes(dataSourceName string) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[dataSourceName]
if !ok {
diff --git a/internal/service/outposts/outposts_data_source_test.go b/internal/service/outposts/outposts_data_source_test.go
index <HASH>..<HASH> 100644
--- a/internal/service/outposts/outposts_data_source_test.go
+++ b/internal/service/outposts/outposts_data_source_test.go
@@ -20,16 +20,16 @@ func TestAccOutpostsDataSource_basic(t *testing.T) {
CheckDestroy: nil,
Steps: []resource.TestStep{
{
- Config: testAccOutpostsDataSourceConfig(),
+ Config: testAccOutpostsDataSourceConfig_basic(),
Check: resource.ComposeTestCheckFunc(
- testAccCheckOutpostsOutpostsAttributes(dataSourceName),
+ testAccCheckOutpostsAttributes(dataSourceName),
),
},
},
})
}
-func testAccCheckOutpostsOutpostsAttributes(dataSourceName string) resource.TestCheckFunc {
+func testAccCheckOutpostsAttributes(dataSourceName string) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[dataSourceName]
if !ok {
@@ -48,7 +48,7 @@ func testAccCheckOutpostsOutpostsAttributes(dataSourceName string) resource.Test
}
}
-func testAccOutpostsDataSourceConfig() string {
+func testAccOutpostsDataSourceConfig_basic() string {
return `
data "aws_outposts_outposts" "test" {}
`
|
outposts: Remove service from funcs
|
terraform-providers_terraform-provider-aws
|
train
|
0748b6a45d2a43446c9184cb7248f71cc25cd8f5
|
diff --git a/lib/ey-deploy/deploy.rb b/lib/ey-deploy/deploy.rb
index <HASH>..<HASH> 100644
--- a/lib/ey-deploy/deploy.rb
+++ b/lib/ey-deploy/deploy.rb
@@ -115,22 +115,20 @@ module EY
# task
def bundle
- roles :app_master, :app, :solo do
- if File.exist?("#{c.latest_release}/Gemfile")
- puts "~> Gemfile detected, bundling gems"
- lockfile = File.join(c.latest_release, "Gemfile.lock")
+ if File.exist?("#{c.latest_release}/Gemfile")
+ puts "~> Gemfile detected, bundling gems"
+ lockfile = File.join(c.latest_release, "Gemfile.lock")
- bundler_version = if File.exist?(lockfile)
- get_bundler_version(lockfile)
- else
- warn_about_missing_lockfile
- DEFAULT_09_BUNDLER
- end
+ bundler_version = if File.exist?(lockfile)
+ get_bundler_version(lockfile)
+ else
+ warn_about_missing_lockfile
+ DEFAULT_09_BUNDLER
+ end
- sudo "#{$0} install_bundler #{bundler_version}"
+ sudo "#{$0} install_bundler #{bundler_version}"
- run "cd #{c.latest_release} && bundle _#{bundler_version}_ install --without=development --without=test"
- end
+ run "cd #{c.latest_release} && bundle _#{bundler_version}_ install --without=development --without=test"
end
end
|
Bundle gems on all instance types.
[#<I> state:merged]
|
engineyard_engineyard-serverside
|
train
|
d12540a3e72f7a7df8fd7e5abc48e3470b74e801
|
diff --git a/scripts/pre-release.py b/scripts/pre-release.py
index <HASH>..<HASH> 100755
--- a/scripts/pre-release.py
+++ b/scripts/pre-release.py
@@ -91,7 +91,7 @@ class Ref(SmartGetitem):
@cache
def serializer_version(self):
- return self.extract_version_macro("src/config/args.hpp", 'serializer')
+ return self.extract_version_macro("src/serializer/log/static_header.cc", 'current_serializer')
@cache
def cluster_version(self, ref):
|
Update pre-release check script for current variable defines
|
rethinkdb_rethinkdb
|
train
|
bf01ee8169a595340cb5092dc23fe770d63da049
|
diff --git a/packages/webcube/server/ssrRoute.js b/packages/webcube/server/ssrRoute.js
index <HASH>..<HASH> 100644
--- a/packages/webcube/server/ssrRoute.js
+++ b/packages/webcube/server/ssrRoute.js
@@ -140,6 +140,7 @@ Object.keys(entries).forEach(entry => {
language: 'en',
i18n: serveri18n,
requestId: 'warmUpRequest',
+ cookies: 'warmUpRequest'
});
});
}
@@ -268,6 +269,7 @@ async function ssrRender({
preloadedAppState,
skipPreload,
requestId,
+ cookies
}) {
const baseUrl = entry === mainEntry ? '' : `/${entry}`;
const context = {};
@@ -313,6 +315,7 @@ async function ssrRender({
}
: undefined,
preloadedAppState: !skipPreload ? preloadedAppState : undefined,
+ cookies
})
)
)
@@ -429,6 +432,7 @@ async function ssrRender({
preloadedAppState: renderInfo,
skipPreload: !isLoaderAllDone,
requestId,
+ cookies
});
}
return Promise.resolve({
@@ -473,7 +477,7 @@ module.exports = async function ssrRoute(req, res) {
logger.info(
`[WEBCUBE] [${requestId}] language: "${req.language}", hostname: "${
req.hostname
- }", url: "${req.url}"`
+ }", url: "${req.url}", cookies: "${req.headers.cookie}"`
);
try {
Entry = require(exportedEntryCodePath);
@@ -493,6 +497,7 @@ module.exports = async function ssrRoute(req, res) {
language: req.language,
i18n: req.i18n,
requestId,
+ cookies: req.headers.cookie
});
// https://github.com/jamiebuilds/react-loadable#------------server-side-rendering
const bundles = getBundles(loadableStats, modules);
|
add cookies to ssrRoute
|
dexteryy_Project-WebCube
|
train
|
649e15373c41b6cddc51c73d391049bad8eaaa11
|
diff --git a/packages/cozy-client/examples/albums-relationships.js b/packages/cozy-client/examples/albums-relationships.js
index <HASH>..<HASH> 100644
--- a/packages/cozy-client/examples/albums-relationships.js
+++ b/packages/cozy-client/examples/albums-relationships.js
@@ -1,4 +1,3 @@
-const minimist = require('minimist')
const { QueryDefinition, HasMany, default: CozyClient } = require('../dist')
global.fetch = require('node-fetch') // in the browser we have native fetch
@@ -49,9 +48,6 @@ const main = async _args => {
if (!token) {
throw new Error('You should provide COZY_TOKEN as an environement variable')
}
- const args = minimist(_args.slice(2), {
- string: ['selector']
- })
const client = new CozyClient({ uri, token, schema })
const query = new QueryDefinition({
doctype: 'io.cozy.files',
diff --git a/packages/cozy-client/examples/file-relationships.js b/packages/cozy-client/examples/file-relationships.js
index <HASH>..<HASH> 100644
--- a/packages/cozy-client/examples/file-relationships.js
+++ b/packages/cozy-client/examples/file-relationships.js
@@ -1,4 +1,3 @@
-const minimist = require('minimist')
const { QueryDefinition, default: CozyClient } = require('../dist')
global.fetch = require('node-fetch') // in the browser we have native fetch
@@ -26,9 +25,6 @@ const main = async _args => {
if (!token) {
throw new Error('You should provide COZY_TOKEN as an environement variable')
}
- const args = minimist(_args.slice(2), {
- string: ['selector']
- })
const client = new CozyClient({ uri, token, schema })
const query = new QueryDefinition({
doctype: 'io.cozy.photos.albums',
|
refactor: Remove warn lint
|
cozy_cozy-client
|
train
|
a0bb5014a658d6d73da6dc05f5b50439d53e24ac
|
diff --git a/examples/estimate_parameters_cross_validation.py b/examples/estimate_parameters_cross_validation.py
index <HASH>..<HASH> 100644
--- a/examples/estimate_parameters_cross_validation.py
+++ b/examples/estimate_parameters_cross_validation.py
@@ -52,23 +52,22 @@ def estimate_via_quic(X, num_folds, metric='log_likelihood'):
refit=True,
verbose=1)
estimator.fit(X)
+ ic_estimator = estimator.best_estimator_
+ ic_score = ic_estimator.score(X) # must score() to find out best lambda index
+ ic_path_index = ic_estimator.score_best_path_scale_index_
print 'Best parameters:'
pprint.pprint(estimator.best_params_)
- print 'Best lambda path scale (pre-score) {}'.format(
- estimator.best_estimator_.score_best_path_scale_)
- print 'Best score: {}'.format(estimator.score(X))
- print 'Best lambda path scale {}'.format(
- estimator.best_estimator_.score_best_path_scale_)
+ print 'Best score: {}'.format(ic_score)
+ print 'Best lambda path scale {} (index= {})'.format(
+ ic_estimator.score_best_path_scale_,
+ ic_estimator.score_best_path_scale_index_)
# get best covariance from QUIC
- best_path_index = estimator.best_estimator_.score_best_path_scale_index_
- cov = np.reshape(
- estimator.best_estimator_.covariance_[best_path_index, :],
- (n_features, n_features))
- prec = np.reshape(
- estimator.best_estimator_.precision_[best_path_index, :],
- (n_features, n_features))
+ cov = np.reshape(ic_estimator.covariance_[ic_path_index, :],
+ (n_features, n_features))
+ prec = np.reshape(ic_estimator.precision_[ic_path_index, :],
+ (n_features, n_features))
return cov, prec
|
Make quic gridsearch cleaner, easier to read
|
skggm_skggm
|
train
|
c47f06bf8143fafe394dd80f00f1f2baa0cf2665
|
diff --git a/unit_tests/test_non_canonical.py b/unit_tests/test_non_canonical.py
index <HASH>..<HASH> 100644
--- a/unit_tests/test_non_canonical.py
+++ b/unit_tests/test_non_canonical.py
@@ -1,3 +1,4 @@
+import copy
import unittest
import isambard_dev as isambard
@@ -14,12 +15,17 @@ class TestConvertProToHyp(unittest.TestCase):
to_convert = [
res for (i, res) in enumerate(col.get_monomers())
if not (i + 1) % 3]
+ ori_pros = copy.deepcopy(to_convert)
for pro in to_convert:
isambard.ampal.non_canonical.convert_pro_to_hyp(pro)
self.assertEqual(col.sequences, ['GPXGPXGPXGPXGPXGPXGPX'] * 3)
- hyps = list(filter(lambda x: x.mol_code == 'HYP', col.get_monomers()))
+ hyps = to_convert
self.assertEqual(len(hyps), 7 * 3)
hyp_atom_labels = ('N', 'CA', 'C', 'O', 'CB', 'CG', 'CD', 'OD1')
- for hyp in hyps:
- self.assertTrue(
- all(map(lambda x: x in hyp_atom_labels, hyp.atoms.keys())))
+ common_atoms = ('N', 'CA', 'C', 'O', 'CB')
+ for (pro, hyp) in zip(ori_pros, hyps):
+ for (label, atom) in hyp.atoms.items():
+ self.assertTrue(label in hyp_atom_labels)
+ if label in common_atoms:
+ self.assertTrue(
+ numpy.allclose(atom.array, hyp[label].array))
|
Adds test for maintaining common atom positions.
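The testing pattern — snapshot inputs with deepcopy, mutate, then compare coordinates with numpy.allclose — in a tiny standalone Python sketch (the atom data is made up):

import copy
import numpy as np

residue = {"N": np.array([0.0, 0.0, 0.0]), "CA": np.array([1.5, 0.0, 0.0])}
original = copy.deepcopy(residue)

# Hypothetical in-place conversion that must leave backbone atoms untouched.
residue["OD1"] = np.array([2.0, 1.0, 0.0])

for label in ("N", "CA"):
    assert np.allclose(residue[label], original[label])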
|
woolfson-group_isambard
|
train
|
1a3313842cbfb650fa518ac42e7599d4ce24fdfd
|
diff --git a/labelImg.py b/labelImg.py
index <HASH>..<HASH> 100755
--- a/labelImg.py
+++ b/labelImg.py
@@ -1054,7 +1054,10 @@ class MainWindow(QMainWindow, WindowMixin):
self.canvas.setEnabled(False)
if file_path is None:
file_path = self.settings.get(SETTING_FILENAME)
-
+ #Deselect shape when loading new file
+ if self.canvas.selected_shape:
+ self.canvas.selected_shape.selected = False
+ self.canvas.selected_shape = None
# Make sure that filePath is a regular python string, rather than QString
file_path = ustr(file_path)
|
Fixed delete selected shape error. (#<I>)
|
tzutalin_labelImg
|
train
|
8de3bd6b3c0b825a562d02c22291678fb662e7b6
|
diff --git a/src/tuwien/auto/calimero/knxnetip/servicetype/SearchResponse.java b/src/tuwien/auto/calimero/knxnetip/servicetype/SearchResponse.java
index <HASH>..<HASH> 100644
--- a/src/tuwien/auto/calimero/knxnetip/servicetype/SearchResponse.java
+++ b/src/tuwien/auto/calimero/knxnetip/servicetype/SearchResponse.java
@@ -1,6 +1,6 @@
/*
Calimero 2 - A library for KNX network access
- Copyright (c) 2006, 2019 B. Malinowsky
+ Copyright (c) 2006, 2020 B. Malinowsky
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
@@ -165,6 +165,11 @@ public class SearchResponse extends ServiceType
return desc.getDescription();
}
+ /**
+ * @return {@code true} if this is a KNXnet/IP v2 search response, {@code false} otherwise
+ */
+ public final boolean v2() { return svcType == KNXnetIPHeader.SearchResponse; }
+
@Override
public boolean equals(final Object obj)
{
|
Accessor for distinguishing v2 responses
|
calimero-project_calimero-core
|
train
|
15baf090db1ac3c749df6581545a3e0bcd326e7b
|
diff --git a/modules/cmsadmin/resources/js/cmsadmin.js b/modules/cmsadmin/resources/js/cmsadmin.js
index <HASH>..<HASH> 100644
--- a/modules/cmsadmin/resources/js/cmsadmin.js
+++ b/modules/cmsadmin/resources/js/cmsadmin.js
@@ -322,7 +322,7 @@
return FilterService.get();
},
menu : function(NewMenuService) {
- return NewMenuService.get(true);
+ return NewMenuService.get();
}
}
})
@@ -452,6 +452,8 @@
*/
$scope.AdminLangService.load(true);
+ NewMenuService.get();
+
$scope.$watch(function() { return NewMenuService.data; }, function(n) {
$scope.menu = n;
});
|
fixed issue where default sidebar could not be loaded
|
luyadev_luya
|
train
|
e4a2d75456de7bafdcee50dc9c02fa626daf37ec
|
diff --git a/admin/xmldb/actions/get_db_directories/get_db_directories.class.php b/admin/xmldb/actions/get_db_directories/get_db_directories.class.php
index <HASH>..<HASH> 100644
--- a/admin/xmldb/actions/get_db_directories/get_db_directories.class.php
+++ b/admin/xmldb/actions/get_db_directories/get_db_directories.class.php
@@ -153,15 +153,7 @@ class get_db_directories extends XMLDBAction {
$XMLDB->dbdirs[$dbdir->path]->path_exists = file_exists($dbdir->path); //Update status
}
}
-
- /// Now, groups
- $dbdir = new stdClass;
- $dbdir->path = $CFG->dirroot . '/group/db';
- if (!isset($XMLDB->dbdirs[$dbdir->path])) {
- $XMLDB->dbdirs[$dbdir->path] = $dbdir;
- }
- $XMLDB->dbdirs[$dbdir->path]->path_exists = file_exists($dbdir->path); //Update status
-
+
/// Sort by key
ksort($XMLDB->dbdirs);
|
group/db isn't a source of xml files anymore. MDL-<I>
|
moodle_moodle
|
train
|
802401b9fb554f714a108da8f701158360d78f6a
|
diff --git a/src/Charcoal/Translator/Translator.php b/src/Charcoal/Translator/Translator.php
index <HASH>..<HASH> 100644
--- a/src/Charcoal/Translator/Translator.php
+++ b/src/Charcoal/Translator/Translator.php
@@ -129,6 +129,7 @@ class Translator extends SymfonyTranslator
if ($locale === null) {
$locale = $this->getLocale();
}
+
if ($val instanceof Translation) {
return strtr($val[$locale], $parameters);
}
@@ -185,6 +186,10 @@ class Translator extends SymfonyTranslator
*/
public function translateChoice($val, $number, array $parameters = [], $domain = null, $locale = null)
{
+ if ($locale === null) {
+ $locale = $this->getLocale();
+ }
+
if ($val instanceof Translation) {
return strtr($val[$locale], $parameters);
}
|
Amend <I>cbd<I>
Ensure locale is valid for `Translator::translateChoice()`.
|
locomotivemtl_charcoal-translator
|
train
|
56130bd8549f9fdc76269b3a6a1a04d8c19740fb
|
diff --git a/changelog.md b/changelog.md
index <HASH>..<HASH> 100644
--- a/changelog.md
+++ b/changelog.md
@@ -42,4 +42,10 @@
* Allow associating a `Contact` to a `Member`
* Remove `Salutation` and `Middlename` from a `Contact`
-* Add versioning support to a `Contact` and `ContactLocation`
\ No newline at end of file
+* Add versioning support to a `Contact` and `ContactLocation`
+
+## 1.2.1
+
+* Automatically add region selection field if GeoLocations module is installed
+* Hide Version field
+*
\ No newline at end of file
diff --git a/src/extensions/AccountControllerExtension.php b/src/extensions/AccountControllerExtension.php
index <HASH>..<HASH> 100644
--- a/src/extensions/AccountControllerExtension.php
+++ b/src/extensions/AccountControllerExtension.php
@@ -13,6 +13,7 @@ use SilverStripe\Forms\HeaderField;
use SilverStripe\Forms\HiddenField;
use SilverStripe\ORM\PaginatedList;
use SilverStripe\Security\Security;
+use SilverStripe\Control\Controller;
use SilverStripe\Forms\LiteralField;
use SilverStripe\Forms\CheckboxField;
use SilverStripe\Forms\DropdownField;
@@ -21,6 +22,7 @@ use SilverStripe\Forms\RequiredFields;
use SilverStripe\ORM\ValidationResult;
use SilverStripe\Core\Injector\Injector;
use SilverCommerce\ContactAdmin\Model\ContactLocation;
+use SilverCommerce\GeoZones\Forms\RegionSelectionField;
use ilateral\SilverStripe\Users\Control\AccountController;
/**
@@ -216,7 +218,7 @@ class AccountControllerExtension extends Extension
}
}
- /**
+ /**
* Form used for adding or editing addresses
*
* @return Form
@@ -231,6 +233,9 @@ class AccountControllerExtension extends Extension
$fields->merge($location->getFrontEndFields());
+ // Remove the version field
+ $fields->removeByName("Version");
+
$fields->replaceField(
"Country",
DropdownField::create(
@@ -240,6 +245,17 @@ class AccountControllerExtension extends Extension
)->setEmptyString("")
);
+ if (class_exists(RegionSelectionField::class)) {
+ $fields->replaceField(
+ "County",
+ RegionSelectionField::create(
+ "County",
+ $location->fieldLabel("County"),
+ "Country"
+ )
+ );
+ }
+
$fields->replaceField(
"ContactID",
HiddenField::create('ContactID')
@@ -294,8 +310,10 @@ class AccountControllerExtension extends Extension
public function doSaveAddress($data, $form)
{
if (!$data["ID"]) {
+ $new = true;
$address = ContactLocation::create();
} else {
+ $new = false;
$address = ContactLocation::get()->byID($data["ID"]);
}
@@ -312,9 +330,22 @@ class AccountControllerExtension extends Extension
ValidationResult::TYPE_ERROR
);
}
- return $this
- ->getOwner()
- ->redirect($this->getOwner()->Link("addresses"));
+
+ // If a new record, redirect to base, else redirect back to the edit form
+ if ($new && !empty($address)) {
+ return $this
+ ->getOwner()
+ ->redirect(
+ Controller::join_links(
+ $this->getOwner()->Link("editaddress"),
+ $address->ID
+ )
+ );
+ } else {
+ return $this
+ ->getOwner()
+ ->redirectBack();
+ }
}
/**
diff --git a/src/model/ContactLocation.php b/src/model/ContactLocation.php
index <HASH>..<HASH> 100644
--- a/src/model/ContactLocation.php
+++ b/src/model/ContactLocation.php
@@ -27,8 +27,8 @@ class ContactLocation extends DataObject implements PermissionProvider
"Address1" => "Varchar(255)",
"Address2" => "Varchar(255)",
"City" => "Varchar(255)",
- "County" => "Varchar(255)",
"Country" => "Varchar(255)",
+ "County" => "Varchar(255)",
"PostCode" => "Varchar(10)",
"Default" => "Boolean"
];
@@ -46,8 +46,8 @@ class ContactLocation extends DataObject implements PermissionProvider
"Address1",
"Address2",
"City",
- "County",
"Country",
+ "County",
"PostCode",
"Default"
];
|
Remove version number from front end fields, get country/county selection working
|
silvercommerce_contact-admin
|
train
|
4da8884ca4e7743e33d00b56b18ea803d90c7312
|
diff --git a/src/Symfony/Component/DependencyInjection/Compiler/RegisterServiceSubscribersPass.php b/src/Symfony/Component/DependencyInjection/Compiler/RegisterServiceSubscribersPass.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/DependencyInjection/Compiler/RegisterServiceSubscribersPass.php
+++ b/src/Symfony/Component/DependencyInjection/Compiler/RegisterServiceSubscribersPass.php
@@ -92,7 +92,8 @@ class RegisterServiceSubscribersPass extends AbstractRecursivePass
}
if ($serviceMap = array_keys($serviceMap)) {
- $this->container->log($this, sprintf('Service keys "%s" do not exist in the map returned by %s::getSubscribedServices() for service "%s".', implode('", "', $serviceMap), $class, $this->currentId));
+ $message = sprintf(1 < count($serviceMap) ? 'keys "%s" do' : 'key "%s" does', str_replace('%', '%%', implode('", "', $serviceMap)));
+ throw new InvalidArgumentException(sprintf('Service %s not exist in the map returned by %s::getSubscribedServices() for service "%s".', $message, $class, $this->currentId));
}
$serviceLocator = $this->serviceLocator;
diff --git a/src/Symfony/Component/DependencyInjection/Tests/Compiler/RegisterServiceSubscribersPassTest.php b/src/Symfony/Component/DependencyInjection/Tests/Compiler/RegisterServiceSubscribersPassTest.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/DependencyInjection/Tests/Compiler/RegisterServiceSubscribersPassTest.php
+++ b/src/Symfony/Component/DependencyInjection/Tests/Compiler/RegisterServiceSubscribersPassTest.php
@@ -115,4 +115,23 @@ class RegisterServiceSubscribersPassTest extends TestCase
$this->assertEquals($expected, $locator->getArgument(0));
}
+
+ /**
+ * @expectedException \Symfony\Component\DependencyInjection\Exception\InvalidArgumentException
+ * @expectedExceptionMessage Service key "test" does not exist in the map returned by TestServiceSubscriber::getSubscribedServices() for service "foo_service".
+ */
+ public function testExtraServiceSubscriber()
+ {
+ $container = new ContainerBuilder();
+ $container->register('foo_service', 'TestServiceSubscriber')
+ ->setAutowired(true)
+ ->addArgument(new Reference('container'))
+ ->addTag('container.service_subscriber', array(
+ 'key' => 'test',
+ 'id' => 'TestServiceSubscriber',
+ ))
+ ;
+ $container->register('TestServiceSubscriber', 'TestServiceSubscriber');
+ $container->compile();
+ }
}
diff --git a/src/Symfony/Component/DependencyInjection/Tests/Dumper/PhpDumperTest.php b/src/Symfony/Component/DependencyInjection/Tests/Dumper/PhpDumperTest.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/DependencyInjection/Tests/Dumper/PhpDumperTest.php
+++ b/src/Symfony/Component/DependencyInjection/Tests/Dumper/PhpDumperTest.php
@@ -561,7 +561,7 @@ class PhpDumperTest extends TestCase
->setAutowired(true)
->addArgument(new Reference('container'))
->addTag('container.service_subscriber', array(
- 'key' => 'test',
+ 'key' => 'bar',
'id' => 'TestServiceSubscriber',
))
;
diff --git a/src/Symfony/Component/DependencyInjection/Tests/Fixtures/php/services_subscriber.php b/src/Symfony/Component/DependencyInjection/Tests/Fixtures/php/services_subscriber.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/DependencyInjection/Tests/Fixtures/php/services_subscriber.php
+++ b/src/Symfony/Component/DependencyInjection/Tests/Fixtures/php/services_subscriber.php
@@ -102,7 +102,7 @@ class ProjectServiceContainer extends Container
}, 'stdClass' => function () {
$f = function (\stdClass $v = null) { return $v; }; return $f(${($_ = isset($this->services['autowired.stdClass']) ? $this->services['autowired.stdClass'] : $this->getAutowired_StdClassService()) && false ?: '_'});
}, 'bar' => function () {
- $f = function (\stdClass $v) { return $v; }; return $f(${($_ = isset($this->services['autowired.stdClass']) ? $this->services['autowired.stdClass'] : $this->getAutowired_StdClassService()) && false ?: '_'});
+ $f = function (\stdClass $v) { return $v; }; return $f(${($_ = isset($this->services['TestServiceSubscriber']) ? $this->services['TestServiceSubscriber'] : $this->get('TestServiceSubscriber')) && false ?: '_'});
}, 'baz' => function () {
$f = function (\stdClass $v = null) { return $v; }; return $f(${($_ = isset($this->services['autowired.stdClass']) ? $this->services['autowired.stdClass'] : $this->getAutowired_StdClassService()) && false ?: '_'});
})));
|
[DI] Throw on "configured-keys <> getSubscribedServices()" mismatch
|
symfony_symfony
|
train
|
a926fc68ab2e45e9a9e9419041808da4bc1ed5ff
|
diff --git a/tests/test_simplelp.py b/tests/test_simplelp.py
index <HASH>..<HASH> 100644
--- a/tests/test_simplelp.py
+++ b/tests/test_simplelp.py
@@ -44,3 +44,7 @@ def test_nicelp():
assert round(s.getVal(y)) == 0.0
s.free()
+
+if __name__ == "__main__":
+ test_simple()
+ test_nicelp()
|
make test_simplelp run without py.test
|
SCIP-Interfaces_PySCIPOpt
|
train
|
28087a492cb2a9a2a315696f5010740d27f2dc63
|
diff --git a/lib/cli.js b/lib/cli.js
index <HASH>..<HASH> 100755
--- a/lib/cli.js
+++ b/lib/cli.js
@@ -34,6 +34,11 @@ nconf.argv({
describe: 'Filename to use when saving the generated report.',
default: 'generatedReport.html'
},
+ p: {
+ alias: 'prepend-filename',
+ describe: 'Prepend filename to the package name in the report. Helps distinguish between multiple runs/diff browser/same test',
+ default: false
+ },
b: {
alias: 'browser',
describe: 'If true generated report will be opened in the browser.',
@@ -61,6 +66,7 @@ var opts = {
themeName: nconf.get('theme'),
reportFilename: nconf.get('output'),
openBrowser: nconf.get('browser') === true,
+ prependFilename: nconf.get('prepend-filename') === true,
hideSuccess: typeof (nconf.get('compact')) !== 'undefined',
logLevel: nconf.get('log-level'),
debug: {
@@ -95,7 +101,8 @@ async.waterfall([
function parseFiles(data, next) {
async.map(data, function(result, pnext) {
parseString(result.data, function(err, parsed) {
- parsed.testsuites.filename = result.name;
+ if (opts.prependFilename)
+ parsed.testsuites.filename = result.name;
pnext(err, parsed);
});
}, next);
|
Put prepending the filename to the package name behind an option.
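A small Python/argparse analogue of gating the behaviour behind an off-by-default flag (option and file names are illustrative):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "-p", "--prepend-filename", action="store_true", default=False,
    help="Prepend the filename to the package name in the report",
)
opts = parser.parse_args(["-p"])

package_name = "smoke_tests"
if opts.prepend_filename:
    # Only annotate when explicitly requested, keeping old reports stable.
    package_name = "chrome_results.xml / " + package_name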
|
jls_nightwatch-html-reporter
|
train
|
c4189887ce8932c0f8b2f87f01604f8db9b0f8a8
|
diff --git a/lib/woodhouse/runners/bunny_runner.rb b/lib/woodhouse/runners/bunny_runner.rb
index <HASH>..<HASH> 100644
--- a/lib/woodhouse/runners/bunny_runner.rb
+++ b/lib/woodhouse/runners/bunny_runner.rb
@@ -6,38 +6,42 @@ class Woodhouse::Runners::BunnyRunner < Woodhouse::Runner
def subscribe
bunny = Bunny.new(@config.server_info)
bunny.start
- bunny.qos(:prefetch_count => 1)
- queue = bunny.queue(@worker.queue_name)
- exchange = bunny.exchange(@worker.exchange_name, :type => :headers)
+ channel = bunny.create_channel
+ channel.prefetch(1)
+ queue = channel.queue(@worker.queue_name)
+ exchange = channel.exchange(@worker.exchange_name, :type => :headers)
queue.bind(exchange, :arguments => @worker.criteria.amqp_headers)
- while not @stopped
- message = queue.pop(:ack => true)
- if message[:header].nil?
- sleep 0.01
- else
- job = make_job(message)
+ queue.subscribe(:ack => true, :block => false) do |delivery, props, payload|
+ begin
+ job = make_job(props, payload)
if can_service_job?(job)
- queue.ack message
- service_job(job)
+ if service_job(job)
+ channel.acknowledge(delivery.delivery_tag, false)
+ else
+ channel.reject(delivery.delivery_tag, false)
+ end
else
- queue.reject message
+ @config.logger.error("Cannot service job #{job.describe} in queue for #{@worker.describe}")
+ channel.reject(delivery.delivery_tag, false)
end
- sleep 0.1
+ rescue => err
+ @config.logger.error("Error bubbled up out of worker. This shouldn't happen. #{err.message}")
+ err.backtrace.each do |btr|
+ @config.logger.error(" #{btr}")
+ end
+ spin_down
end
end
- bunny.stop
- signal :spun_down
+ wait :spin_down
end
def spin_down
- @stopped = true
- wait :spun_down
+ signal :spin_down
end
- def make_job(message)
+ def make_job(properties, payload)
Woodhouse::Job.new(@worker.worker_class_name, @worker.job_method) do |job|
- args = message[:header].properties.merge(:payload => message[:payload])
- args.merge!(args.delete(:headers) || {})
+ args = properties.headers.merge(:payload => payload)
job.arguments = args
end
end
|
Update BunnyRunner to use new asynchronous Bunny consumer API.
|
mboeh_woodhouse
|
train
|
474c191e721daea5bcb7aa761382d4036cc3c75b
|
diff --git a/emma2/msm/analysis/sparse/assessment_test.py b/emma2/msm/analysis/sparse/assessment_test.py
index <HASH>..<HASH> 100644
--- a/emma2/msm/analysis/sparse/assessment_test.py
+++ b/emma2/msm/analysis/sparse/assessment_test.py
@@ -139,6 +139,40 @@ class TestReversible(unittest.TestCase):
self.assertTrue(assessment.is_reversible(self.T, tol=self.tol), \
'matrix should be reversible')
+class TestIsConnected(unittest.TestCase):
+
+ def setUp(self):
+ C1=1.0*np.array([[1, 4, 3], [3, 2, 4], [4, 5, 1]])
+ C2=1.0*np.array([[0, 1], [1, 0]])
+ C3=1.0*np.array([[7]])
+
+ C=scipy.sparse.block_diag((C1, C2, C3))
+
+ C=C.toarray()
+ """Forward transition block 1 -> block 2"""
+ C[2, 3]=1
+ """Forward transition block 2 -> block 3"""
+ C[4, 5]=1
+
+ self.T_connected=scipy.sparse.csr_matrix(C1/C1.sum(axis=1)[:,np.newaxis])
+ self.T_not_connected=scipy.sparse.csr_matrix(C/C.sum(axis=1)[:,np.newaxis])
+
+ def tearDown(self):
+ pass
+
+ def test_connected_count_matrix(self):
+ """Directed"""
+ is_connected=assessment.is_connected(self.T_not_connected)
+ self.assertFalse(is_connected)
+
+ is_connected=assessment.is_connected(self.T_connected)
+ self.assertTrue(is_connected)
+
+ """Undirected"""
+ is_connected=assessment.is_connected(self.T_not_connected, directed=False)
+ self.assertTrue(is_connected)
+
+
if __name__=="__main__":
import cProfile as profiler
unittest.main()
|
[msm/analysis] Unit test for is_connected function
|
markovmodel_PyEMMA
|
train
|
75bb6e3f6308da804947f01b396363930dc6fe83
|
diff --git a/pytds/dbapi.py b/pytds/dbapi.py
index <HASH>..<HASH> 100644
--- a/pytds/dbapi.py
+++ b/pytds/dbapi.py
@@ -1,7 +1,7 @@
"""DB-SIG compliant module for communicating with MS SQL servers"""
__author__ = 'Mikhail Denisenko <denisenkom@gmail.com>'
-__version__ = '1.6.2'
+__version__ = '1.6.3'
import logging
import six
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
from distutils.core import setup
setup(name='python-tds',
- version='1.6.2',
+ version='1.6.3',
description='Python DBAPI driver for MSSQL using pure Python TDS (Tabular Data Stream) protocol implementation',
author='Mikhail Denisenko',
author_email='denisenkom@gmail.com',
|
bumped version to <I>
|
denisenkom_pytds
|
train
|
cc179d96414943e9165df56f203ba2d3828f2014
|
diff --git a/backup/moodle2/backup_stepslib.php b/backup/moodle2/backup_stepslib.php
index <HASH>..<HASH> 100644
--- a/backup/moodle2/backup_stepslib.php
+++ b/backup/moodle2/backup_stepslib.php
@@ -2282,7 +2282,11 @@ class backup_questions_structure_step extends backup_structure_step {
FROM {tag} t
JOIN {tag_instance} ti ON ti.tagid = t.id
WHERE ti.itemid = ?
- AND ti.itemtype = 'question'", array(backup::VAR_PARENTID));
+ AND ti.itemtype = 'question'
+ AND ti.component = 'core_question'",
+ [
+ backup::VAR_PARENTID
+ ]);
// don't need to annotate ids nor files
// (already done by {@link backup_annotate_all_question_files}
|
MDL-<I> question: Query enhancement.
Adding core_question as an additional filter to the tag backup.
|
moodle_moodle
|
train
|
7706d6867c14720535a65ebdc002b1a9a95597e8
|
diff --git a/gulpfile.js b/gulpfile.js
index <HASH>..<HASH> 100644
--- a/gulpfile.js
+++ b/gulpfile.js
@@ -22,10 +22,14 @@ gulp.task('readme', ['build'], (gulpCallBack) => {
});
});
-gulp.task('fetch', ['build'], function () {
+gulp.task('fetch', ['build'], (gulpCallBack) => {
var fetch = require('./dist/readme/fetch');
- fetch.compareToESLint();
- fetch.compareToTSLint();
+ Promise.all([
+ fetch.compareToESLint(),
+ fetch.compareToTSLint()
+ ]).then(() => {
+ gulpCallBack();
+ });
});
gulp.task('lint', function lint() {
|
[fix:fetch-task] gulp waits until the task finishes
The two tasks at hand are async and thus we must tell gulp when they
are done. They have been wrapped in a Promise.all to accomplish this.
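The same completion-signalling idea expressed in Python — gather the async pieces and only then report the task done (asyncio sketch; the two fetch functions are stand-ins):

import asyncio

async def compare_to_eslint():
    await asyncio.sleep(0.1)  # stand-in for the real async comparison

async def compare_to_tslint():
    await asyncio.sleep(0.1)

async def fetch_task():
    # Equivalent of Promise.all: don't signal completion early.
    await asyncio.gather(compare_to_eslint(), compare_to_tslint())
    print("fetch task finished")

asyncio.run(fetch_task())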
|
buzinas_tslint-eslint-rules
|
train
|
cbea1a54128af980e406e077cd4a8874cd3102c6
|
diff --git a/ToUnderground.py b/ToUnderground.py
index <HASH>..<HASH> 100644
--- a/ToUnderground.py
+++ b/ToUnderground.py
@@ -20,6 +20,8 @@ from datetime import datetime, timedelta
import DataStore
from WeatherStation import dew_point
+def CtoF(C):
+ return (C * 9.0 / 5.0) + 32.0
def ToUnderground(params, data, verbose=1):
password = params.get('underground', 'password', 'undergroudpassword')
station = params.get('underground', 'station', 'undergroundstation')
@@ -34,13 +36,18 @@ def ToUnderground(params, data, verbose=1):
getPars['ID'] = station
getPars['PASSWORD'] = password
getPars['dateutc'] = data_now['idx'].isoformat(' ')
- getPars['winddir'] = '%.0f' % (data_now['wind_dir'] * 22.5)
- getPars['tempf'] = '%.1f' % ((data_now['temp_out'] * 9.0 / 5.0) + 32.0)
- getPars['dewptf'] = '%.1f' % (
- (dew_point(data_now['temp_out'], data_now['hum_out']) * 9.0 / 5.0) + 32.0)
- getPars['windspeedmph'] = '%.2f' % (data_now['wind_ave'] * 3.6 / 1.609344)
- getPars['windgustmph'] = '%.2f' % (data_now['wind_gust'] * 3.6 / 1.609344)
- getPars['humidity'] = '%d' % (data_now['hum_out'])
+    if data_now['wind_dir'] is not None and data_now['wind_dir'] <= 16:
+        getPars['winddir'] = '%.0f' % (data_now['wind_dir'] * 22.5)
+    if data_now['temp_out'] is not None:
+        getPars['tempf'] = '%.1f' % (CtoF(data_now['temp_out']))
+    if data_now['hum_out'] is not None:
+        getPars['dewptf'] = '%.1f' % (
+            CtoF(dew_point(data_now['temp_out'], data_now['hum_out'])))
+        getPars['humidity'] = '%d' % (data_now['hum_out'])
+    if data_now['wind_ave'] is not None:
+        getPars['windspeedmph'] = '%.2f' % (data_now['wind_ave'] * 3.6 / 1.609344)
+    if data_now['wind_gust'] is not None:
+        getPars['windgustmph'] = '%.2f' % (data_now['wind_gust'] * 3.6 / 1.609344)
getPars['rainin'] = '%g' % (max(data_now['rain'] - data_prev['rain'], 0.0) / 25.4)
if data_now.has_key('rel_pressure'):
baromin = data_now['rel_pressure']
|
Added tests for missing data, e.g. when contact with sensors is lost.
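A minimal Python sketch of the guarding pattern the diff applies (hypothetical data_now mapping; only readings that are actually present make it into the upload parameters):

def build_upload_params(data_now):
    """Build upload parameters, skipping readings lost with sensor contact."""
    params = {}
    if data_now.get('temp_out') is not None:
        params['tempf'] = '%.1f' % ((data_now['temp_out'] * 9.0 / 5.0) + 32.0)
    if data_now.get('hum_out') is not None:
        params['humidity'] = '%d' % data_now['hum_out']
    return params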
|
jim-easterbrook_pywws
|
train
|
354192d2b8b18f92966f77e156a9f09d0570ec98
|
diff --git a/commands/hugo.go b/commands/hugo.go
index <HASH>..<HASH> 100644
--- a/commands/hugo.go
+++ b/commands/hugo.go
@@ -281,8 +281,7 @@ func InitializeConfig() {
themeDir := helpers.GetThemeDir()
if themeDir != "" {
if _, err := os.Stat(themeDir); os.IsNotExist(err) {
- jww.ERROR.Println("Unable to find theme Directory:", themeDir)
- os.Exit(1)
+ jww.FATAL.Fatalln("Unable to find theme Directory:", themeDir)
}
}
|
Log missing theme as FATAL
See #<I>
|
gohugoio_hugo
|
train
|
1951a4eb31bae506361c627cca40409deee80706
|
diff --git a/FloatingGroupExpandableListView/src/com/diegocarloslima/fgelv/lib/WrapperExpandableListAdapter.java b/FloatingGroupExpandableListView/src/com/diegocarloslima/fgelv/lib/WrapperExpandableListAdapter.java
index <HASH>..<HASH> 100644
--- a/FloatingGroupExpandableListView/src/com/diegocarloslima/fgelv/lib/WrapperExpandableListAdapter.java
+++ b/FloatingGroupExpandableListView/src/com/diegocarloslima/fgelv/lib/WrapperExpandableListAdapter.java
@@ -15,6 +15,10 @@ public class WrapperExpandableListAdapter extends BaseExpandableListAdapter {
mWrappedAdapter = adapter;
}
+ public BaseExpandableListAdapter getWrappedAdapter() {
+ return mWrappedAdapter;
+ }
+
@Override
public void registerDataSetObserver(DataSetObserver observer) {
mWrappedAdapter.registerDataSetObserver(observer);
|
Adding getter for wrapped adapter
|
diegocarloslima_FloatingGroupExpandableListView
|
train
|
c7586c68b728dcef51a56d25578d5c0b2eeee0c7
|
diff --git a/model/ListDecorator.php b/model/ListDecorator.php
index <HASH>..<HASH> 100644
--- a/model/ListDecorator.php
+++ b/model/ListDecorator.php
@@ -15,8 +15,7 @@ abstract class SS_ListDecorator extends ViewableData implements SS_List, SS_Sort
protected $list;
public function __construct(SS_List $list) {
- $this->list = $list;
- $this->failover = $this->list;
+ $this->setList($list);
parent::__construct();
}
@@ -30,6 +29,21 @@ abstract class SS_ListDecorator extends ViewableData implements SS_List, SS_Sort
return $this->list;
}
+ /**
+ * Set the list this decorator wraps around.
+ *
+ * Useful for keeping a decorator/paginated list configuration intact while modifying
+ * the underlying list.
+ *
+ * @return SS_List
+ */
+ public function setList(SS_List $list)
+ {
+ $this->list = $list;
+ $this->failover = $this->list;
+ return $this;
+ }
+
// PROXIED METHODS ---------------------------------------------------------
public function offsetExists($key) {
|
feat(ListDecorator): Add setList() function. Useful for keeping a decorator/paginated list configuration intact while modifying the underlying list.
|
silverstripe_silverstripe-framework
|
train
|
74fd384969d7b0162a5975b8878f82770bd7003a
|
diff --git a/go/test/endtoend/vtorc/gracefultakeover/graceful_takeover_test.go b/go/test/endtoend/vtorc/gracefultakeover/graceful_takeover_test.go
index <HASH>..<HASH> 100644
--- a/go/test/endtoend/vtorc/gracefultakeover/graceful_takeover_test.go
+++ b/go/test/endtoend/vtorc/gracefultakeover/graceful_takeover_test.go
@@ -137,3 +137,37 @@ func TestGracefulPrimaryTakeoverAuto(t *testing.T) {
utils.CheckPrimaryTablet(t, clusterInfo, primary, true)
utils.VerifyWritesSucceed(t, clusterInfo, primary, []*cluster.Vttablet{replica, rdonly}, 10*time.Second)
}
+
+// make an api call to graceful primary takeover with a cross-cell replica and check that it errors out
+// covers the test case graceful-master-takeover-fail-cross-region from orchestrator
+func TestGracefulPrimaryTakeoverFailCrossCell(t *testing.T) {
+ defer cluster.PanicHandler(t)
+ utils.SetupVttabletsAndVtorc(t, clusterInfo, 1, 1, nil, "test_config.json")
+ keyspace := &clusterInfo.ClusterInstance.Keyspaces[0]
+ shard0 := &keyspace.Shards[0]
+
+ // find primary from topo
+ primary := utils.ShardPrimaryTablet(t, clusterInfo, keyspace, shard0)
+ assert.NotNil(t, primary, "should have elected a primary")
+
+ // find the rdonly tablet
+ var rdonly *cluster.Vttablet
+ for _, tablet := range shard0.Vttablets {
+ if tablet.Type == "rdonly" {
+ rdonly = tablet
+ }
+ }
+ assert.NotNil(t, rdonly, "could not find rdonly tablet")
+
+ crossCellReplica1 := utils.StartVttablet(t, clusterInfo, utils.Cell2, false)
+ // newly started tablet does not replicate from anyone yet, we will allow orchestrator to fix this too
+ utils.CheckReplication(t, clusterInfo, primary, []*cluster.Vttablet{crossCellReplica1, rdonly}, 25*time.Second)
+
+ status, response := utils.MakeAPICall(t, fmt.Sprintf("http://localhost:3000/api/graceful-primary-takeover/localhost/%d/localhost/%d", primary.MySQLPort, crossCellReplica1.MySQLPort))
+ assert.Equal(t, 500, status)
+ assert.Contains(t, response, "GracefulPrimaryTakeover: constraint failure")
+
+ // check that the cross-cell replica doesn't get promoted and the previous primary is still the primary
+ utils.CheckPrimaryTablet(t, clusterInfo, primary, true)
+ utils.VerifyWritesSucceed(t, clusterInfo, primary, []*cluster.Vttablet{crossCellReplica1, rdonly}, 10*time.Second)
+}
diff --git a/go/test/endtoend/vtorc/gracefultakeover/main_test.go b/go/test/endtoend/vtorc/gracefultakeover/main_test.go
index <HASH>..<HASH> 100644
--- a/go/test/endtoend/vtorc/gracefultakeover/main_test.go
+++ b/go/test/endtoend/vtorc/gracefultakeover/main_test.go
@@ -36,6 +36,12 @@ func TestMain(m *testing.M) {
NumRdonly: 2,
UIDBase: 100,
})
+ cellInfos = append(cellInfos, &utils.CellInfo{
+ CellName: utils.Cell2,
+ NumReplicas: 2,
+ NumRdonly: 0,
+ UIDBase: 200,
+ })
exitcode, err := func() (int, error) {
var err error
|
test: ported over graceful-master-takeover-fail-cross-region from orchestrator
|
vitessio_vitess
|
train
|
4aa920ddb859e542ef720fcd7291fbadc8404a27
|
diff --git a/AUTHORS b/AUTHORS
index <HASH>..<HASH> 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -39,6 +39,7 @@ Nicola Peduzzi <thenikso at gmail.com>
Runrioter Wung <runrioter at gmail.com>
Soroush Pour <me at soroushjp.com>
Stan Putrya <root.vagner at gmail.com>
+Stanley Gunawan <gunawan.stanley at gmail.com>
Xiaobing Jiang <s7v7nislands at gmail.com>
Xiuming Chen <cc at cxm.cc>
diff --git a/driver_test.go b/driver_test.go
index <HASH>..<HASH> 100644
--- a/driver_test.go
+++ b/driver_test.go
@@ -76,6 +76,28 @@ type DBTest struct {
db *sql.DB
}
+func runTestsWithMultiStatement(t *testing.T, dsn string, tests ...func(dbt *DBTest)) {
+ if !available {
+		t.Skipf("MySQL server not running on %s", netAddr)
+ }
+
+ dsn3 := dsn + "&multiStatements=true"
+ var db3 *sql.DB
+ if _, err := parseDSN(dsn3); err != errInvalidDSNUnsafeCollation {
+ db3, err = sql.Open("mysql", dsn3)
+ if err != nil {
+ t.Fatalf("Error connecting: %s", err.Error())
+ }
+ defer db3.Close()
+ }
+
+ dbt3 := &DBTest{t, db3}
+ for _, test := range tests {
+ test(dbt3)
+ dbt3.db.Exec("DROP TABLE IF EXISTS test")
+ }
+}
+
func runTests(t *testing.T, dsn string, tests ...func(dbt *DBTest)) {
if !available {
t.Skipf("MySQL server not running on %s", netAddr)
@@ -99,8 +121,19 @@ func runTests(t *testing.T, dsn string, tests ...func(dbt *DBTest)) {
defer db2.Close()
}
+ dsn3 := dsn + "&multiStatements=true"
+ var db3 *sql.DB
+ if _, err := parseDSN(dsn3); err != errInvalidDSNUnsafeCollation {
+ db3, err = sql.Open("mysql", dsn3)
+ if err != nil {
+ t.Fatalf("Error connecting: %s", err.Error())
+ }
+ defer db3.Close()
+ }
+
dbt := &DBTest{t, db}
dbt2 := &DBTest{t, db2}
+ dbt3 := &DBTest{t, db3}
for _, test := range tests {
test(dbt)
dbt.db.Exec("DROP TABLE IF EXISTS test")
@@ -108,6 +141,10 @@ func runTests(t *testing.T, dsn string, tests ...func(dbt *DBTest)) {
test(dbt2)
dbt2.db.Exec("DROP TABLE IF EXISTS test")
}
+ if db3 != nil {
+ test(dbt3)
+ dbt3.db.Exec("DROP TABLE IF EXISTS test")
+ }
}
}
@@ -237,6 +274,50 @@ func TestCRUD(t *testing.T) {
})
}
+func TestMultiQuery(t *testing.T) {
+ runTestsWithMultiStatement(t, dsn, func(dbt *DBTest) {
+ // Create Table
+ dbt.mustExec("CREATE TABLE `test` (`id` int(11) NOT NULL, `value` int(11) NOT NULL) ")
+
+ // Create Data
+ res := dbt.mustExec("INSERT INTO test VALUES (1, 1)")
+ count, err := res.RowsAffected()
+ if err != nil {
+ dbt.Fatalf("res.RowsAffected() returned error: %s", err.Error())
+ }
+ if count != 1 {
+ dbt.Fatalf("Expected 1 affected row, got %d", count)
+ }
+
+ // Update
+ res = dbt.mustExec("UPDATE test SET value = 3 WHERE id = 1; UPDATE test SET value = 4 WHERE id = 1; UPDATE test SET value = 5 WHERE id = 1;")
+ count, err = res.RowsAffected()
+ if err != nil {
+ dbt.Fatalf("res.RowsAffected() returned error: %s", err.Error())
+ }
+ if count != 1 {
+ dbt.Fatalf("Expected 1 affected row, got %d", count)
+ }
+
+ // Read
+ var out int
+ rows := dbt.mustQuery("SELECT value FROM test WHERE id=1;")
+ if rows.Next() {
+ rows.Scan(&out)
+ if 5 != out {
+			dbt.Errorf("5 != %d", out)
+ }
+
+ if rows.Next() {
+ dbt.Error("unexpected data")
+ }
+ } else {
+ dbt.Error("no data")
+ }
+
+ })
+}
+
func TestInt(t *testing.T) {
runTests(t, dsn, func(dbt *DBTest) {
types := [5]string{"TINYINT", "SMALLINT", "MEDIUMINT", "INT", "BIGINT"}
diff --git a/packets.go b/packets.go
index <HASH>..<HASH> 100644
--- a/packets.go
+++ b/packets.go
@@ -543,6 +543,7 @@ func (mc *mysqlConn) handleOkPacket(data []byte) error {
// server_status [2 bytes]
mc.status = readStatus(data[1+n+m : 1+n+m+2])
+ mc.discardMoreResultsIfExists()
// warning count [2 bytes]
if !mc.strict {
|
TestMultiQuery
discard the additional OK responses after multi-statement Exec calls
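Python's DB-API exposes the same requirement through cursor.nextset(): after a multi-statement execute, every pending result must be drained before the connection is reused. A sketch, assuming a driver with multi-statement support enabled:

def exec_multi(cursor, sql):
    """Execute a multi-statement string and drain all pending result sets."""
    cursor.execute(sql)
    results = [cursor.fetchall() if cursor.description else None]
    while cursor.nextset():  # falsy once no result sets remain
        results.append(cursor.fetchall() if cursor.description else None)
    return results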
|
go-sql-driver_mysql
|
train
|
6a89db86ed817f6a7498076e2a06b90f9fce0831
|
diff --git a/src/event.js b/src/event.js
index <HASH>..<HASH> 100644
--- a/src/event.js
+++ b/src/event.js
@@ -683,7 +683,14 @@ jQuery.Event.prototype = {
}
},
stopImmediatePropagation: function() {
+ var e = this.originalEvent;
+
this.isImmediatePropagationStopped = returnTrue;
+
+ if ( e && e.stopImmediatePropagation ) {
+ e.stopImmediatePropagation();
+ }
+
this.stopPropagation();
}
};
diff --git a/test/unit/event.js b/test/unit/event.js
index <HASH>..<HASH> 100644
--- a/test/unit/event.js
+++ b/test/unit/event.js
@@ -386,10 +386,13 @@ test("on immediate propagation", function() {
$p.off( "click", "**" );
});
-test("on bubbling, isDefaultPrevented", function() {
- expect(2);
+test("on bubbling, isDefaultPrevented, stopImmediatePropagation", function() {
+ expect( 3 );
var $anchor2 = jQuery( "#anchor2" ),
$main = jQuery( "#qunit-fixture" ),
+ neverCallMe = function() {
+ ok( false, "immediate propagation should have been stopped" );
+ },
fakeClick = function($jq) {
// Use a native click so we don't get jQuery simulated bubbling
var e = document.createEvent( "MouseEvents" );
@@ -414,6 +417,14 @@ test("on bubbling, isDefaultPrevented", function() {
fakeClick( $anchor2 );
$anchor2.off( "click" );
$main.off( "click", "**" );
+
+ $anchor2.on( "click", function( e ) {
+ e.stopImmediatePropagation();
+ ok( true, "anchor was clicked and prop stopped" );
+ });
+ $anchor2[0].addEventListener( "click", neverCallMe, false );
+ fakeClick( $anchor2 );
+ $anchor2[0].removeEventListener( "click", neverCallMe );
});
test("on(), iframes", function() {
|
Event: Call underlying stopImmediatePropagation when present
Fixes #<I>
|
jquery_jquery
|
train
|
3b8551b1f963c22085466a7279708fa8506d177a
|
diff --git a/lib/io/packet-output-stream.js b/lib/io/packet-output-stream.js
index <HASH>..<HASH> 100644
--- a/lib/io/packet-output-stream.js
+++ b/lib/io/packet-output-stream.js
@@ -11,7 +11,7 @@ const ZERO_BYTE = 0x00;
const SLASH = 0x5c;
//increase by level to avoid buffer copy.
-const SMALL_BUFFER_SIZE = 1024;
+const SMALL_BUFFER_SIZE = 256;
const MEDIUM_BUFFER_SIZE = 16384; //16k
const LARGE_BUFFER_SIZE = 131072; //128k
const BIG_BUFFER_SIZE = 1048576; //1M
|
[CONJS-<I>] initialize the streaming send buffer with a smaller size, in order to maximize use of the node buffer pool
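The tier constants in the diff suggest a grow-on-overflow scheme; a Python sketch of the idea (fixed tiers keep allocations friendly to a buffer pool; the sizes are the ones from the diff, the selection logic is an assumption):

BUFFER_TIERS = [256, 16384, 131072, 1048576]  # small, medium, large, big

def next_capacity(needed):
    """Pick the smallest tier that fits the payload."""
    for size in BUFFER_TIERS:
        if needed <= size:
            return size
    return needed  # oversized payloads get an exact allocation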
|
MariaDB_mariadb-connector-nodejs
|
train
|
3a3a11c6f428330cc1711934ae0b4ad5745ab87e
|
diff --git a/lib/toppings/generators/install/group_generator.rb b/lib/toppings/generators/install/group_generator.rb
index <HASH>..<HASH> 100644
--- a/lib/toppings/generators/install/group_generator.rb
+++ b/lib/toppings/generators/install/group_generator.rb
@@ -27,7 +27,7 @@ module Toppings
end
def templates
- @templates ||= []
+ @templates ||= Set.new
end
end
diff --git a/spec/generators/setup/group_generator_spec.rb b/spec/generators/setup/group_generator_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/generators/setup/group_generator_spec.rb
+++ b/spec/generators/setup/group_generator_spec.rb
@@ -29,7 +29,7 @@ describe Toppings::Generators::Install::GroupGenerator do
}
describe "with no template assigned, templates" do
- it { subject.templates.should eq([]) }
+ it { subject.templates.should be_empty }
end
describe "with a template assigned, templates" do
@@ -48,6 +48,14 @@ describe Toppings::Generators::Install::GroupGenerator do
it { subject.templates.should include('file1', 'file2') }
end
+ describe "with a template assigned multiple times, templates" do
+ before do
+ subject.with_templates "file1", "file1"
+ end
+
+ it { subject.templates.select() { |template| template == 'file1' }.size.should eq(1) }
+ end
+
end
end
|
Adjusted template assignment for generator groups so that templates are not assigned redundantly
|
toppings_toppings
|
train
|
65a0ceef4887797e151a8c964206c36acd1886a5
|
diff --git a/shoebot/sbot.py b/shoebot/sbot.py
index <HASH>..<HASH> 100644
--- a/shoebot/sbot.py
+++ b/shoebot/sbot.py
@@ -124,5 +124,7 @@ def run(src, grammar = NODEBOX, format = None, outputfile = None, iterations = 1
raise
else:
print '\nBye.'
+ else:
+ sbot_thread.join()
return bot
|
Don't quit immediately if not using live coding.
|
shoebot_shoebot
|
train
|
2ccbbdaf2825fda77b3c887761f662e612bf2c03
|
diff --git a/Jakefile.js b/Jakefile.js
index <HASH>..<HASH> 100644
--- a/Jakefile.js
+++ b/Jakefile.js
@@ -84,6 +84,9 @@ task('default', ['test']);
desc('compile, jslint, test, tslint, docs, validate-html.');
task('build', ['test', 'tslint', 'docs', 'validate-html']);
+desc('Update version number, tag and push to Github. Use vers=x.y.z argument to set a new version number.');
+task('release', ['build', 'version', 'tag', 'push']);
+
desc('Lint Javascript and JSON files.');
task('jslint', {async: true}, function() {
var commands = TESTS.map(function(file) {
@@ -148,7 +151,7 @@ task('validate-html', {async: true}, function() {
exec(commands);
});
-desc('Display or update the project version number. Use vers=x.y.z syntax to set a new version number.');
+desc('Display or update the project version number. Use vers=x.y.z argument to set a new version number.');
task('version', function() {
var version = process.env.vers;
if (!version) {
|
Jakefile.js: Added 'release' task.
|
srackham_rimu
|
train
|
88c1c8f275df437d6deec3cd9ceb1850202f06ce
|
diff --git a/karyon-core/src/main/java/com/netflix/karyon/server/KaryonServer.java b/karyon-core/src/main/java/com/netflix/karyon/server/KaryonServer.java
index <HASH>..<HASH> 100644
--- a/karyon-core/src/main/java/com/netflix/karyon/server/KaryonServer.java
+++ b/karyon-core/src/main/java/com/netflix/karyon/server/KaryonServer.java
@@ -214,7 +214,7 @@ public class KaryonServer implements Closeable {
if (null != initializer) {
initializer.close();
}
- Closeables.closeQuietly(lifecycleManager);
+ Closeables.close(lifecycleManager, true);
logger.info("Successfully shut down karyon.");
}
|
This fixes #<I> by replacing the call to Guava's Closeables.closeQuietly() in KaryonServer's shutdown with the newer Closeables.close()
|
Netflix_karyon
|
train
|
de6dec300a3e8145a80945dd868c343194cbddd5
|
diff --git a/lib/metriks/middleware.rb b/lib/metriks/middleware.rb
index <HASH>..<HASH> 100644
--- a/lib/metriks/middleware.rb
+++ b/lib/metriks/middleware.rb
@@ -10,7 +10,7 @@ module Metriks
def call(env)
time_response(env) do
- record_heroku_queue_status env
+ record_heroku_status env
record_error_rate env
call_downstream env
end
@@ -28,12 +28,14 @@ module Metriks
end
end
- def record_heroku_queue_status(env)
+ def record_heroku_status(env)
queue_wait = env['HTTP_X_HEROKU_QUEUE_WAIT_TIME']
queue_depth = env['HTTP_X_HEROKU_QUEUE_DEPTH']
+ dynos_in_use = env['HTTP_X_HEROKU_DYNOS_IN_USE']
- Metriks.histogram("queue.wait") .update(queue_wait.to_i) if queue_wait
- Metriks.histogram("queue.depth").update(queue_depth.to_i) if queue_depth
+ Metriks.histogram("queue.wait") .update(queue_wait.to_i) if queue_wait
+ Metriks.histogram("queue.depth") .update(queue_depth.to_i) if queue_depth
+ Metriks.histogram("dynos.in_use").update(dynos_in_use.to_i) if dynos_in_use
end
def record_error_rate(env)
diff --git a/test/sync_app_test.rb b/test/sync_app_test.rb
index <HASH>..<HASH> 100644
--- a/test/sync_app_test.rb
+++ b/test/sync_app_test.rb
@@ -98,13 +98,16 @@ class SyncAppTest < Test::Unit::TestCase
def test_records_heroku_queue_metrics
@env.merge! 'HTTP_X_HEROKU_QUEUE_WAIT_TIME' => '42',
- 'HTTP_X_HEROKU_QUEUE_DEPTH' => '24'
+ 'HTTP_X_HEROKU_QUEUE_DEPTH' => '24',
+ 'HTTP_X_HEROKU_DYNOS_IN_USE' => '3'
Metriks::Middleware.new(@downstream).call(@env)
wait = Metriks.histogram('queue.wait').mean
depth = Metriks.histogram('queue.depth').mean
+ used = Metriks.histogram('dynos.in_use').mean
assert_equal 42, wait
assert_equal 24, depth
+ assert_equal 3, used
end
end
|
track the number of dynos in use
|
lmarburger_metriks-middleware
|
train
|
4dc869756f9d4076bbced24c1715067cd45db3bf
|
diff --git a/upup/pkg/fi/cloudup/awstasks/sshkey.go b/upup/pkg/fi/cloudup/awstasks/sshkey.go
index <HASH>..<HASH> 100644
--- a/upup/pkg/fi/cloudup/awstasks/sshkey.go
+++ b/upup/pkg/fi/cloudup/awstasks/sshkey.go
@@ -80,7 +80,8 @@ func (e *SSHKey) Find(c *fi.Context) (*SSHKey, error) {
return actual, nil
}
-func computeAwsKeyFingerprint(publicKey *fi.ResourceHolder) (string, error) {
+// computeAWSKeyFingerprint computes the AWS-specific fingerprint of the SSH public key
+func computeAWSKeyFingerprint(publicKey *fi.ResourceHolder) (string, error) {
publicKeyString, err := publicKey.AsString()
if err != nil {
return "", fmt.Errorf("error reading SSH public key: %v", err)
@@ -88,12 +89,12 @@ func computeAwsKeyFingerprint(publicKey *fi.ResourceHolder) (string, error) {
tokens := strings.Split(publicKeyString, " ")
if len(tokens) < 2 {
- return "", fmt.Errorf("error parsing SSH public key: %s", publicKeyString)
+ return "", fmt.Errorf("error parsing SSH public key: %q", publicKeyString)
}
sshPublicKeyBytes, err := base64.StdEncoding.DecodeString(tokens[1])
if len(tokens) < 2 {
- return "", fmt.Errorf("error decoding SSH public key: %s", publicKeyString)
+ return "", fmt.Errorf("error decoding SSH public key: %q", publicKeyString)
}
sshPublicKey, err := ssh.ParsePublicKey(sshPublicKeyBytes)
@@ -152,7 +153,7 @@ func toDER(pubkey ssh.PublicKey) ([]byte, error) {
func (e *SSHKey) Run(c *fi.Context) error {
if e.KeyFingerprint == nil && e.PublicKey != nil {
- keyFingerprint, err := computeAwsKeyFingerprint(e.PublicKey)
+ keyFingerprint, err := computeAWSKeyFingerprint(e.PublicKey)
if err != nil {
return fmt.Errorf("error computing key fingerpring for SSH key: %v", err)
}
|
Quote public key in error message
Just in case it contains invalid characters
Issue #<I>
|
kubernetes_kops
|
train
|
58e1e0268861802c2613d7fa346e6059af450428
|
diff --git a/etcd/etcd.go b/etcd/etcd.go
index <HASH>..<HASH> 100644
--- a/etcd/etcd.go
+++ b/etcd/etcd.go
@@ -233,6 +233,6 @@ func (e *Etcd) Stop() {
// ReadyNotify returns a channel that is going to be closed
// when the etcd instance is ready to accept connections.
-func (e *Etcd) ReadyNotify() chan bool {
+func (e *Etcd) ReadyNotify() <-chan bool {
return e.readyC
}
|
refactor(main): return only receiving channel from Etcd.ReadyNotify()
|
etcd-io_etcd
|
train
|
51d7b2f46fc8008f277bc2b339f3d97b8a4a6f64
|
diff --git a/tasks/bower-concat.js b/tasks/bower-concat.js
index <HASH>..<HASH> 100644
--- a/tasks/bower-concat.js
+++ b/tasks/bower-concat.js
@@ -137,22 +137,33 @@ module.exports = function(grunt) {
}
function findMainFiles(name, component) {
+ grunt.verbose.writeln();
+ grunt.verbose.writeln('Finding main file for ' + name + '...');
+ var mainFiles = ensureArray(component);
+
// Main file explicitly defined in bower_concat options
if (mains[name]) {
+ // Component could be either filename or folder, we need folder
+ var componentDir = mainFiles[0];
+ if (fs.lstatSync(path.join(bowerDir, componentDir)).isFile()) {
+ componentDir = path.dirname(componentDir);
+ }
+
var manualMainFiles = ensureArray(mains[name]);
manualMainFiles = _.map(manualMainFiles, function(filepath) {
- return path.join(bowerDir, component, filepath);
+ return path.join(bowerDir, componentDir, filepath);
});
+ grunt.verbose.writeln('Main file was specified in bower_concat options: ' + manualMainFiles);
return manualMainFiles;
}
// Bower knows main JS file?
- var mainFiles = ensureArray(component);
mainFiles = _.map(mainFiles, function(filepath) {
return path.join(bowerDir, filepath);
});
var mainJSFiles = _.filter(mainFiles, isJsFile);
if (mainJSFiles.length) {
+ grunt.verbose.writeln('Main file was specified in bower.json: ' + mainJSFiles);
return mainJSFiles;
}
@@ -166,15 +177,18 @@ module.exports = function(grunt) {
if (jsFiles.length === 1) {
// Only one JS file: no doubt it’s main file
+ grunt.verbose.writeln('Considering the only JS file in a component’s folder as a main file: ' + jsFiles);
return jsFiles;
}
else {
// More than one JS file: try to guess
var bestFile = guessBestFile(name, jsFiles);
if (bestFile) {
+ grunt.verbose.writeln('Guessing the best JS file in a component’s folder: ' + [bestFile]);
return [bestFile];
}
else {
+ grunt.verbose.writeln('Main file not found');
return [];
}
}
|
A component could be either a filename or a folder; we need the folder. Fix #<I>.
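The file-or-folder normalization at the heart of the fix, in Python terms (illustrative os.path analogue of the lstatSync/dirname pair):

import os

def component_dir(path):
    """Return the component's directory, whether path is a file or a folder."""
    return os.path.dirname(path) if os.path.isfile(path) else path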
|
sapegin_grunt-bower-concat
|
train
|
958e2614f66ad1471df15a72b1d98f4b6dd50def
|
diff --git a/gzip_cache/gzip_cache.py b/gzip_cache/gzip_cache.py
index <HASH>..<HASH> 100644
--- a/gzip_cache/gzip_cache.py
+++ b/gzip_cache/gzip_cache.py
@@ -42,6 +42,7 @@ EXCLUDE_TYPES = [
# Internally-compressed fonts. gzip can often shave ~50 more bytes off,
# but it's not worth it.
'.woff',
+ '.woff2',
]
COMPRESSION_LEVEL = 9 # Best Compression
|
Excluding internally-compressed woff2
|
getpelican_pelican-plugins
|
train
|
1b030b22981cb9ccaeba1476c2cb3c12171611a7
|
diff --git a/sc2/main.py b/sc2/main.py
index <HASH>..<HASH> 100644
--- a/sc2/main.py
+++ b/sc2/main.py
@@ -126,6 +126,9 @@ async def _play_game_ai(client, player_id, ai, realtime, step_time_limit, game_t
out_of_budget = False
budget = time_limit - time_window.available
+    # Tell the bot how much time it has left via an attribute
+ ai.time_budget_available = budget
+
if budget < 0:
logger.warning(f"Running AI step: out of budget before step")
step_time = 0.0
|
Inform the bot about its time budget when it is limited
|
Dentosal_python-sc2
|
train
|
ba22e43c94c5a1c4827c86f4276e6ee2e0daf416
|
diff --git a/clkhash/rest_client.py b/clkhash/rest_client.py
index <HASH>..<HASH> 100644
--- a/clkhash/rest_client.py
+++ b/clkhash/rest_client.py
@@ -259,6 +259,6 @@ class RestClient:
'{}/api/v1/projects/{}/runs/{}'.format(self.server, project, run),
headers={"Authorization": apikey}
)
- if response.status_code != 200:
+ if response.status_code not in (200, 204):
raise ServiceError("Error deleting run", response)
return response.text
diff --git a/tests/test_cli.py b/tests/test_cli.py
index <HASH>..<HASH> 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -263,7 +263,6 @@ class TestHashCommand(unittest.TestCase):
assert result.exit_code != 0
-
@unittest.skipUnless("INCLUDE_CLI" in os.environ,
"Set envvar INCLUDE_CLI to run. Disabled for jenkins")
class TestHasherDefaultSchema(unittest.TestCase):
@@ -504,14 +503,7 @@ class TestCliInteractionWithService(CLITestHelper):
def test_create_project_2_party(self):
out = self._create_project(project_args={'parties': '2'})
-
- self.assertIn('project_id', out)
- self.assertIn('result_token', out)
- self.assertIn('update_tokens', out)
-
- self.assertGreaterEqual(len(out['project_id']), 16)
- self.assertGreaterEqual(len(out['result_token']), 16)
- self.assertGreaterEqual(len(out['update_tokens']), 2)
+ self._test_create_project(out)
def test_create_project_multi_party(self):
out = self._create_project(
@@ -608,14 +600,7 @@ class TestCliInteractionWithService(CLITestHelper):
def test_create_with_optional_name(self):
out = self._create_project({'name': 'testprojectname'})
-
- self.assertIn('project_id', out)
- self.assertIn('result_token', out)
- self.assertIn('update_tokens', out)
-
- self.assertGreaterEqual(len(out['project_id']), 16)
- self.assertGreaterEqual(len(out['result_token']), 16)
- self.assertGreaterEqual(len(out['update_tokens']), 2)
+ self._test_create_project(out)
def test_create_with_bad_schema(self):
# Make sure we don't succeed with bad schema.
@@ -696,8 +681,11 @@ class TestCliInteractionWithService(CLITestHelper):
self.assertIn('receipt_token', bob_upload)
- # Give the server a small amount of time to process
- time.sleep(5.0)
+ # Use the rest client to wait until the run is complete
+ self.rest_client.wait_for_run(project['project_id'],
+ run['run_id'],
+ project['result_token'],
+ timeout=10)
results_raw = get_coord_results()
res = json.loads(results_raw)
diff --git a/tests/test_rest_client.py b/tests/test_rest_client.py
index <HASH>..<HASH> 100644
--- a/tests/test_rest_client.py
+++ b/tests/test_rest_client.py
@@ -121,10 +121,10 @@ class TestRestClientInteractionWithService(unittest.TestCase):
print(format_run_status(status1))
# Check we can watch the run progress this will raise if not
- # completed in 10 seconds
+ # completed in 20 seconds
for status_update in self.rest_client.watch_run_status(p_id, r_id,
p['result_token'],
- 10, 0.5):
+ 20, 0.5):
print(format_run_status(status_update))
# Check that we can still "wait" on a completed run and get a valid
|
Update rest client and tests (#<I>)
* rest client update to accept <I> status on run deletion
* Remove duplicate test code
* Remove a hard-coded wait in a flaky test (see the sketch below)
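A minimal sketch of the poll-with-timeout pattern that replaces the hard-coded sleep (hypothetical is_complete callable; the real code uses the rest client's wait_for_run):

import time

def wait_until(is_complete, timeout=10.0, interval=0.5):
    """Poll until is_complete() is truthy or the timeout elapses."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if is_complete():
            return True
        time.sleep(interval)
    raise TimeoutError('run did not complete within %.1fs' % timeout)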
|
data61_clkhash
|
train
|
a750c8f0f50abc854a22aeb4a78ae9018f1f2a18
|
diff --git a/lib/CORL/action/spawn.rb b/lib/CORL/action/spawn.rb
index <HASH>..<HASH> 100644
--- a/lib/CORL/action/spawn.rb
+++ b/lib/CORL/action/spawn.rb
@@ -21,13 +21,10 @@ class Spawn < Plugin::CloudAction
keypair_config
- bootstrap_config = CORL.action_config(:bootstrap)
- config.defaults(bootstrap_config) if bootstrap_config
+ config.defaults(CORL.action_config(:bootstrap))
+ config.defaults(CORL.action_config(:seed))
- if seed_config = CORL.action_config(:seed)
- seed_config[:project_reference].default = "github:::coraltech/cluster-test[master]"
- config.defaults(seed_config)
- end
+ config[:project_reference].default = "github:::coraltech/cluster-test[master]"
end
end
|
Updating the configure method of the spawn action provider to make combined option merging more concise.
|
coralnexus_corl
|
train
|
6e4c09db98d66665acfb86dd988ea9bdf468475b
|
diff --git a/files/index.php b/files/index.php
index <HASH>..<HASH> 100644
--- a/files/index.php
+++ b/files/index.php
@@ -488,7 +488,7 @@ function displaydir ($wdir) {
global $basedir;
global $id;
- global $USER;
+ global $USER, $CFG;
$fullpath = $basedir.$wdir;
|
slasharguments wasn't being found (CFG not declared global)
|
moodle_moodle
|
train
|
60a7329937ae351f1d7c3beaa517845a0900314b
|
diff --git a/ldapcherry/backend/backendLdap.py b/ldapcherry/backend/backendLdap.py
index <HASH>..<HASH> 100644
--- a/ldapcherry/backend/backendLdap.py
+++ b/ldapcherry/backend/backendLdap.py
@@ -248,9 +248,9 @@ class Backend(ldapcherry.backend.Backend):
ldap_client.unbind_s()
- def add_to_group(self, username, groups):
+ def add_to_groups(self, username, groups):
ldap_client = self._bind()
- tmp = self._get_user(username, NO_ATTR)
+ tmp = self._get_user(username, ALL_ATTRS)
dn = tmp[0]
attrs = tmp[1]
attrs['dn'] = dn
@@ -261,9 +261,9 @@ class Backend(ldapcherry.backend.Backend):
ldap_client.add_s(group,ldif)
ldap_client.unbind_s()
- def rm_from_group(self, username):
+ def del_from_groups(self, username, groups):
ldap_client = self._bind()
- tmp = self._get_user(username, NO_ATTR)
+ tmp = self._get_user(username, ALL_ATTRS)
dn = tmp[0]
attrs = tmp[1]
attrs['dn'] = dn
|
Fix the LDAP backend API for group handling
|
kakwa_ldapcherry
|
train
|
b252010e87446bf4d53c8da24440e2ab7d162346
|
diff --git a/clientpool/loggregator_client_pool.go b/clientpool/loggregator_client_pool.go
index <HASH>..<HASH> 100644
--- a/clientpool/loggregator_client_pool.go
+++ b/clientpool/loggregator_client_pool.go
@@ -16,14 +16,12 @@ var ErrorEmptyClientPool = errors.New("loggregator client pool is empty")
type LoggregatorClientPool struct {
clients map[string]loggregatorclient.LoggregatorClient
logger *gosteno.Logger
- createClientsEnabled bool
loggregatorPort int
sync.RWMutex
}
-func NewLoggregatorClientPool(logger *gosteno.Logger, port int, createClients bool) *LoggregatorClientPool {
+func NewLoggregatorClientPool(logger *gosteno.Logger, port int) *LoggregatorClientPool {
return &LoggregatorClientPool{
- createClientsEnabled: createClients,
loggregatorPort: port,
clients: make(map[string]loggregatorclient.LoggregatorClient),
logger: logger,
@@ -86,9 +84,7 @@ func (pool *LoggregatorClientPool) syncWithNodes(nodes []storeadapter.StoreNode)
}
var client loggregatorclient.LoggregatorClient
- if pool.createClientsEnabled {
- client = loggregatorclient.NewLoggregatorClient(addr, pool.logger, loggregatorclient.DefaultBufferSize)
- }
+ client = loggregatorclient.NewLoggregatorClient(addr, pool.logger, loggregatorclient.DefaultBufferSize)
pool.clients[addr] = client
}
diff --git a/clientpool/loggregator_client_pool_test.go b/clientpool/loggregator_client_pool_test.go
index <HASH>..<HASH> 100644
--- a/clientpool/loggregator_client_pool_test.go
+++ b/clientpool/loggregator_client_pool_test.go
@@ -33,7 +33,7 @@ var _ = Describe("LoggregatorClientPool", func() {
stopChan = make(chan struct{})
logger = steno.NewLogger("TestLogger")
- pool = clientpool.NewLoggregatorClientPool(logger, 3456, false)
+ pool = clientpool.NewLoggregatorClientPool(logger, 3456)
})
Describe("RandomClient", func() {
@@ -94,26 +94,14 @@ var _ = Describe("LoggregatorClientPool", func() {
})
}
- Context("with 'create clients' disabled", func() {
- It("a nil client eventually appears in the pool", func() {
- defer close(stopChan)
- addServer()
-
- Eventually(pool.ListClients).Should(HaveLen(1))
- Expect(pool.ListClients()[0]).To(BeNil())
- })
- })
-
- Context("with 'create clients' enabled", func() {
- It("a non-nil client eventually appears in the pool", func() {
- defer close(stopChan)
- pool = clientpool.NewLoggregatorClientPool(logger, 3456, true)
+ It("a non-nil client eventually appears in the pool", func() {
+ defer close(stopChan)
+ pool = clientpool.NewLoggregatorClientPool(logger, 3456)
- addServer()
+ addServer()
- Eventually(pool.ListClients).Should(HaveLen(1))
- Expect(pool.ListClients()[0]).ToNot(BeNil())
- })
+ Eventually(pool.ListClients).Should(HaveLen(1))
+ Expect(pool.ListClients()[0]).ToNot(BeNil())
})
It("adds more servers later", func() {
|
Remove the clientpool option to disable client creation
|
cloudfoundry-attic_loggregatorlib
|
train
|
2e7e4e8b16eede69ad6d4022ad4eb5ace24730c7
|
diff --git a/sovrin_client/test/agent/base_agent.py b/sovrin_client/test/agent/base_agent.py
index <HASH>..<HASH> 100644
--- a/sovrin_client/test/agent/base_agent.py
+++ b/sovrin_client/test/agent/base_agent.py
@@ -64,10 +64,10 @@ class BaseAgent(TestWalletedAgent):
if os.path.isfile(claimVersionFilePath):
try:
with open(claimVersionFilePath, mode='r+') as file:
- claimVersionNumber = float(file.read()) + 0.001
+ self.claimVersionNumber = float(file.read()) + 0.001
file.seek(0)
# increment version and update file
- file.write(str(claimVersionNumber))
+ file.write(str(self.claimVersionNumber))
file.truncate()
except OSError as e:
self.logger.warn('Error occurred while reading version file: '
|
Fixed an agent restart issue where the claim version was not updated accordingly
|
hyperledger-archives_indy-client
|
train
|
c0cddd95d8abddca77440aec089a1266c3d22571
|
diff --git a/stakemachine/__main__.py b/stakemachine/__main__.py
index <HASH>..<HASH> 100755
--- a/stakemachine/__main__.py
+++ b/stakemachine/__main__.py
@@ -130,7 +130,9 @@ def main() :
"Need either a wif key or connection details for to the cli wallet."
)
- log.info("Configuration: %s" % json.dumps(config, indent=4))
+ clean_config = config.copy()
+ clean_config.pop("wif", None)
+ log.info("Configuration: %s" % json.dumps(clean_config, indent=4))
# initialize the bot infrastructure with our settings
bot.init(config)
|
[main] don't show private-key
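A minimal sketch of the redaction pattern used here (copy the config, drop the secret entries, log only the copy; the secret_keys default is illustrative):

import json
import logging

log = logging.getLogger(__name__)

def log_config(config, secret_keys=('wif',)):
    """Log a configuration dict with secret entries removed."""
    clean = {k: v for k, v in config.items() if k not in secret_keys}
    log.info('Configuration: %s', json.dumps(clean, indent=4))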
|
xeroc_stakemachine
|
train
|
f09a55e8d103b9a5837f98c92d83e74a0a329aff
|
diff --git a/test/test_utf8_sanitizer.rb b/test/test_utf8_sanitizer.rb
index <HASH>..<HASH> 100644
--- a/test/test_utf8_sanitizer.rb
+++ b/test/test_utf8_sanitizer.rb
@@ -153,15 +153,18 @@ describe Rack::UTF8Sanitizer do
end
describe "with form data" do
- def sanitize_form_data
+ def request_env
@plain_input = "foo bar лол".force_encoding('UTF-8')
- @uri_input = "http://bar/foo+%2F%3A+bar+%D0%BB%D0%BE%D0%BB".force_encoding('UTF-8')
- env = @app.({
+ {
"REQUEST_METHOD" => "POST",
"CONTENT_TYPE" => "application/x-www-form-urlencoded;foo=bar",
"HTTP_USER_AGENT" => @plain_input,
"rack.input" => @rack_input,
- })
+ }
+ end
+ def sanitize_form_data(request_env = request_env)
+ @uri_input = "http://bar/foo+%2F%3A+bar+%D0%BB%D0%BE%D0%BB".force_encoding('UTF-8')
+ env = @app.(request_env)
sanitized_input = env['rack.input'].read
sanitized_input.encoding.should == Encoding::UTF_8
sanitized_input.should.be.valid_encoding
@@ -183,6 +186,26 @@ describe Rack::UTF8Sanitizer do
end
end
+ it "cannot handle nil CONTENT_TYPE" do
+ input = "foo=bla&quux=bar"
+ @rack_input = StringIO.new input
+
+ env = request_env.update('CONTENT_TYPE' => nil)
+ lambda {
+ sanitize_form_data(env)
+ }.should.raise(NoMethodError)
+ end
+
+ it "cannot handle empty CONTENT_TYPE" do
+ input = "foo=bla&quux=bar"
+ @rack_input = StringIO.new input
+
+ env = request_env.update('CONTENT_TYPE' => '')
+ lambda {
+ sanitize_form_data(env)
+ }.should.raise(NoMethodError)
+ end
+
it "sanitizes StringIO rack.input with bad encoding" do
input = "foo=bla&quux=bar\xED"
@rack_input = StringIO.new input
|
Test that an exception is raised on no CONTENT_TYPE
|
whitequark_rack-utf8_sanitizer
|
train
|
270088f9f1bf89e45d229579f1a856788978ae1d
|
diff --git a/sorl/thumbnail/kvstores/base.py b/sorl/thumbnail/kvstores/base.py
index <HASH>..<HASH> 100644
--- a/sorl/thumbnail/kvstores/base.py
+++ b/sorl/thumbnail/kvstores/base.py
@@ -114,7 +114,8 @@ class KVStoreBase(object):
want to use the ``cleanup`` method instead.
"""
all_keys = self._find_keys_raw(settings.THUMBNAIL_KEY_PREFIX)
- self._delete_raw(*all_keys)
+ if all_keys:
+ self._delete_raw(*all_keys)
def _get(self, key, identity='image'):
"""
|
fix: empty list of keys in clear method
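A minimal sketch of the guard (hypothetical store API; some key-value backends reject a delete call given zero keys):

def delete_all(store, prefix):
    """Delete every key under a prefix, tolerating an empty match."""
    keys = store.find_keys(prefix)
    if keys:  # avoid calling the backend with an empty argument list
        store.delete_raw(*keys)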
|
jazzband_sorl-thumbnail
|
train
|
2a77001f754d7fe793b757c614a7d5c48ae57158
|
diff --git a/conf/full/layers.commented.json b/conf/full/layers.commented.json
index <HASH>..<HASH> 100644
--- a/conf/full/layers.commented.json
+++ b/conf/full/layers.commented.json
@@ -100,6 +100,34 @@
}
},
"style": "#defaultStyle"
+ },
+ {
+ "id": "f3",
+ "title": {
+ "de": "Bonn: Ortsteile",
+ "en": "Bonn: Neighbourhoods"
+ },
+ "type": "GeoJSON",
+ "source": {
+ "url": "http://stadtplan.bonn.de/geojson?Thema=21247&koordsys=4326",
+ "useProxy": true,
+ "attribution": "<a href='https://creativecommons.org/publicdomain/zero/1.0/deed.de'>Creative Commons Zero (CC0)</a>"
+ },
+ "style": "#defaultStyle"
+ },
+ {
+ "id": "f4",
+ "title": {
+ "de": "Bonn: Sirenenstandorte",
+ "en": "Bonn: Siren locations"
+ },
+ "type": "GeoJSON",
+ "source": {
+ "url": "http://stadtplan.bonn.de/geojson?Thema=14198&koordsys=4326",
+ "useProxy": true,
+ "attribution": "<a href='https://creativecommons.org/publicdomain/zero/1.0/deed.de'>Creative Commons Zero (CC0)</a>"
+ },
+ "style": "#defaultStyle"
}
]
}
diff --git a/conf/full/webpack.js b/conf/full/webpack.js
index <HASH>..<HASH> 100644
--- a/conf/full/webpack.js
+++ b/conf/full/webpack.js
@@ -31,7 +31,8 @@ module.exports = {
proxyValidRequests: [
'a.tile.openstreetmap.org',
'b.tile.openstreetmap.org',
- 'c.tile.openstreetmap.org'
+ 'c.tile.openstreetmap.org',
+ 'stadtplan.bonn.de'
]
}
},
diff --git a/src/configurators/LayerFactory.js b/src/configurators/LayerFactory.js
index <HASH>..<HASH> 100644
--- a/src/configurators/LayerFactory.js
+++ b/src/configurators/LayerFactory.js
@@ -22,6 +22,7 @@ export const SuperType = {
export const LayerType = {
CATEGORY: 'Category',
+ GEOJSON: 'GeoJSON',
KML: 'KML',
WMS: 'WMS',
TILEWMS: 'TileWMS',
@@ -307,6 +308,19 @@ export class LayerFactory {
}
break
+ case LayerType.GEOJSON:
+ this.configureLayerSourceLoadingStrategy_(optionsCopy.source)
+ optionsCopy.source.defaultStyle = this.map_.get('styling').getStyle(optionsCopy.style || '#defaultStyle')
+
+ optionsCopy.source.type = 'GeoJSON'
+
+ if (superType === SuperType.QUERYLAYER) {
+ optionsCopy.source = new QuerySource(optionsCopy.source)
+ } else {
+ optionsCopy.source = new SourceServerVector(optionsCopy.source)
+ }
+ layer = new VectorLayer(optionsCopy)
+ break
case LayerType.KML:
this.configureLayerSourceLoadingStrategy_(optionsCopy.source)
|
GeoJSON was lacking a few straightforward LOC (#<I>)
* GeoJSON was lacking a few straightforward LOC
* changed case
* Added demonstration for GeoJSON (polygons, points)
|
KlausBenndorf_guide4you
|
train
|
eea48c40988af5bf40d6ac1e5941b864b1bb9316
|
diff --git a/lib/env/configFile.js b/lib/env/configFile.js
index <HASH>..<HASH> 100644
--- a/lib/env/configFile.js
+++ b/lib/env/configFile.js
@@ -47,6 +47,6 @@ module.exports = {
async read() {
const configPath = getConfigPath();
- return require(configPath); // eslint-disable-line
+ return Promise.resolve(require(configPath)); // eslint-disable-line
}
};
diff --git a/samples/migrate-mongo-config.js b/samples/migrate-mongo-config.js
index <HASH>..<HASH> 100644
--- a/samples/migrate-mongo-config.js
+++ b/samples/migrate-mongo-config.js
@@ -23,4 +23,4 @@ const config = {
};
//Return the config as a promise
-module.exports = Promise.resolve(config);
+module.exports = config;
|
Updated so that the config can be a promise or a plain object
|
seppevs_migrate-mongo
|
train
|
6d219dc3766dffe64606f013054b81dae83e994e
|
diff --git a/src/main/java/org/kurento/modulecreator/VersionManager.java b/src/main/java/org/kurento/modulecreator/VersionManager.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/kurento/modulecreator/VersionManager.java
+++ b/src/main/java/org/kurento/modulecreator/VersionManager.java
@@ -132,6 +132,7 @@ public class VersionManager {
version = removeDevSuffix(version);
} else {
version = gitRepo + "#develop";
+ return version;
}
}
|
If the node version comes from a git repo, there is no need to parse it as a semantic version
Change-Id: I9f<I>d<I>d<I>d<I>f<I>fd7a<I>eb0ba8
|
Kurento_kurento-module-creator
|
train
|
23a85febbd0a0abb6b8f033bc1a7c4141c7f1de6
|
diff --git a/pyquil/reference_simulator.py b/pyquil/reference_simulator.py
index <HASH>..<HASH> 100644
--- a/pyquil/reference_simulator.py
+++ b/pyquil/reference_simulator.py
@@ -166,19 +166,29 @@ class ReferenceDensitySimulator(AbstractQuantumSimulator):
self.density = np.zeros((2 ** n_qubits, 2 ** n_qubits), dtype=np.complex128)
self.density[0, 0] = complex(1.0, 0)
- def sample_bitstrings(self, n_samples):
+ def sample_bitstrings(self, n_samples, tol_factor: float = 1e8):
"""
Sample bitstrings from the distribution defined by the wavefunction.
Qubit 0 is at ``out[:, 0]``.
:param n_samples: The number of bitstrings to sample
+ :param tol_factor: Tolerance to set imaginary probabilities to zero, relative to
+ machine epsilon.
:return: An array of shape (n_samples, n_qubits)
"""
if self.rs is None:
raise ValueError("You have tried to perform a stochastic operation without setting the "
"random state of the simulator. Might I suggest using a PyQVM object?")
- probabilities = np.real_if_close(np.diagonal(self.density))
+
+ # for np.real_if_close the actual tolerance is (machine_eps * tol_factor),
+ # where `machine_epsilon = np.finfo(float).eps`. If we use tol_factor = 1e8, then the
+ # overall tolerance is \approx 2.2e-8.
+ probabilities = np.real_if_close(np.diagonal(self.density), tol=tol_factor)
+ # Next set negative probabilities to zero
+ probabilities = [0 if p < 0.0 else p for p in probabilities]
+ # Ensure they sum to one
+ probabilities = probabilities / np.sum(probabilities)
possible_bitstrings = all_bitstrings(self.n_qubits)
inds = self.rs.choice(2 ** self.n_qubits, n_samples, p=probabilities)
bitstrings = possible_bitstrings[inds, :]
diff --git a/pyquil/tests/test_reference_density_simulator.py b/pyquil/tests/test_reference_density_simulator.py
index <HASH>..<HASH> 100644
--- a/pyquil/tests/test_reference_density_simulator.py
+++ b/pyquil/tests/test_reference_density_simulator.py
@@ -1,5 +1,6 @@
import numpy as np
import pytest
+import networkx as nx
import pyquil.gate_matrices as qmats
from pyquil import Program
@@ -7,6 +8,12 @@ from pyquil.gates import *
from pyquil.pyqvm import PyQVM
from pyquil.reference_simulator import ReferenceDensitySimulator, ReferenceWavefunctionSimulator
from pyquil.unitary_tools import lifted_gate_matrix
+from pyquil.paulis import sI, sX, sY, sZ
+from pyquil.device import NxDevice
+from pyquil.api import QuantumComputer
+from pyquil.api._qac import AbstractCompiler
+from pyquil.operator_estimation import (measure_observables, ExperimentSetting,
+ TomographyExperiment, zeros_state)
def test_qaoa_density():
@@ -255,3 +262,50 @@ def test_multiqubit_decay_bellstate():
qam.execute(program)
assert np.allclose(qam.wf_simulator.density, state)
+
+
+def test_for_negative_probabilities():
+ # trivial program to do state tomography on
+ prog = Program(I(0))
+
+ # make TomographyExperiment
+ expt_settings = [ExperimentSetting(zeros_state([0]), pt) for pt in [sI(0), sX(0), sY(0), sZ(0)]]
+ experiment_1q = TomographyExperiment(settings=expt_settings, program=prog)
+
+ # make an abstract compiler
+ class DummyCompiler(AbstractCompiler):
+ def get_version_info(self):
+ return {}
+
+ def quil_to_native_quil(self, program: Program):
+ return program
+
+ def native_quil_to_executable(self, nq_program: Program):
+ return nq_program
+
+ # make a quantum computer object
+ device = NxDevice(nx.complete_graph(1))
+ qc_density = QuantumComputer(name='testy!',
+ qam=PyQVM(n_qubits=1,
+ quantum_simulator_type=ReferenceDensitySimulator),
+ device=device,
+ compiler=DummyCompiler())
+
+ # initialize with a pure state
+ initial_density = np.array([[1.0, 0.0], [0.0, 0.0]])
+ qc_density.qam.wf_simulator.density = initial_density
+
+ try:
+ list(measure_observables(qc=qc_density, tomo_experiment=experiment_1q, n_shots=3000))
+ except ValueError as e:
+ # the error is from np.random.choice by way of self.rs.choice in ReferenceDensitySimulator
+ assert str(e) != 'probabilities are not non-negative'
+
+ # initialize with a mixed state
+ initial_density = np.array([[0.9, 0.0], [0.0, 0.1]])
+ qc_density.qam.wf_simulator.density = initial_density
+
+ try:
+ list(measure_observables(qc=qc_density, tomo_experiment=experiment_1q, n_shots=3000))
+ except ValueError as e:
+ assert str(e) != 'probabilities are not non-negative'
|
Bugfix negative probabilities in reference simulator (#<I>)
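A numpy sketch of the sanitization applied before sampling: floating-point round-off can leave tiny imaginary or negative entries on the density diagonal, which np.random.choice rejects as an invalid distribution.

import numpy as np

def sanitize_probabilities(diag, tol_factor=1e8):
    """Clamp floating-point noise so the diagonal is a valid distribution."""
    p = np.real_if_close(diag, tol=tol_factor)  # drop tiny imaginary parts
    p = np.clip(p.real, 0.0, None)              # zero out tiny negatives
    return p / p.sum()                          # renormalize to sum to one

rng = np.random.RandomState(0)
p = sanitize_probabilities(np.array([1.0 + 1e-12j, -2e-17, 0.5]))
print(rng.choice(len(p), size=5, p=p))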
|
rigetti_pyquil
|
train
|
fdcb3501acb51474be13854f76fbae4033a694f1
|
diff --git a/lib/right_api_client/client.rb b/lib/right_api_client/client.rb
index <HASH>..<HASH> 100644
--- a/lib/right_api_client/client.rb
+++ b/lib/right_api_client/client.rb
@@ -347,6 +347,8 @@ module RightApi
# will be used later to add relevant methods to relevant resources
type = if result.content_type.index('rightscale')
get_resource_type(result.content_type)
+ elsif result.content_type.index('text/plain')
+ 'text'
else
''
end
@@ -369,11 +371,7 @@ module RightApi
data = if resource_type == 'text'
{ 'text' => body }
else
- if res && res.headers[:content_type].split(';').first.strip == 'text/plain'
- {:value => body}
- else
- JSON.parse(body, :allow_nan => true)
- end
+ JSON.parse(body, :allow_nan => true)
end
[resource_type, path, data]
|
Move text handling to match up with what was already there.
|
rightscale_right_api_client
|
train
|