| hash (string, 40 chars) | diff (string, 131-114k chars) | message (string, 7-980 chars) | project (string, 5-67 chars) | split (1 class) |
|---|---|---|---|---|
fccf5476154e5a51e7a0c6c78417b3f834a526db
|
diff --git a/django_tenants/migration_executors/base.py b/django_tenants/migration_executors/base.py
index <HASH>..<HASH> 100644
--- a/django_tenants/migration_executors/base.py
+++ b/django_tenants/migration_executors/base.py
@@ -16,7 +16,7 @@ def run_migrations(args, options, executor_codename, schema_name, allow_atomic=T
def style_func(msg):
percent_str = ''
if idx is not None and count is not None and count > 0:
- percent_str = '%d/%d (%s%%) ' % (idx, count, int(100*idx/count))
+ percent_str = '%d/%d (%s%%) ' % (idx + 1, count, int(100*(idx + 1)/count))
return '[%s%s:%s] %s' % (
percent_str,
style.NOTICE(executor_codename),
|
ITAL-<I> use increased index for progress reporting
|
tomturner_django-tenants
|
train
|
5d87ea09761ed485aae8a631fbee82c99e0f9199
|
diff --git a/tests/Deployment/Connection/ConnectedConnectionAdapterTestCase.php b/tests/Deployment/Connection/ConnectedConnectionAdapterTestCase.php
index <HASH>..<HASH> 100644
--- a/tests/Deployment/Connection/ConnectedConnectionAdapterTestCase.php
+++ b/tests/Deployment/Connection/ConnectedConnectionAdapterTestCase.php
@@ -46,6 +46,14 @@ abstract class ConnectedConnectionAdapterTestCase extends ConnectionAdapterTestC
}
/**
+ * Tests if ConnectionAdapterInterface::changeWorkingDirectory returns false without connection.
+ */
+ public function testChangeWorkingDirectoryReturnsFalseWhenNotConnected()
+ {
+ $this->assertFalse($this->connectionAdapter->changeWorkingDirectory($this->workspaceUtility->getWorkspacePath()));
+ }
+
+ /**
* Tests if ConnectionAdapterInterface::executeCommand returns ProcessExecutionResult with failure exit code and error output without connection.
*/
public function testExecuteCommandReturnsFalseWhenNotConnected()
@@ -58,6 +66,14 @@ abstract class ConnectedConnectionAdapterTestCase extends ConnectionAdapterTestC
}
/**
+ * Tests if ConnectionAdapterInterface::getWorkingDirectory returns false without connection.
+ */
+ public function testGetWorkingDirectoryReturnsFalseWhenNotConnected()
+ {
+ $this->assertFalse($this->connectionAdapter->getWorkingDirectory());
+ }
+
+ /**
* Tests if ConnectionAdapterInterface::getDirectoryContentsList returns false without connection.
*/
public function testGetDirectoryContentsListReturnsEmtpyArrayWhenNotConnected()
diff --git a/tests/Deployment/Connection/ConnectionAdapterTestCase.php b/tests/Deployment/Connection/ConnectionAdapterTestCase.php
index <HASH>..<HASH> 100644
--- a/tests/Deployment/Connection/ConnectionAdapterTestCase.php
+++ b/tests/Deployment/Connection/ConnectionAdapterTestCase.php
@@ -145,6 +145,30 @@ abstract class ConnectionAdapterTestCase extends PHPUnit_Framework_TestCase
}
/**
+ * Tests if ConnectionAdapterInterface::changeWorkingDirectory returns true on successful change to a different working directory.
+ *
+ * @depends testConnectReturnsTrue
+ */
+ public function testChangeWorkingDirectoryReturnsTrue()
+ {
+ $this->connectionAdapter->connect();
+
+ $this->assertTrue($this->connectionAdapter->changeWorkingDirectory($this->workspaceUtility->getWorkspacePath()));
+ }
+
+ /**
+ * Tests if ConnectionAdapterInterface::changeWorkingDirectory returns false when trying to change to a non-existing working directory.
+ *
+ * @depends testChangeWorkingDirectoryReturnsTrue
+ */
+ public function testChangeWorkingDirectoryWithNonExistingDirectoryReturnsFalse()
+ {
+ $this->connectionAdapter->connect();
+
+ $this->assertFalse($this->connectionAdapter->changeWorkingDirectory($this->workspaceUtility->getWorkspacePath().'/non-existing-directory'));
+ }
+
+ /**
* Tests if ConnectionAdapterInterface::executeCommand returns the expected output.
*
* @depends testDisconnectReturnsTrue
@@ -161,6 +185,19 @@ abstract class ConnectionAdapterTestCase extends PHPUnit_Framework_TestCase
}
/**
+ * Tests if ConnectionAdapterInterface::getWorkingDirectory returns the expected working directory.
+ *
+ * @depends testChangeWorkingDirectoryWithNonExistingDirectoryReturnsFalse
+ */
+ public function testGetWorkingDirectory()
+ {
+ $this->connectionAdapter->connect();
+ $this->connectionAdapter->changeWorkingDirectory($this->workspaceUtility->getWorkspacePath());
+
+ $this->assertSame(realpath($this->workspaceUtility->getWorkspacePath()), $this->connectionAdapter->getWorkingDirectory());
+ }
+
+ /**
* Tests if ConnectionAdapterInterface::getDirectoryContentsList returns the expected array.
*
* @depends testConnectReturnsTrue
|
Added unit tests for ConnectionAdapterInterface::changeWorkingDirectory and ConnectionAdapterInterface::getWorkingDirectory
|
accompli_accompli
|
train
|
79cd83c6ed9ea91d1da92304becfb96716489209
|
diff --git a/modules/wycs/src/wycs/transforms/VerificationCheck.java b/modules/wycs/src/wycs/transforms/VerificationCheck.java
index <HASH>..<HASH> 100644
--- a/modules/wycs/src/wycs/transforms/VerificationCheck.java
+++ b/modules/wycs/src/wycs/transforms/VerificationCheck.java
@@ -162,13 +162,11 @@ public class VerificationCheck implements Transform<WycsFile> {
SimpleRewriter rewriter = new SimpleRewriter(Solver.inferences,
Solver.reductions, Solver.SCHEMA);
rewriter.apply(automaton);
- //Solver.infer(automaton);
if(!automaton.get(automaton.getRoot(0)).equals(Solver.False)) {
String msg = stmt.message;
msg = msg == null ? "assertion failure" : msg;
- throw new AssertionFailure(msg,stmt,rewriter,automaton,original);
- //throw new AssertionFailure(msg,stmt,null,automaton,original);
+ throw new AssertionFailure(msg,stmt,rewriter,automaton,original);
}
long endTime = System.currentTimeMillis();
diff --git a/modules/wyrl/src/wyautl/rw/SimpleRewriter.java b/modules/wyrl/src/wyautl/rw/SimpleRewriter.java
index <HASH>..<HASH> 100644
--- a/modules/wyrl/src/wyautl/rw/SimpleRewriter.java
+++ b/modules/wyrl/src/wyautl/rw/SimpleRewriter.java
@@ -104,9 +104,9 @@ public class SimpleRewriter implements RewriteSystem {
* Provides a limit on the number of probes which are permitted during a
* single call to <code>apply()</code>. After this point is reached, the
* method will return immediately (i.e. even if there are more reductions
- * that could be applied). The default value is 100000.
+ * that could be applied). The default value is currently 500000.
*/
- private int maxProbes = 100000;
+ private int maxProbes = 500000;
public SimpleRewriter(InferenceRule[] inferences, ReductionRule[] reductions, Schema schema) {
this.inferences = inferences;
|
WYCS: ok, we're almost back to where we were a month ago ... only it's taking a lot less time!!
|
Whiley_WhileyCompiler
|
train
|
ca858fc4685e862fdd5615dfd06dabf9c9391f6f
|
diff --git a/aeron-system-tests/src/test/java/io/aeron/ExclusivePublicationTest.java b/aeron-system-tests/src/test/java/io/aeron/ExclusivePublicationTest.java
index <HASH>..<HASH> 100644
--- a/aeron-system-tests/src/test/java/io/aeron/ExclusivePublicationTest.java
+++ b/aeron-system-tests/src/test/java/io/aeron/ExclusivePublicationTest.java
@@ -153,47 +153,46 @@ public class ExclusivePublicationTest
Tests.awaitConnections(subscription, 2);
final ExecutorService threadPool = Executors.newFixedThreadPool(2);
- final CountDownLatch latch = new CountDownLatch(2);
- threadPool.submit(
- () ->
- {
- latch.countDown();
- latch.await();
- for (int count = 0; count < fragmentsPerThread; count++)
+ try
+ {
+ final CountDownLatch latch = new CountDownLatch(2);
+ threadPool.submit(
+ () ->
{
- while (publicationOne.offer(srcBuffer, 0, MESSAGE_LENGTH) < 0L)
+ latch.countDown();
+ latch.await();
+ for (int count = 0; count < fragmentsPerThread; count++)
{
- Tests.yield();
+ while (publicationOne.offer(srcBuffer, 0, MESSAGE_LENGTH) < 0L)
+ {
+ Tests.yield();
+ }
}
- }
- return null;
- });
- threadPool.submit(
- () ->
- {
- latch.countDown();
- latch.await();
- for (int count = 0; count < fragmentsPerThread; count++)
+ return null;
+ });
+ threadPool.submit(
+ () ->
{
- while (publicationTwo.offer(srcBuffer, 0, MESSAGE_LENGTH) < 0L)
+ latch.countDown();
+ latch.await();
+ for (int count = 0; count < fragmentsPerThread; count++)
{
- Tests.yield();
+ while (publicationTwo.offer(srcBuffer, 0, MESSAGE_LENGTH) < 0L)
+ {
+ Tests.yield();
+ }
}
- }
- return null;
- });
- threadPool.shutdown();
+ return null;
+ });
- int totalFragmentsRead = 0;
- try
- {
+ int totalFragmentsRead = 0;
do
{
totalFragmentsRead += pollFragments(subscription, fragmentHandler);
}
while (totalFragmentsRead < expectedNumberOfFragments);
}
- catch (final Exception ex)
+ finally
{
threadPool.shutdownNow();
threadPool.awaitTermination(1, TimeUnit.SECONDS);
@@ -318,49 +317,49 @@ public class ExclusivePublicationTest
pubTwoPayload.setMemory(0, MESSAGE_LENGTH, Byte.MAX_VALUE);
final ExecutorService threadPool = Executors.newFixedThreadPool(2);
- final CountDownLatch latch = new CountDownLatch(2);
- threadPool.submit(
- () ->
- {
- latch.countDown();
- latch.await();
- for (int count = 0; count < fragmentsPerThread; count++)
+ try
+ {
+ final CountDownLatch latch = new CountDownLatch(2);
+ threadPool.submit(
+ () ->
{
- while (publicationOne
- .offer(pubOneHeader, 0, SIZE_OF_INT, pubOnePayload, 0, MESSAGE_LENGTH) < 0L)
+ latch.countDown();
+ latch.await();
+ for (int count = 0; count < fragmentsPerThread; count++)
{
- Tests.yield();
+ while (publicationOne
+ .offer(pubOneHeader, 0, SIZE_OF_INT, pubOnePayload, 0, MESSAGE_LENGTH) < 0L)
+ {
+ Tests.yield();
+ }
}
- }
- return null;
- });
- threadPool.submit(
- () ->
- {
- latch.countDown();
- latch.await();
- for (int count = 0; count < fragmentsPerThread; count++)
+ return null;
+ });
+ threadPool.submit(
+ () ->
{
- while (publicationTwo
- .offer(pubTwoHeader, 0, SIZE_OF_INT, pubTwoPayload, 0, MESSAGE_LENGTH) < 0L)
+ latch.countDown();
+ latch.await();
+ for (int count = 0; count < fragmentsPerThread; count++)
{
- Tests.yield();
+ while (publicationTwo
+ .offer(pubTwoHeader, 0, SIZE_OF_INT, pubTwoPayload, 0, MESSAGE_LENGTH) < 0L)
+ {
+ Tests.yield();
+ }
}
- }
- return null;
- });
- threadPool.shutdown();
+ return null;
+ });
+ threadPool.shutdown();
- int totalFragmentsRead = 0;
- try
- {
+ int totalFragmentsRead = 0;
do
{
totalFragmentsRead += pollFragments(subscription, fragmentHandler);
}
while (totalFragmentsRead < expectedNumberOfFragments);
}
- catch (final Exception ex)
+ finally
{
threadPool.shutdownNow();
threadPool.awaitTermination(1, TimeUnit.SECONDS);
|
[Java] Ensure proper termination of the ExecutorServices.
|
real-logic_aeron
|
train
|
3bd42f74725c51d9af130c0be68917dc4129f409
|
diff --git a/grammpy/Grammars/RawGrammar.py b/grammpy/Grammars/RawGrammar.py
index <HASH>..<HASH> 100644
--- a/grammpy/Grammars/RawGrammar.py
+++ b/grammpy/Grammars/RawGrammar.py
@@ -7,10 +7,11 @@ Part of grammpy
"""
import inspect
-from grammpy.Terminal import Terminal
-from grammpy.Nonterminal import Nonterminal
-from grammpy.HashContainer import HashContainer
-from grammpy.exceptions import NotNonterminalException
+from ..Terminal import Terminal
+from ..Nonterminal import Nonterminal
+from ..HashContainer import HashContainer
+from ..exceptions import NotNonterminalException, NotRuleException
+from ..IsMethodsRuleExtension import IsMethodsRuleExtension
class RawGrammar:
@@ -48,8 +49,7 @@ class RawGrammar:
return self.get_term(term)
def terms(self):
- return [Terminal(term,self) for term in self.__terminals.all()]
-
+ return [Terminal(term, self) for term in self.__terminals.all()]
def terms_count(self):
return self.__terminals.count()
@@ -95,23 +95,41 @@ class RawGrammar:
return self.__nonterminals.count()
# Rules part
- def add_rule(self, rules):
- raise NotImplementedError()
-
- def remove_rule(self, nonterms=None):
- raise NotImplementedError()
+ def __control_rules(self, rules):
+ rules = HashContainer.to_iterable(rules)
+ for rule in rules:
+ if not inspect.isclass(rule) or not issubclass(rule, IsMethodsRuleExtension):
+ raise NotRuleException(rule)
+ rule.validate(self)
+ return rules
- def have_rule(self, nonterms):
- raise NotImplementedError()
-
- def get_rule(self, nonterms=None):
- raise NotImplementedError()
-
- def rule(self, nonterms=None):
- raise NotImplementedError()
+ def add_rule(self, rules):
+ rules = self.__control_rules(rules)
+ return self.__rules.add(rules)
+
+ def remove_rule(self, rules=None):
+ if rules is None:
+ return self.__rules.remove()
+ rules = self.__control_rules(rules)
+ return self.__rules.remove(rules)
+
+ def have_rule(self, rules):
+ rules = self.__control_rules(rules)
+ return self.__rules.have(rules)
+
+ def get_rule(self, rules=None):
+ if rules is None:
+ return self.__rules.get()
+ converted = self.__control_rules(rules)
+ if not HashContainer.is_iterable(rules):
+ return self.__rules.get(converted)[0]
+ return self.__rules.get(converted)
+
+ def rule(self, rules=None):
+ return self.get_rule(rules)
def rules(self):
- raise NotImplementedError()
+ return [rule for rule in self.__rules.get() if rule._active]
def rules_count(self):
- raise NotImplementedError()
+ return len(self.rules())
|
Add implementation of rules* functions on grammar
|
PatrikValkovic_grammpy
|
train
|
c7e25a050373d7986b9626a2f283a74914a90845
|
diff --git a/shared/desktop/app/installer.desktop.js b/shared/desktop/app/installer.desktop.js
index <HASH>..<HASH> 100644
--- a/shared/desktop/app/installer.desktop.js
+++ b/shared/desktop/app/installer.desktop.js
@@ -3,7 +3,7 @@ import * as SafeElectron from '../../util/safe-electron.desktop'
import exec from './exec.desktop'
import {keybaseBinPath} from './paths.desktop'
import {quit} from './ctl.desktop'
-import {isWindows} from '../../constants/platform'
+import {isLinux, isWindows} from '../../constants/platform'
import logger from '../../logger'
import {
ExitCodeFuseKextError,
@@ -45,8 +45,8 @@ type CheckErrorsResult = {
// Reminder: hot-server doesn't reload code in here (/desktop)
export default (callback: (err: any) => void): void => {
logger.info('Installer check starting now')
- if (isWindows) {
- logger.info('Skipping installer on win32')
+ if (isWindows || isLinux) {
+ logger.info('Skipping installer on this platform')
callback(null)
return
}
|
Don't try to run the installer binary on Linux (#<I>)
* Don't try to run the installer binary on Linux
* fix import
|
keybase_client
|
train
|
207ec5a9d6adbce7792c17aa74008ace0ccf8661
|
diff --git a/zinnia/tests/test_mixins.py b/zinnia/tests/test_mixins.py
index <HASH>..<HASH> 100644
--- a/zinnia/tests/test_mixins.py
+++ b/zinnia/tests/test_mixins.py
@@ -164,6 +164,7 @@ class MixinTestCase(TestCase):
instance = EntryArchiveTemplateResponseMixin()
instance.get_year = get_year
instance.get_month = get_month
+ instance.get_week = get_week
instance.get_day = get_day
instance.object = FakeEntry()
self.assertEqual(
|
Completing the tests of EntryArchiveTemplateResponseMixin
|
Fantomas42_django-blog-zinnia
|
train
|
91608ada46031a31f3af9c453f41d69377b7305b
|
diff --git a/Brocfile.js b/Brocfile.js
index <HASH>..<HASH> 100644
--- a/Brocfile.js
+++ b/Brocfile.js
@@ -1,3 +1,5 @@
+/* jshint node: true */
+
var fs = require('fs');
var util = require('util');
var path = require('path');
|
Brocfile - Inform JSHint that `process` is a global
|
emberjs_ember.js
|
train
|
4533875c75c23edf3dad57f869df883b236f8172
|
diff --git a/Gruntfile.js b/Gruntfile.js
index <HASH>..<HASH> 100644
--- a/Gruntfile.js
+++ b/Gruntfile.js
@@ -22,9 +22,9 @@ module.exports = function( grunt ) {
template: require('grunt-template-jasmine-requirejs'),
templateOptions: {
requireConfig: {
- baseUrl: 'lib',
+ baseUrl: './lib/',
paths: {
- app: '../src/js' //fails without trailing slash
+ app: '../src/js/' //fails without trailing slash
},
shim: {
'xpath/build/xpathjs_javarosa': {
|
another attempt to resolve issue running tests in Travis
|
enketo_enketo-core
|
train
|
a7abcaefd91f67223a05360a432d5137a592b882
|
diff --git a/dev/com.ibm.ws.security.oauth.oidc_fat.common/src/com/ibm/ws/security/oauth_oidc/fat/commonTest/CommonValidationTools.java b/dev/com.ibm.ws.security.oauth.oidc_fat.common/src/com/ibm/ws/security/oauth_oidc/fat/commonTest/CommonValidationTools.java
index <HASH>..<HASH> 100644
--- a/dev/com.ibm.ws.security.oauth.oidc_fat.common/src/com/ibm/ws/security/oauth_oidc/fat/commonTest/CommonValidationTools.java
+++ b/dev/com.ibm.ws.security.oauth.oidc_fat.common/src/com/ibm/ws/security/oauth_oidc/fat/commonTest/CommonValidationTools.java
@@ -6,7 +6,7 @@
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
- * IBM Corporation - initial API and implementation
+ * IBM Corporation - initial API and implementation
*******************************************************************************/
package com.ibm.ws.security.oauth_oidc.fat.commonTest;
@@ -781,16 +781,16 @@ public class CommonValidationTools {
msgUtils.assertTrueAndLog(thisMethod, "Expires in is null", value != null);
Long expectedExpires = setAccessTimeout(settings);
Long actualExpires = Long.valueOf(value).longValue();
- if ((actualExpires <= expectedExpires) && (actualExpires > expectedExpires - 10L)) {
- Log.info(thisClass, thisMethod, "expires in was within 10ms of expected time");
+ if ((actualExpires <= expectedExpires) && (actualExpires > expectedExpires - 20L)) {
+ Log.info(thisClass, thisMethod, "expires in was within 20sec of expected time");
} else {
- fail("Expires in value expected: " + expectedExpires + " but received: " + actualExpires + " Test expects it within 10ms");
+ fail("Expires in value expected: " + expectedExpires + " but received: " + actualExpires + " Test expects it within 20sec");
}
}
if (key.equals(Constants.STATE_KEY)) {
msgUtils.assertTrueAndLog(thisMethod, "State is null", value != null);
if (Constants.EXIST_WITH_ANY_VALUE.equals(settings.getState())) {
- Log.info(thisClass,thisMethod, "Skipping state check at callers request") ;
+ Log.info(thisClass, thisMethod, "Skipping state check at callers request");
} else {
msgUtils.assertTrueAndLog(thisMethod, "State value expected: " + settings.getState() + " but received: " + value, value.equals(settings.getState()));
}
@@ -1681,7 +1681,7 @@ public class CommonValidationTools {
tokenVerifier.verifyAndDeserialize();
result = tokenVerifier.isSigned();
} else {
- if (isInList(Constants.ALL_TEST_SIGALGS, sigAlg) ) {
+ if (isInList(Constants.ALL_TEST_SIGALGS, sigAlg)) {
// return true - we're not ready to validate RS256 quite yet
return true;
} else {
@@ -2294,8 +2294,8 @@ public class CommonValidationTools {
public Expectations getDefault404Expectations(String testAction) {
Expectations expectations = new Expectations();
expectations.addExpectation(new ResponseStatusExpectation(testAction, HttpServletResponse.SC_NOT_FOUND));
- expectations.addExpectation(new ResponseFullExpectation(testAction, Constants.STRING_MATCHES, MessageConstants.CWOAU0073E_FRONT_END_ERROR + ".+",
- "Did not get public facing error message saying authentication failed."));
+ expectations.addExpectation(new ResponseFullExpectation(testAction, Constants.STRING_MATCHES, MessageConstants.CWOAU0073E_FRONT_END_ERROR + ".+",
+ "Did not get public facing error message saying authentication failed."));
return expectations;
}
|
Allow a larger window when assuming exp time is valid
|
OpenLiberty_open-liberty
|
train
|
8dcefb5c5554418445f670933fb5ef22b57c8529
|
diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -61,7 +61,7 @@ See [cgminer\_api\_client](https://github.com/jramos/cgminer_api_client#configur
Add the following to your ``Gemfile``:
- gem 'cgminer_monitor', '~> 0.0.8'
+ gem 'cgminer_monitor', '~> 0.0.9'
Update your ``config/routes.rb`` file to mount the engine:
@@ -82,7 +82,7 @@ Endpoints:
Data point format:
- [timestamp, avg_hashrate, pool_rejected_hashrate, hardware_error_hashrate]
+ [timestamp, avg_hashrate, pool_rejected_hashrate, pool_stale_hashrate, hardware_error_hashrate]
##### Temperatures
diff --git a/app/controllers/cgminer_monitor/api/v1/graph_data_controller.rb b/app/controllers/cgminer_monitor/api/v1/graph_data_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/cgminer_monitor/api/v1/graph_data_controller.rb
+++ b/app/controllers/cgminer_monitor/api/v1/graph_data_controller.rb
@@ -13,6 +13,9 @@ module CgminerMonitor
(miner_result.first[:ghs_5s] * miner_result.first[:'pool_rejected%'] / 100).round(2) rescue 0
end.sum,
summary[:results].collect do |miner_result|
+ (miner_result.first[:ghs_5s] * miner_result.first[:'pool_stale%'] / 100).round(2) rescue 0
+ end.sum,
+ summary[:results].collect do |miner_result|
(miner_result.first[:ghs_5s] * miner_result.first[:'device_hardware%'] / 100).round(2) rescue 0
end.sum
]
@@ -52,6 +55,7 @@ module CgminerMonitor
summary[:created_at].to_i,
(miner_summary[:ghs_5s].round(2) rescue nil),
((miner_summary[:ghs_5s] * miner_summary[:'pool_rejected%'] / 100).round(2) rescue nil),
+ ((miner_summary[:ghs_5s] * miner_summary[:'pool_stale%'] / 100).round(2) rescue nil),
((miner_summary[:ghs_5s] * miner_summary[:'device_hardware%'] / 100).round(2) rescue nil)
] if summary[:results][miner_id]
end
diff --git a/lib/cgminer_monitor/version.rb b/lib/cgminer_monitor/version.rb
index <HASH>..<HASH> 100644
--- a/lib/cgminer_monitor/version.rb
+++ b/lib/cgminer_monitor/version.rb
@@ -1,3 +1,3 @@
module CgminerMonitor
- VERSION = "0.0.8"
+ VERSION = "0.0.9"
end
|
adding pool stale hashrate
|
jramos_cgminer_monitor
|
train
|
a810189a8e93527974e22667342e8fa01d782efa
|
diff --git a/pkg.go b/pkg.go
index <HASH>..<HASH> 100644
--- a/pkg.go
+++ b/pkg.go
@@ -15,6 +15,7 @@ import (
"os"
"path"
"strconv"
+ "strings"
"sync"
"text/tabwriter"
@@ -38,6 +39,8 @@ var (
meta string
releaseNotes string
saveDir string
+ bulkDir string
+ baseUrl string
}
cmdPackage = &Command{
@@ -62,6 +65,9 @@ var (
Usage: "[OPTION]...",
Description: `Create a new package for an application.`,
Run: packageCreate,
+ Subcommands: []*Command{
+ cmdPackageCreateBulk,
+ },
}
cmdPackageDelete = &Command{
Name: "package delete",
@@ -75,6 +81,12 @@ var (
Description: `Download published packages to local disk.`,
Run: packageDownload,
}
+ cmdPackageCreateBulk = &Command{
+ Name: "package create bulk",
+ Usage: "[OPTION]...",
+ Description: `Upload package from a folder output by 'package download'.`,
+ Run: packageCreateBulk,
+ }
)
func init() {
@@ -95,6 +107,13 @@ func init() {
"release-notes", "",
"File contianing release notes for package.")
+ cmdPackageCreateBulk.Flags.StringVar(&packageFlags.bulkDir,
+ "dir", "",
+ "Directory containing files to upload.")
+ cmdPackageCreateBulk.Flags.StringVar(&packageFlags.baseUrl,
+ "base-url", "",
+ "URL base packages are stored at.")
+
cmdPackageDelete.Flags.Var(&packageFlags.appId, "app-id",
"Application with package to delete.")
cmdPackageDelete.Flags.Var(&packageFlags.version, "version",
@@ -186,6 +205,65 @@ func packageCreate(args []string, service *update.Service, out *tabwriter.Writer
return OK
}
+func packageCreateBulk(args []string, service *update.Service, out *tabwriter.Writer) int {
+ bulkDir := packageFlags.bulkDir
+ if bulkDir == "" {
+ cwd, err := os.Getwd()
+ if err != nil {
+ log.Print(err)
+ return ERROR_USAGE
+ }
+ bulkDir = cwd
+ }
+
+ files, err := ioutil.ReadDir(bulkDir)
+ if err != nil {
+ log.Print(err)
+ return ERROR_USAGE
+ }
+
+ for _, file := range files {
+ if file.Mode().IsRegular() && strings.HasSuffix(file.Name(), "info.json") {
+ // Load metadata from package info.json into struct
+ pkg := new(update.Package)
+ jsonBody, err := ioutil.ReadFile(path.Join(bulkDir, file.Name()))
+ if err != nil {
+ log.Println(err)
+ continue
+ }
+
+ err = json.Unmarshal(jsonBody, &pkg)
+ if err != nil {
+ log.Println(err)
+ continue
+ }
+
+ log.Printf("Creating package with AppId=%s and Version=%s", pkg.AppId, pkg.Version)
+
+ // If --base-url specified, rewrite hosting URL
+ baseUrl := packageFlags.baseUrl
+ if baseUrl != "" {
+ filename := fmt.Sprintf(
+ "%s_%s_%s",
+ pkg.AppId, pkg.Version,
+ path.Base(pkg.Url),
+ )
+ pkg.Url = path.Join(baseUrl, filename)
+ }
+
+ // Add package
+ call := service.App.Package.Insert(pkg.AppId, pkg.Version, pkg)
+ pkg, err = call.Do()
+
+ if err != nil {
+ log.Println(err)
+ continue
+ }
+ }
+ }
+ return OK
+}
+
func packageList(args []string, service *update.Service, out *tabwriter.Writer) int {
if packageFlags.appId.Get() == nil {
return ERROR_USAGE
|
package: add 'package create bulk' command
Allows the user to take the output of the `package download`
command and create packages from the directory's
`*info.json` files.
|
coreos_updateservicectl
|
train
|
7c4fd65eed9450dad74115926a20c5626642b9fc
|
diff --git a/src/main/java/com/googlecode/ipv6/IPv6Address.java b/src/main/java/com/googlecode/ipv6/IPv6Address.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/googlecode/ipv6/IPv6Address.java
+++ b/src/main/java/com/googlecode/ipv6/IPv6Address.java
@@ -33,16 +33,23 @@ public final class IPv6Address implements Comparable<IPv6Address>
private final long lowBits;
+ IPv6Address(long highBits, long lowBits)
+ {
+ this.highBits = highBits;
+ this.lowBits = lowBits;
+ }
+
/**
- * Construct an IPv6Address from two longs representing the 64 highest and 64 lowest bits.
+ * Construct an IPv6Address from two longs representing the 64 highest and 64 lowest bits. It is usually easier to construct
+ * IPv6Addresses from a {@link String} or an {@link java.net.InetAddress}. The internal representation of an IPv6Address is exactly
+ * these two longs though, so if you already happen to have them, this provides a very efficient way to construct an IPv6Address.
*
* @param highBits highest order bits
* @param lowBits lowest order bits
*/
- IPv6Address(long highBits, long lowBits)
+ public static IPv6Address fromLongs(long highBits, long lowBits)
{
- this.highBits = highBits;
- this.lowBits = lowBits;
+ return new IPv6Address(highBits, lowBits);
}
/**
|
Added factory to construct an IPv6Address from two longs.
|
janvanbesien_java-ipv6
|
train
|
e628bb5668bb969b58feca4a63d6785370e46ff5
|
diff --git a/ruuvitag_sensor/data_formats.py b/ruuvitag_sensor/data_formats.py
index <HASH>..<HASH> 100644
--- a/ruuvitag_sensor/data_formats.py
+++ b/ruuvitag_sensor/data_formats.py
@@ -1,3 +1,28 @@
+import logging
+
+log = logging.getLogger(__name__)
+
+
+def _dechunk(raw):
+ """
+ Given a BLE advertisement in hex format, interpret the first
+ byte as a length byte, return the data indicated by the length
+ byte, and the remainder of the data in a tuple.
+
+ The length byte itself is not included in the length.
+
+ If the length indicated is longer than the data, raise a ValueError
+ """
+ if len(raw) < 2:
+ raise ValueError("Data too short")
+
+ dlen = int(raw[:2], 16)
+ if (dlen + 1) * 2 > len(raw):
+ raise ValueError("Cannot read %d bytes, data too short: %s" % (dlen, raw))
+
+ return raw[2:(dlen * 2) + 2], raw[(dlen * 2) + 2:]
+
+
class DataFormats(object):
"""
RuuviTag broadcasted raw data handling for each data format
@@ -8,25 +33,75 @@ class DataFormats(object):
"""
Validate that data is from RuuviTag and get correct data part.
+ There is a special case where this function will return
+ None for the data format, and '' for the data. This indicates
+ that we just heard an advertisement from a Ruuvi tag that
+ doesn't contain any data, but was sent for discovery purposes
+ (firmware 3.x does this).
+
Returns:
tuple (int, string): Data Format type and Sensor data
"""
- data = DataFormats._get_data_format_3(DataFormats._parse_raw(raw, 3))
-
- if data is not None:
- return (3, data)
+ log.debug("Parsing advertisement data: %s", raw)
- data = DataFormats._get_data_format_5(DataFormats._parse_raw(raw, 5))
-
- if data is not None:
- return (5, data)
-
- # TODO: Check from raw data correct data format
- # Now this returns 2 also for Data Format 4
- data = DataFormats._get_data_format_2and4(DataFormats._parse_raw(raw, 2))
-
- if data is not None:
- return (2, data)
+ try:
+ # The data starts with a length byte, covering the data
+ # length, minus the length byte itself. There might be additional
+ # data at the end (an RSSI value) which we're ignoring
+ data, _ = _dechunk(raw)
+
+ # The remaining data is a list of length:type:data chunks.
+ # We look for a chunk with vendor specific data (type 0xff),
+ # used by formats 3 and 5, or a chunk with service data (type 0x16)
+ # used by formats 2 and 4.
+ #
+ # Firmware 3.x also sends advertisements that contain chunks
+ # of type 0x09, followed by 'Ruuvi', in ASCII encoding.
+ candidate = None
+ while data != '':
+ cdata, data = _dechunk(data)
+
+ ctype = cdata[:2]
+ log.debug("Found chunk of type %s: %s", ctype, cdata)
+
+ # See if we found a potential candidate. Break
+ # the loop
+ if ctype in ('FF', '16', '09'):
+ candidate = cdata
+ break
+ except Exception:
+ log.exception("Invalid advertisement data: %s", raw)
+ return (None, None)
+
+ if candidate is None:
+ log.debug("No candidate found")
+ return (None, None)
+
+ log.debug("Found candidate %s", candidate)
+
+ # Ruuvi advertisements start with FF9904 (for format 3 and 5),
+ # or 16AAFE (for format 2 and 4).
+ if candidate.startswith("FF990403"):
+ return (3, candidate[6:])
+
+ elif candidate.startswith("FF990405"):
+ return (5, candidate[6:])
+
+ elif candidate.startswith("16AAFE"):
+ # TODO: Check from raw data correct data format
+ # Now this returns 2 also for Data Format 4
+ data = DataFormats._get_data_format_2and4(DataFormats._parse_raw(raw, 2))
+
+ if data is not None:
+ return (2, data)
+
+ elif candidate.startswith("095275757669"):
+ # This is a Ruuvitag, but this advertisement does not
+ # contain any data.
+ #
+ # Set the format to None, and data to '', this allows the
+ # caller to determine that we did indeed see a Ruuvitag.
+ return (None, '')
return (None, None)
diff --git a/ruuvitag_sensor/ruuvi.py b/ruuvitag_sensor/ruuvi.py
index <HASH>..<HASH> 100644
--- a/ruuvitag_sensor/ruuvi.py
+++ b/ruuvitag_sensor/ruuvi.py
@@ -162,12 +162,18 @@ class RuuviTagSensor(object):
break
# Check MAC whitelist if advertised MAC available
if ble_data[0] and macs and not ble_data[0] in macs:
+ log.debug('MAC not whitelisted: %s', ble_data[0])
continue
(data_format, data) = DataFormats.convert_data(ble_data[1])
# Check that encoded data is valid RuuviTag data and it is sensor data
# If data is not valid RuuviTag data add MAC to blacklist if MAC is available
if data is not None:
+ if data_format is None:
+ # Whatever we heard was from a Ruuvitag, but did not contain
+ # any measurements. Ignore this.
+ continue
+
decoded = get_decoder(data_format).decode_data(data)
if decoded is not None:
# If advertised MAC is missing, try to parse it from the payload
@@ -175,10 +181,12 @@ class RuuviTagSensor(object):
parse_mac(data_format, decoded['mac']) if decoded['mac'] else None
# Check whitelist using MAC from decoded data if advertised MAC is not available
if mac and macs and mac not in macs:
+ log.debug('MAC not whitelisted: %s', ble_data[0])
continue
yield (mac, decoded)
else:
log.error('Decoded data is null. MAC: %s - Raw: %s', ble_data[0], ble_data[1])
else:
if ble_data[0]:
+ log.debug("Blacklisting MAC %s", ble_data[0])
mac_blacklist.append(ble_data[0])
|
ruuvitag_sensor.data_formats: Rework parsing
This reworks the data parsing in ruuvitag_sensor.data_formats to more
closely follow the structure defined for BLE advertisements. It checks
for types that we're interested in for the different firmware versions,
and returns the appropriate data.
This will give us more confidence that the data we found is actually
good.
It also allows for dealing with non-measurement advertisements sent from
tags with firmware 3.x.
|
ttu_ruuvitag-sensor
|
train
|
801649e1efec04e439ad79262200b0be7fc6f55b
|
diff --git a/app/controllers/katello/api/v2/repositories_controller.rb b/app/controllers/katello/api/v2/repositories_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/katello/api/v2/repositories_controller.rb
+++ b/app/controllers/katello/api/v2/repositories_controller.rb
@@ -35,9 +35,9 @@ module Katello
def_param_group :repo do
param :url, String, :desc => N_("repository source url")
param :gpg_key_id, :number, :desc => N_("id of the gpg key that will be assigned to the new repository")
- param :ssl_ca_cert_id, :number, :desc => N_("Idenifier of the SSL CA Cert")
- param :ssl_client_cert_id, :number, :desc => N_("Identifier of the SSL Client Cert")
- param :ssl_client_key_id, :number, :desc => N_("Identifier of the SSL Client Key")
+ param :ssl_ca_cert_id, :number, :desc => N_("Identifier of the content credential containing the SSL CA Cert")
+ param :ssl_client_cert_id, :number, :desc => N_("Identifier of the content credential containing the SSL Client Cert")
+ param :ssl_client_key_id, :number, :desc => N_("Identifier of the content credential containing the SSL Client Key")
param :unprotected, :bool, :desc => N_("true if this repository can be published via HTTP")
param :checksum_type, String, :desc => N_("Checksum of the repository, currently 'sha1' & 'sha256' are supported")
param :docker_upstream_name, String, :desc => N_("Name of the upstream docker repository")
|
Refs #<I> - Update API docs for repo create
It isn't very clear that the IDs for the ssl-*-id params are content credentials; this commit adds some clarification and fixes a typo as well
|
Katello_katello
|
train
|
dccdeff2397a2dbcbfb4c7df0ae904427e8b5ab7
|
diff --git a/openquake/baselib/hdf5.py b/openquake/baselib/hdf5.py
index <HASH>..<HASH> 100644
--- a/openquake/baselib/hdf5.py
+++ b/openquake/baselib/hdf5.py
@@ -310,8 +310,7 @@ class File(h5py.File):
totlen += len(val)
length = len(dset)
dset.resize((length + len(data),) + shape[1:])
- for i, arr in enumerate(data):
- dset[length + i] = arr
+ dset[length:length + len(data)] = data
dset.attrs['nbytes'] = nbytes
dset.attrs['totlen'] = totlen
|
Optimized save_vlen
|
gem_oq-engine
|
train
|
9689f96e9b7f00a1a3830fb063e84ab16ce04631
|
diff --git a/nunaliit2-js/src/main/js/nunaliit2/n2.canvasCustomSVG.js b/nunaliit2-js/src/main/js/nunaliit2/n2.canvasCustomSVG.js
index <HASH>..<HASH> 100644
--- a/nunaliit2-js/src/main/js/nunaliit2/n2.canvasCustomSVG.js
+++ b/nunaliit2-js/src/main/js/nunaliit2/n2.canvasCustomSVG.js
@@ -66,6 +66,7 @@ var CustomSvgCanvas = $n2.Class({
,svgAttachment: null
,cssAttachment: null
,elemIdToDocId: null
+ ,unselectIds: null
,onSuccess: function(){}
,onError: function(err){}
},opts_);
@@ -99,6 +100,20 @@ var CustomSvgCanvas = $n2.Class({
};
};
+ // Add information about "unselect"
+ if( $n2.isArray(opts.unselectIds) ){
+ for(var i=0,e=opts.unselectIds.length; i<e; ++i){
+ var elemId = opts.unselectIds[i];
+
+ var node = {
+ nodeId: elemId
+ ,unselect: true
+ };
+
+ this.nodesById[elemId] = node;
+ };
+ };
+
// Create intent view to keep track of user requests
if( this.dispatchService ){
this.intentView = new $n2.userIntentView.IntentView({
@@ -231,20 +246,27 @@ var CustomSvgCanvas = $n2.Class({
// were specified using elemIdToDocId option
for(var nodeId in this.nodesById){
var node = this.nodesById[nodeId];
- var docId = node.n2_id;
- $d.select('#'+nodeId)
- .attr('n2-doc-id', docId)
- .on('mouseover',function(d,i){
- _this._mouseOver($d.select(this),$d.event);
- })
- .on('mouseout',function(d,i){
- _this._mouseOut($d.select(this),$d.event);
- })
- .on('click',function(d,i){
- _this._mouseClick($d.select(this),$d.event);
- })
- ;
+ if( node.n2_id ){
+ var docId = node.n2_id;
+ $d.select('#'+nodeId)
+ .attr('n2-doc-id', docId)
+ .on('mouseover',function(d,i){
+ _this._mouseOver($d.select(this),$d.event);
+ })
+ .on('mouseout',function(d,i){
+ _this._mouseOut($d.select(this),$d.event);
+ })
+ .on('click',function(d,i){
+ _this._mouseClick($d.select(this),$d.event);
+ })
+ ;
+ } else if( node.unselect ) {
+ $d.select('#'+nodeId)
+ .on('click',function(d,i){
+ _this._mouseUnselect($d.select(this),$d.event);
+ });
+ };
};
|
nunaliit-js: Add provisions to custom SVG for specifying the elements
used in un-select.
Issue #<I>
|
GCRC_nunaliit
|
train
|
7ef0964b77b4ee6fa03b9735695c598b28e65582
|
diff --git a/polyaxon/conf/options/mount_paths.py b/polyaxon/conf/options/mount_paths.py
index <HASH>..<HASH> 100644
--- a/polyaxon/conf/options/mount_paths.py
+++ b/polyaxon/conf/options/mount_paths.py
@@ -3,4 +3,4 @@ import conf
from options.registry import mount_paths
conf.subscribe(mount_paths.MountPathsNvidia)
-conf.subscribe(mount_paths.Option)
+conf.subscribe(mount_paths.DirsNvidia)
|
Subscribe to nvidia mount paths option
|
polyaxon_polyaxon
|
train
|
ea5390ad4a17ccd05a255b2ccc50b24302fd1e0e
|
diff --git a/estnltk/converters/layer_dict_converter.py b/estnltk/converters/layer_dict_converter.py
index <HASH>..<HASH> 100644
--- a/estnltk/converters/layer_dict_converter.py
+++ b/estnltk/converters/layer_dict_converter.py
@@ -1,10 +1,11 @@
-from estnltk.converters import serialisation_modules
+from estnltk.converters.serialisation_modules import default
+from estnltk.converters.serialisation_modules import legacy_v0
from .serialisation_modules.serialisation_map import layer_converter_collection
def layer_to_dict(layer):
if layer.serialisation_module is None:
- return serialisation_modules.default.layer_to_dict(layer)
+ return default.layer_to_dict(layer)
if layer.serialisation_module in layer_converter_collection:
return layer_converter_collection[layer.serialisation_module].layer_to_dict(layer)
@@ -21,8 +22,8 @@ def dict_to_layer(layer_dict: dict, text_object=None, serialisation_module=None)
# check for legacy format
# TODO: to be removed by rewriting tests
if 'meta' not in layer_dict:
- return serialisation_modules.legacy_v0.dict_to_layer(layer_dict, text_object)
- return serialisation_modules.default.dict_to_layer(layer_dict, text_object)
+ return legacy_v0.dict_to_layer(layer_dict, text_object)
+ return default.dict_to_layer(layer_dict, text_object)
if serialisation_module in layer_converter_collection:
return layer_converter_collection[serialisation_module].dict_to_layer(layer_dict, text_object)
|
Fix in layer_dict_converter.py: better import handling
|
estnltk_estnltk
|
train
|
5e785b96157f09cd841f00ed4a0359f913c0997a
|
diff --git a/airtest/core/win/win.py b/airtest/core/win/win.py
index <HASH>..<HASH> 100644
--- a/airtest/core/win/win.py
+++ b/airtest/core/win/win.py
@@ -2,8 +2,7 @@
from airtest import aircv
from airtest.core.device import Device
from pywinauto.application import Application
-from pywinauto import Desktop
-from pywinauto.win32functions import SetForegroundWindow, ShowWindow, MoveWindow #, SetProcessDPIAware
+from pywinauto.win32functions import SetForegroundWindow # ,SetProcessDPIAware
from pywinauto import mouse, keyboard
from functools import wraps
from .screen import screenshot
diff --git a/docs/conf.py b/docs/conf.py
index <HASH>..<HASH> 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -39,6 +39,21 @@ import airtest
from recommonmark.parser import CommonMarkParser
+import mock
+
+
+if not sys.platform.startswith('win'):
+ for mod_name in [
+ 'win32api',
+ 'win32con',
+ 'win32gui',
+ 'win32ui',
+ 'pywinauto',
+ 'pywinauto.application',
+ 'pywinauto.win32functions'
+ ]:
+
+ sys.modules[mod_name] = mock.MagicMock()
extensions = ['sphinx.ext.autodoc',
diff --git a/docs/install.sh b/docs/install.sh
index <HASH>..<HASH> 100644
--- a/docs/install.sh
+++ b/docs/install.sh
@@ -1,3 +1,3 @@
pip install sphinx
pip install recommonmark
-pip install sphinx_rtd_theme
\ No newline at end of file
+pip install sphinx_rtd_theme
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -10,18 +10,28 @@ setup(
version='1.0.0',
author='Netease Games',
author_email='gzliuxin@corp.netease.com',
- description='Automated test framework for android/iOS/Windows',
- long_description='Automated test framework for android/iOS/Windows, present by NetEase Games',
+ description='UI Test Automation Framework for Games and Apps',
+ long_description='UI Test Automation Framework for Games and Apps, present by NetEase Games',
url='https://github.com/AirtestProject/Airtest',
license='Apache License 2.0',
- keywords=['automation', 'test', 'android', 'opencv'],
- packages=find_packages(exclude=['cover', 'examples', 'tests', 'dist', 'new_test']),
+ keywords=['game', 'automation', 'test', 'android', 'windows', 'opencv'],
+ packages=find_packages(exclude=['cover', 'examples', 'tests', 'dist']),
package_data={
'android_deps': ["*.apk", "airtest/core/android/static"],
'html_statics': ["airtest/report"]
},
include_package_data=True,
install_requires=reqs,
+ extras_require={
+ 'tests': [
+ 'nose',
+ ],
+ 'docs': [
+ 'sphinx',
+ 'recommonmark',
+ 'sphinx_rtd_theme',
+ 'mock',
+ ]},
classifiers=[
'Programming Language :: Python :: 2.7',
],
|
add mock in docs for building win module on linux/mac
|
AirtestProject_Airtest
|
train
|
9eb04a1e7d19eadce850b5f26bec870cef10074f
|
diff --git a/lib/chessmonger/armory.rb b/lib/chessmonger/armory.rb
index <HASH>..<HASH> 100644
--- a/lib/chessmonger/armory.rb
+++ b/lib/chessmonger/armory.rb
@@ -22,9 +22,13 @@ module Chessmonger
@behaviors.keys
end
- def train name, game, player = nil
- behavior = @behaviors[name].create game
- Chessmonger::Piece.new behavior, player
+ def train name, game, player
+ piece = Chessmonger::Piece.new
+ behavior = @behaviors[name].create game, piece
+ piece.tap do |p|
+ p.player = player
+ p.behavior = behavior
+ end
end
end
diff --git a/lib/chessmonger/piece.rb b/lib/chessmonger/piece.rb
index <HASH>..<HASH> 100644
--- a/lib/chessmonger/piece.rb
+++ b/lib/chessmonger/piece.rb
@@ -2,18 +2,14 @@
module Chessmonger
class Piece
- attr_accessor :player
+ attr_accessor :behavior, :player
- def initialize behavior, player = nil
- @behavior, @player = behavior, player
+ def each_action origin, &block
+ @behavior.each_action origin, &block
end
- def each_action game, origin, &block
- @behavior.each_action game, self, origin, &block
- end
-
- def can_attack? game, origin, target
- @behavior.can_attack? game, self, origin, target
+ def can_attack? origin, target
+ @behavior.can_attack? origin, target
end
end
end
diff --git a/spec/armory_spec.rb b/spec/armory_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/armory_spec.rb
+++ b/spec/armory_spec.rb
@@ -45,7 +45,7 @@ describe 'Armory' do
it "should train pieces with a new instance of the behavior specified by name" do
- piece = double
+ piece = double :player= => nil, :behavior= => nil
Chessmonger::Piece.stub(:new).and_return piece
behavior = double
behavior_factory = double :create => behavior
@@ -54,8 +54,10 @@ describe 'Armory' do
Chessmonger::Armory.instance.register 'aBehavior', behavior_factory
- behavior_factory.should_receive(:create).with game
- Chessmonger::Piece.should_receive(:new).with behavior, player
+ behavior_factory.should_receive(:create).with game, piece
+ Chessmonger::Piece.should_receive :new
+ piece.should_receive(:player=).with player
+ piece.should_receive(:behavior=).with behavior
result = Chessmonger::Armory.instance.train 'aBehavior', game, player
result.should be(piece)
diff --git a/spec/piece_spec.rb b/spec/piece_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/piece_spec.rb
+++ b/spec/piece_spec.rb
@@ -1,23 +1,8 @@
describe 'Piece' do
- before :each do
-
- @player = double :name => 'John Doe'
- @behavior = double :each_action => nil, :can_attack? => false
- end
-
- it "should be initializable with a behavior" do
- lambda{ Chessmonger::Piece.new @behavior }.should_not raise_error
- end
-
- it "should be initializable with a behavior and a player" do
- lambda{ Chessmonger::Piece.new @behavior, @player }.should_not raise_error
- end
-
- it "should have the specified player" do
- piece = Chessmonger::Piece.new @behavior, @player
- piece.player.should be(@player)
+ it "should be initializable with nothing" do
+ lambda{ Chessmonger::Piece.new }.should_not raise_error
end
describe 'when used' do
@@ -25,27 +10,35 @@ describe 'Piece' do
before :each do
@game = double
+ @player = double :name => 'John Doe'
+ @behavior = double :each_action => nil, :can_attack? => false
@origin = double
- @piece = Chessmonger::Piece.new @behavior, @player
+ @piece = Chessmonger::Piece.new
+ end
+
+ it "should allow its behavior to be set" do
+ @piece.behavior = @behavior
+ @piece.behavior.should be(@behavior)
end
- it "should allow its player to be changed" do
- other = double :name => 'Jane Doe'
- @piece.player = other
- @piece.player.should be(other)
+ it "should allow its player to be set" do
+ @piece.player = @player
+ @piece.player.should be(@player)
end
it "should ask its behavior to iterate over the possible actions" do
+ @piece.behavior = @behavior
block = lambda{}
- @behavior.should_receive(:each_action).with @game, @piece, @origin, &block
- @piece.each_action @game, @origin, &block
+ @behavior.should_receive(:each_action).with @origin, &block
+ @piece.each_action @origin, &block
end
it "should ask its behavior whether it can attack the specified target" do
+ @piece.behavior = @behavior
target = double
- @behavior.should_receive(:can_attack?).with @game, @piece, @origin, target
- @piece.can_attack? @game, @origin, target
+ @behavior.should_receive(:can_attack?).with @origin, target
+ @piece.can_attack? @origin, target
end
end
end
|
Simplified piece and behavior method arguments.
|
AlphaHydrae_chessmonger
|
train
|
0da9e167d4cdda6f41d0b0e2fc72c5eeefd7aafc
|
diff --git a/Scripts/typo3cms.php b/Scripts/typo3cms.php
index <HASH>..<HASH> 100644
--- a/Scripts/typo3cms.php
+++ b/Scripts/typo3cms.php
@@ -17,7 +17,7 @@ call_user_func(function () {
if (getenv('TYPO3_PATH_WEB')) {
// In case we are symlinked (like for travis tests),
// we need to accept the location from the outside to find the autoload.php
- $typo3Root = getenv('TYPO3_PATH_WEB');
+ $typo3Root = rtrim(getenv('TYPO3_PATH_WEB'), '/\\');
} else {
// Not symlinked (hopefully), so we can assume the docroot from the location of this file
$typo3Root = dirname(dirname(dirname(dirname(__DIR__))));
@@ -30,9 +30,11 @@ call_user_func(function () {
putenv('TYPO3_PATH_WEB=' . $typo3Root);
}
- define('PATH_site', strtr(getenv('TYPO3_PATH_WEB'), '\\', '/') . '/');
+ define('PATH_site', str_replace('\\', '/', getenv('TYPO3_PATH_WEB')) . '/');
define('PATH_thisScript', realpath(PATH_site . 'typo3/cli_dispatch.phpsh'));
+ // This require is needed so that the console works in non composer mode,
+ // where requiring the main autoload.php is not enough to load extension classes
require __DIR__ . '/../Classes/Core/ConsoleBootstrap.php';
$bootstrap = new \Helhum\Typo3Console\Core\ConsoleBootstrap(getenv('TYPO3_CONTEXT') ?: 'Production');
$bootstrap->run($classLoader);
|
[CLEANUP] Harden entry script
|
TYPO3-Console_TYPO3-Console
|
train
|
7145a2c8803856be67b731c953ea527c65e32912
|
diff --git a/lib/httpful.php b/lib/httpful.php
index <HASH>..<HASH> 100644
--- a/lib/httpful.php
+++ b/lib/httpful.php
@@ -118,7 +118,8 @@ class Http {
class Response {
public $body, $raw_body, $headers, $request,
- $code = 0, $content_type, $charset;
+ $code = 0, $content_type, $charset,
+ $use_detect_payload = true;
/**
* @param string $body
@@ -448,6 +449,15 @@ class Request {
public function withStrictSSL() { return $this->strictSSL(true); }
/**
+ * Automatically serialize the payload
+ * @return Request $this
+ * @param bool $serialize
+ */
+ public function serializePayload($serialize = true) {
+ $this->use_detect_payload = $serialize;
+ }
+
+ /**
* Add an additional header to the request
* Can also use the cleaner syntax of
* $Request->withMyHeaderName($my_value); See the
@@ -662,8 +672,10 @@ class Request {
}
curl_setopt($ch, CURLOPT_HTTPHEADER, $headers);
- if (isset($this->payload))
- curl_setopt($ch, CURLOPT_POSTFIELDS, $this->_detectPayload($this->payload));
+ if (isset($this->payload)) {
+ $payload = $this->use_detect_payload ? $this->_detectPayload($this->payload) : $this->payload;
+ curl_setopt($ch, CURLOPT_POSTFIELDS, $payload);
+ }
if ($this->_debug) {
curl_setopt($ch, CURLOPT_VERBOSE, true);
|
don't assume the user wants us to serialize their payload
|
nategood_httpful
|
train
|
adef2638f41b4f712bd61d86ac04af51668fe275
|
diff --git a/lib/promise.js b/lib/promise.js
index <HASH>..<HASH> 100644
--- a/lib/promise.js
+++ b/lib/promise.js
@@ -31,7 +31,11 @@ const path = require("path");
const _split = (self, rest) => {
const _ = require("..");
- return _.flatten(rest.map(key => key.split(",")))
+ return _.flatten(
+ rest
+ .filter(key => _.is.String(key))
+ .map(key => key.split(","))
+ )
.filter(key => key)
.map(key => key.split(":"))
.map(parts => parts.length === 1 ? {
|
gracefully handle null in end()
|
dpjanes_iotdb-helpers
|
train
|
8387d59ad1897fca1e692916f76ee8ac9f6356b1
|
diff --git a/lxd/response/swagger.go b/lxd/response/swagger.go
index <HASH>..<HASH> 100644
--- a/lxd/response/swagger.go
+++ b/lxd/response/swagger.go
@@ -1,3 +1,6 @@
+// Package response contains helpers for rendering LXD HTTP responses.
+//
+//nolint:deadcode
package response
import (
|
lxd/response: Adds nolint directive for deadcode.
The swagger definitions contained in this file are intentionally unused
so must be ignored by deadcode. The directive must be placed above
"package" to ignore the whole file (this does not ignore the whole
package). To satisfy another linter we must also format the package
comment correctly.
|
lxc_lxd
|
train
|
2b8d0c4785e855226b0c03b0145c878a5cafe5d3
|
diff --git a/api/acl_test.go b/api/acl_test.go
index <HASH>..<HASH> 100644
--- a/api/acl_test.go
+++ b/api/acl_test.go
@@ -1,26 +1,14 @@
package api
import (
- "os"
"testing"
)
-// ROOT is a management token for the tests
-var CONSUL_ROOT string
-
-func init() {
- CONSUL_ROOT = os.Getenv("CONSUL_ROOT")
-}
-
func TestACL_CreateDestroy(t *testing.T) {
t.Parallel()
- if CONSUL_ROOT == "" {
- t.SkipNow()
- }
- c, s := makeClient(t)
+ c, s := makeACLClient(t)
defer s.Stop()
- c.config.Token = CONSUL_ROOT
acl := c.ACL()
ae := ACLEntry{
@@ -63,16 +51,12 @@ func TestACL_CreateDestroy(t *testing.T) {
func TestACL_CloneDestroy(t *testing.T) {
t.Parallel()
- if CONSUL_ROOT == "" {
- t.SkipNow()
- }
- c, s := makeClient(t)
+ c, s := makeACLClient(t)
defer s.Stop()
- c.config.Token = CONSUL_ROOT
acl := c.ACL()
- id, wm, err := acl.Clone(CONSUL_ROOT, nil)
+ id, wm, err := acl.Clone(c.config.Token, nil)
if err != nil {
t.Fatalf("err: %v", err)
}
@@ -97,16 +81,12 @@ func TestACL_CloneDestroy(t *testing.T) {
func TestACL_Info(t *testing.T) {
t.Parallel()
- if CONSUL_ROOT == "" {
- t.SkipNow()
- }
- c, s := makeClient(t)
+ c, s := makeACLClient(t)
defer s.Stop()
- c.config.Token = CONSUL_ROOT
acl := c.ACL()
- ae, qm, err := acl.Info(CONSUL_ROOT, nil)
+ ae, qm, err := acl.Info(c.config.Token, nil)
if err != nil {
t.Fatalf("err: %v", err)
}
@@ -118,20 +98,16 @@ func TestACL_Info(t *testing.T) {
t.Fatalf("bad: %v", qm)
}
- if ae == nil || ae.ID != CONSUL_ROOT || ae.Type != ACLManagementType {
+ if ae == nil || ae.ID != c.config.Token || ae.Type != ACLManagementType {
t.Fatalf("bad: %#v", ae)
}
}
func TestACL_List(t *testing.T) {
t.Parallel()
- if CONSUL_ROOT == "" {
- t.SkipNow()
- }
- c, s := makeClient(t)
+ c, s := makeACLClient(t)
defer s.Stop()
- c.config.Token = CONSUL_ROOT
acl := c.ACL()
acls, qm, err := acl.List(nil)
diff --git a/api/api_test.go b/api/api_test.go
index <HASH>..<HASH> 100644
--- a/api/api_test.go
+++ b/api/api_test.go
@@ -20,6 +20,16 @@ func makeClient(t *testing.T) (*Client, *testutil.TestServer) {
return makeClientWithConfig(t, nil, nil)
}
+func makeACLClient(t *testing.T) (*Client, *testutil.TestServer) {
+ return makeClientWithConfig(t, func(clientConfig *Config) {
+ clientConfig.Token = "root"
+ }, func(serverConfig *testutil.TestServerConfig) {
+ serverConfig.ACLMasterToken = "root"
+ serverConfig.ACLDatacenter = "dc1"
+ serverConfig.ACLDefaultPolicy = "deny"
+ })
+}
+
func makeClientWithConfig(
t *testing.T,
cb1 configCallback,
|
api: run ACL tests by default
|
hashicorp_consul
|
train
|
6b6d63efa7dd1b7d0c6d0d670005e963ae24bc7e
|
diff --git a/lib/validator.js b/lib/validator.js
index <HASH>..<HASH> 100644
--- a/lib/validator.js
+++ b/lib/validator.js
@@ -69,6 +69,16 @@ var validators = {
},
notRegex: function(str, pattern, modifiers) {
return !this.regex(str, pattern, modifiers);
+ },
+ isUUID: function(str, version) {
+ if (version == 3 || version == 'v3') {
+ pattern = /[0-9A-F]{8}-[0-9A-F]{4}-3[0-9A-F]{3}-[0-9A-F]{4}-[0-9A-F]{12}$/i;
+ } else if (version == 4 || version == 'v4') {
+ pattern = /[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i;
+ } else {
+ pattern = /[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}$/i;
+ }
+ return str.match(pattern);
}
};
@@ -271,14 +281,7 @@ Validator.prototype.len = function(min, max) {
//Thanks to github.com/sreuter for the idea.
Validator.prototype.isUUID = function(version) {
- if (version == 3 || version == 'v3') {
- pattern = /[0-9A-F]{8}-[0-9A-F]{4}-3[0-9A-F]{3}-[0-9A-F]{4}-[0-9A-F]{12}$/i;
- } else if (version == 4 || version == 'v4') {
- pattern = /[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i;
- } else {
- pattern = /[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}$/i;
- }
- if (!this.str.match(pattern)) {
+ if (!validators.isUUID(this.str, version)) {
return this.error(this.msg || 'Not a UUID');
}
return this;
|
refactored isUUID
|
chriso_validator.js
|
train
|
c033b4293e13f9b75a09b827a4d276363f368930
|
diff --git a/lib/puppet/node.rb b/lib/puppet/node.rb
index <HASH>..<HASH> 100644
--- a/lib/puppet/node.rb
+++ b/lib/puppet/node.rb
@@ -39,7 +39,7 @@ class Puppet::Node
def to_data_hash
result = {
'name' => name,
- 'environment' => environment.name,
+ 'environment' => environment.name.to_s,
}
result['classes'] = classes unless classes.empty?
result['parameters'] = parameters unless parameters.empty?
diff --git a/lib/puppet/resource.rb b/lib/puppet/resource.rb
index <HASH>..<HASH> 100644
--- a/lib/puppet/resource.rb
+++ b/lib/puppet/resource.rb
@@ -105,14 +105,15 @@ class Puppet::Resource
self.to_hash.each_pair do |param, value|
# Don't duplicate the title as the namevar
unless param == namevar && value == title
+ name = param.to_s
value = Puppet::Resource.value_to_json_data(value)
if is_json_type?(value)
- params[param] = value
+ params[name] = value
elsif !rich_data_enabled
Puppet.warning(_("Resource '%{resource}' contains a %{klass} value. It will be converted to the String '%{value}'") % { resource: to_s, klass: value.class.name, value: value })
- params[param] = value
+ params[name] = value
else
- ext_params[param] = value
+ ext_params[name] = value
end
end
end
diff --git a/lib/puppet/transaction/event.rb b/lib/puppet/transaction/event.rb
index <HASH>..<HASH> 100644
--- a/lib/puppet/transaction/event.rb
+++ b/lib/puppet/transaction/event.rb
@@ -57,7 +57,7 @@ class Puppet::Transaction::Event
'desired_value' => @desired_value,
'historical_value' => @historical_value,
'message' => @message,
- 'name' => @name,
+ 'name' => @name.nil? ? nil : @name.to_s,
'status' => @status,
'time' => @time.iso8601(9),
'redacted' => @redacted,
diff --git a/lib/puppet/util/log.rb b/lib/puppet/util/log.rb
index <HASH>..<HASH> 100644
--- a/lib/puppet/util/log.rb
+++ b/lib/puppet/util/log.rb
@@ -326,7 +326,7 @@ class Puppet::Util::Log
def to_data_hash
{
- 'level' => @level,
+ 'level' => @level.to_s,
'message' => to_s,
'source' => @source,
'tags' => @tags.to_a,
diff --git a/spec/unit/util/log/destinations_spec.rb b/spec/unit/util/log/destinations_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/util/log/destinations_spec.rb
+++ b/spec/unit/util/log/destinations_spec.rb
@@ -133,7 +133,7 @@ describe Puppet::Util::Log.desttypes[:logstash_event] do
dest = described_class.new
result = dest.format(@msg)
expect(result["version"]).to eq(1)
- expect(result["level"]).to eq(:info)
+ expect(result["level"]).to eq('info')
expect(result["message"]).to eq("So long, and thanks for all the fish.")
expect(result["source"]).to eq("a dolphin")
# timestamp should be within 10 seconds
|
(PUP-<I>) Avoid symbols when serializing
This commit ensures that the result of doing a to_data_hash does not
contain symbols. Although symbols work in JSON (they are automatically
converted to strings), they generate unnecessary overhead when used with
YAML. There, an extra tag is added for each Symbol.
|
puppetlabs_puppet
|
train
|
f1d59ca1eb98b5507382b13cb05b14ebb2f89bcb
|
diff --git a/lib/veritas/relation/operation/limit.rb b/lib/veritas/relation/operation/limit.rb
index <HASH>..<HASH> 100644
--- a/lib/veritas/relation/operation/limit.rb
+++ b/lib/veritas/relation/operation/limit.rb
@@ -71,7 +71,7 @@ module Veritas
#
# @api private
def self.assert_valid_limit(limit)
- if limit < 0
+ if limit.nil? || limit < 0
raise InvalidLimitError, "limit must be greater than or equal to 0, but was #{limit.inspect}"
end
end
diff --git a/lib/veritas/relation/operation/offset.rb b/lib/veritas/relation/operation/offset.rb
index <HASH>..<HASH> 100644
--- a/lib/veritas/relation/operation/offset.rb
+++ b/lib/veritas/relation/operation/offset.rb
@@ -71,7 +71,7 @@ module Veritas
#
# @api private
def self.assert_valid_offset(offset)
- if offset < 0
+ if offset.nil? || offset < 0
raise InvalidOffsetError, "offset must be greater than or equal to 0, but was #{offset.inspect}"
end
end
diff --git a/spec/unit/veritas/relation/operation/limit/class_methods/new_spec.rb b/spec/unit/veritas/relation/operation/limit/class_methods/new_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/veritas/relation/operation/limit/class_methods/new_spec.rb
+++ b/spec/unit/veritas/relation/operation/limit/class_methods/new_spec.rb
@@ -35,4 +35,11 @@ describe Relation::Operation::Limit, '.new' do
specify { expect { subject }.to raise_error(InvalidLimitError, 'limit must be greater than or equal to 0, but was -1') }
end
+
+ context 'with a nil limit' do
+ let(:relation) { original_relation.order { |r| r[:id] } }
+ let(:limit) { nil }
+
+ specify { expect { subject }.to raise_error(InvalidLimitError, 'limit must be greater than or equal to 0, but was nil') }
+ end
end
diff --git a/spec/unit/veritas/relation/operation/offset/class_methods/new_spec.rb b/spec/unit/veritas/relation/operation/offset/class_methods/new_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/veritas/relation/operation/offset/class_methods/new_spec.rb
+++ b/spec/unit/veritas/relation/operation/offset/class_methods/new_spec.rb
@@ -35,4 +35,11 @@ describe Relation::Operation::Offset, '.new' do
specify { expect { subject }.to raise_error(InvalidOffsetError, 'offset must be greater than or equal to 0, but was -1') }
end
+
+ context 'with a nil offset' do
+ let(:relation) { original_relation.order { |r| r[:id] } }
+ let(:offset) { nil }
+
+ specify { expect { subject }.to raise_error(InvalidOffsetError, 'offset must be greater than or equal to 0, but was nil') }
+ end
end
|
Make sure that relation.drop(nil) and relation.take(nil) raise a nicer exception
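
A minimal Python sketch of the same guard (hypothetical names, not the axiom API): treating None like an out-of-range value yields the library's own, descriptive error instead of a bare TypeError from the comparison.

class InvalidLimitError(ValueError):
    pass

def assert_valid_limit(limit):
    # `None < 0` would raise a bare TypeError, so check None first
    if limit is None or limit < 0:
        raise InvalidLimitError(
            "limit must be greater than or equal to 0, but was %r" % (limit,))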
|
dkubb_axiom
|
train
|
f17660441971ed532c5e949db0a20b38b197227c
|
diff --git a/entitlement/src/test/java/com/ning/billing/entitlement/api/TestDefaultSubscriptionApi.java b/entitlement/src/test/java/com/ning/billing/entitlement/api/TestDefaultSubscriptionApi.java
index <HASH>..<HASH> 100644
--- a/entitlement/src/test/java/com/ning/billing/entitlement/api/TestDefaultSubscriptionApi.java
+++ b/entitlement/src/test/java/com/ning/billing/entitlement/api/TestDefaultSubscriptionApi.java
@@ -46,8 +46,8 @@ public class TestDefaultSubscriptionApi extends EntitlementTestSuiteWithEmbedded
final Account account = accountApi.createAccount(getAccountData(7), callContext);
final PlanPhaseSpecifier spec = new PlanPhaseSpecifier("Shotgun", ProductCategory.BASE, BillingPeriod.MONTHLY, PriceListSet.DEFAULT_PRICELIST_NAME, null);
testListener.pushExpectedEvents(NextEvent.CREATE, NextEvent.CREATE, NextEvent.BLOCK);
- final Entitlement entitlement1 = entitlementApi.createBaseEntitlement(account.getId(), spec, UUID.randomUUID().toString(), initialDate, callContext);
- final Entitlement entitlement2 = entitlementApi.createBaseEntitlement(account.getId(), spec, UUID.randomUUID().toString(), initialDate, callContext);
+ final Entitlement entitlement1 = entitlementApi.createBaseEntitlement(account.getId(), spec, UUID.fromString("d87c78b4-c6de-4387-8a3b-4a5850dc29fc").toString(), initialDate, callContext);
+ final Entitlement entitlement2 = entitlementApi.createBaseEntitlement(account.getId(), spec, UUID.fromString("c56245e7-11a5-4a41-8854-3d31e24bcdcc").toString(), initialDate, callContext);
entitlementUtils.setBlockingStateAndPostBlockingTransitionEvent(new DefaultBlockingState(account.getId(), BlockingStateType.ACCOUNT, "stateName", "service", false, false, false, clock.getUTCNow()),
internalCallContextFactory.createInternalCallContext(account.getId(), callContext));
assertListenerStatus();
|
Fix flaky tests by hardcoding the UUIDs, so that when comparison defaults to the ID level the result is predictable
|
killbill_killbill
|
train
|
1d73faed49383a31fabc29dbe9021883ab2c8f7d
|
diff --git a/src/core/schema.js b/src/core/schema.js
index <HASH>..<HASH> 100644
--- a/src/core/schema.js
+++ b/src/core/schema.js
@@ -51,6 +51,9 @@ export const column = {
getAggFN: function getAggFN(name) {
let s = name.substr(AGG_PREFIX.length);
return s.substr(0, s.indexOf('_'));
+ },
+ aggColumn(name, aggFN) {
+ return `${AGG_PREFIX}${aggFN}_${name}`;
}
};
diff --git a/src/core/style/expressions/aggregation.js b/src/core/style/expressions/aggregation.js
index <HASH>..<HASH> 100644
--- a/src/core/style/expressions/aggregation.js
+++ b/src/core/style/expressions/aggregation.js
@@ -1,4 +1,5 @@
import Expression from './expression';
+import * as schema from '../../schema';
// Aggregation ops
export const Max = genAggregationOp('max');
@@ -26,19 +27,19 @@ function genAggregationOp(aggName) {
_applyToShaderSource(uniformIDMaker, propertyTIDMaker) {
return {
preface: '',
- inline: `p${propertyTIDMaker(`_cdb_agg_${aggName}_${this.property.name}`)}`
+ inline: `p${propertyTIDMaker(schema.column.aggColumn(this.property.name, aggName))}`
};
}
eval(feature) {
- return feature[`_cdb_agg_${aggName}_${this.property.name}`];
+ return feature[schema.column.aggColumn(this.property.name, aggName)];
}
_postShaderCompile() { }
_getMinimumNeededSchema() {
return {
columns: [
- `_cdb_agg_${aggName}_${this.property.name}`
+ schema.column.aggColumn(this.property.name, aggName)
]
};
}
};
-}
+}
|
Complete aggregation column names refactor
|
CartoDB_carto-vl
|
train
|
531f24581796a22735233559d22c99727b729c3f
|
diff --git a/manticore/core/cpu/arm.py b/manticore/core/cpu/arm.py
index <HASH>..<HASH> 100644
--- a/manticore/core/cpu/arm.py
+++ b/manticore/core/cpu/arm.py
@@ -626,9 +626,9 @@ class Armv7Cpu(Cpu):
@instruction
def PUSH(cpu, *regs):
- high_to_low_regs = regs[::-1]
+ high_to_low_regs = [r.read() for r in regs[::-1]]
for reg in high_to_low_regs:
- cpu.stack_push(reg.read())
+ cpu.stack_push(reg)
@instruction
|
Fix arm PUSH (#<I>)
If SP is in the register list, it gets pushed incorrectly: the stack
pushes for the registers that occur before it update SP before its value is read.
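
A minimal Python sketch of the failure mode (hypothetical CPU model, not Manticore's API): snapshotting every register value before the first push is what preserves SP's old value.

def push_buggy(cpu, regs):
    for reg in reversed(regs):
        # reading SP here already sees the decrements from earlier pushes
        cpu.stack_push(cpu.read(reg))

def push_fixed(cpu, regs):
    values = [cpu.read(reg) for reg in reversed(regs)]  # snapshot first
    for value in values:
        cpu.stack_push(value)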
|
trailofbits_manticore
|
train
|
11dee420f0ebf7979acf0c985de15a8d6ddc401e
|
diff --git a/birdhousebuilder/recipe/supervisor/__init__.py b/birdhousebuilder/recipe/supervisor/__init__.py
index <HASH>..<HASH> 100644
--- a/birdhousebuilder/recipe/supervisor/__init__.py
+++ b/birdhousebuilder/recipe/supervisor/__init__.py
@@ -106,11 +106,11 @@ class Recipe(object):
self.options['killasgroup'] = self.options.get('killasgroup', 'true')
self.options['stopsignal'] = self.options.get('stopsignal', 'TERM')
env_templ = \
- 'USER={0},LOGNAME={0},HOME={1},PATH="/bin:/usr/bin:{2}",LD_LIBRARY_PATH="{3}",PYTHON_EGG_CACHE="{4}"'
+ 'USER={0},LOGNAME={0},HOME={1},PATH="/bin:/usr/bin:{2}",PYTHON_EGG_CACHE="{3}"'
self.options['environment'] = self.options.get(
'environment',
env_templ.format(self.options['user'], self.options['home'],
- bin_path, lib_path, self.options['cache-directory']))
+ bin_path, self.options['cache-directory']))
def install(self, update=False):
installed = []
|
don't set LD_LIBRARY_PATH
|
bird-house_birdhousebuilder.recipe.supervisor
|
train
|
2c97b828f2741eefb9a382f15a52c541ae69cc31
|
diff --git a/knot.py b/knot.py
index <HASH>..<HASH> 100644
--- a/knot.py
+++ b/knot.py
@@ -81,6 +81,12 @@ class Container(dict):
dictionary methods are available without any modifications.
"""
+ def __call__(self, *args):
+ """A shortcut method for convenience.
+ For more information see :meth:`Container.provide`.
+ """
+ return self.provide(*args)
+
def provide(self, name, default=None):
"""Gets the value registered with ``name`` and determines whether the
value is a provider or a configuration setting. The ``default`` value
diff --git a/test_knot.py b/test_knot.py
index <HASH>..<HASH> 100755
--- a/test_knot.py
+++ b/test_knot.py
@@ -27,6 +27,13 @@ class TestContainer(unittest.TestCase):
self.assertEqual(c.provide('foo', 'bar'), 'bar')
+ def test_forwards_to_provide(self):
+ c = Container()
+ c.provide = MagicMock()
+
+ c.provide('foo', 'bar', 'default')
+ c.provide.assert_called_with('foo', 'bar', 'default')
+
def test_caches_return_value_provider(self):
c = Container()
|
Put back `__call__` as a shortcut for `provide`.
|
jaapverloop_knot
|
train
|
df8a7f4295dbe5ea635970d262ca16b8bb7edeb4
|
diff --git a/spec/live/custom_status_spec.rb b/spec/live/custom_status_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/live/custom_status_spec.rb
+++ b/spec/live/custom_status_spec.rb
@@ -4,12 +4,12 @@ require 'securerandom'
describe ZendeskAPI::CustomStatus, :delete_after do
def valid_attributes
{
- :status_category => 'open',
- :agent_label => "Agent Label #{SecureRandom.hex(6)}",
- :end_user_label => "End User Label #{SecureRandom.hex(6)}",
- :description => "Description #{SecureRandom.hex(6)}",
- :end_user_description => "End User Description #{SecureRandom.hex(6)}",
- :active => false
+ status_category: 'open',
+ agent_label: "Agent Label #{SecureRandom.hex(6)}",
+ end_user_label: "End User Label #{SecureRandom.hex(6)}",
+ description: "Description #{SecureRandom.hex(6)}",
+ end_user_description: "End User Description #{SecureRandom.hex(6)}",
+ active: false
}
end
|
Update custom_status_spec.rb to use Ruby <I> hash syntax
|
zendesk_zendesk_api_client_rb
|
train
|
8f8c31e5cfc17029e9f87083dd5965bf84b39ca4
|
diff --git a/cmd/localkube/localkube.go b/cmd/localkube/localkube.go
index <HASH>..<HASH> 100644
--- a/cmd/localkube/localkube.go
+++ b/cmd/localkube/localkube.go
@@ -23,6 +23,7 @@ import (
"flag"
"fmt"
"log"
+ "math/rand"
"net/http"
"os"
"time"
@@ -90,8 +91,10 @@ func api_server() {
Client: http.DefaultClient,
Port: *kubelet_port,
}
+ random := rand.New(rand.NewSource(int64(time.Now().Nanosecond())))
+
storage := map[string]apiserver.RESTStorage{
- "pods": registry.MakePodRegistryStorage(podRegistry, containerInfo, registry.MakeFirstFitScheduler(machineList, podRegistry)),
+ "pods": registry.MakePodRegistryStorage(podRegistry, containerInfo, registry.MakeFirstFitScheduler(machineList, podRegistry, random)),
"replicationControllers": registry.MakeControllerRegistryStorage(controllerRegistry),
"services": registry.MakeServiceRegistryStorage(serviceRegistry),
}
|
Fix localkube due to a bad merge.
|
kubernetes_kubernetes
|
train
|
5ac45f08bb943c097d3d10ee088a4a8f4f8e4de8
|
diff --git a/lib/rules/no-only-tests.js b/lib/rules/no-only-tests.js
index <HASH>..<HASH> 100644
--- a/lib/rules/no-only-tests.js
+++ b/lib/rules/no-only-tests.js
@@ -1,36 +1,7 @@
'use strict';
const utils = require('../utils');
-
-/**
- * Checks if the given token is a comma token or not.
- * From: https://github.com/eslint/eslint/blob/master/lib/rules/utils/ast-utils.js
- * @param {Token} token The token to check.
- * @returns {boolean} `true` if the token is a comma token.
- */
-function isCommaToken (token) {
- return token.value === ',' && token.type === 'Punctuator';
-}
-
-/**
- * Checks if the given token is an opening brace token or not.
- * From: https://github.com/eslint/eslint/blob/master/lib/rules/utils/ast-utils.js
- * @param {Token} token The token to check.
- * @returns {boolean} `true` if the token is an opening brace token.
- */
-function isOpeningBraceToken (token) {
- return token.value === '{' && token.type === 'Punctuator';
-}
-
-/**
- * Checks if the given token is a closing brace token or not.
- * From: https://github.com/eslint/eslint/blob/master/lib/rules/utils/ast-utils.js
- * @param {Token} token The token to check.
- * @returns {boolean} `true` if the token is a closing brace token.
- */
-function isClosingBraceToken (token) {
- return token.value === '}' && token.type === 'Punctuator';
-}
+const { isCommaToken, isOpeningBraceToken, isClosingBraceToken } = require('eslint-utils');
module.exports = {
meta: {
|
Fix: Use token utilities from eslint-utils (#<I>)
|
not-an-aardvark_eslint-plugin-eslint-plugin
|
train
|
876c94d556e38b9cbe33b8a34ded0ff8f5d97317
|
diff --git a/tests/frontend/org/voltdb/TestJSONInterface.java b/tests/frontend/org/voltdb/TestJSONInterface.java
index <HASH>..<HASH> 100644
--- a/tests/frontend/org/voltdb/TestJSONInterface.java
+++ b/tests/frontend/org/voltdb/TestJSONInterface.java
@@ -98,7 +98,6 @@ import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.util.EntityUtils;
-import com.fasterxml.jackson.databind.ObjectMapper;
import org.json_voltpatches.JSONArray;
import org.json_voltpatches.JSONException;
import org.json_voltpatches.JSONObject;
@@ -131,6 +130,8 @@ import org.voltdb.utils.Base64;
import org.voltdb.utils.Encoder;
import org.voltdb.utils.MiscUtils;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
import junit.framework.TestCase;
public class TestJSONInterface extends TestCase {
@@ -2263,7 +2264,7 @@ public class TestJSONInterface extends TestCase {
// wait for everything to be done and check status
executor.shutdown();
- if (!executor.awaitTermination(60, TimeUnit.SECONDS)) {
+ if (!executor.awaitTermination(120, TimeUnit.SECONDS)) {
fail("Workers should have finished execution by now");
}
assertTrue(TestWorker.s_success);
|
Increase test timeout because it fails on some machines.
|
VoltDB_voltdb
|
train
|
4ec3e711177b0407241ef204487f75ac90765d05
|
diff --git a/ejb3/src/main/java/org/jboss/as/ejb3/EjbMessages.java b/ejb3/src/main/java/org/jboss/as/ejb3/EjbMessages.java
index <HASH>..<HASH> 100644
--- a/ejb3/src/main/java/org/jboss/as/ejb3/EjbMessages.java
+++ b/ejb3/src/main/java/org/jboss/as/ejb3/EjbMessages.java
@@ -1980,4 +1980,13 @@ public interface EjbMessages {
@Message(id=14551, value = "<session-type> not specified for ejb %s. This must be present in ejb-jar.xml")
DeploymentUnitProcessingException sessionTypeNotSpecified(String bean);
+
+ /**
+ * Creates an exception indicating Default interceptors specify an absolute ordering
+ *
+ * @return a {@link DeploymentUnitProcessingException} for the error.
+ */
+ @Message(id = 14552, value = "Default interceptors cannot specify an <interceptor-order> element in ejb-jar.xml")
+ DeploymentUnitProcessingException defaultInterceptorsNotSpecifyOrder();
+
}
\ No newline at end of file
diff --git a/ejb3/src/main/java/org/jboss/as/ejb3/deployment/processors/dd/DeploymentDescriptorInterceptorBindingsProcessor.java b/ejb3/src/main/java/org/jboss/as/ejb3/deployment/processors/dd/DeploymentDescriptorInterceptorBindingsProcessor.java
index <HASH>..<HASH> 100644
--- a/ejb3/src/main/java/org/jboss/as/ejb3/deployment/processors/dd/DeploymentDescriptorInterceptorBindingsProcessor.java
+++ b/ejb3/src/main/java/org/jboss/as/ejb3/deployment/processors/dd/DeploymentDescriptorInterceptorBindingsProcessor.java
@@ -95,6 +95,9 @@ public class DeploymentDescriptorInterceptorBindingsProcessor implements Deploym
if (binding.getMethod() != null) {
throw MESSAGES.defaultInterceptorsNotBindToMethod();
}
+ if(binding.getInterceptorOrder() != null) {
+ throw MESSAGES.defaultInterceptorsNotSpecifyOrder();
+ }
defaultInterceptorBindings.add(binding);
} else {
List<InterceptorBindingMetaData> bindings = bindingsPerComponent.get(binding.getEjbName());
|
AS7-<I> Throw an exception when <interceptor-order> is applied to the default interceptors, as this is not allowed by the spec and is currently silently ignored
|
wildfly_wildfly
|
train
|
26e320750cedd9f56721263cf65238e47d30af5e
|
diff --git a/parsl/utils.py b/parsl/utils.py
index <HASH>..<HASH> 100644
--- a/parsl/utils.py
+++ b/parsl/utils.py
@@ -142,7 +142,10 @@ def wtime_to_minutes(time_string):
'''
hours, mins, seconds = time_string.split(':')
- return int(hours) * 60 + int(mins) + 1
+ total_mins = int(hours) * 60 + int(mins)
+ if total_mins < 1:
+ logger.warning("Time string '{}' parsed to {} minutes, less than 1".format(time_string, total_mins))
+ return total_mins
class RepresentationMixin(object):
|
Parse walltime strings to actual number of minutes
Previously, one minute was added to deal with the specific
degenerate case of 0 minutes being specified.
Now, zero minutes will be parsed as 0 minutes, but a warning will
be emitted if the return value is <1, to nudge the user in the
right direction.
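
A quick worked example of the new behaviour, using the patched function above:

>>> wtime_to_minutes('02:30:59')   # 2*60 + 30; seconds are dropped
150
>>> wtime_to_minutes('00:00:30')   # now 0, plus a logged warning
0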
|
Parsl_parsl
|
train
|
7e421d169cf11269228646fe3ae3ba3152e084ab
|
diff --git a/src/node_modules/lib/transaction.js b/src/node_modules/lib/transaction.js
index <HASH>..<HASH> 100644
--- a/src/node_modules/lib/transaction.js
+++ b/src/node_modules/lib/transaction.js
@@ -79,6 +79,15 @@ class Transaction {
return buf;
}
+
+ /**
+ * @param {number} byte
+ * @param {number} [offset]
+ * @returns {number}
+ */
+ indexOf(byte, offset = 0) {
+ return this.stream.indexOf(byte, this.index + offset) - this.index;
+ }
}
const methods = {
diff --git a/test/transaction.js b/test/transaction.js
index <HASH>..<HASH> 100644
--- a/test/transaction.js
+++ b/test/transaction.js
@@ -33,14 +33,16 @@ test('should read buffer', () => {
const transaction = new Transaction(stream);
stream.append(Buffer.from([1, 2, 3, 4]));
- stream.append(Buffer.from([5, 6, 7, 8, 9]));
+ stream.append(Buffer.from([1, 2, 3, 4, 9]));
const size = stream.length;
expect(transaction.length).toEqual(size);
+ expect(transaction.indexOf(3)).toBe(2);
expect(transaction.readBuffer(3)).toEqual(Buffer.from([1, 2, 3]));
expect(transaction.get(0)).toEqual(4);
- expect(transaction.readBuffer(5)).toEqual(Buffer.from([4, 5, 6, 7, 8]));
+ expect(transaction.indexOf(3)).toBe(3);
+ expect(transaction.readBuffer(5)).toEqual(Buffer.from([4, 1, 2, 3, 4]));
expect(stream.length).toEqual(size);
expect(transaction.length).toEqual(size);
|
added missing method transaction#indexOf()
|
reklatsmasters_binary-data
|
train
|
eae3b1946a276ac099e0018fc792d9e8c3bfda6d
|
diff --git a/params/version.go b/params/version.go
index <HASH>..<HASH> 100644
--- a/params/version.go
+++ b/params/version.go
@@ -21,10 +21,10 @@ import (
)
const (
- VersionMajor = 1 // Major version component of the current release
- VersionMinor = 10 // Minor version component of the current release
- VersionPatch = 9 // Patch version component of the current release
- VersionMeta = "unstable" // Version metadata to append to the version string
+ VersionMajor = 1 // Major version component of the current release
+ VersionMinor = 10 // Minor version component of the current release
+ VersionPatch = 9 // Patch version component of the current release
+ VersionMeta = "stable" // Version metadata to append to the version string
)
// Version holds the textual version string.
|
params: release go-ethereum <I> stable
|
ethereum_go-ethereum
|
train
|
76aa4512452e067b574d77e082bcc21bda70ab25
|
diff --git a/src/frontend/org/voltdb/client/Client.java b/src/frontend/org/voltdb/client/Client.java
index <HASH>..<HASH> 100644
--- a/src/frontend/org/voltdb/client/Client.java
+++ b/src/frontend/org/voltdb/client/Client.java
@@ -172,9 +172,17 @@ public interface Client {
* Invoke a procedure with specified query timeout. This is a synchronous call:
* it blocks until a result is available.
* <p>
+ * The specified query timeout applies to a read-only query or batch of read-only
+ * queries, and may override the global <code>querytimeout</code> value in the
+ * VoltDB cluster's configuration file. Only callers with admin privilege are
+ * permitted to use a timeout longer than the global setting.
+ * <p>
+ * A query timeout of zero means there is no timeout applied to the query
+ * or batch of queries.
+ * <p>
* For more details, refer to {@link #callProcedure(String, Object...)}.
*
- * @param queryTimeout timeout (in milliseconds) for queries in a batch for read-only procedures.
+ * @param queryTimeout timeout (in milliseconds) for read-only queries or batches of queries.
* @param procName <code>class</code> name (not qualified by package) of the procedure to execute.
* @param parameters vararg list of procedure's parameter values.
* @return {@link ClientResponse} instance of procedure call results.
@@ -191,10 +199,18 @@ public interface Client {
* queued within the configured timeout. Check the return value to determine
* if queueing actually took place.
* <p>
+ * The specified query timeout applies to a read-only query or batch of read-only
+ * queries, and may override the global <code>querytimeout</code> value in the
+ * VoltDB cluster's configuration file. Only callers with admin privilege are
+ * permitted to use a timeout longer than the global setting.
+ * <p>
+ * A query timeout of zero means there is no timeout applied to the query
+ * or batch of queries.
+ * <p>
* For more details, refer to {@link #callProcedure(ProcedureCallback, String, Object...)}.
*
* @param callback {@link ProcedureCallback} that will be invoked with procedure results.
- * @param queryTimeout timeout (in milliseconds) for queries in a batch for read-only procedures.
+ * @param queryTimeout timeout (in milliseconds) for read-only queries or batches of queries.
* @param procName class name (not qualified by package) of the procedure to execute.
* @param parameters vararg list of procedure's parameter values.
* @return <code>true</code> if the procedure was queued and <code>false</code> otherwise.
@@ -206,9 +222,14 @@ public interface Client {
/**
* Synchronously invoke a procedure call, blocking until a result is available,
- * with caller-specified procedure timeout.
+ * with caller-specified client timeout and query timeout.
+ * <p>
+ * The client timeout overrides the default set up by {@link ClientConfig#setProcedureCallTimeout}.
+ * <p>
+ * See {@link #callProcedureWithTimeout(int, String, Object...)} for details
+ * of the query timeout.
*
- * @param batchTimeout procedure invocation batch timeout (milliseconds)
+ * @param queryTimeout timeout (in milliseconds) for read-only queries or batches of queries
* @param procName class name (not qualified by package) of the procedure to execute.
* @param clientTimeout timeout for the procedure
* @param unit TimeUnit of procedure timeout
@@ -218,7 +239,7 @@ public interface Client {
* @throws NoConnectionsException if this {@link Client} instance is not connected to any servers.
* @throws IOException if there is a Java network or connection problem.
*/
- public ClientResponse callProcedureWithClientTimeout(int batchTimeout,
+ public ClientResponse callProcedureWithClientTimeout(int queryTimeout,
String procName,
long clientTimeout,
TimeUnit unit,
@@ -226,10 +247,15 @@ public interface Client {
throws IOException, NoConnectionsException, ProcCallException;
/**
- * Asynchronously invoke a procedure call with specified batch and query timeouts.
+ * Asynchronously invoke a procedure call with specified client and query timeouts.
+ * <p>
+ * The client timeout overrides the default set up by {@link ClientConfig#setProcedureCallTimeout}.
+ * <p>
+ * See {@link #callProcedureWithTimeout(ProcedureCallback, int, String, Object...)} for details
+ * of the query timeout.
*
* @param callback TransactionCallback that will be invoked with procedure results.
- * @param batchTimeout procedure invocation batch timeout (in milliseconds)
+ * @param queryTimeout timeout (in milliseconds) for read-only queries or batches of queries
* @param procName class name (not qualified by package) of the procedure to execute.
* @param clientTimeout query timeout
* @param clientTimeoutUnit units for query timeout
@@ -239,7 +265,7 @@ public interface Client {
* @throws IOException if there is a Java network or connection problem.
*/
public boolean callProcedureWithClientTimeout(ProcedureCallback callback,
- int batchTimeout,
+ int queryTimeout,
String procName,
long clientTimeout,
TimeUnit clientTimeoutUnit,
|
better doc for batch/query timeout (#<I>)
|
VoltDB_voltdb
|
train
|
a1ec89b5f84ef5ee86ec87bb9b15dd8784a56eab
|
diff --git a/util/progress_writer.go b/util/progress_writer.go
index <HASH>..<HASH> 100644
--- a/util/progress_writer.go
+++ b/util/progress_writer.go
@@ -33,6 +33,7 @@ func (w *ProgressWriter) Write(b []byte) (int, error) {
func (w *ProgressWriter) Close() error {
if w.written > 0 && w.Writer != nil {
_, e := fmt.Fprint(w.Writer, "\n")
+ w.Writer = nil
return e
}
return nil
|
set writer to nil when closed
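
The same idempotence pattern as a Python sketch (hypothetical class mirroring the Go change): dropping the writer reference on the first close makes any later close a no-op.

class ProgressWriter:
    def __init__(self, writer):
        self.writer = writer
        self.written = 0

    def close(self):
        if self.written > 0 and self.writer is not None:
            self.writer.write("\n")
            self.writer = None   # a second close() won't print again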
|
dynport_dgtk
|
train
|
f902119919bbab0c047eb4bc08492d92ea8c9fa1
|
diff --git a/sample/src/com/nostra13/example/universalimageloader/Constants.java b/sample/src/com/nostra13/example/universalimageloader/Constants.java
index <HASH>..<HASH> 100644
--- a/sample/src/com/nostra13/example/universalimageloader/Constants.java
+++ b/sample/src/com/nostra13/example/universalimageloader/Constants.java
@@ -55,7 +55,6 @@ public final class Constants {
"http://www.bandwidthblog.com/wp-content/uploads/2011/11/twitter-logo.png",
"http://weloveicons.s3.amazonaws.com/icons/100907_itunes1.png",
"http://weloveicons.s3.amazonaws.com/icons/100929_applications.png",
- "http://t2.gstatic.com/images?q=tbn:ANd9GcTJixLIo_zlOPOILuxNWc5evK333pZCH8rugaTtv3SZSfiI39T0-3vWYQ",
"http://www.idyllicmusic.com/index_files/get_apple-iphone.png",
"http://www.frenchrevolutionfood.com/wp-content/uploads/2009/04/Twitter-Bird.png",
"http://3.bp.blogspot.com/-ka5MiRGJ_S4/TdD9OoF6bmI/AAAAAAAAE8k/7ydKtptUtSg/s1600/Google_Sky%2BMaps_Android.png",
diff --git a/sample/src/com/nostra13/example/universalimageloader/ImagePagerActivity.java b/sample/src/com/nostra13/example/universalimageloader/ImagePagerActivity.java
index <HASH>..<HASH> 100644
--- a/sample/src/com/nostra13/example/universalimageloader/ImagePagerActivity.java
+++ b/sample/src/com/nostra13/example/universalimageloader/ImagePagerActivity.java
@@ -1,4 +1,5 @@
package com.nostra13.example.universalimageloader;
+
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.os.Bundle;
diff --git a/sample/src/com/nostra13/example/universalimageloader/UILApplication.java b/sample/src/com/nostra13/example/universalimageloader/UILApplication.java
index <HASH>..<HASH> 100644
--- a/sample/src/com/nostra13/example/universalimageloader/UILApplication.java
+++ b/sample/src/com/nostra13/example/universalimageloader/UILApplication.java
@@ -19,13 +19,13 @@ public class UILApplication extends Application {
@SuppressWarnings("unused")
@Override
public void onCreate() {
- super.onCreate();
-
if (Config.DEVELOPER_MODE && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
- StrictMode.setThreadPolicy(new StrictMode.ThreadPolicy.Builder().detectAll().penaltyLog().build());
- StrictMode.setVmPolicy(new StrictMode.VmPolicy.Builder().detectAll().penaltyLog().build());
+ StrictMode.setThreadPolicy(new StrictMode.ThreadPolicy.Builder().detectAll().penaltyDialog().build());
+ StrictMode.setVmPolicy(new StrictMode.VmPolicy.Builder().detectAll().penaltyDeath().build());
}
+ super.onCreate();
+
initImageLoader(getApplicationContext());
}
|
Sample: Removed deprecated image link.
|
nostra13_Android-Universal-Image-Loader
|
train
|
28f38ccb095e634433b39849d078d576192e0c81
|
diff --git a/tests/muc/test_e2e.py b/tests/muc/test_e2e.py
index <HASH>..<HASH> 100644
--- a/tests/muc/test_e2e.py
+++ b/tests/muc/test_e2e.py
@@ -73,13 +73,26 @@ class TestMuc(TestCase):
logging.debug("thirdwitch is %s", self.thirdwitch.local_jid)
# make firstwitch and secondwitch join
- self.firstroom, fut = self.firstwitch.summon(
- aioxmpp.MUCClient
- ).join(
+ firstmuc = self.firstwitch.summon(aioxmpp.MUCClient)
+ self.firstroom, fut = firstmuc.join(
self.mucjid,
"firstwitch",
)
+ # configure room to be open (this also alleviates any locking)
+ try:
+ form = aioxmpp.muc.xso.ConfigurationForm.from_xso(
+ (yield from firstmuc.get_room_config(self.firstroom.jid))
+ )
+ form.membersonly.value = False
+ yield from firstmuc.set_room_config(self.firstroom.jid,
+ form.render_reply())
+ except aioxmpp.errors.XMPPError:
+ logging.warning(
+ "failed to configure room for the tests",
+ exc_info=True,
+ )
+
# we want firstwitch to join first so that we have a deterministic
# owner of the muc
yield from fut
|
muc: make sure to configure room in e2e tests
|
horazont_aioxmpp
|
train
|
0132bb463ad81a67158cd76d7636469a64ae955e
|
diff --git a/availability/ports.go b/availability/ports.go
index <HASH>..<HASH> 100644
--- a/availability/ports.go
+++ b/availability/ports.go
@@ -26,6 +26,9 @@ func Check(address *net.TCPAddr, timeout time.Duration) error {
}
func isListening(address *net.TCPAddr) bool {
- _, err := net.DialTCP("tcp", nil, address)
+ connection, err := net.DialTCP("tcp", nil, address)
+ if connection != nil {
+ connection.Close()
+ }
return err == nil
}
|
close tcp connection when finished
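
The equivalent pattern in Python (a sketch, not the broker's code): a successful probe must close its socket too, or every availability check leaks a file descriptor.

import socket

def is_listening(address, timeout=1.0):
    try:
        conn = socket.create_connection(address, timeout=timeout)
    except OSError:
        return False
    conn.close()   # close on success as well
    return True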
|
pivotal-cf_cf-redis-broker
|
train
|
50ade57700fe6e48f3a1b975ab484cc093376317
|
diff --git a/spec/api-native-image-spec.js b/spec/api-native-image-spec.js
index <HASH>..<HASH> 100644
--- a/spec/api-native-image-spec.js
+++ b/spec/api-native-image-spec.js
@@ -95,9 +95,11 @@ describe('nativeImage module', () => {
})
assert.deepEqual(imageB.getSize(), {width: 269, height: 95})
assert.equal(imageB.hasRepresentation(1.0), false)
+ assert.equal(imageB.hasRepresentation(2.0), true)
const imageC = nativeImage.createFromDataURL(imageB.toDataURL())
assert.deepEqual(imageC.getSize(), {width: 538, height: 190})
+ assert.equal(imageC.hasRepresentation(1.0), false)
assert(imageB.toBitmap().equals(imageC.toBitmap()))
})
})
|
Add more hasRepresentation asserts
|
electron_electron
|
train
|
3e705a7820aba16bcb661f2c4a8764b50445e996
|
diff --git a/annis-service/src/main/java/annis/dao/QueryDaoImpl.java b/annis-service/src/main/java/annis/dao/QueryDaoImpl.java
index <HASH>..<HASH> 100644
--- a/annis-service/src/main/java/annis/dao/QueryDaoImpl.java
+++ b/annis-service/src/main/java/annis/dao/QueryDaoImpl.java
@@ -26,7 +26,15 @@ import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.StringWriter;
+import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
+import java.nio.file.FileSystems;
+import java.nio.file.Path;
+import java.nio.file.StandardWatchEventKinds;
+import java.nio.file.WatchEvent;
+import java.nio.file.WatchService;
+import java.nio.file.WatchEvent.Kind;
+import java.nio.file.WatchKey;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
@@ -50,11 +58,14 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
import org.aeonbits.owner.ConfigFactory;
import org.apache.commons.dbutils.handlers.ColumnListHandler;
import org.apache.commons.dbutils.handlers.ScalarHandler;
import org.apache.commons.io.IOUtils;
+import org.apache.commons.io.input.ReversedLinesFileReader;
import org.corpus_tools.graphannis.CorpusStorageManager;
import org.corpus_tools.graphannis.CorpusStorageManager.QueryLanguage;
import org.corpus_tools.graphannis.CorpusStorageManager.ResultOrder;
@@ -353,14 +364,83 @@ public class QueryDaoImpl extends AbstractDao implements QueryDao {
private final ByteHelper byteHelper = new ByteHelper();
private final MetaByteHelper metaByteHelper = new MetaByteHelper();
+
+ private WatchService graphannisLogfileWatcher;
protected QueryDaoImpl() throws GraphANNISException {
- File logfile = new File(this.getGraphANNISDir(), "graphannis.log");
+ final File logfile = new File(this.getGraphANNISDir(), "graphannis.log");
this.corpusStorageMgr = new CorpusStorageManager(QueryDaoImpl.this.getGraphANNISDir().getAbsolutePath(),
- logfile.getAbsolutePath(), true, LogLevel.Debug);
+ logfile.getAbsolutePath(), true, LogLevel.Info);
// initialize timeout with value from config (can be overwritten by API)
this.timeout = cfg.timeout();
+
+ // add a watcher for the logfile and emit a logging event whenever the logfile
+ // changes
+ try {
+ graphannisLogfileWatcher = FileSystems.getDefault().newWatchService();
+
+ final Path logfilePath = logfile.toPath();
+ logfilePath.getParent().register(graphannisLogfileWatcher, StandardWatchEventKinds.ENTRY_MODIFY);
+ // start a background thread
+ new Thread(() -> {
+ while (graphannisLogfileWatcher != null) {
+ try {
+ WatchKey wk = graphannisLogfileWatcher.take();
+ for (WatchEvent<?> event : wk.pollEvents()) {
+ if (event.context() instanceof Path) {
+ Path changed = (Path) event.context();
+ if (changed.toString().equals("graphannis.log")) {
+ // read the last line of the logfile and log it
+ try (ReversedLinesFileReader reader = new ReversedLinesFileReader(logfile, 4096,
+ StandardCharsets.UTF_8)) {
+ parseAndReportGraphANNISLogEntry(reader);
+ }
+ }
+ }
+ }
+ wk.reset();
+ } catch (InterruptedException | IOException ex) {
+ log.error("Error when reading graphANNIS logfile", ex);
+ }
+ }
+                }).start();
+ } catch (IOException ex) {
+ log.error("Could not register service to check the graphANNIS logfile", ex);
+ }
+
+ }
+
+ private void parseAndReportGraphANNISLogEntry(ReversedLinesFileReader reader) throws IOException {
+ String lastLine = reader.readLine();
+
+ Pattern formatPattern = Pattern.compile("^[0-9]+:[0-9]+:[0-9]+ \\[(.+)\\] (.*)");
+ while (lastLine != null) {
+
+ Matcher m = formatPattern.matcher(lastLine);
+ if (m.matches()) {
+ switch (m.group(1)) {
+ case "DEBUG":
+ log.debug(m.group(2));
+ break;
+ case "TRACE":
+ log.trace(m.group(2));
+ break;
+ case "WARN":
+ log.warn(m.group(2));
+ break;
+ case "ERROR":
+ log.error(m.group(2));
+ break;
+ default:
+ log.info(m.group(2));
+ break;
+ }
+ return;
+ }
+
+ lastLine = reader.readLine();
+ }
}
public static QueryDao create() throws GraphANNISException {
|
Use the graphannis.log logfile to report log messages from graphANNIS to the ANNIS service (e.g. on import)
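
The level-forwarding idea in a compact Python sketch (hypothetical logger wiring; the Java WatchService code above is the real implementation):

import logging
import re

LINE = re.compile(r"^\d+:\d+:\d+ \[(.+)\] (.*)")
LEVELS = {"TRACE": logging.DEBUG, "DEBUG": logging.DEBUG,
          "WARN": logging.WARNING, "ERROR": logging.ERROR}

def forward_log_entry(line, log=logging.getLogger("graphannis")):
    # Re-emit one graphannis.log line at the matching service log level
    m = LINE.match(line)
    if m:
        level, message = m.groups()
        log.log(LEVELS.get(level, logging.INFO), message)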
|
korpling_ANNIS
|
train
|
47c4dad85cfdfe6d2e229d4c3a419b62f0740d83
|
diff --git a/anpy/dossier_from_opendata.py b/anpy/dossier_from_opendata.py
index <HASH>..<HASH> 100644
--- a/anpy/dossier_from_opendata.py
+++ b/anpy/dossier_from_opendata.py
@@ -213,17 +213,10 @@ def an_text_url(identifiant, code):
return host + leg + "/" + datas[type]['repertoire'] + "/" + datas[type]['prefixe'] + num + datas[type]['suffixe'] + ".asp"
-def parse(url, verbose=True, logfile=sys.stderr, cached_opendata_an={}):
- if not verbose:
-
- def _log(*x):
- return None
-
- else:
-
- def _log(*args):
- nonlocal logfile
- print(*args, file=logfile)
+def parse(url, logfile=sys.stderr, cached_opendata_an={}):
+ def _log(*args):
+ nonlocal logfile
+ print(*args, file=logfile)
legislature, _ = parse_national_assembly_url(url)
if legislature and legislature in cached_opendata_an:
diff --git a/anpy/dossier_like_senapy.py b/anpy/dossier_like_senapy.py
index <HASH>..<HASH> 100644
--- a/anpy/dossier_like_senapy.py
+++ b/anpy/dossier_like_senapy.py
@@ -74,7 +74,7 @@ def merge_previous_works_an(older_dos, dos):
return dos
-def historic_doslegs_parse(html, url_an=None, verbose=True, logfile=sys.stderr, nth_dos_in_page=0, parse_previous_works=True, parse_next_works=True):
+def historic_doslegs_parse(html, url_an=None, logfile=sys.stderr, nth_dos_in_page=0, parse_previous_works=True, parse_next_works=True):
"""
Parse an AN dosleg like http://www.assemblee-nationale.fr/13/dossiers/accord_Montenegro_mobilite_jeunes.asp
@@ -94,11 +94,6 @@ def historic_doslegs_parse(html, url_an=None, verbose=True, logfile=sys.stderr,
log_error = _log_error
log_warning = _log_warning
- if not verbose:
- def log_error(*x): return None
-
- def log_warning(*x): return None
-
soup = BeautifulSoup(html, 'lxml')
legislature, slug = parse_national_assembly_url(data['url_dossier_assemblee'])
@@ -367,7 +362,7 @@ def historic_doslegs_parse(html, url_an=None, verbose=True, logfile=sys.stderr,
resp = download_historic_dosleg(previous_works)
prev_data = historic_doslegs_parse(
resp.text, previous_works,
- logfile=logfile, verbose=verbose,
+ logfile=logfile,
nth_dos_in_page=nth_dos_in_page, parse_next_works=False)
if prev_data:
prev_data = prev_data[nth_dos_in_page] if len(prev_data) > 1 else prev_data[0]
@@ -383,7 +378,7 @@ def historic_doslegs_parse(html, url_an=None, verbose=True, logfile=sys.stderr,
if resp.status_code == 200:
recent_data = historic_doslegs_parse(
resp.text, resp.url,
- logfile=logfile, verbose=verbose,
+ logfile=logfile,
nth_dos_in_page=nth_dos_in_page, parse_previous_works=False)
if recent_data:
log_warning('FOUND MORE RECENT WORKS', resp.url)
@@ -391,22 +386,22 @@ def historic_doslegs_parse(html, url_an=None, verbose=True, logfile=sys.stderr,
data = merge_previous_works_an(data, recent_data)
if another_dosleg_inside:
- others = historic_doslegs_parse(another_dosleg_inside, url_an, logfile=logfile, verbose=verbose, nth_dos_in_page=nth_dos_in_page+1)
+ others = historic_doslegs_parse(another_dosleg_inside, url_an, logfile=logfile, nth_dos_in_page=nth_dos_in_page+1)
if others:
return [data] + others
return [data]
-def parse(url, verbose=True, logfile=sys.stderr, cached_opendata_an={}):
+def parse(url, logfile=sys.stderr, cached_opendata_an={}):
url = clean_url(url)
if '/dyn/' in url:
- parsed = opendata_parse(url, verbose=verbose, logfile=logfile, cached_opendata_an=cached_opendata_an)
+ parsed = opendata_parse(url, logfile=logfile, cached_opendata_an=cached_opendata_an)
if parsed:
return [parsed]
resp = download_historic_dosleg(url)
- return historic_doslegs_parse(resp.text, resp.url, verbose=verbose, logfile=logfile)
+ return historic_doslegs_parse(resp.text, resp.url, logfile=logfile)
"""
|
remove "verbose" option since it's not used anymore
|
regardscitoyens_anpy
|
train
|
f8fb4f5214770be9b823073708f634d9e722d73d
|
diff --git a/api-spec-testing/test_file.rb b/api-spec-testing/test_file.rb
index <HASH>..<HASH> 100644
--- a/api-spec-testing/test_file.rb
+++ b/api-spec-testing/test_file.rb
@@ -199,34 +199,7 @@ module Elasticsearch
end
def clear_indices(client)
- indices = client.indices.get(index: '_all').keys.reject do |i|
- i.start_with?('.security') || i.start_with?('.watches')
- end
- indices.each do |index|
- client.indices.delete_alias(index: index, name: '*', ignore: 404)
- client.indices.delete(index: index, ignore: 404)
- end
- # See cat.aliases/10_basic.yml, test_index is not return in client.indices.get(index: '_all')
- client.indices.delete(index: 'index_3', ignore: 404)
- client.indices.delete(index: 'index_1', ignore: 404)
- client.indices.delete(index: 'index_2', ignore: 404)
- client.indices.delete(index: 'index_to_monitor', ignore: 404)
- client.indices.delete(index: 'index3', ignore: 404)
- client.indices.delete(index: 'test-close', ignore: 404)
- client.indices.delete(index: 'test_closed', ignore: 404)
- client.indices.delete(index: 'index', ignore: 404)
- client.indices.delete(index: 'index2', ignore: 404)
- client.indices.delete(index: 'test', ignore: 404)
- client.indices.delete(index: 'test_2', ignore: 404)
- client.indices.delete(index: 'index-2', ignore: 404)
- client.indices.delete(index: 'test_index', ignore: 404)
- client.indices.delete(index: 'index1', ignore: 404)
- client.indices.delete(index: 'index_closed', ignore: 404)
- client.indices.delete(index: 'bar', ignore: 404)
- client.indices.delete(index: 'test_close_index', ignore: 404)
- client.indices.delete(index: 'test_index_3', ignore: 404)
- client.indices.delete(index: 'test_index_2', ignore: 404)
- client.indices.delete(index: 'test-xyy', ignore: 404)
+ client.indices.delete(index: '*')
end
end
end
|
[API] Fix clear_indices in test_file to actually delete all indices
|
elastic_elasticsearch-ruby
|
train
|
51e97ec3b0b8609efbbf6c2126c1f6575066ef7b
|
diff --git a/src/sap.ui.core/src/sap/ui/model/odata/v2/ODataModel.js b/src/sap.ui.core/src/sap/ui/model/odata/v2/ODataModel.js
index <HASH>..<HASH> 100644
--- a/src/sap.ui.core/src/sap/ui/model/odata/v2/ODataModel.js
+++ b/src/sap.ui.core/src/sap/ui/model/odata/v2/ODataModel.js
@@ -3352,7 +3352,7 @@ sap.ui.define([
abortRequest(oRequest);
}
});
- } else if (sGroupId){
+ } else if (sGroupId && !mParameters){
each(oRequestGroup.map, function(sKey, oRequest){
abortRequest(oRequest);
});
diff --git a/src/sap.ui.core/test/sap/ui/core/qunit/odata/v2/PendingChanges.qunit.js b/src/sap.ui.core/test/sap/ui/core/qunit/odata/v2/PendingChanges.qunit.js
index <HASH>..<HASH> 100644
--- a/src/sap.ui.core/test/sap/ui/core/qunit/odata/v2/PendingChanges.qunit.js
+++ b/src/sap.ui.core/test/sap/ui/core/qunit/odata/v2/PendingChanges.qunit.js
@@ -208,4 +208,36 @@ sap.ui.define([
});
});
});
+
+ QUnit.test("Abort requests with same key only", function(assert) {
+ var done = assert.async();
+ var that = this;
+ var bAborted = false;
+
+ that.oModel.metadataLoaded().then(function () {
+
+ that.oModel.read("/ProductSet('AD-1000')", {
+ success: function(){
+ that.oModel.setProperty("/ProductSet('AD-1000')/Name", "newName");
+
+ that.oModel.submitChanges({success: function(){
+ assert.ok(!bAborted, "Not related change was not aborted.");
+ done();
+ }});
+
+ //Trigger other request with same change group, which shouldn't be aborted
+ that.oModel.update("/ProductSet('HT-1000')", {"Name": "Should not be aborted"}, {
+ groupId: "changes",
+ error: function(){
+ bAborted = true;
+ }
+ });
+ }
+ });
+
+ });
+
+
+ });
+
});
\ No newline at end of file
|
[FIX] v2/ODataModel: Prevent unexpected error handler calls
This change fixes an issue where error handlers of deferred requests were called unexpectedly.
BCP: <I>
Change-Id: I<I>fc<I>cc<I>c<I>a7ee<I>eaff<I>befbde5e8
|
SAP_openui5
|
train
|
fb9765aca106d56a91897956c40901255c81d800
|
diff --git a/recast-demo/src/main/java/org/recast4j/demo/builder/TileNavMeshBuilder.java b/recast-demo/src/main/java/org/recast4j/demo/builder/TileNavMeshBuilder.java
index <HASH>..<HASH> 100644
--- a/recast-demo/src/main/java/org/recast4j/demo/builder/TileNavMeshBuilder.java
+++ b/recast-demo/src/main/java/org/recast4j/demo/builder/TileNavMeshBuilder.java
@@ -43,7 +43,8 @@ public class TileNavMeshBuilder extends AbstractNavMeshBuilder {
private final ExecutorService executor;
public TileNavMeshBuilder() {
- executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() / 2, new RecastBuilderThreadFactory());
+ executor = Executors.newFixedThreadPool(Math.max(1, Runtime.getRuntime().availableProcessors() / 2),
+ new RecastBuilderThreadFactory());
}
public Tupple2<List<RecastBuilderResult>, NavMesh> build(DemoInputGeomProvider m_geom, PartitionType m_partitionType,
diff --git a/recast-demo/src/main/java/org/recast4j/demo/tool/DynamicUpdateTool.java b/recast-demo/src/main/java/org/recast4j/demo/tool/DynamicUpdateTool.java
index <HASH>..<HASH> 100644
--- a/recast-demo/src/main/java/org/recast4j/demo/tool/DynamicUpdateTool.java
+++ b/recast-demo/src/main/java/org/recast4j/demo/tool/DynamicUpdateTool.java
@@ -120,7 +120,8 @@ public class DynamicUpdateTool implements Tool {
private float[] raycastHitPos;
public DynamicUpdateTool() {
- executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() / 2, new RecastBuilderThreadFactory());
+ executor = Executors.newFixedThreadPool(Math.max(1, Runtime.getRuntime().availableProcessors() / 2),
+ new RecastBuilderThreadFactory());
bridgeGeom = new ObjImporter().load(getClass().getClassLoader().getResourceAsStream("bridge.obj"));
houseGeom = new ObjImporter().load(getClass().getClassLoader().getResourceAsStream("house.obj"));
convexGeom = new ObjImporter().load(getClass().getClassLoader().getResourceAsStream("convex.obj"));
|
Enforce a minimum of one thread when creating thread pools (fixes #<I>)
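
Java's Executors.newFixedThreadPool throws IllegalArgumentException for a non-positive size, so availableProcessors() / 2 breaks on single-core machines. The same guard as a Python sketch (ThreadPoolExecutor likewise rejects max_workers <= 0):

import os
from concurrent.futures import ThreadPoolExecutor

workers = max(1, (os.cpu_count() or 1) // 2)   # floor at one thread
executor = ThreadPoolExecutor(max_workers=workers)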
|
ppiastucki_recast4j
|
train
|
d39df1d811050c812a038dc00b6fc2bb85788e52
|
diff --git a/EloquentUserProvider.php b/EloquentUserProvider.php
index <HASH>..<HASH> 100755
--- a/EloquentUserProvider.php
+++ b/EloquentUserProvider.php
@@ -75,9 +75,13 @@ class EloquentUserProvider implements UserProvider
{
$user->setRememberToken($token);
+ $timestamps = $user->timestamps;
+
$user->timestamps = false;
$user->save();
+
+ $user->timestamps = $timestamps;
}
/**
|
Restore user timestamps property after save.
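
A Python sketch of the save/restore pattern (hypothetical model API): the original flag is restored rather than hardcoded back to true, so callers that disabled timestamps stay that way.

def update_remember_token(user, token):
    user.remember_token = token
    saved = user.timestamps       # remember the caller's setting
    user.timestamps = False       # skip touching updated_at for this save
    try:
        user.save()
    finally:
        user.timestamps = saved   # restore, don't assume it was True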
|
illuminate_auth
|
train
|
2eae37d58bdcea958638c77fc7ac8f58cc1c2145
|
diff --git a/src/Sulu/Bundle/WebsiteBundle/Admin/SuluWebsiteAdmin.php b/src/Sulu/Bundle/WebsiteBundle/Admin/SuluWebsiteAdmin.php
index <HASH>..<HASH> 100644
--- a/src/Sulu/Bundle/WebsiteBundle/Admin/SuluWebsiteAdmin.php
+++ b/src/Sulu/Bundle/WebsiteBundle/Admin/SuluWebsiteAdmin.php
@@ -17,9 +17,9 @@ use Sulu\Bundle\AdminBundle\Navigation\NavigationItem;
class SuluWebsiteAdmin extends Admin
{
- public function __construct()
+ public function __construct($title)
{
- $rootNavigationItem = new NavigationItem('Root');
+ $rootNavigationItem = new NavigationItem($title);
$this->setNavigation(new Navigation($rootNavigationItem));
}
@@ -31,4 +31,4 @@ class SuluWebsiteAdmin extends Admin
return array();
}
-}
\ No newline at end of file
+}
diff --git a/src/Sulu/Bundle/WebsiteBundle/Resources/config/services.yml b/src/Sulu/Bundle/WebsiteBundle/Resources/config/services.yml
index <HASH>..<HASH> 100644
--- a/src/Sulu/Bundle/WebsiteBundle/Resources/config/services.yml
+++ b/src/Sulu/Bundle/WebsiteBundle/Resources/config/services.yml
@@ -5,6 +5,7 @@ parameters:
services:
sulu_website.admin:
class: %sulu_website.admin.class%
+ arguments: [%sulu_admin.name%]
tags:
- { name: sulu.admin }
diff --git a/src/Sulu/Bundle/WebsiteBundle/Routing/PortalRouteProvider.php b/src/Sulu/Bundle/WebsiteBundle/Routing/PortalRouteProvider.php
index <HASH>..<HASH> 100644
--- a/src/Sulu/Bundle/WebsiteBundle/Routing/PortalRouteProvider.php
+++ b/src/Sulu/Bundle/WebsiteBundle/Routing/PortalRouteProvider.php
@@ -78,7 +78,7 @@ class PortalRouteProvider implements RouteProviderInterface
$language = $this->requestAnalyzer->getCurrentLocalization()->getLanguage();
// Set current theme
- $this->activeTheme->setName($portal->getTheme()->getKey());
+ $this->activeTheme->setName($portal->getWorkspace()->getTheme()->getKey());
try {
$content = $this->contentMapper->loadByResourceLocator(
diff --git a/src/Sulu/Bundle/WebsiteBundle/Tests/Unit/Sulu/Bundle/WebsiteBundle/Routing/PortalRouteProviderTest.php b/src/Sulu/Bundle/WebsiteBundle/Tests/Unit/Sulu/Bundle/WebsiteBundle/Routing/PortalRouteProviderTest.php
index <HASH>..<HASH> 100644
--- a/src/Sulu/Bundle/WebsiteBundle/Tests/Unit/Sulu/Bundle/WebsiteBundle/Routing/PortalRouteProviderTest.php
+++ b/src/Sulu/Bundle/WebsiteBundle/Tests/Unit/Sulu/Bundle/WebsiteBundle/Routing/PortalRouteProviderTest.php
@@ -14,6 +14,7 @@ use Sulu\Component\Content\Exception\ResourceLocatorNotFoundException;
use Sulu\Component\Workspace\Localization;
use Sulu\Component\Workspace\Portal;
use Sulu\Component\Workspace\Theme;
+use Sulu\Component\Workspace\Workspace;
use Symfony\Component\HttpFoundation\Request;
class PortalRouteProviderTest extends \PHPUnit_Framework_TestCase
@@ -27,7 +28,9 @@ class PortalRouteProviderTest extends \PHPUnit_Framework_TestCase
$portal->setKey('portal');
$theme = new Theme();
$theme->setKey('theme');
- $portal->setTheme($theme);
+ $workspace = new Workspace();
+ $workspace->setTheme($theme);
+ $portal->setWorkspace($workspace);
$localization = new Localization();
$localization->setLanguage('de');
@@ -58,7 +61,9 @@ class PortalRouteProviderTest extends \PHPUnit_Framework_TestCase
$portal->setKey('portal');
$theme = new Theme();
$theme->setKey('theme');
- $portal->setTheme($theme);
+ $workspace = new Workspace();
+ $workspace->setTheme($theme);
+ $portal->setWorkspace($workspace);
$localization = new Localization();
$localization->setLanguage('de');
@@ -91,7 +96,9 @@ class PortalRouteProviderTest extends \PHPUnit_Framework_TestCase
$portal->setKey('portal');
$theme = new Theme();
$theme->setKey('theme');
- $portal->setTheme($theme);
+ $workspace = new Workspace();
+ $workspace->setTheme($theme);
+ $portal->setWorkspace($workspace);
$structure = $this->getStructureMock($uuid);
$requestAnalyzer = $this->getRequestAnalyzerRedirectMock($portal);
|
corrected theme config due to changes in the library
|
sulu_sulu
|
train
|
8786a8e5f56ff1f88ad8f9f4de02385ae3d97f69
|
diff --git a/dist/component.js b/dist/component.js
index <HASH>..<HASH> 100644
--- a/dist/component.js
+++ b/dist/component.js
@@ -30,7 +30,7 @@ var Component = (function (_React$Component) {
_get(_React$Component.prototype, "constructor", this).call(this, props);
// Set initial state to merged object from '_getInitialState()'
- this.state = Object.assign({}, this._getInitialState());
+ this.state = Object.assign({}, this._getInitialState(), this.state);
// If options `shouldAutoBind` is true (default),
// bind all methods to class instance (instead of window in browser)
diff --git a/lib/component.js b/lib/component.js
index <HASH>..<HASH> 100644
--- a/lib/component.js
+++ b/lib/component.js
@@ -15,7 +15,7 @@ export default class Component extends React.Component {
super(props);
// Set initial state to merged object from '_getInitialState()'
- this.state = Object.assign({}, this._getInitialState());
+ this.state = Object.assign({}, this._getInitialState(), this.state);
// If options `shouldAutoBind` is true (default),
// bind all methods to class instance (instead of window in browser)
diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
{
"name": "react-class-helper",
"description": "Helper for ES6 class with React (autobind, mixins, ...)",
- "version": "0.1.0",
+ "version": "0.1.1",
"homepage": "https://github.com/SimonDegraeve/react-class-helper",
"author": "Simon Degraeve <simon.degraeve@gmail.com>",
"repository": {
|
Fix issue where state is defined before calling the parent constructor
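
The merge-order point as a Python sketch: with dict merging, later sources win, so the subclass's pre-set state must come last to survive the defaults.

defaults = {'count': 0, 'open': False}
preset = {'open': True}          # state set before the parent constructor ran

state = {**defaults, **preset}   # preset wins: {'count': 0, 'open': True}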
|
SimonDegraeve_react-class-helper
|
train
|
e4a05f177e95ab62a7ddb6204377a87dcff2b2db
|
diff --git a/src/main/java/org/mariadb/jdbc/client/impl/StandardClient.java b/src/main/java/org/mariadb/jdbc/client/impl/StandardClient.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/mariadb/jdbc/client/impl/StandardClient.java
+++ b/src/main/java/org/mariadb/jdbc/client/impl/StandardClient.java
@@ -13,10 +13,10 @@ import java.net.SocketException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLNonTransientConnectionException;
+import java.time.DateTimeException;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;
-import java.time.zone.ZoneRulesException;
import java.util.*;
import java.util.concurrent.Executor;
import java.util.concurrent.locks.ReentrantLock;
@@ -399,11 +399,12 @@ public class StandardClient implements Client, AutoCloseable {
// try to avoid timezone consideration if server use the same one
try {
- if (ZoneId.of(serverTz).normalized().equals(clientZoneId)
- || ZoneId.of(serverTz, ZoneId.SHORT_IDS).equals(clientZoneId)) {
+ ZoneId serverZoneId = ZoneId.of(serverTz);
+ if (serverZoneId.normalized().equals(clientZoneId)
+ || ZoneId.of(serverTz, ZoneId.SHORT_IDS).equals(clientZoneId)) {
mustSetTimezone = false;
}
- } catch (ZoneRulesException e) {
+ } catch (DateTimeException e) {
// eat
}
|
[misc] ensure the timezone is set when the server reports a Windows zone id unknown to Java
|
MariaDB_mariadb-connector-j
|
train
|
4b73d48254749bbf08c42126f10b16ac2d71f004
|
diff --git a/wkhtmltopdf/utils.py b/wkhtmltopdf/utils.py
index <HASH>..<HASH> 100644
--- a/wkhtmltopdf/utils.py
+++ b/wkhtmltopdf/utils.py
@@ -93,8 +93,13 @@ def wkhtmltopdf(pages, output=None, **kwargs):
list(pages),
[output]))
ck_kwargs = {'env': env}
- if hasattr(sys.stderr, 'fileno'):
+ try:
+ i = sys.stderr.fileno()
ck_kwargs['stderr'] = sys.stderr
+ except AttributeError:
+ # can't call fileno() on mod_wsgi stderr object
+ pass
+
return check_output(ck_args, **ck_kwargs)
|
Patch python3 / mod_wsgi incompatibility on sys.stderr.fileno() call
Root cause of this issue is in python3 / mod_wsgi (see <URL>). The patch simply skips setting stderr if its fileno() method cannot be called, to avoid the AttributeError that occurs otherwise. Pull request: <URL>
|
incuna_django-wkhtmltopdf
|
train
|
9e654aecbe8675f20ca13a52c6314f817dbe057e
|
diff --git a/chunkypipes/util/__init__.py b/chunkypipes/util/__init__.py
index <HASH>..<HASH> 100644
--- a/chunkypipes/util/__init__.py
+++ b/chunkypipes/util/__init__.py
@@ -45,10 +45,10 @@ def execute_from_command_line(argv=None):
print_help_text()
sys.exit(0)
- chunky_home_root = os.environ.get('CHUNKY_HOME') or os.path.expanduser('~')
- if not os.path.exists(os.path.join(chunky_home_root, '.chunky')):
- print_no_init()
- sys.exit(1)
+ # chunky_home_root = os.environ.get('CHUNKY_HOME') or os.path.expanduser('~')
+ # if not os.path.exists(os.path.join(chunky_home_root, '.chunky')):
+ # print_no_init()
+ # sys.exit(1)
send_argv = []
if len(argv) > 2:
|
Bug fix: no command worked without a prior `chunky init`, not even `chunky init` itself
|
djf604_chunky-pipes
|
train
|
228395b0f7621c00a9f591fd00c6599929f998a3
|
diff --git a/deltas/segmenters/paragraphs_sentences_and_whitespace.py b/deltas/segmenters/paragraphs_sentences_and_whitespace.py
index <HASH>..<HASH> 100644
--- a/deltas/segmenters/paragraphs_sentences_and_whitespace.py
+++ b/deltas/segmenters/paragraphs_sentences_and_whitespace.py
@@ -1,14 +1,14 @@
-import re
-
from ..util import LookAhead
from .segmenter import Segmenter
from .segments import MatchableSegment, Segment
-WHITESPACE = set(["whitespace", "break"])
-PARAGRAPH_END = set(["break"])
-SENTENCE_END = set(["period", "epoint", "qmark", "tab_open"])
-SUB_OPEN = set(["brack_open", "dbrack_open", "paren_open", "ref_open"])
-SUB_CLOSE = set(["brack_close", "dbrack_close", "paren_close", "ref_close"])
+WHITESPACE = {"whitespace", "break"}
+PARAGRAPH_END = {"break"}
+SENTENCE_END = {"period", "epoint", "qmark", "tab_open"}
+SUB_OPEN = {"brack_open", "dbrack_open", "paren_open", "ref_open",
+ "comment_start"}
+SUB_CLOSE = {"brack_close", "dbrack_close", "paren_close", "ref_close",
+ "comment_end"}
MIN_SENTENCE = 3
diff --git a/deltas/segmenters/tests/test_paragraphs_sentences_and_whitespace.py b/deltas/segmenters/tests/test_paragraphs_sentences_and_whitespace.py
index <HASH>..<HASH> 100644
--- a/deltas/segmenters/tests/test_paragraphs_sentences_and_whitespace.py
+++ b/deltas/segmenters/tests/test_paragraphs_sentences_and_whitespace.py
@@ -13,7 +13,7 @@ def test_segment():
segmenter = ParagraphsSentencesAndWhitespace()
text = 'This is some text. This is some other text.\n ' + \
- 'A. Peterson is a name that I made up.\n' + \
+ 'A. Peterson is a name that <!-- This is derp. --> I made up.\n' + \
'This is an additional sentence.\n' + \
'\n' + \
'== OMG HEADER ==\n' + \
|
Fixes sentence segmentation for sentences with a comment in them
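
A minimal Python sketch of the idea (hypothetical token stream, not deltas' real segmenter): once comment delimiters count as sub-segment brackets, a period inside a comment no longer ends the sentence.

SENTENCE_END = {"period"}
SUB_OPEN = {"brack_open", "comment_start"}    # comments now open a sub-segment
SUB_CLOSE = {"brack_close", "comment_end"}

def split_sentences(tokens):
    depth, sentence, sentences = 0, [], []
    for kind, text in tokens:
        sentence.append(text)
        if kind in SUB_OPEN:
            depth += 1
        elif kind in SUB_CLOSE:
            depth = max(0, depth - 1)
        elif kind in SENTENCE_END and depth == 0:
            sentences.append("".join(sentence))
            sentence = []
    if sentence:
        sentences.append("".join(sentence))
    return sentences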
|
halfak_deltas
|
train
|
8f6345e5fdee48b7a96082423c82d959a56bd882
|
diff --git a/spec/mongo/server/connection_spec.rb b/spec/mongo/server/connection_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/mongo/server/connection_spec.rb
+++ b/spec/mongo/server/connection_spec.rb
@@ -447,22 +447,22 @@ describe Mongo::Server::Connection do
context 'when the socket_timeout is negative' do
- let(:socket) do
- connection.connect!
- connection.send(:socket)
+ let(:messages) do
+ [ insert ]
end
before do
- allow(socket).to receive(:timeout).and_return(-(Time.now.to_i))
+ connection.send(:write, messages)
+ connection.send(:socket).instance_variable_set(:@timeout, -(Time.now.to_i))
end
- let(:query) do
- Mongo::Protocol::Query.new(TEST_DB, TEST_COLL, { 'name' => 'testing' })
+ let(:reply) do
+ connection.send(:read, messages.last.request_id)
end
it 'raises a timeout error' do
expect {
- connection.dispatch([ query ])
+ reply
}.to raise_exception(Timeout::Error)
end
end
|
RUBY-<I> Try another method of mocking socket timeout
|
mongodb_mongo-ruby-driver
|
train
|
434e1f988efd36a9b1c3dcfdde4ba06eea8f0644
|
diff --git a/lib/omnibus/software.rb b/lib/omnibus/software.rb
index <HASH>..<HASH> 100644
--- a/lib/omnibus/software.rb
+++ b/lib/omnibus/software.rb
@@ -279,7 +279,7 @@ module Omnibus
# implementation
# @todo Why the caching of the URI?
def source_uri
- @source_uri ||= URI(@source[:url])
+ @source_uri ||= URI(source[:url])
end
# @param val [Boolean]
@@ -304,7 +304,7 @@ module Omnibus
# across two classes, one of which is a specific interface
# implementation
def checksum
- @source[:md5]
+ source[:md5]
end
# @todo Should this ever be legitimately used in the DSL? It
|
use the source method when creating the uri and checksum
This way the overrides are applied instead of just using the default values in `@source`
|
chef_omnibus
|
train
|
2e98b15e5f2a884c7e5b171f1d7691f612769f2a
|
diff --git a/test/test_cursor.py b/test/test_cursor.py
index <HASH>..<HASH> 100644
--- a/test/test_cursor.py
+++ b/test/test_cursor.py
@@ -182,6 +182,18 @@ class TestCursor(unittest.TestCase):
for x in range(500):
db.test.save({"x": x})
+ curs = db.test.find().limit(0).batch_size(10)
+ curs.next()
+ self.assertEquals(10, curs._Cursor__retrieved)
+
+ curs = db.test.find().limit(-2).batch_size(0)
+ curs.next()
+ self.assertEquals(2, curs._Cursor__retrieved)
+
+ curs = db.test.find().limit(-4).batch_size(5)
+ curs.next()
+ self.assertEquals(4, curs._Cursor__retrieved)
+
curs = db.test.find().limit(50).batch_size(500)
curs.next()
self.assertEquals(50, curs._Cursor__retrieved)
@@ -194,7 +206,7 @@ class TestCursor(unittest.TestCase):
curs.next()
self.assertEquals(50, curs._Cursor__retrieved)
- # this one might be shaky, as the default
+ # these two might be shaky, as the default
# is set by the server. as of 2.0.0-rc0, 101
# or 1MB (whichever is smaller) is default
# for queries without ntoreturn
@@ -202,6 +214,10 @@ class TestCursor(unittest.TestCase):
curs.next()
self.assertEquals(101, curs._Cursor__retrieved)
+ curs = db.test.find().limit(0).batch_size(0)
+ curs.next()
+ self.assertEquals(101, curs._Cursor__retrieved)
+
def test_skip(self):
db = self.db
|
Add a few more tests PYTHON-<I>
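For context, a hedged sketch of the limit/batch_size semantics these tests pin down (assumes a local mongod; setup mirrors the test): batch_size caps the documents per batch, a positive limit caps the total, and a negative limit asks the server for a single batch of at most |n| documents before closing the cursor.

from pymongo import MongoClient

db = MongoClient().test_db                        # assumes a local mongod
db.test.insert_many({'x': x} for x in range(500))

curs = db.test.find().limit(0).batch_size(10)
next(curs)    # first batch holds 10 documents

curs = db.test.find().limit(-4).batch_size(5)
next(curs)    # single batch of min(4, 5) == 4 documents, cursor then closes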
|
mongodb_mongo-python-driver
|
train
|
891c0e5c73b0b59cc18bfc70e6c1ebadf93ab51a
|
diff --git a/recsql/rest_table.py b/recsql/rest_table.py
index <HASH>..<HASH> 100644
--- a/recsql/rest_table.py
+++ b/recsql/rest_table.py
@@ -63,7 +63,15 @@ with
Module content
--------------
-.. See the autogenerated content in the online docs or the source code.
+.. See the autogenerated content in the online docs or the source
+code.
+
+.. autoclass:: Table2array
+.. autoclass:: Autoconverter
+.. autofunction:: besttype
+
+.. autoexception:: ParseError
+
"""
@@ -128,8 +136,17 @@ class Table2array(object):
interpreted as integers (1 in this case).
"""
- def __init__(self, string):
- """Table2array(string) --> parser"""
+ def __init__(self, string, autoconvert=False):
+ """Table2array(string) --> parser
+
+ :Arguments:
+ *string*
+ string to be parsed
+ *autoconvert*
+ EXPERIMENTAL. ``True``: replace certain values
+ with special python values. ``False``: leave everything
+ as it is (numbers as numbers and strings as strings).
+ """
self.string = string
m = TABLE.search(string) # extract table from string with regular expression
if m is None:
@@ -142,6 +159,8 @@ class Table2array(object):
#: parsed table as records (populate with :meth:`Table2array.parse`)
self.records = None
+ self.autoconvert = Autoconverter(active=autoconvert).convert
+
def parse(self):
"""Parse the table data string into records."""
@@ -150,7 +169,7 @@ class Table2array(object):
for line in self.t['data'].split('\n'):
if EMPTY_ROW.match(line):
continue
- row = [besttype(line[start_field:end_field+1])
+ row = [self.autoconvert(besttype(line[start_field:end_field+1]))
for start_field, end_field in self.fields]
records.append(tuple(row))
self.records = records
@@ -192,6 +211,57 @@ class Table2array(object):
self.names = names
self.fields = fields
+class Autoconverter(object):
+ """Automatically convert an input value to a special python object.
+
+ The :meth:`Autoconverter.convert` method turns the value into a special
+ python value, for instance
+ '---' ---> ``None``
+ 'x' ---> ``True``
+
+ .. function:: Autoconverter.convert(x)
+ Convert *x* (if in the active state)
+ .. attribute:: active
+ If set to ``True`` then conversion takes place;
+ ``False`` just returns the input values.
+ """
+
+ def __init__(self, mapping=None, active=True):
+ """Initialize the converter.
+
+ :Arguments:
+ - *mapping*: any dict-like mapping that supports lookup. If
+ None then the hard-coded defaults (See source) are used.
+ - *active* = True. initial state of the
+ :attr:`Autoconverter.active` toggle.
+ """
+ if mapping is None:
+ mapping = {'---': None, 'none':None, '':None,
+ 'True':True, 'x': True, 'X':True, 'yes':True,
+ 'False':False, 'no': False, '-':False}
+ self.mapping = mapping
+ self.__active = None
+ self.active = active
+
+ def active():
+ doc = """Toggle the state of the Autoconverter."""
+ def fget(self):
+ return self.__active
+ def fset(self, x):
+ self.__active = x
+ if self.__active:
+ self.convert = self._convert
+ else:
+ self.convert = lambda x: x
+ return locals()
+ active = property(**active())
+
+ def _convert(self, x):
+ try:
+ return self.mapping[x]
+ except KeyError:
+ return x
+
def besttype(x):
"""Convert string x to the most useful type, i.e. int, float or str.
|
added Autoconverter and autoconversion to None, True, False
git-svn-id: svn+ssh://gonzo.med.jhmi.edu/scratch/svn/woolf_repository/users/oliver/Library/RecSQL@<I> df5ba8eb-4b0b-<I>-8c<I>-c<I>f<I>b<I>c
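Usage sketch, inferred from the Autoconverter class added in this diff:

conv = Autoconverter(active=True)
conv.convert('---')    # -> None
conv.convert('x')      # -> True
conv.convert('42')     # -> '42' (not in the mapping, passed through unchanged)

conv.active = False    # the property setter swaps convert for the identity function
conv.convert('---')    # -> '---'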
|
orbeckst_RecSQL
|
train
|
706e66dac26799c5cde0b6b67604488e1081904f
|
diff --git a/src/mimerender.py b/src/mimerender.py
index <HASH>..<HASH> 100644
--- a/src/mimerender.py
+++ b/src/mimerender.py
@@ -286,7 +286,7 @@ try:
del flask.request.environ[key]
def _make_response(self, content, content_type, status):
- response = flask._make_response(content)
+ response = flask.make_response(content)
response.status = status
response.headers['Content-Type'] = content_type
return response
|
fixed call to flask.make_response
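flask.make_response is the public helper the code should have been calling; flask._make_response does not exist. A minimal sketch of the pattern (route and body are illustrative):

import flask

app = flask.Flask(__name__)

@app.route('/data')
def data():
    # wrap the body in a Response, then adjust status and headers,
    # exactly as the fixed _make_response above does
    response = flask.make_response('{"ok": true}')
    response.status = '200 OK'
    response.headers['Content-Type'] = 'application/json'
    return response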
|
martinblech_mimerender
|
train
|
5c874584bedd894f4d06516c1f628cd6f0f38e0d
|
diff --git a/command/agent/http.go b/command/agent/http.go
index <HASH>..<HASH> 100644
--- a/command/agent/http.go
+++ b/command/agent/http.go
@@ -52,6 +52,8 @@ func (s *HTTPServer) Shutdown() {
// registerHandlers is used to attach our handlers to the mux
func (s *HTTPServer) registerHandlers() {
+ s.mux.HandleFunc("/", s.Index)
+
s.mux.HandleFunc("/v1/status/leader", s.wrap(s.StatusLeader))
s.mux.HandleFunc("/v1/status/peers", s.wrap(s.StatusPeers))
@@ -96,6 +98,15 @@ func (s *HTTPServer) wrap(handler func(resp http.ResponseWriter, req *http.Reque
return f
}
+// Renders a simple index page
+func (s *HTTPServer) Index(resp http.ResponseWriter, req *http.Request) {
+ if req.URL.Path == "/" {
+ resp.Write([]byte("Consul Agent"))
+ } else {
+ resp.WriteHeader(404)
+ }
+}
+
// decodeBody is used to decode a JSON request body
func decodeBody(req *http.Request, out interface{}) error {
dec := json.NewDecoder(req.Body)
|
Adding index page with <I> catchall
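In Go's net/http mux the pattern "/" matches every path that nothing else claims, so the handler must check the path itself and 404 anything that isn't exactly "/". A rough Python analog of that shape (port and body are illustrative):

from http.server import BaseHTTPRequestHandler, HTTPServer

class Index(BaseHTTPRequestHandler):
    def do_GET(self):
        if self.path == '/':                  # only the exact root gets the page
            self.send_response(200)
            self.end_headers()
            self.wfile.write(b'Consul Agent')
        else:
            self.send_error(404)              # everything else is a 404

# HTTPServer(('', 8500), Index).serve_forever()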
|
hashicorp_consul
|
train
|
4763690c03e47be195a1bd4ce4c1ffd21c1b9dcf
|
diff --git a/webpack.config.js b/webpack.config.js
index <HASH>..<HASH> 100644
--- a/webpack.config.js
+++ b/webpack.config.js
@@ -2,6 +2,21 @@ const path = require('path')
const webpack = require('webpack')
const config = require('./config')
+let plugins = [
+ new webpack.optimize.DedupePlugin(),
+ // Webpack 1.0
+ new webpack.optimize.OccurenceOrderPlugin(),
+ // Webpack 2.0 fixed this misspelling
+ // new webpack.optimize.OccurrenceOrderPlugin(),
+ new webpack.HotModuleReplacementPlugin(),
+ new webpack.NoErrorsPlugin()
+]
+
+if (process.env.NODE_ENV === 'production') {
+ plugins = [new webpack.optimize.UglifyJsPlugin()]
+ .concat(plugins)
+}
+
module.exports = {
context: __dirname,
entry: [
@@ -16,14 +31,7 @@ module.exports = {
publicPath: config.protocol + '://' + config.domain + ':' + config.port + '/dist/',
filename: 'bundle.js'
},
- plugins: [
- // Webpack 1.0
- new webpack.optimize.OccurenceOrderPlugin(),
- // Webpack 2.0 fixed this misspelling
- // new webpack.optimize.OccurrenceOrderPlugin(),
- new webpack.HotModuleReplacementPlugin(),
- new webpack.NoErrorsPlugin()
- ],
+ plugins,
module: {
loaders: [{
test: /\.js|\.jsx/,
|
add minification and dedupe for production builds
|
jeffshaver_safe-app
|
train
|
0d718da666f9e092f12fdf9e2ae2d4d2d6931633
|
diff --git a/src/structure/immutable/__tests__/keys.spec.js b/src/structure/immutable/__tests__/keys.spec.js
index <HASH>..<HASH> 100644
--- a/src/structure/immutable/__tests__/keys.spec.js
+++ b/src/structure/immutable/__tests__/keys.spec.js
@@ -25,4 +25,15 @@ describe('structure.immutable.keys', () => {
List(['a', 'b', 'c'])
)
})
+
+ it('should return keys from plain object', () => {
+ expectEqual(
+ keys({
+ a: 1,
+ b: 2,
+ c: 3
+ }),
+ List(['a', 'b', 'c'])
+ )
+ })
})
diff --git a/src/structure/immutable/keys.js b/src/structure/immutable/keys.js
index <HASH>..<HASH> 100644
--- a/src/structure/immutable/keys.js
+++ b/src/structure/immutable/keys.js
@@ -1,7 +1,14 @@
import {Iterable, List} from 'immutable'
+import plainKeys from '../plain/keys'
const empty = List()
-const keys = value => (Iterable.isIterable(value) ? value.keySeq() : empty)
+const keys = value => {
+ if (Iterable.isIterable(value)) {
+ return value.keySeq()
+ }
+
+ return value ? List(plainKeys(value)) : empty
+}
export default keys
|
add support for plain objects in keys extractor from immutable structure (#<I>)
|
erikras_redux-form
|
train
|
d7b4adc852e06159f420539b0c09da3be0ce9b66
|
diff --git a/ttyutils_darwin.go b/ttyutils_darwin.go
index <HASH>..<HASH> 100644
--- a/ttyutils_darwin.go
+++ b/ttyutils_darwin.go
@@ -9,15 +9,12 @@ import (
)
const (
- sys_TIOCGETA = 0x40487413
- sys_TIOCSETA = 0x80487414
sys_ISTRIP = 0x20
sys_INLCR = 0x40
sys_ICRNL = 0x100
sys_IGNCR = 0x80
sys_IXON = 0x200
sys_IXOFF = 0x400
- sys_ECHO = 0x8
sys_ICANON = 0x100
sys_ISIG = 0x80
termios_NCCS = 20
@@ -39,7 +36,7 @@ type Termios struct {
func IsTerminal(fd uintptr) bool {
var termios Termios
- _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, fd, uintptr(sys_TIOCGETA), uintptr(unsafe.Pointer(&termios)), 0, 0, 0)
+ _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, fd, uintptr(syscall.TIOCGETA), uintptr(unsafe.Pointer(&termios)), 0, 0, 0)
return err == 0
}
@@ -71,14 +68,14 @@ func ioctl(fd uintptr, cmd uintptr, ptr uintptr) error {
func MakeTerminalRaw(fd uintptr) (*Termios, error) {
var s Termios
- if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, fd, uintptr(sys_TIOCGETA), uintptr(unsafe.Pointer(&s)), 0, 0, 0); err != 0 {
+ if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, fd, uintptr(syscall.TIOCGETA), uintptr(unsafe.Pointer(&s)), 0, 0, 0); err != 0 {
return nil, err
}
oldState := s
s.Iflag &^= sys_ISTRIP | sys_INLCR | sys_ICRNL | sys_IGNCR | sys_IXON | sys_IXOFF
- s.Lflag &^= sys_ECHO | sys_ICANON | sys_ISIG
- if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, fd, uintptr(sys_TIOCSETA), uintptr(unsafe.Pointer(&s)), 0, 0, 0); err != 0 {
+ s.Lflag &^= syscall.ECHO | sys_ICANON | sys_ISIG
+ if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, fd, uintptr(syscall.TIOCSETA), uintptr(unsafe.Pointer(&s)), 0, 0, 0); err != 0 {
return nil, err
}
@@ -86,7 +83,7 @@ func MakeTerminalRaw(fd uintptr) (*Termios, error) {
}
func RestoreTerminalState(fd uintptr, termios *Termios) error {
- _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), uintptr(sys_TIOCSETA), uintptr(unsafe.Pointer(termios)), 0, 0, 0)
+ _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), uintptr(syscall.TIOCSETA), uintptr(unsafe.Pointer(termios)), 0, 0, 0)
return err
}
|
Removed some constants that ARE actually defined by Go on darwin.
|
burke_ttyutils
|
train
|
b15014cb6a6cd03575ca449a8fd2ceeabdc99891
|
diff --git a/src/Surfnet/StepupMiddlewareClient/Identity/Service/IdentityService.php b/src/Surfnet/StepupMiddlewareClient/Identity/Service/IdentityService.php
index <HASH>..<HASH> 100644
--- a/src/Surfnet/StepupMiddlewareClient/Identity/Service/IdentityService.php
+++ b/src/Surfnet/StepupMiddlewareClient/Identity/Service/IdentityService.php
@@ -60,7 +60,7 @@ class IdentityService
*/
public function search(IdentitySearchQuery $searchQuery)
{
- return $this->apiService->read('identity' . $searchQuery->toHttpQuery());
+ return $this->apiService->read('identity', [], $searchQuery);
}
/**
diff --git a/src/Surfnet/StepupMiddlewareClient/Service/ApiService.php b/src/Surfnet/StepupMiddlewareClient/Service/ApiService.php
index <HASH>..<HASH> 100644
--- a/src/Surfnet/StepupMiddlewareClient/Service/ApiService.php
+++ b/src/Surfnet/StepupMiddlewareClient/Service/ApiService.php
@@ -22,6 +22,7 @@ use GuzzleHttp\ClientInterface;
use Surfnet\StepupMiddlewareClient\Exception\AccessDeniedToResourceException;
use Surfnet\StepupMiddlewareClient\Exception\MalformedResponseException;
use Surfnet\StepupMiddlewareClient\Exception\ResourceReadException;
+use Surfnet\StepupMiddlewareClient\Identity\Dto\HttpQuery;
/**
* Provides remote read access to the Middleware's API.
@@ -42,18 +43,39 @@ class ApiService
}
/**
- * @param string $resource A URL path, optionally containing printf parameters (e.g. '/a/b/%s/d'). The parameters
+ * @param string $path A URL path, optionally containing printf parameters (e.g. '/a/b/%s/d'). The parameters
* will be URL encoded and formatted into the path string.
* Example: '/institution/%s/identity/%s', ['institution' => 'ab-cd', 'identity' => 'ef']
* @param array $parameters An array containing the parameters to replace in the path.
+ * @param HttpQuery $httpQuery|null
* @return null|mixed Most likely an array structure, null when the resource doesn't exist.
* @throws AccessDeniedToResourceException When the consumer isn't authorised to access given resource.
* @throws ResourceReadException When the server doesn't respond with the resource.
* @throws MalformedResponseException When the server doesn't respond with (well-formed) JSON.
*/
- public function read($resource, array $parameters = [])
+ public function read($path, array $parameters = [], HttpQuery $httpQuery = null)
{
- $resource = vsprintf($resource, array_map('urlencode', $parameters));
+ if (count($parameters) > 0) {
+ $resource = vsprintf($path, array_map('urlencode', $parameters));
+ } else {
+ $resource = $path;
+ }
+
+ if (empty($resource)) {
+ throw new \RuntimeException(
+ sprintf(
+ 'Could not construct resource path from path "%s", parameters "%s" and search query "%s"',
+ $path,
+ implode('","', $parameters),
+ $httpQuery ? $httpQuery->toHttpQuery() : ''
+ )
+ );
+ }
+
+ if ($httpQuery !== null) {
+ $resource .= $httpQuery->toHttpQuery();
+ }
+
$response = $this->guzzleClient->get($resource, ['exceptions' => false]);
$statusCode = $response->getStatusCode();
|
Make API resource path construction more robust.
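The same construction logic, sketched in Python (function name and query formatting are illustrative, not the bundle's API):

from urllib.parse import quote

def build_resource(path, parameters=(), http_query=None):
    # printf-style path with URL-encoded parameters, then an optional query string
    resource = path % tuple(quote(str(p), safe='') for p in parameters) if parameters else path
    if not resource:
        raise RuntimeError('Could not construct resource path from %r' % path)
    if http_query is not None:
        resource += http_query                # e.g. '?name=john'
    return resource

build_resource('/institution/%s/identity/%s', ['ab-cd', 'ef'])
# -> '/institution/ab-cd/identity/ef'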
|
OpenConext_Stepup-Middleware-clientbundle
|
train
|
fff13f5910a75f15f24ca507627a446337062bf7
|
diff --git a/tests/test_phonetic.py b/tests/test_phonetic.py
index <HASH>..<HASH> 100644
--- a/tests/test_phonetic.py
+++ b/tests/test_phonetic.py
@@ -25,7 +25,7 @@ import unittest
import os
import random
import math
-import bz2
+import codecs
from abydos._compat import _unicode
from abydos.phonetic import russell_index, russell_index_num_to_alpha, \
russell_index_alpha, soundex, dm_soundex, koelner_phonetik, \
@@ -3348,9 +3348,9 @@ class CaverphoneTestCases(unittest.TestCase):
"""test abydos.phonetic.caverphone (PHP version testset)
"""
# https://raw.githubusercontent.com/kiphughes/caverphone/master/unit_tests.php
- with bz2.BZ2File(TESTDIR + '/corpora/php_caverphone.csv.bz2') as php_testset:
+ with open(TESTDIR + '/corpora/php_caverphone.csv') as php_testset:
for php_line in php_testset:
- (word, caver) = php_line.strip().decode('utf-8').split(',')
+ (word, caver) = php_line.strip().split(',')
self.assertEqual(caverphone(word), caver)
def test_caverphone1(self):
@@ -3366,13 +3366,13 @@ class CaverphoneTestCases(unittest.TestCase):
def test_caversham(self):
"""test using Caversham test set (SoundEx, Metaphone, & Caverphone)
"""
- with bz2.BZ2File(TESTDIR + '/corpora/variantNames.csv.bz2') as cav_testset:
+ with open(TESTDIR + '/corpora/variantNames.csv') as cav_testset:
next(cav_testset)
for cav_line in cav_testset:
(name1, soundex1, metaphone1, caverphone1,
name2, soundex2, metaphone2, caverphone2,
soundex_same, metaphone_same, caverphone_same) = \
- cav_line.decode('utf-8').strip().split(',')
+ cav_line.strip().split(',')
self.assertEqual(soundex(name1), soundex1)
self.assertEqual(soundex(name2), soundex2)
@@ -3995,10 +3995,10 @@ class PhonetTestCases(unittest.TestCase):
"""
if not ALLOW_RANDOM:
return
- with bz2.BZ2File(TESTDIR + '/corpora/nachnamen.csv.bz2') as nachnamen_testset:
+ with codecs.open(TESTDIR + '/corpora/nachnamen.csv', encoding='utf-8') as nachnamen_testset:
for nn_line in nachnamen_testset:
if nn_line[0] != '#':
- nn_line = nn_line.strip().decode('utf-8').split(',')
+ nn_line = nn_line.strip().split(',')
# This test set is very large (~10000 entries)
# so let's just randomly select about 100 for testing
if len(nn_line) >= 3 and one_in(100):
@@ -4011,10 +4011,10 @@ class PhonetTestCases(unittest.TestCase):
"""
if not ALLOW_RANDOM:
return
- with bz2.BZ2File(TESTDIR + '/corpora/ngerman.csv.bz2') as ngerman_testset:
+ with codecs.open(TESTDIR + '/corpora/ngerman.csv', encoding='utf-8') as ngerman_testset:
for ng_line in ngerman_testset:
if ng_line[0] != '#':
- ng_line = ng_line.strip().decode('utf-8').split(',')
+ ng_line = ng_line.strip().split(',')
# This test set is very large (~3000000 entries)
# so let's just randomly select about 30 for testing
if len(ng_line) >= 3 and one_in(10000):
@@ -4344,10 +4344,10 @@ class BeiderMorseTestCases(unittest.TestCase):
"""
if not ALLOW_RANDOM:
return
- with bz2.BZ2File(TESTDIR + '/corpora/nachnamen.bm.csv.bz2') as nachnamen_testset:
+ with codecs.open(TESTDIR + '/corpora/nachnamen.bm.csv', encoding='utf-8') as nachnamen_testset:
next(nachnamen_testset)
for nn_line in nachnamen_testset:
- nn_line = nn_line.strip().decode('utf-8').split(',')
+ nn_line = nn_line.strip().split(',')
# This test set is very large (~10000 entries)
# so let's just randomly select about 20 for testing
if nn_line[0] != '#' and one_in(500):
@@ -4361,10 +4361,10 @@ class BeiderMorseTestCases(unittest.TestCase):
"""
if not ALLOW_RANDOM:
return
- with bz2.BZ2File(TESTDIR + '/corpora/uscensus2000.bm.csv.bz2') as uscensus_testset:
+ with open(TESTDIR + '/corpora/uscensus2000.bm.csv') as uscensus_testset:
next(uscensus_testset)
for cen_line in uscensus_testset:
- cen_line = cen_line.decode('utf-8').strip().split(',')
+ cen_line = cen_line.strip().split(',')
# This test set is very large (~150000 entries)
# so let's just randomly select about 20 for testing
if cen_line[0] != '#' and one_in(7500):
|
switched bz2-compressed test corpora to plain csvs
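The pattern change, side by side (file names illustrative): bz2.BZ2File yields bytes that each caller had to decode, while codecs.open yields already-decoded text on both Python 2 and 3.

import bz2, codecs

# before: bytes out of the compressed stream, decoded line by line
with bz2.BZ2File('corpora/example.csv.bz2') as f:
    rows = [line.strip().decode('utf-8').split(',') for line in f]

# after: unicode lines directly
with codecs.open('corpora/example.csv', encoding='utf-8') as f:
    rows = [line.strip().split(',') for line in f]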
|
chrislit_abydos
|
train
|
355c8f98f43c5e3fa987f072e57ca419bdb3d132
|
diff --git a/geopy/geocoders/__init__.py b/geopy/geocoders/__init__.py
index <HASH>..<HASH> 100644
--- a/geopy/geocoders/__init__.py
+++ b/geopy/geocoders/__init__.py
@@ -1,6 +1,6 @@
"""
Each geolocation service you might use, such as Google Maps, Bing Maps, or
-Yahoo BOSS, has its own class in ``geopy.geocoders`` abstracting the service's
+Nominatim, has its own class in ``geopy.geocoders`` abstracting the service's
API. Geocoders each define at least a ``geocode`` method, for resolving a
location from a string, and may define a ``reverse`` method, which resolves a
pair of coordinates to an address. Each Geocoder accepts any credentials
@@ -33,7 +33,8 @@ To find the address corresponding to a set of coordinates:
{'place_id': '654513', 'osm_type': 'node', ...}
Locators' ``geolocate`` and ``reverse`` methods require the argument ``query``,
-and also accept at least the argument ``exactly_one``, which is ``True``.
+and also accept at least the argument ``exactly_one``, which is ``True`` by
+default.
Geocoders may have additional attributes, e.g., Bing accepts ``user_location``,
the effect of which is to bias results near that location. ``geolocate``
and ``reverse`` methods may return three types of values:
@@ -44,30 +45,30 @@ and ``reverse`` methods may return three types of values:
result is found, returns a :class:`geopy.location.Location` object, which
can be iterated over as:
- (address<String>, (latitude<Float>, longitude<Float>))
+ ``(address<String>, (latitude<Float>, longitude<Float>))``
- Or can be accessed as `Location.address`, `Location.latitude`,
- `Location.longitude`, `Location.altitude`, and `Location.raw`. The
- last contains the geocoder's unparsed response for this result.
+ Or can be accessed as ``Location.address``, ``Location.latitude``,
+ ``Location.longitude``, ``Location.altitude``, and ``Location.raw``. The
+ last contains the full geocoder's response for this result.
- When ``exactly_one`` is False, and there is at least one result, returns a
list of :class:`geopy.location.Location` objects, as above:
- [Location, [...]]
+ ``[Location, [...]]``
If a service is unavailable or otherwise returns a non-OK response, or doesn't
receive a response in the allotted timeout, you will receive one of the
`Exceptions`_ detailed below.
-Every geocoder accepts an argument ``format_string`` that defaults to '%s'
+Every geocoder accepts an argument ``format_string`` that defaults to ``'%s'``
where the input string to geocode is interpolated. For example, if you only
need to geocode locations in Cleveland, Ohio, you could do::
- >>> from geopy.geocoders import GeocoderDotUS
- >>> geolocator = GeocoderDotUS(format_string="%s, Cleveland OH")
+ >>> from geopy.geocoders import GoogleV3
+ >>> geolocator = GoogleV3(format_string="%s, Cleveland OH")
>>> address, (latitude, longitude) = geolocator.geocode("11111 Euclid Ave")
>>> print(address, latitude, longitude)
- 11111 Euclid Ave, Cleveland, OH 44106 41.506784 -81.608148
+ Thwing Center, 11111 Euclid Ave, Cleveland, OH 44106, USA 41.5074066 -81.60832649999999
"""
|
Geocoders doc: remove refs to Yahoo and GeocoderDotUS
|
geopy_geopy
|
train
|
da907fd7811947b9c27a8f5b6cc31fe50f3caa7e
|
diff --git a/src/nsPopover.js b/src/nsPopover.js
index <HASH>..<HASH> 100644
--- a/src/nsPopover.js
+++ b/src/nsPopover.js
@@ -67,7 +67,13 @@
var displayer_ = {
id_: undefined,
- display: function(popover, delay, e) {
+ /**
+ * Set the display property of the popover to 'block' after |delay| seconds.
+ *
+ * @param delay {Number} The time (in seconds) to wait before setting the display property.
+ * @param e {Event} The event which caused the popover to be shown.
+ */
+ display: function(delay, e) {
// Disable popover if ns-popover value is false
if ($parse(attrs.nsPopover)(scope) === false) {
return;
@@ -126,12 +132,9 @@
/**
* Set the display property of the popover to 'none' after |delay| milliseconds.
*
- * @param popover {Object} The popover to set the display property.
* @param delay {Number} The time (in seconds) to wait before setting the display property.
- * @returns {Object|promise} A promise returned from the $timeout service that can be used
- * to cancel the hiding operation.
*/
- hide: function(popover, delay) {
+ hide: function(delay) {
$timeout.cancel(hider_.id_);
// delay the hiding operation for 1.5s by default.
@@ -145,7 +148,7 @@
elm.off('click', buttonClickHandler);
$popover.isOpen = false;
displayer_.cancel();
- popover.css('display', 'none');
+ $popover.css('display', 'none');
}, delay*1000);
},
@@ -204,12 +207,12 @@
});
scope.hidePopover = function() {
- hider_.hide($popover, 0);
+ hider_.hide(0);
};
scope.$on('ns:popover:hide', function(ev, group) {
if (options.group === group) {
- scope.hidePopover();
+ scope.hidePopover();
}
});
@@ -243,19 +246,19 @@
if (options.angularEvent) {
$rootScope.$on(options.angularEvent, function() {
hider_.cancel();
- displayer_.display($popover, options.popupDelay);
+ displayer_.display(options.popupDelay);
});
} else {
elm.on(options.trigger, function(e) {
e.preventDefault();
hider_.cancel();
- displayer_.display($popover, options.popupDelay, e);
+ displayer_.display(options.popupDelay, e);
});
}
elm
.on('mouseout', function() {
- hider_.hide($popover, options.timeout);
+ hider_.hide(options.timeout);
})
.on('mouseover', function() {
hider_.cancel();
@@ -263,7 +266,7 @@
$popover
.on('mouseout', function(e) {
- hider_.hide($popover, options.timeout);
+ hider_.hide(options.timeout);
})
.on('mouseover', function() {
hider_.cancel();
@@ -415,7 +418,7 @@
function insideClickHandler() {
if ($popover.isOpen) {
- hider_.hide($popover, 0);
+ hider_.hide(0);
}
}
@@ -423,7 +426,7 @@
if ($popover.isOpen && e.target !== elm[0]) {
var id = $popover[0].id;
if (!isInPopover(e.target)) {
- hider_.hide($popover, 0);
+ hider_.hide(0);
}
}
@@ -449,7 +452,7 @@
function buttonClickHandler() {
if ($popover.isOpen) {
- hider_.hide($popover, 0);
+ hider_.hide(0);
}
}
}
|
Remove 'popover' parameter from displayer/hider as it's redundant.
|
nohros_nsPopover
|
train
|
1639d01d70726e50db27d4a1d646ff673ef4338b
|
diff --git a/src/main/java/hex/glm/GLMModel.java b/src/main/java/hex/glm/GLMModel.java
index <HASH>..<HASH> 100644
--- a/src/main/java/hex/glm/GLMModel.java
+++ b/src/main/java/hex/glm/GLMModel.java
@@ -155,13 +155,6 @@ public class GLMModel extends Model implements Comparable<GLMModel> {
rank = r;
this.sparseCoef = sparseCoef;
}
- @Override
- public Submodel clone(){
- Submodel sm = new Submodel(lambda_value,beta == null?null:beta.clone(),norm_beta == null?null:norm_beta.clone(),run_time,iteration,sparseCoef);
- sm.validation = validation;
- sm.xvalidation = xvalidation;
- return sm;
- }
}
@API(help = "models computed for particular lambda_value values")
@@ -264,7 +257,7 @@ public class GLMModel extends Model implements Comparable<GLMModel> {
_res = (GLMModel)H2O.get(_modelKey).get().clone();
Submodel sm = _res.submodelForLambda(_lambda);
assert sm != null:"GLM[" + _modelKey + "]: missing submodel for lambda " + _lambda;
- sm = sm.clone();
+ sm = (Submodel)sm.clone();
_res.submodels = new Submodel[]{sm};
_res.setSubmodelIdx(0);
tryComplete();
@@ -278,7 +271,7 @@ public class GLMModel extends Model implements Comparable<GLMModel> {
public GLMModel atomic(GLMModel old) {
old.submodels = old.submodels.clone();
int id = old.submodelIdForLambda(lambda);
- old.submodels[id] = old.submodels[id].clone();
+ old.submodels[id] = (Submodel)old.submodels[id].clone();
old.submodels[id].xvalidation = val;
old.pickBestModel(false);
return old;
@@ -295,15 +288,16 @@ public class GLMModel extends Model implements Comparable<GLMModel> {
if(old.submodels == null){
old.submodels = new Submodel[]{sm};
} else {
- old.submodels = old.submodels.clone();
int id = old.submodelIdForLambda(lambda);
if (id < 0) {
id = -id - 1;
old.submodels = Arrays.copyOf(old.submodels, old.submodels.length + 1);
for (int i = old.submodels.length - 1; i > id; --i)
old.submodels[i] = old.submodels[i - 1];
- } else if(old.submodels[id].iteration >= sm.iteration)
+ } else if (old.submodels[id].iteration > sm.iteration)
return old;
+ else
+ old.submodels = old.submodels.clone();
old.submodels[id] = sm;
old.run_time = Math.max(old.run_time,sm.run_time);
}
|
GLM2 bugfix: removed outdated Submodel clone.
|
h2oai_h2o-2
|
train
|
41f4d269b06101c13357a558eaddd033a6770a9f
|
diff --git a/system/src/Grav/Common/Filesystem/Folder.php b/system/src/Grav/Common/Filesystem/Folder.php
index <HASH>..<HASH> 100644
--- a/system/src/Grav/Common/Filesystem/Folder.php
+++ b/system/src/Grav/Common/Filesystem/Folder.php
@@ -60,13 +60,10 @@ abstract class Folder
/** @var \RecursiveDirectoryIterator $file */
foreach ($itr as $file) {
- if (!$file->isDir()) {
- $file_modified = $file->getMTime();
- if ($file_modified > $last_modified) {
- $last_modified = $file_modified;
- }
+ $file_modified = $file->getMTime();
+ if ($file_modified > $last_modified) {
+ $last_modified = $file_modified;
}
-
}
return $last_modified;
@@ -277,7 +274,7 @@ abstract class Folder
class GravRecursiveFilterIterator extends \RecursiveFilterIterator
{
public static $FILTERS = array(
- '.', '..', '.DS_Store'
+ '..', '.DS_Store'
);
public function accept()
diff --git a/system/src/Grav/Common/Page/Page.php b/system/src/Grav/Common/Page/Page.php
index <HASH>..<HASH> 100644
--- a/system/src/Grav/Common/Page/Page.php
+++ b/system/src/Grav/Common/Page/Page.php
@@ -750,33 +750,36 @@ class Page
$this->metadata = array();
$page_header = $this->header;
-
// Set the Generator tag
$this->metadata['generator'] = array('name'=>'generator', 'content'=>'Grav ' . GRAV_VERSION);
- // Merge any site.metadata settings in with page metadata
- $defaults = (array) self::$grav['config']->get('site.metadata');
- if (isset($page_header->metadata)) {
- $page_header->metadata = array_merge($defaults, $page_header->metadata);
- } else {
- $page_header->metadata = $defaults;
- }
-
- // Build an array of meta objects..
- foreach((array)$page_header->metadata as $key => $value) {
+ // Safety check to ensure we have a header
+ if ($page_header) {
+ // Merge any site.metadata settings in with page metadata
+ $defaults = (array) self::$grav['config']->get('site.metadata');
- // If this is a property type metadata: "og", "twitter", "facebook" etc
- if (is_array($value)) {
- foreach ($value as $property => $prop_value) {
- $prop_key = $key.":".$property;
- $this->metadata[$prop_key] = array('property'=>$prop_key, 'content'=>$prop_value);
- }
- // If it this is a standard meta data type
+ if (isset($page_header->metadata)) {
+ $page_header->metadata = array_merge($defaults, $page_header->metadata);
} else {
- if (in_array($key, $header_tag_http_equivs)) {
- $this->metadata[$key] = array('http_equiv'=>$key, 'content'=>$value);
+ $page_header->metadata = $defaults;
+ }
+
+ // Build an array of meta objects..
+ foreach((array)$page_header->metadata as $key => $value) {
+
+ // If this is a property type metadata: "og", "twitter", "facebook" etc
+ if (is_array($value)) {
+ foreach ($value as $property => $prop_value) {
+ $prop_key = $key.":".$property;
+ $this->metadata[$prop_key] = array('property'=>$prop_key, 'content'=>$prop_value);
+ }
+ // If this is a standard metadata type
} else {
- $this->metadata[$key] = array('name'=>$key, 'content'=>$value);
+ if (in_array($key, $header_tag_http_equivs)) {
+ $this->metadata[$key] = array('http_equiv'=>$key, 'content'=>$value);
+ } else {
+ $this->metadata[$key] = array('name'=>$key, 'content'=>$value);
+ }
}
}
}
diff --git a/system/src/Grav/Common/Page/Pages.php b/system/src/Grav/Common/Page/Pages.php
index <HASH>..<HASH> 100644
--- a/system/src/Grav/Common/Page/Pages.php
+++ b/system/src/Grav/Common/Page/Pages.php
@@ -465,13 +465,18 @@ class Pages
// set current modified of page
$last_modified = $page->modified();
+ // flag for content availability
+ $content_exists = false;
+
/** @var \DirectoryIterator $file */
foreach ($iterator as $file) {
$name = $file->getFilename();
+ $modified = $file->getMTime();
if ($file->isFile() && Utils::endsWith($name, CONTENT_EXT)) {
$page->init($file);
+ $content_exists = true;
if ($config->get('system.pages.events.page')) {
$this->grav->fireEvent('onPageProcessed', new Event(['page' => $page]));
@@ -494,24 +499,23 @@ class Pages
// set the modified time if not already set
if (!$page->date()) {
- $page->date($file->getMTime());
+ $page->date($modified);
}
- // set the last modified time on pages
- $this->lastModified($file->getMTime());
-
if ($config->get('system.pages.events.page')) {
$this->grav->fireEvent('onFolderProcessed', new Event(['page' => $page]));
}
}
// Update the last modified if it's newer than already found
- $date = $file->getMTime();
- if ($date > $last_modified) {
- $last_modified = $date;
+ if ($modified > $last_modified) {
+ $last_modified = $modified;
}
+ }
-
+ // Set routability to false if no page found
+ if (!$content_exists) {
+ $page->routable(false);
}
// Override the modified and ID so that it takes the latest change into account
|
Various tweaks and hopefully improvements to file change detection
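The Folder change drops the isDir() guard so directory mtimes count toward the last-modified time as well. The same logic as a small Python sketch:

import os

def last_modified(path):
    # consider files *and* directories, mirroring the PHP change above
    latest = os.path.getmtime(path)
    for root, dirs, files in os.walk(path):
        for name in dirs + files:
            latest = max(latest, os.path.getmtime(os.path.join(root, name)))
    return latest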
|
getgrav_grav
|
train
|
c6877ca48578b4dac468e62083fd0038b8434de8
|
diff --git a/sphinx.go b/sphinx.go
index <HASH>..<HASH> 100644
--- a/sphinx.go
+++ b/sphinx.go
@@ -7,6 +7,7 @@ import (
"fmt"
"io"
"math/big"
+ "sync"
"github.com/btcsuite/btcd/btcec"
"github.com/btcsuite/btcd/chaincfg"
@@ -701,6 +702,8 @@ type Tx struct {
// only be accessed if the index is *not* included in the replay set, or
// otherwise failed any other stage of the processing.
packets []ProcessedPacket
+
+ sync.Mutex
}
// BeginTxn creates a new transaction that can later be committed back to the
@@ -751,6 +754,9 @@ func (t *Tx) ProcessOnionPacket(seqNum uint16, onionPkt *OnionPacket,
return err
}
+ t.Lock()
+ defer t.Unlock()
+
// Add the hash prefix to pending batch of shared secrets that will be
// written later via Commit().
err = t.batch.Put(seqNum, hashPrefix, incomingCltv)
|
sphinx: allow concurrent onion packet processing
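The per-packet work can stay parallel; only the mutation of the shared batch needs the mutex. A Python sketch of the same shape (the processing helper is a stand-in):

import threading

def process_packet(pkt):
    return pkt                    # stand-in for the real per-packet work

class Tx:
    def __init__(self):
        self._lock = threading.Lock()
        self._batch = []

    def process_onion_packet(self, seq_num, pkt):
        entry = process_packet(pkt)          # safe to run concurrently
        with self._lock:                     # only the shared batch is guarded
            self._batch.append((seq_num, entry))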
|
lightningnetwork_lightning-onion
|
train
|
dcd341c5a1b9ae84b5b266d6f4496b523cbde3cf
|
diff --git a/cli.go b/cli.go
index <HASH>..<HASH> 100644
--- a/cli.go
+++ b/cli.go
@@ -94,12 +94,7 @@ func (cli *CLI) Run(args []string) int {
// Listen for signals
signalCh := make(chan os.Signal, 1)
- signal.Notify(signalCh,
- syscall.SIGHUP,
- syscall.SIGINT,
- syscall.SIGTERM,
- syscall.SIGQUIT,
- )
+ signal.Notify(signalCh, Signals...)
for {
select {
@@ -108,6 +103,8 @@ func (cli *CLI) Run(args []string) int {
case <-runner.DoneCh:
return ExitCodeOK
case s := <-signalCh:
+ // Propagate the signal to the child process
+ runner.Signal(s)
switch s {
case syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT:
fmt.Fprintf(cli.errStream, "Received interrupt, cleaning up...\n")
diff --git a/runner.go b/runner.go
index <HASH>..<HASH> 100644
--- a/runner.go
+++ b/runner.go
@@ -13,6 +13,7 @@ import (
"runtime"
"strconv"
"strings"
+ "syscall"
"time"
dep "github.com/hashicorp/consul-template/dependency"
@@ -67,6 +68,9 @@ type Runner struct {
// brain is the internal storage database of returned dependency data.
brain *Brain
+ // cmd is the last known instance of the running command.
+ cmd *exec.Cmd
+
// quiescenceMap is the map of templates to their quiescence timers.
// quiescenceCh is the channel where templates report returns from quiescence
// fires.
@@ -271,6 +275,26 @@ func (r *Runner) Receive(d dep.Dependency, data interface{}) {
}
}
+// Signal sends a signal to the child process, if it exists. Any errors that
+// occur are returned.
+func (r *Runner) Signal(sig os.Signal) error {
+ // Do nothing if we aren't in exec mode - there will be no subprocess to
+ // forward a signal to.
+ if r.config.Exec.Command == "" {
+ return nil
+ }
+
+ log.Printf("[DEBUG] (runner) proxying signal %s", sig.String())
+
+ if r.cmd == nil || r.cmd.Process == nil {
+ log.Printf("[WARN] (runner) attempted to send %s to subprocess, "+
+ "but it does not exist", sig.String())
+ return nil
+ }
+
+ return r.cmd.Process.Signal(sig)
+}
+
// Run iterates over each template in this Runner and conditionally executes
// the template rendering and command execution.
//
|
Watch all signals and propagate to the runner
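A minimal Unix-only Python sketch of the same propagation pattern: trap the signals in the parent and forward each one to the child before the parent's own handling runs.

import signal, subprocess

child = subprocess.Popen(['sleep', '600'])

def forward(sig, frame):
    # propagate to the child first, as Runner.Signal above does
    if child.poll() is None:                 # child still running
        child.send_signal(sig)

for s in (signal.SIGHUP, signal.SIGINT, signal.SIGTERM, signal.SIGQUIT):
    signal.signal(s, forward)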
|
hashicorp_consul-template
|
train
|
7a15d3a4d7fb2950ae5ac5365977ed76a5c4c985
|
diff --git a/pkg/kubelet/cm/cpumanager/cpu_manager_test.go b/pkg/kubelet/cm/cpumanager/cpu_manager_test.go
index <HASH>..<HASH> 100644
--- a/pkg/kubelet/cm/cpumanager/cpu_manager_test.go
+++ b/pkg/kubelet/cm/cpumanager/cpu_manager_test.go
@@ -19,6 +19,7 @@ package cpumanager
import (
"fmt"
"reflect"
+ "strconv"
"strings"
"testing"
"time"
@@ -156,7 +157,7 @@ func makeMultiContainerPod(initCPUs, appCPUs []struct{ request, limit string })
for i, cpu := range initCPUs {
pod.Spec.InitContainers = append(pod.Spec.InitContainers, v1.Container{
- Name: "initContainer-" + string(i),
+ Name: "initContainer-" + strconv.Itoa(i),
Resources: v1.ResourceRequirements{
Requests: v1.ResourceList{
v1.ResourceName(v1.ResourceCPU): resource.MustParse(cpu.request),
@@ -172,7 +173,7 @@ func makeMultiContainerPod(initCPUs, appCPUs []struct{ request, limit string })
for i, cpu := range appCPUs {
pod.Spec.Containers = append(pod.Spec.Containers, v1.Container{
- Name: "appContainer-" + string(i),
+ Name: "appContainer-" + strconv.Itoa(i),
Resources: v1.ResourceRequirements{
Requests: v1.ResourceList{
v1.ResourceName(v1.ResourceCPU): resource.MustParse(cpu.request),
|
Fix bug in converting int to string in CPUManager tests
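In Go, string(i) on an integer produces the rune with that code point, not its decimal rendering; strconv.Itoa(i) gives the decimal form. The Python equivalents make the difference obvious:

assert chr(3) == '\x03'                  # what string(3) gave: an unprintable rune
assert str(3) == '3'                     # what strconv.Itoa(3) gives
'appContainer-' + chr(3)                 # the garbage name the old test built
'appContainer-' + str(3)                 # 'appContainer-3', what the fix builds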
|
kubernetes_kubernetes
|
train
|
5b87c96289ec6c54f50a1f32800e864859776406
|
diff --git a/testsuite/integration/basic/src/test/java/org/jboss/as/test/integration/security/loginmodules/negotiation/SPNEGOLoginModuleTestCase.java b/testsuite/integration/basic/src/test/java/org/jboss/as/test/integration/security/loginmodules/negotiation/SPNEGOLoginModuleTestCase.java
index <HASH>..<HASH> 100644
--- a/testsuite/integration/basic/src/test/java/org/jboss/as/test/integration/security/loginmodules/negotiation/SPNEGOLoginModuleTestCase.java
+++ b/testsuite/integration/basic/src/test/java/org/jboss/as/test/integration/security/loginmodules/negotiation/SPNEGOLoginModuleTestCase.java
@@ -24,13 +24,16 @@ package org.jboss.as.test.integration.security.loginmodules.negotiation;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
+import java.net.SocketPermission;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.security.Security;
import java.util.HashMap;
import java.util.Map;
+import java.util.PropertyPermission;
+import javax.security.auth.kerberos.ServicePermission;
import javax.security.auth.login.LoginException;
import javax.servlet.http.HttpServletResponse;
@@ -72,6 +75,8 @@ import org.jboss.as.test.integration.security.common.negotiation.KerberosTestUti
import org.jboss.as.test.integration.security.common.servlets.SimpleSecuredServlet;
import org.jboss.as.test.integration.security.common.servlets.SimpleServlet;
import org.jboss.as.test.integration.security.loginmodules.LdapExtLoginModuleTestCase;
+import org.jboss.as.test.shared.TestSuiteEnvironment;
+import org.jboss.as.test.shared.integration.ejb.security.PermissionUtils;
import org.jboss.logging.Logger;
import org.jboss.security.SecurityConstants;
import org.jboss.shrinkwrap.api.ShrinkWrap;
@@ -123,7 +128,21 @@ public class SPNEGOLoginModuleTestCase {
@Deployment(name = "WEB", testable = false)
public static WebArchive deployment() {
LOGGER.debug("Web deployment");
- return createWebApp(WEBAPP_NAME, "web-spnego-authn.xml", "SPNEGO");
+ final WebArchive war = createWebApp(WEBAPP_NAME, "web-spnego-authn.xml", "SPNEGO");
+ war.addAsManifestResource(PermissionUtils.createPermissionsXmlAsset(
+ // Permissions for PropagateIdentityServlet to get delegation credentials DelegationCredentialContext.getDelegCredential()
+ new RuntimePermission("org.jboss.security.negotiation.getDelegCredential"),
+ // Permissions for PropagateIdentityServlet to read properties
+ new PropertyPermission(GSSTestConstants.PROPERTY_PORT,"read"),
+ new PropertyPermission(GSSTestConstants.PROPERTY_PRINCIPAL,"read"),
+ new PropertyPermission(GSSTestConstants.PROPERTY_PASSWORD,"read"),
+ // Permissions for GSSTestClient to connect to GSSTestServer
+ new SocketPermission(TestSuiteEnvironment.getServerAddress(),"resolve,connect"),
+ // Permissions for GSSTestClient to initiate gss context
+ new ServicePermission(GSSTestConstants.PRINCIPAL, "initiate"),
+ new ServicePermission("krbtgt/JBOSS.ORG@JBOSS.ORG", "initiate")),
+ "permissions.xml");
+ return war;
}
/**
|
Prepare SPNEGOLoginModuleTestCase to run with the security manager
|
wildfly_wildfly
|
train
|
58a766f63635cce81da36bdfda312c2cc6b6d4ca
|
diff --git a/posterior.py b/posterior.py
index <HASH>..<HASH> 100644
--- a/posterior.py
+++ b/posterior.py
@@ -47,12 +47,12 @@ class oneD:
self.bins = bins / bins.sum()
# Calculate the smoothed bins.
- self.bins_smoothed = ndimage.gaussian_filter(
- self.bins,
- sigma = self.bin_widths[0],
- order = 0)
+ # self.bins_smoothed = ndimage.gaussian_filter(
+ # self.bins,
+ # sigma = self.bin_widths[0],
+ # order = 0)
- self.bins_smoothed = self.bins_smoothed / self.bins_smoothed.sum()
+ #self.bins_smoothed = self.bins_smoothed / self.bins_smoothed.sum()
f.close()
@@ -74,7 +74,7 @@ class oneD:
color='red')
plt.ylim(0, pdf.max()*1.1)
- plt.xlabel('%s |%s]' % (self.name, self.unit))
+ plt.xlabel('%s [%s]' % (self.name, self.unit))
fig.savefig(path)
@@ -134,12 +134,12 @@ class twoD:
# Calculate the smoothed bins.
- self.bins_smoothed = ndimage.gaussian_filter(
- self.bins,
- sigma=(self.xbin_widths[0], self.ybin_widths[0]),
- order = 0)
+ #self.bins_smoothed = ndimage.gaussian_filter(
+ # self.bins,
+ # sigma=(self.xbin_widths[0], self.ybin_widths[0]),
+ # order = 0)
- self.bins_smoothed = self.bins_smoothed / self.bins_smoothed.sum()
+ #self.bins_smoothed = self.bins_smoothed / self.bins_smoothed.sum()
f.close()
diff --git a/profilelikelihood.py b/profilelikelihood.py
index <HASH>..<HASH> 100644
--- a/profilelikelihood.py
+++ b/profilelikelihood.py
@@ -64,7 +64,7 @@ class oneD:
color='red')
plt.ylim(0, self.proflike.max()*1.1)
- plt.xlabel('%s |%s]' % (self.name, self.unit))
+ plt.xlabel('%s [%s]' % (self.name, self.unit))
fig.savefig(path)
@@ -125,12 +125,12 @@ class twoD:
self.bins = bins / bins.sum()
# Calculate the smoothed bins.
- self.bins_smoothed = ndimage.gaussian_filter(
- self.bins,
- sigma=(self.xbin_widths[0], self.ybin_widths[0]),
- order = 0)
+ # self.bins_smoothed = ndimage.gaussian_filter(
+ # self.bins,
+ # sigma=(self.xbin_widths[0], self.ybin_widths[0]),
+ # order = 0)
- self.bins_smoothed = self.bins_smoothed / self.bins_smoothed.sum()
+ #self.bins_smoothed = self.bins_smoothed / self.bins_smoothed.sum()
f.close()
|
Disable smoothing, and fix spelling
ndimage.gaussian_filter doesn’t work with very spread-out data sets.
Index overflow.
|
sliem_barrett
|
train
|
de57e060737681e7b8e50d12f658cee9a402e25e
|
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -139,17 +139,21 @@ const generateCriticalCssWrapped = async function generateCriticalCssWrapped (
'\ncss length: ' +
options.cssString.length
)
- await restartBrowser({
- width,
- height,
- getBrowser: options.puppeteer && options.puppeteer.getBrowser
- })
- // retry
- resolve(
- generateCriticalCssWrapped(options, {
- forceTryRestartBrowser: true
+ try {
+ await restartBrowser({
+ width,
+ height,
+ getBrowser: options.puppeteer && options.puppeteer.getBrowser
})
- )
+ // retry
+ resolve(
+ generateCriticalCssWrapped(options, {
+ forceTryRestartBrowser: true
+ })
+ )
+ } catch (e) {
+ reject(e)
+ }
return
}
reject(e)
@@ -222,13 +226,13 @@ module.exports = function (options, callback) {
const width = parseInt(options.width || DEFAULT_VIEWPORT_WIDTH, 10)
const height = parseInt(options.height || DEFAULT_VIEWPORT_HEIGHT, 10)
- // launch the browser
- await launchBrowserIfNeeded({
- getBrowser: options.puppeteer && options.puppeteer.getBrowser,
- width,
- height
- })
try {
+ // launch the browser
+ await launchBrowserIfNeeded({
+ getBrowser: options.puppeteer && options.puppeteer.getBrowser,
+ width,
+ height
+ })
const criticalCss = await generateCriticalCssWrapped(options)
cleanupAndExit({ returnValue: criticalCss })
} catch (err) {
|
handle uncaught exceptions from launching puppeteer browser
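Moving the awaits inside the try block is what turns a failed launch into a caught error instead of an unhandled rejection. A minimal asyncio analog of the control flow (the launch helper is a stand-in):

import asyncio

async def launch_browser():
    raise RuntimeError('failed to launch')   # stand-in for the puppeteer launch

async def run():
    try:
        await launch_browser()               # inside try: failure is caught below
        print('generated critical css')
    except Exception as err:
        print('clean exit with error:', err)

asyncio.run(run())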
|
pocketjoso_penthouse
|
train
|
271464d3293704b29a24e5a4f140deb2b3985cda
|
diff --git a/packages/eslint-plugin-patternfly-react/lib/rules/import-tokens-icons.js b/packages/eslint-plugin-patternfly-react/lib/rules/import-tokens-icons.js
index <HASH>..<HASH> 100644
--- a/packages/eslint-plugin-patternfly-react/lib/rules/import-tokens-icons.js
+++ b/packages/eslint-plugin-patternfly-react/lib/rules/import-tokens-icons.js
@@ -4,6 +4,9 @@
* @returns string of non-treeshaken import
*/
function makeImport(specifier, moduleName) {
+ if (moduleName.endsWith('createIcon') && moduleName.startsWith('@patternfly/react-icons')) {
+ return `import { ${specifier.local.name} } from '@patternfly/react-icons/dist/js/createIcon';`;
+ }
let res = `import ${specifier.local.name} from '`;
res += moduleName.replace(/\/dist\/(js|esm)/, '');
res += '/dist/js';
@@ -38,7 +41,14 @@ module.exports = {
return {
ImportDeclaration(node) {
if (/@patternfly\/react-(tokens|icons)(\/dist\/(js|esm))?/.test(node.source.value)) {
- const esmSpecifiers = node.specifiers.filter(specifier => specifier.type === 'ImportSpecifier');
+ const esmSpecifiers = node.specifiers.filter(
+ specifier =>
+ specifier.type === 'ImportSpecifier' &&
+ !(
+ node.source.value.startsWith('@patternfly/react-icons') &&
+ node.source.value.endsWith('/dist/js/createIcon')
+ )
+ );
if (esmSpecifiers.length > 0) {
context.report({
node,
|
fix(eslint): fix @patternfly/react-icons/createIcon differently (#<I>)
|
patternfly_patternfly-react
|
train
|
a00eec773665c0243edc3f952bd3bbd0b982cbef
|
diff --git a/tilequeue/process.py b/tilequeue/process.py
index <HASH>..<HASH> 100644
--- a/tilequeue/process.py
+++ b/tilequeue/process.py
@@ -78,7 +78,9 @@ def _preprocess_data(feature_layers):
preproc_feature_layer = dict(
name=layer_datum['name'],
layer_datum=layer_datum,
- features=features)
+ features=features,
+ padded_bounds=padded_bounds,
+ )
preproc_feature_layers.append(preproc_feature_layer)
return preproc_feature_layers
@@ -153,7 +155,9 @@ def _cut_coord(feature_layers, unpadded_bounds, meters_per_pixel, buffer_cfg):
cut_feature_layer = dict(
name=feature_layer['name'],
layer_datum=feature_layer['layer_datum'],
- features=cut_features)
+ features=cut_features,
+ padded_bounds=padded_bounds,
+ )
cut_feature_layers.append(cut_feature_layer)
return cut_feature_layers
@@ -231,7 +235,7 @@ def _simplify_data(
padded_bounds_fn = _create_query_bounds_pad_fn(
buffer_cfg, feature_layer['name'])
- padded_bounds = padded_bounds_fn(unpadded_bounds)
+ padded_bounds = padded_bounds_fn(unpadded_bounds, meters_per_pixel)
layer_padded_bounds = \
calculate_padded_bounds(clip_factor, padded_bounds)
area_threshold_pixels = layer_datum['area_threshold']
@@ -292,6 +296,7 @@ def _simplify_data(
name=feature_layer['name'],
features=simplified_features,
layer_datum=layer_datum,
+ padded_bounds=padded_bounds,
)
simplified_feature_layers.append(simplified_feature_layer)
@@ -352,8 +357,12 @@ def _process_feature_layers(
sort_fn = resolve(sort_fn_name)
processed_features = sort_fn(processed_features, coord.zoom)
- feature_layer = dict(name=layer_name, features=processed_features,
- layer_datum=layer_datum)
+ feature_layer = dict(
+ name=layer_name,
+ features=processed_features,
+ layer_datum=layer_datum,
+ padded_bounds=feature_layer['padded_bounds'],
+ )
processed_feature_layers.append(feature_layer)
# post-process data here, before it gets formatted
|
Pass query padded bounds through pipeline
Passing the query padded bounds through the pipeline allows the
transforms to have access to it.
|
tilezen_tilequeue
|
train
|
6235ab5514b05d335ae526ce62e0a3f505b4b7ab
|
diff --git a/alot/commands/envelope.py b/alot/commands/envelope.py
index <HASH>..<HASH> 100644
--- a/alot/commands/envelope.py
+++ b/alot/commands/envelope.py
@@ -10,10 +10,10 @@ from twisted.internet import defer
from alot.commands import Command, registerCommand
from alot import settings
from alot import helper
-from alot.message import decode_to_unicode
from alot.message import decode_header
from alot.message import encode_header
from alot.message import extract_headers
+from alot.message import extract_body
from alot.commands.globals import EditCommand
from alot.commands.globals import BufferCloseCommand
from alot.commands.globals import EnvelopeOpenCommand
@@ -186,13 +186,7 @@ class EnvelopeEditCommand(Command):
# decode header
headertext = extract_headers(self.mail, self.edit_headers)
- if self.mail.is_multipart():
- for part in self.mail.walk():
- if part.get_content_maintype() == 'text':
- bodytext = decode_to_unicode(part)
- break
- else:
- bodytext = decode_to_unicode(self.mail)
+ bodytext = extract_body(self.mail)
# call pre-edit translate hook
translate = settings.hooks.get('pre_edit_translate')
|
use extract_body in envelope reedit
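The removed inline loop is the classic "first text part of a multipart message, else the whole payload" walk. A standalone sketch of the core (alot's real extract_body does more, e.g. charset handling and rendering hooks):

from email import message_from_string

def extract_body(mail):
    if mail.is_multipart():
        for part in mail.walk():
            if part.get_content_maintype() == 'text':
                payload = part.get_payload(decode=True)
                return payload.decode(part.get_content_charset() or 'utf-8')
        return ''
    return mail.get_payload()

mail = message_from_string('Subject: hi\n\nplain body\n')
extract_body(mail)    # -> 'plain body\n'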
|
pazz_alot
|
train
|
e71603be2cb4c09176f8aa747b77b1e662d13297
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -6,7 +6,7 @@ var fs = require('fs');
function normalize(filepath) {
// resolve to an absolute ppath
- if (!path.isAbsolute(filepath))
+ if (!path.isAbsolute || !path.isAbsolute(filepath))
filepath = path.resolve(path.dirname(module.parent.filename), filepath);
// tack .json on the end if need be
@@ -20,7 +20,7 @@ function normalize(filepath) {
function parse(contents, retained, parser) {
var errors = [], data = [], lines = 0;
- // optional parser
+ // optional parser
if (!parser) parser = JSON;
// process each line of the file
|
fixes #2, assuming not relative in <I>
|
weisjohn_jsonload
|
train
|
f20ae2964303a4a2557b37482f925cc5f043a11e
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,6 @@ setup(
'templates/salmonella/admin/widgets/*.html'
]},
url="http://github.com/lincolnloop/django-salmonella/",
- install_requires=['setuptools'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
|
Removed the setuptools requirement: installing this package already requires setuptools.
|
lincolnloop_django-dynamic-raw-id
|
train
|
fcdec3e40d69dbaa4927e840c8e564d76249a965
|
diff --git a/lib/tools/apk-utils.js b/lib/tools/apk-utils.js
index <HASH>..<HASH> 100644
--- a/lib/tools/apk-utils.js
+++ b/lib/tools/apk-utils.js
@@ -12,11 +12,8 @@ apkUtilsMethods.isAppInstalled = async function (pkg) {
try {
let installed = false;
log.debug(`Getting install status for ${pkg}`);
- let apiLevel = await this.getApiLevel();
- let thirdparty = apiLevel >= 15 ? "-3" : "";
- let stdout = await this.shell(['pm', 'list', 'packages', thirdparty, pkg]);
- let apkInstalledRgx = new RegExp(`^package:${pkg.replace(/(\.)/g, "\\$1")}$`,
- 'm');
+ let stdout = await this.shell(['pm', 'list', 'packages', pkg]);
+ let apkInstalledRgx = new RegExp(`^package:${pkg.replace(/(\.)/g, "\\$1")}$`, 'm');
installed = apkInstalledRgx.test(stdout);
log.debug(`App is${!installed ? ' not' : ''} installed`);
return installed;
diff --git a/test/functional/apk-utils-e2e-specs.js b/test/functional/apk-utils-e2e-specs.js
index <HASH>..<HASH> 100644
--- a/test/functional/apk-utils-e2e-specs.js
+++ b/test/functional/apk-utils-e2e-specs.js
@@ -22,6 +22,9 @@ describe('apk utils', function () {
before(async () => {
adb = await ADB.createADB();
});
+ it('should be able to check status of third party app', async () => {
+ (await adb.isAppInstalled('com.android.phone')).should.be.true;
+ });
it('should be able to install/remove app and detect its status', async () => {
(await adb.isAppInstalled('foo')).should.be.false;
await adb.install(contactManagerPath);
diff --git a/test/unit/apk-utils-specs.js b/test/unit/apk-utils-specs.js
index <HASH>..<HASH> 100644
--- a/test/unit/apk-utils-specs.js
+++ b/test/unit/apk-utils-specs.js
@@ -24,22 +24,16 @@ describe('Apk-utils', () => {
describe('isAppInstalled', withMocks({adb}, (mocks) => {
it('should parse correctly and return true', async () => {
const pkg = 'dummy.package';
- mocks.adb.expects('getApiLevel')
- .once().withExactArgs()
- .returns("17");
mocks.adb.expects('shell')
- .once().withExactArgs(['pm', 'list', 'packages', '-3', pkg])
+ .once().withExactArgs(['pm', 'list', 'packages', pkg])
.returns(`package:${pkg}`);
(await adb.isAppInstalled(pkg)).should.be.true;
mocks.adb.verify();
});
it('should parse correctly and return false', async () => {
const pkg = 'dummy.package';
- mocks.adb.expects('getApiLevel')
- .once().withExactArgs()
- .returns("17");
mocks.adb.expects('shell')
- .once().withExactArgs(['pm', 'list', 'packages', '-3', pkg])
+ .once().withExactArgs(['pm', 'list', 'packages', pkg])
.returns("");
(await adb.isAppInstalled(pkg)).should.be.false;
mocks.adb.verify();
|
Remove third-party restriction from app installed check
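The regex being kept is the part doing the real work: anchor on the exact package name with its dots escaped, so one package name can't accidentally match another. The same check in Python terms:

import re

def is_app_installed(stdout, pkg):
    # multiline-anchored exact match; re.escape plays the role of the
    # manual dot-escaping in the JS above
    rgx = re.compile(r'^package:%s$' % re.escape(pkg), re.MULTILINE)
    return bool(rgx.search(stdout))

out = 'package:com.android.phone\npackage:com.example.app\n'
assert is_app_installed(out, 'com.android.phone')
assert not is_app_installed(out, 'com.android.phonex')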
|
appium_appium-adb
|
train
|
6ca5123d190dfb6e3ad0bbbc0cfc9b67f4ee04fe
|
diff --git a/config/routes/api/v2.rb b/config/routes/api/v2.rb
index <HASH>..<HASH> 100644
--- a/config/routes/api/v2.rb
+++ b/config/routes/api/v2.rb
@@ -255,8 +255,8 @@ Katello::Engine.routes.draw do
collection do
get :auto_complete_search
match ':sync_plan_id/available_products', :to => 'products#index',
- :available_for => 'sync_plan'
- match ':sync_plan_id/products', :to => 'products#index'
+ :available_for => 'sync_plan', :via => :get
+ match ':sync_plan_id/products', :to => 'products#index', :via => :get
end
end
api_resources :systems, :only => [:create] do
|
refs #<I> - fixing routing errors
The router was complaining about using `#match()` without specifying an HTTP method.
|
Katello_katello
|
train
|
b83021510684c9b20610d2206e31c01e9c932783
|
diff --git a/src/ValidationException.php b/src/ValidationException.php
index <HASH>..<HASH> 100644
--- a/src/ValidationException.php
+++ b/src/ValidationException.php
@@ -18,36 +18,22 @@ class ValidationException extends Exception
*
* @return \Exception
*/
- public function __construct($errors, $model = null, $intent = null)
+ public function __construct(array $errors, $model = null, $intent = null)
{
- $this->errors = $errors;
-
- $c = is_array($errors) ? count($errors) : 0;
- if ($c > 1) {
- return parent::__construct([
- 'Multiple unhandled validation errors',
- 'errors' => $errors,
- 'intent' => $intent,
- 'model' => $model,
- ]);
+ if (count($errors) === 0) {
+ throw new Exception('Incorrect use of ValidationException, at least one error must be given');
}
- if ($c === 1) {
- // foreach here just to get key/value from a single member
- foreach ($errors as $field => $error) {
- return parent::__construct([
- $error,
- 'field' => $field,
- 'model' => $model,
- ]);
- }
- }
+ $this->errors = $errors;
- return parent::__construct([
- 'Incorrect use of ValidationException, argument should be an array',
- 'errors' => $errors,
- 'intent' => $intent,
- 'model' => $model,
- ]);
+ if (count($errors) === 1) {
+ parent::__construct(reset($errors));
+ $this->addMoreInfo('field', key($errors));
+ } else {
+ parent::__construct('Multiple unhandled validation errors');
+ $this->addMoreInfo('errors', $errors)
+ ->addMoreInfo('intent', $intent);
+ }
+ $this->addMoreInfo('model', $model);
}
}
diff --git a/tests/ValidationTest.php b/tests/ValidationTest.php
index <HASH>..<HASH> 100644
--- a/tests/ValidationTest.php
+++ b/tests/ValidationTest.php
@@ -101,8 +101,7 @@ class ValidationTests extends AtkPhpunit\TestCase
}
/**
- * @expectedException \Exception
- * @expectedExceptionMessage Incorrect use of ValidationException, argument should be an array
+ * Incorrect use of ValidationException, argument should be an array.
*/
public function testValidate5()
{
@@ -111,6 +110,7 @@ class ValidationTests extends AtkPhpunit\TestCase
$m = new BadValidationModel($p);
$m->set('name', 'john');
+ $this->expectException(\TypeError::class);
$m->save();
}
|
Fix exception constructor refactoring (#<I>)
* Fix exception constructor refactoring
* fix bad original usage
* fix test
* fix CS
|
atk4_data
|
train
|
b2a6ec633409132e28576fea6b96131681dbfd52
|
diff --git a/Entity/MailChimpNewsletter.php b/Entity/MailChimpNewsletter.php
index <HASH>..<HASH> 100755
--- a/Entity/MailChimpNewsletter.php
+++ b/Entity/MailChimpNewsletter.php
@@ -12,6 +12,7 @@ namespace CampaignChain\Operation\MailChimpBundle\Entity;
use CampaignChain\CoreBundle\Entity\Meta;
use Doctrine\ORM\Mapping as ORM;
+use CampaignChain\CoreBundle\Util\ParserUtil;
/**
* @ORM\Entity
@@ -190,7 +191,7 @@ class MailChimpNewsletter extends Meta
*/
public function setArchiveUrlLong($archiveUrlLong)
{
- $this->archiveUrlLong = $archiveUrlLong;
+ $this->archiveUrlLong = ParserUtil::sanitizeUrl($archiveUrlLong);
}
/**
|
CE-<I> ParserUtil::sanitizeUrl() in Location::setUrl() adds unnecessary trailing slash that causes e.g. GoToWebinar links to not work
|
CampaignChain_operation-mailchimp
|
train
|
e715f63c0bd1deb2de80fccd613e10a39d790b7f
|
diff --git a/plugins/database/influxdb/connection_producer.go b/plugins/database/influxdb/connection_producer.go
index <HASH>..<HASH> 100644
--- a/plugins/database/influxdb/connection_producer.go
+++ b/plugins/database/influxdb/connection_producer.go
@@ -168,7 +168,7 @@ func (i *influxdbConnectionProducer) createClient() (influx.Client, error) {
}
if i.TLS {
- var tlsConfig *tls.Config
+ tlsConfig := &tls.Config{}
if len(i.certificate) > 0 || len(i.issuingCA) > 0 {
if len(i.certificate) > 0 && len(i.privateKey) == 0 {
return nil, fmt.Errorf("found certificate for TLS authentication but no private key")
|
fix uninitialized tlsConfig in influxdb plugin (#<I>)
|
hashicorp_vault
|
train
|
3c799d92c870413ba3414092fab274e1511b1f7b
|
diff --git a/java/src/com/google/template/soy/jbcsrc/SoyNodeCompiler.java b/java/src/com/google/template/soy/jbcsrc/SoyNodeCompiler.java
index <HASH>..<HASH> 100644
--- a/java/src/com/google/template/soy/jbcsrc/SoyNodeCompiler.java
+++ b/java/src/com/google/template/soy/jbcsrc/SoyNodeCompiler.java
@@ -876,7 +876,6 @@ final class SoyNodeCompiler extends AbstractReturningSoyNodeVisitor<Statement> {
final Label restartPoint = new Label();
final Expression veData = exprCompiler.compile(node.getVeDataExpression(), restartPoint);
final Expression hasLogger = parameterLookup.getRenderContext().hasLogger();
- final Statement body = Statement.concat(visitChildren(node));
final Statement exitStatement =
ControlFlow.IfBlock.create(
hasLogger, appendableExpression.exitLoggableElement().toStatement())
@@ -884,6 +883,9 @@ final class SoyNodeCompiler extends AbstractReturningSoyNodeVisitor<Statement> {
if (node.getLogonlyExpression() != null) {
final Expression logonlyExpression =
exprCompiler.compile(node.getLogonlyExpression(), restartPoint).unboxAsBoolean();
+ // needs to be called after evaluating the logonly expression so variables defined in the
+ // block aren't part of the save restore state for the logonly expression.
+ final Statement body = Statement.concat(visitChildrenInNewScope(node));
return new Statement() {
@Override
protected void doGen(CodeBuilder cb) {
@@ -935,7 +937,8 @@ final class SoyNodeCompiler extends AbstractReturningSoyNodeVisitor<Statement> {
.toStatement()
.labelStart(restartPoint))
.asStatement();
- return Statement.concat(enterStatement, body, exitStatement);
+ return Statement.concat(
+ enterStatement, Statement.concat(visitChildrenInNewScope(node)), exitStatement);
}
}
diff --git a/java/tests/com/google/template/soy/jbcsrc/VeLoggingTest.java b/java/tests/com/google/template/soy/jbcsrc/VeLoggingTest.java
index <HASH>..<HASH> 100644
--- a/java/tests/com/google/template/soy/jbcsrc/VeLoggingTest.java
+++ b/java/tests/com/google/template/soy/jbcsrc/VeLoggingTest.java
@@ -154,11 +154,15 @@ public final class VeLoggingTest {
StringBuilder sb = new StringBuilder();
TestLogger testLogger = new TestLogger();
renderTemplate(
- ImmutableMap.of("t", true, "f", false),
+ ImmutableMap.of("t", true, "f", false, "n", 0),
OutputAppendable.create(sb, testLogger),
"{@param t : bool}",
"{@param f : bool}",
- "{velog Foo logonly=\"$t\"}<div data-id=1></div>{/velog}",
+ "{@param n : int}",
+ // add the let as a regression test for a bug where we would generate code in the wrong
+ // order which would cause us to try to save/restore the let value which hadn't been defined
+ // yet!
+ "{velog Foo logonly=\"$t\"}<div data-id=1>{let $foo: 1 + $n /}{$foo + $foo}</div>{/velog}",
"{velog Bar logonly=\"$f\"}<div data-id=2></div>{/velog}");
// logonly ve's disable content generation
assertThat(sb.toString()).isEqualTo("<div data-id=2></div>");
diff --git a/testdata/javascript/soy_usegoog_lib.js b/testdata/javascript/soy_usegoog_lib.js
index <HASH>..<HASH> 100644
--- a/testdata/javascript/soy_usegoog_lib.js
+++ b/testdata/javascript/soy_usegoog_lib.js
@@ -30472,23 +30472,19 @@ goog.soy.IjData = function() {};
*/
goog.soy.CompatibleIj_;
-// TODO(b/36644846): remove the second half of the function type union
/**
* Type definition for strict Soy templates. Very useful when passing a template
* as an argument.
- * @typedef {function(?, ?goog.soy.CompatibleIj_=):
- * !goog.soy.data.SanitizedContent| function(?, null=, ?Object<string, *>=):
- * !goog.soy.data.SanitizedContent}
+ * @typedef {function(?=,
+ * ?goog.soy.CompatibleIj_=):(string|!goog.soy.data.SanitizedContent)}
*/
goog.soy.StrictTemplate;
-// TODO(b/36644846): remove the second half of the function type union
/**
* Type definition for strict Soy HTML templates. Very useful when passing
* a template as an argument.
- * @typedef {function(?, ?goog.soy.CompatibleIj_=):
- * !goog.soy.data.SanitizedHtml| function(?, null=, ?Object<string, *>=):
- * !goog.soy.data.SanitizedHtml}
+ * @typedef {function(?=,
+ * ?goog.soy.CompatibleIj_=):!goog.soy.data.SanitizedHtml}
*/
goog.soy.StrictHtmlTemplate;
|
Fix a bug in how we generate code for velog nodes
If a velog had a non-trivial logonly expression and defined a new variable in the body, the detach logic could break: we would try to save/restore variables defined inside the block when evaluating the logonly expression, before those variables had been defined.
Also create a new variable scope for the velog node. This would have also prevented this bug.
GITHUB_BREAKING_CHANGES=none
-------------
Created by MOE: <URL>
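A toy Go sketch (purely illustrative; the real fix is in the Java compiler above) of the ordering rule this message describes: if the body, which introduces new variables, is compiled before the guard expression, those variables leak into the guard's save/restore set.

```go
package main

import "fmt"

// compiler tracks variables in scope; a detach point saves/restores
// exactly the variables visible when an expression is compiled.
type compiler struct{ scope []string }

func (c *compiler) compileExpr(name string) {
	fmt.Printf("compile %s, save/restore set = %v\n", name, c.scope)
}

func (c *compiler) compileBody() {
	// The body defines a new variable, like {let $foo: 1 + $n /}.
	c.scope = append(c.scope, "$foo")
}

func main() {
	wrong := &compiler{scope: []string{"$t"}}
	wrong.compileBody()          // body first...
	wrong.compileExpr("logonly") // ...so $foo wrongly joins the save set

	right := &compiler{scope: []string{"$t"}}
	right.compileExpr("logonly") // guard first sees only pre-existing vars
	right.compileBody()
}
```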
|
google_closure-templates
|
train
|
15001c1c60cc376e6b376dd8ad5e4684375a8fd9
|
diff --git a/reporter.php b/reporter.php
index <HASH>..<HASH> 100644
--- a/reporter.php
+++ b/reporter.php
@@ -131,7 +131,7 @@
* @access public
*/
function paintFormattedMessage($message) {
- print "<pre>$message</pre>";
+ echo '<pre>', htmlentities($message), '</pre>';
}
}
diff --git a/web_tester.php b/web_tester.php
index <HASH>..<HASH> 100644
--- a/web_tester.php
+++ b/web_tester.php
@@ -42,7 +42,7 @@
* @access public
*/
function showSource() {
- $this->dump(htmlentities($this->_browser->getContent()));
+ $this->dump($this->_browser->getContent());
}
/**
|
moved htmlentities() for dump to HtmlReporter
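The design choice, escaping once at the HTML reporter rather than in the data-gathering call, translates to any language. A small hedged Go sketch of the same boundary:

```go
package main

import (
	"fmt"
	"html"
)

// dump passes content through untouched; escaping is the reporter's job,
// so raw content is not double-encoded when a non-HTML reporter is used.
func dump(content string) string { return content }

// paintFormattedMessage escapes exactly once, at the HTML output boundary.
func paintFormattedMessage(msg string) string {
	return "<pre>" + html.EscapeString(msg) + "</pre>"
}

func main() {
	raw := `<b>raw & unescaped</b>`
	fmt.Println(paintFormattedMessage(dump(raw)))
}
```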
|
simpletest_simpletest
|
train
|
2140cca1db79338f79309b30d46f54f058468008
|
diff --git a/mod/forum/user.php b/mod/forum/user.php
index <HASH>..<HASH> 100644
--- a/mod/forum/user.php
+++ b/mod/forum/user.php
@@ -389,9 +389,19 @@ if (isset($courseid) && $courseid != SITEID) {
}
echo $OUTPUT->header();
-echo $OUTPUT->heading($inpageheading);
echo html_writer::start_tag('div', array('class' => 'user-content'));
+if ($isspecificcourse) {
+ $userheading = array(
+ 'heading' => fullname($user),
+ 'user' => $user,
+ 'usercontext' => $usercontext
+ );
+ echo $OUTPUT->context_header($userheading, 2);
+} else {
+ echo $OUTPUT->heading($inpageheading);
+}
+
if (!empty($postoutput)) {
echo $OUTPUT->paging_bar($result->totalcount, $page, $perpage, $url);
foreach ($postoutput as $post) {
|
MDL-<I> mod_forum: Display sub-header on user posts/discussions page
|
moodle_moodle
|
train
|
836445d3048e0a1243ba939e1d03e73fb7fd1635
|
diff --git a/idx/memory/meta_tags.go b/idx/memory/meta_tags.go
index <HASH>..<HASH> 100644
--- a/idx/memory/meta_tags.go
+++ b/idx/memory/meta_tags.go
@@ -96,10 +96,12 @@ func (m metaTagIndex) insertRecord(keyValue tagquery.Tag, id recordId) {
// getMetaRecordIdsByExpression takes an expression and returns all meta record
// ids of the records which match it.
-// It is important to note that negative expressions get evaluated as their
-// positive equivalent, f.e. != gets evaluated as =, to reduce the size of the
-// result set. The caller will then need to handle the negation according to the
-// expression type.
+// It is important to note that if an expression indicates that the result set will likely
+// be smaller if it's negated, then this returns the negative of the actual result set to
+// reduce the size of the returned result set.
+// The caller, after receiving the result set, needs to check whether this expression
+// indicates that the result set will likely be smaller if it's negated in order to be able
+// to interpret the result.
func (m metaTagIndex) getMetaRecordIdsByExpression(expr tagquery.Expression) []recordId {
if expr.OperatesOnTag() {
return m.getByTag(expr)
@@ -108,12 +110,19 @@ func (m metaTagIndex) getMetaRecordIdsByExpression(expr tagquery.Expression) []recordId {
}
func (m metaTagIndex) getByTag(expr tagquery.Expression) []recordId {
+ // TODO implement negated results
var res []recordId
for key := range m {
- if !expr.Matches(key) {
- continue
+ if expr.ResultIsSmallerWhenNegated() {
+ if expr.Matches(key) {
+ continue
+ }
+ } else {
+ if !expr.Matches(key) {
+ continue
+ }
}
for _, ids := range m[key] {
@@ -125,13 +134,21 @@ func (m metaTagIndex) getByTag(expr tagquery.Expression) []recordId {
}
func (m metaTagIndex) getByTagValue(expr tagquery.Expression) []recordId {
+ negateResults := expr.ResultIsSmallerWhenNegated()
+
if expr.MatchesExactly() {
return m[expr.GetKey()][expr.GetValue()]
}
var res []recordId
for value, ids := range m[expr.GetKey()] {
- if !expr.Matches(value) {
+ passes := expr.Matches(value)
+
+ if negateResults {
+ passes = !passes
+ }
+
+ if !passes {
continue
}
|
use negated operators to look up meta tag records when appropriate
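A minimal Go sketch of the lookup pattern in this diff (simplified; the index and expression types here are hypothetical stand-ins for the metrictank ones): when an expression's negation matches fewer entries, evaluate the negated form and let the caller interpret the smaller result set.

```go
package main

import "fmt"

// lookup returns ids whose key matches, or, when negate is set, the ids
// whose key does NOT match; the caller must interpret the result set
// according to whether negation was requested.
func lookup(index map[string][]int, matches func(string) bool, negate bool) []int {
	var res []int
	for key, ids := range index {
		passes := matches(key)
		if negate {
			passes = !passes
		}
		if passes {
			res = append(res, ids...)
		}
	}
	return res
}

func main() {
	index := map[string][]int{"dc=us": {1, 2}, "dc=eu": {3}, "dc=ap": {4}}
	notEU := func(k string) bool { return k != "dc=eu" } // "dc!=eu"
	// Evaluating the smaller positive set ("dc=eu") and negating at the
	// caller keeps the returned slice short.
	fmt.Println(lookup(index, notEU, true)) // ids for dc=eu: [3]
}
```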
|
grafana_metrictank
|
train
|
a47265bdc20730fe617339456be064a0af3312bc
|
diff --git a/pandas/tests/frame/methods/test_duplicated.py b/pandas/tests/frame/methods/test_duplicated.py
index <HASH>..<HASH> 100644
--- a/pandas/tests/frame/methods/test_duplicated.py
+++ b/pandas/tests/frame/methods/test_duplicated.py
@@ -64,7 +64,6 @@ def test_duplicated_nan_none(keep, expected):
tm.assert_series_equal(result, expected)
-@pytest.mark.parametrize("keep", ["first", "last", False])
@pytest.mark.parametrize("subset", [None, ["A", "B"], "A"])
def test_duplicated_subset(subset, keep):
df = DataFrame(
diff --git a/pandas/tests/indexes/multi/test_duplicates.py b/pandas/tests/indexes/multi/test_duplicates.py
index <HASH>..<HASH> 100644
--- a/pandas/tests/indexes/multi/test_duplicates.py
+++ b/pandas/tests/indexes/multi/test_duplicates.py
@@ -238,7 +238,6 @@ def test_duplicated(idx_dup, keep, expected):
tm.assert_numpy_array_equal(result, expected)
-@pytest.mark.parametrize("keep", ["first", "last", False])
def test_duplicated_large(keep):
# GH 9125
n, k = 200, 5000
|
using keep fixture in more places (#<I>)
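The pandas change leans on a shared pytest `keep` fixture instead of repeating `@pytest.mark.parametrize` in each test. A rough Go analogue (not pandas code, and Go has no fixtures; this just shows declaring the parameter space once):

```go
package dedup

import "testing"

// keepOptions mirrors the shared fixture: the parameter space is declared
// once and reused across tests instead of repeating the list in each one.
var keepOptions = []string{"first", "last", "none"}

func TestDuplicatedSubset(t *testing.T) {
	for _, keep := range keepOptions {
		t.Run(keep, func(t *testing.T) { /* exercise subset handling */ })
	}
}

func TestDuplicatedLarge(t *testing.T) {
	for _, keep := range keepOptions {
		t.Run(keep, func(t *testing.T) { /* exercise large inputs */ })
	}
}
```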
|
pandas-dev_pandas
|
train
|
f2175ae4424c113a5daaaf96ba155b64b74a7d5f
|
diff --git a/frame.go b/frame.go
index <HASH>..<HASH> 100644
--- a/frame.go
+++ b/frame.go
@@ -730,10 +730,10 @@ func (f *framer) parseResultMetadata() resultMetadata {
table = f.readString()
}
- cols := make([]ColumnInfo, colCount)
+ var cols []ColumnInfo
for i := 0; i < colCount; i++ {
- col := &cols[i]
+ var col ColumnInfo
if !globalSpec {
col.Keyspace = f.readString()
@@ -751,6 +751,8 @@ func (f *framer) parseResultMetadata() resultMetadata {
// -1 because we already included the tuple column
meta.actualColCount += len(v.Elems) - 1
}
+
+ cols = append(cols, col)
}
meta.columns = cols
diff --git a/frame_test.go b/frame_test.go
index <HASH>..<HASH> 100644
--- a/frame_test.go
+++ b/frame_test.go
@@ -23,6 +23,8 @@ func TestFuzzBugs(t *testing.T) {
[]byte("\x8200\b\x00\x00\x00\b0\x00\x00\x00\x040000"),
[]byte("\x8200\x00\x00\x00\x00\x100\x00\x00\x12\x00\x00\x0000000" +
"00000"),
+ []byte("\x83000\b\x00\x00\x00\x14\x00\x00\x00\x020000000" +
+ "000000000"),
}
for i, test := range tests {
|
Don't allocate all the ColumnInfo upfront
If the column count for a row result is huge, it will trigger either
a "makeslice: len out of range" panic, or the program will do lots of GC
work that makes the application unresponsive, even if there are not really
that many columns.
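A minimal Go sketch of the defensive pattern (simplified; this is not the gocql framer): never size an allocation from an attacker-controlled count; grow with append as elements are actually decoded, so a short frame fails fast instead of forcing a giant allocation.

```go
package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
	"io"
)

type column struct{ id uint32 }

// readColumns trusts only the bytes that actually arrive: the claimed count
// bounds the loop, but memory grows via append, one decoded element at a
// time, so a frame claiming 2^40 columns errors out after the real data ends.
func readColumns(r io.Reader, claimed int) ([]column, error) {
	var cols []column
	for i := 0; i < claimed; i++ {
		var c column
		if err := binary.Read(r, binary.BigEndian, &c.id); err != nil {
			return nil, fmt.Errorf("short frame after %d columns: %w", len(cols), err)
		}
		cols = append(cols, c)
	}
	return cols, nil
}

func main() {
	frame := bytes.NewReader([]byte{0, 0, 0, 1, 0, 0, 0, 2}) // two columns
	_, err := readColumns(frame, 1<<40)                      // hostile count
	fmt.Println(err)
}
```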
|
gocql_gocql
|
train
|
c4a10cdc2deee0d711567e2f0c32e5503f43a562
|
diff --git a/scripts/test_matrix.py b/scripts/test_matrix.py
index <HASH>..<HASH> 100644
--- a/scripts/test_matrix.py
+++ b/scripts/test_matrix.py
@@ -55,6 +55,8 @@ def get_parser():
parser.add_argument('--dry-run', action='store_true', default=False,
help="""Display commands that will be run in each environment
without executing them.""")
+ parser.add_argument('--skip-tests', action='store_true',
+ help="""Skip running tests.""")
# parser.add_argument('')
return parser
@@ -212,22 +214,46 @@ if __name__ == '__main__':
" ipython scipy websocket-client multiuserblazeserver",
])
},
- # "py34_conda_clean" : {
- # "init" : "python=3.4 nose mock",
- # "install" : "conda install --yes -c bokeh/channel/dev bokeh"
- # },
- # "py34_conda_update" : {
- # "init" : "python=3.4 nose mock bokeh=%s" % preversion,
- # "install" : "conda update --yes -c bokeh/channel/dev bokeh"
- # },
- # "py34_pip_clean" : {
- # "init" : "python=3.4 nose mock pip",
- # "install" : "pip install --pre -i https://pypi.binstar.org/bokeh/channel/dev/simple bokeh --extra-index-url https://pypi.python.org/simple/"
- # },
- # "py34_pip_update" : {
- # "init" : "python=3.4 pip nose mock bokeh=%s" % preversion,
- # "install" : "pip install --upgrade --pre -i https://pypi.binstar.org/bokeh/channel/dev/simple bokeh --extra-index-url https://pypi.python.org/simple/"
- # }
+ "py34_conda_clean" : {
+ "init" : "python=3.4 nose mock",
+ "install" : '; '.join([
+ # install latest version from dev channel
+ "conda install --yes -c bokeh/channel/dev bokeh",
+ # install dependencies needed for testing
+ "conda install --yes -c bokeh nose mock blaze abstract-rendering beautiful-soup "
+ "ipython scipy multiuserblazeserver pillow",
+ ])
+ },
+ "py34_conda_update" : {
+ "init" : "python=3.4 nose mock bokeh=%s" % preversion,
+ "install" : '; '.join([
+ "conda update --yes -c bokeh/channel/dev bokeh",
+ # install dependencies needed for testing
+ "conda install --yes -c bokeh nose mock blaze abstract-rendering beautiful-soup "
+ "ipython scipy multiuserblazeserver pillow",
+ ])
+ },
+ "py34_pip_clean" : {
+ "init" : "python=3.4 nose mock pip",
+ "install" : '; '.join([
+ "pip install --pre -i https://pypi.binstar.org/bokeh/channel/dev/simple"
+ " bokeh --extra-index-url https://pypi.python.org/simple/",
+ # install dependencies needed for testing
+ "pip install nose mock blaze abstract-rendering beautifulsoup4"
+ " ipython scipy websocket-client multiuserblazeserver",
+ ])
+ },
+ "py34_pip_update" : {
+ "init" : "python=3.4 pip nose mock bokeh=%s" % preversion,
+ "install" : '; '.join([
+ "pip install --upgrade --pre -i "
+ "https://pypi.binstar.org/bokeh/channel/dev/simple "
+ "bokeh --extra-index-url https://pypi.python.org/simple/",
+ # install dependencies needed for testing
+ "pip install nose mock blaze abstract-rendering beautifulsoup4"
+ " ipython scipy websocket-client multiuserblazeserver",
+ ])
+ },
}
results = {}
@@ -248,15 +274,24 @@ if __name__ == '__main__':
results[environment]['version'] = version_check(environment, current_version)
- results[environment]['test'], failure = run_tests(environment)
+ if not ops.skip_tests:
+ results[environment]['test'], failure = run_tests(environment)
+
+ if not ops.keep:
+ cleaner(os.path.join(root, "envs", environment))
+ if failure:
+ test_failures.append(failure)
+
+ print ("*********************")
+ print ("RESULTS")
+ print(results)
+ print ()
+ print ("*********************")
- if not ops.keep:
- cleaner(os.path.join(root, "envs", environment))
- if failure:
- test_failures.append(failure)
+ if ops.skip_tests:
+ print ("TESTS SKIPPED")
+    elif not ops.dry_run:
- if not ops.dry_run:
- print(results)
if test_failures:
logfile = 'logfile.txt'
print()
|
update script with instructions closer to BEP2. Also add a new --skip-tests argument
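A hedged Go sketch of the same flag shape (the real script above uses Python's argparse with store_true): a boolean switch that defaults to false and short-circuits the test run when present.

```go
package main

import (
	"flag"
	"fmt"
)

func main() {
	// A boolean switch defaulting to false, in the spirit of argparse's
	// store_true: passing --skip-tests means "skip running tests".
	skipTests := flag.Bool("skip-tests", false, "skip running tests")
	flag.Parse()
	if *skipTests {
		fmt.Println("TESTS SKIPPED")
		return
	}
	fmt.Println("running tests...")
}
```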
|
bokeh_bokeh
|
train
|
08436a7f0598268fbf5c392314199e341fa7e437
|
diff --git a/.travis.yml b/.travis.yml
index <HASH>..<HASH> 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -17,7 +17,7 @@ before_script:
- composer install
script:
- - vendor/bin/php-cs-fixer fix --dry-run
+ - vendor/bin/php-cs-fixer fix --dry-run --diff --diff-format=udiff
- vendor/bin/phpstan analyze lib/ --level=2
- vendor/bin/phpunit
- vendor/bin/phpbench run --report=aggregate --progress=travis --store --iterations=1
diff --git a/lib/Core/Inference/SymbolContextResolver.php b/lib/Core/Inference/SymbolContextResolver.php
index <HASH>..<HASH> 100644
--- a/lib/Core/Inference/SymbolContextResolver.php
+++ b/lib/Core/Inference/SymbolContextResolver.php
@@ -534,9 +534,9 @@ class SymbolContextResolver
): SymbolContext {
if (null === $node->accessExpression) {
$info = $info->withIssue(sprintf(
- 'Subscript expression "%s" is incomplete',
- (string) $node->getText()
- ));
+ 'Subscript expression "%s" is incomplete',
+ (string) $node->getText()
+ ));
return $info;
}
@@ -544,9 +544,9 @@ class SymbolContextResolver
if ($info->type() != Type::array()) {
$info = $info->withIssue(sprintf(
- 'Not resolving subscript expression of type "%s"',
- (string) $info->type()
- ));
+ 'Not resolving subscript expression of type "%s"',
+ (string) $info->type()
+ ));
return $info;
}
@@ -554,10 +554,10 @@ class SymbolContextResolver
if (false === is_array($subjectValue)) {
$info = $info->withIssue(sprintf(
- 'Array value for symbol "%s" is not an array, is a "%s"',
- (string) $info->symbol(),
- gettype($subjectValue)
- ));
+ 'Array value for symbol "%s" is not an array, is a "%s"',
+ (string) $info->symbol(),
+ gettype($subjectValue)
+ ));
return $info;
}
@@ -572,9 +572,9 @@ class SymbolContextResolver
}
$info = $info->withIssue(sprintf(
- 'Did not resolve access expression for node type "%s"',
- get_class($node)
- ));
+ 'Did not resolve access expression for node type "%s"',
+ get_class($node)
+ ));
return $info;
}
diff --git a/lib/Core/SourceCodeLocator/StubSourceLocator.php b/lib/Core/SourceCodeLocator/StubSourceLocator.php
index <HASH>..<HASH> 100644
--- a/lib/Core/SourceCodeLocator/StubSourceLocator.php
+++ b/lib/Core/SourceCodeLocator/StubSourceLocator.php
@@ -78,7 +78,7 @@ final class StubSourceLocator implements SourceCodeLocator
private function serializedMapPath()
{
- return $this->cacheDir . '/stubmap.map';
+ return $this->cacheDir . '/' . md5($this->stubPath) . '.map';
}
private function fileIterator()
|
Stub hash (#<I>)
* use md5 hash of the stub path as the stub cache filename
in order to support multiple stub locators, use a hash of the stub path
* show diff on cs fail
* fix cs
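The fix is a pattern worth noting: key the cache file on a hash of its input path so different inputs can never collide on one hard-coded filename. A minimal Go sketch (hypothetical names, mirroring the PHP above):

```go
package main

import (
	"crypto/md5"
	"fmt"
	"path/filepath"
)

// serializedMapPath derives the cache filename from the stub path, so two
// locators with different stub directories no longer collide on a single
// hard-coded "stubmap.map" file.
func serializedMapPath(cacheDir, stubPath string) string {
	sum := md5.Sum([]byte(stubPath))
	return filepath.Join(cacheDir, fmt.Sprintf("%x.map", sum))
}

func main() {
	fmt.Println(serializedMapPath("/tmp/cache", "/usr/share/php-stubs"))
	fmt.Println(serializedMapPath("/tmp/cache", "/opt/other-stubs"))
}
```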
|
phpactor_worse-reflection
|
train
|
804cffa843692a6d33fc2f2086e32c15c482b637
|
diff --git a/examples/filter_private_profiles.py b/examples/filter_private_profiles.py
index <HASH>..<HASH> 100644
--- a/examples/filter_private_profiles.py
+++ b/examples/filter_private_profiles.py
@@ -16,5 +16,5 @@ from instabot import Bot
bot = Bot(filter_users=True,
filter_private_users=True)
bot.login()
-private_user_input = input("\n \n Enter a private profile nickname: ")
+private_user_input = input("\n Enter a private user: ")
bot.follow(bot.get_user_id_from_username(private_user_input))
|
Update filter_private_profiles.py
|
instagrambot_instabot
|
train
|