hash (stringlengths 40-40) | diff (stringlengths 131-114k) | message (stringlengths 7-980) | project (stringlengths 5-67) | split (stringclasses, 1 value)
|---|---|---|---|---|
c9d1977d9d4957c048244813a8bc4d712739ee1d
|
diff --git a/core-bundle/contao/library/Contao/Controller.php b/core-bundle/contao/library/Contao/Controller.php
index <HASH>..<HASH> 100644
--- a/core-bundle/contao/library/Contao/Controller.php
+++ b/core-bundle/contao/library/Contao/Controller.php
@@ -2043,10 +2043,10 @@ abstract class Controller extends \System
}
}
- // Create the aggregated script
+ // Create the aggregated script and add it before the non-static scripts (see #4890)
if ($objCombiner->hasEntries())
{
- $strScripts .= '<script' . ($blnXhtml ? ' type="text/javascript"' : '') . ' src="' . $objCombiner->getCombinedFile() . '"></script>' . "\n";
+ $strScripts = '<script' . ($blnXhtml ? ' type="text/javascript"' : '') . ' src="' . $objCombiner->getCombinedFile() . '"></script>' . "\n" . $strScripts;
}
}
|
[Core] Add the static JavaScript file before the non-static ones (see #<I>)
|
contao_contao
|
train
|
bba490f318405a8308ea28c7a138c0cb4c94ee03
|
diff --git a/src/util/graphic.js b/src/util/graphic.js
index <HASH>..<HASH> 100644
--- a/src/util/graphic.js
+++ b/src/util/graphic.js
@@ -1071,8 +1071,7 @@ function rollbackDefaultTextStyle(style) {
}
export function getFont(opt, ecModel) {
- // ecModel or default text style model.
- var gTextStyleModel = ecModel || ecModel.getModel('textStyle');
+ var gTextStyleModel = ecModel && ecModel.getModel('textStyle');
return zrUtil.trim([
// FIXME in node-canvas fontWeight is before fontStyle
opt.fontStyle || gTextStyleModel && gTextStyleModel.getShallow('fontStyle') || '',
|
fix get font configs issue (#<I>)
The original logic would never get font configs from the `textStyle` option, and the `ecModel` doesn't have any font configs other than the `textStyle` option.
`var gTextStyleModel = ecModel || ecModel.getModel('textStyle');`
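For illustration, the same short-circuit mistake reproduced as a runnable Python sketch (Python's `or`/`and` mirror JavaScript's `||`/`&&` here; `Model` and `get_model` are hypothetical stand-ins):

    class Model:
        def get_model(self, name):
            return f"<{name} model>"

    ec_model = Model()
    # Buggy: `or` returns ec_model itself whenever it is truthy, so
    # get_model() is never reached; and if ec_model were None, the
    # right-hand side would raise AttributeError on None instead.
    broken = ec_model or ec_model.get_model("textStyle")
    # Fixed: `and` only evaluates get_model() when ec_model is truthy,
    # and yields a falsy value otherwise.
    fixed = ec_model and ec_model.get_model("textStyle")
    print(broken)  # the Model object itself, not the textStyle model
    print(fixed)   # <textStyle model>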
|
apache_incubator-echarts
|
train
|
ba90af7d324e2af522db93d97b4a53b894a57371
|
diff --git a/dagobah/backend/mongo.py b/dagobah/backend/mongo.py
index <HASH>..<HASH> 100644
--- a/dagobah/backend/mongo.py
+++ b/dagobah/backend/mongo.py
@@ -30,7 +30,8 @@ class MongoBackend(BaseBackend):
'version': '2.5'}]
def __init__(self, host, port, db, dagobah_collection='dagobah',
- job_collection='dagobah_job', log_collection='dagobah_log'):
+ job_collection='dagobah_job', log_collection='dagobah_log',
+ host_collection='dagobah_host'):
super(MongoBackend, self).__init__()
self.host = host
@@ -47,6 +48,7 @@ class MongoBackend(BaseBackend):
self.dagobah_coll = self.db[dagobah_collection]
self.job_coll = self.db[job_collection]
self.log_coll = self.db[log_collection]
+ self.host_coll = self.db[host_collection]
def __repr__(self):
return '<MongoBackend (host: %s, port: %s)>' % (self.host, self.port)
@@ -69,6 +71,12 @@ class MongoBackend(BaseBackend):
if not self.job_coll.find_one({'_id': candidate}):
return candidate
+ def get_new_host_id(self):
+ while True:
+ candidate = ObjectId()
+ if not self.host_coll.find_one({'_id': candidate}):
+ return candidate
+
def get_new_log_id(self):
while True:
candidate = ObjectId()
@@ -109,9 +117,17 @@ class MongoBackend(BaseBackend):
append = {'save_date': datetime.utcnow()}
self.job_coll.save(dict(job_json.items() + append.items()))
+ def commit_host(self, host_json):
+ host_json['_id'] = host_json['host_id']
+ append = {'save_date': datetime.utcnow()}
+ self.host_coll.save(dict(host_json.items() + append.items()))
+
def delete_job(self, job_id):
self.job_coll.remove({'_id': job_id})
+ def delete_host(self, host_id):
+ self.host_coll.remove({'_id': host_id})
+
def commit_log(self, log_json):
""" Commits a run log to the Mongo backend.
|
Mongo backend support for remote host execution
|
thieman_dagobah
|
train
|
290a9f4f02ffcab67ea9c47c101fd286222018a9
|
diff --git a/appium/spec/screen-object/text_spec.rb b/appium/spec/screen-object/text_spec.rb
index <HASH>..<HASH> 100644
--- a/appium/spec/screen-object/text_spec.rb
+++ b/appium/spec/screen-object/text_spec.rb
@@ -6,6 +6,7 @@ describe "Interface" do
let(:selenium_driver) {double('')}
let(:text) {ScreenObject::AppElements::TextField.new(locator)}
+
context "interaction with clear method" do
it "should clear the text field." do
expect(text).to receive(:element).and_return(selenium_driver)
|
adding file text_spec.rb and initial commit
|
capitalone_screen-object
|
train
|
afcb1a0e9a70536eef4e7ab7ed0cf23a217b9095
|
diff --git a/mongodbshell/__init__.py b/mongodbshell/__init__.py
index <HASH>..<HASH> 100644
--- a/mongodbshell/__init__.py
+++ b/mongodbshell/__init__.py
@@ -334,7 +334,7 @@ class Client:
f"collection : '{self.collection.name}'"
def __repr__(self):
- return f"Proxy('{self.database.name}', '{self.collection.name}', '{self.uri}')"
+ return f"mongodbshell.Client('{self.database.name}', '{self.collection.name}', '{self.uri}')"
mongo_client = Client()
|
Fixed repr to match new ctor name
|
jdrumgoole_mongodbshell
|
train
|
1cd4e6a747eaa75fd35a0b5650a108d50de40561
|
diff --git a/lib/ecs_compose/cli.rb b/lib/ecs_compose/cli.rb
index <HASH>..<HASH> 100644
--- a/lib/ecs_compose/cli.rb
+++ b/lib/ecs_compose/cli.rb
@@ -17,6 +17,28 @@ module EcsCompose
aliases: %w(-i),
desc: "Type and name for use with --file [ex: 'service:hello' or 'task:migrate']")
+ desc("up [SERVICES...]", "Register ECS task definitions and update services")
+ def up(*services)
+ available = manifest.task_definitions.select {|td| td.type == :service }
+ chosen = all_or_specified(available, services)
+
+ chosen.each do |service|
+ json = EcsCompose::JsonGenerator.new(service.name, service.yaml).json
+ EcsCompose::Ecs.update_service_with_json(service.name, json)
+ end
+ end
+
+ desc("register [TASK_DEFINITIONS...]", "Register ECS task definitions")
+ def register(*task_definitions)
+ available = manifest.task_definitions
+ chosen = all_or_specified(available, task_definitions)
+
+ chosen.each do |td|
+ json = EcsCompose::JsonGenerator.new(td.name, td.yaml).json
+ EcsCompose::Ecs.register_task_definition(json)
+ end
+ end
+
desc("json [TASK_DEFINITION]",
"Convert a task definition to ECS JSON format")
def json(task_definition=nil)
@@ -37,24 +59,18 @@ module EcsCompose
puts EcsCompose::JsonGenerator.new(found.name, found.yaml).json
end
- desc("up [SERVICES...]", "Update ECS services")
- def up(*services)
- available = manifest.task_definitions.select {|td| td.type == :service }
- chosen =
- if available.empty?
- available
- else
- available.select {|td| services.include?(td.name) }
- end
+ protected
- chosen.each do |service|
- json = EcsCompose::JsonGenerator.new(service.name, service.yaml).json
- EcsCompose::Ecs.update_service_with_json(service.name, json)
- end
+ # Choose either all items in `available`, or just those with the
+ # specified `names`.
+ def all_or_specified(available, names)
+ if names.empty?
+ available
+ else
+ available.select {|td| names.include?(td.name) }
+ end
end
- protected
-
# Figure out whether we have a manifest or a docker-compose.yml. We
# check supplied flags first, then defaults, and we prefer manifests
# when there's a tie.
|
Allow service registration, and re-order commands
We want to put the most important commands on top.
|
faradayio_ecs_compose
|
train
|
44afd8ca312da182726cb6c051747e621961ed28
|
diff --git a/stagemonitor-jdbc/src/test/java/org/stagemonitor/jdbc/ConnectionMonitoringTransformerTest.java b/stagemonitor-jdbc/src/test/java/org/stagemonitor/jdbc/ConnectionMonitoringTransformerTest.java
index <HASH>..<HASH> 100644
--- a/stagemonitor-jdbc/src/test/java/org/stagemonitor/jdbc/ConnectionMonitoringTransformerTest.java
+++ b/stagemonitor-jdbc/src/test/java/org/stagemonitor/jdbc/ConnectionMonitoringTransformerTest.java
@@ -108,8 +108,8 @@ public class ConnectionMonitoringTransformerTest {
requestInformation.getRequestTraceReporterFuture().get();
final Map<MetricName, Timer> timers = metric2Registry.getTimers();
assertTrue(timers.keySet().toString(), timers.size() > 1);
- assertNotNull(timers.keySet().toString(), timers.get(name("external_request_response_time").type("jdbc").tag("method", "SELECT").tag("signature", "All").build()));
- assertNotNull(timers.keySet().toString(), timers.get(name("external_request_response_time").type("jdbc").tag("method", "SELECT").tag("signature", "ConnectionMonitoringTransformerTest$TestDao#executePreparedStatement").build()));
+ assertNotNull(timers.keySet().toString(), timers.get(name("external_request_response_time").type("jdbc").tag("signature", "All").tag("method", "SELECT").build()));
+ assertNotNull(timers.keySet().toString(), timers.get(name("external_request_response_time").type("jdbc").tag("signature", "ConnectionMonitoringTransformerTest$TestDao#executePreparedStatement").tag("method", "SELECT").build()));
final CallStackElement callStack = requestInformation.getRequestTrace().getCallStack();
assertEquals("testRecordSqlPreparedStatement", callStack.getSignature());
assertEquals("void org.stagemonitor.jdbc.ConnectionMonitoringTransformerTest$TestDao.executePreparedStatement()",
@@ -131,8 +131,8 @@ public class ConnectionMonitoringTransformerTest {
final Map<MetricName, Timer> timers = metric2Registry.getTimers();
final String message = timers.keySet().toString();
assertTrue(message, timers.size() > 1);
- assertEquals(message, 1, timers.get(name("external_request_response_time").type("jdbc").tag("method", "SELECT").tag("signature", "ConnectionMonitoringTransformerTest$TestDao#executeStatement").build()).getCount());
- assertEquals(message, 1, timers.get(name("external_request_response_time").type("jdbc").tag("method", "SELECT").tag("signature", "All").build()).getCount());
+ assertEquals(message, 1, timers.get(name("external_request_response_time").type("jdbc").tag("signature", "ConnectionMonitoringTransformerTest$TestDao#executeStatement").tag("method", "SELECT").build()).getCount());
+ assertEquals(message, 1, timers.get(name("external_request_response_time").type("jdbc").tag("signature", "All").tag("method", "SELECT").build()).getCount());
final CallStackElement callStack = requestInformation.getRequestTrace().getCallStack();
assertEquals("testRecordSqlStatement", callStack.getSignature());
assertEquals("void org.stagemonitor.jdbc.ConnectionMonitoringTransformerTest$TestDao.executeStatement()",
|
Fix tag order in ConnectionMonitoringTransformerTest
|
stagemonitor_stagemonitor
|
train
|
312128ded830ad4e8d9fd5fdcca5b7c099eee3bd
|
diff --git a/presto-raptor/src/main/java/com/facebook/presto/raptor/RaptorSplitManager.java b/presto-raptor/src/main/java/com/facebook/presto/raptor/RaptorSplitManager.java
index <HASH>..<HASH> 100644
--- a/presto-raptor/src/main/java/com/facebook/presto/raptor/RaptorSplitManager.java
+++ b/presto-raptor/src/main/java/com/facebook/presto/raptor/RaptorSplitManager.java
@@ -34,6 +34,7 @@ import com.facebook.presto.spi.TupleDomain;
import com.google.common.collect.ImmutableList;
import javax.annotation.PreDestroy;
+import javax.annotation.concurrent.GuardedBy;
import javax.inject.Inject;
import java.util.Collection;
@@ -51,6 +52,7 @@ import static com.facebook.presto.raptor.util.Types.checkType;
import static com.facebook.presto.spi.StandardErrorCode.NO_NODES_AVAILABLE;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.Maps.uniqueIndex;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
@@ -140,6 +142,7 @@ public class RaptorSplitManager
private final TupleDomain<RaptorColumnHandle> effectivePredicate;
private final CloseableIterator<ShardNodes> iterator;
+ @GuardedBy("this")
private CompletableFuture<List<ConnectorSplit>> future;
public RaptorSplitSource(long tableId, TupleDomain<RaptorColumnHandle> effectivePredicate)
@@ -156,14 +159,15 @@ public class RaptorSplitManager
}
@Override
- public CompletableFuture<List<ConnectorSplit>> getNextBatch(int maxSize)
+ public synchronized CompletableFuture<List<ConnectorSplit>> getNextBatch(int maxSize)
{
+ checkState((future == null) || future.isDone(), "previous batch not completed");
future = supplyAsync(batchSupplier(maxSize), executor);
return future;
}
@Override
- public void close()
+ public synchronized void close()
{
if (future != null) {
future.cancel(true);
|
Clarify concurrency semantics of RaptorSplitSource
The previous version had unclear semantics and relied on an implicit
contract with the caller to use it correctly. The new version guards
these invariants explicitly.
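A minimal Python threading analog of the guarded contract (a sketch, not the Presto code; names are illustrative):

    import threading
    from concurrent.futures import ThreadPoolExecutor

    class SplitSource:
        def __init__(self):
            self._lock = threading.Lock()   # mirrors `synchronized`
            self._future = None             # mirrors @GuardedBy("this")
            self._pool = ThreadPoolExecutor(max_workers=1)

        def get_next_batch(self, max_size):
            with self._lock:
                # mirrors checkState(): fail loudly instead of trusting callers
                assert self._future is None or self._future.done(), \
                    "previous batch not completed"
                self._future = self._pool.submit(lambda: list(range(max_size)))
                return self._future

        def close(self):
            with self._lock:
                if self._future is not None:
                    self._future.cancel()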
|
prestodb_presto
|
train
|
e4d428d58954472ff8032e35f9f8d043be4fd9b8
|
diff --git a/lib/identity_cache/memoized_cache_proxy.rb b/lib/identity_cache/memoized_cache_proxy.rb
index <HASH>..<HASH> 100644
--- a/lib/identity_cache/memoized_cache_proxy.rb
+++ b/lib/identity_cache/memoized_cache_proxy.rb
@@ -49,8 +49,11 @@ module IdentityCache
memoizing = memoizing?
ActiveSupport::Notifications.instrument('cache_delete.identity_cache', memoizing: memoizing) do
memoized_key_values.delete(key) if memoizing
- result = @cache_fetcher.delete(key)
- IdentityCache.logger.debug {"[IdentityCache] delete #{ result ? 'recorded' : 'failed' } for #{key}"}
+ if result = @cache_fetcher.delete(key)
+ IdentityCache.logger.debug {"[IdentityCache] delete recorded for #{key}"}
+ else
+ IdentityCache.logger.error {"[IdentityCache] delete failed for #{key}"}
+ end
result
end
end
|
Log delete failed on error level (#<I>)
|
Shopify_identity_cache
|
train
|
ff1ccda3a6559d32c54b9b85f52bea5ad108d31c
|
diff --git a/tests/test_regressions.py b/tests/test_regressions.py
index <HASH>..<HASH> 100644
--- a/tests/test_regressions.py
+++ b/tests/test_regressions.py
@@ -535,3 +535,32 @@ from smtplib import SMTPConnectError, SMTPNotSupportedError # important comment
from appsettings import AppSettings, ObjectSetting, StringSetting # type: ignore
"""
assert "# type: ignore" in isort.code(test_input, combine_as_imports=True)
+
+
+def test_incorrect_grouping_when_comments_issue_1396():
+ """Test to ensure isort groups import correct independent of the comments present.
+ See: https://github.com/timothycrosley/isort/issues/1396
+ """
+ assert isort.code(
+ """from django.shortcuts import render
+from apps.profiler.models import Project
+from django.contrib.auth.decorators import login_required
+from django.views.generic import (
+ # ListView,
+ # DetailView,
+ TemplateView,
+ # CreateView,
+ # View
+)
+""",
+ line_length=88,
+ known_first_party=["apps"],
+ known_django=["django"],
+ sections=["FUTURE", "STDLIB", "DJANGO", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
+ ) == """from django.contrib.auth.decorators import login_required
+from django.shortcuts import render
+from django.views.generic import \\
+ TemplateView # ListView,; DetailView,; CreateView,; View
+
+from apps.profiler.models import Project
+"""
|
Add test case for issue #<I>
|
timothycrosley_isort
|
train
|
fa705b4f117e16c6a06027065b8a0a0fceda4b63
|
diff --git a/src/Engine/PhpGd/Extension/Core/ImageInfo.php b/src/Engine/PhpGd/Extension/Core/ImageInfo.php
index <HASH>..<HASH> 100644
--- a/src/Engine/PhpGd/Extension/Core/ImageInfo.php
+++ b/src/Engine/PhpGd/Extension/Core/ImageInfo.php
@@ -78,14 +78,14 @@ class ImageInfo
{
rewind($fp);
$contents = fread($fp, 34);
- if (preg_match('/(?s)\\ARIFF.{4}WEBPVP8(X|L)/', $contents, $matches)) {
+ if (preg_match('/(?s)\\ARIFF.{4}WEBPVP8(X|L)/s', $contents, $matches)) {
$supported = $this->context->getSupportedImageFormatsToString();
throw new InvalidImageException(
'unsupported.image.format.or.file.corrupted.%unsupported%.%supported%',
['%unsupported%' => '"WEBP (VP8'.$matches[1].')"', '%supported%' => $supported]
);
}
- $pattern = '/(?s)\\ARIFF.{4}WEBPVP8\\s.{10}(?<width>.{2})(?<height>.{2})/';
+ $pattern = '/(?s)\\ARIFF.{4}WEBPVP8\\s.{10}(?<width>.{2})(?<height>.{2})/s';
if (preg_match($pattern, $contents, $matches)) {
$width = unpack('v', $matches['width'])[1];
$height = unpack('v', $matches['height'])[1];
@@ -108,7 +108,7 @@ class ImageInfo
{
rewind($fp);
$contents = fread($fp, 10);
- if (preg_match('/(?s)\\AGIF8(7|9)a(?<width>.{2})(?<height>.{2})/', $contents, $matches)) {
+ if (preg_match('/(?s)\\AGIF8(7|9)a(?<width>.{2})(?<height>.{2})/s', $contents, $matches)) {
$width = unpack('v', $matches['width'])[1];
$height = unpack('v', $matches['height'])[1];
@@ -131,7 +131,7 @@ class ImageInfo
rewind($fp);
$contents = fread($fp, 25);
if (preg_match(
- '/\\A\\x89PNG\\x0d\\x0a\\x1a\\x0a(?:.{4})IHDR(?<width>.{4})(?<height>.{4})/',
+ '/\\A\\x89PNG\\x0d\\x0a\\x1a\\x0a(?:.{4})IHDR(?<width>.{4})(?<height>.{4})/s',
$contents,
$matches
)) {
|
Added /s modifier to image recognition patterns, so that newlines are not excluded from the `.` metacharacter.
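A quick Python demonstration of what the flag changes (re.DOTALL is Python's equivalent of PCRE's /s):

    import re

    # A WEBP-style header whose RIFF size field happens to contain 0x0A (newline).
    contents = "RIFF\x00\x0a\x00\x00WEBPVP8X"
    pattern = r"\ARIFF.{4}WEBPVP8(X|L)"
    print(re.search(pattern, contents))              # None: '.' skips the newline
    print(re.search(pattern, contents, re.DOTALL))   # matches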
|
coldume_imagecraft
|
train
|
275319193a5d173fe4b14f7ef8f663450e4d2c2a
|
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index <HASH>..<HASH> 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -361,9 +361,9 @@ def refresh_db():
# Strip filesize from end of line
ident = re.sub(r' \[.+B\]$', '', ident)
ret[ident] = True
- elif cols[0] == 'Ign':
+ elif 'Ign' in cols[0]:
ret[ident] = False
- elif cols[0] == 'Hit':
+ elif 'Hit' in cols[0]:
ret[ident] = None
return ret
|
Check for Ign/Hit membership instead of == in aptpkg.refresh_db
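Presumably (the commit message doesn't say) this is about apt releases that number the status columns, which a strict equality check misses; a Python sketch:

    for line in ("Hit http://archive... trusty InRelease",
                 "Hit:2 http://archive... xenial InRelease",
                 "Ign:1 http://archive... xenial/main Translation-en"):
        cols = line.split()
        print(cols[0] == "Hit", "Hit" in cols[0], "Ign" in cols[0])
    # cols[0] == "Hit" fails on "Hit:2"; membership matches both forms.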
|
saltstack_salt
|
train
|
1a3b1b5f73f420a920f5497352433ab444550be8
|
diff --git a/addon/components/sl-button.js b/addon/components/sl-button.js
index <HASH>..<HASH> 100755
--- a/addon/components/sl-button.js
+++ b/addon/components/sl-button.js
@@ -82,7 +82,7 @@ export default Ember.Component.extend( StreamEnabled, TooltipEnabled, {
// Events
/**
- * @function
+ * @function
* @returns {Boolean} - The `bubbles` property value
*/
click() {
@@ -260,3 +260,4 @@ export default Ember.Component.extend( StreamEnabled, TooltipEnabled, {
)
});
+
diff --git a/addon/components/sl-chart.js b/addon/components/sl-chart.js
index <HASH>..<HASH> 100755
--- a/addon/components/sl-chart.js
+++ b/addon/components/sl-chart.js
@@ -80,8 +80,8 @@ export default Ember.Component.extend({
* Check passed parameters on initialization
*
* @function
- * @throws {ember.error} Series property must be an Array
- * @throws {ember.error} Options property must be an Object
+ * @throws {ember/Error} Series property must be an Array
+ * @throws {ember/Error} Options property must be an Object
* @returns {undefined}
*/
initialize: Ember.on(
@@ -248,3 +248,4 @@ export default Ember.Component.extend({
)
});
+
|
my editor left a bunch of empty space on a comment line for some reason. Also corrected jsdoc 'ember.error' to 'ember/Error'
|
softlayer_sl-ember-components
|
train
|
8029055c5173eada68cc565ec669e0ead003c2c2
|
diff --git a/js/xena.js b/js/xena.js
index <HASH>..<HASH> 100644
--- a/js/xena.js
+++ b/js/xena.js
@@ -171,6 +171,12 @@ module.exports = class xena extends Exchange {
//
// [
// {
+ // "type": "Index",
+ // "symbol": ".ADAUSD",
+ // "tickSize": 4,
+ // "enabled": true
+ // },
+ // {
// "id":"ETHUSD_3M_250920",
// "type":"Margin",
// "marginType":"XenaFuture",
@@ -260,58 +266,75 @@ module.exports = class xena extends Exchange {
const marginType = this.safeString (market, 'marginType');
const baseId = this.safeString (market, 'baseCurrency');
const quoteId = this.safeString (market, 'quoteCurrency');
+ const settleId = this.safeString (market, 'settlCurrency');
const base = this.safeCurrencyCode (baseId);
const quote = this.safeCurrencyCode (quoteId);
+ const settle = this.safeCurrencyCode (settleId);
+ const expiryDate = this.safeString (market, 'expiryDate');
+ const expiryTimestamp = this.parse8601 (expiryDate);
let symbol = id;
+ let future = false;
+ let swap = false;
if (type === 'margin') {
+ symbol = base + '/' + quote + ':' + settle;
if (marginType === 'XenaFuture') {
+ symbol = symbol + '-' + this.yymmdd (expiryTimestamp);
type = 'future';
+ future = true;
} else if (marginType === 'XenaListedPerpetual') {
type = 'swap';
- symbol = base + '/' + quote;
+ swap = true;
}
}
- const future = (type === 'future');
- const swap = (type === 'swap');
- const pricePrecision = this.safeInteger2 (market, 'tickSize', 'pricePrecision');
- const precision = {
- 'price': pricePrecision,
- 'amount': 0,
- };
- const maxCost = this.safeNumber (market, 'maxOrderQty');
- const minCost = this.safeNumber (market, 'minOrderQuantity');
- const limits = {
- 'amount': {
- 'min': undefined,
- 'max': undefined,
- },
- 'price': {
- 'min': undefined,
- 'max': undefined,
- },
- 'cost': {
- 'min': minCost,
- 'max': maxCost,
- },
- };
- const active = this.safeValue (market, 'enabled', false);
const inverse = this.safeValue (market, 'inverse', false);
+ const contract = swap || future;
result.push ({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
+ 'settle': settle,
'baseId': baseId,
'quoteId': quoteId,
+ 'settleId': settleId,
'numericId': numericId,
- 'active': active,
'type': type,
'spot': false,
- 'future': future,
+ 'margin': false,
'swap': swap,
- 'inverse': inverse,
- 'precision': precision,
- 'limits': limits,
+ 'future': future,
+ 'option': false,
+ 'active': this.safeValue (market, 'enabled', false),
+ 'contract': contract,
+ 'linear': contract ? !inverse : undefined,
+ 'inverse': contract ? inverse : undefined,
+ 'contractSize': this.safeNumber (market, 'contractValue'),
+ 'expiry': expiryTimestamp,
+ 'expiryDatetime': this.iso8601 (expiryTimestamp),
+ 'strike': undefined,
+ 'optionType': undefined,
+ 'precision': {
+ 'price': this.safeInteger2 (market, 'tickSize', 'pricePrecision'),
+ 'amount': 0,
+ },
+ 'limits': {
+ 'leverage': {
+ 'min': undefined,
+ 'max': undefined,
+ },
+ 'amount': {
+ 'min': undefined,
+ 'max': undefined,
+ },
+ 'price': {
+ 'min': undefined,
+ 'max': undefined,
+ },
+ 'cost': {
+ 'min': this.safeNumber (market, 'minOrderQuantity'),
+ 'max': this.safeNumber (market, 'maxOrderQty'),
+ },
+ },
'info': market,
});
}
|
xena.fetchMarkets unified
|
ccxt_ccxt
|
train
|
4c5e3734433f8159cccd794211a4d356117b6404
|
diff --git a/pymatbridge/tests/test_publish.py b/pymatbridge/tests/test_publish.py
index <HASH>..<HASH> 100644
--- a/pymatbridge/tests/test_publish.py
+++ b/pymatbridge/tests/test_publish.py
@@ -30,5 +30,5 @@ def test_lines_to_notebook():
nb = publish.lines_to_notebook(lines)
- npt.assert_equal(nb['worksheets'][0]['cells'][0]['source'][0],
+ npt.assert_equal(nb['worksheets'][0]['cells'][1]['source'][0],
' This is a first line\n\n')
|
TST: The test now accommodates the automatic prepending of the magic.
|
arokem_python-matlab-bridge
|
train
|
e64cc13fd5b0f486b05278778ffe8e604e20a61a
|
diff --git a/src/arcrest/manageags/administration.py b/src/arcrest/manageags/administration.py
index <HASH>..<HASH> 100644
--- a/src/arcrest/manageags/administration.py
+++ b/src/arcrest/manageags/administration.py
@@ -49,6 +49,8 @@ class AGSAdministration(BaseAGSServer):
proxy_url=None, proxy_port=None,
initialize=False):
"""Constructor"""
+ if url.lower().endswith('/admin') == False:
+ url = "%s/admin" % url
self._url = url
if securityHandler is not None:
if isinstance(securityHandler, PKISecurityHandler):
@@ -68,7 +70,7 @@ class AGSAdministration(BaseAGSServer):
elif isinstance(securityHandler, PortalServerSecurityHandler):
self._securityHandler = securityHandler
elif isinstance(securityHandler,AGSTokenSecurityHandler):
- self._securityHandler = securityHandler
+ self._securityHandler = securityHandler
self._proxy_url = proxy_url
self._proxy_port =proxy_port
diff --git a/src/arcrest/manageorg/_portals.py b/src/arcrest/manageorg/_portals.py
index <HASH>..<HASH> 100644
--- a/src/arcrest/manageorg/_portals.py
+++ b/src/arcrest/manageorg/_portals.py
@@ -1,4 +1,4 @@
-from ..security.security import OAuthSecurityHandler, AGOLTokenSecurityHandler
+from ..security.security import OAuthSecurityHandler, AGOLTokenSecurityHandler, PortalServerSecurityHandler
from ..manageags import AGSAdministration
from ..hostedservice import Services
from ..common.general import local_time_to_online,online_time_to_string
@@ -943,13 +943,6 @@ class Portal(BaseAGOLClass):
#----------------------------------------------------------------------
@property
def tileServers(self):
- """gets the tile server base urls"""
- if self.urls == {}:
- return {}
- return self.urls["urls"]['tiles']
- #----------------------------------------------------------------------
- @property
- def tileServersF(self):
"""
Returns the objects to manage site's tile hosted services/servers. It returns
AGSAdministration object if the site is Portal and it returns a
@@ -966,7 +959,6 @@ class Portal(BaseAGOLClass):
else:
res = urls['http']
for https in res:
- #http://tiles.arcgis.com/tiles/PWJUSsdoJDp7SgLj/arcgis/admin/services/TileServiceTest.MapServer?f=pjson
if ishttps:
scheme = "https"
else:
@@ -982,48 +974,18 @@ class Portal(BaseAGOLClass):
servers = self.servers
for server in servers.servers:
url = server.adminUrl
- sh = PortalServerSecurityHandler(portalTokenHandler=self._securityHandler,
+ sh = PortalServerSecurityHandler(tokenHandler=self._securityHandler,
serverUrl=url,
- referer=server['name'].replace(":6080", ":6443")
+ referer=server.name.split(":")[0]
)
services.append(
AGSAdministration(url=url,
securityHandler=sh,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port,
- initialize=False)
+ initialize=True)
)
- print 'stop'
return services
- #if urls != {}:
- #for https in portal.tileServers['https']:
- ##http://tiles.arcgis.com/tiles/PWJUSsdoJDp7SgLj/arcgis/admin/services/TileServiceTest.MapServer?f=pjson
- #if isinstance(self._securityHandler, AGOLTokenSecurityHandler):
- #url = "https://%s/tiles/%s/arcgis/rest/admin" % (https, portalId)
- #if url.endswith(r'/services') == False:
- #url = url
- #else:
- #url = "https://%s/%s/ArcGIS/rest/admin" % (https, portal.portalId)
- #services.append(Services(url=url,
- #securityHandler=self._securityHandler,
- #proxy_url=self._proxy_url,
- #proxy_port=self._proxy_port))
- #return services
- #else:
- #for server in portal.servers['servers']:
- #url = server['adminUrl'] + "/admin"
- #sh = PortalServerSecurityHandler(portalTokenHandler=self._securityHandler,
- #serverUrl=url,
- #referer=server['name'].replace(":6080", ":6443")
- #)
- #services.append(
- #AGSAdministration(url=url,
- #securityHandler=sh,
- #proxy_url=self._proxy_url,
- #proxy_port=self._proxy_port,
- #initialize=False)
- #)
- #return services
#----------------------------------------------------------------------
@property
def purchases(self):
@@ -1584,14 +1546,14 @@ class Servers(BaseAGOLClass):
def __init(self):
"""loads the property data into the class"""
params = {
- "f" : "json"
+ "f" : "pjson"
}
json_dict = self._do_get(url=self._surl,
param_dict=params,
securityHandler=self._securityHandler,
proxy_port=self._proxy_port,
proxy_url=self._proxy_url)
- self._json_dict = json.loads(json_dict)
+ self._json_dict = json_dict
self._json = json.dumps(json_dict)
attributes = [attr for attr in dir(self)
if not attr.startswith('__') and \
|
added tileServers support on the Portal class. This allows users to access the tile servers on both AGOL and AGS.
|
Esri_ArcREST
|
train
|
3c369eb328132dee38093a180400a514e85f4f60
|
diff --git a/safe/impact_functions/test/test_registry.py b/safe/impact_functions/test/test_registry.py
index <HASH>..<HASH> 100644
--- a/safe/impact_functions/test/test_registry.py
+++ b/safe/impact_functions/test/test_registry.py
@@ -77,7 +77,8 @@ EXPECTED_IF = [
'Polygon volcano on buildings',
'Point volcano on population',
'Polygon volcano on population',
- 'Ash raster on land cover'
+ 'Ash raster on land cover',
+ 'Ash raster on population'
]
|
Fix failed test on travis.
|
inasafe_inasafe
|
train
|
170b1d80b5c01da5326be591a742b454c193525c
|
diff --git a/plugins/postgres/dbt/adapters/postgres/connections.py b/plugins/postgres/dbt/adapters/postgres/connections.py
index <HASH>..<HASH> 100644
--- a/plugins/postgres/dbt/adapters/postgres/connections.py
+++ b/plugins/postgres/dbt/adapters/postgres/connections.py
@@ -91,7 +91,7 @@ class PostgresConnectionManager(SQLConnectionManager):
search_path.replace(' ', '\\ '))
if credentials.sslmode:
- kwargs['sslmode'] = credentials.sslmode
+ kwargs['sslmode'] = credentials.sslmode
try:
handle = psycopg2.connect(
|
Update plugins/postgres/dbt/adapters/postgres/connections.py
|
fishtown-analytics_dbt
|
train
|
c3e6fc0302b30c5a3ddccc36c013097a38053064
|
diff --git a/tests/test_copy.py b/tests/test_copy.py
index <HASH>..<HASH> 100644
--- a/tests/test_copy.py
+++ b/tests/test_copy.py
@@ -1,6 +1,7 @@
from __future__ import unicode_literals
import os
+import time
import unittest
import tempfile
import shutil
@@ -69,6 +70,78 @@ class TestCopy(unittest.TestCase):
f.write('1' * write_chars)
return filepath
+ def test_copy_file_if_newer_dst_older(self):
+ try:
+ #create first dst ==> dst is older the src ==> file should be copied
+ dst_dir = self._create_sandbox_dir()
+ dst_file1 = self._touch(dst_dir, "file1.txt")
+ self._write_file(dst_file1)
+
+ time.sleep(1) #sleep 1 sec to ensure dst_file1 is older
+
+ src_dir = self._create_sandbox_dir()
+ src_file1 = self._touch(src_dir, "file1.txt")
+ self._write_file(src_file1)
+
+ src_fs = open_fs('osfs://' + src_dir)
+ dst_fs = open_fs('osfs://' + dst_dir)
+
+ self.assertTrue(dst_fs.exists("/file1.txt"))
+
+ copied = fs.copy.copy_file_if_newer(src_fs, "/file1.txt", dst_fs, "/file1.txt")
+
+ self.assertEqual(copied, "/file1.txt")
+ self.assertTrue(dst_fs.exists("/file1.txt"))
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+ def test_copy_file_if_newer_dst_doesnt_exists(self):
+ try:
+ src_dir = self._create_sandbox_dir()
+ src_file1 = self._touch(src_dir, "file1.txt")
+ self._write_file(src_file1)
+
+ dst_dir = self._create_sandbox_dir()
+
+ src_fs = open_fs('osfs://' + src_dir)
+ dst_fs = open_fs('osfs://' + dst_dir)
+
+
+ copied = fs.copy.copy_file_if_newer(src_fs, "/file1.txt", dst_fs, "/file1.txt")
+
+ self.assertEqual(copied, "/file1.txt")
+ self.assertTrue(dst_fs.exists("/file1.txt"))
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+ def test_copy_file_if_newer_dst_is_newer(self):
+ try:
+ src_dir = self._create_sandbox_dir()
+ src_file1 = self._touch(src_dir, "file1.txt")
+ self._write_file(src_file1)
+
+ time.sleep(1) #sleep 1 sec to ensure src_file1 is older
+
+ dst_dir = self._create_sandbox_dir()
+ dst_file1 = self._touch(dst_dir, "file1.txt")
+ self._write_file(dst_file1)
+
+ src_fs = open_fs('osfs://' + src_dir)
+ dst_fs = open_fs('osfs://' + dst_dir)
+
+
+ self.assertTrue(dst_fs.exists("/file1.txt"))
+
+ copied = fs.copy.copy_file_if_newer(src_fs, "/file1.txt", dst_fs, "/file1.txt")
+
+ self.assertEqual(copied, None)
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+
def test_copy_fs_if_newer_dst_older(self):
try:
#create first dst ==> dst is older the src ==> file should be copied
@@ -76,6 +149,8 @@ class TestCopy(unittest.TestCase):
dst_file1 = self._touch(dst_dir, "file1.txt")
self._write_file(dst_file1)
+ time.sleep(1) #sleep 1 sec to ensure dst_file1 is older
+
src_dir = self._create_sandbox_dir()
src_file1 = self._touch(src_dir, "file1.txt")
self._write_file(src_file1)
@@ -132,6 +207,8 @@ class TestCopy(unittest.TestCase):
src_file1 = self._touch(src_dir, "file1.txt")
self._write_file(src_file1)
+ time.sleep(1) #sleep 1 sec to ensure src_file1 is older
+
dst_dir = self._create_sandbox_dir()
dst_file1 = self._touch(dst_dir, "file1.txt")
self._write_file(dst_file1)
@@ -163,6 +240,8 @@ class TestCopy(unittest.TestCase):
src_file2 = self._touch(src_dir, "one_level_down" + os.sep + "file2.txt")
self._write_file(src_file2)
+ time.sleep(1) #sleep 1 sec to ensure src_file1 is older
+
dst_dir = self._create_sandbox_dir()
dst_file1 = self._touch(dst_dir, "file1.txt")
self._write_file(dst_file1)
|
test methods to cover copy_file_if_newer
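For context, the semantics these tests pin down, sketched in plain Python (an assumption about fs.copy.copy_file_if_newer's behaviour, inferred from the three cases above):

    import os, shutil

    def copy_file_if_newer(src, dst):
        # destination newer than (or same age as) source: nothing to do
        if os.path.exists(dst) and os.path.getmtime(src) <= os.path.getmtime(dst):
            return None
        # destination missing or older: copy and report the path
        shutil.copy2(src, dst)
        return dst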
|
PyFilesystem_pyfilesystem2
|
train
|
48e6f9f16327d5eba481cc3f652aa76959b8e26f
|
diff --git a/VisualTimer.js b/VisualTimer.js
index <HASH>..<HASH> 100644
--- a/VisualTimer.js
+++ b/VisualTimer.js
@@ -108,6 +108,7 @@
var timer = node.game.gameLoop.getAllParams(node.game.gameState).timer;
if (timer) {
+ that.timerDiv.className = '';
var options = ('number' === typeof timer) ? {milliseconds: timer} : timer;
if (!options.timeup) {
options.timeup = 'DONE';
@@ -117,6 +118,10 @@
that.start();
}
});
+
+ node.on('DONE', function(){
+ that.timerDiv.className = 'strike';
+ })
};
})(node.window.widgets);
\ No newline at end of file
|
VisualTimer strikes through the time once DONE is fired
|
nodeGame_nodegame-widgets
|
train
|
b48c497fcea2c8ea02aabf730c01ef92bd24974d
|
diff --git a/client/post-editor/controller.js b/client/post-editor/controller.js
index <HASH>..<HASH> 100644
--- a/client/post-editor/controller.js
+++ b/client/post-editor/controller.js
@@ -27,6 +27,7 @@ import { getSelectedEditor } from 'state/selectors/get-selected-editor';
import { requestSelectedEditor, setSelectedEditor } from 'state/selected-editor/actions';
import { getGutenbergEditorUrl } from 'state/selectors/get-gutenberg-editor-url';
import { shouldLoadGutenberg } from 'state/selectors/should-load-gutenberg';
+import { shouldRedirectGutenberg } from 'state/selectors/should-redirect-gutenberg';
function getPostID( context ) {
if ( ! context.params.post || 'new' === context.params.post ) {
@@ -189,9 +190,13 @@ async function redirectIfBlockEditor( context, next ) {
const postType = determinePostType( context );
const postId = getPostID( context );
- const url = getGutenbergEditorUrl( state, siteId, postId, postType );
// pass along parameters, for example press-this
- return window.location.replace( addQueryArgs( context.query, url ) );
+ const gutenbergUrl = getGutenbergEditorUrl( state, siteId, postId, postType );
+ const url = addQueryArgs( context.query, gutenbergUrl );
+ if ( shouldRedirectGutenberg( state, siteId ) ) {
+ return window.location.replace( url );
+ }
+ return page.redirect( url );
}
export default {
diff --git a/client/state/selectors/get-last-non-editor-route.js b/client/state/selectors/get-last-non-editor-route.js
index <HASH>..<HASH> 100644
--- a/client/state/selectors/get-last-non-editor-route.js
+++ b/client/state/selectors/get-last-non-editor-route.js
@@ -14,14 +14,21 @@ import createSelector from 'lib/create-selector';
* Get the last non-editor route while ignoring navigation in block editor.
*
* @param {object} state Global state tree
- * @returns {string} The last non block editor route -- empty string if none.
+ * @returns {string} The last non editor route -- empty string if none.
*/
const getLastNonEditorRoute = createSelector(
state => {
const previousPath = getPreviousPath( state );
- const blockEditorPattern = /^\/block-editor/;
- if ( previousPath && ! blockEditorPattern.test( previousPath ) ) {
+ /**
+ * Include paths which start in the classic editor because it is common
+ * to redirect from classic to block editor. For example, to create a new
+ * page, you go to `/page`, which then redirects to `/block-editor/page`.
+ * Matching page or post handles that case.
+ */
+ const editorPattern = /^\/(block-editor|page|post)/;
+
+ if ( previousPath && ! editorPattern.test( previousPath ) ) {
return previousPath;
}
@@ -30,7 +37,7 @@ const getLastNonEditorRoute = createSelector(
last(
dropRightWhile(
getRouteHistory( state ),
- ( { path } ) => path && blockEditorPattern.test( path )
+ ( { path } ) => path && editorPattern.test( path )
)
),
'path',
|
Fix back button from creating new pages/posts (#<I>)
* Dynamically redirect to the block editor route
* Also match `post` and `page` for last non-editor route selector.
* Remove unnecessary comment
* Use shouldRedirectGutenberg selector instead
|
Automattic_wp-calypso
|
train
|
8639ae1ddc4f40df742ac9308eeebd3c0b14df60
|
diff --git a/app/models/address.rb b/app/models/address.rb
index <HASH>..<HASH> 100644
--- a/app/models/address.rb
+++ b/app/models/address.rb
@@ -1,6 +1,20 @@
class Address < ActiveRecord::Base
belongs_to :vcard
+ # Validations
+ include I18nRailsHelpers
+
+ def validate_address
+ errors.add_on_blank(:postal_code)
+ errors.add_on_blank(:locality)
+
+ if street_address.blank? and extended_address.blank? and post_office_box.blank?
+ errors.add(:street_address, "#{t_attr(:street_address, Vcard)} #{I18n.translate('errors.messages.empty')}")
+ errors.add(:extended_address, "#{t_attr(:extended_address, Vcard)} #{I18n.translate('errors.messages.empty')}")
+ errors.add(:post_office_box, "#{t_attr(:post_office_box, Vcard)} #{I18n.translate('errors.messages.empty')}")
+ end
+ end
+
# Helpers
def to_s
I18n.translate('has_vcards.address.to_s',
diff --git a/app/models/vcard.rb b/app/models/vcard.rb
index <HASH>..<HASH> 100644
--- a/app/models/vcard.rb
+++ b/app/models/vcard.rb
@@ -26,6 +26,16 @@ class Vcard < ActiveRecord::Base
belongs_to :object, :polymorphic => true
+ # Validations
+ include I18nRailsHelpers
+
+ def validate_name
+ if full_name.blank?
+ errors.add(:full_name, "#{t_attr(:full_name, Vcard)} #{I18n.translate('errors.messages.empty')}")
+ errors.add(:family_name, "#{t_attr(:family_name, Vcard)} #{I18n.translate('errors.messages.empty')}")
+ errors.add(:given_name, "#{t_attr(:given_name, Vcard)} #{I18n.translate('errors.messages.empty')}")
+ end
+ end
# Convenience accessors
def full_name
|
Add Address.validate_address and Vcard.validate_name.
|
huerlisi_has_vcards
|
train
|
cd8a08650fe5c8cd42c1c4ab0158b792e995c185
|
diff --git a/src/Hal/Metric/Consolidated.php b/src/Hal/Metric/Consolidated.php
index <HASH>..<HASH> 100644
--- a/src/Hal/Metric/Consolidated.php
+++ b/src/Hal/Metric/Consolidated.php
@@ -83,6 +83,7 @@ class Consolidated
'efferentCoupling' => [],
'difficulty' => [],
'lcom' => [],
+ 'mi' => [],
];
foreach ($metrics->all() as $key => $item) {
diff --git a/src/Hal/Report/Html/Reporter.php b/src/Hal/Report/Html/Reporter.php
index <HASH>..<HASH> 100644
--- a/src/Hal/Report/Html/Reporter.php
+++ b/src/Hal/Report/Html/Reporter.php
@@ -92,6 +92,10 @@ class Reporter
sprintf('%s/js/history-%d.json', $logDir, $next),
json_encode($today, JSON_PRETTY_PRINT)
);
+ file_put_contents(
+ sprintf('%s/js/latest.json', $logDir, $next),
+ json_encode($today, JSON_PRETTY_PRINT)
+ );
// json data
file_put_contents(
|
added a way to get latest results directly in json
|
phpmetrics_PhpMetrics
|
train
|
f14f79a9022641069e2346cb325eb173ef9c29ba
|
diff --git a/test/main.js b/test/main.js
index <HASH>..<HASH> 100644
--- a/test/main.js
+++ b/test/main.js
@@ -7,6 +7,9 @@ var merkletools = require('../merkletools.js');
var bLeft = new Buffer('a292780cc748697cb499fdcc8cb89d835609f11e502281dfe3f6690b1cc23dcb', 'hex');
var bRight = new Buffer('cb4990b9a8936bbc137ddeb6dcab4620897b099a450ecdc5f3e86ef4b3a7135c', 'hex');
var mRoot = crypto.createHash('sha256').update(Buffer.concat([bLeft, bRight])).digest();
+var bLeftmd5 = new Buffer('0cc175b9c0f1b6a831c399e269772661', 'hex');
+var bRightmd5 = new Buffer('92eb5ffee6ae2fec3ad71c777531578f', 'hex');
+var mRootmd5 = crypto.createHash('md5').update(Buffer.concat([bLeftmd5, bRightmd5])).digest();
describe("make tree with addLeaves hex", function () {
@@ -150,6 +153,18 @@ describe("make tree with 5 leaves at once needing hashing", function () {
});
+describe("make tree using md5", function () {
+
+ var merkleTools = new merkletools({ hashType: 'md5'});
+ merkleTools.addLeaves([ bLeftmd5, bRightmd5]);
+ merkleTools.makeTree();
+
+ it("merkle root value should be correct", function () {
+ assert.equal(merkleTools.getMerkleRoot(), mRootmd5.toString('hex'));
+ });
+
+});
+
describe("proof left node", function () {
var merkleTools = new merkletools();
|
Added md5 tree test
|
Tierion_merkle-tools
|
train
|
a9c6bfb32a3dbc8f84e76f0ff7762560b6fb0fe7
|
diff --git a/lib/gcli/types/union.js b/lib/gcli/types/union.js
index <HASH>..<HASH> 100644
--- a/lib/gcli/types/union.js
+++ b/lib/gcli/types/union.js
@@ -54,8 +54,9 @@ exports.items = [
var onError = function(i) {
if (i >= this.types.length) {
- return Promise.reject(new Conversion(undefined, arg, Status.ERROR,
- l10n.lookup("commandParseError")));
+ var msg = l10n.lookup('commandParseError');
+ var conversion = new Conversion(undefined, arg, Status.ERROR, msg);
+ return Promise.resolve(conversion);
}
else {
return tryNext(i + 1);
|
union-<I>: resolve the conversion rather than rejecting it
Nothing has actually broken, it's just that the user typed something
that we can't understand right now, so we resolve with a Status.ERROR
rather than rejecting.
|
joewalker_gcli
|
train
|
93abd34437d8e7e9e4429d5ccd3952e3f6f0ad26
|
diff --git a/datastore/src/main/java/org/jboss/pnc/datastore/predicates/rsql/AbstractTransformer.java b/datastore/src/main/java/org/jboss/pnc/datastore/predicates/rsql/AbstractTransformer.java
index <HASH>..<HASH> 100644
--- a/datastore/src/main/java/org/jboss/pnc/datastore/predicates/rsql/AbstractTransformer.java
+++ b/datastore/src/main/java/org/jboss/pnc/datastore/predicates/rsql/AbstractTransformer.java
@@ -63,7 +63,7 @@ abstract class AbstractTransformer<Entity> implements Transformer<Entity> {
}
if (isFieldEmbedded) {
- logger.info("field {} is EMBEDDED {}", fields[i], isFieldEmbedded);
+ logger.trace("field {} is EMBEDDED {}", fields[i], isFieldEmbedded);
}
if (i == 0) {
|
Change log level of RSQL log
|
project-ncl_pnc
|
train
|
3416ba745621bf34d47c286d1b221760aa5b9ef6
|
diff --git a/Kwf_js/EyeCandy/List/Plugins/ActiveListener/LargeContentAjax.js b/Kwf_js/EyeCandy/List/Plugins/ActiveListener/LargeContentAjax.js
index <HASH>..<HASH> 100644
--- a/Kwf_js/EyeCandy/List/Plugins/ActiveListener/LargeContentAjax.js
+++ b/Kwf_js/EyeCandy/List/Plugins/ActiveListener/LargeContentAjax.js
@@ -73,6 +73,18 @@ Kwf.EyeCandy.List.Plugins.ActiveListener.LargeContentAjax = Ext.extend(Kwf.EyeCa
});
},
+
+ _getLargeContentHeight: function(item) {
+ var height = this.largeContent[item.id].getHeight();
+ if (this.largeContainer.getStyle('margin-top') && this.largeContainer.getStyle('margin-top').substr(-2)=='px') {
+ height += parseInt(this.largeContainer.getStyle('margin-top'));
+ }
+ if (this.largeContainer.getStyle('margin-bottom') && this.largeContainer.getStyle('margin-bottom').substr(-2)=='px') {
+ height += parseInt(this.largeContainer.getStyle('margin-bottom'));
+ }
+ return height;
+ },
+
_activate: function(item)
{
var nextItem = this.list.getItem(item.listIndex+1);
@@ -86,14 +98,7 @@ Kwf.EyeCandy.List.Plugins.ActiveListener.LargeContentAjax = Ext.extend(Kwf.EyeCa
this.largeContent[item.id] = this.largeContainer.child('div');
this.largeContent[item.id].enableDisplayMode('block');
this.largeContent[item.id].setStyle('position', 'absolute');
- var height = this.largeContent[item.id].getHeight();
- if (this.largeContainer.getStyle('margin-top') && this.largeContainer.getStyle('margin-top').substr(-2)=='px') {
- height += parseInt(this.largeContainer.getStyle('margin-top'));
- }
- if (this.largeContainer.getStyle('margin-bottom') && this.largeContainer.getStyle('margin-bottom').substr(-2)=='px') {
- height += parseInt(this.largeContainer.getStyle('margin-bottom'));
- }
- this.largeContainer.setHeight(height);
+ this.largeContainer.setHeight(this._getLargeContentHeight(item));
this.activeItem = item;
return;
}
@@ -106,10 +111,11 @@ Kwf.EyeCandy.List.Plugins.ActiveListener.LargeContentAjax = Ext.extend(Kwf.EyeCa
if (this.fetchedItems[item.id]) {
nextEl.show();
var oldHeight = this.largeContainer.getHeight();
- this.largeContainer.setHeight(nextEl.getHeight()); //set new height without animation
+ var newHeight = this._getLargeContentHeight(item);
+ this.largeContainer.setHeight(newHeight); //set new height without animation
Kwf.callOnContentReady(nextEl.dom, {newRender: false});
this.largeContainer.setHeight(oldHeight); //set previous height without animation
- this.largeContainer.setHeight(nextEl.getHeight(), true); //and now animate to new height
+ this.largeContainer.setHeight(newHeight, true); //and now animate to new height
nextEl.hide();
}
|
also calculate height correctly when switching to other item
|
koala-framework_koala-framework
|
train
|
b3076e068ee6b2a81b8eb55a2b87befae7165d41
|
diff --git a/prometheus/promhttp/delegator.go b/prometheus/promhttp/delegator.go
index <HASH>..<HASH> 100644
--- a/prometheus/promhttp/delegator.go
+++ b/prometheus/promhttp/delegator.go
@@ -76,16 +76,16 @@ type flusherDelegator struct{ *responseWriterDelegator }
type hijackerDelegator struct{ *responseWriterDelegator }
type readerFromDelegator struct{ *responseWriterDelegator }
-func (d *closeNotifierDelegator) CloseNotify() <-chan bool {
+func (d closeNotifierDelegator) CloseNotify() <-chan bool {
return d.ResponseWriter.(http.CloseNotifier).CloseNotify()
}
-func (d *flusherDelegator) Flush() {
+func (d flusherDelegator) Flush() {
d.ResponseWriter.(http.Flusher).Flush()
}
-func (d *hijackerDelegator) Hijack() (net.Conn, *bufio.ReadWriter, error) {
+func (d hijackerDelegator) Hijack() (net.Conn, *bufio.ReadWriter, error) {
return d.ResponseWriter.(http.Hijacker).Hijack()
}
-func (d *readerFromDelegator) ReadFrom(re io.Reader) (int64, error) {
+func (d readerFromDelegator) ReadFrom(re io.Reader) (int64, error) {
if !d.wroteHeader {
d.WriteHeader(http.StatusOK)
}
|
Make delegators wrapping responseWriterDelegator have value receivers
|
prometheus_client_golang
|
train
|
2541bf70c43fc06082cc46316c22428920da80d5
|
diff --git a/src/barchart.js b/src/barchart.js
index <HASH>..<HASH> 100644
--- a/src/barchart.js
+++ b/src/barchart.js
@@ -62,14 +62,27 @@ ngCubes.directive('cubesBarchart', ['$rootScope', '$http', function($rootScope,
y = asArray(state.y)[0];
xType = isAggregate(model.aggregates, x) ? "Q" : "O";
yType = isAggregate(model.aggregates, y) ? "Q" : "O";
+ ySlug = y.replace(/\./g,"-");
+ xSlug = x.replace(/\./g,"-");
+ var dataCells = [];
+ data.cells.forEach(function(d) {
+ dCell = {};
+ Object.keys(d).forEach(function(key){
+ var value = d[key];
+ key = key.replace(/\./g,'-');
+ dCell[key] = value;
+ });
+ dataCells.push(dCell);
+ });
+
shorthand = {
"data": {
- "values": data.cells
+ "values": dataCells
},
"marktype": "bar",
"encoding": {
- "y": {"type": yType, "name": y},
- "x": {"type": xType, "name": x}
+ "y": {"type": yType, "name": ySlug},
+ "x": {"type": xType, "name": xSlug}
}
};
wrapper = element.querySelectorAll('.barchart-cubes')[0]
|
First way of sluggifying the data keys.
There might be a far better solution possible, but it works for now.
|
openspending_babbage.ui
|
train
|
40727f5079ebb2a22617f9b17bbb50d8abceb50e
|
diff --git a/lib/render/schema.rb b/lib/render/schema.rb
index <HASH>..<HASH> 100644
--- a/lib/render/schema.rb
+++ b/lib/render/schema.rb
@@ -66,7 +66,12 @@ module Render
private
def require_attributes!
- definition.fetch(:required, []).each do |required_attribute|
+ return unless definition.has_key?(:required)
+
+ required_attributes = definition.fetch(:required)
+ return if [true, false].include?(required_attributes)
+
+ required_attributes.each do |required_attribute|
attribute = attributes.detect { |attribute| attribute.name == required_attribute.to_sym }
attribute.required = true
end
diff --git a/spec/functional/render/schema_spec.rb b/spec/functional/render/schema_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/functional/render/schema_spec.rb
+++ b/spec/functional/render/schema_spec.rb
@@ -80,6 +80,20 @@ module Render
schema.attributes[0].required.should_not be
schema.attributes[1].required.should be
end
+
+ it "is silently ignores draft-3 boolean requires" do
+ draft_3_definition = {
+ type: Object,
+ required: true,
+ properties: {
+ title: { type: String }
+ }
+ }
+
+ expect {
+ Schema.new(draft_3_definition).render!
+ }.to_not raise_error
+ end
end
end
end
|
do not blow up for schemas using draft-3 `required` syntax
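For background: JSON Schema draft-3 expressed `required` as a per-property boolean, while draft-4 made it a list of names at the object level; the guard simply skips the boolean form. A Python sketch of the same tolerance:

    def required_names(definition):
        required = definition.get("required", [])
        if isinstance(required, bool):   # draft-3 syntax: nothing to iterate
            return []
        return list(required)

    print(required_names({"required": True}))       # []
    print(required_names({"required": ["title"]}))  # ['title']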
|
stevenweber_render
|
train
|
32096ca54bd111e971d0de35f58dfc9c9f8da437
|
diff --git a/aiogram/dispatcher/middlewares.py b/aiogram/dispatcher/middlewares.py
index <HASH>..<HASH> 100644
--- a/aiogram/dispatcher/middlewares.py
+++ b/aiogram/dispatcher/middlewares.py
@@ -1,5 +1,5 @@
-from aiogram.dispatcher import Handler
-from aiogram.dispatcher.handler import SkipHandler
+from . import Handler
+from .handler import SkipHandler
class Middleware:
|
Middlewares: use relative imports.
|
aiogram_aiogram
|
train
|
49ebb8b8deb75c9af75e0667c96fae80d5daf52b
|
diff --git a/lib/sprockets/environment.rb b/lib/sprockets/environment.rb
index <HASH>..<HASH> 100644
--- a/lib/sprockets/environment.rb
+++ b/lib/sprockets/environment.rb
@@ -36,7 +36,7 @@ module Sprockets
if concatenatable?(pathname.format_extension)
ConcatenatedAsset.new(self, pathname)
else
- StaticAsset.new(self, pathname)
+ StaticAsset.new(pathname)
end
end
diff --git a/lib/sprockets/static_asset.rb b/lib/sprockets/static_asset.rb
index <HASH>..<HASH> 100644
--- a/lib/sprockets/static_asset.rb
+++ b/lib/sprockets/static_asset.rb
@@ -5,7 +5,7 @@ module Sprockets
class StaticAsset
attr_reader :pathname, :mtime, :length, :digest
- def initialize(environment, pathname)
+ def initialize(pathname)
@pathname = pathname
contents = read
|
No need to pass environment to StaticAsset.new
|
rails_sprockets
|
train
|
c28591a4020800fe7e49892b085091e4a35c5bf2
|
diff --git a/lib/ditty/helpers/component.rb b/lib/ditty/helpers/component.rb
index <HASH>..<HASH> 100644
--- a/lib/ditty/helpers/component.rb
+++ b/lib/ditty/helpers/component.rb
@@ -13,10 +13,10 @@ module Ditty
end
def list
- params['count'] ||= 10
- params['page'] ||= 1
+ count = params['count'] || 10
+ page = params['page'] || 1
- dataset.select.paginate(params['page'].to_i, params['count'].to_i)
+ dataset.select.paginate(page.to_i, count.to_i)
end
def heading(action = nil)
|
fix: Don't modify params when getting the list
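The pitfall being fixed, in a few Python lines (`params` stands in for the shared request hash):

    params = {}                                    # shared request params
    # before: defaulting by assignment mutates the shared hash
    params["count"] = params.get("count") or 10    # params is now {'count': 10}

    params = {}
    # after: read into a local with a default and leave params untouched
    count = params.get("count") or 10              # params stays {}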
|
EagerELK_ditty
|
train
|
9c5eb9aa4d8de3d3060c7c6551b1e726d7577f57
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,20 +1,13 @@
from setuptools import setup, find_packages
-with open('README.rst') as f:
- readme = f.read()
setup(
name = "django-disposable-email-checker",
version = "0.1.1",
packages = find_packages(),
- package_data = {
- # If any package contains *.txt or *.rst files, include them:
- '': ['*.txt', '*.rst'],
- },
author = "Aaron Bassett",
author_email = "me@aaronbassett.com",
description = "Python class for use with Django to detect Disposable Emails",
- long_description=readme,
license = "MIT License",
keywords = "django email disposable validation",
url = "https://github.com/aaronbassett/DisposableEmailChecker",
|
Remove long description as it was causing a pip error
|
aaronbassett_DisposableEmailChecker
|
train
|
054e9028659e9213c8a301614071274c5337cc38
|
diff --git a/holoviews/core/io.py b/holoviews/core/io.py
index <HASH>..<HASH> 100644
--- a/holoviews/core/io.py
+++ b/holoviews/core/io.py
@@ -109,6 +109,12 @@ class Exporter(param.ParameterizedFunction):
return data
@bothmethod
+ def _filename(self_or_cls, filename):
+ "Add the file extension if not already present"
+ if not filename.endswith(self_or_cls.file_ext):
+ return '%s.%s' % (filename, self_or_cls.file_ext)
+ else:
+ return filename
def _merge_metadata(self_or_cls, obj, fn, *dicts):
"""
Returns a merged metadata info dictionary from the supplied
@@ -218,12 +224,13 @@ class Serializer(Exporter):
return data, {'file-ext': self.file_ext, 'mime_type':self.mime_type}
@bothmethod
- def save(self_or_cls, obj, basename, info={}, key={}, **kwargs):
+ def save(self_or_cls, obj, filename, info={}, key={}, **kwargs):
data, base_info = self_or_cls(obj, **kwargs)
key = self_or_cls._merge_metadata(obj, self_or_cls.key_fn, base_info, key)
info = self_or_cls._merge_metadata(obj, self_or_cls.info_fn, info)
metadata, _ = self_or_cls({'info':info, 'key':key}, **kwargs)
- with open('%s.%s' % (basename, self_or_cls.file_ext), 'a') as f:
+ filename = self_or_cls._filename(filename)
+ with open(filename, 'a') as f:
f.write(metadata)
f.write(data)
|
Added Exporter._filename helper method for handling file extensions
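Behaviour of the helper in isolation (extracted for illustration, with file_ext pinned to 'html'):

    def _filename(filename, file_ext="html"):
        "Add the file extension if not already present"
        if not filename.endswith(file_ext):
            return "%s.%s" % (filename, file_ext)
        return filename

    print(_filename("report"))       # report.html
    print(_filename("report.html"))  # report.html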
|
pyviz_holoviews
|
train
|
44a1f6ef1ec16f41a3630861b26169459a79dfbc
|
diff --git a/nipap-www/nipapwww/public/nipap.js b/nipap-www/nipapwww/public/nipap.js
index <HASH>..<HASH> 100644
--- a/nipap-www/nipapwww/public/nipap.js
+++ b/nipap-www/nipapwww/public/nipap.js
@@ -884,9 +884,25 @@ function receiveVRFSelector(result) {
return;
}
- // place VRFs in VRF container
+ // empty VRF container
var vrf_cont = $('.selector_result');
vrf_cont.empty();
+
+ // If it's a search for the default VRF, ie empty search, then display the
+ // currently selected VRFs. For other search, we don't show the currently
+ // selected to avoid cluttering the result list.
+ if (result.interpretation.length == 1 && result.interpretation[0].string == '') {
+ // add selected VRFs to the selectedbar
+ $.each(selected_vrfs, function (k, v) {
+ // except for the default VRF, since that will already be included
+ // in the search result
+ if (k != 0) {
+ addVRFToSelectList(v, $('.selector_result'));
+ }
+ });
+ }
+
+ // place search result in VRF container
if (result.result.length > 0) {
for (i = 0; i < result.result.length; i++) {
|
Show current selection in VRF selector
This adds back the functionality to have the VRF selector by default
show the currently selected VRFs for easy access!
Fixes #<I>.
|
SpriteLink_NIPAP
|
train
|
ea3374bcb0b7a78ccbaeb9cd927fbaf259a5cf29
|
diff --git a/container.go b/container.go
index <HASH>..<HASH> 100644
--- a/container.go
+++ b/container.go
@@ -706,7 +706,7 @@ func (container *Container) RwChecksum() (string, error) {
if _, err := io.Copy(h, rwData); err != nil {
return "", err
}
- return hex.EncodeToString(h.Sum(nil)), nil
+ return "sha256:"+hex.EncodeToString(h.Sum(nil)), nil
}
func (container *Container) Export() (Archive, error) {
|
Prepend hash method to the image checksum
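The resulting format, shown with Python's hashlib (the `sha256:` prefix makes the digest self-describing, the convention container tooling uses for content-addressed digests):

    import hashlib
    digest = hashlib.sha256(b"rw layer data").hexdigest()
    print("sha256:" + digest)   # e.g. sha256:1f2a...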
|
containers_storage
|
train
|
c70c1774737131e8490a80eebf5f366d7242f334
|
diff --git a/deuce.js b/deuce.js
index <HASH>..<HASH> 100644
--- a/deuce.js
+++ b/deuce.js
@@ -446,8 +446,8 @@ Builder.prototype.publish = function(doc, callback) {
output = template(scope, {'partials':partials, 'helpers':helpers})
} catch (er) {
self.log.debug('Template error', {'keys':Object.keys(er), 'message':er.message, 'str':er.toString()})
- throw er
- return callback(er)
+ output = er.stack + '\n'
+ attachment.content_type = 'text/plain'
}
}
|
Return the error if there is a problem
|
iriscouch_static-plus
|
train
|
6e0321927befdb82ec985d8898937c97c5a9691b
|
diff --git a/jeppetto-dao-dynamodb/src/main/java/org/iternine/jeppetto/dao/dynamodb/iterable/DynamoDBIterable.java b/jeppetto-dao-dynamodb/src/main/java/org/iternine/jeppetto/dao/dynamodb/iterable/DynamoDBIterable.java
index <HASH>..<HASH> 100644
--- a/jeppetto-dao-dynamodb/src/main/java/org/iternine/jeppetto/dao/dynamodb/iterable/DynamoDBIterable.java
+++ b/jeppetto-dao-dynamodb/src/main/java/org/iternine/jeppetto/dao/dynamodb/iterable/DynamoDBIterable.java
@@ -95,7 +95,7 @@ public abstract class DynamoDBIterable<T> implements Iterable<T> {
public String getPosition(boolean removeHashKey) {
- Map<String, AttributeValue> lastExaminedKey = getLastExaminedKey();
+ Map<String, AttributeValue> lastExaminedKey = getLastExaminedKey(removeHashKey);
if (lastExaminedKey == null) {
return null;
@@ -105,10 +105,6 @@ public abstract class DynamoDBIterable<T> implements Iterable<T> {
try {
for (Map.Entry<String, AttributeValue> entry : lastExaminedKey.entrySet()) {
- if (removeHashKey && entry.getKey().equals(hashKeyField)) {
- continue;
- }
-
if (sb.length() > 0) {
sb.append("&");
}
@@ -212,7 +208,7 @@ public abstract class DynamoDBIterable<T> implements Iterable<T> {
// Methods - Private
//-------------------------------------------------------------
- private Map<String, AttributeValue> getLastExaminedKey() {
+ private Map<String, AttributeValue> getLastExaminedKey(boolean removeHashKey) {
Map<String, AttributeValue> generatedKey = new HashMap<String, AttributeValue>(3); // hash, range, index keys
if (!dynamoDBIterator.hasNext0()) {
@@ -220,6 +216,10 @@ public abstract class DynamoDBIterable<T> implements Iterable<T> {
}
for (String keyField : getKeyFields()) {
+ if (removeHashKey && keyField.equals(hashKeyField)) {
+ continue;
+ }
+
generatedKey.put(keyField, dynamoDBIterator.getLastItem().get(keyField));
}
diff --git a/jeppetto-dao/src/test/java/org/iternine/jeppetto/dao/DAOBuilderTest.java b/jeppetto-dao/src/test/java/org/iternine/jeppetto/dao/DAOBuilderTest.java
index <HASH>..<HASH> 100644
--- a/jeppetto-dao/src/test/java/org/iternine/jeppetto/dao/DAOBuilderTest.java
+++ b/jeppetto-dao/src/test/java/org/iternine/jeppetto/dao/DAOBuilderTest.java
@@ -17,6 +17,8 @@
package org.iternine.jeppetto.dao;
+import org.iternine.jeppetto.dao.id.BaseNIdGenerator;
+
import org.junit.Assert;
import org.junit.Test;
@@ -44,4 +46,16 @@ public class DAOBuilderTest {
sampleDAO.findByFieldOne(0);
sampleDAO.findByFieldOneGreaterThan(1);
}
+
+
+ @Test
+ public void idTest() {
+ BaseNIdGenerator baseNIdGenerator = new BaseNIdGenerator(64, BaseNIdGenerator.BASE62_CHARACTERS);
+
+ for (int i = 0; i < 100; i++) {
+ String id = baseNIdGenerator.generateId();
+
+ System.out.println(id);
+ }
+ }
}
|
Push removeHashKey logic to where lastExaminedKey is being built.
|
jeppetto_jeppetto
|
train
|
3bdff0992fc688db2e8c1410ae8c34045a8d42bd
|
diff --git a/torf/_torrent.py b/torf/_torrent.py
index <HASH>..<HASH> 100644
--- a/torf/_torrent.py
+++ b/torf/_torrent.py
@@ -756,7 +756,7 @@ class Torrent():
def dump(self, validate=True):
"""
- Create bencoded :attr:`metainfo`
+ Create bencoded :attr:`metainfo` (i.e. the content of a torrent file)
:param bool validate: Whether to run :meth:`validate` first
@@ -770,7 +770,14 @@ class Torrent():
"""
Write current :attr:`metainfo` to torrent file
- :param filepath: Path of the new torrent file
+ This method opens the file before calling :meth:`generate` (which can
+ take a long time), in case it's not writable. If you don't care about
+ that, you can also do something like this:
+
+ >>> with open('my.torrent', 'wb') as f:
+ ... f.write(torrent.dump())
+
+ :param filepath: Path of the torrent file
:param bool validate: Whether to run :meth:`validate` first
:param bool overwrite: Whether to silently overwrite `filepath` (only
if all pieces were hashed successfully)
|
Expand docstrings of write() and dump()
|
rndusr_torf
|
train
|
bd4e67d8cfe5fce4c22e65607b8dc7a7a8483b80
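A sketch of the open-early pattern the new docstring describes, in plain Python; `generate_metainfo` stands in for the slow hashing step and is not a torf API.

def generate_metainfo() -> bytes:
    # Stand-in for the expensive part, e.g. hashing every piece.
    return b"d4:name8:my.filese"

def write_torrent(filepath: str) -> None:
    # Open the destination first so an unwritable path fails fast,
    # before any time is spent generating the data.
    with open(filepath, "wb") as f:
        f.write(generate_metainfo())

write_torrent("my.torrent")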
|
diff --git a/exchangelib/autodiscover/discovery.py b/exchangelib/autodiscover/discovery.py
index <HASH>..<HASH> 100644
--- a/exchangelib/autodiscover/discovery.py
+++ b/exchangelib/autodiscover/discovery.py
@@ -26,6 +26,13 @@ from .protocol import AutodiscoverProtocol
log = logging.getLogger(__name__)
+DNS_LOOKUP_ERRORS = (
+ dns.name.EmptyLabel,
+ dns.resolver.NXDOMAIN,
+ dns.resolver.NoAnswer,
+ dns.resolver.NoNameservers,
+)
+
def discover(email, credentials=None, auth_type=None, retry_policy=None):
ad_response, protocol = Autodiscovery(email=email, credentials=credentials).discover()
@@ -357,8 +364,9 @@ class Autodiscovery:
def _is_valid_hostname(self, hostname):
log.debug("Checking if %s can be looked up in DNS", hostname)
try:
- self.resolver.resolve(hostname)
- except (dns.resolver.NoNameservers, dns.resolver.NXDOMAIN, dns.resolver.NoAnswer, dns.name.EmptyLabel):
+ self.resolver.resolve(f"{hostname}.", "A", lifetime=self.DNS_RESOLVER_ATTRS.get("timeout"))
+ except DNS_LOOKUP_ERRORS as e:
+ log.debug("DNS A lookup failure: %s", e)
return False
return True
@@ -379,8 +387,8 @@ class Autodiscovery:
records = []
try:
answers = self.resolver.resolve(f"{hostname}.", "SRV", lifetime=self.DNS_RESOLVER_ATTRS.get("timeout"))
- except (dns.resolver.NoNameservers, dns.resolver.NoAnswer, dns.resolver.NXDOMAIN) as e:
- log.debug("DNS lookup failure: %s", e)
+ except DNS_LOOKUP_ERRORS as e:
+ log.debug("DNS SRV lookup failure: %s", e)
return records
for rdata in answers:
try:
diff --git a/exchangelib/errors.py b/exchangelib/errors.py
index <HASH>..<HASH> 100644
--- a/exchangelib/errors.py
+++ b/exchangelib/errors.py
@@ -1691,7 +1691,7 @@ class ErrorWrongServerVersionDelegate(ResponseMessageError):
pass
-# Microsoft recommends to cache the autodiscover data around 24 hours and perform autodiscover
+# Microsoft recommends caching the autodiscover data for around 24 hours and performing autodiscover
# immediately following certain error responses from EWS. See more at
# http://blogs.msdn.com/b/mstehle/archive/2010/11/09/ews-best-practices-use-autodiscover.aspx
diff --git a/tests/test_autodiscover.py b/tests/test_autodiscover.py
index <HASH>..<HASH> 100644
--- a/tests/test_autodiscover.py
+++ b/tests/test_autodiscover.py
@@ -542,7 +542,7 @@ class AutodiscoverTest(EWSTest):
class _Mock1:
@staticmethod
- def resolve(hostname, cat):
+ def resolve(*args, **kwargs):
class A:
@staticmethod
def to_text():
@@ -560,7 +560,7 @@ class AutodiscoverTest(EWSTest):
class _Mock2:
@staticmethod
- def resolve(hostname, cat):
+ def resolve(*args, **kwargs):
class A:
@staticmethod
def to_text():
|
Make calls to resolve() consistent. Fix mock versions.
|
ecederstrand_exchangelib
|
train
|
aea9558b5ea2d2ac245dba5f843bc22080fcbe53
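A hedged dnspython sketch of the lookup style the commit converges on: an absolute name, an explicit record type, a bounded lifetime, and one shared tuple of lookup errors. Assumes dnspython 2.x (`Resolver.resolve`).

import dns.name
import dns.resolver

DNS_LOOKUP_ERRORS = (
    dns.name.EmptyLabel,
    dns.resolver.NXDOMAIN,
    dns.resolver.NoAnswer,
    dns.resolver.NoNameservers,
)

def is_valid_hostname(resolver: dns.resolver.Resolver, hostname: str) -> bool:
    try:
        # The trailing dot makes the name absolute; lifetime caps the total wait.
        resolver.resolve(f"{hostname}.", "A", lifetime=5.0)
    except DNS_LOOKUP_ERRORS:
        return False
    return True

print(is_valid_hostname(dns.resolver.Resolver(), "example.com"))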
|
diff --git a/karma.conf.js b/karma.conf.js
index <HASH>..<HASH> 100644
--- a/karma.conf.js
+++ b/karma.conf.js
@@ -9,7 +9,8 @@ module.exports = function (config) {
frameworks: ['mocha', 'chai', 'sinon'],
files: [
- 'node_modules/document-register-element/build/document-register-element.js',
+ // 'node_modules/document-register-element/build/document-register-element.js',
+ 'node_modules/webcomponents.js/webcomponents-lite.js',
'src/**/*.js',
'specs/**/*.spec.js'
],
@@ -42,7 +43,7 @@ module.exports = function (config) {
autoWatch: false,
- // browsers: ['IE'],
+ // browsers: ['IE'],
browsers: ['Firefox'],
singleRun: true,
diff --git a/specs/ceb-feature-template.spec.js b/specs/ceb-feature-template.spec.js
index <HASH>..<HASH> 100644
--- a/specs/ceb-feature-template.spec.js
+++ b/specs/ceb-feature-template.spec.js
@@ -90,7 +90,7 @@ describe('ceb-feature-template', function () {
div.appendChild(clonedCe);
setTimeout(done, timeout);
});
- xit('should contains the templated nodes', function () {
+ it('should contains the templated nodes', function () {
expect(clonedCe.querySelector('ul.tag1 > li.content > ' + tagName2 + ' > ul.tag2 > li.content > div')).to.exist();
});
it('should contains the initial light DOM', function () {
diff --git a/specs/ceb.spec.js b/specs/ceb.spec.js
index <HASH>..<HASH> 100644
--- a/specs/ceb.spec.js
+++ b/specs/ceb.spec.js
@@ -234,7 +234,7 @@ describe('A custom element', function () {
describe('when the the element is detached', function () {
beforeEach(function (done) {
ce.parentNode.removeChild(ce);
- setTimeout(done, 10);
+ setTimeout(done, 50);
});
it('should be removed', function () {
expect(ce.__eventHandlers).to.be.null();
diff --git a/specs/demo.html b/specs/demo.html
index <HASH>..<HASH> 100644
--- a/specs/demo.html
+++ b/specs/demo.html
@@ -12,8 +12,8 @@
<body>
<!-- polyfills -->
- <!-- <script src="https://cdnjs.cloudflare.com/ajax/libs/webcomponentsjs/0.5.2/webcomponents-lite.min.js"></script> -->
- <script src="https://cdnjs.cloudflare.com/ajax/libs/document-register-element/0.1.6/document-register-element.js"></script>
+ <script src="https://cdnjs.cloudflare.com/ajax/libs/webcomponentsjs/0.5.2/webcomponents-lite.min.js"></script>
+ <!-- <script src="https://cdnjs.cloudflare.com/ajax/libs/document-register-element/0.1.6/document-register-element.js"></script> -->
<!-- source -->
<script src="ceb.js"></script>
diff --git a/specs/index.html b/specs/index.html
index <HASH>..<HASH> 100644
--- a/specs/index.html
+++ b/specs/index.html
@@ -14,8 +14,8 @@
<div id="mocha"></div>
<!-- polyfills -->
- <!-- <script src="https://cdnjs.cloudflare.com/ajax/libs/webcomponentsjs/0.5.2/webcomponents-lite.min.js"></script> -->
- <script src="https://cdnjs.cloudflare.com/ajax/libs/document-register-element/0.1.6/document-register-element.js"></script>
+ <script src="https://cdnjs.cloudflare.com/ajax/libs/webcomponentsjs/0.5.2/webcomponents-lite.min.js"></script>
+ <!-- <script src="https://cdnjs.cloudflare.com/ajax/libs/document-register-element/0.1.6/document-register-element.js"></script> -->
<!-- testing -->
<script src="//cdnjs.cloudflare.com/ajax/libs/chai/1.10.0/chai.js"></script>
diff --git a/src/ceb-feature-template.js b/src/ceb-feature-template.js
index <HASH>..<HASH> 100644
--- a/src/ceb-feature-template.js
+++ b/src/ceb-feature-template.js
@@ -90,12 +90,14 @@
// When the node is freshly created, the content node is the element.
// When the node has been created by clonning, the content node is not anymore the element,
// but a sub content node linked to one of its descents.
+
var oldContentNode = findContentNode(el);
// Remove the light DOM to keep it.
while (oldContentNode.childNodes.length > 0) {
lightChildren.push(oldContentNode.removeChild(oldContentNode.childNodes[0]));
}
+ // lightChildren = Array.prototype.slice.call(oldContentNode.childNodes);
// Generate the new content's id value.
var newCebContentId = 'ceb-' + (counter++) + '-content';
@@ -115,6 +117,7 @@
var newContentNode = findContentNode(el);
lightChildren.forEach(function (child) {
newContentNode.appendChild(child);
+ // newContentNode.appendChild(child);
});
}
|
fix template with IE*, switch to webcomponents-lite.js
|
tmorin_ceb
|
train
|
4d0e7e537c8486a7bde6482e97bf2674c7ae41f5
|
diff --git a/lib/presto-client/index.js b/lib/presto-client/index.js
index <HASH>..<HASH> 100644
--- a/lib/presto-client/index.js
+++ b/lib/presto-client/index.js
@@ -9,7 +9,8 @@ var Client = exports.Client = function(args){
args = {};
this.host = args.host || 'localhost';
this.port = args.port || 8080;
- this.user = args.user;
+ this.user = args.user || process.env.USER;
+
this.catalog = args.catalog;
this.schema = args.schema;
@@ -85,10 +86,11 @@ Client.prototype.execute = function(opts) {
if (opts.schema || this.schema)
header[Headers.SCHEMA] = opts.schema || this.schema;
+ var fetch_info = opts.info || false;
+
var cancel_checker = opts.cancel;
var columns_callback = opts.columns;
var data_callback = opts.data;
- var info_callback = opts.info;
var success_callback = opts.success || opts.callback;
var error_callback = opts.error || opts.callback;
@@ -217,13 +219,11 @@ Client.prototype.execute = function(opts) {
return;
}
- // end of fetch sequence
var finishedStats = response.stats;
- if (info_callback && response.infoUri) {
+ if (fetch_info && response.infoUri) {
client.request(response.infoUri, function(error, code, response){
- info_callback(error, response);
- success_callback(null, finishedStats);
+ success_callback(null, finishedStats, response);
});
}
else {
|
Use an info flag instead of an info callback to simplify the caller
|
tagomoris_presto-client-node
|
train
|
5f4e5366499da7172af8bb101f5026289aaa4756
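The API shape in schematic Python (the client itself is Node.js): a boolean flag folds the extra query-info fetch into the ordinary success callback as a third argument, instead of requiring a separate callback. Names are illustrative.

def execute(query, fetch_info=False, on_success=None):
    # Stand-ins for the real request cycle; only the callback shape matters.
    stats = {"state": "FINISHED"}
    if fetch_info:
        info = {"query": query}  # would come from response.infoUri
        on_success(None, stats, info)
    else:
        on_success(None, stats)

execute("SELECT 1", fetch_info=True,
        on_success=lambda err, stats, info=None: print(stats, info))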
|
diff --git a/lib/stream/feed.rb b/lib/stream/feed.rb
index <HASH>..<HASH> 100644
--- a/lib/stream/feed.rb
+++ b/lib/stream/feed.rb
@@ -10,7 +10,8 @@ module Stream
base_uri 'https://getstream.io/api'
def make_http_request(method, relative_url, params=nil, data=nil, headers=nil)
- response = self.class.send(method, relative_url, :headers => headers, :query => params, :body => data)
+ headers['Content-Type'] = 'application/json'
+ response = self.class.send(method, relative_url, :headers => headers, :query => params, :body => data.to_json)
case response.code
when 200..203
return response
diff --git a/spec/integration_spec.rb b/spec/integration_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/integration_spec.rb
+++ b/spec/integration_spec.rb
@@ -1,5 +1,7 @@
+require 'date'
require 'stream'
+
describe "Integration tests" do
before do
@@ -15,8 +17,37 @@ describe "Integration tests" do
response.should include("id", "actor", "verb", "object", "target", "time")
end
- example "posting a broken activity" do
- expect { @feed42.add_activity({:actor => 1, :verb => 'tweet', :object => 1, :bogus=>42}) }.to raise_error(Stream::StreamApiResponseException)
+ example "posting an activity with datetime object" do
+ activity = {:actor => 1, :verb => 'tweet', :object => 1, :time => DateTime.now}
+ response = @feed42.add_activity(activity)
+ response.should include("id", "actor", "verb", "object", "target", "time")
+ end
+
+ example "localised datetimes should be returned in UTC correctly" do
+ now = DateTime.now.new_offset(5)
+ activity = {:actor => 1, :verb => 'tweet', :object => 1, :time => now}
+ response = @feed42.add_activity(activity)
+ response.should include("id", "actor", "verb", "object", "target", "time")
+ response = @feed42.get(:limit=>5)
+ DateTime.iso8601(response["results"][0]["time"]).should be_within(1).of(now.new_offset(0))
+ end
+
+ example "posting a custom field as a hash" do
+ hash_value = {'a' => 42}
+ activity = {:actor => 1, :verb => 'tweet', :object => 1, :hash_data => hash_value}
+ response = @feed42.add_activity(activity)
+ response.should include("id", "actor", "verb", "object", "target", "hash_data")
+ results = @feed42.get(:limit=>1)["results"]
+ results[0]["hash_data"].should eq hash_value
+ end
+
+ example "posting a custom field as a list" do
+ list_value = [1,2,3]
+ activity = {:actor => 1, :verb => 'tweet', :object => 1, :hash_data => list_value}
+ response = @feed42.add_activity(activity)
+ response.should include("id", "actor", "verb", "object", "target", "hash_data")
+ results = @feed42.get(:limit=>1)["results"]
+ results[0]["hash_data"].should eq list_value
end
example "posting and get one activity" do
|
add tests for datetime parsing, support sending hashes and lists
|
GetStream_stream-ruby
|
train
|
a11e01f7b67c1eaff87d5cef77a0f9abfeee1dd9
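The gist of the transport change, sketched with Python's requests for illustration: serialize the body to JSON yourself and declare it in the Content-Type header, so datetimes and nested hashes/lists survive the round trip. `default=str` is one illustrative fallback for datetime values.

import json
from datetime import datetime, timezone

import requests

def post_activity(url: str, activity: dict) -> requests.Response:
    body = json.dumps(activity, default=str)  # non-JSON types fall back to str()
    return requests.post(url, data=body,
                         headers={"Content-Type": "application/json"})

activity = {
    "actor": 1,
    "verb": "tweet",
    "object": 1,
    "time": datetime.now(timezone.utc).isoformat(),
    "hash_data": {"a": 42},  # nested hash survives the JSON round trip
}
# post_activity("https://getstream.io/api/...", activity)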
|
diff --git a/Supra/Package/Cms/Pages/Editable/Filter/EditableHtmlFilter.php b/Supra/Package/Cms/Pages/Editable/Filter/EditableHtmlFilter.php
index <HASH>..<HASH> 100644
--- a/Supra/Package/Cms/Pages/Editable/Filter/EditableHtmlFilter.php
+++ b/Supra/Package/Cms/Pages/Editable/Filter/EditableHtmlFilter.php
@@ -24,7 +24,7 @@ class EditableHtmlFilter extends HtmlFilter
return sprintf(
$wrap,
$this->blockProperty->getBlock()->getId(),
- $this->blockProperty->getName(),
+ str_replace('.', '_', $this->blockProperty->getHierarchicalName()),
parent::filter($content)
);
}
diff --git a/Supra/Package/Cms/Pages/Editable/Filter/EditableInlineMapFilter.php b/Supra/Package/Cms/Pages/Editable/Filter/EditableInlineMapFilter.php
index <HASH>..<HASH> 100644
--- a/Supra/Package/Cms/Pages/Editable/Filter/EditableInlineMapFilter.php
+++ b/Supra/Package/Cms/Pages/Editable/Filter/EditableInlineMapFilter.php
@@ -18,7 +18,7 @@ class EditableInlineMapFilter implements FilterInterface, BlockPropertyAware
{
if ($content instanceof HtmlTag) {
$content->addClass('yui3-content-inline yui3-input-map-inline map')
- ->setAttribute('id', sprintf('content_%s_%s', $this->blockProperty->getBlock()->getId(), $this->blockProperty->getName()));
+ ->setAttribute('id', sprintf('content_%s_%s', $this->blockProperty->getBlock()->getId(), str_replace('.', '_', $this->blockProperty->getHierarchicalName())));
}
return $content;
diff --git a/Supra/Package/Cms/Pages/Editable/Filter/EditableInlineStringFilter.php b/Supra/Package/Cms/Pages/Editable/Filter/EditableInlineStringFilter.php
index <HASH>..<HASH> 100644
--- a/Supra/Package/Cms/Pages/Editable/Filter/EditableInlineStringFilter.php
+++ b/Supra/Package/Cms/Pages/Editable/Filter/EditableInlineStringFilter.php
@@ -25,7 +25,7 @@ class EditableInlineStringFilter implements FilterInterface, BlockPropertyAware
return sprintf(
$wrap,
$this->blockProperty->getBlock()->getId(),
- $this->blockProperty->getName(),
+ str_replace('.', '_', $this->blockProperty->getHierarchicalName()),
htmlspecialchars($content, ENT_QUOTES, 'UTF-8')
);
}
diff --git a/Supra/Package/Cms/Pages/Editable/Filter/EditableInlineTextareaFilter.php b/Supra/Package/Cms/Pages/Editable/Filter/EditableInlineTextareaFilter.php
index <HASH>..<HASH> 100644
--- a/Supra/Package/Cms/Pages/Editable/Filter/EditableInlineTextareaFilter.php
+++ b/Supra/Package/Cms/Pages/Editable/Filter/EditableInlineTextareaFilter.php
@@ -23,7 +23,7 @@ class EditableInlineTextareaFilter implements FilterInterface, BlockPropertyAwar
return sprintf(
$wrap,
$this->blockProperty->getBlock()->getId(),
- $this->blockProperty->getName(),
+ str_replace('.', '_', $this->blockProperty->getHierarchicalName()),
$content
);
}
|
Corrected property names that are used as a part of selectors in CMS
|
sitesupra_sitesupra
|
train
|
364516f09ee75c91b1843a37179a21ed362e8373
|
diff --git a/src/main/java/com/github/daytron/simpledialogfx/dialog/Dialog.java b/src/main/java/com/github/daytron/simpledialogfx/dialog/Dialog.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/github/daytron/simpledialogfx/dialog/Dialog.java
+++ b/src/main/java/com/github/daytron/simpledialogfx/dialog/Dialog.java
@@ -94,7 +94,7 @@ public final class Dialog extends Stage implements Initializable {
* dialog to be created is determine by the dialogType parameter. The
* default title is set to empty;
*
- * @param dialogType The dialog type to be created.
+ * @param dialogType The dialog type to be created
* @param header The text for the colored header label
* @param details The text for the message details label
*/
@@ -106,6 +106,64 @@ public final class Dialog extends Stage implements Initializable {
details,
null);
}
+
+ /**
+ * Construct a dialog with optional dialog style. The type of
+ * dialog to be created is determined by the dialogType parameter. The
+ * default title is set to empty.
+ *
+ * @param dialogType The dialog type to be created
+ * @param dialogStyle The dialog style to be created
+ * @param header The text for the colored header label
+ * @param details The text for the message details label
+ */
+ public Dialog(DialogType dialogType, DialogStyle dialogStyle,
+ String header, String details) {
+ this(dialogType,
+ dialogStyle,
+ "",
+ header,
+ details,
+ null);
+ }
+
+ /**
+ * Construct a dialog with optional dialog style. The type of
+ * dialog to be created is determined by the dialogType parameter.
+ *
+ * @param dialogType The dialog type to be created
+ * @param dialogStyle The dialog style to be created
+ * @param title The window title of the dialog
+ * @param header The text for the colored header label
+ * @param details The text for the message details label
+ */
+ public Dialog(DialogType dialogType, DialogStyle dialogStyle, String title,
+ String header, String details) {
+ this(dialogType,
+ dialogStyle,
+ title,
+ header,
+ details,
+ null);
+ }
+
+ /**
+ * Construct a dialog using the default "native" dialog style. The type of
+ * dialog to be created is determined by the dialogType parameter.
+ *
+ * @param dialogType The dialog type to be created
+ * @param title The window title of the dialog
+ * @param header The text for the colored header label
+ * @param details The text for the message details label
+ */
+ public Dialog(DialogType dialogType, String title, String header, String details) {
+ this(dialogType,
+ DialogStyle.NATIVE,
+ title,
+ header,
+ details,
+ null);
+ }
/**
* Construct an exception dialog using the default "native" dialog style.
|
Add two constructors for undecorated window style for both title and non-title parameter
|
Daytron_SimpleDialogFX
|
train
|
daee688937d70038c2fbd98ca2ad7664362e49d2
|
diff --git a/pyinfra/operations/windows.py b/pyinfra/operations/windows.py
index <HASH>..<HASH> 100644
--- a/pyinfra/operations/windows.py
+++ b/pyinfra/operations/windows.py
@@ -17,7 +17,7 @@ from pyinfra.api import operation
# FUTURE: add ability to stop processes (ex: "Stop-Process <id>")
-@operation
+@operation(is_idempotent=False)
def service(service, running=True, restart=False, suspend=False):
"""
Stop/Start a Windows service.
|
Flag `windows.service` operation as non-idempotent
|
Fizzadar_pyinfra
|
train
|
b4e5d40c54b5a4c6487035b70c2eba404459e782
|
diff --git a/lib/donatj/SimpleCalendar.php b/lib/donatj/SimpleCalendar.php
index <HASH>..<HASH> 100644
--- a/lib/donatj/SimpleCalendar.php
+++ b/lib/donatj/SimpleCalendar.php
@@ -134,11 +134,22 @@ class SimpleCalendar {
$out .= "</tr></thead>\n<tbody>\n<tr>";
+
+
if( $wday == 7 ) {
- $wday = 0;
- } else {
- $out .= str_repeat('<td class="SCprefix"> </td>', $wday);
- }
+ $wday = 0;
+ } else if ($wday < 0) {
+ for ($i = 0; $i < 7 + $wday; $i++) {
+ $out .= '<td class="SCprefix"> </td>';
+ }
+
+ $wday = 7 + $wday;
+
+ } else {
+ $out .= str_repeat('<td class="SCprefix"> </td>', $wday);
+ }
+
+
$count = $wday + 1;
for( $i = 1; $i <= $no_days; $i++ ) {
|
Fixed a bug when setting the first day of the week
|
donatj_SimpleCalendar
|
train
|
b56f33d208f6649ef37803d7d3687b5e44931170
|
diff --git a/src/Event/GuardianListener.php b/src/Event/GuardianListener.php
index <HASH>..<HASH> 100644
--- a/src/Event/GuardianListener.php
+++ b/src/Event/GuardianListener.php
@@ -55,7 +55,9 @@ class GuardianListener implements EventListenerInterface
'Wasabi/Core.Users.login',
'Wasabi/Core.Users.logout',
'Wasabi/Core.Users.register',
- 'Wasabi/Core.Users.unauthorized'
+ 'Wasabi/Core.Users.unauthorized',
+ 'Wasabi/Core.Users.lostPassword',
+ 'Wasabi/Core.Users.resetPassword'
]);
}
|
add guest actions to GuardianListener
|
wasabi-cms_core
|
train
|
ab731a1915c3f9e8f3db7f960d2b39e50ae4109e
|
diff --git a/tests/test_cell.py b/tests/test_cell.py
index <HASH>..<HASH> 100644
--- a/tests/test_cell.py
+++ b/tests/test_cell.py
@@ -18,6 +18,24 @@ class Test_Cell( unittest.TestCase ):
np.testing.assert_array_equal( cell.matrix, cell_matrix )
np.testing.assert_array_equal( cell.inv_matrix, mock_invert.return_value )
+ def setUp( self ):
+ cell_matrix = np.array( [ [ 10.0, 0.0, 0.0 ],
+ [ 0.0, 10.0, 0.0 ],
+ [ 0.0, 0.0, 10.0 ] ] )
+ self.cell = Cell( cell_matrix )
+
+ def test_dr( self ):
+ r1 = np.array( [ 0.5, 0.1, 0.1 ] )
+ r2 = np.array( [ 0.1, 0.4, 0.1 ] )
+ self.assertEqual( self.cell.dr( r1, r2 ), 5.0 )
+
+ def test_dr_cutoff( self ):
+ r1 = np.array( [ 0.5, 0.1, 0.1 ] )
+ r2 = np.array( [ 0.1, 0.4, 0.1 ] )
+ self.assertEqual( self.cell.dr( r1, r2, cutoff=1.0 ), None )
+
+class Test_Cell_Support_Functions( unittest.TestCase ):
+
def test_angle( self ):
test_data = [ [ np.array( [ 1.0, 0.0, 0.0 ] ), np.array( [ 0.0, 1.0, 0.0 ] ), 90.0 ],
[ np.array( [ 2.0, 2.0, 0.0 ] ), np.array( [ 0.5, 0.0, 0.0 ] ), 45.0 ] ]
diff --git a/vasppy/cell.py b/vasppy/cell.py
index <HASH>..<HASH> 100644
--- a/vasppy/cell.py
+++ b/vasppy/cell.py
@@ -51,7 +51,18 @@ class Cell:
self.matrix = matrix # 3 x 3 numpy Array
self.inv_matrix = np.linalg.inv( matrix )
- def dr( self, r1, r2, cutoff = None ):
+ def dr( self, r1, r2, cutoff=None ):
+ """
+ Calculate the distance between two fractional coordinates in the cell.
+
+ Args:
+ r1 (np.array): fractional coordinates for position 1.
+ r2 (np.array): fractional coordinates for position 2.
+ cutoff (optional, float): If set, returns None for distances greater than the cutoff. Default None (unset).
+
+ Returns:
+ (float): the distance between r1 and r2.
+ """
delta_r_cartesian = ( r1 - r2 ).dot( self.matrix )
delta_r_squared = sum( delta_r_cartesian**2 )
if cutoff != None:
|
Added unit tests for Cell.dr
|
bjmorgan_vasppy
|
train
|
1f211d3892527b3d1e2f64bf3c1556ee56497cfb
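A self-contained sketch of the distance under test: fractional coordinates are mapped through the cell matrix into cartesian space, and an optional cutoff short-circuits to None. It mirrors the docstring above rather than the full vasppy class.

import numpy as np

def dr(matrix, r1, r2, cutoff=None):
    # Fractional -> cartesian via the cell matrix, then a Euclidean norm.
    delta_r_cartesian = (r1 - r2).dot(matrix)
    delta_r_squared = np.sum(delta_r_cartesian ** 2)
    if cutoff is not None and delta_r_squared > cutoff ** 2:
        return None
    return np.sqrt(delta_r_squared)

cell = np.diag([10.0, 10.0, 10.0])
print(dr(cell, np.array([0.5, 0.1, 0.1]), np.array([0.1, 0.4, 0.1])))  # 5.0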
|
diff --git a/ibis/filesystems.py b/ibis/filesystems.py
index <HASH>..<HASH> 100644
--- a/ibis/filesystems.py
+++ b/ibis/filesystems.py
@@ -177,6 +177,20 @@ class HDFS(object):
def tail(self, hdfs_path, nbytes=1024):
raise NotImplementedError
+ def mv(self, hdfs_path_src, hdfs_path_dest, overwrite=True):
+ """
+ Move hdfs_path_src to hdfs_path_dest
+
+ Parameters
+ ----------
+ overwrite : boolean, default True
+ Overwrite hdfs_path_dest if it exists.
+ """
+ raise NotImplementedError
+
+ def cp(self, hdfs_path_src, hdfs_path_dest):
+ raise NotImplementedError
+
def rm(self, path):
"""
Delete a single file
@@ -265,6 +279,13 @@ class WebHDFS(HDFS):
else:
raise NotImplementedError
+ @implements(HDFS.mv)
+ def mv(self, hdfs_path_src, hdfs_path_dest, overwrite=True):
+ if overwrite and self.exists(hdfs_path_dest):
+ if self.status(hdfs_path_dest)['type'] == 'FILE':
+ self.rm(hdfs_path_dest)
+ return self.client.rename(hdfs_path_src, hdfs_path_dest)
+
def delete(self, hdfs_path, recursive=False):
"""
diff --git a/ibis/tests/test_filesystems.py b/ibis/tests/test_filesystems.py
index <HASH>..<HASH> 100644
--- a/ibis/tests/test_filesystems.py
+++ b/ibis/tests/test_filesystems.py
@@ -135,11 +135,37 @@ class TestHDFSE2E(unittest.TestCase):
self.test_files.append(path)
return path
+ def _make_random_hdfs_file(self, size=1024, directory=None):
+ local_path = self._make_random_file(size=size)
+ remote_path = pjoin(directory or self.tmp_dir, local_path)
+ self.hdfs.put(remote_path, local_path)
+ return remote_path
+
def test_mkdir(self):
path = pjoin(self.tmp_dir, 'mkdir-test')
self.hdfs.mkdir(path)
assert self.hdfs.exists(path)
+ def test_mv_to_existing_file(self):
+ remote_file = self._make_random_hdfs_file()
+ existing_remote_file_dest = self._make_random_hdfs_file()
+ self.hdfs.mv(remote_file, existing_remote_file_dest)
+
+ def test_mv_to_existing_file_no_overwrite(self):
+ remote_file = self._make_random_hdfs_file()
+ existing_remote_file_dest = self._make_random_hdfs_file()
+ with self.assertRaises(Exception):
+ self.hdfs.mv(remote_file, existing_remote_file_dest, overwrite=False)
+
+ def test_mv_to_directory(self):
+ remote_file = self._make_random_hdfs_file()
+ dest_dir = pjoin(self.tmp_dir, util.guid())
+ self.hdfs.mkdir(dest_dir)
+ self.hdfs.mv(remote_file, dest_dir)
+ new_remote_file = pjoin(dest_dir, os.path.basename(remote_file))
+ file_status = self.hdfs.status(new_remote_file)
+ assert file_status['type'] == 'FILE'
+
def test_put_get_delete_file(self):
dirpath = pjoin(self.tmp_dir, 'write-delete-test')
self.hdfs.mkdir(dirpath)
|
Add HDFS.mv (and WebHDFS.mv)
Addresses half of #<I>
|
ibis-project_ibis
|
train
|
62ae7b394712c2ad6c63446e9352113550604daf
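The overwrite semantics above, restated against a local filesystem for illustration (the real method talks to WebHDFS): clobber an existing destination only if it is a plain file, so moving onto a directory still means moving into it.

import os
import shutil

def mv(src: str, dest: str, overwrite: bool = True) -> None:
    if os.path.isfile(dest):
        if not overwrite:
            raise FileExistsError(dest)
        # Only plain files are replaced; directories are left as move targets.
        os.remove(dest)
    shutil.move(src, dest)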
|
diff --git a/sacred/arg_parser.py b/sacred/arg_parser.py
index <HASH>..<HASH> 100644
--- a/sacred/arg_parser.py
+++ b/sacred/arg_parser.py
@@ -31,7 +31,8 @@ Options:
-h --help Print this help message and exit
-m DB --mongo_db=DB Add a MongoDB Observer to the experiment
-l LEVEL --logging=LEVEL Adjust the loglevel
- -d --debug Don't filter the stacktrace
+ -d --debug Don't filter the stacktrace and automatically enter
+ post-mortem debugging with pdb
Arguments:
DB Database specification. Can be [host:port:]db_name
diff --git a/sacred/experiment.py b/sacred/experiment.py
index <HASH>..<HASH> 100644
--- a/sacred/experiment.py
+++ b/sacred/experiment.py
@@ -273,7 +273,10 @@ class Experiment(Ingredient):
loglevel=loglevel)
except:
if args['--debug']:
- raise
+ import traceback
+ import pdb
+ traceback.print_exception(*sys.exc_info())
+ pdb.post_mortem()
else:
print_filtered_stacktrace()
|
added post-mortem debugging
fixes #<I>
|
IDSIA_sacred
|
train
|
b5b836cc8db687b555a8c603e3719ac79b1a8a31
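The debug-flag behaviour in isolation, as a minimal sketch around any callable: print the unfiltered traceback, then drop into pdb at the frame that raised.

import pdb
import sys
import traceback

def run(main, debug=False):
    try:
        return main()
    except Exception:
        if debug:
            # Show the full traceback, then inspect the failing frame.
            traceback.print_exception(*sys.exc_info())
            pdb.post_mortem()
        else:
            raise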
|
diff --git a/molgenis-data-mapper/src/main/java/org/molgenis/data/mapper/service/MappingService.java b/molgenis-data-mapper/src/main/java/org/molgenis/data/mapper/service/MappingService.java
index <HASH>..<HASH> 100644
--- a/molgenis-data-mapper/src/main/java/org/molgenis/data/mapper/service/MappingService.java
+++ b/molgenis-data-mapper/src/main/java/org/molgenis/data/mapper/service/MappingService.java
@@ -43,6 +43,7 @@ public interface MappingService
/**
* Applies all mappings in a {@link MappingTarget}
+ * Adds the source attribute by default
*
* @param mappingTarget the MappingTarget whose mappings are applied
* @param entityName the name of the entity to map to
@@ -51,6 +52,16 @@ public interface MappingService
String applyMappings(MappingTarget mappingTarget, String entityName);
/**
+ * Applies all mappings in a {@link MappingTarget}
+ *
+ * @param mappingTarget the MappingTarget whose mappings are applied
+ * @param entityName the name of the entity to map to
+ * @param addSourceAttribute boolean indicating if the 'source' attribute should be added to the target repository
+ * @return fully qualified name of the generated entity
+ */
+ String applyMappings(MappingTarget mappingTarget, String entityName, boolean addSourceAttribute);
+
+ /**
* Deletes a {@link MappingProject}
*
* @param mappingProjectId id of the {@link MappingProject} to delete
diff --git a/molgenis-data-mapper/src/main/java/org/molgenis/data/mapper/service/impl/MappingServiceImpl.java b/molgenis-data-mapper/src/main/java/org/molgenis/data/mapper/service/impl/MappingServiceImpl.java
index <HASH>..<HASH> 100644
--- a/molgenis-data-mapper/src/main/java/org/molgenis/data/mapper/service/impl/MappingServiceImpl.java
+++ b/molgenis-data-mapper/src/main/java/org/molgenis/data/mapper/service/impl/MappingServiceImpl.java
@@ -157,14 +157,22 @@ public class MappingServiceImpl implements MappingService
return mappingProjectRepository.getMappingProject(identifier);
}
+ public String applyMappings(MappingTarget mappingTarget, String entityName)
+ {
+ return applyMappings(mappingTarget, entityName, true);
+ }
+
// TODO discuss: why isn't this method transactional?
@Override
- public String applyMappings(MappingTarget mappingTarget, String entityName)
+ public String applyMappings(MappingTarget mappingTarget, String entityName, boolean addSourceAttribute)
{
EntityMetaData targetMetaData = EntityMetaData.newInstance(mappingTarget.getTarget(), DEEP_COPY_ATTRS);
targetMetaData.setName(entityName);
targetMetaData.setLabel(entityName);
- targetMetaData.addAttribute(attrMetaFactory.create().setName("source"));
+ if (addSourceAttribute)
+ {
+ targetMetaData.addAttribute(attrMetaFactory.create().setName(SOURCE));
+ }
// add a new repository if the target repo doesn't exist, or check if the target repository is compatible with
// the result of the mappings
@@ -177,7 +185,17 @@ public class MappingServiceImpl implements MappingService
}
else
{
+ // Get an existing repository
targetRepo = dataService.getRepository(entityName);
+
+ // If the addSourceAttribute is true, but the existing repository does not have the SOURCE attribute yet
+ // Get the existing metadata and add the SOURCE attribute
+ EntityMetaData existingTargetMetaData = targetRepo.getEntityMetaData();
+ if (existingTargetMetaData.getAttribute(SOURCE) == null && addSourceAttribute)
+ {
+ existingTargetMetaData.addAttribute(attrMetaFactory.create().setName(SOURCE));
+ dataService.getMeta().updateEntityMeta(existingTargetMetaData);
+ }
}
try
|
Add addSourceAttribute to applyMappings method in interface and impl
|
molgenis_molgenis
|
train
|
c396476c1226ce1b7595c94e4c1c021237fcb112
|
diff --git a/src/Bugsnag/Notification.php b/src/Bugsnag/Notification.php
index <HASH>..<HASH> 100644
--- a/src/Bugsnag/Notification.php
+++ b/src/Bugsnag/Notification.php
@@ -158,12 +158,17 @@ class Bugsnag_Notification
// Prefer cURL if it is installed, otherwise fall back to fopen()
// cURL supports both timeouts and proxies
- if (function_exists('curl_version')) {
- $this->postWithCurl($url, $body);
- } elseif (ini_get('allow_url_fopen')) {
- $this->postWithFopen($url, $body);
- } else {
- error_log('Bugsnag Warning: Couldn\'t notify (neither cURL or allow_url_fopen are available on your PHP installation)');
+
+ try {
+ if (function_exists('curl_version')) {
+ $this->postWithCurl($url, $body);
+ } elseif (ini_get('allow_url_fopen')) {
+ $this->postWithFopen($url, $body);
+ } else {
+ error_log('Bugsnag Warning: Couldn\'t notify (neither cURL or allow_url_fopen are available on your PHP installation)');
+ }
+ } catch (Exception $e) {
+ error_log('Bugsnag Warning: Couldn\'t notify. '.$e->getMessage());
}
}
|
Don't hard fail on sending HTTP requests (#<I>)
|
bugsnag_bugsnag-php
|
train
|
c1accd531fff02f5fc71a071ac5548aacb78cee6
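The shape of the change in Python terms: choose a transport, and wrap the whole attempt so a transport failure degrades to a log line instead of propagating into the host application. The transport function is a placeholder.

import logging

def post_with_requests(url: str, body: bytes) -> None:
    # Placeholder transport; a real notifier might fall back to urllib, etc.
    import requests
    requests.post(url, data=body, timeout=10)

def notify(url: str, body: bytes) -> None:
    try:
        post_with_requests(url, body)
    except Exception as exc:
        # Error reporting must never become a new error for the caller.
        logging.warning("Couldn't notify: %s", exc)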
|
diff --git a/docs/pages/advanced/index.js b/docs/pages/advanced/index.js
index <HASH>..<HASH> 100644
--- a/docs/pages/advanced/index.js
+++ b/docs/pages/advanced/index.js
@@ -34,14 +34,14 @@ export default function Advanced() {
While React-Select assumes a standard way of filtering the menu on search, our api allows you to customise that filtering logic in various ways.
### createFilter function
- React-Select exports a createFilter function that returns a filterOptions method. By using this, users can pick and choose bits of the filtration logic to customise,
+ React-Select exports a createFilter function that returns a filterOption method. By using this, users can pick and choose bits of the filtration logic to customise,
without having to rewrite the logic wholesale.
Below is an example of how you could use the createFilter function to customise filtration logic in react-select.
${(
<ExampleWrapper
- label="Custom filterOptions with createFilter"
+ label="Custom filterOption with createFilter"
urlPath="docs/examples/CreateFilter.js"
raw={require('!!raw-loader!../../examples/CreateFilter.js')}
>
@@ -49,13 +49,13 @@ export default function Advanced() {
</ExampleWrapper>
)}
- ### filterOptions
- If you really would like to rewrite the filtration logic from the ground up, simply declare a new filterOptions function to be passed in as a prop to react-select.
- For details on the shape of the filterOptions prop, please see the proptypes in the api docs [here](/api#prop-types).
+ ### filterOption
+ If you really would like to rewrite the filtration logic from the ground up, simply declare a new filterOption function to be passed in as a prop to react-select.
+ For details on the shape of the filterOption prop, please see the proptypes in the api docs [here](/props).
${(
<ExampleWrapper
- label="Custom filterOptions with createFilter"
+ label="Custom filterOption with createFilter"
urlPath="docs/examples/CreateFilter.js"
raw={require('!!raw-loader!../../examples/CreateFilter.js')}
>
|
filterOptions -> filterOption
The "Advanced" section of the docs still refers to filterOptions and uses a wrong url
|
JedWatson_react-select
|
train
|
ab300f07e85c9c1ba538d37d16cd91fa3361ef4d
|
diff --git a/api/api.go b/api/api.go
index <HASH>..<HASH> 100644
--- a/api/api.go
+++ b/api/api.go
@@ -9,6 +9,7 @@ import (
"net/http"
"net/url"
"os"
+ "strings"
"time"
"github.com/hashicorp/go-retryablehttp"
@@ -61,6 +62,10 @@ func NewApi(ac *Config) (*Api, error) {
if api_url == "" {
api_url = defaultApiUrl
}
+ if !strings.Contains(api_url, "/") {
+ // if just a hostname is passed, ASSume "https" and a path prefix of "/v2"
+ api_url = fmt.Sprintf("https://%s/v2", ac.Url)
+ }
if last := len(api_url) - 1; last >= 0 && api_url[last] == '/' {
api_url = api_url[:last]
}
diff --git a/api/api_test.go b/api/api_test.go
index <HASH>..<HASH> 100644
--- a/api/api_test.go
+++ b/api/api_test.go
@@ -42,7 +42,7 @@ func TestApiGetUser(t *testing.T) {
t.Skip("skipping test; $CIRCONUS_API_TOKEN not set")
}
- t.Log("Testing correct API call to /user/current")
+ t.Log("Testing correct API call to /user/current [defaults]")
ac := &Config{}
ac.Token = TokenConfig{
@@ -59,3 +59,27 @@ func TestApiGetUser(t *testing.T) {
}
}
+
+func TestApiGetUser2(t *testing.T) {
+ if os.Getenv("CIRCONUS_API_TOKEN") == "" {
+ t.Skip("skipping test; $CIRCONUS_API_TOKEN not set")
+ }
+
+ t.Log("Testing correct API call to /user/current [url=hostname]")
+
+ ac := &Config{}
+ ac.Token = TokenConfig{
+ Key: os.Getenv("CIRCONUS_API_TOKEN"),
+ App: os.Getenv("CIRCONUS_API_APP"),
+ }
+ ac.Url = "api.circonus.com"
+ apih, err := NewApi(ac)
+ if err != nil {
+ t.Errorf("Expected no error, got '%v'", err)
+ }
+
+ if _, err := apih.Get("/user/current"); err != nil {
+ t.Errorf("Expected no error, got '%v'", err)
+ }
+
+}
|
facilitate Url being just a hostname (defined as, does not contain a "/")
|
circonus-labs_circonus-gometrics
|
train
|
a44dd9a2687055d0074f8e71f1c50e3096ee846b
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,7 @@
* [#304](https://github.com/intridea/hashie/pull/304): Ensured compatibility of `Hash` extensions with singleton objects - [@regexident](https://github.com/regexident).
* [#306](https://github.com/intridea/hashie/pull/306): Added `Hashie::Extensions::Dash::Coercion` - [@marshall-lee](https://github.com/marshall-lee).
* [#310](https://github.com/intridea/hashie/pull/310): Fixed `Hashie::Extensions::SafeAssignment` bug with private methods - [@marshall-lee](https://github.com/marshall-lee).
+* [#313](https://github.com/intridea/hashie/pull/313): Restrict pending spec to only Ruby versions 2.2.0-2.2.2 - [@pboling](https://github.com/pboling).
## 3.4.2 (6/2/2015)
diff --git a/spec/hashie/mash_spec.rb b/spec/hashie/mash_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/hashie/mash_spec.rb
+++ b/spec/hashie/mash_spec.rb
@@ -364,9 +364,7 @@ describe Hashie::Mash do
it 'is able to access an unknown suffixed key as a method' do
# See https://github.com/intridea/hashie/pull/285 for more information
- if mri22?
- pending 'Bug in MRI 2.2.x means this behavior is broken in those versions'
- end
+ pending_for(engine: 'ruby', versions: %w(2.2.0 2.2.1 2.2.2))
%w(= ? ! _).each do |suffix|
expect(subject.method(:"xyz#{suffix}")).to_not be_nil
diff --git a/spec/support/ruby_version.rb b/spec/support/ruby_version.rb
index <HASH>..<HASH> 100644
--- a/spec/support/ruby_version.rb
+++ b/spec/support/ruby_version.rb
@@ -1,10 +1,59 @@
-def mri22?
- ruby_version.start_with?('ruby_2.2')
+# How to pend specs that break due to bugs in Ruby interpreters or versions
+#
+# it("blah is blah") do
+# pending_for(engine: 'jruby', version: '2.2.2')
+# expect('blah').to eq 'blah'
+# end
+#
+def pending_for(options = {}) # Not using named parameters because still supporting Ruby 1.9
+ fail ArgumentError, 'pending_for requires at least an engine or versions to be specified' unless
+ options[:engine] || options[:versions]
+ current_engine, current_version = ruby_engine_and_version
+ versions_to_pend = Array(options[:versions]) # cast to array
+ engine_to_pend = options[:engine]
+ broken = 'This behavior is broken'
+ bug = 'due to a bug in the Ruby engine'
+ # If engine is nil, then any matching versions should be pended
+ if engine_to_pend.nil?
+ pending "#{broken} in Ruby versions #{versions_to_pend} #{bug}" if
+ versions_to_pend.include?(current_version)
+ elsif engine_to_pend == current_engine
+ if versions_to_pend.empty?
+ pending "#{broken} #{bug} #{INTERPRETER_MATRIX[engine_to_pend]}"
+ else
+ pending %[#{broken} in Ruby versions #{versions_to_pend} #{bug} (#{INTERPRETER_MATRIX[engine_to_pend]})] if
+ versions_to_pend.include?(current_version)
+ end
+ end
end
-def ruby_version
- interpreter = Object.const_defined?(:RUBY_ENGINE) && RUBY_ENGINE
- version = Object.const_defined?(:RUBY_VERSION) && RUBY_VERSION
+#
+# | RUBY_ENGINE | Implementation |
+# |:-----------:|:-----------------:|
+# | <undefined> | MRI < 1.9 |
+# | 'ruby' | MRI >= 1.9 or REE |
+# | 'jruby' | JRuby |
+# | 'macruby' | MacRuby |
+# | 'rbx' | Rubinius |
+# | 'maglev' | MagLev |
+# | 'ironruby' | IronRuby |
+# | 'cardinal' | Cardinal |
+#
- "#{interpreter}_#{version}"
+INTERPRETER_MATRIX = {
+ nil => 'MRI < 1.9',
+ 'ruby' => 'MRI >= 1.9 or REE',
+ 'jruby' => 'JRuby',
+ 'macruby' => 'MacRuby',
+ 'rbx' => 'Rubinius',
+ 'maglev' => 'MagLev',
+ 'ironruby' => 'IronRuby',
+ 'cardinal' => 'Cardinal'
+}
+
+def ruby_engine_and_version
+ current_engine = Object.const_defined?(:RUBY_ENGINE) && RUBY_ENGINE
+ current_version = Object.const_defined?(:RUBY_VERSION) && RUBY_VERSION
+
+ [current_engine, current_version]
end
|
- Restrict the pending spec to only Ruby versions <I>, <I>, <I>
- Better paradigm for pending specs that fail due to interpreter bugs
|
intridea_hashie
|
train
|
026269eba6eea386eff91d12cdd95418eae374cc
|
diff --git a/lib/upnp/control_point/service.rb b/lib/upnp/control_point/service.rb
index <HASH>..<HASH> 100644
--- a/lib/upnp/control_point/service.rb
+++ b/lib/upnp/control_point/service.rb
@@ -236,7 +236,8 @@ module UPnP
end.size
=end
@action_list << action
- define_method_from_action(action[:name].to_sym, action[:argumentList][:argument])
+ args = action[:argumentList] ? action[:argumentList][:argument] : {}
+ define_method_from_action(action[:name].to_sym, args)
end
else
log "<#{self.class}> Got actionList that's not an Array or Hash."
|
Fix for when an action doesn't provide an argumentList. Relates to gh-5.
|
turboladen_playful
|
train
|
fc343e9d87106271561b54330b9ee0afd4afd41e
|
diff --git a/test/parsing-response.js b/test/parsing-response.js
index <HASH>..<HASH> 100644
--- a/test/parsing-response.js
+++ b/test/parsing-response.js
@@ -144,9 +144,7 @@ describe("Parsing Responses", function() {
shouldRejectWithError(500);
- it("should use the exception message as the error message", function(
- done
- ) {
+ it("should use the exception message as the error message", function(done) {
this.result.catch(err => {
expect(err.message).to.equal(
"Cannot do the thing you wanted to do because of some important reason."
@@ -176,9 +174,7 @@ describe("Parsing Responses", function() {
shouldRejectWithError(500);
- it("should use the exception message as the error message", function(
- done
- ) {
+ it("should use the exception message as the error message", function(done) {
this.result.catch(err => {
expect(err.message).to.equal(
"Cannot do the thing you wanted to do because of some important reason."
@@ -220,9 +216,7 @@ describe("Parsing Responses", function() {
shouldRejectWithError(500);
- it("should use the inner-most exception message as the error message", function(
- done
- ) {
+ it("should use the inner-most exception message as the error message", function(done) {
this.result.catch(err => {
expect(err.message).to.equal(
"Cannot do the thing you wanted to do because of some important reason."
@@ -262,9 +256,7 @@ describe("Parsing Responses", function() {
shouldRejectWithError(500);
- it("should use the inner-most exception message as the error message", function(
- done
- ) {
+ it("should use the inner-most exception message as the error message", function(done) {
this.result.catch(err => {
expect(err.message).to.equal("Cannot do the thing");
done();
@@ -302,9 +294,7 @@ describe("Parsing Responses", function() {
shouldRejectWithError(500);
- it("should use the inner-most exception message as the error message", function(
- done
- ) {
+ it("should use the inner-most exception message as the error message", function(done) {
this.result.catch(err => {
expect(err.message).to.equal("Not a useless message");
done();
|
Delint due to prettier upgrade
|
civicsource_fetch-helpers
|
train
|
db739379cef6d269a6bc51a9ebf999981f6b3adc
|
diff --git a/src/type/__tests__/validation-test.js b/src/type/__tests__/validation-test.js
index <HASH>..<HASH> 100644
--- a/src/type/__tests__/validation-test.js
+++ b/src/type/__tests__/validation-test.js
@@ -878,8 +878,8 @@ describe('Type System: Union types must be resolvable', () => {
types: [ SomeObjectType ],
}))
).to.throw(
- 'Union Type SomeUnion does not provide a "resolveType" function and ' +
- 'possible Type SomeObject does not provide a "isTypeOf" function. ' +
+ 'Union type "SomeUnion" does not provide a "resolveType" function and ' +
+ 'possible type "SomeObject" does not provide an "isTypeOf" function. ' +
'There is no way to resolve this possible type during execution.'
);
});
diff --git a/src/type/definition.js b/src/type/definition.js
index <HASH>..<HASH> 100644
--- a/src/type/definition.js
+++ b/src/type/definition.js
@@ -655,39 +655,45 @@ export class GraphQLUnionType {
}
getTypes(): Array<GraphQLObjectType> {
- return this._types || (this._types = this._defineTypes());
+ return this._types || (this._types =
+ defineTypes(this, this._typeConfig.types)
+ );
+ }
+
+ toString(): string {
+ return this.name;
}
+}
+
+function defineTypes(
+ unionType: GraphQLUnionType,
+ typesThunk: Thunk<Array<GraphQLObjectType>>
+): Array<GraphQLObjectType> {
+ const types = resolveThunk(typesThunk);
- _defineTypes(): Array<GraphQLObjectType> {
- const types = resolveThunk(this._typeConfig.types);
+ invariant(
+ Array.isArray(types) && types.length > 0,
+ 'Must provide Array of types or a function which returns ' +
+ `such an array for Union ${unionType.name}.`
+ );
+ types.forEach(objType => {
invariant(
- Array.isArray(types) && types.length > 0,
- 'Must provide Array of types or a function which returns ' +
- `such an array for Union ${this.name}.`
+ objType instanceof GraphQLObjectType,
+ `${unionType.name} may only contain Object types, it cannot contain: ` +
+ `${String(objType)}.`
);
- types.forEach(type => {
+ if (typeof unionType.resolveType !== 'function') {
invariant(
- type instanceof GraphQLObjectType,
- `${this.name} may only contain Object types, it cannot contain: ` +
- `${String(type)}.`
+ typeof objType.isTypeOf === 'function',
+ `Union type "${unionType.name}" does not provide a "resolveType" ` +
+ `function and possible type "${objType.name}" does not provide an ` +
+ '"isTypeOf" function. There is no way to resolve this possible type ' +
+ 'during execution.'
);
- if (typeof this.resolveType !== 'function') {
- invariant(
- typeof type.isTypeOf === 'function',
- `Union Type ${this.name} does not provide a "resolveType" function ` +
- `and possible Type ${type.name} does not provide a "isTypeOf" ` +
- 'function. There is no way to resolve this possible type ' +
- 'during execution.'
- );
- }
- });
-
- return types;
- }
+ }
+ });
- toString(): string {
- return this.name;
- }
+ return types;
}
export type GraphQLUnionTypeConfig = {
|
Move Unions class-private method into module-private. Improve test message
|
graphql_graphql-js
|
train
|
4e1c44b63c0098201be85c8841e7c65a46c597e8
|
diff --git a/lib/dry/initializer/attribute.rb b/lib/dry/initializer/attribute.rb
index <HASH>..<HASH> 100644
--- a/lib/dry/initializer/attribute.rb
+++ b/lib/dry/initializer/attribute.rb
@@ -54,7 +54,11 @@ module Dry::Initializer
source == other.source
end
- # definition for the getter method
+ def postsetter
+ "@__options__[:#{target}] = @#{target}" \
+ " unless @#{target} == #{undefined}"
+ end
+
def getter
return unless reader
command = %w(private protected).include?(reader.to_s) ? reader : :public
diff --git a/lib/dry/initializer/builder.rb b/lib/dry/initializer/builder.rb
index <HASH>..<HASH> 100644
--- a/lib/dry/initializer/builder.rb
+++ b/lib/dry/initializer/builder.rb
@@ -51,7 +51,7 @@ module Dry::Initializer
private :__initialize__
private :__defaults__
private :__coercers__
-
+
#{getters}
RUBY
end
diff --git a/lib/dry/initializer/option.rb b/lib/dry/initializer/option.rb
index <HASH>..<HASH> 100644
--- a/lib/dry/initializer/option.rb
+++ b/lib/dry/initializer/option.rb
@@ -21,11 +21,6 @@ module Dry::Initializer
" : #{undefined}"
end
- def postsetter
- "@__options__[:#{target}] = @#{target}" \
- " unless @#{target} == #{undefined}"
- end
-
# part of __defaults__
def default_hash
super :option
diff --git a/lib/dry/initializer/param.rb b/lib/dry/initializer/param.rb
index <HASH>..<HASH> 100644
--- a/lib/dry/initializer/param.rb
+++ b/lib/dry/initializer/param.rb
@@ -16,8 +16,6 @@ module Dry::Initializer
safe_setter
end
- def postsetter; end
-
# part of __defaults__
def default_hash
super :param
diff --git a/spec/options_var_spec.rb b/spec/options_var_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/options_var_spec.rb
+++ b/spec/options_var_spec.rb
@@ -1,23 +1,26 @@
describe "@__options__" do
- context "when class has no options" do
+ context "when class has params" do
before do
class Test::Foo
- extend Dry::Initializer::Mixin
- param :foo
+ extend Dry::Initializer
+ param :foo, proc(&:to_s)
+ param :bar, default: proc { 1 }
+ param :baz, optional: true
end
end
- it "is set to empty hash" do
- subject = Test::Foo.new(1)
+ it "collects coerced params with default values" do
+ subject = Test::Foo.new(:FOO)
- expect(subject.instance_variable_get(:@__options__)).to eq({})
+ expect(subject.instance_variable_get(:@__options__))
+ .to eq({ foo: "FOO", bar: 1 })
end
end
context "when class has options" do
before do
class Test::Foo
- extend Dry::Initializer::Mixin
+ extend Dry::Initializer
option :foo
option :bar, default: proc { 1 }
option :baz, optional: true
|
Collect all attributes, assigned via #option and #param, in the @__options__ hash
|
dry-rb_dry-initializer
|
train
|
90032af0570d25a6dc82b6e2d75f641b2d9e831c
|
diff --git a/cluster/matrix.py b/cluster/matrix.py
index <HASH>..<HASH> 100644
--- a/cluster/matrix.py
+++ b/cluster/matrix.py
@@ -31,18 +31,30 @@ def _encapsulate_item_for_combinfunc(item):
before the invocation of combinfunc.
if not hasattr(item, '__iter__') or isinstance(item, tuple):
item = [item]
- Logging has been added to the original two lines
- and shows that the behaviour of this snippet
+ Logging was added to the original two lines
+ and shows that the outcome of this snippet
has changed between Python2.7 and Python3.5.
+ This logging showed that the difference in
+ outcome consisted of the handling of the builtin
+ str class, which was encapsulated into a list in
+ Python2.7 but returned naked in Python3.5.
+ Adding a test for this specific class to the
+ set of conditions appears to give correct behaviour
+ under both versions.
"""
encapsulated_item = None
- if not hasattr(item, '__iter__') or isinstance(item, tuple):
+ if (
+ not hasattr(item, '__iter__') or
+ isinstance(item, tuple) or
+ isinstance(item, str)
+ ):
encapsulated_item = [item]
else:
encapsulated_item = item
logging.debug(
"item class:%s encapsulated as:%s ",
- item.__class__, encapsulated_item.__class__
+ item.__class__.__name__,
+ encapsulated_item.__class__.__name__
)
return encapsulated_item
|
Modified new log message slightly to make the names of types on Python<I> and Python<I> consistent.
Fixed Github issue #<I> by explicitly adding str to the list of types to be wrapped in a list.
|
exhuma_python-cluster
|
train
|
502546a0c8e078ca476c6a6dcbc05cf3c9cde8cd
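The fixed condition in isolation: strings gained `__iter__` in Python 3, so without an explicit `str` test they escape encapsulation. A minimal sketch of the behaviour the commit pins down.

def encapsulate_item(item):
    # Tuples and strings are iterable but must still be wrapped in a list;
    # non-iterables are wrapped as well.
    if not hasattr(item, "__iter__") or isinstance(item, (tuple, str)):
        return [item]
    return item

assert encapsulate_item("abc") == ["abc"]    # wrapped on both Python 2 and 3
assert encapsulate_item((1, 2)) == [(1, 2)]
assert encapsulate_item([1, 2]) == [1, 2]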
|
diff --git a/sonar-server/src/main/java/org/sonar/server/rule2/index/RuleIndex.java b/sonar-server/src/main/java/org/sonar/server/rule2/index/RuleIndex.java
index <HASH>..<HASH> 100644
--- a/sonar-server/src/main/java/org/sonar/server/rule2/index/RuleIndex.java
+++ b/sonar-server/src/main/java/org/sonar/server/rule2/index/RuleIndex.java
@@ -328,7 +328,7 @@ public class RuleIndex extends BaseIndex<Rule, RuleDto, RuleKey> {
/* the Lang facet */
query.addAggregation(AggregationBuilders
- .terms("Languages")
+ .terms("languages")
.field(RuleNormalizer.RuleField.LANGUAGE.key())
.order(Terms.Order.count(false))
.size(10)
@@ -336,7 +336,7 @@ public class RuleIndex extends BaseIndex<Rule, RuleDto, RuleKey> {
/* the Tag facet */
query.addAggregation(AggregationBuilders
- .terms("Tags")
+ .terms("tags")
.field(RuleNormalizer.RuleField._TAGS.key())
.order(Terms.Order.count(false))
.size(10)
@@ -344,7 +344,7 @@ public class RuleIndex extends BaseIndex<Rule, RuleDto, RuleKey> {
/* the Repo facet */
query.addAggregation(AggregationBuilders
- .terms("Repositories")
+ .terms("repositories")
.field(RuleNormalizer.RuleField.REPOSITORY.key())
.order(Terms.Order.count(false))
.size(10)
diff --git a/sonar-server/src/main/java/org/sonar/server/rule2/ws/SearchAction.java b/sonar-server/src/main/java/org/sonar/server/rule2/ws/SearchAction.java
index <HASH>..<HASH> 100644
--- a/sonar-server/src/main/java/org/sonar/server/rule2/ws/SearchAction.java
+++ b/sonar-server/src/main/java/org/sonar/server/rule2/ws/SearchAction.java
@@ -217,7 +217,7 @@ public class SearchAction implements RequestHandler {
json.name("facets").beginArray();
for (Map.Entry<String, Collection<FacetValue>> facet : results.getFacets().entrySet()) {
json.beginObject();
- json.prop("name", facet.getKey());
+ json.prop("property", facet.getKey());
json.name("values").beginArray();
for (FacetValue facetValue : facet.getValue()) {
json.beginObject();
diff --git a/sonar-server/src/test/java/org/sonar/server/rule2/index/RuleIndexMediumTest.java b/sonar-server/src/test/java/org/sonar/server/rule2/index/RuleIndexMediumTest.java
index <HASH>..<HASH> 100644
--- a/sonar-server/src/test/java/org/sonar/server/rule2/index/RuleIndexMediumTest.java
+++ b/sonar-server/src/test/java/org/sonar/server/rule2/index/RuleIndexMediumTest.java
@@ -122,7 +122,7 @@ public class RuleIndexMediumTest {
assertThat(result.getFacets()).hasSize(3);
// Verify the value of a given facet
- Collection<FacetValue> repoFacets = result.getFacetValues("Repositories");
+ Collection<FacetValue> repoFacets = result.getFacetValues("repositories");
assertThat(repoFacets).hasSize(2);
assertThat(Iterables.get(repoFacets, 0).getKey()).isEqualTo("javascript");
assertThat(Iterables.get(repoFacets, 0).getValue()).isEqualTo(2);
@@ -130,7 +130,7 @@ public class RuleIndexMediumTest {
assertThat(Iterables.get(repoFacets, 1).getValue()).isEqualTo(1);
// Check that tag facet has both Tags and SystemTags values
- Collection<FacetValue> tagFacet = result.getFacetValues("Tags");
+ Collection<FacetValue> tagFacet = result.getFacetValues("tags");
assertThat(tagFacet).hasSize(2);
}
|
SONAR-<I> - Changed facet's name to property to link/match search Filters
|
SonarSource_sonarqube
|
train
|
34d82d7cf33a4683e35fe8b465904959ba9929ad
|
diff --git a/lib/ruote/log/fs_history.rb b/lib/ruote/log/fs_history.rb
index <HASH>..<HASH> 100644
--- a/lib/ruote/log/fs_history.rb
+++ b/lib/ruote/log/fs_history.rb
@@ -73,7 +73,13 @@ module Ruote
next unless l.match(/ #{wfid} /)
- history.unshift(l.strip)
+ l = l.strip
+ r = split_line(l)
+
+ next unless r
+
+ history.unshift(r)
+
return history if l.match(/ processes launch$/)
end
end
@@ -85,6 +91,8 @@ module Ruote
# # (NOTE why not ?)
#end
+ LINE_REGEX = /^([0-9-]{10} [^ ]+) ([^ ]+) ([a-z]{2}) (.+)$/
+
ABBREVIATIONS = {
:processes => 'ps',
:workitems => 'wi'
@@ -92,6 +100,12 @@ module Ruote
protected
+ def split_line (l)
+
+ m = LINE_REGEX.match(l)
+ m ? [ Time.parse(m[1]), m[2], m[3], m[4] ] : nil
+ end
+
def ab (s)
ABBREVIATIONS[s] || s.to_s
|
fs_history: actually parse lines in #process_history
|
jmettraux_ruote
|
train
|
2def436e34163471d32d3d3366226e57eee6db46
|
diff --git a/pointfree.py b/pointfree.py
index <HASH>..<HASH> 100644
--- a/pointfree.py
+++ b/pointfree.py
@@ -96,12 +96,14 @@ class partial(object):
self.extra_argv = inst.extra_argv
self.__sig_from_partial(inst)
- if isinstance(func, functools.partial):
+ elif isinstance(func, functools.partial):
self.func = func.func
self.__sig_from_func(self.func)
- self.__update_argv(*func.args, **func.keywords)
+ partial_args = func.args or ()
+ partial_keywords = func.keywords or {}
+ self.__update_argv(*partial_args, **partial_keywords)
- if isinstance(func, classmethod) or isinstance(func, staticmethod):
+ elif isinstance(func, classmethod) or isinstance(func, staticmethod):
self.__call_error = "'%s' object is not callable" % type(func).__name__
else:
diff --git a/test/pointfree_test.py b/test/pointfree_test.py
index <HASH>..<HASH> 100755
--- a/test/pointfree_test.py
+++ b/test/pointfree_test.py
@@ -1,4 +1,4 @@
-import os, sys, unittest, types
+import os, sys, unittest, types, functools
from pointfree import *
# The unittest.TestCase in Python 2.6 and 3.0 doesn't have some of the
@@ -299,6 +299,19 @@ class ArgsToInitCase(TestCase):
# should be evaluated.
self.assertEqual(p(), 6)
+class FunctoolsPartialCase(TestCase):
+ """Make sure pointfree.partial can accept a functools.partial as an
+ argument (though this won't work if the functools.partial instance
+ wraps a builtin Python function)."""
+
+ def testFunctoolsPartialInit(self):
+ self.assertEqual(partial(functools.partial(just_add))(1, 2, 3), 6)
+ self.assertEqual(partial(functools.partial(just_add, 1, 2))(3), 6)
+ self.assertEqual(partial(functools.partial(just_add, 1, 2, 3))(), 6)
+
+ def testEarlyError(self):
+ self.assertRaises(TypeError, lambda: partial(functools.partial(just_add, 1, 2, 3, 4)))
+
### POINTFREE OPERATOR FIXTURES ###########################################
@pointfree
|
Fix and test for functools.partial handling
Fixes some bugs in the partial class pertaining to accepting a
functools.partial instance as a "function", and adds unit tests for this
functionality.
|
mshroyer_pointfree
|
train
|
ad7dd0bf4c56a84dfaea3e8ab98cb7e3a35d4530
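The unwrap step the fix touches, in isolation: a functools.partial's stored `args`/`keywords` were None-like on some interpreters when absent, so they are defaulted before being re-applied. `just_add` is illustrative.

import functools

def just_add(a, b, c):
    return a + b + c

def unwrap_partial(p):
    # Older interpreters exposed keywords as None when absent; default both.
    args = p.args or ()
    keywords = p.keywords or {}
    return p.func, args, keywords

func, args, kwargs = unwrap_partial(functools.partial(just_add, 1, 2))
print(func(*args, 3, **kwargs))  # 6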
|
diff --git a/fireplace/managers.py b/fireplace/managers.py
index <HASH>..<HASH> 100644
--- a/fireplace/managers.py
+++ b/fireplace/managers.py
@@ -167,6 +167,7 @@ CARD_ATTRIBUTE_MAP = {
GameTag.OUTGOING_HEALING_ADJUSTMENT: "outgoing_healing_adjustment",
GameTag.OVERLOAD: "overload",
GameTag.POISONOUS: "poisonous",
+ GameTag.POWERED_UP: "powered_up",
GameTag.RARITY: "rarity",
GameTag.RECEIVES_DOUBLE_SPELLDAMAGE_BONUS: "receives_double_spelldamage_bonus",
GameTag.SECRET: "secret",
|
Add POWERED_UP tag to card manager
Fixes #<I>.
|
jleclanche_fireplace
|
train
|
de4d366a6a885b472dfe49b61eed71ae5001d799
|
diff --git a/src/Grid.php b/src/Grid.php
index <HASH>..<HASH> 100644
--- a/src/Grid.php
+++ b/src/Grid.php
@@ -104,7 +104,7 @@ class Grid extends Lister
*
* @return Column\Generic
*/
- public function addColumn($name, $columnDef = null, $fieldDef = null)
+ public function addColumn($name, $columnDef = null, $fieldDef = [])
{
if (!$this->model) {
$this->model = new \atk4\ui\misc\ProxyModel();
|
Default to an empty array, not null,
because it should match `\atk4\data\Model->addField($name, $defaults = [])`
|
atk4_ui
|
train
|
b529e3d155f82ba7bbdf7f032684bf9070d39509
|
diff --git a/src/views/supplier/detail.blade.php b/src/views/supplier/detail.blade.php
index <HASH>..<HASH> 100644
--- a/src/views/supplier/detail.blade.php
+++ b/src/views/supplier/detail.blade.php
@@ -26,8 +26,6 @@
@stop
@section('aimeos_body')
- <div class="container-xl">
- <?= $aibody['supplier/detail'] ?? '' ?>
- <?= $aibody['catalog/lists'] ?? '' ?>
- </div>
+ <?= $aibody['supplier/detail'] ?? '' ?>
+ <?= $aibody['catalog/lists'] ?? '' ?>
@stop
|
Removed container-xl in supplier detail page
|
aimeos_aimeos-laravel
|
train
|
2024d76787e59fc330f7bd5d78b068f801f14780
|
diff --git a/thinc/shims/mxnet.py b/thinc/shims/mxnet.py
index <HASH>..<HASH> 100644
--- a/thinc/shims/mxnet.py
+++ b/thinc/shims/mxnet.py
@@ -3,6 +3,7 @@ import contextlib
from io import BytesIO
import srsly
import tempfile
+import copy
try:
import mxnet.autograd
@@ -86,22 +87,6 @@ class MXNetShim(Shim):
return optimizer, gluon.Trainer(self._model.collect_params(), optimizer)
- @contextlib.contextmanager
- def use_params(self, params):
- key_prefix = f"mxnet_{self.id}_"
- state_dict = {}
- for k, v in params.items():
- if hasattr(k, "startswith") and k.startswith(key_prefix):
- state_dict[k.replace(key_prefix, "")] = xp2mxnet(v)
- # TODO: state_dict equiv in mxnet? collect_params().copy() maybe?
- if state_dict:
- backup = {k: v.clone() for k, v in self._model.state_dict().items()}
- self._model.load_state_dict(state_dict)
- yield
- self._model.load_state_dict(backup)
- else:
- yield
-
def _update_mxnet_averages(self, sgd, *, init_steps=1):
if getattr(sgd, "averages", None) is None:
return
@@ -116,12 +101,20 @@ class MXNetShim(Shim):
sgd.averages[key] = xp_param.copy()
sgd.nr_update[key] = init_steps
- def to_device(self, device): # pragma: no cover
- raise NotImplementedError("todo: test this with GPU system")
+ def copy(self, ctx: mx.context.Context = None):
+ if ctx is None:
+ ctx = mx.current_context()
+ model_bytes = self.to_bytes()
+ copied = copy.deepcopy(self)
+ copied._model.initialize(ctx=ctx)
+ copied.from_bytes(model_bytes)
+ return copied
+
+ def to_device(self, device):
if device == "cpu":
- self._model.cpu()
+ self._model = self.copy(mx.cpu())
else:
- self._model.cuda(device)
+ self._model = self.copy(mx.gpu())
def to_bytes(self):
# MXNet doesn't implement save/load without a filename
@@ -135,9 +128,12 @@ class MXNetShim(Shim):
def from_bytes(self, bytes_data):
msg = srsly.msgpack_loads(bytes_data)
self.cfg = msg["config"]
+ self._load_params(msg["state"])
+ return self
+
+ def _load_params(self, params):
# MXNet doesn't implement save/load without a filename :(
with tempfile.NamedTemporaryFile() as temp:
- temp.write(msg["state"])
+ temp.write(params)
self._model.load_parameters(temp.name, ctx=mx.current_context())
- return self
diff --git a/thinc/tests/layers/test_mxnet_wrapper.py b/thinc/tests/layers/test_mxnet_wrapper.py
index <HASH>..<HASH> 100644
--- a/thinc/tests/layers/test_mxnet_wrapper.py
+++ b/thinc/tests/layers/test_mxnet_wrapper.py
@@ -115,7 +115,8 @@ def test_mxnet_wrapper_train_overfits(
@pytest.mark.skipif(not has_mxnet, reason="needs MXNet")
-def test_mxnet_wrapper_can_copy_model(model: Model[Array2d, Array2d]):
+def test_mxnet_wrapper_can_copy_model(model: Model[Array2d, Array2d], X: Array2d):
+ model.predict(X)
copy: Model[Array2d, Array2d] = model.copy()
assert copy is not None
@@ -148,25 +149,9 @@ def test_mxnet_wrapper_from_bytes(model: Model[Array2d, Array2d], X: Array2d):
@pytest.mark.skipif(not has_mxnet, reason="needs MXNet")
-def test_mxnet_wrapper_use_params(
- model: Model[Array2d, Array2d], X: Array2d, Y: Array2d, answer: int
-):
- optimizer = Adam()
- with model.use_params(optimizer.averages):
- assert model.predict(X).argmax() is not None
- for i in range(10):
- guesses, backprop = model.begin_update(X)
- d_guesses = (guesses - Y) / guesses.shape[0]
- backprop(d_guesses)
- model.finish_update(optimizer)
- with model.use_params(optimizer.averages):
- predicted = model.predict(X).argmax()
- assert predicted == answer
-
-
-@pytest.mark.skipif(not has_mxnet, reason="needs MXNet")
-def test_mxnet_wrapper_to_cpu(mx_model):
+def test_mxnet_wrapper_to_cpu(mx_model, X: Array2d):
model = MXNetWrapper(mx_model)
+ model.predict(X)
model.to_cpu()
|
fix tests and drop use_params
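A toy Python sketch of the copy-through-serialization pattern the new
MXNetShim.copy() follows; the Shim class below is a simplified stand-in,
not the real shim:

    import copy
    import json

    class Shim:
        def __init__(self, params):
            self.params = dict(params)

        def to_bytes(self):
            return json.dumps(self.params).encode()

        def from_bytes(self, data):
            self.params = json.loads(data.decode())
            return self

        def copy(self):
            # Round-trip the state so the duplicate owns fresh
            # parameters, as the patched shim does before loading
            # them on the target context.
            state = self.to_bytes()
            duplicate = copy.deepcopy(self)
            return duplicate.from_bytes(state)

    print(Shim({"w": 1.0}).copy().params)  # {'w': 1.0}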
|
explosion_thinc
|
train
|
20950e5df5480709fa1d4456181b2a441ae3e92c
|
diff --git a/src/key/factory.js b/src/key/factory.js
index <HASH>..<HASH> 100644
--- a/src/key/factory.js
+++ b/src/key/factory.js
@@ -191,10 +191,9 @@ async function wrapKeyObject(secretKeyPacket, secretSubkeyPackets, options) {
], config.aead_mode);
}
signaturePacket.preferredHashAlgorithms = createdPreferredAlgos([
- // prefer fast asm.js implementations (SHA-256). SHA-1 will not be secure much longer...move to bottom of list
+ // prefer fast asm.js implementations (SHA-256)
enums.hash.sha256,
- enums.hash.sha512,
- enums.hash.sha1
+ enums.hash.sha512
], config.prefer_hash_algorithm);
signaturePacket.preferredCompressionAlgorithms = createdPreferredAlgos([
enums.compression.zlib,
diff --git a/test/general/key.js b/test/general/key.js
index <HASH>..<HASH> 100644
--- a/test/general/key.js
+++ b/test/general/key.js
@@ -1981,7 +1981,7 @@ function versionSpecificTests() {
expect(key.users[0].selfCertifications[0].preferredAeadAlgorithms).to.eql([aead.eax, aead.ocb]);
}
const hash = openpgp.enums.hash;
- expect(key.users[0].selfCertifications[0].preferredHashAlgorithms).to.eql([hash.sha256, hash.sha512, hash.sha1]);
+ expect(key.users[0].selfCertifications[0].preferredHashAlgorithms).to.eql([hash.sha256, hash.sha512]);
const compr = openpgp.enums.compression;
expect(key.users[0].selfCertifications[0].preferredCompressionAlgorithms).to.eql([compr.zlib, compr.zip, compr.uncompressed]);
@@ -2031,7 +2031,7 @@ function versionSpecificTests() {
expect(key.users[0].selfCertifications[0].preferredAeadAlgorithms).to.eql([aead.experimental_gcm, aead.eax, aead.ocb]);
}
const hash = openpgp.enums.hash;
- expect(key.users[0].selfCertifications[0].preferredHashAlgorithms).to.eql([hash.sha224, hash.sha256, hash.sha512, hash.sha1]);
+ expect(key.users[0].selfCertifications[0].preferredHashAlgorithms).to.eql([hash.sha224, hash.sha256, hash.sha512]);
const compr = openpgp.enums.compression;
expect(key.users[0].selfCertifications[0].preferredCompressionAlgorithms).to.eql([compr.zlib, compr.zip, compr.uncompressed]);
|
Remove SHA-1 from default preferred hash algorithms (#<I>)
Also, remove SHA-1 from the (unused) KDF params constructor defaults.
|
openpgpjs_openpgpjs
|
train
|
f45c377f9ca81a2f5af31a56a85bad32b4741015
|
diff --git a/framework/widgets/ListView.php b/framework/widgets/ListView.php
index <HASH>..<HASH> 100644
--- a/framework/widgets/ListView.php
+++ b/framework/widgets/ListView.php
@@ -88,14 +88,18 @@ class ListView extends BaseListView
* - `$widget`: the ListView object
*
* The return result of the function will be rendered directly.
+ * Note: If the function returns `null`, nothing will be rendered before the item.
* @see renderBeforeItem
* @since 2.0.11
*/
public $beforeItem;
/**
* @var Closure an anonymous function that is called once AFTER rendering each data model.
- * It should have the similar signature as [[beforeItem]]. The return result of the function
- * will be rendered directly.
+ *
+ * It should have the same signature as [[beforeItem]].
+ *
+ * The return result of the function will be rendered directly.
+ * Note: If the function returns `null`, nothing will be rendered after the item.
* @see renderAfterItem
* @since 2.0.11
*/
|
ListView: PHPDocs and tests enhanced
|
yiisoft_yii-core
|
train
|
ac287ac6716cea55ec443fc697e803a1d2b834fa
|
diff --git a/Xlib/protocol/rq.py b/Xlib/protocol/rq.py
index <HASH>..<HASH> 100644
--- a/Xlib/protocol/rq.py
+++ b/Xlib/protocol/rq.py
@@ -536,7 +536,7 @@ class List(ValueField):
for v in val:
data.append(self.type.pack_value(v))
- data = b''.join(data)
+ data = ''.join(data).encode()
if self.pad:
dlen = len(data)
@@ -880,7 +880,7 @@ class StrClass(object):
def parse_binary(self, data, display):
slen = _to_ord(data[0]) + 1
- return data[1:slen], data[slen:]
+ return data[1:slen].decode(), data[slen:]
Str = StrClass()
|
TestListFonts tests from test_requests_le.py were fixed for Py3
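A self-contained sketch of the two Python 3 str/bytes boundaries the patch
fixes (values packed as text must be encoded before hitting the wire, and
raw bytes must be decoded, with indexing yielding ints):

    chunks = ['abc', 'def']          # pack_value returns str on Py3
    data = ''.join(chunks).encode()  # join as text, then encode
    assert data == b'abcdef'

    raw = b'\x03fooRest'
    slen = raw[0] + 1                # indexing bytes yields an int
    name, rest = raw[1:slen].decode(), raw[slen:]
    assert name == 'foo' and rest == b'Rest'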
|
python-xlib_python-xlib
|
train
|
1ea85690e91d87b77815ab0e6803e4c0946b4ade
|
diff --git a/frontmatter/__init__.py b/frontmatter/__init__.py
index <HASH>..<HASH> 100644
--- a/frontmatter/__init__.py
+++ b/frontmatter/__init__.py
@@ -2,8 +2,9 @@ import re
import yaml
class Frontmatter:
- _yaml_delim = "^(?:---|\+\+\+)(?:\s+)?\n"
- _re_pattern = (_yaml_delim + "(.+)") * 2
+ _yaml_delim = r'(?:---|\+\+\+)'
+ _all = r'(.*?)'
+ _re_pattern = r'^\s*' + _yaml_delim + _all + _yaml_delim + r'\s*' + _all + r'$'
_regex = re.compile(_re_pattern, re.S | re.M)
@classmethod
@@ -27,6 +28,7 @@ class Frontmatter:
fmatter = ""
body = ""
result = cls._regex.search(string)
+
if result:
fmatter = result.group(1)
body = result.group(2)
diff --git a/tests/test_simple.py b/tests/test_simple.py
index <HASH>..<HASH> 100644
--- a/tests/test_simple.py
+++ b/tests/test_simple.py
@@ -6,9 +6,20 @@ print("\n[attributes]")
print(post['attributes'])
print("\n[body]")
+if post['body'] == "":
+ print("ERROR: body not captured properly.")
+ print("TEST FAILED.")
+ exit(1)
+
print(post['body'])
print("\n[frontmatter]")
+if post['frontmatter'] == "":
+ print("ERROR: frontmatter not captured properly.")
+ print("TEST FAILED.")
+ exit(1)
+
print(post["frontmatter"])
-print("\nTEST SUCCEEDED.")
+print("TEST SUCCEEDED.")
+exit(0)
diff --git a/tests/testfile.md b/tests/testfile.md
index <HASH>..<HASH> 100644
--- a/tests/testfile.md
+++ b/tests/testfile.md
@@ -5,7 +5,15 @@ list:
- first
- second
- third
+
---
This is the actual post content
-This is a second line
+
+This is a second line. HR rule below:
+
+---
+
+Underneath the HR rule.
+
+---
|
Fixed bug where having '---' (YAML delimiter) in the content area would cause a parsing error.
The cause was a greedy operator in the regex groups; fixed with the non-greedy *?. Also updated testfile.md for this use-case as well as the test script.
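A standalone demo of the greedy-versus-lazy difference the fix relies on
(the patterns below paraphrase the class attributes):

    import re

    text = "---\ntitle: post\n---\nBody with an HR rule:\n\n---\n\nMore body."

    greedy = re.compile(r'^\s*(?:---|\+\+\+)(.*)(?:---|\+\+\+)\s*(.*)$', re.S | re.M)
    lazy   = re.compile(r'^\s*(?:---|\+\+\+)(.*?)(?:---|\+\+\+)\s*(.*)$', re.S | re.M)

    # The greedy (.*) swallows everything up to the *last* '---', so
    # part of the body leaks into the front matter group:
    print(repr(greedy.search(text).group(1)))
    # The lazy (.*?) stops at the *first* closing delimiter:
    print(repr(lazy.search(text).group(1)))   # '\ntitle: post\n'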
|
jonbeebe_frontmatter
|
train
|
d8eaceddbeb12383ffa9e5158579c4c2ed4882ac
|
diff --git a/sortinghat/db/model.py b/sortinghat/db/model.py
index <HASH>..<HASH> 100644
--- a/sortinghat/db/model.py
+++ b/sortinghat/db/model.py
@@ -203,7 +203,7 @@ class MetricsGrimoireIdentity(MappedTable):
COLUMN_PREFIX = '_'
- MG_ID_KEYS = ['_id', '_email_address', '_nick']
+ MG_ID_KEYS = ['_nick', '_id', '_email_address']
NAME_KEYS = ['_name']
EMAIL_KEYS = ['_email', '_email_address']
USERNAME_KEYS = ['_username', '_user_id', '_nick', '_login']
|
[model] Use nicks as ids when reflecting from IRC databases
The previous class used numeric ids, but this was a bug. Currently,
there are no numeric ids in IRC databases. The identifier is the
nickname from the table 'irclog'.
|
chaoss_grimoirelab-sortinghat
|
train
|
dea3e06134c54116171d73ef72d48b6d2a7f330a
|
diff --git a/tests/PHPUnit/Core/ConfigTest.php b/tests/PHPUnit/Core/ConfigTest.php
index <HASH>..<HASH> 100644
--- a/tests/PHPUnit/Core/ConfigTest.php
+++ b/tests/PHPUnit/Core/ConfigTest.php
@@ -6,12 +6,11 @@ use Piwik\Config;
*
* @link http://piwik.org
* @license http://www.gnu.org/licenses/gpl-3.0.html GPL v3 or later
+ *
+ * @group Core
*/
class ConfigTest extends PHPUnit_Framework_TestCase
{
- /**
- * @group Core
- */
public function testUserConfigOverwritesSectionGlobalConfigValue()
{
$userFile = PIWIK_INCLUDE_PATH . '/tests/resources/Config/config.ini.php';
@@ -41,9 +40,6 @@ class ConfigTest extends PHPUnit_Framework_TestCase
}
- /**
- * @group Core
- */
public function test_CommonConfig_Overrides()
{
$userFile = PIWIK_INCLUDE_PATH . '/tests/resources/Config/config.ini.php';
@@ -60,9 +56,6 @@ class ConfigTest extends PHPUnit_Framework_TestCase
}
- /**
- * @group Core
- */
public function testWritingConfigWithSpecialCharacters()
{
$userFile = PIWIK_INCLUDE_PATH . '/tests/resources/Config/config.written.ini.php';
@@ -92,9 +85,6 @@ class ConfigTest extends PHPUnit_Framework_TestCase
$this->assertEquals($stringWritten, $config->Category['test2']);
}
- /**
- * @group Core
- */
public function testUserConfigOverwritesGlobalConfig()
{
$userFile = PIWIK_PATH_TEST_TO_ROOT . '/tests/resources/Config/config.ini.php';
@@ -154,8 +144,6 @@ class ConfigTest extends PHPUnit_Framework_TestCase
}
/**
- * @group Core
- *
* @dataProvider getCompareElementsData
*/
public function testCompareElements($description, $test)
@@ -213,8 +201,6 @@ class ConfigTest extends PHPUnit_Framework_TestCase
}
/**
- * @group Core
- *
* @dataProvider getArrayUnmergeData
*/
public function testArrayUnmerge($description, $test)
@@ -434,8 +420,6 @@ class ConfigTest extends PHPUnit_Framework_TestCase
}
/**
- * @group Core
- *
* @dataProvider getDumpConfigData
*/
public function testDumpConfig($description, $test)
|
refs #<I> make sure a group is set
|
matomo-org_matomo
|
train
|
eecf92d0561a7d676a543322afdf30c791b2028b
|
diff --git a/django_js_reverse/views.py b/django_js_reverse/views.py
index <HASH>..<HASH> 100644
--- a/django_js_reverse/views.py
+++ b/django_js_reverse/views.py
@@ -15,7 +15,7 @@ def urls_js(request):
url_patterns = list(urlresolvers.get_resolver(None).reverse_dict.items())
url_list = [(url_name, url_pattern[0][0]) for url_name, url_pattern in url_patterns if
- isinstance(url_name, str)]
+ (isinstance(url_name, str) or isinstance(url_name, unicode))]
return render_to_response('django_js_reverse/urls_js.tpl',
{
|
Added unicode support
If the url names were in unicode, they would not be included in the generated javascript. This should fix that.
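A Python 2-only sketch of the widened type check (str and unicode are
distinct types there; on Python 3 the original isinstance(name, str) test
would already suffice):

    # Python 2: url names may arrive as byte strings (str) or text
    # (unicode), so the filter must accept both.
    def keep(url_name):
        return isinstance(url_name, (str, unicode))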
|
ierror_django-js-reverse
|
train
|
b30dd3b400acf293d9d413161e7177755fa1d757
|
diff --git a/src/test/php/org/bovigo/vfs/PermissionsTestCase.php b/src/test/php/org/bovigo/vfs/PermissionsTestCase.php
index <HASH>..<HASH> 100644
--- a/src/test/php/org/bovigo/vfs/PermissionsTestCase.php
+++ b/src/test/php/org/bovigo/vfs/PermissionsTestCase.php
@@ -94,6 +94,7 @@ class PermissionsTestCase extends \PHPUnit_Framework_TestCase
* @group issue_107
* @expectedException PHPUnit_Framework_Error
* @expectedExceptionMessage Can not create new file in non-writable path root
+ * @requires PHP 5.4
*/
public function touchOnNonWriteableDirectoryTriggersError()
{
@@ -104,6 +105,7 @@ class PermissionsTestCase extends \PHPUnit_Framework_TestCase
/**
* @test
* @group issue_107
+ * @requires PHP 5.4
*/
public function touchOnNonWriteableDirectoryDoesNotCreateFile()
{
|
ensure tests for touch() are only run with at least PHP <I>
|
bovigo_vfsStream
|
train
|
66f32bf24af0cc7385ca380c27954f7068892b7f
|
diff --git a/lib/worker.js b/lib/worker.js
index <HASH>..<HASH> 100644
--- a/lib/worker.js
+++ b/lib/worker.js
@@ -2,7 +2,7 @@
process.title = process.env.WORKER_NAME || "blank-node-worker";
-process.on("uncaughtException", err => {
+process.on("uncaughtException", (err) => {
console.error(`\n${new Date()} Caught exception: ${err.message}\n${err.stack}\n`);
process.exit(1);
});
@@ -47,7 +47,7 @@ const wampClient = new WampClient(true, true);
let _libsReady = false;
let _started = false;
-wampClient.onopen = function() {
+wampClient.onopen = function () {
console.info("Connection to " + srUri + " established");
wampClient.call(
"register",
@@ -58,6 +58,7 @@ wampClient.onopen = function() {
);
subscribeToSR();
subscribeToConfig();
+ subscribeToAssets();
sessions.connected(wampClient);
subscribeToUsers();
localStorage.setup(wampClient);
@@ -67,7 +68,7 @@ wampClient.onopen = function() {
}
};
-wampClient.onclose = function() {
+wampClient.onclose = function () {
console.info("Connection closed.");
localStorage.setup(null);
events.setup(null);
@@ -91,25 +92,25 @@ console.info(`Connecting to ${srUri}`);
wampClient.open(srUri);
function subscribeToSR() {
- const updateRegistry = function(data) {
+ const updateRegistry = function (data) {
console.info("Service registry update received:", data);
serviceRegistry.update(data);
setupModules();
};
- wampClient.subscribe("registry", updateRegistry, updateRegistry, e => {
+ wampClient.subscribe("registry", updateRegistry, updateRegistry, (e) => {
throw new Error("cannot subscribe to service registry", e);
});
}
function subscribeToUsers() {
- const updateUsers = msg => {
+ const updateUsers = (msg) => {
if (!msg) {
return;
}
usersCache.update(msg);
};
- wampClient.subscribe("users", updateUsers, updateUsers, e => {
+ wampClient.subscribe("users", updateUsers, updateUsers, (e) => {
throw new Error("cannot subscribe to users", e);
});
}
@@ -117,7 +118,7 @@ function subscribeToUsers() {
let configReceived = false;
function subscribeToConfig() {
- const updateConfig = async data => {
+ const updateConfig = async (data) => {
console.info("[subscribeToConfig] Config received");
if (configReceived && process.env.NODE_ENV !== "DEV") {
console.info("[subscribeToConfig] This is production mode. Will not load new config.");
@@ -127,41 +128,52 @@ function subscribeToConfig() {
console.info("[subscribeToConfig] Start update config and libs");
configReceived = true;
const dataSources = await configStore.setup(data);
- loadLibs(dataSources);
- // setupModules(dataSources);
+ setupModules(dataSources);
mongoDB.createIndexes();
};
- wampClient.subscribe("config", updateConfig, updateConfig, e => {
+ wampClient.subscribe("config", updateConfig, updateConfig, (e) => {
throw new Error("cannot subscribe to config", e);
});
}
+function subscribeToAssets() {
+ const updateAssets = async (data) => {
+ if (data === "lib.zip") {
+ console.info("[subscribeToLibs] lib.zip updated");
+ loadLibs();
+ }
+ };
+ wampClient.subscribe("assets-update", updateAssets, updateAssets, (e) => {
+ throw new Error("cannot subscribe to assets-update", e);
+ });
+}
+
function loadLibs(dataSources) {
_libsReady = false;
let uri = srUri.replace("ws://", "http://");
uri += (uri[uri.length - 1] === "/" ? "" : "/") + "lib/";
- console.log("Loading libs from:", uri);
- http.get(uri, res => {
+ console.info("Loading libs from:", uri, "started");
+ http.get(uri, (res) => {
if (res.statusCode !== 200) {
- console.log("Libs load error:", res.statusCode);
+ console.error("Libs load error:", res.statusCode);
setupModules(dataSources);
return;
}
const data = [];
- res.on("data", function(chunk) {
+ res.on("data", function (chunk) {
data.push(chunk);
});
- res.on("end", function() {
+ res.on("end", function () {
const buf = Buffer.concat(data);
- userScript.require.registerZip(buf, err => {
- console.log("Libs loaded cb!");
+ userScript.require.registerZip(buf, (err) => {
+ console.info("Loading libs from completed!");
_libsReady = true;
setupModules(dataSources);
});
customFS._registerZip(buf);
});
- }).on("error", e => {
+ }).on("error", (e) => {
console.log(`Libs load error: ${e.message}`);
_libsReady = false;
setupModules(dataSources);
|
Update lib.zip separately from config
|
getblank_blank-node-worker
|
train
|
041031d2b1c4b250d17f8f4e625811de68d105b9
|
diff --git a/test/unit/models/classes/Security/AuthorizationServerFactoryTest.php b/test/unit/models/classes/Security/AuthorizationServerFactoryTest.php
index <HASH>..<HASH> 100644
--- a/test/unit/models/classes/Security/AuthorizationServerFactoryTest.php
+++ b/test/unit/models/classes/Security/AuthorizationServerFactoryTest.php
@@ -34,7 +34,7 @@ use oat\oatbox\cache\ItemPoolSimpleCacheAdapter;
use oat\oatbox\log\LoggerService;
use oat\tao\model\security\Business\Domain\Key\Key;
use oat\tao\model\security\Business\Domain\Key\KeyChain;
-use oat\taoDeliverConnect\test\unit\helpers\NoPrivacyTrait;
+use oat\tao\test\unit\helpers\NoPrivacyTrait;
use oat\taoLti\models\classes\Platform\Repository\Lti1p3RegistrationRepository;
use oat\taoLti\models\classes\Security\AuthorizationServer\AuthorizationServerFactory;
|
fix use statement to point at the new NoPrivacy trait location
|
oat-sa_extension-tao-lti
|
train
|
bfd7987486a41f2a88bd95efa26b4eec83db7d60
|
diff --git a/doc/quickstart.md b/doc/quickstart.md
index <HASH>..<HASH> 100644
--- a/doc/quickstart.md
+++ b/doc/quickstart.md
@@ -197,7 +197,7 @@ symbol.add_device_notification(my_func, attr=attr, user_handle=user_handle)
A built-in notification is available to automatically update the symbol buffer based on the remote value. This is disabled by default, enable it with:
```python
-symbol.enable_auto_update(True)
+symbol.set_auto_update(True)
```
This will create a new notification callback to update `symbol.value`. This can be efficient if the remote variable changes less frequently then your code runs. The number of notification callbacks will then be less than what the number of read operations would have been.
@@ -205,7 +205,7 @@ This will create a new notification callback to update `symbol.value`. This can
It can be disabled again with:
```python
-symbol.enable_auto_update(False)
+symbol.set_auto_update(False)
```
Take care that `symbol.clear_notifications()` will *also* remove the auto-update notification. Like all symbol notifications, the auto-update will also be cleared automatically in the object destructor.
diff --git a/pyads/symbol.py b/pyads/symbol.py
index <HASH>..<HASH> 100644
--- a/pyads/symbol.py
+++ b/pyads/symbol.py
@@ -39,6 +39,9 @@ class AdsSymbol:
value for this symbol
"""
+ _regex_array = re.compile(r"ARRAY \[(\d+)..(\d+)\] OF (.*)")
+ _regex_matrix = re.compile(r"matrix_(\d+)_(.*)_T")
+
def __init__(
self,
plc: "Connection",
@@ -97,7 +100,7 @@ class AdsSymbol:
self.plc_type: Optional[Any] = None
if self.symbol_type is not None:
- self.plc_type = self._get_type_from_str(self.symbol_type)
+ self.plc_type = AdsSymbol.get_type_from_str(self.symbol_type)
def _create_symbol_from_info(self) -> None:
"""Look up remaining info from the remote
@@ -208,7 +211,7 @@ class AdsSymbol:
self._plc.del_device_notification(*handles)
self._handles_list.remove(handles)
- def enable_auto_update(self, auto_update: bool) -> None:
+ def set_auto_update(self, auto_update: bool) -> None:
"""Enable or disable auto-update of buffered value
This automatic update is done through a device notification. This
@@ -233,7 +236,8 @@ class AdsSymbol:
)
self.value = value
- def _get_type_from_str(self, type_str: str) -> Any:
+ @staticmethod
+ def get_type_from_str(type_str: str) -> Any:
"""Get PLCTYPE_* from PLC name string
If PLC name could not be mapped, return None. This is done on
@@ -249,7 +253,7 @@ class AdsSymbol:
return getattr(constants, plc_name)
# If ARRAY
- reg_match = re.match(r"ARRAY \[(\d+)..(\d+)\] OF (.*)", type_str)
+ reg_match = AdsSymbol._regex_array.match(type_str)
if reg_match is not None:
groups = reg_match.groups()
@@ -257,7 +261,7 @@ class AdsSymbol:
scalar_type_str = groups[2]
# Find scalar type
- scalar_type = self._get_type_from_str(scalar_type_str)
+ scalar_type = AdsSymbol.get_type_from_str(scalar_type_str)
if scalar_type:
return scalar_type * size
@@ -265,7 +269,7 @@ class AdsSymbol:
# Fall to method default instead
# If array/matrix (an 1D array is also called a matrix)
- reg_match = re.match(r"matrix_(\d+)_(.*)_T", type_str)
+ reg_match = AdsSymbol._regex_matrix.match(type_str)
if reg_match is not None:
groups = reg_match.groups()
diff --git a/tests/test_symbol.py b/tests/test_symbol.py
index <HASH>..<HASH> 100644
--- a/tests/test_symbol.py
+++ b/tests/test_symbol.py
@@ -415,7 +415,7 @@ class AdsSymbolTestCase(unittest.TestCase):
symbol = self.plc.get_symbol(self.test_var.name)
self.assertIsNone(symbol._auto_update_handle)
- symbol.enable_auto_update(True)
+ symbol.set_auto_update(True)
self.assertIsNotNone(symbol._auto_update_handle)
# Simulate value callback
@@ -427,7 +427,7 @@ class AdsSymbolTestCase(unittest.TestCase):
self.assertEqual(symbol.value, 5334.1545)
- symbol.enable_auto_update(False)
+ symbol.set_auto_update(False)
self.assertIsNone(symbol._auto_update_handle)
|
Renamed auto_update method, made plc_type method static again
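A minimal standalone example of the pattern applied here: compile the
regex once as a class attribute and make the parser a @staticmethod so it
needs no instance:

    import re

    class TypeParser:
        # Compiled once at class-definition time instead of on
        # every call.
        _regex_array = re.compile(r"ARRAY \[(\d+)..(\d+)\] OF (.*)")

        @staticmethod
        def parse(type_str):
            match = TypeParser._regex_array.match(type_str)
            return match.groups() if match else None

    print(TypeParser.parse("ARRAY [0..9] OF INT"))  # ('0', '9', 'INT')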
|
stlehmann_pyads
|
train
|
c349669c7c1ba613d43c23118dac7bc3b03b571e
|
diff --git a/securegraph-accumulo/src/main/java/com/altamiracorp/securegraph/accumulo/AccumuloGraph.java b/securegraph-accumulo/src/main/java/com/altamiracorp/securegraph/accumulo/AccumuloGraph.java
index <HASH>..<HASH> 100644
--- a/securegraph-accumulo/src/main/java/com/altamiracorp/securegraph/accumulo/AccumuloGraph.java
+++ b/securegraph-accumulo/src/main/java/com/altamiracorp/securegraph/accumulo/AccumuloGraph.java
@@ -111,7 +111,7 @@ public class AccumuloGraph extends GraphBase {
getSearchIndex().addElement(this, element);
}
- public void removeProperty(AccumuloElement element, Property property) {
+ void removeProperty(AccumuloElement element, Property property) {
String rowPrefix = getRowPrefixForElement(element);
Mutation m = new Mutation(rowPrefix + element.getId());
@@ -199,22 +199,9 @@ public class AccumuloGraph extends GraphBase {
}
private void addMutations(Collection<Mutation> mutations) {
- try {
- BatchWriter writer = getWriter();
- synchronized (this.writerLock) {
- for (Mutation m : mutations) {
- writer.addMutation(m);
- }
- if (getConfiguration().isAutoFlush()) {
- flush();
- }
- }
- } catch (MutationsRejectedException ex) {
- throw new RuntimeException("Could not add mutation", ex);
- }
+ addMutations(mutations.toArray(new Mutation[mutations.size()]));
}
- // TODO consolodate this with addMutations(Collection) somehow
private void addMutations(Mutation... mutations) {
try {
BatchWriter writer = getWriter();
|
dry up addMutations methods
|
lumifyio_securegraph
|
train
|
31beb513ca3c8d00c539a38c04de7fc44ccc1b30
|
diff --git a/httptoo/inproc_roundtrip.go b/httptoo/inproc_roundtrip.go
index <HASH>..<HASH> 100644
--- a/httptoo/inproc_roundtrip.go
+++ b/httptoo/inproc_roundtrip.go
@@ -3,14 +3,16 @@ package httptoo
import (
"io"
"net/http"
+ "sync"
"github.com/anacrolix/missinggo"
)
type responseWriter struct {
+ mu sync.Mutex
r http.Response
headerWritten missinggo.Event
- bodyWriter io.Writer
+ bodyWriter io.WriteCloser
}
func (me *responseWriter) Header() http.Header {
@@ -21,24 +23,38 @@ func (me *responseWriter) Header() http.Header {
}
func (me *responseWriter) Write(b []byte) (int, error) {
+ me.mu.Lock()
if !me.headerWritten.IsSet() {
- me.WriteHeader(200)
+ me.writeHeader(200)
}
+ me.mu.Unlock()
return me.bodyWriter.Write(b)
}
func (me *responseWriter) WriteHeader(status int) {
+ me.mu.Lock()
+ me.writeHeader(status)
+ me.mu.Unlock()
+}
+
+func (me *responseWriter) writeHeader(status int) {
if me.headerWritten.IsSet() {
return
}
me.r.StatusCode = status
- me.r.Body, me.bodyWriter = io.Pipe()
me.headerWritten.Set()
}
+func (me *responseWriter) runHandler(h http.Handler, req *http.Request) {
+ me.r.Body, me.bodyWriter = io.Pipe()
+ defer me.bodyWriter.Close()
+ defer me.WriteHeader(200)
+ h.ServeHTTP(me, req)
+}
+
func RoundTripHandler(req *http.Request, h http.Handler) (*http.Response, error) {
rw := responseWriter{}
- go h.ServeHTTP(&rw, req)
- rw.headerWritten.Wait()
+ go rw.runHandler(h, req)
+ <-rw.headerWritten.LockedChan(&rw.mu)
return &rw.r, nil
}
|
Fix races in RoundTripHandler
|
anacrolix_missinggo
|
train
|
45d583442ba039652575959d24b6a05e938daeef
|
diff --git a/image_test.go b/image_test.go
index <HASH>..<HASH> 100644
--- a/image_test.go
+++ b/image_test.go
@@ -42,7 +42,7 @@ func TestImageSvgResize(t *testing.T) {
}
func TestImageGifToJpeg(t *testing.T) {
- if VipsMinorVersion >= 8 && VipsMinorVersion > 2 {
+ if VipsMajorVersion >= 8 && VipsMinorVersion > 2 {
i := initImage("test.gif")
options := Options{
Type: JPEG,
@@ -57,7 +57,7 @@ func TestImageGifToJpeg(t *testing.T) {
}
func TestImagePdfToJpeg(t *testing.T) {
- if VipsMinorVersion >= 8 && VipsMinorVersion > 2 {
+ if VipsMajorVersion >= 8 && VipsMinorVersion > 2 {
i := initImage("test.pdf")
options := Options{
Type: JPEG,
@@ -72,7 +72,7 @@ func TestImagePdfToJpeg(t *testing.T) {
}
func TestImageSvgToJpeg(t *testing.T) {
- if VipsMinorVersion >= 8 && VipsMinorVersion > 2 {
+ if VipsMajorVersion >= 8 && VipsMinorVersion > 2 {
i := initImage("test.svg")
options := Options{
Type: JPEG,
|
Fix tests where minor/major were mixed up
|
h2non_bimg
|
train
|
c56d8b4fe94594935a8ff8cbcdfb979122241034
|
diff --git a/script/bump-version.py b/script/bump-version.py
index <HASH>..<HASH> 100755
--- a/script/bump-version.py
+++ b/script/bump-version.py
@@ -14,7 +14,12 @@ SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def main():
parser = argparse.ArgumentParser(
- description='Bump version numbers. Must specify at least one option.'
+ description='Bump version numbers. Must specify at least one of the three options:\n'
+ +' --bump=patch to increment patch version, or\n'
+ +' --stable to promote current beta to stable, or\n'
+ +' --version={version} to set version number directly\n'
+ +'Note that you can use both --bump and --stable simultaneously.',
+ formatter_class=argparse.RawTextHelpFormatter
)
parser.add_argument(
'--version',
|
Add examples to make usage description clearer.
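A quick standalone demo of why RawTextHelpFormatter is needed: argparse's
default formatter re-wraps the description and collapses hand-formatted
newlines:

    import argparse

    parser = argparse.ArgumentParser(
        description='Pick at least one of:\n'
                    '  --bump=patch  increment patch version\n'
                    '  --stable      promote current beta to stable',
        formatter_class=argparse.RawTextHelpFormatter,
    )
    parser.print_help()  # the option list keeps its line breaks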
|
electron_electron
|
train
|
c931b84aacd40ce96d07cb3e23f159c5641b1992
|
diff --git a/src/models/RecaptchaV3.php b/src/models/RecaptchaV3.php
index <HASH>..<HASH> 100644
--- a/src/models/RecaptchaV3.php
+++ b/src/models/RecaptchaV3.php
@@ -6,7 +6,6 @@ use GuzzleHttp\Client;
class RecaptchaV3
{
-
/**
* @var \GuzzleHttp\Client
*/
|
Cleanup and merging in fix for issue #<I>
|
Rias500_craft-contact-form-extensions
|
train
|
886bf1c2d0d092c6c6ea46970ef7680e7e3fe33d
|
diff --git a/moto/autoscaling/responses.py b/moto/autoscaling/responses.py
index <HASH>..<HASH> 100644
--- a/moto/autoscaling/responses.py
+++ b/moto/autoscaling/responses.py
@@ -314,7 +314,7 @@ DESCRIBE_LAUNCH_CONFIGURATIONS_TEMPLATE = """<DescribeLaunchConfigurationsRespon
{% endif %}
<InstanceType>{{ launch_configuration.instance_type }}</InstanceType>
<LaunchConfigurationARN>arn:aws:autoscaling:us-east-1:803981987763:launchConfiguration:
- 9dbbbf87-6141-428a-a409-0752edbe6cad:launchConfigurationName/my-test-lc</LaunchConfigurationARN>
+ 9dbbbf87-6141-428a-a409-0752edbe6cad:launchConfigurationName/{{ launch_configuration.name }}</LaunchConfigurationARN>
{% if launch_configuration.block_device_mappings %}
<BlockDeviceMappings>
{% for mount_point, mapping in launch_configuration.block_device_mappings.items() %}
@@ -504,7 +504,7 @@ DESCRIBE_AUTOSCALING_GROUPS_TEMPLATE = """<DescribeAutoScalingGroupsResponse xml
<HealthCheckGracePeriod>{{ group.health_check_period }}</HealthCheckGracePeriod>
<DefaultCooldown>{{ group.default_cooldown }}</DefaultCooldown>
<AutoScalingGroupARN>arn:aws:autoscaling:us-east-1:803981987763:autoScalingGroup:ca861182-c8f9-4ca7-b1eb-cd35505f5ebb
- :autoScalingGroupName/my-test-asg-lbs</AutoScalingGroupARN>
+ :autoScalingGroupName/{{ group.name }}</AutoScalingGroupARN>
{% if group.termination_policies %}
<TerminationPolicies>
{% for policy in group.termination_policies %}
|
Fix ASG and LC ARNs to use the actual resource names
|
spulec_moto
|
train
|
8de17841d144c780a46389f5ef69b8c484c0747b
|
diff --git a/flink-connectors/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/source/enumerator/initializer/SpecifiedOffsetsInitializer.java b/flink-connectors/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/source/enumerator/initializer/SpecifiedOffsetsInitializer.java
index <HASH>..<HASH> 100644
--- a/flink-connectors/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/source/enumerator/initializer/SpecifiedOffsetsInitializer.java
+++ b/flink-connectors/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/source/enumerator/initializer/SpecifiedOffsetsInitializer.java
@@ -60,6 +60,12 @@ class SpecifiedOffsetsInitializer implements OffsetsInitializer {
}
}
if (!toLookup.isEmpty()) {
+ // First check the committed offsets.
+ Map<TopicPartition, Long> committedOffsets =
+ partitionOffsetsRetriever.committedOffsets(toLookup);
+ offsets.putAll(committedOffsets);
+ toLookup.removeAll(committedOffsets.keySet());
+
switch (offsetResetStrategy) {
case EARLIEST:
offsets.putAll(partitionOffsetsRetriever.beginningOffsets(toLookup));
diff --git a/flink-connectors/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/source/enumerator/initializer/OffsetsInitializerTest.java b/flink-connectors/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/source/enumerator/initializer/OffsetsInitializerTest.java
index <HASH>..<HASH> 100644
--- a/flink-connectors/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/source/enumerator/initializer/OffsetsInitializerTest.java
+++ b/flink-connectors/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/source/enumerator/initializer/OffsetsInitializerTest.java
@@ -41,12 +41,14 @@ import static org.junit.Assert.assertTrue;
/** Unit tests for {@link OffsetsInitializer}. */
public class OffsetsInitializerTest {
private static final String TOPIC = "topic";
+ private static final String TOPIC2 = "topic2";
private static KafkaSourceEnumerator.PartitionOffsetsRetrieverImpl retriever;
@BeforeClass
public static void setup() throws Throwable {
KafkaSourceTestEnv.setup();
KafkaSourceTestEnv.setupTopic(TOPIC, true, true);
+ KafkaSourceTestEnv.setupTopic(TOPIC2, false, false);
retriever =
new KafkaSourceEnumerator.PartitionOffsetsRetrieverImpl(
KafkaSourceTestEnv.getConsumer(),
@@ -116,19 +118,28 @@ public class OffsetsInitializerTest {
List<TopicPartition> partitions = KafkaSourceTestEnv.getPartitionsForTopic(TOPIC);
Map<TopicPartition, OffsetAndMetadata> committedOffsets =
KafkaSourceTestEnv.getCommittedOffsets(partitions);
- committedOffsets.forEach((tp, oam) -> specifiedOffsets.put(tp, oam.offset()));
+ partitions.forEach(tp -> specifiedOffsets.put(tp, (long) tp.partition()));
// Remove the specified offsets for partition 0.
- TopicPartition missingPartition = new TopicPartition(TOPIC, 0);
- specifiedOffsets.remove(missingPartition);
+ TopicPartition partitionSetToCommitted = new TopicPartition(TOPIC, 0);
+ specifiedOffsets.remove(partitionSetToCommitted);
OffsetsInitializer initializer = OffsetsInitializer.offsets(specifiedOffsets);
assertEquals(OffsetResetStrategy.EARLIEST, initializer.getAutoOffsetResetStrategy());
+ // The partition without committed offset should fallback to offset reset strategy.
+ TopicPartition partitionSetToEarliest = new TopicPartition(TOPIC2, 0);
+ partitions.add(partitionSetToEarliest);
Map<TopicPartition, Long> offsets = initializer.getPartitionOffsets(partitions, retriever);
for (TopicPartition tp : partitions) {
Long offset = offsets.get(tp);
- long expectedOffset =
- tp.equals(missingPartition) ? 0L : committedOffsets.get(tp).offset();
+ long expectedOffset;
+ if (tp.equals(partitionSetToCommitted)) {
+ expectedOffset = committedOffsets.get(tp).offset();
+ } else if (tp.equals(partitionSetToEarliest)) {
+ expectedOffset = 0L;
+ } else {
+ expectedOffset = specifiedOffsets.get(tp);
+ }
assertEquals(
String.format("%s has incorrect offset.", tp), expectedOffset, (long) offset);
}
|
[FLINK-<I>][connector/kafka] SourceOperatorStreamTask should check the committed offset first before using OffsetResetStrategy.
This is necessary to keep the same behavior as the legacy FlinkKafkaConsumer.
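A hypothetical Python sketch of the lookup order the patch establishes
(names are illustrative, not the Flink API): specified offsets first, then
committed offsets, then the reset strategy for whatever partitions remain:

    def resolve_offsets(partitions, specified, committed, reset):
        offsets, to_lookup = {}, []
        for tp in partitions:
            if tp in specified:
                offsets[tp] = specified[tp]
            else:
                to_lookup.append(tp)
        for tp in list(to_lookup):          # committed offsets win next
            if tp in committed:
                offsets[tp] = committed[tp]
                to_lookup.remove(tp)
        for tp in to_lookup:                # fall back to the strategy
            offsets[tp] = reset(tp)         # e.g. earliest/latest
        return offsets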
|
apache_flink
|
train
|
05737a78a123edf6b5403d1be85e014ffc2a7146
|
diff --git a/test/test_timeseries.py b/test/test_timeseries.py
index <HASH>..<HASH> 100644
--- a/test/test_timeseries.py
+++ b/test/test_timeseries.py
@@ -64,7 +64,7 @@ class TestTimeSeriesMethods(TimeSeriesTestCase):
rdd = self.sc.parallelize([(0, array([1, 2, 3, 4, 5]))])
data = TimeSeries(rdd).detrend('linear')
# detrending linearly increasing data should yield all 0s
- assert(allclose(data.first()[1], array([0, 0, 0, 0, 0])))
+ assert(allclose(data.first()[1], array([1, 1, 1, 1, 1])))
def test_normalization_bypercentile(self):
rdd = self.sc.parallelize([(0, array([1, 2, 3, 4, 5], dtype='float16'))])
|
fixed detrending test in test_timeseries.py
|
thunder-project_thunder
|
train
|
02375bb0d88d6f4c20a9d794a272c4fd308b0b16
|
diff --git a/snapshot-service-impl/src/main/java/org/duracloud/snapshot/service/impl/ContentPropertiesFileReader.java b/snapshot-service-impl/src/main/java/org/duracloud/snapshot/service/impl/ContentPropertiesFileReader.java
index <HASH>..<HASH> 100644
--- a/snapshot-service-impl/src/main/java/org/duracloud/snapshot/service/impl/ContentPropertiesFileReader.java
+++ b/snapshot-service-impl/src/main/java/org/duracloud/snapshot/service/impl/ContentPropertiesFileReader.java
@@ -53,6 +53,11 @@ public class ContentPropertiesFileReader implements ItemReader<ContentProperties
jParser = jfactory.createJsonParser(this.propertiesFile);
jParser.nextToken(); //skips the first [
}
+
+ //once parser is closed, always return null.
+ if(jParser.isClosed()){
+ return null;
+ }
try {
while (jParser.nextToken() != JsonToken.END_ARRAY &&
@@ -60,8 +65,10 @@ public class ContentPropertiesFileReader implements ItemReader<ContentProperties
return parseNext(jParser);
}
} catch(Exception e) {
- log.error("Error parsing content properties file: " +
- e.getMessage(), e);
+ String message = "Error parsing content properties file: " +
+ e.getMessage();
+ log.error(message, e);
+ throw new ParseException(message,e);
}
jParser.close();
diff --git a/snapshot-service-impl/src/test/java/org/duracloud/snapshot/service/impl/ContentPropertiesFileReaderTest.java b/snapshot-service-impl/src/test/java/org/duracloud/snapshot/service/impl/ContentPropertiesFileReaderTest.java
index <HASH>..<HASH> 100644
--- a/snapshot-service-impl/src/test/java/org/duracloud/snapshot/service/impl/ContentPropertiesFileReaderTest.java
+++ b/snapshot-service-impl/src/test/java/org/duracloud/snapshot/service/impl/ContentPropertiesFileReaderTest.java
@@ -11,6 +11,7 @@ import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
+import org.junit.internal.runners.statements.Fail;
import org.springframework.batch.item.NonTransientResourceException;
import org.springframework.batch.item.ParseException;
import org.springframework.batch.item.UnexpectedInputException;
@@ -58,6 +59,9 @@ public class ContentPropertiesFileReaderTest {
count++;
}
+ //verify that once the reader returns null, it will always return null.
+ Assert.assertNull(reader.read());
+
Assert.assertEquals(2, count++);
}
|
Resolves issue #2 in
<URL>
|
duracloud_snapshot
|
train
|
a178c4737275c97c24802c18b8d14b62c8545005
|
diff --git a/src/Fiedsch/Data/Utility/VariablenameMapper.php b/src/Fiedsch/Data/Utility/VariablenameMapper.php
index <HASH>..<HASH> 100644
--- a/src/Fiedsch/Data/Utility/VariablenameMapper.php
+++ b/src/Fiedsch/Data/Utility/VariablenameMapper.php
@@ -24,15 +24,29 @@ class VariablenameMapper
protected $throwException;
/**
- * @param $names
- * @param $throwException when a lookup fails
+ * @param array $names
+ * @param boolean $throwException throw an exception when a lookup fails
*/
public function __construct($names, $throwException = false)
{
- $this->lookup = array_flip($names);
+ $this->lookup =
+ array_filter(
+ array_flip(
+ array_map(
+ function($element) {
+ return trim($element);
+ },
+ $names
+ )
+ ),
+ function($element) {
+ return trim($element) !== '';
+ },
+ ARRAY_FILTER_USE_KEY
+ );
$this->throwException = $throwException;
if (count($this->lookup) != count($names)) {
- throw new \RuntimeException("supplied array of names contained invalid values");
+ throw new \RuntimeException("supplied array of names contained invalid values or duplicates.");
}
}
@@ -54,4 +68,11 @@ class VariablenameMapper
return -1;
}
+ /**
+ * @return array the mapping of column names to column indexes
+ */
+ public function getMapping()
+ {
+ return $this->lookup;
+ }
}
\ No newline at end of file
diff --git a/tests/Fiedsch/Data/Utility/VariablenameMapperTest.php b/tests/Fiedsch/Data/Utility/VariablenameMapperTest.php
index <HASH>..<HASH> 100644
--- a/tests/Fiedsch/Data/Utility/VariablenameMapperTest.php
+++ b/tests/Fiedsch/Data/Utility/VariablenameMapperTest.php
@@ -35,8 +35,29 @@ class VariablenameMapperTest extends PHPUnit_Framework_TestCase
*/
public function testBadConstructorInput()
{
- // has to throw a \RuntimeException:
+ // has to throw a \RuntimeException as we have duplicate name 'a'
new VariablenameMapper(['a', 'b', 'a']);
}
+ /**
+ * @expectedException \RuntimeException
+ */
+ public function testBadConstructorInput2()
+ {
+ // has to throw a \RuntimeException as we have an empty name
+ new VariablenameMapper(['a', 'b', '']);
+ }
+
+ public function testGetMapping()
+ {
+ $names = ['a', 'b', 'c'];
+ $expectedMapping = ['a' => 0, 'b' => 1, 'c' => 2];
+
+ $mapper = new VariablenameMapper($names);
+ $this->assertEquals($mapper->getMapping(), $expectedMapping);
+
+ $names = ['a ', ' b', ' c '];
+ $mapper = new VariablenameMapper($names);
+ $this->assertEquals($mapper->getMapping(), $expectedMapping);
+ }
}
|
filter empty variable names
filter empty variable names as they don’t make much sense; rejecting
them makes it easier to spot errors
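A Python analogue of the PHP pipeline (trim names, drop empties, map names
to column indexes, and reject duplicates via the length check):

    def build_lookup(names):
        lookup = {}
        for index, name in enumerate(names):
            name = name.strip()
            if name:                     # drop empty-after-trim names
                lookup[name] = index
        if len(lookup) != len(names):    # catches empties *and* dupes
            raise ValueError('names contained invalid values or duplicates')
        return lookup

    print(build_lookup(['a ', ' b', ' c ']))  # {'a': 0, 'b': 1, 'c': 2}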
|
fiedsch_datamanagement
|
train
|
5ba8a395dbf9585b034147078260b91cfe4b6f5f
|
diff --git a/spec/v2/common.rb b/spec/v2/common.rb
index <HASH>..<HASH> 100644
--- a/spec/v2/common.rb
+++ b/spec/v2/common.rb
@@ -11,35 +11,6 @@ def yaml_load(file)
YAML.load_file(fixture(file))
end
-module YAML
- def self.load_expand(file)
- expand(YAML.load_file(file), file)
- end
-
-private
- def self.expand(node, file)
- case node
- when Hash
- node.each do |k, v|
- if v.is_a? String
- if v =~ /^\$\((.*)\)/
- file = File.join(File.dirname(file), $1)
- node[k] = load_expand(file)
- end
- else
- expand(v, file)
- end
- end
- when Array
- node.each do |n|
- expand(n, file)
- end
- end
-
- node
- end
-end
-
include WebMock::API
class WebMocks
@@ -51,7 +22,7 @@ class WebMocks
end
def self.load(file_name)
- doc = YAML.load_expand(file_name)
+ doc = YAML.load_file(file_name)
doc.each do |url, response|
uri = Addressable::URI.parse(url)
|
Removing YAML expansion code since it's no longer used.
|
schmich_kappa
|
train
|
8777ba1ccb95d547b765358d9f0818c0a68ddee4
|
diff --git a/python/src/nnabla/utils/converter/onnx/exporter.py b/python/src/nnabla/utils/converter/onnx/exporter.py
index <HASH>..<HASH> 100644
--- a/python/src/nnabla/utils/converter/onnx/exporter.py
+++ b/python/src/nnabla/utils/converter/onnx/exporter.py
@@ -766,6 +766,15 @@ class OnnxExporter:
init.data_type = t
init.raw_data = np.array(
param.data, dtype=TENSOR_TYPE_TO_DTYPE[t]).tostring()
+
+ p = graph.input.add()
+ p.name = param.variable_name
+ p.type.tensor_type.elem_type = get_tensor_type(
+ param.variable_name, self._input_types)
+ dims = [create_dim(d)
+ for d in self._var_dict[param.variable_name].dim]
+ p.type.tensor_type.shape.dim.extend(dims)
+
else:
print("Not in: {}".format(param.variable_name))
@@ -778,18 +787,6 @@ class OnnxExporter:
for d in self._var_dict[iv.variable_name].dim]
i.type.tensor_type.shape.dim.extend(dims)
- for pv in exe.parameter_variable:
- if pv.variable_name in self._var_dict:
- p = graph.input.add()
- p.name = pv.variable_name
- p.type.tensor_type.elem_type = get_tensor_type(
- pv.variable_name, self._input_types)
- dims = [create_dim(d)
- for d in self._var_dict[pv.variable_name].dim]
- p.type.tensor_type.shape.dim.extend(dims)
- else:
- print("param: {} not in dict.".format(pv.variable_name))
-
# Add only the final output of the graph as output
for ov in exe.output_variable:
o = graph.output.add()
|
fix the problem that the initializer is inconsistent with the graph input
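A hedged sketch with the onnx helper API of the invariant the fix
restores: every parameter registered as an initializer also gets a
matching graph input (same name, dtype, and shape):

    import numpy as np
    from onnx import TensorProto, helper

    name = "W"
    values = np.zeros((2, 3), dtype=np.float32)
    init = helper.make_tensor(name, TensorProto.FLOAT, values.shape,
                              values.flatten().tolist())
    inp = helper.make_tensor_value_info(name, TensorProto.FLOAT, values.shape)
    graph = helper.make_graph([], "g", [inp], [], initializer=[init])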
|
sony_nnabla
|
train
|
45b209683971c02fadece9bf4d5b26758ea4f416
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,6 +8,7 @@ All notable changes to this project will be documented in this file.
- Added page with configurable table of SPs on Proxy
- Added new model Member
- Added new model Resource
+ - New methods for getting data from Perun LDAP and Perun RPC
[Changed]
- Connectors methods are not static for now.
diff --git a/lib/Adapter.php b/lib/Adapter.php
index <HASH>..<HASH> 100644
--- a/lib/Adapter.php
+++ b/lib/Adapter.php
@@ -56,6 +56,13 @@ abstract class sspmod_perun_Adapter
public abstract function getVoByShortName($voShortName);
/**
+ * @param integer $id
+ * @return sspmod_perun_model_Vo
+	 * @throws SimpleSAML_Error_Exception if it does not exist
+ */
+ public abstract function getVoById($id);
+
+ /**
* @param sspmod_perun_model_User $user perun user
* @param sspmod_perun_model_Vo $vo vo we are working with.
* @return sspmod_perun_model_Group[] groups from vo which member is. Including VO members group.
diff --git a/lib/AdapterLdap.php b/lib/AdapterLdap.php
index <HASH>..<HASH> 100644
--- a/lib/AdapterLdap.php
+++ b/lib/AdapterLdap.php
@@ -151,6 +151,19 @@ class sspmod_perun_AdapterLdap extends sspmod_perun_Adapter
return new sspmod_perun_model_Vo($vo['perunVoId'][0], $vo['description'][0], $vo['o'][0]);
}
+ public function getVoById($id)
+ {
+ $vo = sspmod_perun_LdapConnector::searchForEntity($this->ldapBase,
+ "(&(objectClass=perunVo)(perunVoId=$id))",
+ array("o", "description")
+ );
+ if (is_null($vo)) {
+			throw new SimpleSAML_Error_Exception("Vo with id: $id does not exist in Perun LDAP.");
+ }
+
+ return new sspmod_perun_model_Vo($id, $vo['description'][0], $vo['o'][0]);
+ }
+
public function getUserAttributes($user, $attrNames)
{
diff --git a/lib/AdapterRpc.php b/lib/AdapterRpc.php
index <HASH>..<HASH> 100644
--- a/lib/AdapterRpc.php
+++ b/lib/AdapterRpc.php
@@ -176,6 +176,14 @@ class sspmod_perun_AdapterRpc extends sspmod_perun_Adapter
return new sspmod_perun_model_Vo($vo['id'], $vo['name'], $vo['shortName']);
}
+ public function getVoById($id)
+ {
+ $vo = $this->connector->get('vosManager', 'getVoById', array(
+ 'id' => $id,
+ ));
+
+ return new sspmod_perun_model_Vo($vo['id'], $vo['name'], $vo['shortName']);
+ }
public function getUserAttributes($user, $attrNames)
{
@@ -300,6 +308,40 @@ class sspmod_perun_AdapterRpc extends sspmod_perun_Adapter
return $facilities;
}
+ /**
+ * Returns member by User and Vo
+ * @param sspmod_perun_model_User $user
+ * @param sspmod_perun_model_Vo $vo
+ * @return sspmod_perun_model_Member
+ */
+ public function getMemberByUser($user, $vo) {
+ $member = sspmod_perun_RpcConnector::get('membersManager', 'getMemberByUser', array(
+ 'user' => $user->getId(),
+ 'vo' => $vo->getId(),
+ ));
+ if (is_null($member)) {
+ throw new SimpleSAML_Error_Exception("Member for User with name " . $user->getName() . " and Vo with shortName " .
+ $vo->getShortName() . "does not exist in Perun!");
+ }
+ return new sspmod_perun_model_Member($member['id'], $member['voId'], $member['status']);
+ }
+
+ /**
+ * Returns true if group has registration form, false otherwise
+ * @param sspmod_perun_model_Group $group
+ * @return bool
+ */
+ public function hasRegistrationForm($group) {
+ try {
+ sspmod_perun_RpcConnector::get( 'registrarManager', 'getApplicationForm', array(
+ 'group' => $group->getId(),
+ ));
+ return true;
+ } catch (Exception $exception) {
+ return false;
+ }
+ }
+
public function searchFacilitiesByAttributeValue($attribute)
{
$perunAttrs = $this->connector->post('searcher', 'getFacilities', array(
|
New methods for getting data from Perun LDAP and Perun RPC
* Added method getVoById for Perun LDAP and Perun RPC
* Added methods getMemberByUser and hasRegistrationForm to Perun RPC
|
CESNET_perun-simplesamlphp-module
|
train
|
13be44272ac1163e2d4361ddbc815a20872f4b14
|
diff --git a/lib/system/signal.go b/lib/system/signal.go
index <HASH>..<HASH> 100644
--- a/lib/system/signal.go
+++ b/lib/system/signal.go
@@ -2,11 +2,22 @@ package system
/*
#include <signal.h>
-void resetInterruptSignalHandler() {
-signal(SIGINT, SIG_DFL);
+int resetInterruptSignalHandler() {
+ struct sigaction act;
+ int result;
+ if ((result = sigaction(SIGINT, 0, &act)) != 0) {
+ return result;
+ }
+ if (act.sa_handler == SIG_IGN) {
+ // Reset the handler for SIGINT to system default.
+ // FIXME: Note, this will also overwrite runtime's signal handler
+ signal(SIGINT, SIG_DFL);
+ }
+ return 0;
}
*/
import "C"
+import log "github.com/sirupsen/logrus"
// ResetInterruptSignal will reset the handler for SIGINT back to the default
// handler. We need to do this because when sysvinit launches Teleport on some
@@ -16,5 +27,8 @@ import "C"
// http://garethrees.org/2015/08/07/ping/
// https://github.com/openssh/openssh-portable/commit/4e0f5e1ec9b6318ef251180dbca50eaa01f74536
func ResetInterruptSignalHandler() {
- C.resetInterruptSignalHandler()
+ result := C.resetInterruptSignalHandler()
+ if result != 0 {
+ log.Warnf("Failed to reset interrupt signal handler: %v.", result)
+ }
}
|
Avoid resetting the SIGINT handler if it has not actually been set to
ignore (Go's runtime respects SIG_IGN, btw, by not setting a handler).
If the handler is reset unconditionally, no Go code can ask to be
notified of Interrupt signal as the system default handler obviously
knows nothing about Go code.
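The same guard expressed as a runnable Python analogue (Python's signal
module exposes the equivalent primitives):

    import signal

    # Only restore the default SIGINT disposition if the process
    # inherited SIG_IGN; otherwise leave the runtime's handler alone.
    if signal.getsignal(signal.SIGINT) is signal.SIG_IGN:
        signal.signal(signal.SIGINT, signal.SIG_DFL)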
|
gravitational_teleport
|
train
|
fb362174d7435eed69f23cc667834236c3906ec7
|
diff --git a/server.go b/server.go
index <HASH>..<HASH> 100644
--- a/server.go
+++ b/server.go
@@ -411,7 +411,7 @@ func (s *Server) ServeConn(c io.ReadWriter) error {
func (s *Server) serveConn(c io.ReadWriter, ctxP **RequestCtx) error {
ctx := *ctxP
- initRequestCtx(ctx, c)
+ initCtx(ctx, c)
var rd readDeadliner
readTimeout := s.ReadTimeout
@@ -524,7 +524,7 @@ func trimBigBuffers(ctx *RequestCtx) {
}
}
-func initRequestCtx(ctx *RequestCtx, c io.ReadWriter) {
+func initCtx(ctx *RequestCtx, c io.ReadWriter) {
if ctx.r == nil {
readBufferSize := ctx.s.ReadBufferSize
if readBufferSize <= 0 {
|
initRequestCtx -> initCtx for the sake of consistency
|
valyala_fasthttp
|
train
|
80158149a008b9a2175f89a2558ff318fe16b1d6
|
diff --git a/tests/src/Hodor/Database/DriverTest.php b/tests/src/Hodor/Database/DriverTest.php
index <HASH>..<HASH> 100644
--- a/tests/src/Hodor/Database/DriverTest.php
+++ b/tests/src/Hodor/Database/DriverTest.php
@@ -15,10 +15,11 @@ class DriverTest extends PHPUnit_Framework_TestCase
* @covers ::__construct
* @covers ::queryMultiple
* @covers ::<private>
- * @dataProvider adapterProvider
*/
- public function testQueryMultipleCanRunMultipleQueries($adapter)
+ public function testQueryMultipleCanRunMultipleQueries()
{
+ $adapter = $this->getYoPdoDriver();
+
$tablename = 'test_multiple_queries_' . uniqid();
$sql = <<<SQL
@@ -32,11 +33,12 @@ SQL;
* @covers ::__construct
* @covers ::queryMultiple
* @covers ::<private>
- * @dataProvider adapterProvider
* @expectedException Exception
*/
- public function testQueryMultipleThrowsAnExceptionOnError($adapter)
+ public function testQueryMultipleThrowsAnExceptionOnError()
{
+ $adapter = $this->getYoPdoDriver();
+
$sql = <<<SQL
SELECT 1 FROM not_there;
SQL;
@@ -47,10 +49,11 @@ SQL;
* @covers ::__construct
* @covers ::selectRowGenerator
* @covers ::<private>
- * @dataProvider adapterProvider
*/
- public function testSelectRowGeneratorGeneratesResults($adapter)
+ public function testSelectRowGeneratorGeneratesResults()
{
+ $adapter = $this->getYoPdoDriver();
+
$sql = <<<SQL
SELECT 1 AS col UNION
SELECT 2 AS col UNION
@@ -69,11 +72,12 @@ SQL;
* @covers ::__construct
* @covers ::selectRowGenerator
* @covers ::<private>
- * @dataProvider adapterProvider
* @expectedException Exception
*/
- public function testSelectRowGeneratorThrowsAnExceptionOnError($adapter)
+ public function testSelectRowGeneratorThrowsAnExceptionOnError()
{
+ $adapter = $this->getYoPdoDriver();
+
$sql = <<<SQL
SELECT 1 FROM not_here;
SQL;
@@ -88,10 +92,11 @@ SQL;
* @covers ::__construct
* @covers ::selectOne
* @covers ::<private>
- * @dataProvider adapterProvider
*/
- public function testSelectOneReturnsResults($adapter)
+ public function testSelectOneReturnsResults()
{
+ $adapter = $this->getYoPdoDriver();
+
$sql = <<<SQL
SELECT 5 AS col
SQL;
@@ -102,11 +107,12 @@ SQL;
* @covers ::__construct
* @covers ::selectOne
* @covers ::<private>
- * @dataProvider adapterProvider
* @expectedException Exception
*/
- public function testSelectOneThrowsAnExceptionOnError($adapter)
+ public function testSelectOneThrowsAnExceptionOnError()
{
+ $adapter = $this->getYoPdoDriver();
+
$sql = <<<SQL
SELECT 1 FROM not_there;
SQL;
@@ -117,10 +123,11 @@ SQL;
* @covers ::__construct
* @covers ::insert
* @covers ::<private>
- * @dataProvider adapterProvider
*/
- public function testInsertedRowCanBeRetrieved($adapter)
+ public function testInsertedRowCanBeRetrieved()
{
+ $adapter = $this->getYoPdoDriver();
+
$tablename = 'test_insert_' . uniqid();
$sql = <<<SQL
@@ -149,11 +156,12 @@ SQL;
* @covers ::__construct
* @covers ::insert
* @covers ::<private>
- * @dataProvider adapterProvider
* @expectedException Exception
*/
- public function testInsertThrowsAnExceptionOnError($adapter)
+ public function testInsertThrowsAnExceptionOnError()
{
+ $adapter = $this->getYoPdoDriver();
+
$adapter->insert('some_table', ['no_row' => true]);
}
@@ -161,10 +169,11 @@ SQL;
* @covers ::__construct
* @covers ::delete
* @covers ::<private>
- * @dataProvider adapterProvider
*/
- public function testDeletedRowNoLongerExists($adapter)
+ public function testDeletedRowNoLongerExists()
{
+ $adapter = $this->getYoPdoDriver();
+
$tablename = 'test_insert_' . uniqid();
$sql = <<<SQL
@@ -204,18 +213,20 @@ SQL;
* @covers ::__construct
* @covers ::delete
* @covers ::<private>
- * @dataProvider adapterProvider
* @expectedException Exception
*/
- public function testDeleteThrowsAnExceptionOnError($adapter)
+ public function testDeleteThrowsAnExceptionOnError()
{
+ $adapter = $this->getYoPdoDriver();
+
$adapter->delete('some_table', 'no_row = :no_row', ['no_row' => true]);
}
/**
- * @return array
+ * @return YoPdoDriver
+ * @throws Exception
*/
- public function adapterProvider()
+ public function getYoPdoDriver()
{
$config_path = __DIR__ . '/../../../../config/config.test.php';
if (!file_exists($config_path)) {
@@ -224,8 +235,6 @@ SQL;
$config = require $config_path;
- return [
- [new YoPdoDriver($config['test']['db']['yo-pdo-pgsql'])],
- ];
+ return new YoPdoDriver($config['test']['db']['yo-pdo-pgsql']);
}
}
|
Update DriverTest to cover the constructor
Code executed in provider methods does not count
towards code coverage
|
lightster_hodor
|
train
|
645c37cc4011982e362d4552518b85865e5633fe
|
diff --git a/redis_metrics/models.py b/redis_metrics/models.py
index <HASH>..<HASH> 100644
--- a/redis_metrics/models.py
+++ b/redis_metrics/models.py
@@ -94,8 +94,8 @@ class R(object):
objects that differ by 1 second each.
"""
- if since is None: # Default to the last 90 days
- since = datetime.utcnow() - timedelta(days=90)
+        if since is None:  # Default to the last 7 days
+ since = datetime.utcnow() - timedelta(days=7)
now = datetime.utcnow()
elapsed = (now - since)
|
change the default time period from <I> to 7 days
|
bradmontgomery_django-redis-metrics
|
train
|
c352ec060ceddc43f63863c5d16ae31c3a72e42f
|
diff --git a/actionpack/lib/action_view/helpers/active_model_helper.rb b/actionpack/lib/action_view/helpers/active_model_helper.rb
index <HASH>..<HASH> 100644
--- a/actionpack/lib/action_view/helpers/active_model_helper.rb
+++ b/actionpack/lib/action_view/helpers/active_model_helper.rb
@@ -6,7 +6,7 @@ require 'active_support/core_ext/kernel/reporting'
module ActionView
class Base
- @@field_error_proc = Proc.new{ |html_tag, instance| "<div class=\"fieldWithErrors\">#{html_tag}</div>" }
+ @@field_error_proc = Proc.new{ |html_tag, instance| "<div class=\"fieldWithErrors\">#{html_tag}</div>".html_safe! }
cattr_accessor :field_error_proc
end
|
error procs have to be safe too
|
rails_rails
|
train
|
105a5f8a18697ad94594bb1531c1bd91acd70207
|
diff --git a/src/tools/ReferenceLinesTool.js b/src/tools/ReferenceLinesTool.js
index <HASH>..<HASH> 100644
--- a/src/tools/ReferenceLinesTool.js
+++ b/src/tools/ReferenceLinesTool.js
@@ -3,7 +3,6 @@ import external from './../externalModules.js';
import BaseTool from './../base/BaseTool.js';
import { getNewContext } from '../drawing/index.js';
-import { addToolState, getToolState } from './../stateManagement/toolState.js';
import renderActiveReferenceLine from './referenceLines/renderActiveReferenceLine.js';
import { waitForEnabledElementImageToLoad } from './../util/wait.js';
@@ -34,22 +33,28 @@ export default class ReferenceLinesTool extends BaseTool {
super(initialConfiguration);
this.initialConfiguration = initialConfiguration;
+ this.renderer = null;
+ this.synchronizationContext = null;
}
async enabledCallback (element, { synchronizationContext } = {}) {
const renderer = this.configuration.renderer;
const enabledElement = await waitForEnabledElementImageToLoad(element);
- if (!enabledElement) {
+ if (!enabledElement || !renderer || !synchronizationContext) {
// TODO: Unable to add tool state, image never loaded.
// Should we `setToolDisabledForElement` here?
+ console.warn(
+ `Unable to enable ${
+ this.name
+ }. Exiting enable callback. Tool will be enabled, but will not render.`
+ );
+
return;
}
+ this.renderer = renderer;
+ this.synchronizationContext = synchronizationContext;
- addToolState(element, this.name, {
- synchronizationContext,
- renderer
- });
this.forceImageUpdate(element);
}
@@ -67,35 +72,36 @@ export default class ReferenceLinesTool extends BaseTool {
renderToolData (evt) {
const eventData = evt.detail;
- const toolData = getToolState(evt.currentTarget, this.name);
-
- // No tool data? Bail out
- if (toolData === undefined) {
- return;
- }
-
- const { renderer, synchronizationContext } = toolData.data[0];
// No renderer or synch context? Adios
- if (renderer === undefined || synchronizationContext === undefined) {
+ if (
+ this.renderer === undefined ||
+ this.synchronizationContext === undefined
+ ) {
return;
}
// Get the enabled elements associated with this synchronization context and draw them
- const enabledElements = synchronizationContext.getSourceElements();
+ const enabledElements = this.synchronizationContext.getSourceElements();
const context = getNewContext(eventData.canvasContext.canvas);
- external.cornerstone.setToPixelCoordinateSystem(eventData.enabledElement, context);
+ external.cornerstone.setToPixelCoordinateSystem(
+ eventData.enabledElement,
+ context
+ );
enabledElements.forEach((referenceEnabledElement) => {
-
// Don't draw ourselves
if (referenceEnabledElement === evt.currentTarget) {
return;
}
// Render it
- renderer(context, eventData, evt.currentTarget, referenceEnabledElement);
+ this.renderer(
+ context,
+ eventData,
+ evt.currentTarget,
+ referenceEnabledElement
+ );
});
-
}
}
|
Fix: referenceLines should continue to work, even if the image does not have toolData
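The shape of the fix, sketched in Python with illustrative names only: the renderer and synchronization context move onto the tool instance, so rendering no longer depends on per-element tool data that may never have been written.

class ReferenceLinesLikeTool:
    def __init__(self):
        # Instance state replaces the per-element toolState entry.
        self.renderer = None
        self.sync_context = None

    def enable(self, renderer=None, sync_context=None):
        if renderer is None or sync_context is None:
            print("warn: tool enabled, but it will not render")
            return
        self.renderer = renderer
        self.sync_context = sync_context

    def render(self, target):
        if self.renderer is None or self.sync_context is None:
            return  # nothing stored: skip quietly instead of crashing
        for source in self.sync_context:
            if source is not target:
                self.renderer(source, target)

tool = ReferenceLinesLikeTool()
tool.enable(renderer=lambda s, t: print("draw %s onto %s" % (s, t)),
            sync_context=["A", "B", "C"])
tool.render("B")  # draws A and C onto B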
|
cornerstonejs_cornerstoneTools
|
train
|
73a536b1f847285f51255572449e495a050c3015
|
diff --git a/polysquarelinter/linter.py b/polysquarelinter/linter.py
index <HASH>..<HASH> 100644
--- a/polysquarelinter/linter.py
+++ b/polysquarelinter/linter.py
@@ -29,6 +29,8 @@ from contextlib import closing
from functools import reduce as freduce
+from jobstamps import jobstamp
+
import parmap
from polysquarelinter.spelling import (Dictionary,
@@ -765,13 +767,13 @@ def _run_lint_on_file_exceptions(file_path,
raise exception
-def _run_lint_on_file_stamped(file_path, # suppress(too-many-arguments)
- stamp_file_path,
- log_technical_terms_to,
- linter_functions,
- tool_options,
- fix_what_you_can):
- """Run linter functions on file_path, stamping in stamp_file_path."""
+def _run_lint_on_file_stamped_args(file_path, # suppress(too-many-arguments)
+ stamp_file_path,
+ log_technical_terms_to,
+ linter_functions,
+ tool_options,
+ fix_what_you_can):
+ """Return tuple of args and kwargs that function would be called with."""
dictionary_path = os.path.abspath("DICTIONARY")
dependencies = [file_path]
@@ -785,12 +787,20 @@ def _run_lint_on_file_stamped(file_path, # suppress(too-many-arguments)
if log_technical_terms_to:
kwargs["jobstamps_output_files"] = [log_technical_terms_to]
+ return ((file_path,
+ linter_functions,
+ tool_options,
+ fix_what_you_can),
+ kwargs)
+
+
+def _run_lint_on_file_stamped(*args, **kwargs):
+ """Run linter functions on file_path, stamping in stamp_file_path."""
+ stamp_args, stamp_kwargs = _run_lint_on_file_stamped_args(*args, **kwargs)
+
return stamp(_run_lint_on_file_exceptions,
- file_path,
- linter_functions,
- tool_options,
- fix_what_you_can,
- **kwargs)
+ *stamp_args,
+ **stamp_kwargs)
def _ordered(generator, *args, **kwargs):
@@ -804,6 +814,25 @@ def _ordered(generator, *args, **kwargs):
return result
+def _any_would_run(func, filenames, *args, **kwargs):
+ """True if a linter function would be called on any of filenames."""
+ if os.environ.get("_POLYSQUARE_GENERIC_FILE_LINTER_NO_STAMPING", None):
+ return True
+
+ for filename in filenames:
+ stamp_args, stamp_kwargs = _run_lint_on_file_stamped_args(filename,
+ *args,
+ **kwargs)
+ dependency = jobstamp.out_of_date(func,
+ *stamp_args,
+ **stamp_kwargs)
+
+ if dependency:
+ return True
+
+ return False
+
+
def main(arguments=None): # suppress(unused-function)
"""Entry point for the linter."""
result = _parse_arguments(arguments)
@@ -812,12 +841,22 @@ def main(arguments=None): # suppress(unused-function)
result.blacklist)
global_options = vars(result)
tool_options = tool_options_from_global(global_options, len(result.files))
-
- for linter_function in linter_funcs.values():
- if linter_function.before_all:
- linter_function.before_all(global_options, tool_options)
-
- use_multiprocessing = _should_use_multiprocessing(len(result.files))
+ any_would_run = _any_would_run(_run_lint_on_file_exceptions,
+ result.files,
+ result.stamp_file_path,
+ result.log_technical_terms_to,
+ linter_funcs,
+ tool_options,
+ result.fix_what_you_can)
+
+ if any_would_run:
+ for linter_function in linter_funcs.values():
+ if linter_function.before_all:
+ linter_function.before_all(global_options, tool_options)
+
+ use_multiprocessing = _should_use_multiprocessing(len(result.files))
+ else:
+ use_multiprocessing = False
if use_multiprocessing:
mapper = parmap.map
@@ -836,8 +875,9 @@ def main(arguments=None): # suppress(unused-function)
for error in sorted(errors):
_report_lint_error(error.failure, os.path.relpath(error.absolute_path))
- for linter_funcs in linter_funcs.values():
- if linter_funcs.after_all:
- linter_funcs.after_all(global_options, tool_options)
+ if any_would_run:
+ for linter_funcs in linter_funcs.values():
+ if linter_funcs.after_all:
+ linter_funcs.after_all(global_options, tool_options)
return len(errors)
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -35,12 +35,12 @@ setup(name="polysquare-generic-file-linter",
]
},
install_requires=[
- "jobstamps>=0.0.8",
+ "jobstamps>=0.0.11",
"parmap",
"Whoosh<=2.6.0"
],
extras_require={
- "polysquarelint": ["polysquare-setuptools-lint>=0.0.16"],
+ "polysquarelint": ["polysquare-setuptools-lint>=0.0.22"],
"green": [
"nose",
"nose-parameterized>=0.5.0",
|
Don't run before_all functions unless any lint jobs would run
The only before_all and after_all jobs belong to the spellcheck
linter, and they all involve creating Whoosh dictionary
structures. That is an expensive process and is unnecessary
when no lint jobs will run.
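A rough sketch of the gating pattern (the stale set stands in for the jobstamp.out_of_date check; none of these names are the project's real API):

def any_would_run(files, stale):
    # If any file's stamp is out of date, at least one lint job runs.
    return any(f in stale for f in files)

def lint_all(files, stale, before_all, after_all, run_one):
    if not any_would_run(files, stale):
        return []             # everything fresh: skip the expensive setup
    before_all()              # e.g. building the Whoosh dictionaries
    errors = [e for f in files if f in stale for e in run_one(f)]
    after_all()
    return errors

print(lint_all(["a.py", "b.py"], {"a.py"},
               lambda: print("before_all"),
               lambda: print("after_all"),
               lambda f: [f + ": ok"]))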
|
polysquare_polysquare-generic-file-linter
|
train
|