hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
8db92bc09ff23d0b6872d3dd87505edc0c25d404
|
diff --git a/tests/HTTP/ClientTest.php b/tests/HTTP/ClientTest.php
index <HASH>..<HASH> 100644
--- a/tests/HTTP/ClientTest.php
+++ b/tests/HTTP/ClientTest.php
@@ -212,7 +212,7 @@ class ClientTest extends \PHPUnit\Framework\TestCase
$response = $client->send($request);
$this->assertEquals(200, $response->getStatus());
- $this->assertGreaterThan(memory_get_peak_usage(), 40 * pow(1024, 2));
+ $this->assertLessThan(40 * pow(1024, 2), memory_get_peak_usage());
}
/**
|
Change expected and actual to be around the usual way
|
sabre-io_http
|
train
|
abc70dfcdf1a37501ed1b4c8d24eedb7517c6d38
|
diff --git a/couscous.yml b/couscous.yml
index <HASH>..<HASH> 100644
--- a/couscous.yml
+++ b/couscous.yml
@@ -225,9 +225,6 @@ menu:
replace:
text: replace
relativeUrl: docs/Methods/replace.html
- replaceExact:
- text: replaceExact
- relativeUrl: docs/Methods/replaceexact.html
replaceBeginning:
text: replaceBeginning
relativeUrl: docs/Methods/replacebeginning.html
diff --git a/src/Methods/Replace.php b/src/Methods/Replace.php
index <HASH>..<HASH> 100644
--- a/src/Methods/Replace.php
+++ b/src/Methods/Replace.php
@@ -51,49 +51,6 @@ trait Replace
}
/**
- * Replaces only if an exact match is found.
- *
- * Essentially all this does is swaps one string for another.
- * I needed this in a db migration script to map a bunch of
- * old column names to new column names.
- *
- * @param string|string[] $search Either a single search term or
- * an array of search terms.
- *
- * @param string|string[] $replacement Must be the same length as $search.
- * So if you provide a single search
- * term, you must provide a single
- * replacement, if you provide 10
- * search terms you must provide 10
- * replacements.
- *
- * @return static
- */
- public function replaceExact($search, $replacement)
- {
- if (!is_array($search)) $search = [$search];
- if (!is_array($replacement)) $replacement = [$replacement];
-
- if (count($search) !== count($replacement))
- {
- throw new \InvalidArgumentException
- (
- '$search and $replacement must the same length!'
- );
- }
-
- foreach ($search as $key => $term)
- {
- if ($this->scalarString == (string)$term)
- {
- return $this->newSelf($replacement[$key]);
- }
- }
-
- return $this;
- }
-
- /**
* Replaces all occurrences of $search from the
* beginning of string with $replacement.
*
diff --git a/tests/ReplaceTest.php b/tests/ReplaceTest.php
index <HASH>..<HASH> 100644
--- a/tests/ReplaceTest.php
+++ b/tests/ReplaceTest.php
@@ -52,30 +52,6 @@ class ReplaceTest extends PHPUnit_Framework_TestCase
}
/**
- * @dataProvider replaceExactProvider()
- */
- public function testReplaceExact($expected, $string, $search, $replacement)
- {
- $str = new Str($string);
- $result = $str->replaceExact($search, $replacement);
- $this->assertInstanceOf('Gears\\String\\Str', $result);
- $this->assertEquals($expected, $result);
- $this->assertEquals($string, $str);
- }
-
- public function replaceExactProvider()
- {
- return
- [
- ['foo bar', 'foobar', 'foobar', 'foo bar'],
- ['foobar', 'foobar', 'bazbar', 'foo bar'],
- ['foo bar', 'foobar', ['bar', 'bazbar', 'foobar'], ['bar bar', 'baz bar', 'foo bar']],
- ['bar bar', 'bar', ['bar', 'bazbar', 'foobar'], ['bar bar', 'baz bar', 'foo bar']],
- ['baz bar', 'bazbar', ['bar', 'bazbar', 'foobar'], ['bar bar', 'baz bar', 'foo bar']]
- ];
- }
-
- /**
* @dataProvider replaceBeginningProvider()
*/
public function testReplaceBeginning($expected, $string, $search, $replacement, $encoding = null)
|
Removed `replaceExact()`
Really not sure what I was thinking when I wrote that...
|
phpgearbox_string
|
train
|
ae63df8482d1bfe378c44ac0873d83dba53c03ca
|
diff --git a/src/main/java/com/basho/riak/client/core/operations/CoveragePlanOperation.java b/src/main/java/com/basho/riak/client/core/operations/CoveragePlanOperation.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/basho/riak/client/core/operations/CoveragePlanOperation.java
+++ b/src/main/java/com/basho/riak/client/core/operations/CoveragePlanOperation.java
@@ -237,6 +237,11 @@ public class CoveragePlanOperation extends FutureOperation<CoveragePlanOperation
assert subIterator != null;
return subIterator.next();
}
+
+ @Override
+ public void remove() {
+ throw new UnsupportedOperationException();
+ }
};
}
|
Java <I> compatibility issue
|
basho_riak-java-client
|
train
|
d48370a60b5e01c490a2fd33feb2a48b64aef10b
|
diff --git a/backup/backuplib.php b/backup/backuplib.php
index <HASH>..<HASH> 100644
--- a/backup/backuplib.php
+++ b/backup/backuplib.php
@@ -549,6 +549,7 @@
if ($users) {
//Begin Users tag
fwrite ($bf,start_tag("USERS",2,true));
+ $counter = 0;
//With every user
foreach ($users as $user) {
//Get user data from table
@@ -654,6 +655,15 @@
}
//End User tag
fwrite ($bf,end_tag("USER",3,true));
+ //Do some output
+ $counter++;
+ if ($counter % 10 == 0) {
+ echo ".";
+ if ($counter % 200 == 0) {
+ echo "<br>";
+ }
+ backup_flush(300);
+ }
}
//End Users tag
fwrite ($bf,end_tag("USERS",2,true));
@@ -678,6 +688,7 @@
if ($logs) {
//Pring logs header
fwrite ($bf,start_tag("LOGS",2,true));
+ $counter = 0;
//Iterate
foreach ($logs as $log) {
//See if it is a valid module to backup
@@ -700,6 +711,15 @@
//End log tag
fwrite ($bf,end_tag("LOG",3,true));
}
+ //Do some output
+ $counter++;
+ if ($counter % 10 == 0) {
+ echo ".";
+ if ($counter % 200 == 0) {
+ echo "<br>";
+ }
+ backup_flush(300);
+ }
}
//End logs tag
$status = fwrite ($bf,end_tag("LOGS",2,true));
diff --git a/backup/mod/quiz/backuplib.php b/backup/mod/quiz/backuplib.php
index <HASH>..<HASH> 100644
--- a/backup/mod/quiz/backuplib.php
+++ b/backup/mod/quiz/backuplib.php
@@ -124,6 +124,7 @@
if ($questions) {
//Write start tag
$status =fwrite ($bf,start_tag("QUESTIONS",4,true));
+ $counter = 0;
//Iterate over each question
foreach ($questions as $question) {
//Start question
@@ -153,6 +154,15 @@
}
//End question
$status =fwrite ($bf,end_tag("QUESTION",5,true));
+ //Do some output
+ $counter++;
+ if ($counter % 10 == 0) {
+ echo ".";
+ if ($counter % 200 == 0) {
+ echo "<br>";
+ }
+ backup_flush(300);
+ }
}
//Write end tag
$status =fwrite ($bf,end_tag("QUESTIONS",4,true));
|
Added some flush() to the backup process (in users, questions and logs)
|
moodle_moodle
|
train
|
578a25190761127d67e2f166b3774e1f77f6a8fd
|
diff --git a/go/firewallsso/base.go b/go/firewallsso/base.go
index <HASH>..<HASH> 100644
--- a/go/firewallsso/base.go
+++ b/go/firewallsso/base.go
@@ -18,6 +18,7 @@ import (
// Basic interface that all FirewallSSO must implement
type FirewallSSOInt interface {
init(ctx context.Context) error
+ initChild(ctx context.Context) error
logger(ctx context.Context) log15.Logger
getSourceIp(ctx context.Context) net.IP
Start(ctx context.Context, info map[string]string, timeout int) (bool, error)
@@ -55,6 +56,11 @@ func (fw *FirewallSSO) init(ctx context.Context) error {
return nil
}
+// Meant to be overriden in the child classes
+func (fw *FirewallSSO) initChild(ctx context.Context) error {
+ return nil
+}
+
// Structure representing a network part of a firewall
type FirewallSSONetwork struct {
Cidr string
diff --git a/go/firewallsso/factory.go b/go/firewallsso/factory.go
index <HASH>..<HASH> 100644
--- a/go/firewallsso/factory.go
+++ b/go/firewallsso/factory.go
@@ -43,7 +43,17 @@ func (f *Factory) Instantiate(ctx context.Context, id string) (FirewallSSOInt, e
}
fwint := firewall2.(FirewallSSOInt)
- fwint.init(ctx)
+
+ err = fwint.init(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ err = fwint.initChild(ctx)
+ if err != nil {
+ return nil, err
+ }
+
return fwint, nil
} else {
return nil, errors.New("Cannot find the type of the object")
diff --git a/go/firewallsso/paloalto.go b/go/firewallsso/paloalto.go
index <HASH>..<HASH> 100644
--- a/go/firewallsso/paloalto.go
+++ b/go/firewallsso/paloalto.go
@@ -15,6 +15,17 @@ type PaloAlto struct {
Transport string `json:"transport"`
Password string `json:"password"`
Port string `json:"port"`
+ Vsys string `json:"vsys"`
+}
+
+// Firewall specific init
+func (fw *PaloAlto) initChild(ctx context.Context) error {
+ // Set a default value for vsys if there is none
+ if fw.Vsys == "" {
+ log.LoggerWContext(ctx).Debug("Setting default value for vsys as it isn't defined")
+ fw.Vsys = "1"
+ }
+ return nil
}
// Send an SSO start to the PaloAlto using either syslog or HTTP depending on the Transport value of the struct
@@ -75,7 +86,7 @@ func (fw *PaloAlto) startSyslog(ctx context.Context, info map[string]string, tim
// Send a start to the PaloAlto using the HTTP transport
// Will return an error if it fails to get a valid reply from it
func (fw *PaloAlto) startHttp(ctx context.Context, info map[string]string, timeout int) (bool, error) {
- resp, err := fw.getHttpClient(ctx).PostForm("https://"+fw.PfconfigHashNS+":"+fw.Port+"/api/?type=user-id&action=set&key="+fw.Password,
+ resp, err := fw.getHttpClient(ctx).PostForm("https://"+fw.PfconfigHashNS+":"+fw.Port+"/api/?type=user-id&vsys=vsys"+fw.Vsys+"&action=set&key="+fw.Password,
url.Values{"cmd": {fw.startHttpPayload(ctx, info, timeout)}})
if err != nil {
@@ -145,7 +156,7 @@ func (fw *PaloAlto) stopHttpPayload(ctx context.Context, info map[string]string)
// Send an SSO stop using HTTP to the PaloAlto firewall
// Returns an error if it fails to get a valid reply from the firewall
func (fw *PaloAlto) stopHttp(ctx context.Context, info map[string]string) (bool, error) {
- resp, err := fw.getHttpClient(ctx).PostForm("https://"+fw.PfconfigHashNS+":"+fw.Port+"/api/?type=user-id&action=set&key="+fw.Password,
+ resp, err := fw.getHttpClient(ctx).PostForm("https://"+fw.PfconfigHashNS+":"+fw.Port+"/api/?type=user-id&vsys=vsys"+fw.Vsys+"&action=set&key="+fw.Password,
url.Values{"cmd": {fw.stopHttpPayload(ctx, info)}})
if err != nil {
|
migrated vsys handling to pfsso
|
inverse-inc_packetfence
|
train
|
e81eb35cb3a4ea62e7337819ccbceca0bfd80408
|
diff --git a/src/Entity/EntityRepository.php b/src/Entity/EntityRepository.php
index <HASH>..<HASH> 100644
--- a/src/Entity/EntityRepository.php
+++ b/src/Entity/EntityRepository.php
@@ -11,6 +11,7 @@ namespace Gibilogic\CrudBundle\Entity;
use Doctrine\ORM\EntityRepository as BaseRepository;
use Doctrine\ORM\AbstractQuery;
+use Doctrine\ORM\Query;
use Doctrine\ORM\QueryBuilder;
use Doctrine\ORM\Tools\Pagination\Paginator;
@@ -228,6 +229,25 @@ class EntityRepository extends BaseRepository
}
/**
+ * Returns TRUE if the join string is already present inside the query builder, FALSE otherwise.
+ *
+ * @param \Doctrine\ORM\QueryBuilder $queryBuilder
+ * @param string $joinString
+ * @return boolean
+ */
+ protected function hasJoin(QueryBuilder $queryBuilder, $joinString)
+ {
+ /* @var \Doctrine\ORM\Query\Expr\Join $joinExpression */
+ foreach ($queryBuilder->getDQLPart('join') as $joinExpression) {
+ if ($joinExpression->getJoin() == $joinString) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ /**
* Returns TRUE if the field is sortable, FALSE otherwise.
*
* @param string $field
|
Added 'hasJoin' method to the 'EntityRepository' class
|
gibilogic_crud-bundle
|
train
|
969f95088a3a1e490460ad53931b24e46e6ca907
|
diff --git a/api/src/main/java/io/opencensus/common/Scope.java b/api/src/main/java/io/opencensus/common/Scope.java
index <HASH>..<HASH> 100644
--- a/api/src/main/java/io/opencensus/common/Scope.java
+++ b/api/src/main/java/io/opencensus/common/Scope.java
@@ -26,7 +26,4 @@ package io.opencensus.common;
* </pre>
*/
@SuppressWarnings("deprecation")
-public interface Scope extends NonThrowingCloseable {
- @Override
- void close();
-}
+public interface Scope extends NonThrowingCloseable {}
|
Remove Scope.close() method.
|
census-instrumentation_opencensus-java
|
train
|
0819bb7d23c89f112ed7fe7de45a77a08b97f957
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
setup(
name = "wagtail-blog",
- version = "1.6.1",
+ version = "1.6.2",
author = "David Burke",
author_email = "david@thelabnyc.com",
description = ("A wordpress like blog app implemented in wagtail"),
|
Python wheel FUCK YOU and your binary files that never existed
|
thelabnyc_wagtail_blog
|
train
|
c0b7cf1b6b0831098728310f3dd5a56c3e8c88f5
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -56,8 +56,8 @@ module.exports = {
var fileOutputPattern = this.readConfig('fileOutputPattern');
var inputPath = path.join(distDir, fileInputPattern);
var outputPath = path.join(distDir, fileOutputPattern);
- var absoluteInputPath = path.join(root, inputPath);
- var absoluteOutputPath = path.join(root, outputPath);
+ var absoluteInputPath = path.resolve(root, inputPath);
+ var absoluteOutputPath = path.resolve(root, outputPath);
this.log('generating `' + outputPath + '` from `' + inputPath + '`', { verbose: true });
diff --git a/tests/unit/index-nodetest.js b/tests/unit/index-nodetest.js
index <HASH>..<HASH> 100644
--- a/tests/unit/index-nodetest.js
+++ b/tests/unit/index-nodetest.js
@@ -1,5 +1,7 @@
'use strict';
+/* globals require, describe, before, beforeEach, it, process */
+
var fs = require('fs');
var path = require('path');
var assert = require('ember-cli/tests/helpers/assert');
@@ -8,9 +10,11 @@ describe('the deploy plugin object', function() {
var fakeRoot;
var plugin;
var promise;
+ var distDir;
before(function() {
- fakeRoot = process.cwd() + '/tests/fixtures';
+ fakeRoot = process.cwd() + '/tests/fixtures';
+ distDir = 'dist';
});
beforeEach(function() {
@@ -33,7 +37,7 @@ describe('the deploy plugin object', function() {
fileInputPattern: 'index.html',
fileOutputPattern: 'index.json',
distDir: function(context) {
- return 'dist';
+ return distDir;
},
projectRoot: function(context) {
return fakeRoot;
@@ -80,5 +84,22 @@ describe('the deploy plugin object', function() {
assert.deepEqual(result.distFiles, ['index.json']);
});
});
+
+ describe('when the distDir is an absolute path', function() {
+ before(function() {
+ distDir = fakeRoot + '/dist';
+ });
+
+ it('still works', function() {
+ return assert.isFulfilled(promise)
+ .then(function() {
+ var json = require(fakeRoot + '/dist/index.json');
+
+ assert.equal(Object.keys(json).length, 4);
+ });
+ });
+
+
+ });
});
});
|
Fix case distDir is an absolute path
Before, if `distDir` was an absolute path, `absoluteInputPath` would end up
having rootPath doubled. This uses `path.resolve` instead of `path.join` to
resolve that issue.
|
ember-cli-deploy_ember-cli-deploy-json-config
|
train
|
fa32558f9a8072f60063bc9e6836dc4f28700ec7
|
diff --git a/dock/cli/main.py b/dock/cli/main.py
index <HASH>..<HASH> 100644
--- a/dock/cli/main.py
+++ b/dock/cli/main.py
@@ -56,6 +56,9 @@ def cli_build_image(args):
response.return_code = -1
else:
response.return_code = 0
+
+ if response.return_code != 0:
+ logger.error("build failed")
sys.exit(response.return_code)
|
indicate that the build failed
So that non-verbose logs for successful and failed build don't look
exactly the same.
|
projectatomic_atomic-reactor
|
train
|
bc4c05e416603f93feff3df990b25c755706f94d
|
diff --git a/src/main/java/io/scalecube/gateway/websocket/WebSocketServerWhyNotSendingEchoRunner.java b/src/main/java/io/scalecube/gateway/websocket/WebSocketServerWhyNotSendingEchoRunner.java
index <HASH>..<HASH> 100644
--- a/src/main/java/io/scalecube/gateway/websocket/WebSocketServerWhyNotSendingEchoRunner.java
+++ b/src/main/java/io/scalecube/gateway/websocket/WebSocketServerWhyNotSendingEchoRunner.java
@@ -1,35 +1,15 @@
package io.scalecube.gateway.websocket;
-import io.netty.handler.codec.http.websocketx.WebSocketFrame;
-
import java.net.InetSocketAddress;
-import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
-import reactor.ipc.netty.NettyPipeline;
-import reactor.ipc.netty.http.websocket.WebsocketInbound;
-import reactor.ipc.netty.http.websocket.WebsocketOutbound;
public class WebSocketServerWhyNotSendingEchoRunner {
public static void main(String[] args) throws InterruptedException {
WebSocketServer server = new WebSocketServer(new WebSocketAcceptor() {
@Override
public Mono<Void> onConnect(WebSocketSession session) {
- WebsocketInbound inbound = session.getInbound();
- WebsocketOutbound outbound = session.getOutbound();
-
- Flux<WebSocketFrame> receiveFlux = inbound
- .aggregateFrames()
- .receiveFrames()
- .doOnNext(WebSocketFrame::retain);
-
- // Flux<ServiceMessage> receiveFlux = session.receive();
-
- return outbound
- .options(NettyPipeline.SendOptions::flushOnEach)
- /* .sendObject(receiveFlux.map(message -> (ByteBuf) message.data()).map(BinaryWebSocketFrame::new).log()) */
- .sendObject(receiveFlux.log())
- .then();
+ return session.send(session.receive());
}
@Override
|
Added WebSocketServerWhyDoesThisSendingEchoRunner.java; confuused why does this impl sends echoes back
|
scalecube_scalecube-services
|
train
|
91d0c36d0176d1eeb037c21328d98eacf03fc281
|
diff --git a/src/main/java/com/github/jsonj/JsonObject.java b/src/main/java/com/github/jsonj/JsonObject.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/github/jsonj/JsonObject.java
+++ b/src/main/java/com/github/jsonj/JsonObject.java
@@ -35,6 +35,8 @@ import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
+import org.apache.commons.lang.Validate;
+
import com.github.jsonj.exceptions.JsonTypeMismatchException;
import com.github.jsonj.tools.JsonSerializer;
import com.jillesvangurp.efficientstring.EfficientString;
@@ -144,17 +146,6 @@ public class JsonObject implements Map<String, JsonElement>, JsonElement {
return JsonSerializer.serialize(this, true);
}
- /**
- * Variant of put that can take a Object instead of a primitive. The normal put inherited from LinkedHashMap only takes JsonElement instances.
- * @param key
- * @param value any object that is accepted by the JsonPrimitive constructor.
- * @return the JsonElement that was added.
- * @throws JsonTypeMismatchException if the value cannot be turned into a primitive.
- */
- public JsonElement put(final String key, final Object value) {
- return map.put(EfficientString.fromString(key), primitive(value));
- }
-
@Override
public boolean isObject() {
return true;
@@ -171,6 +162,33 @@ public class JsonObject implements Map<String, JsonElement>, JsonElement {
}
/**
+ * Variant of put that can take a Object instead of a primitive. The normal put inherited from LinkedHashMap only takes JsonElement instances.
+ * @param key
+ * @param value any object that is accepted by the JsonPrimitive constructor.
+ * @return the JsonElement that was added.
+ * @throws JsonTypeMismatchException if the value cannot be turned into a primitive.
+ */
+ public JsonElement put(final String key, final Object value) {
+ Validate.notNull(key);
+ Validate.notNull(value);
+ return map.put(EfficientString.fromString(key), primitive(value));
+ }
+
+ @Override
+ public JsonElement put(String key, JsonElement value) {
+ Validate.notNull(key);
+ Validate.notNull(value);
+ return map.put(EfficientString.fromString(key), value);
+ }
+
+ @Override
+ public void putAll(Map<? extends String, ? extends JsonElement> m) {
+ for(Entry<? extends String, ? extends JsonElement> e: m.entrySet()) {
+ map.put(EfficientString.fromString(e.getKey()), e.getValue());
+ }
+ }
+
+ /**
* Allows you to get the nth entry in the JsonObject. Please note that this method iterates over all the entries
* until it finds the nth, so getting the last element is probably going to be somewhat expensive, depending on the
* size of the collection. Also note that the entries in JsonObject are ordered by the order of insertion (it is a
@@ -200,7 +218,16 @@ public class JsonObject implements Map<String, JsonElement>, JsonElement {
return get(0);
}
- /**
+ @Override
+ public JsonElement get(Object key) {
+ if(key != null && key instanceof String) {
+ return map.get(EfficientString.fromString(key.toString()));
+ } else {
+ throw new IllegalArgumentException();
+ }
+ }
+
+ /**
* Get a json element at a particular path in an object structure.
* @param labels list of field names that describe the location to a particular json node.
* @return a json element at a particular path in an object or null if it can't be found.
@@ -596,15 +623,6 @@ public class JsonObject implements Map<String, JsonElement>, JsonElement {
}
@Override
- public JsonElement get(Object key) {
- if(key != null && key instanceof String) {
- return map.get(EfficientString.fromString(key.toString()));
- } else {
- throw new IllegalArgumentException();
- }
- }
-
- @Override
public Set<String> keySet() {
Set<EfficientString> keySet = map.keySet();
Set<String> keys = new HashSet<String>();
@@ -615,18 +633,6 @@ public class JsonObject implements Map<String, JsonElement>, JsonElement {
}
@Override
- public JsonElement put(String key, JsonElement value) {
- return map.put(EfficientString.fromString(key), value);
- }
-
- @Override
- public void putAll(Map<? extends String, ? extends JsonElement> m) {
- for(Entry<? extends String, ? extends JsonElement> e: m.entrySet()) {
- map.put(EfficientString.fromString(e.getKey()), e.getValue());
- }
- }
-
- @Override
public JsonElement remove(Object key) {
if(key != null && key instanceof String) {
return map.remove(EfficientString.fromString(key.toString()));
|
reorder methods, validate no null values are put
|
jillesvangurp_jsonj
|
train
|
d9977c447eae7ae9af83dcee6ef245a15c102de5
|
diff --git a/cuts/cutter.py b/cuts/cutter.py
index <HASH>..<HASH> 100644
--- a/cuts/cutter.py
+++ b/cuts/cutter.py
@@ -45,7 +45,7 @@ class Cutter(object):
except IndexError:
result.append(self.invalid_pos)
except ValueError:
- result.append(field)
+ result.append(str(field))
except TypeError:
result.extend(self._cut_range(line,int(field[0]),i))
@@ -66,17 +66,17 @@ class Cutter(object):
if i > 0:
updated_positions.append(self.separator)
start = self._groupval(ranger.group('start'))
- end = self._groupval(ranger.group('end')) + 1
+ end = self._groupval(ranger.group('end'))
if start and end:
- updated_positions.extend(self._extendrange(start,end))
+ updated_positions.extend(self._extendrange(start,end + 1))
# Since the number of positions on a line is unknown,
# send input to cause exception that can be caught and call
# _cut_range helper function
elif ranger.group('start'):
updated_positions.append([start])
else:
- updated_positions.extend(self._extendrange(1,end))
+ updated_positions.extend(self._extendrange(1,end + 1))
else:
updated_positions.append(positions[i])
try:
@@ -94,7 +94,7 @@ class Cutter(object):
Positive indicies need to be reduced by one to match with zero based
indexing.
- Zero is not a valid input, and as such will throw an exception.
+ Zero is not a valid input, and as such will throw a value error.
Arguments:
index - index to shift
@@ -104,7 +104,7 @@ class Cutter(object):
index -= 1
elif index == 0:
# Zero indicies should not be allowed by default.
- raise IndexError
+ raise ValueError
return index
def _cut_range(self,line,start,current_position):
@@ -116,6 +116,7 @@ class Cutter(object):
current_position - current position in main cut function
"""
result = []
+ print "here"
try:
for j in range(start,len(line)):
index = self._setup_index(j)
|
Fixed exception in case of 0 in numerical separator
|
jpweiser_cuts
|
train
|
a0cdfdc771cd4034f69c9a08a188cf6ba7b110c2
|
diff --git a/actionpack/lib/action_view/helpers/form_helper.rb b/actionpack/lib/action_view/helpers/form_helper.rb
index <HASH>..<HASH> 100644
--- a/actionpack/lib/action_view/helpers/form_helper.rb
+++ b/actionpack/lib/action_view/helpers/form_helper.rb
@@ -1096,6 +1096,14 @@ module ActionView
# create: "Create a {{model}}"
# update: "Confirm changes to {{model}}"
#
+ # It also searches for a key specific for the given object:
+ #
+ # en:
+ # helpers:
+ # submit:
+ # post:
+ # create: "Add {{model}}"
+ #
def submit(value=nil, options={})
value, options = nil, value if value.is_a?(Hash)
value ||= submit_default_value
@@ -1121,8 +1129,12 @@ module ActionView
@object_name.to_s.humanize
end
- I18n.t(:"helpers.submit.#{key}", :model => model,
- :default => "#{key.to_s.humanize} #{model}")
+ defaults = []
+ defaults << :"helpers.submit.#{object_name}.#{key}"
+ defaults << :"helpers.submit.#{key}"
+ defaults << "#{key.to_s.humanize} #{model}"
+
+ I18n.t(defaults.shift, :model => model, :default => defaults)
end
def nested_attributes_association?(association_name)
diff --git a/actionpack/test/template/form_helper_test.rb b/actionpack/test/template/form_helper_test.rb
index <HASH>..<HASH> 100644
--- a/actionpack/test/template/form_helper_test.rb
+++ b/actionpack/test/template/form_helper_test.rb
@@ -31,7 +31,10 @@ class FormHelperTest < ActionView::TestCase
:submit => {
:create => 'Create {{model}}',
:update => 'Confirm {{model}} changes',
- :submit => 'Save changes'
+ :submit => 'Save changes',
+ :another_post => {
+ :update => 'Update your {{model}}'
+ }
}
}
}
@@ -550,6 +553,21 @@ class FormHelperTest < ActionView::TestCase
I18n.locale = old_locale
end
+ def test_submit_with_object_and_nested_lookup
+ old_locale, I18n.locale = I18n.locale, :submit
+
+ form_for(:another_post, @post) do |f|
+ concat f.submit
+ end
+
+ expected = "<form action='http://www.example.com' method='post'>" +
+ "<input name='commit' id='another_post_submit' type='submit' value='Update your Post' />" +
+ "</form>"
+ assert_dom_equal expected, output_buffer
+ ensure
+ I18n.locale = old_locale
+ end
+
def test_nested_fields_for
form_for(:post, @post) do |f|
f.fields_for(:comment, @post) do |c|
|
Allow f.submit to be localized per object.
|
rails_rails
|
train
|
5b35385dbd35fb67dcc33ae7b32120bea06eac85
|
diff --git a/nipap-cli/nipap_cli/nipap_cli.py b/nipap-cli/nipap_cli/nipap_cli.py
index <HASH>..<HASH> 100755
--- a/nipap-cli/nipap_cli/nipap_cli.py
+++ b/nipap-cli/nipap_cli/nipap_cli.py
@@ -54,7 +54,11 @@ def setup_connection():
print >> sys.stderr, "Please define the username, password, hostname and port in your .nipaprc under the section 'global'"
sys.exit(1)
- ao = pynipap.AuthOptions({'authoritative_source': 'nipap'})
+ ao = pynipap.AuthOptions({
+ 'authoritative_source': 'nipap',
+ 'username': os.getenv('NIPAP_IMPERSONATE_USERNAME') or cfg.get('global', 'username'),
+ 'full_name': os.getenv('NIPAP_IMPERSONATE_FULL_NAME'),
+ })
|
Add user impersonation in CLI via env
This adds the capability to impersonate another user (setting username
and full_name) via the CLI. It accomplishes this by reading the
environment variables NIPAP_IMPERSONATE_USERNAME and
NIPAP_IMPERSONATE_FULL_NAME. Naturally it only works when a user it
trusted.
|
SpriteLink_NIPAP
|
train
|
62c6d93b6d1db23444c67520237a363b9f28a596
|
diff --git a/tests/HTMLPurifier/Strategy/MakeWellFormedTest.php b/tests/HTMLPurifier/Strategy/MakeWellFormedTest.php
index <HASH>..<HASH> 100644
--- a/tests/HTMLPurifier/Strategy/MakeWellFormedTest.php
+++ b/tests/HTMLPurifier/Strategy/MakeWellFormedTest.php
@@ -9,6 +9,7 @@ class HTMLPurifier_Strategy_MakeWellFormedTest extends HTMLPurifier_StrategyHarn
function setUp() {
parent::setUp();
$this->obj = new HTMLPurifier_Strategy_MakeWellFormed();
+ $this->config = array();
}
function testNormalIntegration() {
@@ -176,6 +177,70 @@ Par
}
+ function testLinkify() {
+
+ $this->config = array('Core.AutoLinkify' => true);
+
+ $this->assertResult(
+ 'http://example.com',
+ '<a href="http://example.com">http://example.com</a>'
+ );
+
+ $this->assertResult(
+ '<b>http://example.com</b>',
+ '<b><a href="http://example.com">http://example.com</a></b>'
+ );
+
+ $this->assertResult(
+ 'This URL http://example.com is what you need',
+ 'This URL <a href="http://example.com">http://example.com</a> is what you need'
+ );
+
+ }
+
+ function testMultipleInjectors() {
+
+ $this->config = array('Core.AutoParagraph' => true, 'Core.AutoLinkify' => true);
+
+ $this->assertResult(
+ 'Foobar',
+ '<p>Foobar</p>'
+ );
+
+ $this->assertResult(
+ 'http://example.com',
+ '<p><a href="http://example.com">http://example.com</a></p>'
+ );
+
+ $this->assertResult(
+ '<b>http://example.com</b>',
+ '<p><b><a href="http://example.com">http://example.com</a></b></p>'
+ );
+
+ $this->assertResult(
+ '<b>http://example.com',
+ '<p><b><a href="http://example.com">http://example.com</a></b></p>'
+ );
+
+ $this->assertResult(
+'http://example.com
+
+http://dev.example.com',
+ '<p><a href="http://example.com">http://example.com</a></p><p><a href="http://dev.example.com">http://dev.example.com</a></p>'
+ );
+
+ $this->assertResult(
+ 'http://example.com <div>http://example.com</div>',
+ '<p><a href="http://example.com">http://example.com</a></p><div><a href="http://example.com">http://example.com</a></div>'
+ );
+
+ $this->assertResult(
+ 'This URL http://example.com is what you need',
+ '<p>This URL <a href="http://example.com">http://example.com</a> is what you need</p>'
+ );
+
+ }
+
}
?>
\ No newline at end of file
|
Add more unit tests; everything seems to be good, but I'm suspicious.
git-svn-id: <URL>
|
Masterjoa_HTMLPurifier-standalone
|
train
|
4adb4f0c0431a453ae05354aa69a227877b025a1
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,8 +1,10 @@
# master
+ * Chewy::Query#facets called without params performs the request and returns facets.
+
* Added `Type.template` dsl method for root objects dynamic templates definition. See [mapping.rb](lib/chewy/type/mapping.rb) for more details.
- * ActiveRecord adapter custom `primary_key` support (matthee).
+ * ActiveRecord adapter custom `primary_key` support (@matthee).
* Urgent update now clears association cache in ActiveRecord to ensure latest changes are imported.
@@ -26,9 +28,9 @@
# Version 0.2.2
- * Support for `none` scope (undr).
+ * Support for `none` scope (@undr).
- * Auto-resolved analyzers and analyzers repository (webgago):
+ * Auto-resolved analyzers and analyzers repository (@webgago):
```ruby
# Setting up analyzers repository:
diff --git a/lib/chewy/query.rb b/lib/chewy/query.rb
index <HASH>..<HASH> 100644
--- a/lib/chewy/query.rb
+++ b/lib/chewy/query.rb
@@ -242,8 +242,15 @@ module Chewy
# facets: {tags: {terms: {field: 'tags'}}, ages: {terms: {field: 'age'}}}
# }}
#
- def facets params
- chain { criteria.update_facets params }
+ # If called parameterless - returns result facets from ES performing request.
+ # Returns empty hash if no facets was requested or resulted.
+ #
+ def facets params = nil
+ if params
+ chain { criteria.update_facets params }
+ else
+ _response['facets'] || {}
+ end
end
# Marks the criteria as having zero records. This scope always returns empty array
diff --git a/spec/chewy/query_spec.rb b/spec/chewy/query_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/chewy/query_spec.rb
+++ b/spec/chewy/query_spec.rb
@@ -100,6 +100,37 @@ describe Chewy::Query do
specify { subject.facets(term: {field: 'hello'}).should_not == subject }
specify { subject.facets(term: {field: 'hello'}).criteria.facets.should include(term: {field: 'hello'}) }
specify { expect { subject.facets(term: {field: 'hello'}) }.not_to change { subject.criteria.facets } }
+
+ context 'results' do
+ before { stub_model(:city) }
+ let(:cities) { 10.times.map { |i| City.create! name: "name#{i}", rating: i % 3 } }
+
+ context do
+ before do
+ stub_index(:cities) do
+ define_type :city do
+ field :name
+ field :rating, type: 'integer'
+ field :nested, type: 'object', value: ->{ {name: name} }
+ end
+ end
+ end
+
+ before { CitiesIndex::City.import! cities }
+
+ specify { CitiesIndex.facets.should == {} }
+ specify { CitiesIndex.facets(ratings: {terms: {field: 'rating'}}).facets.should == {
+ 'ratings' => {
+ '_type' => 'terms', 'missing' => 0, 'total' => 10, 'other' => 0,
+ 'terms' => [
+ {'term' => 0, 'count' => 4},
+ {'term' => 2, 'count' => 3},
+ {'term' => 1, 'count' => 3}
+ ]
+ }
+ } }
+ end
+ end
end
describe '#filter' do
|
Added Chewy::Query#facets method used for result facets access
|
toptal_chewy
|
train
|
2cd6f356016fd6c20eb68d20fe2e256d243149d1
|
diff --git a/unfriendly/__init__.py b/unfriendly/__init__.py
index <HASH>..<HASH> 100644
--- a/unfriendly/__init__.py
+++ b/unfriendly/__init__.py
@@ -1,2 +1,2 @@
-VERSION = (0, 2, 3)
+VERSION = (0, 2, 4)
__version__ = '.'.join([str(x) for x in VERSION])
|
Bumped version -> <I>
|
tomatohater_django-unfriendly
|
train
|
badcf125ba4cde5c9f293e689996cff2f6acb87a
|
diff --git a/txtwitter/tests/fake_twitter.py b/txtwitter/tests/fake_twitter.py
index <HASH>..<HASH> 100644
--- a/txtwitter/tests/fake_twitter.py
+++ b/txtwitter/tests/fake_twitter.py
@@ -690,6 +690,9 @@ class FakeTwitterAPI(object):
follow.source_id == self._user_id_str or
follow.target_id == self._user_id_str)
+ def userstream_unfollow_predicate(follow):
+ return follow.source_id == self._user_id_str
+
def userstream_tweet_predicate(tweet):
if tweet.user_id_str == self._user_id_str:
return True
@@ -710,6 +713,7 @@ class FakeTwitterAPI(object):
stream.add_message_type('tweet', userstream_tweet_predicate)
stream.add_message_type('dm', userstream_dm_predicate)
stream.add_message_type('follow', userstream_follow_predicate)
+ stream.add_message_type('unfollow', userstream_unfollow_predicate)
# TODO: Proper friends.
stream.deliver({'friends_str': []})
diff --git a/txtwitter/tests/test_fake_twitter.py b/txtwitter/tests/test_fake_twitter.py
index <HASH>..<HASH> 100644
--- a/txtwitter/tests/test_fake_twitter.py
+++ b/txtwitter/tests/test_fake_twitter.py
@@ -1038,6 +1038,33 @@ class TestFakeTwitterAPI(TestCase):
resp.finished()
self.assertEqual(twitter.streams, {})
+ def test_userstream_user_with_unfollows(self):
+ twitter = self._FakeTwitterData()
+ twitter.add_user('1', 'fakeuser', 'Fake User')
+ twitter.add_user('2', 'fakeuser2', 'Fake User')
+ twitter.add_user('3', 'fakeuser3', 'Fake User')
+
+ follow1 = twitter.add_follow('1', '2')
+ twitter.add_follow('2', '1')
+ twitter.add_follow('2', '3')
+
+ api = self._FakeTwitterAPI(twitter, '1')
+ messages = []
+ resp = api.userstream_user(stringify_friend_ids='true', with_='user')
+ self._process_stream_response(resp, messages.append)
+ messages.pop(0)
+
+ twitter.del_follow('1', '2')
+ twitter.del_follow('2', '1')
+ twitter.del_follow('2', '3')
+
+ self.assertEqual(
+ messages,
+ [f for f in twitter.to_dicts(follow1, event='unfollow')])
+
+ resp.finished()
+ self.assertEqual(twitter.streams, {})
+
# TODO: Replies
# TODO: More tests for fake userstream_user()
|
Add unfollow events to FakeTwitterAPI's user stream
|
jerith_txTwitter
|
train
|
303d228a19712d4bd1da6c82af0989abcee611c7
|
diff --git a/quilt/tools/store.py b/quilt/tools/store.py
index <HASH>..<HASH> 100644
--- a/quilt/tools/store.py
+++ b/quilt/tools/store.py
@@ -143,6 +143,7 @@ class PackageStore(object):
packages = [
(user, pkg[:-len(self.PACKAGE_FILE_EXT)])
for user in os.listdir(self._path)
+ if os.path.isdir(os.path.join(self._path, user))
for pkg in os.listdir(os.path.join(self._path, user))
if pkg.endswith(self.PACKAGE_FILE_EXT)]
return packages
|
Don't blow up if `quilt_packages` contains random files (#<I>)
|
quiltdata_quilt
|
train
|
db88cbe465cdc6d0b5618ed18c127713cf9e896e
|
diff --git a/tests/TestCase.php b/tests/TestCase.php
index <HASH>..<HASH> 100644
--- a/tests/TestCase.php
+++ b/tests/TestCase.php
@@ -2,10 +2,10 @@
namespace PHPStan;
-use Nette\DI\Container;
use PHPStan\Broker\Broker;
use PHPStan\Cache\Cache;
use PHPStan\Cache\MemoryCacheStorage;
+use PHPStan\DependencyInjection\ContainerFactory;
use PHPStan\File\FileHelper;
use PHPStan\Parser\DirectParser;
use PHPStan\Parser\FunctionCallStatementFinder;
@@ -47,12 +47,15 @@ abstract class TestCase extends \PHPUnit\Framework\TestCase
public function getContainer(): \Nette\DI\Container
{
- return self::$container;
- }
+ if (self::$container === null) {
+ $rootDir = __DIR__ . '/..';
+ $containerFactory = new ContainerFactory($rootDir);
+ self::$container = $containerFactory->create($rootDir . '/tmp', [
+ $containerFactory->getConfigDirectory() . '/config.level7.neon',
+ ]);
+ }
- public static function setContainer(Container $container)
- {
- self::$container = $container;
+ return self::$container;
}
public function getParser(): \PHPStan\Parser\Parser
diff --git a/tests/bootstrap.php b/tests/bootstrap.php
index <HASH>..<HASH> 100644
--- a/tests/bootstrap.php
+++ b/tests/bootstrap.php
@@ -1,19 +1,10 @@
<?php declare(strict_types = 1);
-use PHPStan\DependencyInjection\ContainerFactory;
-
require_once __DIR__ . '/../vendor/autoload.php';
require_once __DIR__ . '/TestCase.php';
require_once __DIR__ . '/PHPStan/Rules/AbstractRuleTest.php';
require_once __DIR__ . '/PHPStan/Rules/AlwaysFailRule.php';
require_once __DIR__ . '/PHPStan/Rules/DummyRule.php';
-$rootDir = __DIR__ . '/..';
-$containerFactory = new ContainerFactory($rootDir);
-$container = $containerFactory->create($rootDir . '/tmp', [
- $containerFactory->getConfigDirectory() . '/config.level7.neon',
-]);
-
-PHPStan\TestCase::setContainer($container);
PHPStan\Type\TypeCombinator::setUnionTypesEnabled(true);
require_once __DIR__ . '/phpstan-bootstrap.php';
|
Container is created directly inside TestCase
|
phpstan_phpstan
|
train
|
8021029b95371c09e4b6edf7e296d6906cb90c5f
|
diff --git a/frontend/app/map/route/route.js b/frontend/app/map/route/route.js
index <HASH>..<HASH> 100644
--- a/frontend/app/map/route/route.js
+++ b/frontend/app/map/route/route.js
@@ -92,6 +92,9 @@ fm.app.factory("fmMapRoute", function(fmUtils, $uibModal, $compile, $timeout, $r
function registerMarkerHandlers(marker, route) {
marker.on("dblclick", function() {
+ if(route.routePoints.length <= 2)
+ return;
+
let index = markers.indexOf(marker);
route.routePoints.splice(index, 1);
markers[index].remove();
|
Do not allow removing route points if there are only 2
|
FacilMap_facilmap2
|
train
|
c6d61f9b43d89dc280b6089dee674daa8c180146
|
diff --git a/example/src/main/java/io/netty/example/http2/server/Http2ServerInitializer.java b/example/src/main/java/io/netty/example/http2/server/Http2ServerInitializer.java
index <HASH>..<HASH> 100644
--- a/example/src/main/java/io/netty/example/http2/server/Http2ServerInitializer.java
+++ b/example/src/main/java/io/netty/example/http2/server/Http2ServerInitializer.java
@@ -70,7 +70,7 @@ public class Http2ServerInitializer extends ChannelInitializer<SocketChannel> {
ch.pipeline().addLast(upgradeHandler);
ch.pipeline().addLast(new SimpleChannelInboundHandler<HttpMessage>() {
@Override
- protected void messageReceived(ChannelHandlerContext ctx, HttpMessage msg) throws Exception {
+ protected void channelRead0(ChannelHandlerContext ctx, HttpMessage msg) throws Exception {
// If this handler is hit then no upgrade has been attempted and the client is just talking HTTP.
System.err.println("Directly talking: " + msg.protocolVersion() + " (no upgrade was attempted)");
ctx.pipeline().replace(this, "http-hello-world",
|
Fix merge issue introduced by <I>c0d<I>
Motiviation:
Interface changes between master and <I> branch resulted in a compile failure.
Modifications:
- change messageReceived to channelRead0
Result:
No more compile error.
|
netty_netty
|
train
|
c73fb088d6b5bfa46f066f0a2602c05eb3ad7663
|
diff --git a/gridtk/tools.py b/gridtk/tools.py
index <HASH>..<HASH> 100644
--- a/gridtk/tools.py
+++ b/gridtk/tools.py
@@ -29,7 +29,7 @@ except ImportError:
A list of valid values for this column
"""
- if values is None or len(values) is 0:
+ if values is None or len(values) == 0:
raise AssertionError('Enum requires a list of values')
self.values = values[:]
|
Fix a warning; value is 0 is replaced with value == 0
|
bioidiap_gridtk
|
train
|
e78f81a2695440f0108c68363d47612b2ae48e3a
|
diff --git a/Observer/Adminhtml/EmailTemplates.php b/Observer/Adminhtml/EmailTemplates.php
index <HASH>..<HASH> 100755
--- a/Observer/Adminhtml/EmailTemplates.php
+++ b/Observer/Adminhtml/EmailTemplates.php
@@ -96,7 +96,7 @@ class EmailTemplates implements \Magento\Framework\Event\ObserverInterface
$this->storeId = (empty($store))? '0' : $store;
//important use default, website or store when it's present as an appendix to the template code
if (! is_numeric($website) && ! is_numeric($store)) {
- $this->storeCode = 'default';
+ $this->storeCode = 'admin';
} elseif (! is_numeric($store)) {
$this->storeCode = $this->storeManager->getWebsite($this->websiteId)->getCode();
} else {
@@ -123,8 +123,9 @@ class EmailTemplates implements \Magento\Framework\Event\ObserverInterface
//remove the config for dotmailer template
$this->removeConfigValue($template->templateEmailConfigMapping[$templateCode]);
//delete the dotmailer template when it's unmapped
- $templateCodeWithStoreId = $template->getTemplateCodeWithCodeName($templateCode, $this->storeCode);
- $template->deleteTemplateByCode($templateCodeWithStoreId);
+ $templateCodeWithStoreCode =
+ $template->getTemplateCodeWithCodeName($templateCode, $this->storeCode);
+ $template->deleteTemplateByCode($templateCodeWithStoreCode);
}
}
}
diff --git a/Plugin/TemplatePlugin.php b/Plugin/TemplatePlugin.php
index <HASH>..<HASH> 100644
--- a/Plugin/TemplatePlugin.php
+++ b/Plugin/TemplatePlugin.php
@@ -5,6 +5,10 @@ namespace Dotdigitalgroup\Email\Plugin;
class TemplatePlugin
{
/**
+ * @var
+ */
+ private $templateCode;
+ /**
* @var \Magento\Framework\Registry
*/
private $registry;
@@ -27,25 +31,35 @@ class TemplatePlugin
*/
public function afterGetData(\Magento\Email\Model\Template $subject, $result, ...$args)
{
+ //get the template code value
+ if (! empty($args)) {
+ if ($args[0] == 'template_code') {
+ $this->templateCode = $result;
+ }
+ }
+
//get data before saving
if ($this->registry->registry('dotmailer_saving_data')) {
//saving array values
if (empty($args)) {
+ //save template id for email sending to update the sender name and sender email saved on template level.
if (isset($result['template_id'])) {
$this->saveTemplateIdInRegistry($result['template_id']);
}
$templateText = $result['template_text'];
//compress text
- if (! $this->isStringCompressed($templateText)) {
+ if (!$this->isStringCompressed($templateText) && $this->isDotmailerTemplate($result['template_code'])) {
$result['template_text'] = $this->compresString($templateText);
}
} else {
//saving string value
- $templateText = $result;
$field = $args[0];
- //check for correct field
- if ($field == 'template_text' && ! $this->isStringCompressed($templateText)) {
- $result = $this->compresString($templateText);
+
+ //compress the text body when is a dotmailer template
+ if ($field == 'template_text' && ! $this->isStringCompressed($result)
+ && $this->isDotmailerTemplate($this->templateCode)
+ ) {
+ $result = $this->compresString($result);
}
if ($field == 'template_id') {
$this->saveTemplateIdInRegistry($result);
@@ -102,7 +116,7 @@ class TemplatePlugin
private function isStringCompressed($string)
{
//check if the data is compressed
- if (substr($string, 0, 1) == 'e') {
+ if (substr($string, 0, 1) == 'e' && substr_count($string, ' ') == 0) {
return true;
}
@@ -137,5 +151,22 @@ class TemplatePlugin
$this->registry->register('dotmailer_current_template_id', $templateId);
}
}
+
+ /**
+ * Check if the template code is containing dotmailer.
+ *
+ * @param $templateCode
+ * @return bool
+ */
+ private function isDotmailerTemplate($templateCode)
+ {
+ preg_match("/\(dotmailer\)/", $templateCode, $matches);
+
+ if (count($matches)) {
+ return true;
+ }
+
+ return false;
+ }
}
|
Merged PR <I>: Encode/decode compress/decompress only dotmailer templates
encode/decode compress/decompress only dotmailer templates
Compression and decompression should be done only on matching dotmailer templates
Added a regex for template code to match the dotmailer.
Related work items: #<I>
|
dotmailer_dotmailer-magento2-extension
|
train
|
f825e763715b615b077296cb366863e9b40e0513
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ import setuptools
setuptools.setup(
name='brozzler',
- version='1.1.dev45',
+ version='1.1b3.dev45',
description='Distributed web crawling with browsers',
url='https://github.com/internetarchive/brozzler',
author='Noah Levitt',
|
give master a version number considered later than the one up on pypi (<I>b3.de<I> > <I>b2)
|
internetarchive_brozzler
|
train
|
f170d5ebb729c44f65ef651dd64ba5d4cf669f7a
|
diff --git a/phoebe/backend/mesh.py b/phoebe/backend/mesh.py
index <HASH>..<HASH> 100644
--- a/phoebe/backend/mesh.py
+++ b/phoebe/backend/mesh.py
@@ -938,6 +938,7 @@ class ScaledProtoMesh(ProtoMesh):
super(ScaledProtoMesh, self).__init__(keys=keys, **kwargs)
if scale is not None:
+ self._copy_roche_values()
self._scale_mesh(scale)
@classmethod
@@ -1088,8 +1089,8 @@ class Mesh(ScaledProtoMesh):
mesh = cls(**scaledproto_mesh.items())
- # roche coordinates have already been copied
- mesh._copy_roche_values()
+ # roche coordinates have already been copied,
+ # so do NOT call mesh._copy_roche_values() here
mesh._place_in_orbit(pos, vel, euler, rotation_vel)
if hasattr(scaledproto_mesh, '_label_envelope'):
|
fix to last commit - copying roche values needs to happen in the init
not in from_scaledproto... otherwise the roche values will include the scale which is... well, wrong
|
phoebe-project_phoebe2
|
train
|
bd91319304f372780839a44dadb12ecb8af68b7b
|
diff --git a/Collection.php b/Collection.php
index <HASH>..<HASH> 100755
--- a/Collection.php
+++ b/Collection.php
@@ -350,7 +350,7 @@ class Collection implements ArrayAccess, ArrayableInterface, Countable, Iterator
{
$keys = array_rand($this->items, $amount);
- return is_array($keys) ? array_intersect_key($this->items, $keys) : $this->items[$keys];
+ return is_array($keys) ? array_intersect_key($this->items, array_flip($keys)) : $this->items[$keys];
}
/**
|
Collection::random() not actually random
array_intersect_key() is incorrectly using the output of array_rand() which is an array with zero based, consecutive keys (e.g. [0=>n, 1=>n, 2=>n, 3=>n]) of length $amount as the challenge. this resulting in identical results with calls on identical collections. flipping the output of array_rand() so that the random values are used as challenging keys with array_intersect_key() fixes this issue.
|
illuminate_support
|
train
|
35e283831de5401c8578244d616bd26bd245f78c
|
diff --git a/faq-bundle/src/Resources/contao/system/modules/rep_base/RepositorySettings.php b/faq-bundle/src/Resources/contao/system/modules/rep_base/RepositorySettings.php
index <HASH>..<HASH> 100755
--- a/faq-bundle/src/Resources/contao/system/modules/rep_base/RepositorySettings.php
+++ b/faq-bundle/src/Resources/contao/system/modules/rep_base/RepositorySettings.php
@@ -13,6 +13,7 @@ define('REPOSITORY_SOAPCACHE', true);
// valid core versions in descending order
define('REPOSITORY_COREVERSIONS',
+ '20060039,20060039;'. // 2.6.3 stable
'20060029,20060029;'. // 2.6.2 stable
'20060019,20060019;'. // 2.6.1 stable
'20060009,20060009;'. // 2.6.0 stable
|
[Faq] Version <I>
|
contao_contao
|
train
|
b91f30acd2f7eab63c8f5d75b9dc361042fea332
|
diff --git a/packages/scriptappy-from-jsdoc/src/transformer.js b/packages/scriptappy-from-jsdoc/src/transformer.js
index <HASH>..<HASH> 100644
--- a/packages/scriptappy-from-jsdoc/src/transformer.js
+++ b/packages/scriptappy-from-jsdoc/src/transformer.js
@@ -200,7 +200,11 @@ function checkTypes(obj, priv, cfg) {
function transform({ ids, priv }, cfg) {
const entries = {};
const definitions = {};
- Object.keys(ids).forEach(longname => {
+ Object.keys(ids).sort((a, b) => {
+ const aa = a.toLowerCase();
+ const bb = b.toLowerCase();
+ return aa > bb ? 1 : (bb > aa ? -1 : 0); // eslint-disable-line
+ }).forEach(longname => {
ids[longname].forEach((d, idx) => {
// const d = ids[longname];
const pr = priv[longname][idx];
|
feat(from-jsdoc): sort entries alphabetically
|
miralemd_scriptappy
|
train
|
cb1f7ed4d1cde9c050c1c05ab7a8939038553556
|
diff --git a/ca/django_ca/management/commands/dump_cert.py b/ca/django_ca/management/commands/dump_cert.py
index <HASH>..<HASH> 100644
--- a/ca/django_ca/management/commands/dump_cert.py
+++ b/ca/django_ca/management/commands/dump_cert.py
@@ -31,7 +31,7 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
- '-f', '--format', choices=['pem', 'asn1', 'text'], default='pem',
+ '-f', '--format', choices=['pem', 'asn1', 'text', 'der'], default='pem',
help='The format to use, default is %(default)s.')
parser.add_argument('serial', help='''The serial of the certificate to dump.
The "list_certs" command lists all known certificates.''')
@@ -47,7 +47,7 @@ class Command(BaseCommand):
format = options.get('format')
if format == 'pem':
data = cert.pub.encode('utf-8')
- elif format == 'asn1':
+ elif format == 'asn1' or format == 'der':
data = crypto.dump_certificate(crypto.FILETYPE_ASN1, cert.x509)
elif format == 'text':
data = crypto.dump_certificate(crypto.FILETYPE_TEXT, cert.x509)
diff --git a/ca/django_ca/management/commands/dump_crl.py b/ca/django_ca/management/commands/dump_crl.py
index <HASH>..<HASH> 100644
--- a/ca/django_ca/management/commands/dump_crl.py
+++ b/ca/django_ca/management/commands/dump_crl.py
@@ -33,7 +33,7 @@ class Command(BaseCommand):
parser.add_argument(
'-d', '--days', type=int,
help="The number of days until the next update of this CRL (default: 100).")
- parser.add_argument('-t', '--type', choices=['pem', 'asn1', 'text'],
+ parser.add_argument('-t', '--type', choices=['pem', 'asn1', 'text', 'der'],
help="Format of the CRL file (default: pem).")
parser.add_argument('--digest',
help="The name of the message digest to use (default: sha512).")
@@ -44,6 +44,8 @@ class Command(BaseCommand):
if options['days']:
kwargs['days'] = options['days']
if options['type']:
+ if options.get('type') == 'der':
+ options['type'] = 'asn1'
kwargs['type'] = getattr(crypto, 'FILETYPE_%s' % options['type'].upper())
if options['digest']:
kwargs['digest'] = bytes(options['digest'], 'utf-8')
|
ASN1 is an alias for DER
|
mathiasertl_django-ca
|
train
|
71a2de6fb9bdfcb45e22ca128730944be1df39be
|
diff --git a/parse.go b/parse.go
index <HASH>..<HASH> 100644
--- a/parse.go
+++ b/parse.go
@@ -116,21 +116,44 @@ func (p parser) parsePlural(field string, rules ...interface{}) (r string, err e
return
}
-func getCount(v interface{}, field string) (count interface{}, has bool) {
+func getCount(v interface{}, field string) (interface{}, bool) {
+ if len(field) == 0 {
+ return nil, false
+ }
+
+ if field != "." {
+ fieldParts := strings.Split(field, ".")
+ fieldPartsLen := len(fieldParts)
+ for i := 0; i < fieldPartsLen; i++ {
+ fieldPart := fieldParts[i]
+ rv := reflect.Indirect(reflect.ValueOf(v))
+ switch rv.Kind() {
+ case reflect.Struct:
+ f := rv.FieldByName(fieldPart)
+ if f.IsValid() {
+ v = f.Interface()
+ continue
+ }
+ return nil, false
+ case reflect.Map:
+ f := rv.MapIndex(reflect.ValueOf(fieldPart))
+ if f.IsValid() {
+ v = f.Interface()
+ continue
+ }
+ return nil, false
+ case reflect.Invalid:
+ return nil, false
+ }
+ }
+ }
+
rv := reflect.ValueOf(v)
switch rv.Kind() {
- case reflect.Struct:
- f := rv.FieldByName(field)
- if has = f.IsValid(); has {
- count = f.Interface()
- }
- return
- case reflect.Map:
- f := rv.MapIndex(reflect.ValueOf(field))
- if has = f.IsValid(); has {
- count = f.Interface()
- }
- return
+ case reflect.Array, reflect.Chan, reflect.Slice:
+ v = rv.Len()
+ case reflect.Map, reflect.Struct:
+ return nil, false
}
- return
+ return v, true
}
diff --git a/parse_test.go b/parse_test.go
index <HASH>..<HASH> 100644
--- a/parse_test.go
+++ b/parse_test.go
@@ -7,6 +7,12 @@ import (
"testing"
)
+type testCart struct {
+ Name string
+ Items []string
+ NumberOfItems int32
+}
+
func TestParse(t *testing.T) {
cases := []struct {
locale string
@@ -34,6 +40,34 @@ func TestParse(t *testing.T) {
},
{
locale: "en",
+ text: `{{p "Cart.Items" (one "1 item") (other "{{len .Cart.Items}} items")}} in your cart; {{p "Cart.NumberOfItems" (one "1 item") (other "{{.Cart.NumberOfItems}} items")}} in your cart.`,
+ data: []interface{}{map[string]interface{}{"Cart": struct {
+ Name string
+ Items []string
+ NumberOfItems int32
+ }{Name: "Mr Someone", Items: []string{"Item 1", "Item 2"}, NumberOfItems: 4}}},
+ want: "2 items in your cart; 4 items in your cart.",
+ },
+ {
+ locale: "en",
+ text: `{{p "Cart2.Items" (one "1 item") (other "{{len .Cart2.Items}} items")}} in your cart; {{p "Cart2.NumberOfItems" (one "1 item") (other "{{.Cart2.NumberOfItems}} items")}} in your cart.`,
+ data: []interface{}{map[string]interface{}{"Cart2": &testCart{Name: "Test cart", Items: []string{"Item 3", "Item 4", "Item 5"}, NumberOfItems: 6}}},
+ want: "3 items in your cart; 6 items in your cart.",
+ },
+ {
+ locale: "en",
+ text: `{{p "." (one "1 item") (other "{{.}} items")}} in your cart.`,
+ data: []interface{}{4},
+ want: "4 items in your cart.",
+ },
+ {
+ locale: "en",
+ text: `{{p "." (one "1 item") (other "{{len .}} items")}} in your cart.`,
+ data: []interface{}{[]string{"Item 1", "Item 2"}},
+ want: "2 items in your cart.",
+ },
+ {
+ locale: "en",
text: `{{$1}} {{$2}} {{$1}}`,
data: []interface{}{"string1", "string2"},
want: "string1 string2 string1",
|
Make pluralization template more flexible
The first argument can now denote a struct or nested map path (separated
by dots or also just be "." to use the context object – so it's now
possible to pass a number. I also added support for anything that has a
length (array, channel, slice) and added tests for all of those. All the
existing tests still work, I didn't touch them so no BC breaks.
|
theplant_cldr
|
train
|
1a66130ae265d29777073d4fc79540acd8eae76e
|
diff --git a/lib/fasterer/scanners/method_call_scanner.rb b/lib/fasterer/scanners/method_call_scanner.rb
index <HASH>..<HASH> 100644
--- a/lib/fasterer/scanners/method_call_scanner.rb
+++ b/lib/fasterer/scanners/method_call_scanner.rb
@@ -132,6 +132,7 @@ module Fasterer
return unless body_method_call.arguments.count.zero?
return if body_method_call.has_block?
return if body_method_call.receiver.nil?
+ return if body_method_call.receiver.is_a?(Fasterer::Primitive)
return if body_method_call.receiver.name != method_call.block_argument_names.first
add_offense(:block_vs_symbol_to_proc)
diff --git a/spec/support/analyzer/18_block_vs_symbol_to_proc.rb b/spec/support/analyzer/18_block_vs_symbol_to_proc.rb
index <HASH>..<HASH> 100644
--- a/spec/support/analyzer/18_block_vs_symbol_to_proc.rb
+++ b/spec/support/analyzer/18_block_vs_symbol_to_proc.rb
@@ -11,6 +11,10 @@ route_sets.each do |route|
puts route.name
end
+route_sets.each do |routes|
+ [].finalize!
+end
+
route_sets.each(&:finalize!)
route_sets.each(:oppa) do |route|
|
Fix symbol to proc offense detector when a method is invoked on a range or array
Fixes the following issue: <URL>
|
DamirSvrtan_fasterer
|
train
|
16c2370352139edc0faa9f2e56ec0c3f8c593c6a
|
diff --git a/CHANGES.txt b/CHANGES.txt
index <HASH>..<HASH> 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,3 +1,6 @@
+* V1.9.4.1
+ * starting new development version
+
* V1.9.4
* fixed minor bug when searching repo by UID when repo doesn't exist
* added CK internal var "debug". If set to "yes", print error when CK entries are broken or ignore them (now default)
diff --git a/ck/kernel.py b/ck/kernel.py
index <HASH>..<HASH> 100644
--- a/ck/kernel.py
+++ b/ck/kernel.py
@@ -10,8 +10,8 @@
# CK kernel - we made it monolithic with a minimal set
# of common functions for performance reasons
-__version__ = "1.9.4" # We use 3 digits for the main (released) version and 4th digit for development revision
- # Do not use characters (to detect outdated version)!
+__version__ = "1.9.4.1" # We use 3 digits for the main (released) version and 4th digit for development revision
+ # Do not use characters (to detect outdated version)!
# Extra modules global for the whole kernel
import sys
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -10,7 +10,7 @@ from distutils.util import convert_path
setup(
name='ck',
- version='1.9.4',
+ version='1.9.4.1',
url='https://github.com/ctuning/ck/wiki',
license='BSD 3-clause',
author='Grigori Fursin and non-profit cTuning foundation',
|
starting new development version <I>
|
ctuning_ck
|
train
|
efc7ef7f6cbb894b756377da4fcdc47fb3492bf6
|
diff --git a/src/ContaoCommunityAlliance/DcGeneral/Contao/View/Contao2BackendView/BaseView.php b/src/ContaoCommunityAlliance/DcGeneral/Contao/View/Contao2BackendView/BaseView.php
index <HASH>..<HASH> 100644
--- a/src/ContaoCommunityAlliance/DcGeneral/Contao/View/Contao2BackendView/BaseView.php
+++ b/src/ContaoCommunityAlliance/DcGeneral/Contao/View/Contao2BackendView/BaseView.php
@@ -1088,6 +1088,9 @@ class BaseView implements BackendViewInterface, EventSubscriberInterface
$parent = $dataProvider->fetch($dataProvider->getEmptyConfig()->setId($into->getId()));
$environment->getController()->pasteInto($parent, $models, $this->getManualSortingProperty());
}
+ else if ($after == '0') {
+ $environment->getController()->pasteTop($models, $this->getManualSortingProperty());
+ }
else
{
throw new DcGeneralRuntimeException('Invalid parameters.');
|
Support ajax DND to top.
|
contao-community-alliance_dc-general
|
train
|
865f4107d8d3aa917c8a62b32fb774a0da3f4f66
|
diff --git a/bin/templates/scripts/cordova/lib/plugman/pluginHandlers.js b/bin/templates/scripts/cordova/lib/plugman/pluginHandlers.js
index <HASH>..<HASH> 100644
--- a/bin/templates/scripts/cordova/lib/plugman/pluginHandlers.js
+++ b/bin/templates/scripts/cordova/lib/plugman/pluginHandlers.js
@@ -94,13 +94,13 @@ var handlers = {
}
},
uninstall:function(obj, plugin, project, options) {
- var podsJSON = require(path.join(project.projectDir, 'pods.json'));
var src = obj.src;
if (!obj.custom) { //CB-9825 cocoapod integration for plugins
var keepFrameworks = keep_these_frameworks;
if (keepFrameworks.indexOf(src) < 0) {
if (obj.type === 'podspec') {
+ var podsJSON = require(path.join(project.projectDir, 'pods.json'));
if(podsJSON[src]) {
if(podsJSON[src].count > 1) {
podsJSON[src].count = podsJSON[src].count - 1;
|
CB-<I> - Cocoapod integration of plugins - fix for node-windows <I> and <I> unit-test failures
|
apache_cordova-ios
|
train
|
b41a18dd1e071aa1b485b909dee36b90092783fb
|
diff --git a/lib/winrm/shells/retryable.rb b/lib/winrm/shells/retryable.rb
index <HASH>..<HASH> 100644
--- a/lib/winrm/shells/retryable.rb
+++ b/lib/winrm/shells/retryable.rb
@@ -24,7 +24,7 @@ module WinRM
RETRYABLE_EXCEPTIONS = lambda do
[
Errno::EACCES, Errno::EADDRINUSE, Errno::ECONNREFUSED, Errno::ETIMEDOUT,
- Errno::ECONNRESET, Errno::ENETUNREACH, Errno::EHOSTUNREACH,
+ Errno::ECONNRESET, Errno::ENETUNREACH, Errno::EHOSTUNREACH, ::WinRM::WinRMWSManFault,
::WinRM::WinRMHTTPTransportError, ::WinRM::WinRMAuthorizationError,
HTTPClient::KeepAliveDisconnected, HTTPClient::ConnectTimeoutError
].freeze
|
adding wsman faults to retryable exceptions
|
WinRb_WinRM
|
train
|
d73b69488841d57d4e4778848de72b17a34bf064
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -202,7 +202,7 @@ function copy(obj) {
}
function toExpressParams(part) {
- return part.replace(/^\{([^\{]+)\}$/, ':$1');
+ return part.replace(/^\{([^\}]+)\}$/, ':$1');
}
function withNoDuplicates(arr) {
|
Correcting the regex for express param conversion.
* It was working before, but it wasn't written properly. Should match on [^}]+.
* Learned something new about javascript I.E. + can backtrack.
|
kogosoftwarellc_open-api
|
train
|
e95c8b696a43f163480e73766cb3f7fa8b52a971
|
diff --git a/src/widgets/layout_resizable/layout_resizable.js b/src/widgets/layout_resizable/layout_resizable.js
index <HASH>..<HASH> 100644
--- a/src/widgets/layout_resizable/layout_resizable.js
+++ b/src/widgets/layout_resizable/layout_resizable.js
@@ -315,7 +315,8 @@
index = sash._resid,
mappingLeft = this.mapping[index],
mappingRight = this.mapping[index + 1],
- centerDelta = ( ( mappingLeft[6] + mappingRight[6] ) / 2 ) - mappingLeft[6],
+ centerDeltaRaw = ( ( mappingLeft[6] + mappingRight[6] ) / 2 ) - mappingLeft[6],
+ centerDelta = centerDeltaRaw >= 0 ? Math.min ( centerDeltaRaw, mappingLeft[5] - mappingLeft[6] ) : centerDeltaRaw,
clickXY = $.eventXY ( event ),
x = this.isHorizontal ? clickXY.x + centerDelta : clickXY.x,
y = this.isHorizontal ? clickXY.y : clickXY.y + centerDelta;
|
Layout resizable: ensuring double-clicking a sash doesn’t have unwanted effects no other panes that should be unaffected by this
|
svelto_svelto
|
train
|
6ddef58aa24b5b3e90094d2b9ceb5ebc0b0bd594
|
diff --git a/src/request/create/Create.js b/src/request/create/Create.js
index <HASH>..<HASH> 100644
--- a/src/request/create/Create.js
+++ b/src/request/create/Create.js
@@ -142,7 +142,7 @@ class Create {
document.body.classList.add('loading');
const key = new Key(this._selectedEntropy.serialize());
// XXX Should we use utf8 encoding here instead?
- const passphrase = Nimiq.BufferUtils.fromAscii(this._passphrase);
+ const passphrase = this._passphrase.length > 0 ? Nimiq.BufferUtils.fromAscii(this._passphrase) : undefined;
await KeyStore.instance.put(key, passphrase);
const keyPath = request.defaultKeyPath;
diff --git a/src/request/sign-transaction/BaseLayout.js b/src/request/sign-transaction/BaseLayout.js
index <HASH>..<HASH> 100644
--- a/src/request/sign-transaction/BaseLayout.js
+++ b/src/request/sign-transaction/BaseLayout.js
@@ -93,7 +93,9 @@ class BaseLayout {
this._passphraseBox.on(
PassphraseBox.Events.SUBMIT,
- passphrase => this._onConfirm(request, resolve, reject, passphrase),
+ /** @param {string} [passphrase] */ passphrase => {
+ this._onConfirm(request, resolve, reject, passphrase);
+ },
);
this._passphraseBox.on(PassphraseBox.Events.CANCEL, () => window.history.back());
@@ -110,7 +112,7 @@ class BaseLayout {
* @param {ParsedSignTransactionRequest} request
* @param {Function} resolve
* @param {Function} reject
- * @param {string} passphrase
+ * @param {string} [passphrase]
* @returns {Promise<void>}
* @private
*/
@@ -119,7 +121,7 @@ class BaseLayout {
try {
// XXX Passphrase encoding
- const passphraseBuf = Nimiq.BufferUtils.fromAscii(passphrase);
+ const passphraseBuf = passphrase ? Nimiq.BufferUtils.fromAscii(passphrase) : undefined;
const key = await KeyStore.instance.get(request.keyInfo.id, passphraseBuf);
if (!key) {
reject(new Error('Failed to retrieve key'));
|
Properly store and retrieve unencrypted keys
|
nimiq_keyguard-next
|
train
|
5c2282ec85b1bdda2558e5db1f5b153438b78578
|
diff --git a/src/utils/regexes.js b/src/utils/regexes.js
index <HASH>..<HASH> 100644
--- a/src/utils/regexes.js
+++ b/src/utils/regexes.js
@@ -1,4 +1,4 @@
-const inlineExtensionRegex = /^!(\w+)(?:\[([^)]*)\])?(?:\(([^)]*)\))?(?:\{([^}]*)\})?/;
+const inlineExtensionRegex = /^!(\w+)(?:\[([^\]]*)\])?(?:\(([^)]*)\))?(?:\{([^}]*)\})?/;
const blockExtensionRegex = /^(\w+):(?:(?:[ \t]+)([^\f\n\r\v]*))?(?:[\f\n\r\v]+):::([^]*?):::(?:(?:[\f\n\r\v]+)(?:\{([^}]*)\}))?/;
|
fix: fix inline extension regex (#<I>)
The inline extension regex did not match the documentation. Specifically
the "content" part is supposet to match everything but the "]" character
but in fact matched everything but the ")" character.
Update the regexp to match on the correct character.
|
medfreeman_remark-generic-extensions
|
train
|
a9b0e9e60f5aff61b37143fd0a0cf21515b60df8
|
diff --git a/seqmagick/subcommands/common.py b/seqmagick/subcommands/common.py
index <HASH>..<HASH> 100644
--- a/seqmagick/subcommands/common.py
+++ b/seqmagick/subcommands/common.py
@@ -8,6 +8,7 @@ import functools
import os
import os.path
import signal
+import sys
import tempfile
@contextlib.contextmanager
@@ -138,12 +139,21 @@ def positive_value(target_type):
return inner
+def _exit_on_signal(sig, status=None, message=None):
+ def exit(sig, frame):
+ if message:
+ print >> sys.stderr, message
+ raise SystemExit(status)
+ signal.signal(sig, exit)
+
+def exit_on_sigint(status=1, message="Canceled."):
+ """
+ Set program to exit on SIGINT, with provided status and message.
+ """
+ _exit_on_signal(signal.SIGINT, status, message)
def exit_on_sigpipe(status=None):
"""
Set program to exit on SIGPIPE
"""
- def exit(signal, frame):
- raise SystemExit(status)
-
- signal.signal(signal.SIGPIPE, exit)
+ _exit_on_signal(signal.SIGPIPE, status)
diff --git a/seqmagick/subcommands/info.py b/seqmagick/subcommands/info.py
index <HASH>..<HASH> 100644
--- a/seqmagick/subcommands/info.py
+++ b/seqmagick/subcommands/info.py
@@ -143,6 +143,7 @@ def action(arguments):
"""
# Ignore SIGPIPE, for head support
common.exit_on_sigpipe()
+ common.exit_on_sigint()
handle = arguments.destination_file
output_format = arguments.output_format
|
Exit on SIGINT in seqmagick info
No one needs tracebacks.
|
fhcrc_seqmagick
|
train
|
e58d9f8a566fa6461da40bcba4ef70e189f7582b
|
diff --git a/lib/tower_cli/models/base.py b/lib/tower_cli/models/base.py
index <HASH>..<HASH> 100644
--- a/lib/tower_cli/models/base.py
+++ b/lib/tower_cli/models/base.py
@@ -701,41 +701,17 @@ class ResourceMethods(BaseResource):
# Done; return the response
return response
- @click.option('--fail-on-found', default=False,
- show_default=True, type=bool, is_flag=True,
- help='If used, return an error if a matching record already '
- 'exists.')
- @click.option('--force-on-exists', default=False,
- show_default=True, type=bool, is_flag=True,
- help='If used, if a match is found on unique fields, other '
- 'fields will be updated to the provided values. If '
- 'False, a match causes the request to be a no-op.')
def create(self, fail_on_found=False, force_on_exists=False, **kwargs):
- """Create an object.
-
- Fields in the resource's `identity` tuple are used for a lookup;
- if a match is found, then no-op (unless `force_on_exists` is set) but
- do not fail (unless `fail_on_found` is set).
- """
+ """The code for the create method in a non-command implementation
+ here, so that the child classes can over-ride it as a command,
+ depending on circumstances."""
return self.write(create_on_missing=True, fail_on_found=fail_on_found,
force_on_exists=force_on_exists, **kwargs)
- @resources.command
- @click.option('--create-on-missing', default=False,
- show_default=True, type=bool, is_flag=True,
- help='If used, and if options rather than a primary key are '
- 'used to attempt to match a record, will create the '
- 'record if it does not exist. This is an alias to '
- '`create --force-on-exists`.')
def modify(self, pk=None, create_on_missing=False, **kwargs):
- """Modify an already existing object.
-
- Fields in the resource's `identity` tuple can be used in lieu of a
- primary key for a lookup; in such a case, only other fields are
- written.
-
- To modify unique fields, you must use the primary key for the lookup.
- """
+ """The code for the modify method in a non-command implementation
+ here, so that the child classes can over-ride it as a command,
+ depending on circumstances."""
force_on_exists = kwargs.pop('force_on_exists', True)
return self.write(pk, create_on_missing=create_on_missing,
force_on_exists=force_on_exists, **kwargs)
@@ -996,7 +972,7 @@ class ExeResource(MonitorableResource):
class Resource(ResourceMethods):
- """This is the parent class for all 'standard' resources."""
+ """This is the parent class for all standard resources."""
abstract = True
@resources.command
@@ -1010,8 +986,32 @@ class Resource(ResourceMethods):
'fields will be updated to the provided values. If '
'False, a match causes the request to be a no-op.')
def create(self, fail_on_found=False, force_on_exists=False, **kwargs):
- """Create an object by implementing the super class version of create.
+ """Create an object.
+
+ Fields in the resource's `identity` tuple are used for a lookup;
+ if a match is found, then no-op (unless `force_on_exists` is set) but
+ do not fail (unless `fail_on_found` is set).
"""
return super(Resource, self).create(
fail_on_found=False, force_on_exists=False, **kwargs
)
+
+ @resources.command
+ @click.option('--create-on-missing', default=False,
+ show_default=True, type=bool, is_flag=True,
+ help='If used, and if options rather than a primary key are '
+ 'used to attempt to match a record, will create the '
+ 'record if it does not exist. This is an alias to '
+ '`create --force-on-exists`.')
+ def modify(self, pk=None, create_on_missing=False, **kwargs):
+ """Modify an already existing object.
+
+ Fields in the resource's `identity` tuple can be used in lieu of a
+ primary key for a lookup; in such a case, only other fields are
+ written.
+
+ To modify unique fields, you must use the primary key for the lookup.
+ """
+ force_on_exists = kwargs.pop('force_on_exists', True)
+ return self.write(pk, create_on_missing=create_on_missing,
+ force_on_exists=force_on_exists, **kwargs)
diff --git a/lib/tower_cli/resources/project.py b/lib/tower_cli/resources/project.py
index <HASH>..<HASH> 100644
--- a/lib/tower_cli/resources/project.py
+++ b/lib/tower_cli/resources/project.py
@@ -95,13 +95,22 @@ class Resource(models.MonitorableResource):
'scm_update_on_launch'
))
def modify(self, pk=None, *args, **kwargs):
- """Modify a project, see org help to modify org.
- Also associated with issue #52, the organization can't be modified
- with the 'modify' command. This would create confusion about whether
- it served the role of an identifier versus a field to modify. This
- method is used to set the allowed fields on the modify command,
- removing the organization from available options.
+ """Modify an already existing.
+
+ To edit the project's organizations, see help for organizations.
+
+ Fields in the resource's `identity` tuple can be used in lieu of a
+ primary key for a lookup; in such a case, only other fields are
+ written.
+
+ To modify unique fields, you must use the primary key for the lookup.
"""
+ # Associated with issue #52, the organization can't be modified
+ # with the 'modify' command. This would create confusion about
+ # whether its flag is an identifier versus a field to modify.
+ # Another role this method serves is to re-implement the modify
+ # method as a command. If this method is deleted, the inheritance
+ # chain for project should also be changed.
return super(Resource, self).modify(pk=pk, *args, **kwargs)
@resources.command(use_fields_as_options=('name', 'organization'))
|
fixed coverage issue, treat modify same as create
|
ansible_tower-cli
|
train
|
75497958811278fb6d9b7c312cf98642f253e3ba
|
diff --git a/src/controllers/PlanController.php b/src/controllers/PlanController.php
index <HASH>..<HASH> 100644
--- a/src/controllers/PlanController.php
+++ b/src/controllers/PlanController.php
@@ -186,6 +186,7 @@ class PlanController extends CrudController
'columns' => [
'object->name',
'type',
+ 'object->label',
'old_price',
'note',
],
|
added description in plan history grids (#<I>)
|
hiqdev_hipanel-module-finance
|
train
|
c6257280f18595c2abec2372002eac0aa436a38c
|
diff --git a/lib/friendly_id/active_record_adapter/relation.rb b/lib/friendly_id/active_record_adapter/relation.rb
index <HASH>..<HASH> 100644
--- a/lib/friendly_id/active_record_adapter/relation.rb
+++ b/lib/friendly_id/active_record_adapter/relation.rb
@@ -101,7 +101,7 @@ module FriendlyId
fragment = "(slugs.sluggable_type = %s AND slugs.name = %s AND slugs.sequence = %d)"
conditions = ids.inject(nil) do |clause, id|
name, seq = id.parse_friendly_id
- string = fragment % [connection.quote(klass.base_class), connection.quote(name), seq]
+ string = fragment % [connection.quote(klass.base_class.name), connection.quote(name), seq]
clause ? clause + " OR #{string}" : string
end
sql = "SELECT sluggable_id FROM slugs WHERE (%s)" % conditions
|
Fix tests for Rails <I>.rc1
|
norman_friendly_id
|
train
|
d81dec2a9233c9267797c28135af55b9478e26fb
|
diff --git a/jbpm-human-task/jbpm-human-task-core/src/test/java/org/jbpm/services/task/DeadlinesBaseTest.java b/jbpm-human-task/jbpm-human-task-core/src/test/java/org/jbpm/services/task/DeadlinesBaseTest.java
index <HASH>..<HASH> 100644
--- a/jbpm-human-task/jbpm-human-task-core/src/test/java/org/jbpm/services/task/DeadlinesBaseTest.java
+++ b/jbpm-human-task/jbpm-human-task-core/src/test/java/org/jbpm/services/task/DeadlinesBaseTest.java
@@ -39,6 +39,7 @@ import org.jbpm.services.task.impl.model.TaskImpl;
import org.jbpm.services.task.impl.model.UserImpl;
import org.jbpm.services.task.utils.ContentMarshallerHelper;
import org.junit.After;
+import org.junit.Ignore;
import org.junit.Test;
import org.kie.api.task.model.OrganizationalEntity;
import org.kie.api.task.model.Status;
@@ -63,6 +64,7 @@ public abstract class DeadlinesBaseTest extends HumanTaskServicesBaseTest {
@Test
+ @Ignore // temporary ignoring this as it randomly failing on jenkins
public void testDelayedEmailNotificationOnDeadline() throws Exception {
Map<String, Object> vars = new HashMap<String, Object>();
|
temporary ignore deadline tests that fails randomly on jenkins - to be fixed with JBPM-<I>
|
kiegroup_jbpm
|
train
|
f63a839aaa4f923bd95d2163ae2d065e6b4537b6
|
diff --git a/lib/spore.js b/lib/spore.js
index <HASH>..<HASH> 100644
--- a/lib/spore.js
+++ b/lib/spore.js
@@ -9,22 +9,16 @@ var fs = require('fs')
/**
* Client
*/
-function Client(spore) {
+function Client(spec) {
this.httpClient = http;
- if (typeof spore == 'string')
- {
- // call to readFileSync should be avoid
- var spec = fs.readFileSync(spore);
- spore = JSON.parse(spec);
- }
- this.spec = spore;
+ this.spec = spec;
var that = this;
function create_wrapper(methodName) {
return function() {
that._call(methodName, arguments);
}
}
- for (methodName in spore.methods) {
+ for (methodName in spec.methods) {
this[methodName] = create_wrapper(methodName);
}
}
@@ -92,5 +86,14 @@ function API() {
throw 'not implemented. Please fork and add code here ;)';
}
-exports.Client = Client;
-exports.API = API;
+exports.createClient = function(sporeResource) {
+ if (typeof sporeResource == 'string')
+ {
+ // call to readFileSync should be avoid
+ var content = fs.readFileSync(sporeResource);
+ sporeResource = JSON.parse(content);
+ }
+ return new Client(sporeResource);
+};
+
+exports.API = API;
diff --git a/tests/test_client.js b/tests/test_client.js
index <HASH>..<HASH> 100644
--- a/tests/test_client.js
+++ b/tests/test_client.js
@@ -3,7 +3,7 @@ require.paths.unshift(__dirname +"/minitest");
require.paths.unshift(__dirname +"/../lib");
// we test that
-var Client = require('spore').Client;
+var spore = require('spore');
var minitest = require("minitest");
var assert = require("assert");
@@ -12,7 +12,7 @@ minitest.setupListeners();
minitest.context("Create client with filename", function () {
this.setup(function () {
- this.client = new Client(__dirname +'/fixtures/test.json');
+ this.client = spore.createClient(__dirname +'/fixtures/test.json');
});
this.assertion("client should have a public_timeline method", function (test) {
@@ -24,7 +24,7 @@ minitest.context("Create client with filename", function () {
minitest.context('Create client with json object', function() {
this.setup(function() {
- this.client = new Client({
+ this.client = spore.createClient({
"base_url" : "http://api.twitter.com/1",
"version" : "0.1",
"methods" : {
|
new Client to spore.createClient
|
francois2metz_node-spore
|
train
|
7d5a97862f927e8ba2fb82018e61fb17cd9ffcd2
|
diff --git a/server/rpki.go b/server/rpki.go
index <HASH>..<HASH> 100644
--- a/server/rpki.go
+++ b/server/rpki.go
@@ -571,6 +571,11 @@ func validatePath(ownAs uint32, tree *radix.Tree, cidr string, asPath *bgp.PathA
}
func (c *roaManager) validate(pathList []*table.Path) {
+ if len(c.clientMap) == 0 {
+ // RPKI isn't enabled
+ return
+ }
+
for _, path := range pathList {
if path.IsWithdraw || path.IsEOR() {
continue
|
rpki: validate only when RPKI is enabled
|
osrg_gobgp
|
train
|
f81ee3ae123bfad793a5477a248cfe9d135a94dd
|
diff --git a/src/get-start-and-end-commands.js b/src/get-start-and-end-commands.js
index <HASH>..<HASH> 100644
--- a/src/get-start-and-end-commands.js
+++ b/src/get-start-and-end-commands.js
@@ -87,7 +87,10 @@ async function isDefaultAddonBlueprint(blueprint) {
return isDefaultAddonBlueprint;
}
-function getArgs(projectName, blueprint) {
+function getArgs({
+ projectName,
+ blueprint
+}) {
let args = [];
if (blueprint.isBaseBlueprint) {
@@ -147,7 +150,10 @@ async function runEmberLocally({
projectName,
blueprint
}) {
- let args = getArgs(projectName, blueprint);
+ let args = getArgs({
+ projectName,
+ blueprint
+ });
if (!blueprint.isBaseBlueprint) {
cwd = path.join(cwd, projectName);
@@ -166,7 +172,10 @@ async function runEmberRemotely({
}) {
let isCustomBlueprint = !isDefaultBlueprint(blueprint);
- let args = getArgs(projectName, blueprint);
+ let args = getArgs({
+ projectName,
+ blueprint
+ });
if (!blueprint.isBaseBlueprint) {
cwd = path.join(cwd, projectName);
diff --git a/test/unit/get-start-and-end-commands-test.js b/test/unit/get-start-and-end-commands-test.js
index <HASH>..<HASH> 100644
--- a/test/unit/get-start-and-end-commands-test.js
+++ b/test/unit/get-start-and-end-commands-test.js
@@ -760,7 +760,10 @@ describe(_getStartAndEndCommands, function() {
it('works for default app', function() {
let blueprint = loadDefaultBlueprint(['welcome']);
- let args = getArgs(projectName, blueprint);
+ let args = getArgs({
+ projectName,
+ blueprint
+ });
expect(args).to.deep.equal([
'new',
@@ -776,7 +779,10 @@ describe(_getStartAndEndCommands, function() {
it('works for default addon', function() {
let blueprint = loadDefaultBlueprint(['addon']);
- let args = getArgs(projectName, blueprint);
+ let args = getArgs({
+ projectName,
+ blueprint
+ });
expect(args).to.deep.equal([
'new',
@@ -796,7 +802,10 @@ describe(_getStartAndEndCommands, function() {
isBaseBlueprint: true
});
- let args = getArgs(projectName, blueprint);
+ let args = getArgs({
+ projectName,
+ blueprint
+ });
expect(args).to.deep.equal([
'new',
@@ -814,7 +823,10 @@ describe(_getStartAndEndCommands, function() {
path: '/path/to/my-blueprint'
});
- let args = getArgs(projectName, blueprint);
+ let args = getArgs({
+ projectName,
+ blueprint
+ });
expect(args).to.deep.equal([
'init',
@@ -834,7 +846,10 @@ describe(_getStartAndEndCommands, function() {
]
};
- let args = getArgs(projectName, blueprint);
+ let args = getArgs({
+ projectName,
+ blueprint
+ });
expect(args).to.deep.equal([
'new',
|
refactor getArgs to take an object
|
ember-cli_ember-cli-update
|
train
|
3da5f9088dc410c481315ece1f07a09fd172bc16
|
diff --git a/registry/storage/driver/s3-aws/s3.go b/registry/storage/driver/s3-aws/s3.go
index <HASH>..<HASH> 100644
--- a/registry/storage/driver/s3-aws/s3.go
+++ b/registry/storage/driver/s3-aws/s3.go
@@ -270,33 +270,21 @@ func FromParameters(parameters map[string]interface{}) (*Driver, error) {
// bucketName
func New(params DriverParameters) (*Driver, error) {
awsConfig := aws.NewConfig()
- var creds *credentials.Credentials
- if params.RegionEndpoint == "" {
- creds = credentials.NewChainCredentials([]credentials.Provider{
- &credentials.StaticProvider{
- Value: credentials.Value{
- AccessKeyID: params.AccessKey,
- SecretAccessKey: params.SecretKey,
- },
- },
- &credentials.EnvProvider{},
- &credentials.SharedCredentialsProvider{},
- &ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(session.New())},
- })
-
- } else {
- creds = credentials.NewChainCredentials([]credentials.Provider{
- &credentials.StaticProvider{
- Value: credentials.Value{
- AccessKeyID: params.AccessKey,
- SecretAccessKey: params.SecretKey,
- },
- },
- &credentials.EnvProvider{},
- })
+ if params.RegionEndpoint != "" {
awsConfig.WithS3ForcePathStyle(true)
awsConfig.WithEndpoint(params.RegionEndpoint)
}
+ creds := credentials.NewChainCredentials([]credentials.Provider{
+ &credentials.StaticProvider{
+ Value: credentials.Value{
+ AccessKeyID: params.AccessKey,
+ SecretAccessKey: params.SecretKey,
+ },
+ },
+ &credentials.EnvProvider{},
+ &credentials.SharedCredentialsProvider{},
+ &ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(session.New())},
+ })
awsConfig.WithCredentials(creds)
awsConfig.WithRegion(params.Region)
|
Allow EC2 IAM roles to be used when authorizing region endpoints
|
docker_distribution
|
train
|
2c3ca4c4e6d62eb00d46bcf767d04917428f27a7
|
diff --git a/activerecord/CHANGELOG b/activerecord/CHANGELOG
index <HASH>..<HASH> 100644
--- a/activerecord/CHANGELOG
+++ b/activerecord/CHANGELOG
@@ -1,5 +1,7 @@
*SVN*
+* Don't rollback in teardown unless a transaction was started. Don't start a transaction in create_fixtures if a transaction is started. #6282 [lukfugl, Jeremy Kemper]
+
* Add #delete support to has_many :through associations. Closes #6049 [Martin Landers]
* Reverted old select_limited_ids_list postgresql fix that caused issues in mysql. Closes #5851 [Rick]
diff --git a/activerecord/lib/active_record/fixtures.rb b/activerecord/lib/active_record/fixtures.rb
index <HASH>..<HASH> 100755
--- a/activerecord/lib/active_record/fixtures.rb
+++ b/activerecord/lib/active_record/fixtures.rb
@@ -252,7 +252,7 @@ class Fixtures < YAML::Omap
end
all_loaded_fixtures.merge! fixtures_map
- connection.transaction do
+ connection.transaction(Thread.current['open_transactions'] == 0) do
fixtures.reverse.each { |fixture| fixture.delete_existing_fixtures }
fixtures.each { |fixture| fixture.insert_fixtures }
@@ -542,10 +542,10 @@ module Test #:nodoc:
def teardown_with_fixtures
return unless defined?(ActiveRecord::Base) && !ActiveRecord::Base.configurations.blank?
- # Rollback changes.
- if use_transactional_fixtures?
+ # Rollback changes if a transaction is active.
+ if use_transactional_fixtures? && !Thread.current['open_transactions'].zero?
ActiveRecord::Base.connection.rollback_db_transaction
- ActiveRecord::Base.send :decrement_open_transactions
+ Thread.current['open_transactions'] = 0
end
ActiveRecord::Base.verify_active_connections!
end
diff --git a/activerecord/test/fixtures_test.rb b/activerecord/test/fixtures_test.rb
index <HASH>..<HASH> 100755
--- a/activerecord/test/fixtures_test.rb
+++ b/activerecord/test/fixtures_test.rb
@@ -361,4 +361,30 @@ class ManyToManyFixturesWithClassDefined < Test::Unit::TestCase
def test_this_should_run_cleanly
assert true
end
-end
\ No newline at end of file
+end
+
+
+class FixturesBrokenRollbackTest < Test::Unit::TestCase
+ def blank_setup; end
+ alias_method :ar_setup_with_fixtures, :setup_with_fixtures
+ alias_method :setup_with_fixtures, :blank_setup
+ alias_method :setup, :blank_setup
+
+ def blank_teardown; end
+ alias_method :ar_teardown_with_fixtures, :teardown_with_fixtures
+ alias_method :teardown_with_fixtures, :blank_teardown
+ alias_method :teardown, :blank_teardown
+
+ def test_no_rollback_in_teardown_unless_transaction_active
+ assert_equal 0, Thread.current['open_transactions']
+ assert_raise(RuntimeError) { ar_setup_with_fixtures }
+ assert_equal 0, Thread.current['open_transactions']
+ assert_nothing_raised { ar_teardown_with_fixtures }
+ assert_equal 0, Thread.current['open_transactions']
+ end
+
+ private
+ def load_fixtures
+ raise 'argh'
+ end
+end
|
Don't rollback in teardown unless a transaction was started. Don't start a transaction in create_fixtures if a transaction is started. Closes #<I>.
git-svn-id: <URL>
|
rails_rails
|
train
|
38f1b58fc2a0c9be3b21ccb5da03f38d4c9166a9
|
diff --git a/spec/public/test/route_matchers_spec.rb b/spec/public/test/route_matchers_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/public/test/route_matchers_spec.rb
+++ b/spec/public/test/route_matchers_spec.rb
@@ -4,7 +4,7 @@ Merb.start :environment => 'test', :log_level => :fatal
class TestController < Merb::Controller
def get(id = nil); end
- def post; end
+ def post(version = nil); end
end
class IDish
@@ -21,6 +21,7 @@ describe Merb::Test::Rspec::RouteMatchers do
before(:each) do
Merb::Router.prepare do |r|
+ r.match(%r"/v(\d+\.\d+)", :method => :post).to(:controller => "test_controller", :action => "post", :version => "[1]")
r.match("/", :method => :get).to(:controller => "test_controller", :action => "get").name(:getter)
r.match("/", :method => :post).to(:controller => "test_controller", :action => "post")
r.match("/:id").to(:controller => "test_controller", :action => "get").name(:with_id)
@@ -35,8 +36,18 @@ describe Merb::Test::Rspec::RouteMatchers do
it "should work with the url helper and ParamMatcher" do
idish = IDish.new(rand(1000).to_s)
+
request_to(url(:with_id, idish)).should route_to(TestController, :get).with(idish)
end
+
+ it "should work with a negative ParamMatcher" do
+ request_to(url(:with_id, :id => 100)).should_not route_to(TestController, :get).with(:id => 1)
+ end
+
+ it "should work with a route containing a regexp" do
+ request_to("/v1.2", :post).should route_to(TestController, :post).with(:version => "1.2")
+ request_to("/v1.0", :post).should_not route_to(TestController, :post).with(:version => "3.14")
+ end
end
module Merb::Test::Rspec::RouteMatchers
|
Added spec for the route matcher testing regexp routes. [#<I>]
|
wycats_merb
|
train
|
5d73c6d7bd7b42264e3649e14c444738a831d636
|
diff --git a/src/Controller/ViewDataTrait.php b/src/Controller/ViewDataTrait.php
index <HASH>..<HASH> 100644
--- a/src/Controller/ViewDataTrait.php
+++ b/src/Controller/ViewDataTrait.php
@@ -20,7 +20,7 @@ trait ViewDataTrait
/**
* @return array
*/
- public function getData() : array
+ protected function getViewData() : array
{
return $this->data;
}
@@ -31,7 +31,7 @@ trait ViewDataTrait
*
* @return $this|self
*/
- public function addData(string $name, $value)
+ protected function addViewData(string $name, $value)
{
$this->data[$name] = $value;
@@ -43,7 +43,7 @@ trait ViewDataTrait
*
* @return $this|self
*/
- public function addDatas(array $datas)
+ protected function addViewDatas(array $datas)
{
$this->data = array_merge($this->data, $datas);
|
ViewData renamed & protected
|
cawaphp_cawa
|
train
|
db1bb2adabf40421f938c6a034b85e794d8355d2
|
diff --git a/src/PhpImap/Mailbox.php b/src/PhpImap/Mailbox.php
index <HASH>..<HASH> 100644
--- a/src/PhpImap/Mailbox.php
+++ b/src/PhpImap/Mailbox.php
@@ -19,6 +19,14 @@ class Mailbox {
protected $expungeOnDisconnect = true;
private $imapStream;
+ /**
+ * @param type $imapPath
+ * @param type $login
+ * @param type $password
+ * @param type $attachmentsDir
+ * @param type $serverEncoding
+ * @throws Exception
+ */
public function __construct($imapPath, $login, $password, $attachmentsDir = null, $serverEncoding = 'UTF-8') {
$this->imapPath = $imapPath;
$this->imapLogin = $login;
|
Added PHPDoc to constructor
|
barbushin_php-imap
|
train
|
4fdfb9898714145aa285dafb0a344534172b484d
|
diff --git a/mintapi/api.py b/mintapi/api.py
index <HASH>..<HASH> 100644
--- a/mintapi/api.py
+++ b/mintapi/api.py
@@ -365,30 +365,34 @@ def main():
options = cmdline.parse_args()
- # Handle Python 3's raw_input change.
+ if options.keyring and not keyring:
+ cmdline.error('--keyring can only be used if the `keyring`'
+ 'library is installed.')
+
try:
- input = raw_input
+ from __builtin__ import raw_input as input
except NameError:
pass
+ # Try to get the e-mail and password from the arguments
email = options.email
password = options.password
if not email:
+ # If the user did not provide an e-mail, prompt for it
email = input("Mint e-mail: ")
- if options.keyring:
- if not keyring:
- cmdline.error('--keyring can only be used if the `keyring`'
- 'library is installed.')
-
- if not password:
- password = keyring.get_password('mintapi', email)
+ if keyring and not password:
+ # If the keyring module is installed and we don't yet have
+ # a password, try prompting for it
+ password = keyring.get_password('mintapi', email)
if not password:
+ # If we still don't have a password, prompt for it
password = getpass.getpass("Mint password: ")
if options.keyring:
+ # If keyring option is specified, save the password in the keyring
keyring.set_password('mintapi', email, password)
if options.accounts_ext:
|
Added a backwards-compatible path for trying to use the keyring password even if the --keyring argument is not specified.
|
mrooney_mintapi
|
train
|
378d597d37374274bdd124b2719eb160700981cf
|
diff --git a/ayrton/__init__.py b/ayrton/__init__.py
index <HASH>..<HASH> 100644
--- a/ayrton/__init__.py
+++ b/ayrton/__init__.py
@@ -181,7 +181,7 @@ class Environment (dict):
'N', 'S', 'nt', 'ot' ],
'ayrton.expansion': [ 'bash', ],
'ayrton.functions': [ 'cd', ('cd', 'chdir'), 'exit', 'export',
- 'option', 'run', 'shift', 'unset', ],
+ 'option', 'run', 'shift', 'trap', 'unset', ],
'ayrton.execute': [ 'o', 'Capture', 'CommandFailed', 'CommandNotFound',
'Pipe', 'Command'],
'ayrton.remote': [ 'remote' ]
diff --git a/ayrton/functions.py b/ayrton/functions.py
index <HASH>..<HASH> 100644
--- a/ayrton/functions.py
+++ b/ayrton/functions.py
@@ -20,6 +20,7 @@
import ayrton
import ayrton.execute
import os
+import signal
import logging
logger= logging.getLogger ('ayrton.functions')
@@ -99,6 +100,11 @@ def shift (n=1):
return ans
+def trap(handler, *signals):
+ for signal in signals:
+ signal.signal(signal, handler)
+
+
def unset (*args):
for k in args:
if k in ayrton.runner.globals.keys ():
diff --git a/doc/source/reference.rst b/doc/source/reference.rst
index <HASH>..<HASH> 100644
--- a/doc/source/reference.rst
+++ b/doc/source/reference.rst
@@ -100,6 +100,11 @@ Functions
1, the value returned is just the first element; if it's bigger than 1, it
returns a list with those *n* elements.
+.. py:function:: trap(handler, *signals)
+
+ Associates `handler` to all the `signals`. You will need to ``import signal``
+ and use the ``SIG*`` constants.
+
.. py:function:: unset (*args)
For each variable name in *\*args*, unset the variable and remove it from
|
[+] trap(), with docs, but no tests.
|
StyXman_ayrton
|
train
|
852f8650423e2718cf4f428d1f3be1fba26f7149
|
diff --git a/Configuration/TCA/tt_address.php b/Configuration/TCA/tt_address.php
index <HASH>..<HASH> 100755
--- a/Configuration/TCA/tt_address.php
+++ b/Configuration/TCA/tt_address.php
@@ -66,16 +66,19 @@ return [
],
'sys_language_uid' => [
'exclude' => true,
- 'label' => $generalLanguageFilePrefix . 'locallang_general.xlf:LGL.language',
+ 'label' => 'LLL:EXT:lang/Resources/Private/Language/locallang_general.xlf:LGL.language',
'config' => [
'type' => 'select',
'renderType' => 'selectSingle',
- 'foreign_table' => 'sys_language',
- 'foreign_table_where' => 'ORDER BY sys_language.title',
+ 'special' => 'languages',
'items' => [
- [$generalLanguageFilePrefix . 'locallang_general.xlf:LGL.allLanguages', -1],
- [$generalLanguageFilePrefix . 'locallang_general.xlf:LGL.default_value', 0],
- ]
+ [
+ 'LLL:EXT:lang/Resources/Private/Language/locallang_general.xlf:LGL.allLanguages',
+ -1,
+ 'flags-multiple'
+ ],
+ ],
+ 'default' => 0,
]
],
'l10n_parent' => [
@@ -85,6 +88,7 @@ return [
'config' => [
'type' => 'select',
'renderType' => 'selectSingle',
+ 'default' => 0,
'items' => [
['', 0],
],
|
[BUGFIX] Improve TCA for sys_language_uid
Resolves: #<I>
|
FriendsOfTYPO3_tt_address
|
train
|
067e1e8fd2528af896ace632ed32c66c19474c76
|
diff --git a/Tests/ValidatorTest.php b/Tests/ValidatorTest.php
index <HASH>..<HASH> 100644
--- a/Tests/ValidatorTest.php
+++ b/Tests/ValidatorTest.php
@@ -78,11 +78,11 @@ class ValidatorTest extends \PHPUnit_Framework_TestCase
$violations = $this->v->validate($r, 'UPDATE');
$this->assertSame(2, $violations->count());
$this->assertSame(
- 'Array[foo][foo]:' . "\n" . ' This field was not expected. (code 2)',
+ 'Array[foo][foo]:' . "\n" . ' This field was not expected. (code 7703c766-b5d5-4cef-ace7-ae0dd82304e9)',
(string) $violations->get(0)
);
$this->assertSame(
- 'Array[baz]:' . "\n" . ' This field was not expected. (code 2)',
+ 'Array[baz]:' . "\n" . ' This field was not expected. (code 7703c766-b5d5-4cef-ace7-ae0dd82304e9)',
(string) $violations->get(1)
);
}
|
fix tests (as they are run with symfony <I> now)
|
Innmind_rest-server
|
train
|
a9be1e0a23dc2a609ace31e8359733ddd516e941
|
diff --git a/metpy/calc/thermo.py b/metpy/calc/thermo.py
index <HASH>..<HASH> 100644
--- a/metpy/calc/thermo.py
+++ b/metpy/calc/thermo.py
@@ -9,7 +9,7 @@ import numpy as np
import scipy.integrate as si
import scipy.optimize as so
-from .tools import find_intersections, get_layer
+from .tools import _greater_or_close, _less_or_close, find_intersections, get_layer
from ..constants import Cp_d, epsilon, kappa, Lv, P0, Rd
from ..package_tools import Exporter
from ..units import atleast_1d, check_units, concatenate, units
@@ -940,20 +940,20 @@ def cape_cin(pressure, temperature, dewpt, parcel_profile):
# CAPE (temperature parcel < temperature environment)
# Only use data between the LFC and EL for calculation
- p_mask = (x <= lfc_pressure) & (x >= el_pressure)
+ p_mask = _less_or_close(x, lfc_pressure) & _greater_or_close(x, el_pressure)
x_clipped = x[p_mask]
y_clipped = y[p_mask]
- y_clipped[y_clipped <= 0 * units.degK] = 0 * units.degK
+ y_clipped[_less_or_close(y_clipped, 0 * units.degK)] = 0 * units.degK
cape = (Rd * (np.trapz(y_clipped, np.log(x_clipped)) * units.degK)).to(units('J/kg'))
# CIN (temperature parcel < temperature environment)
# Only use data between the surface and LFC for calculation
- p_mask = (x >= lfc_pressure)
+ p_mask = _greater_or_close(x, lfc_pressure)
x_clipped = x[p_mask]
y_clipped = y[p_mask]
- y_clipped[y_clipped >= 0 * units.degK] = 0 * units.degK
+ y_clipped[_greater_or_close(y_clipped, 0 * units.degK)] = 0 * units.degK
cin = (Rd * (np.trapz(y_clipped, np.log(x_clipped)) * units.degK)).to(units('J/kg'))
return cape, cin
|
Use close comparisons in CAPE and CIN.
There were edge cases where greater/less than or equal to would suffer from
precision issues and drop an extra point in the sounding. Modify them to use
our approximate comparisons.
|
Unidata_MetPy
|
train
|
c2c45c3c2de43c4a22326404dcefb6a788b66571
|
diff --git a/src/Runner.php b/src/Runner.php
index <HASH>..<HASH> 100644
--- a/src/Runner.php
+++ b/src/Runner.php
@@ -62,6 +62,11 @@ class Runner
*/
public function runInstallers(PackageInterface $package)
{
+ foreach ($this->installers as $installer) {
+ if ($installer->supports($package)) {
+ $installer->install($package);
+ }
+ }
}
/**
|
Add logic to install for all installers
|
BudgeIt_composer-builder
|
train
|
eee78cecce5e68f06a438d0bafa2fc0c770d5f24
|
diff --git a/lib/fig_newton/version.rb b/lib/fig_newton/version.rb
index <HASH>..<HASH> 100644
--- a/lib/fig_newton/version.rb
+++ b/lib/fig_newton/version.rb
@@ -1,3 +1,3 @@
module FigNewton
- VERSION = "0.1"
+ VERSION = "0.2"
end
|
version bump - getting ready for release
|
cheezy_fig_newton
|
train
|
69c6dfeaab6802005c6539f8fc9fb467670630a1
|
diff --git a/closure/goog/events/events.js b/closure/goog/events/events.js
index <HASH>..<HASH> 100644
--- a/closure/goog/events/events.js
+++ b/closure/goog/events/events.js
@@ -268,6 +268,13 @@ goog.events.listen_ = function(
// incarnation of this code, from 2007, indicates that it replaced an
// earlier still version that caused excess allocations on IE6.
src.attachEvent(goog.events.getOnString_(type.toString()), proxy);
+ } else if (src.addListener && src.removeListener) {
+ // In IE, MediaQueryList uses addListener() insteadd of addEventListener. In
+ // Safari, there is no global for the MediaQueryList constructor, so we just
+ // check whether the object "looks like" MediaQueryList.
+ goog.asserts.assert(
+ type === 'change', 'MediaQueryList only has a change event');
+ src.addListener(proxy);
} else {
throw new Error('addEventListener and attachEvent are unavailable.');
}
@@ -457,6 +464,8 @@ goog.events.unlistenByKey = function(key) {
src.removeEventListener(type, proxy, listener.capture);
} else if (src.detachEvent) {
src.detachEvent(goog.events.getOnString_(type), proxy);
+ } else if (src.addListener && src.removeListener) {
+ src.removeListener(proxy);
}
goog.events.listenerCountEstimate_--;
diff --git a/closure/goog/events/events_test.js b/closure/goog/events/events_test.js
index <HASH>..<HASH> 100644
--- a/closure/goog/events/events_test.js
+++ b/closure/goog/events/events_test.js
@@ -96,10 +96,21 @@ function testSelfRemove() {
// Test that goog.events.getListener ignores events marked as 'removed'.
assertNull(goog.events.getListener(et1, 'click', callback));
};
- var key = goog.events.listen(et1, 'click', callback);
+ goog.events.listen(et1, 'click', callback);
goog.events.dispatchEvent(et1, 'click');
}
+function testMediaQueryList() {
+ if (!window.matchMedia) return;
+
+ var mql = window.matchMedia('(max-width: 640px)');
+ var key = goog.events.listen(mql, 'change', goog.nullFunction);
+
+ // I don't know of any way to make it raise an event in a test.
+
+ goog.events.unlistenByKey(key);
+}
+
function testHasListener() {
var div = goog.dom.createElement(goog.dom.TagName.DIV);
assertFalse(goog.events.hasListener(div));
@@ -122,7 +133,7 @@ function testHasListener() {
function testHasListenerWithEventTarget() {
assertFalse(goog.events.hasListener(et1));
- function callback(){};
+ function callback() {}
goog.events.listen(et1, 'test', callback, true);
assertTrue(goog.events.hasListener(et1));
assertTrue(goog.events.hasListener(et1, 'test'));
@@ -137,7 +148,7 @@ function testHasListenerWithEventTarget() {
}
function testHasListenerWithMultipleTargets() {
- function callback(){};
+ function callback() {}
goog.events.listen(et1, 'test1', callback, true);
goog.events.listen(et2, 'test2', callback, true);
|
RELNOTES[NEW]: Make goog.events.* support MediaQueryList (from window.matchMedia).
Even in IE (all versions), which follows an older spec that doesn't implement EventTarget.
-------------
Created by MOE: <URL>
|
google_closure-library
|
train
|
6133351690785f034dfb1dca0ef24d0e7e1de896
|
diff --git a/lib/logstasher.rb b/lib/logstasher.rb
index <HASH>..<HASH> 100644
--- a/lib/logstasher.rb
+++ b/lib/logstasher.rb
@@ -117,7 +117,7 @@ module LogStasher
def log(severity, msg)
if self.logger && self.logger.send("#{severity}?")
event = LogStash::Event.new('@source' => self.source, '@fields' => {:message => msg, :level => severity}, '@tags' => ['log'])
- self.logger.send severity, event.to_json
+ self.logger << event.to_json + "\n"
end
end
diff --git a/spec/lib/logstasher_spec.rb b/spec/lib/logstasher_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/lib/logstasher_spec.rb
+++ b/spec/lib/logstasher_spec.rb
@@ -200,7 +200,7 @@ describe LogStasher do
end
it 'adds to log with specified level' do
expect(logger).to receive(:send).with('warn?').and_return(true)
- expect(logger).to receive(:send).with('warn',"{\"@source\":\"unknown\",\"@tags\":[\"log\"],\"@fields\":{\"message\":\"WARNING\",\"level\":\"warn\"},\"@timestamp\":\"timestamp\"}")
+ expect(logger).to receive(:<<).with("{\"@source\":\"unknown\",\"@tags\":[\"log\"],\"@fields\":{\"message\":\"WARNING\",\"level\":\"warn\"},\"@timestamp\":\"timestamp\"}\n")
LogStasher.log('warn', 'WARNING')
end
context 'with a source specified' do
@@ -209,7 +209,7 @@ describe LogStasher do
end
it 'sets the correct source' do
expect(logger).to receive(:send).with('warn?').and_return(true)
- expect(logger).to receive(:send).with('warn',"{\"@source\":\"foo\",\"@tags\":[\"log\"],\"@fields\":{\"message\":\"WARNING\",\"level\":\"warn\"},\"@timestamp\":\"timestamp\"}")
+ expect(logger).to receive(:<<).with("{\"@source\":\"foo\",\"@tags\":[\"log\"],\"@fields\":{\"message\":\"WARNING\",\"level\":\"warn\"},\"@timestamp\":\"timestamp\"}\n")
LogStasher.log('warn', 'WARNING')
end
end
|
Fixing the log method to send only JSON content
|
shadabahmed_logstasher
|
train
|
6aae806e598daa6568febf9b8eac891981d2f6f2
|
diff --git a/shell/impl/src/main/java/org/jboss/forge/addon/shell/command/RunCommand.java b/shell/impl/src/main/java/org/jboss/forge/addon/shell/command/RunCommand.java
index <HASH>..<HASH> 100644
--- a/shell/impl/src/main/java/org/jboss/forge/addon/shell/command/RunCommand.java
+++ b/shell/impl/src/main/java/org/jboss/forge/addon/shell/command/RunCommand.java
@@ -113,9 +113,7 @@ public class RunCommand extends AbstractShellCommand
.outputStream(new PrintStream(stdout))
.outputStreamError(new PrintStream(stderr)).create());
- BufferedReader reader = new BufferedReader(new InputStreamReader(resource.getResourceInputStream()));
-
- try
+ try (BufferedReader reader = new BufferedReader(new InputStreamReader(resource.getResourceInputStream())))
{
long startTime = System.currentTimeMillis();
while (reader.ready())
@@ -142,7 +140,6 @@ public class RunCommand extends AbstractShellCommand
}
finally
{
- reader.close();
scriptShell.close();
}
}
|
Using try-with-resources with run command
|
forge_core
|
train
|
6d2011e3c3b419932577dd81d859705b613223f6
|
diff --git a/config/default.yml b/config/default.yml
index <HASH>..<HASH> 100644
--- a/config/default.yml
+++ b/config/default.yml
@@ -176,8 +176,7 @@ PreCommit:
enabled: false
description: 'Analyzing with coffeelint'
required_executable: 'coffeelint'
- required_library: 'json'
- flags: ['--reporter=raw']
+ flags: ['--reporter=csv']
install_command: 'npm install -g coffeelint'
include: '**/*.coffee'
diff --git a/lib/overcommit/hook/pre_commit/coffee_lint.rb b/lib/overcommit/hook/pre_commit/coffee_lint.rb
index <HASH>..<HASH> 100644
--- a/lib/overcommit/hook/pre_commit/coffee_lint.rb
+++ b/lib/overcommit/hook/pre_commit/coffee_lint.rb
@@ -3,38 +3,30 @@ module Overcommit::Hook::PreCommit
#
# @see http://www.coffeelint.org/
class CoffeeLint < Base
+ MESSAGE_REGEX = /
+ ^(?<file>.+)
+ ,(?<line>\d*),\d*
+ ,(?<type>\w+)
+ ,(?<msg>.+)$
+ /x
+
+ MESSAGE_TYPE_CATEGORIZER = lambda do |type|
+ type.include?('w') ? :warning : :error
+ end
+
def run
result = execute(command + applicable_files)
-
- begin
- parse_json_messages(result.stdout)
- rescue JSON::ParserError => e
- [:fail, "Error parsing coffeelint output: #{e.message}"]
- end
+ parse_messages(result.stdout)
end
private
- def parse_json_messages(output)
- JSON.parse(output).collect do |file, messages|
- messages.collect { |msg| extract_message(file, msg) }
- end.flatten
- end
-
- def extract_message(file, message_hash)
- type = message_hash['level'].include?('w') ? :warning : :error
- line = message_hash['lineNumber']
- rule = message_hash['rule']
- msg = message_hash['message']
- text =
- if rule == 'coffeescript_error'
- # Syntax errors are output in different format.
- # Splice in the file name and grab the first line.
- msg.sub('[stdin]', file).split("\n")[0]
- else
- "#{file}:#{line}: #{msg} (#{rule})"
- end
- Overcommit::Hook::Message.new(type, file, line, text)
+ def parse_messages(output)
+ output.scan(MESSAGE_REGEX).map do |file, line, type, msg|
+ type = MESSAGE_TYPE_CATEGORIZER.call(type)
+ text = "#{file}:#{line}:#{type} #{msg}"
+ Overcommit::Hook::Message.new(type, file, line, text)
+ end
end
end
end
diff --git a/spec/overcommit/hook/pre_commit/coffee_lint_spec.rb b/spec/overcommit/hook/pre_commit/coffee_lint_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/overcommit/hook/pre_commit/coffee_lint_spec.rb
+++ b/spec/overcommit/hook/pre_commit/coffee_lint_spec.rb
@@ -19,9 +19,9 @@ describe Overcommit::Hook::PreCommit::CoffeeLint do
context 'with no warnings' do
before do
- result.stub(:stdout).and_return('{
- "file1.coffee": []
- }')
+ result.stub(:stdout).and_return(normalize_indent(<<-OUT))
+ path,lineNumber,lineNumberEnd,level,message
+ OUT
end
it { should pass }
@@ -29,20 +29,10 @@ describe Overcommit::Hook::PreCommit::CoffeeLint do
context 'and it reports a warning' do
before do
- result.stub(:stdout).and_return('{
- "file1.coffee": [
- {
- "name": "ensure_comprehensions",
- "level": "warn",
- "message": "Comprehensions must have parentheses around them",
- "description": "This rule makes sure that parentheses are around comprehensions.",
- "context": "",
- "lineNumber": 31,
- "line": "cubes = math.cube num for num in list",
- "rule": "ensure_comprehensions"
- }
- ]
- }')
+ result.stub(:stdout).and_return(normalize_indent(<<-OUT))
+ path,lineNumber,lineNumberEnd,level,message
+ file1.coffee,31,,warn,Comprehensions must have parentheses around them
+ OUT
end
it { should warn }
@@ -59,27 +49,10 @@ describe Overcommit::Hook::PreCommit::CoffeeLint do
context 'and it reports an error' do
before do
- result.stub(:stdout).and_return('{
- "file1.coffee": [
- {
- "name": "duplicate_key",
- "level": "error",
- "message": "Duplicate key defined in object or class",
- "description": "Prevents defining duplicate keys in object literals and classes",
- "lineNumber": 17,
- "line": " root: foo",
- "rule": "duplicate_key"
- }
- ]
- }')
- end
-
- it { should fail_hook }
- end
-
- context 'and its output is not valid JSON' do
- before do
- result.stub(:stdout).and_return('foo')
+ result.stub(:stdout).and_return(normalize_indent(<<-OUT))
+ path,lineNumber,lineNumberEnd,level,message
+ file1.coffee,17,,error,Duplicate key defined in object or class
+ OUT
end
it { should fail_hook }
|
Use csv formatter for coffeelint
This allows us to remove the dependency on the 'json' library and
simplify the message parsing logic. It also makes it easier to parse
concatenated results of multiple calls when splitting argument lists.
|
sds_overcommit
|
train
|
749b0fb0c2ce4c5eece371f3232dadd565696413
|
diff --git a/addon/routes/edit-form-new.js b/addon/routes/edit-form-new.js
index <HASH>..<HASH> 100644
--- a/addon/routes/edit-form-new.js
+++ b/addon/routes/edit-form-new.js
@@ -20,7 +20,7 @@ export default EditFormRoute.extend({
deactivate: function() {
var model = this.get('controller').get('model');
- model.rollback();
+ model.rollbackAttributes();
if (model.get('isNew')) {
model.deleteRecord();
diff --git a/addon/routes/edit-form.js b/addon/routes/edit-form.js
index <HASH>..<HASH> 100644
--- a/addon/routes/edit-form.js
+++ b/addon/routes/edit-form.js
@@ -9,12 +9,19 @@ export default ProjectedModelFormRoute.extend({
* @type Service
*/
groupEditEventsService: Ember.inject.service('groupedit-events'),
+ deletedRecords: null,
activate() {
this._super(...arguments);
this.get('groupEditEventsService').on('groupEditRowAdded', this, this._rowAdded);
this.get('groupEditEventsService').on('groupEditRowDeleted', this, this._rowDeleted);
this.get('groupEditEventsService').on('groupEditRowsChanged', this, this._rowChanged);
+ if (!this.get('deletedRecords')) {
+ this.set('deletedRecords', Ember.A());
+ }
+ else {
+ this.get('deletedRecords').clear();
+ }
},
deactivate() {
@@ -42,7 +49,8 @@ export default ProjectedModelFormRoute.extend({
controller.send('dismissErrorMessages');
var model = controller.get('model');
- if (model && model.get('isDirty')) {
+ this._rollbackDetails(model);
+ if (model && model.get('hasDirtyAttributes')) {
model.rollback();
}
},
@@ -97,6 +105,13 @@ export default ProjectedModelFormRoute.extend({
*/
_rowDeleted: function(componentName, record) {
// Manually set isDirty flag, because its not working now when change relation props.
+ if (record.get('id')) {
+ this.get('deletedRecords').pushObject({
+ model: record.constructor.modelName,
+ id: record.get('id')
+ });
+ }
+
this.controller.get('model').send('becomeDirty');
},
@@ -111,5 +126,36 @@ export default ProjectedModelFormRoute.extend({
_rowChanged: function(componentName) {
// Manually set isDirty flag, because its not working now when change relation props.
this.controller.get('model').send('becomeDirty');
+ },
+
+ _rollbackDetails: function(model) {
+ var modelClass = model.constructor;
+ var modelProjName = this.get('modelProjection');
+ var projection = modelClass.projections.get(modelProjName);
+ var attributes = projection.attributes;
+ for (var attrName in attributes) {
+ if (!attributes.hasOwnProperty(attrName)) {
+ continue;
+ }
+
+ var attr = attributes[attrName];
+ if (attr.kind === 'hasMany') {
+ var detailModels = model.get(attrName);
+ for (var i = 0; i < detailModels.get('length'); i++) {
+ if (detailModels.objectAt(i).get('hasDirtyAttributes')) {
+ detailModels.objectAt(i).rollbackAttributes();
+ }
+ }
+ }
+ }
+
+ var _this = this;
+ this.get('deletedRecords').forEach(function(deletedRecord) {
+ _this.store.findRecord(deletedRecord.model, deletedRecord.id, {reload: false}).then(function(record) {
+ record.rollbackAttributes();
+ });
+ });
+ this.get('deletedRecords').clear();
}
+
});
|
Add rollback operation for details
on edit form's route deactivation
|
Flexberry_ember-flexberry
|
train
|
b0bdb41e072c4c9ccc545639cb255d142b1c5192
|
diff --git a/Entity/FilesOfMember.php b/Entity/FilesOfMember.php
index <HASH>..<HASH> 100644
--- a/Entity/FilesOfMember.php
+++ b/Entity/FilesOfMember.php
@@ -37,14 +37,14 @@ class FilesOfMember extends CoreEntity
/**
* @ORM\Id
- * @ORM\ManyToOne(targetEntity="BiberLtd\Core\Bundles\MemberManagementBundle\Entity\Member")
+ * @ORM\ManyToOne(targetEntity="BiberLtd\Bundle\MemberManagementBundle\Entity\Member")
* @ORM\JoinColumn(name="member", referencedColumnName="id", nullable=false)
*/
private $member;
/**
* @ORM\Id
- * @ORM\ManyToOne(targetEntity="BiberLtd\Core\Bundles\FileManagementBundle\Entity\File")
+ * @ORM\ManyToOne(targetEntity="BiberLtd\Bundle\FileManagementBundle\Entity\File")
* @ORM\JoinColumn(name="file", referencedColumnName="id", nullable=false, onDelete="CASCADE")
*/
private $file;
diff --git a/Entity/MemberGroupLocalization.php b/Entity/MemberGroupLocalization.php
index <HASH>..<HASH> 100644
--- a/Entity/MemberGroupLocalization.php
+++ b/Entity/MemberGroupLocalization.php
@@ -49,7 +49,7 @@ class MemberGroupLocalization extends CoreEntity
/**
* @ORM\Id
* @ORM\ManyToOne(
- * targetEntity="BiberLtd\Core\Bundles\MemberManagementBundle\Entity\MemberGroup",
+ * targetEntity="BiberLtd\Bundle\MemberManagementBundle\Entity\MemberGroup",
* inversedBy="localizations"
* )
* @ORM\JoinColumn(name="member_group", referencedColumnName="id", nullable=false)
diff --git a/Entity/MembersOfGroup.php b/Entity/MembersOfGroup.php
index <HASH>..<HASH> 100644
--- a/Entity/MembersOfGroup.php
+++ b/Entity/MembersOfGroup.php
@@ -57,7 +57,7 @@ class MembersOfGroup extends CoreEntity
/**
* @ORM\Id
- * @ORM\ManyToOne(targetEntity="BiberLtd\Core\Bundles\MemberManagementBundle\Entity\MemberGroup")
+ * @ORM\ManyToOne(targetEntity="BiberLtd\Bundle\MemberManagementBundle\Entity\MemberGroup")
* @ORM\JoinColumn(name="member_group", referencedColumnName="id", nullable=false, onDelete="CASCADE")
*/
private $group;
|
BF :: Annotations fixed.
|
biberltd_MemberManagementBundle
|
train
|
15bd18d2a01b218d6dbf811dad342b23072f377e
|
diff --git a/lib/searchkick/relation_indexer.rb b/lib/searchkick/relation_indexer.rb
index <HASH>..<HASH> 100644
--- a/lib/searchkick/relation_indexer.rb
+++ b/lib/searchkick/relation_indexer.rb
@@ -68,12 +68,18 @@ module Searchkick
# remove order to prevent possible warnings
relation.except(:order).find_in_batches(batch_size: batch_size) do |batch|
# prevent scope from affecting search_data as well as inline jobs
+ # Active Record runs relation calls in scoping block
+ # https://github.com/rails/rails/blob/main/activerecord/lib/active_record/relation/delegation.rb
previous_scope = klass.current_scope(true)
- begin
- klass.current_scope = nil
+ if previous_scope
+ begin
+ klass.current_scope = nil
+ yield batch
+ ensure
+ klass.current_scope = previous_scope
+ end
+ else
yield batch
- ensure
- klass.current_scope = previous_scope
end
end
else
@@ -82,11 +88,15 @@ module Searchkick
# prevent scope from affecting search_data as well as inline jobs
# note: Model.with_scope doesn't always restore scope, so use custom logic
previous_scope = Mongoid::Threaded.current_scope(klass)
- begin
- Mongoid::Threaded.set_current_scope(nil, klass)
+ if previous_scope
+ begin
+ Mongoid::Threaded.set_current_scope(nil, klass)
+ yield batch
+ ensure
+ Mongoid::Threaded.set_current_scope(previous_scope, klass)
+ end
+ else
yield batch
- ensure
- Mongoid::Threaded.set_current_scope(previous_scope, klass)
end
end
end
diff --git a/test/reindex_test.rb b/test/reindex_test.rb
index <HASH>..<HASH> 100644
--- a/test/reindex_test.rb
+++ b/test/reindex_test.rb
@@ -77,6 +77,23 @@ class ReindexTest < Minitest::Test
Product.dynamic_data = nil
end
+ def test_relation_scoping_restored
+ # TODO add test for Mongoid
+ skip unless activerecord?
+
+ assert_nil Product.current_scope
+ Product.where(name: "Product A").scoping do
+ scope = Product.current_scope
+ refute_nil scope
+
+ Product.all.reindex(refresh: true)
+
+ # note: should be reset even if we don't do it
+ assert_equal scope, Product.current_scope
+ end
+ assert_nil Product.current_scope
+ end
+
def test_relation_should_index
store_names ["Product A", "Product B"]
Searchkick.callbacks(false) do
|
Improved scoping logic and added another test [skip ci]
|
ankane_searchkick
|
train
|
f16a4e962de55e353d9979ec48f5636440ae4ffd
|
diff --git a/test/requestTransformTests.js b/test/requestTransformTests.js
index <HASH>..<HASH> 100644
--- a/test/requestTransformTests.js
+++ b/test/requestTransformTests.js
@@ -40,6 +40,20 @@ test('alters the request', (t) => {
})
})
+test('survives empty PUTs', (t) => {
+ const x = create({ baseURL: `http://localhost:${port}` })
+ let count = 0
+ x.addRequestTransform(({ data, url, method }) => {
+ count++
+ })
+ t.is(count, 0)
+ return x.post('/puts', null).then(response => {
+ t.is(response.status, 200)
+ t.is(count, 1)
+ t.deepEqual(response.data, {got: {a: 'hi'}})
+ })
+})
+
test('alters nothing for gets', (t) => {
const x = create({ baseURL: `http://localhost:${port}` })
let count = 0
|
Adds one more test for request transformations.
|
infinitered_apisauce
|
train
|
021cb1a69c1c7d4154dbe086b2f3165d60eb31d9
|
diff --git a/lib/TextBox.js b/lib/TextBox.js
index <HASH>..<HASH> 100644
--- a/lib/TextBox.js
+++ b/lib/TextBox.js
@@ -40,6 +40,14 @@ export class TextBox extends Input {
}
/**
+ * @param {{}} init
+ */
+ init(init) {
+ this.on('click', this.onClick)
+ super.init(init)
+ }
+
+ /**
* Focus the input
*/
focus() {
@@ -47,6 +55,16 @@ export class TextBox extends Input {
}
/**
+ * @param {MouseEvent} event
+ */
+ onClick(event) {
+ if(this.disabled) {
+ event.stopImmediatePropagation()
+ }
+ else this.focus()
+ }
+
+ /**
* @param {FocusEvent} event
*/
onBlur(event) {
|
TextBox: focus when clicking on a label
|
aristov_ariamodule
|
train
|
ab383d8672511875ee2a29f40952273be5d6bf21
|
diff --git a/lib/grammars/translator.ojs b/lib/grammars/translator.ojs
index <HASH>..<HASH> 100644
--- a/lib/grammars/translator.ojs
+++ b/lib/grammars/translator.ojs
@@ -8,6 +8,7 @@ ometa Translator <: JsonMLWalker {
StringExpr :attr = walk*:exprs -> tmpl.StringExpr ( attr, exprs ),
Array :attr = walk*:exprs -> tmpl.Array ( attr, exprs ),
RegExp :attr -> tmpl.RegExp ( attr ),
+ RawJS :attr -> tmpl.RawJS ( attr ),
Lambda :attr = :args walk:body -> tmpl.Lambda ( attr, args, body ),
FunArgs :attr = walk*:args -> tmpl.FunArgs ( attr, args ),
FunBody :attr = walk*:exprs -> tmpl.FunBody ( attr, exprs ),
diff --git a/lib/templates.js b/lib/templates.js
index <HASH>..<HASH> 100644
--- a/lib/templates.js
+++ b/lib/templates.js
@@ -34,6 +34,12 @@ module.exports = function (_) {
return join('/', attr.body(), '/', attr.flags());
},
+ RawJS: function (attr) {
+ // This is really ugly but we need a way to distinguish raw javascript in implicit_self.
+ // This is one of the reasons why we would like to implicitly add self in an earlier pass instead.
+ return join('`$js$`', attr.value());
+ },
+
Lambda: function (attr, args, body) {
var params = args.slice(2).map(function (Id) {
var name = Id.value();
@@ -85,6 +91,8 @@ module.exports = function (_) {
if (compiled_expr) {
if (compiled_expr.match(/^\[\"send:args:\"\]/)) {
return join("$elf", compiled_expr);
+ } else if (compiled_expr.match(/^\`\$js\$\`/)) {
+ return compiled_expr.replace(/^\`\$js\$\`/, '');
} else {
return compiled_expr.replace(/^[a-zA-Z$_]+/, function (m) {
if (!m.match(/self|function/)) {
diff --git a/test/translator_test.js b/test/translator_test.js
index <HASH>..<HASH> 100644
--- a/test/translator_test.js
+++ b/test/translator_test.js
@@ -87,6 +87,12 @@ describe("Translator", function () {
});
});
+ describe("RawJS", function() {
+ it("should translate raw javascript", function () {
+ compile('`var foo = function () {};`').should.eql("var foo = function () {};");
+ });
+ });
+
describe("Lambdas", function() {
it("should translate empty lambdas", function() {
compile('{}').should.eql(join_nl(
|
added translator rule for raw js
|
pmh_espresso
|
train
|
8334011aa0ac101472401b23fcdd1fe4efddd34d
|
diff --git a/conn/commands.go b/conn/commands.go
index <HASH>..<HASH> 100644
--- a/conn/commands.go
+++ b/conn/commands.go
@@ -65,7 +65,7 @@ func init() {
// STORE 2:4 +FLAGS (\Deleted) Mark messages as deleted
// STORE 2:4 -FLAGS (\Seen) Mark messages as unseen
// STORE 2:4 FLAGS (\Seen \Deleted) Replace flags
- registerCommand("((?i)UID )?(?i:STORE) ("+sequenceSet+") ([\\+\\-])?(?i:FLAGS(\\.SILENT)?) \\(?([\\\\A-z0-9]+)\\)?", cmdStoreFlags)
+ registerCommand("((?i)UID )?(?i:STORE) ("+sequenceSet+") ([\\+\\-])?(?i:FLAGS(\\.SILENT)?) \\(?([\\\\A-z0-9\\s]+)\\)?", cmdStoreFlags)
}
func registerCommand(matchExpr string, handleFunc func(commandArgs, *Conn)) error {
diff --git a/server_test.go b/server_test.go
index <HASH>..<HASH> 100644
--- a/server_test.go
+++ b/server_test.go
@@ -154,9 +154,9 @@ func TestStore(t *testing.T) {
r.expect(t, "abcd.124 OK STORE Completed")
r.cConn.PrintfLine("abcd.125 uid STORE 3:* FLAGS (\\Deleted \\Seen)")
- r.expect(t, "* 1 FETCH (FLAGS (\\Deleted \\Seen))")
- r.expect(t, "* 2 FETCH (FLAGS (\\Deleted \\Seen))")
- r.expect(t, "* 3 FETCH (FLAGS (\\Deleted \\Seen))")
+ r.expect(t, "* 1 FETCH (FLAGS (\\Seen \\Deleted))")
+ r.expect(t, "* 2 FETCH (FLAGS (\\Seen \\Deleted))")
+ r.expect(t, "* 3 FETCH (FLAGS (\\Seen \\Deleted))")
r.expect(t, "abcd.125 OK STORE Completed")
}
|
STORE command recognizes multiple flags
A missing space character in the STORE command regexp meant that the server
only recognized the first flag, causing the last STORE test to fail.
This change adds the space character to the regexp, and alters the test
to match the order of the returned flags
|
jordwest_imap-server
|
train
|
112fe3c1d4b9fadaf51b50354956d2f3c0c1afb6
|
diff --git a/core/model/SiteTree.php b/core/model/SiteTree.php
index <HASH>..<HASH> 100755
--- a/core/model/SiteTree.php
+++ b/core/model/SiteTree.php
@@ -1614,7 +1614,7 @@ class SiteTree extends DataObject implements PermissionProvider,i18nEntityProvid
'BackLinkTracking',
'SiteTree',
array(
- 'Title' => 'Title'
+ 'Title' => 'Title',
),
'"ChildID" = ' . $this->ID,
'',
@@ -1629,10 +1629,28 @@ class SiteTree extends DataObject implements PermissionProvider,i18nEntityProvid
));
}
+ $virtualPagesNote = new LiteralField('BackLinksNote', '<p>' . _t('SiteTree.VIRTUALPAGESLINKING', 'The following virtual pages pull from this page:') . '</p>');
+ $virtualPagesTable = new TableListField(
+ 'VirtualPageTracking',
+ 'SiteTree',
+ array(
+ 'Title' => 'Title',
+ 'AbsoluteLink' => 'URL'
+ ),
+ '"CopyContentFromID" = ' . $this->ID,
+ ''//,
+ // 'LEFT JOIN "SiteTree_LinkTracking" ON "SiteTree"."ID" = "SiteTree_LinkTracking"."SiteTreeID"'
+ );
+ $virtualPagesTable->setFieldFormatting(array(
+ 'Title' => '<a href=\"admin/show/$ID\">$Title</a>'
+ ));
+ $virtualPagesTable->setPermissions(array(
+ 'show',
+ 'export'
+ ));
+
// Lay out the fields
$fields = new FieldSet(
- // Add a field with a bit of metadata for concurrent editing. The fact that we're using
- // non-standard attributes does not really matter, all modern UA's just ignore em.
new TabSet("Root",
$tabContent = new TabSet('Content',
$tabMain = new Tab('Main',
@@ -1698,6 +1716,10 @@ class SiteTree extends DataObject implements PermissionProvider,i18nEntityProvid
$tabBacklinks = new Tab('Backlinks',
$backLinksNote,
$backLinksTable
+ ),
+ $tabVirtualPages = new Tab('VirtualPages',
+ $virtualPagesNote,
+ $virtualPagesTable
)
),
$tabAccess = new Tab('Access',
|
MINOR track virtual pages that link to the current page (from r<I>) (from r<I>)
git-svn-id: svn://svn.silverstripe.com/silverstripe/open/modules/sapphire/trunk@<I> <I>b<I>ca-7a2a-<I>-9d3b-<I>d<I>a<I>a9
|
silverstripe_silverstripe-framework
|
train
|
61e7ff9a474a17566878cc873dac00f297ce54b3
|
diff --git a/aiogram/types/chat.py b/aiogram/types/chat.py
index <HASH>..<HASH> 100644
--- a/aiogram/types/chat.py
+++ b/aiogram/types/chat.py
@@ -217,7 +217,7 @@ class ChatActions(helper.Helper):
await asyncio.sleep(sleep)
@classmethod
- def calc_timeout(cls, text, timeout=.05):
+ def calc_timeout(cls, text, timeout=.8):
"""
Calculate timeout for text
@@ -235,6 +235,8 @@ class ChatActions(helper.Helper):
:param sleep: sleep timeout
:return:
"""
+ if isinstance(sleep, str):
+ sleep = cls.calc_timeout(sleep)
await cls._do(cls.TYPING, sleep)
@classmethod
|
Change timeout calculator in ChatActions helper.
|
aiogram_aiogram
|
train
|
b418c336c230fad6165a48a4a954ef9bbae01e63
|
diff --git a/src/AssetLibraryServiceProvider.php b/src/AssetLibraryServiceProvider.php
index <HASH>..<HASH> 100644
--- a/src/AssetLibraryServiceProvider.php
+++ b/src/AssetLibraryServiceProvider.php
@@ -2,11 +2,10 @@
namespace Thinktomorrow\AssetLibrary;
+use Illuminate\Database\Eloquent\Factory as EloquentFactory;
use Illuminate\Routing\Router;
-use Thinktomorrow\Locale\Locale;
use Illuminate\Support\ServiceProvider;
use Thinktomorrow\AssetLibrary\Models\Asset;
-use Illuminate\Database\Eloquent\Factory as EloquentFactory;
class AssetLibraryServiceProvider extends ServiceProvider
{
diff --git a/src/Models/Asset.php b/src/Models/Asset.php
index <HASH>..<HASH> 100644
--- a/src/Models/Asset.php
+++ b/src/Models/Asset.php
@@ -2,13 +2,13 @@
namespace Thinktomorrow\AssetLibrary\Models;
+use Illuminate\Database\Eloquent\Model;
use Illuminate\Http\File;
-use Spatie\MediaLibrary\Media;
-use Thinktomorrow\Locale\Locale;
use Illuminate\Http\UploadedFile;
-use Illuminate\Database\Eloquent\Model;
use Spatie\MediaLibrary\HasMedia\HasMediaTrait;
use Spatie\MediaLibrary\HasMedia\Interfaces\HasMediaConversions;
+use Spatie\MediaLibrary\Media;
+use Thinktomorrow\Locale\Locale;
class Asset extends Model implements HasMediaConversions
{
@@ -88,7 +88,7 @@ class Asset extends Model implements HasMediaConversions
*
* @param Model $model
* @param string $type
- * @param null $locale
+ * @param null|string $locale
* @return Model
*/
public function attachToModel(Model $model, $type = '', $locale = null)
@@ -118,6 +118,9 @@ class Asset extends Model implements HasMediaConversions
return basename($this->getFileUrl($size));
}
+ /**
+ * @return string
+ */
public function getFileUrl($size = '')
{
$media = $this->getMedia();
diff --git a/src/Traits/AssetTrait.php b/src/Traits/AssetTrait.php
index <HASH>..<HASH> 100644
--- a/src/Traits/AssetTrait.php
+++ b/src/Traits/AssetTrait.php
@@ -2,9 +2,9 @@
namespace Thinktomorrow\AssetLibrary\Traits;
-use Thinktomorrow\Locale\Locale;
use Illuminate\Support\Collection;
use Thinktomorrow\AssetLibrary\Models\Asset;
+use Thinktomorrow\Locale\Locale;
trait AssetTrait
{
@@ -25,6 +25,11 @@ trait AssetTrait
return basename($this->getFileUrl($type, '', $locale));
}
+ /**
+ * @param string $locale
+ *
+ * @return string
+ */
public function getFileUrl($type = '', $size = '', $locale = null)
{
if ($this->assets->first() === null || $this->assets->first()->pivot === null) {
|
Scrutinizer Auto-Fixes
This commit consists of patches automatically generated for this project on <URL>
|
thinktomorrow_assetlibrary
|
train
|
83996e3b7d55d7b29dfde10e4cfb1f94d69dcef0
|
diff --git a/src/components/cms/DocumentRouting.php b/src/components/cms/DocumentRouting.php
index <HASH>..<HASH> 100644
--- a/src/components/cms/DocumentRouting.php
+++ b/src/components/cms/DocumentRouting.php
@@ -248,7 +248,8 @@ class DocumentRouting implements CmsRouting
/**
* @param $request
- * @param $cmsComponent
+ * @param CmsComponent $cmsComponent
+ * @throws \Exception
*/
private function createNewDocument($request, $cmsComponent)
{
diff --git a/src/storage/factories/DocumentFactory.php b/src/storage/factories/DocumentFactory.php
index <HASH>..<HASH> 100644
--- a/src/storage/factories/DocumentFactory.php
+++ b/src/storage/factories/DocumentFactory.php
@@ -218,8 +218,11 @@ class DocumentFactory
$config->set('HTML.SafeIframe', true);
$config->set('URI.SafeIframeRegexp', '%^(https?:)?//(www\.youtube(?:-nocookie)?\.com/embed/|player\.vimeo\.com/video/)%'); //allow YouTube and Vimeo
$config->set('Attr.AllowedFrameTargets', array('_blank'));
- $config->set('HTML.AllowedAttributes', 'src, alt, href, target, frameborder');
+ $config->set('HTML.AllowedAttributes', 'src, alt, href, target, frameborder, data-original');
$config->set('URI.AllowedSchemes', array('data' => true, 'http' => true, 'https' => true));
+ $config->set('Cache.DefinitionImpl', null); // remove this later!
+ $def = $config->getHTMLDefinition(true);
+ $def->addAttribute('img', 'data-original', 'Text');
self::$purifier = new HTMLPurifier($config);
return self::$purifier;
}
diff --git a/src/templates/documents/fieldTypes/Rich-Text.php b/src/templates/documents/fieldTypes/Rich-Text.php
index <HASH>..<HASH> 100644
--- a/src/templates/documents/fieldTypes/Rich-Text.php
+++ b/src/templates/documents/fieldTypes/Rich-Text.php
@@ -9,19 +9,48 @@
</div>
<textarea style="display:none;" id="summernote_<?= $field->slug ?>_container_<?= $summernoteInstances ?>" name="<?= $fieldPrefix ?>[<?= $field->slug ?>][]"></textarea>
<script>
- $(document).ready(function () {
- $('#summernote_<?=str_replace(']', '-', str_replace('[', '-', $fieldPrefix)) . $field->slug?>_rte_<?=$summernoteInstances?>').summernote({
- height: 300,
- toolbar: [
- /*[groupname, [button list]]*/
- ['style', ['bold', 'italic', 'underline', 'clear', 'style']],
- ['font', ['strikethrough', 'superscript', 'subscript']],
- ['para', ['ul', 'ol']],
- ['insert', ['table', 'link', 'picture', 'video']],
- ['misc', ['codeview']]
- ]
+ <?php $summernoteName = str_replace(']', '-', str_replace('[', '-', $fieldPrefix)) . $field->slug . '_rte_' . $summernoteInstances; ?>
+ function uploadImage (file) {
+ "use strict";
+ var xhr,
+ formData;
+ console.log(file);
+ formData = new FormData();
+ formData.append("file", file, file.name);
+ xhr = new XMLHttpRequest();
+ xhr.open("POST", cmsSubfolders + '/images/new-ajax', true);
+ xhr.onreadystatechange = function () {
+ if (xhr.readyState === 4 && xhr.status === 200) {
+ var image = JSON.parse(xhr.responseText);
+ console.log(image);
+ var imageNode = $('<img>').attr('src', subfolders + 'images/' + image.set[smallestImage])
+ .attr('data-original', image.set['original']);
+ $('#summernote_<?=$summernoteName?>').summernote("insertNode", imageNode[0]);
+ }
+ };
+ xhr.send(formData);
+ }
+
+ $(document).ready(function () {
+ $('#summernote_<?=$summernoteName?>').summernote({
+ height: 300,
+ toolbar: [
+ /*[groupname, [button list]]*/
+ ['style', ['bold', 'italic', 'underline', 'clear', 'style']],
+ ['font', ['strikethrough', 'superscript', 'subscript']],
+ ['para', ['ul', 'ol']],
+ ['insert', ['table', 'link', 'picture', 'video']],
+ ['misc', ['codeview']]
+ ],
+ callbacks: {
+ onImageUpload: function (image) {
+ uploadImage(image[0]);
+ }
+ }
+ });
});
- });
+
+
</script>
<?php if (!isset($GLOBALS['rteList'])) {
$GLOBALS['rteList'] = array();
|
Added ajax upload for image in rich text editor, adding them to the gallery
|
jenskooij_cloudcontrol
|
train
|
96e85b88cb0df5259c519ab4e9b495f29e0f0369
|
diff --git a/karma.conf.js b/karma.conf.js
index <HASH>..<HASH> 100644
--- a/karma.conf.js
+++ b/karma.conf.js
@@ -52,10 +52,10 @@ module.exports = function(config) {
reporters: ['progress', 'osx', 'saucelabs'],
// Web server port
- port: 9876,
+ //port: 9876,
// cli runner port
- runnerPort: 9100,
+ //runnerPort: 9100,
// Enable / disable colors in the output (reporters and logs)
colors: true,
|
fix(tests): removes port config to avoid sauce related conflicts
|
platanus_angular-restmod
|
train
|
8ae75df6610cc8e5698cb96ca58dcb136dc1ed9f
|
diff --git a/tests/downscaling/diagnostics.py b/tests/downscaling/diagnostics.py
index <HASH>..<HASH> 100644
--- a/tests/downscaling/diagnostics.py
+++ b/tests/downscaling/diagnostics.py
@@ -9,8 +9,8 @@ from scipy.stats.kde import gaussian_kde
from . import utils as tu
from xclim.downscaling.correction import DetrendedQuantileMapping
+from xclim.downscaling.correction import EmpiricalQuantileMapping
from xclim.downscaling.correction import QuantileDeltaMapping
-from xclim.downscaling.correction import QuantileMapping
from xclim.downscaling.processing import adapt_freq
@@ -36,7 +36,7 @@ def synth_rainfall(shape, scale=1, wet_freq=0.25, size=1):
def cannon_2015_figure_2():
n = 10000
obs, hist, fut = tu.cannon_2015_rvs(n, random=False)
- QM = QuantileMapping(kind="*", group="time", interp="linear")
+ QM = EmpiricalQuantileMapping(kind="*", group="time", interp="linear")
QM.train(obs, hist)
fut_eqm = QM.predict(fut)
diff --git a/xclim/downscaling/base.py b/xclim/downscaling/base.py
index <HASH>..<HASH> 100644
--- a/xclim/downscaling/base.py
+++ b/xclim/downscaling/base.py
@@ -208,9 +208,9 @@ class Grouper(ParametrizableClass):
else:
grpd = self.group(da)
- dims = [self.dim]
+ dims = self.dim
if not main_only:
- dims += [dim for dim in self.add_dims if dim in grpd.dims]
+ dims = [dims] + [dim for dim in self.add_dims if dim in grpd.dims]
if isinstance(func, str):
out = getattr(grpd, func)(dim=dims, **kwargs)
|
Small tweaks for cftime calendars
|
Ouranosinc_xclim
|
train
|
b91228798491d22f9c921536d1026b533b240a03
|
diff --git a/Bundle/PageBundle/Helper/PageHelper.php b/Bundle/PageBundle/Helper/PageHelper.php
index <HASH>..<HASH> 100644
--- a/Bundle/PageBundle/Helper/PageHelper.php
+++ b/Bundle/PageBundle/Helper/PageHelper.php
@@ -122,6 +122,12 @@ class PageHelper extends ViewHelper
{
if (!empty($parameters['id']) && !preg_match('/^ref_/', $parameters['id'])) {
$page = $this->em->getRepository('VictoireCoreBundle:View')->findOneById($parameters['id']);
+
+ $entity = null;
+ if (method_exists($page, 'getBusinessEntity')) {
+ $entity = $page->getBusinessEntity();
+ }
+ $this->checkPageValidity($page, $entity, $parameters);
} else {
$viewReference = $this->viewCacheHelper->getReferenceByParameters($parameters);
if ($viewReference === null && !empty($parameters['viewId'])) {
|
If we try to find a page in database, check page validity too
|
Victoire_victoire
|
train
|
4c9a31541310b4e833e8ec99153a9a2fdf20cade
|
diff --git a/pymite/api/adapters.py b/pymite/api/adapters.py
index <HASH>..<HASH> 100644
--- a/pymite/api/adapters.py
+++ b/pymite/api/adapters.py
@@ -289,6 +289,7 @@ class Tracker(MiteAPI):
""" stop the tracker. """
path = partial(_path, self.adapter)
if not id and not self.actual:
+ # TODO: this is wrong in case of a running timer and a fresh setup
raise Exception('No timer running')
elif not id and self.actual:
path = path(self.actual)
|
added TODO as this is an error found by pytesting
|
damnit_pymite
|
train
|
ecbd95fbe5d5d957102e5400979e90f3d6525a0a
|
diff --git a/src/test/java/com/cronutils/utils/descriptor/CronDescriptorQuartzIntegrationTest.java b/src/test/java/com/cronutils/utils/descriptor/CronDescriptorQuartzIntegrationTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/cronutils/utils/descriptor/CronDescriptorQuartzIntegrationTest.java
+++ b/src/test/java/com/cronutils/utils/descriptor/CronDescriptorQuartzIntegrationTest.java
@@ -103,7 +103,7 @@ public class CronDescriptorQuartzIntegrationTest {
* Issue #43: getting bad description for expression
* @throws Exception
*/
- //TODO
+ //TODO enable
public void testEveryDayEveryFourHoursFromHour2() throws Exception {
assertExpression("0 0 2/4 * * ?", "");
}
@@ -111,7 +111,7 @@ public class CronDescriptorQuartzIntegrationTest {
/*
* Issue #103
*/
- @Test
+ //TODO enable
public void testDescriptionDayOfWeek() {
assertExpression("* 0/1 * ? * TUE", "every minute at Tuesday day");
}
|
Issue #<I>: Comment test annotation, until we get issue fixed.
|
jmrozanec_cron-utils
|
train
|
0159f0f553964339caf2fb1e5346a28cf66fa737
|
diff --git a/quart/app.py b/quart/app.py
index <HASH>..<HASH> 100644
--- a/quart/app.py
+++ b/quart/app.py
@@ -1378,13 +1378,13 @@ class Quart(PackageStatic):
config = HyperConfig()
config.access_log_format = "%(h)s %(r)s %(s)s %(b)s %(D)s"
- config.access_logger = create_serving_logger() # type: ignore
+ config.accesslog = create_serving_logger()
config.bind = [f"{host}:{port}"]
config.ca_certs = ca_certs
config.certfile = certfile
if debug is not None:
self.debug = debug
- config.error_logger = config.access_logger # type: ignore
+ config.errorlog = config.accesslog
config.keyfile = keyfile
config.use_reloader = use_reloader
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@ INSTALL_REQUIRES = [
'aiofiles',
'blinker',
'click',
- 'hypercorn >= 0.6.0',
+ 'hypercorn >= 0.7.0',
'itsdangerous',
'jinja2',
'multidict',
|
Update to Hypercorn <I> as minimum version
This fixes an issue whereby the access log is not shown when running
Quart with the app.run() method. In development the log should show by
default.
|
pgjones_quart
|
train
|
727950cda023db0bf0e4f50172b21285c2b6297a
|
diff --git a/client/lib/analytics/ad-tracking.js b/client/lib/analytics/ad-tracking.js
index <HASH>..<HASH> 100644
--- a/client/lib/analytics/ad-tracking.js
+++ b/client/lib/analytics/ad-tracking.js
@@ -542,7 +542,7 @@ function recordSignupStartInFloodlight() {
const params = {
src: TRACKING_IDS.dcmFloodlightAdvertiserId,
type: 'wordp0',
- cat: 'signu0',
+ cat: 'pre-p0',
ord: floodlightCacheBuster()
};
|
Ad Tracking: Update sign up start DCM Floodlight category name
|
Automattic_wp-calypso
|
train
|
65a7d49ba7d0bf521543c80706245774a29e66cf
|
diff --git a/project/merge.go b/project/merge.go
index <HASH>..<HASH> 100644
--- a/project/merge.go
+++ b/project/merge.go
@@ -29,15 +29,12 @@ var (
type rawService map[string]interface{}
type rawServiceMap map[string]rawService
-// Merge read the specified byte array, which is the content of a yaml composefile,
-// and merge it into the current project.
-func Merge(p *Project, bytes []byte) (map[string]*ServiceConfig, error) {
+func mergeProject(p *Project, bytes []byte) (map[string]*ServiceConfig, error) {
configs := make(map[string]*ServiceConfig)
datas := make(rawServiceMap)
- err := yaml.Unmarshal(bytes, &datas)
- if err != nil {
- logrus.Fatalf("Could not parse config for project %s : %v", p.Name, err)
+ if err := yaml.Unmarshal(bytes, &datas); err != nil {
+ return nil, err
}
for name, data := range datas {
@@ -50,7 +47,7 @@ func Merge(p *Project, bytes []byte) (map[string]*ServiceConfig, error) {
datas[name] = data
}
- err = utils.Convert(datas, &configs)
+ err := utils.Convert(datas, &configs)
return configs, err
}
diff --git a/project/project.go b/project/project.go
index <HASH>..<HASH> 100644
--- a/project/project.go
+++ b/project/project.go
@@ -121,9 +121,10 @@ func (p *Project) AddConfig(name string, config *ServiceConfig) error {
// service configuration to the project.
func (p *Project) Load(bytes []byte) error {
configs := make(map[string]*ServiceConfig)
- configs, err := Merge(p, bytes)
+ configs, err := mergeProject(p, bytes)
if err != nil {
- log.Fatalf("Could not parse config for project %s : %v", p.Name, err)
+ log.Errorf("Could not parse config for project %s : %v", p.Name, err)
+ return err
}
for name, config := range configs {
diff --git a/project/project_test.go b/project/project_test.go
index <HASH>..<HASH> 100644
--- a/project/project_test.go
+++ b/project/project_test.go
@@ -2,6 +2,7 @@ package project
import (
"fmt"
+ "strings"
"testing"
)
@@ -19,3 +20,29 @@ func TestEventEquality(t *testing.T) {
t.Fatal("Events match")
}
}
+
+func TestParseWithBadContent(t *testing.T) {
+ p := NewProject(&Context{
+ ComposeBytes: []byte("garbage"),
+ })
+
+ err := p.Parse()
+ if err == nil {
+ t.Fatal("Should have failed parse")
+ }
+
+ if !strings.HasPrefix(err.Error(), "yaml: unmarshal errors") {
+ t.Fatal("Should have failed parse", err)
+ }
+}
+
+func TestParseWithGoodContent(t *testing.T) {
+ p := NewProject(&Context{
+ ComposeBytes: []byte("not-garbage:\n image: foo"),
+ })
+
+ err := p.Parse()
+ if err != nil {
+ t.Fatal(err)
+ }
+}
|
Do not log fatal while parsing
|
docker_libcompose
|
train
|
a5f1e0606ffe38ab01a1ebf9462ebf75b9831830
|
diff --git a/app/models/rubygem.rb b/app/models/rubygem.rb
index <HASH>..<HASH> 100644
--- a/app/models/rubygem.rb
+++ b/app/models/rubygem.rb
@@ -18,6 +18,7 @@ class Rubygem < ActiveRecord::Base
validates_presence_of :name
validates_uniqueness_of :name
+ validates_format_of :name, :with => /(?=[^0-9]+)/, :message => "must include at least one letter."
cattr_accessor :source_index
attr_accessor :spec, :path, :processing
diff --git a/test/unit/rubygem_test.rb b/test/unit/rubygem_test.rb
index <HASH>..<HASH> 100644
--- a/test/unit/rubygem_test.rb
+++ b/test/unit/rubygem_test.rb
@@ -18,6 +18,22 @@ class RubygemTest < ActiveSupport::TestCase
@rubygem = Factory.build(:rubygem)
end
+ context "numbers in rubygem name" do
+ should "not be valid if name consists solely of numbers" do
+ @rubygem.name = "123456"
+ assert !@rubygem.valid?
+ assert_equal "must include at least one letter.", @rubygem.errors.on(:name)
+ end
+ should "be valid if name has numbers in it" do
+ @rubygem.name = "123test123"
+ assert @rubygem.valid?
+ end
+ should "be valid if name has no numbers in it" do
+ @rubygem.name = "test"
+ assert @rubygem.valid?
+ end
+ end
+
context "with a user" do
setup do
@user = Factory(:user)
|
Making sure gems cannot have only numbers as their name. Closes #8
|
rubygems_rubygems.org
|
train
|
4cf51983df1a1f98364994a9965f0f3f1943116a
|
diff --git a/holoviews/plotting/mpl/element.py b/holoviews/plotting/mpl/element.py
index <HASH>..<HASH> 100644
--- a/holoviews/plotting/mpl/element.py
+++ b/holoviews/plotting/mpl/element.py
@@ -515,7 +515,11 @@ class LegendPlot(ElementPlot):
legend_position = param.ObjectSelector(objects=['inner', 'right',
'bottom', 'top',
- 'left', 'best'],
+ 'left', 'best',
+ 'top_right',
+ 'top_left',
+ 'bottom_left',
+ 'bottom_right'],
default='inner', doc="""
Allows selecting between a number of predefined legend position
options. The predefined options may be customized in the
@@ -529,7 +533,11 @@ class LegendPlot(ElementPlot):
ncol=3, loc=3, mode="expand", borderaxespad=0.),
'bottom': dict(ncol=3, mode="expand", loc=2,
bbox_to_anchor=(0., -0.25, 1., .102),
- borderaxespad=0.1)}
+ borderaxespad=0.1),
+ 'top_right': dict(loc=1),
+ 'top_left': dict(loc=2),
+ 'bottom_left': dict(loc=3),
+ 'bottom_right': dict(loc=4)}
|
Matplotlib backend now supports same legend_position as bokeh
Addresses issue #<I>
|
pyviz_holoviews
|
train
|
55d37f5287e65dc1989be34c75048692b3ea42f0
|
diff --git a/salt/states/test.py b/salt/states/test.py
index <HASH>..<HASH> 100644
--- a/salt/states/test.py
+++ b/salt/states/test.py
@@ -77,7 +77,9 @@ def succeed_without_changes(name, **kwargs): # pylint: disable=unused-argument
name
A unique string.
"""
- ret = {"name": name, "changes": {}, "result": True, "comment": "Success!"}
+ comment = kwargs.get("comment", "Success!")
+
+ ret = {"name": name, "changes": {}, "result": True, "comment": comment}
return ret
@@ -90,7 +92,9 @@ def fail_without_changes(name, **kwargs): # pylint: disable=unused-argument
name:
A unique string.
"""
- ret = {"name": name, "changes": {}, "result": False, "comment": "Failure!"}
+ comment = kwargs.get("comment", "Failure!")
+
+ ret = {"name": name, "changes": {}, "result": False, "comment": comment}
if __opts__["test"]:
ret["result"] = False
@@ -108,7 +112,9 @@ def succeed_with_changes(name, **kwargs): # pylint: disable=unused-argument
name:
A unique string.
"""
- ret = {"name": name, "changes": {}, "result": True, "comment": "Success!"}
+ comment = kwargs.get("comment", "Success!")
+
+ ret = {"name": name, "changes": {}, "result": True, "comment": comment}
# Following the docs as written here
# http://docs.saltstack.com/ref/states/writing.html#return-data
@@ -134,6 +140,8 @@ def fail_with_changes(name, **kwargs): # pylint: disable=unused-argument
name:
A unique string.
"""
+ comment = kwargs.get("comment", "Failure!")
+
ret = {"name": name, "changes": {}, "result": False, "comment": "Failure!"}
# Following the docs as written here
diff --git a/tests/unit/states/test_test.py b/tests/unit/states/test_test.py
index <HASH>..<HASH> 100644
--- a/tests/unit/states/test_test.py
+++ b/tests/unit/states/test_test.py
@@ -36,6 +36,12 @@ class TestTestCase(TestCase, LoaderModuleMockMixin):
ret.update({"comment": "Success!"})
self.assertDictEqual(test.succeed_without_changes("salt"), ret)
+ with patch.dict(test.__opts__, {"test": False}):
+ ret.update({"comment": "A success comment!"})
+ self.assertDictEqual(
+ test.succeed_without_changes("salt", comment="A success comment!"), ret
+ )
+
def test_fail_without_changes(self):
"""
Test to returns failure.
@@ -45,6 +51,12 @@ class TestTestCase(TestCase, LoaderModuleMockMixin):
ret.update({"comment": "Failure!"})
self.assertDictEqual(test.fail_without_changes("salt"), ret)
+ with patch.dict(test.__opts__, {"test": False}):
+ ret.update({"comment": "A failure comment!"})
+ self.assertDictEqual(
+ test.fail_without_changes("salt", comment="A failure comment!"), ret
+ )
+
def test_succeed_with_changes(self):
"""
Test to returns successful and changes is not empty
@@ -65,6 +77,23 @@ class TestTestCase(TestCase, LoaderModuleMockMixin):
)
self.assertDictEqual(test.succeed_with_changes("salt"), ret)
+ with patch.dict(test.__opts__, {"test": False}):
+ ret.update(
+ {
+ "changes": {
+ "testing": {
+ "new": "Something pretended" " to change",
+ "old": "Unchanged",
+ }
+ },
+ "comment": "A success comment!",
+ "result": True,
+ }
+ )
+ self.assertDictEqual(
+ test.succeed_with_changes("salt", comment="A success comment!"), ret
+ )
+
def test_fail_with_changes(self):
"""
Test to returns failure and changes is not empty.
@@ -85,6 +114,23 @@ class TestTestCase(TestCase, LoaderModuleMockMixin):
)
self.assertDictEqual(test.succeed_with_changes("salt"), ret)
+ with patch.dict(test.__opts__, {"test": False}):
+ ret.update(
+ {
+ "changes": {
+ "testing": {
+ "new": "Something pretended" " to change",
+ "old": "Unchanged",
+ }
+ },
+ "comment": "A failure comment!",
+ "result": True,
+ }
+ )
+ self.assertDictEqual(
+ test.succeed_with_changes("salt", comment="A failure comment!"), ret
+ )
+
def test_configurable_test_state(self):
"""
Test test.configurable_test_state with and without comment
|
Update the various succeed_with and fail_with functions to use comment when passed.
|
saltstack_salt
|
train
|
9be9abc6825b0f9a7f0e540eea5b7e96c2f663d8
|
diff --git a/uncompyle6/parsers/parse2.py b/uncompyle6/parsers/parse2.py
index <HASH>..<HASH> 100644
--- a/uncompyle6/parsers/parse2.py
+++ b/uncompyle6/parsers/parse2.py
@@ -397,8 +397,8 @@ class Python2Parser(PythonParser):
return
def reduce_is_invalid(self, rule, ast, tokens, first, last):
- if not tokens:
- return
+ if tokens is None:
+ return False
lhs = rule[0]
if lhs in ('augassign1', 'augassign2') and ast[0][0] == 'and':
return True
diff --git a/uncompyle6/parsers/parse24.py b/uncompyle6/parsers/parse24.py
index <HASH>..<HASH> 100644
--- a/uncompyle6/parsers/parse24.py
+++ b/uncompyle6/parsers/parse24.py
@@ -55,7 +55,7 @@ class Python24Parser(Python25Parser):
invalid = super(Python24Parser,
self).reduce_is_invalid(rule, ast,
tokens, first, last)
- if invalid:
+ if invalid or tokens is None:
return invalid
# FiXME: this code never gets called...
diff --git a/uncompyle6/parsers/parse26.py b/uncompyle6/parsers/parse26.py
index <HASH>..<HASH> 100644
--- a/uncompyle6/parsers/parse26.py
+++ b/uncompyle6/parsers/parse26.py
@@ -258,7 +258,7 @@ class Python26Parser(Python2Parser):
invalid = super(Python26Parser,
self).reduce_is_invalid(rule, ast,
tokens, first, last)
- if invalid:
+ if invalid or tokens is None:
return invalid
if rule == ('and', ('expr', 'jmp_false', 'expr', '\\e_come_from_opt')):
# Test that jmp_false jumps to the end of "and"
|
handle newer parser reduction behavior
|
rocky_python-uncompyle6
|
train
|
dc2ca81f1057599d2bd8ddba819c3f10690ebb4c
|
diff --git a/lib/nightwatch-api.js b/lib/nightwatch-api.js
index <HASH>..<HASH> 100644
--- a/lib/nightwatch-api.js
+++ b/lib/nightwatch-api.js
@@ -10,6 +10,7 @@ const ClientManager = require.main.require('nightwatch/lib/runner/clientmanager'
const ClientRunner = require.main.require('nightwatch/lib/runner/cli/clirunner')
const ChildProcess = require.main.require('nightwatch/lib/runner/cli/child-process')
const Utils = require.main.require('nightwatch/lib/util/utils')
+const Protocol = require.main.require('nightwatch/lib/api/protocol')
process.on('unhandledRejection', (reason, p) => {
console.error('Unhandled Rejection at: Promise', p, 'reason:', reason)
@@ -20,6 +21,7 @@ module.exports = class NightwatchApi {
this.client = new ClientManager()
this.client.init(options)
this.client.api('currentEnv', options.currentEnv)
+ this.protocol = Protocol(this.client.get())
patchEmitter(this.client)
}
@@ -42,12 +44,16 @@ module.exports = class NightwatchApi {
yield new Promise((resolve, reject) => {
console.log('Taking screenshot to ', filePath)
- this.client.get().api.saveScreenshot(filePath, function (result, err) {
- console.log('Screenshot creation result', result, err)
- if (err) return reject(err)
- resolve(result)
+ this.protocol.screenshot(false, (response) => {
+ if (response.state !== 'success') {
+ reject(new Error('Creating screenshot was not successful. Response was:\n' + require('util').inspect(response)))
+ }
+
+ this.client.get().saveScreenshotToFile(filePath, response.value, (err) => {
+ if (err) reject(err)
+ resolve()
+ })
})
- return this.client.get().start()
})
const content = yield fs.readFile(filePath, 'base64')
|
chore(Improve screenshot creation):
|
mucsi96_nightwatch-cucumber
|
train
|
beb3a26fef4a9836c8a30b1d4f65e09b226dcbc6
|
diff --git a/lib/mongoose/delete.js b/lib/mongoose/delete.js
index <HASH>..<HASH> 100644
--- a/lib/mongoose/delete.js
+++ b/lib/mongoose/delete.js
@@ -46,7 +46,7 @@ module.exports = exports = function deletePlugin(schema /*, schemaOptns*/ ) {
* @name del
* @function del
* @description delete model instance. more business logics can be
- * implemented using beforeDelete and afterDelete.
+ * implemented using beforeDelete and afterDelete instance methods.
* @param {Function} done a callback to invoke on success or failure
* @return {instance|error} deleted instance or error found on delete
* @version 0.1.0
@@ -92,14 +92,14 @@ module.exports = exports = function deletePlugin(schema /*, schemaOptns*/ ) {
/**
* @name beforeDelete
* @function beforeDelete
- * @description call perform pre delete logics
+ * @description perform pre delete logics
* @param {Function} next a callback to invoke after beforeDelete
* @return {instance|error}
* @private
*/
function beforeDelete(next) {
//obtain before hooks
- const before = this.beforeDelete;
+ const before = (this.beforeDelete || this.preDelete);
//run hook(s)
if (_.isFunction(before)) {
@@ -132,14 +132,14 @@ module.exports = exports = function deletePlugin(schema /*, schemaOptns*/ ) {
/**
* @name afterDelete
* @function afterDelete
- * @description call perform after delete logics
+ * @description perform after delete logics
* @param {Function} next a callback to invoke after afterDelete
* @return {instance|error}
* @private
*/
function afterDelete(instance, next) {
//obtain after hooks
- const after = instance.afterDelete;
+ const after = (instance.afterDelete || instance.postDelete);
//run hook(s)
if (_.isFunction(after)) {
@@ -198,7 +198,7 @@ module.exports = exports = function deletePlugin(schema /*, schemaOptns*/ ) {
async.waterfall([
function findExisting(next) {
- this.findById(_id, next);
+ this.findById(_id, next); //TODO use getById
}.bind(this),
function afterFindExisting(instance, next) {
|
improve delete plugin jsdocs
|
lykmapipo_mongoose-rest-actions
|
train
|
e8515be7919d9a2970755c814cd7b206b9d52314
|
diff --git a/cli/export.go b/cli/export.go
index <HASH>..<HASH> 100644
--- a/cli/export.go
+++ b/cli/export.go
@@ -344,7 +344,7 @@ func runImport(args *docopt.Args, client *controller.Client) error {
release.Env[k] = v
}
- config, err := getPgRunConfig(client, release)
+ config, err := getPgRunConfig(client, app.ID, release)
if err != nil {
return fmt.Errorf("error getting postgres config: %s", err)
}
@@ -402,7 +402,7 @@ func runImport(args *docopt.Args, client *controller.Client) error {
if uploadSlug {
config := runConfig{
- App: mustApp(),
+ App: app.ID,
Release: release.ID,
DisableLog: true,
Entrypoint: []string{"curl"},
diff --git a/cli/pg.go b/cli/pg.go
index <HASH>..<HASH> 100644
--- a/cli/pg.go
+++ b/cli/pg.go
@@ -60,10 +60,10 @@ func getAppPgRunConfig(client *controller.Client) (*runConfig, error) {
if err != nil {
return nil, fmt.Errorf("error getting app release: %s", err)
}
- return getPgRunConfig(client, appRelease)
+ return getPgRunConfig(client, mustApp(), appRelease)
}
-func getPgRunConfig(client *controller.Client, appRelease *ct.Release) (*runConfig, error) {
+func getPgRunConfig(client *controller.Client, app string, appRelease *ct.Release) (*runConfig, error) {
pgApp := appRelease.Env["FLYNN_POSTGRES"]
if pgApp == "" {
return nil, fmt.Errorf("No postgres database found. Provision one with `flynn resource add postgres`")
@@ -75,7 +75,7 @@ func getPgRunConfig(client *controller.Client, appRelease *ct.Release) (*runConf
}
config := &runConfig{
- App: mustApp(),
+ App: app,
Release: pgRelease.ID,
Env: make(map[string]string),
DisableLog: true,
diff --git a/test/test_cli.go b/test/test_cli.go
index <HASH>..<HASH> 100644
--- a/test/test_cli.go
+++ b/test/test_cli.go
@@ -744,6 +744,9 @@ func (s *CLISuite) TestExportImport(t *c.C) {
// remove db table from source app
t.Assert(r.flynn("pg", "psql", "--", "-c", "DROP TABLE foos"), Succeeds)
+ // remove the git remote
+ t.Assert(r.git("remote", "remove", "flynn"), Succeeds)
+
// import app
t.Assert(r.flynn("import", "--name", dstApp, "--file", file), Succeeds)
|
cli: Fix import without git remote available
There's no reason to read the app from the git remote.
Closes #<I>
|
flynn_flynn
|
train
|
c8f7ade1d156db2c739987691484a3981c215d24
|
diff --git a/lib/active_scaffold/actions/core.rb b/lib/active_scaffold/actions/core.rb
index <HASH>..<HASH> 100644
--- a/lib/active_scaffold/actions/core.rb
+++ b/lib/active_scaffold/actions/core.rb
@@ -106,13 +106,8 @@ module ActiveScaffold::Actions
params[:child_association].presence || @scope.split(']').first.sub(/^\[/, '').presence
end
- def controller_for_path(column, path)
- ctrl = self.class.active_scaffold_controller_for(column.association.klass)
- if ctrl.controller_path == path
- ctrl
- else
- ctrl.subclasses.find { |c| c.controller_path == path }
- end
+ def parent_controller_name
+ "#{params[:parent_controller].camelize}Controller"
end
def set_parent(record)
diff --git a/lib/active_scaffold/helpers/controller_helpers.rb b/lib/active_scaffold/helpers/controller_helpers.rb
index <HASH>..<HASH> 100644
--- a/lib/active_scaffold/helpers/controller_helpers.rb
+++ b/lib/active_scaffold/helpers/controller_helpers.rb
@@ -86,17 +86,10 @@ module ActiveScaffold
def main_form_controller
return unless params[:parent_controller] && subform_child_association
- klass = active_scaffold_config.model
- @main_form_controller ||= begin
- controller = nil
- active_scaffold_config.columns.find do |col|
- next unless col.association
- next unless col.association.reverse(klass).to_s == subform_child_association
- controller = controller_for_path(col, params[:parent_controller])
- break if controller
- end
- controller
- end
+ controller = parent_controller_name.constantize
+ column = controller.active_scaffold_config.columns[subform_child_association] if controller
+ return unless column
+ controller if self.class.active_scaffold_controller_for(column).controller_path == controller_path
end
def render_parent?
|
fix main_form_controller for subform on associations with polymorphic reverse
|
activescaffold_active_scaffold
|
train
|
4452a20611a8f5394959f8d39ec4d10fe860d8cc
|
diff --git a/lib/chef/knife/supermarket_install.rb b/lib/chef/knife/supermarket_install.rb
index <HASH>..<HASH> 100644
--- a/lib/chef/knife/supermarket_install.rb
+++ b/lib/chef/knife/supermarket_install.rb
@@ -1,6 +1,6 @@
#
# Author:: Christopher Webber (<cwebber@chef.io>)
-# Copyright:: Copyright (c) 2014-2018 Chef Software, Inc.
+# Copyright:: Copyright (c) 2014-2019 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,15 +18,14 @@
require "chef/knife"
require "chef/exceptions"
-require "shellwords"
-require "mixlib/archive"
class Chef
class Knife
class SupermarketInstall < Knife
deps do
- require "chef/mixin/shell_out"
+ require "shellwords"
+ require "mixlib/archive"
require "chef/knife/core/cookbook_scm_repo"
require "chef/cookbook/metadata"
end
@@ -71,8 +70,6 @@ class Chef
attr_reader :vendor_path
def run
- extend Chef::Mixin::ShellOut
-
if config[:cookbook_path]
Chef::Config[:cookbook_path] = config[:cookbook_path]
else
diff --git a/lib/chef/knife/supermarket_share.rb b/lib/chef/knife/supermarket_share.rb
index <HASH>..<HASH> 100644
--- a/lib/chef/knife/supermarket_share.rb
+++ b/lib/chef/knife/supermarket_share.rb
@@ -1,6 +1,6 @@
#
# Author:: Christopher Webber (<cwebber@chef.io>)
-# Copyright:: Copyright (c) 2014-2018 Chef Software, Inc.
+# Copyright:: Copyright (c) 2014-2019 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,7 +17,6 @@
#
require "chef/knife"
-require "chef/mixin/shell_out"
class Chef
class Knife
@@ -29,11 +28,9 @@ class Chef
require "chef/cookbook_loader"
require "chef/cookbook_uploader"
require "chef/cookbook_site_streaming_uploader"
- require "mixlib/shellout"
+ require "chef/mixin/shell_out"
end
- include Chef::Mixin::ShellOut
-
banner "knife supermarket share COOKBOOK [CATEGORY] (options)"
category "supermarket"
|
Cleanup requires / includes in knife supermarket
1) Move everything into the deps blocks
2) don't require shell_out in install since we're not using it. We use mixlib_archive now
|
chef_chef
|
train
|
f3e9f48aa610ba1ce3aa5ab372a039e5023a08b3
|
diff --git a/folium/features.py b/folium/features.py
index <HASH>..<HASH> 100644
--- a/folium/features.py
+++ b/folium/features.py
@@ -450,6 +450,7 @@ class GeoJson(Layer):
self.data = json.loads(json.dumps(data.__geo_interface__)) # noqa
else:
raise ValueError('Unhandled object {!r}.'.format(data))
+
self.style_function = style_function or (lambda x: {})
self.highlight = highlight_function is not None
@@ -458,11 +459,25 @@ class GeoJson(Layer):
self.smooth_factor = smooth_factor
+ self._validate_function(self.style_function, 'style_function')
+ self._validate_function(self.highlight_function, 'highlight_function')
+
if isinstance(tooltip, (GeoJsonTooltip, Tooltip)):
self.add_child(tooltip)
elif tooltip is not None:
self.add_child(Tooltip(tooltip))
+ def _validate_function(self, func, name):
+ """
+ Tests `self.style_function` and `self.highlight_function` to ensure
+ they are functions returning dictionaries.
+ """
+ test_feature = self.data if self.data.get('features') is None else self.data['features'][0] # noqa
+ if not callable(func) or not isinstance(func(test_feature), dict):
+ raise ValueError('{} should be a function that accepts items from '
+ 'data[\'features\'] and returns a dictionary.'
+ .format(name))
+
def style_data(self):
"""
Applies `self.style_function` to each feature of `self.data` and
|
GeoJson style and highlight function validation (#<I>)
Add check on `style_function` and `highlight_function` to make sure that they are callable, and that they return a dictionary.
|
python-visualization_folium
|
train
|
57d506f0fa90b03402cabc65084872517e4c105d
|
diff --git a/src/FileParser/FileParser.php b/src/FileParser/FileParser.php
index <HASH>..<HASH> 100644
--- a/src/FileParser/FileParser.php
+++ b/src/FileParser/FileParser.php
@@ -251,7 +251,7 @@ class FileParser
{
$tagCollection = $this->docblockParser->parseComment($text);
- if ($tagCollection->hasTag('inheritdoc')) {
+ if ($tagCollection->hasTag('inheritdoc') || $tagCollection->hasTag('inheritDoc')) {
return ['inherit' => true];
}
|
Allow for @inheritDoc tag with uppercase D
|
dancryer_php-docblock-checker
|
train
|
56f0f1b936242a7384675749c89e990aef7c0d8b
|
diff --git a/server/irc/connection.js b/server/irc/connection.js
index <HASH>..<HASH> 100644
--- a/server/irc/connection.js
+++ b/server/irc/connection.js
@@ -263,11 +263,18 @@ IrcConnection.prototype.clientEvent = function (event_name, data, callback) {
/**
* Write a line of data to the IRCd
+ * @param data The line of data to be sent
+ * @param force Write the data now, ignoring any write queue
*/
-IrcConnection.prototype.write = function (data, callback) {
+IrcConnection.prototype.write = function (data, force) {
//ENCODE string to encoding of the server
encoded_buffer = iconv.encode(data + '\r\n', this.encoding);
+ if (force) {
+ this.socket.write(encoded_buffer);
+ return;
+ }
+
this.write_buffer.push(encoded_buffer);
// Only flush if we're not writing already
@@ -325,11 +332,14 @@ IrcConnection.prototype.flushWriteBuffer = function () {
/**
- * Close the connection to the IRCd after sending one last line
+ * Close the connection to the IRCd after forcing one last line
*/
IrcConnection.prototype.end = function (data, callback) {
+ if (!this.socket)
+ return;
+
if (data)
- this.write(data);
+ this.write(data, true);
this.socket.end();
};
|
Server: Force data to be sent ahead of a write queue
|
prawnsalad_KiwiIRC
|
train
|
c30401c8ba707b62518676aadb06b78840dd1daf
|
diff --git a/Kwf/Config/Web.php b/Kwf/Config/Web.php
index <HASH>..<HASH> 100644
--- a/Kwf/Config/Web.php
+++ b/Kwf/Config/Web.php
@@ -92,10 +92,10 @@ class Kwf_Config_Web extends Kwf_Config_Ini
public static function getDefaultConfigSection()
{
if (file_exists('config_section')) {
- return trim(file_get_contents('config_section'));
- } else {
- return 'production';
+ $ret = trim(file_get_contents('config_section'));
+ if ($ret) return $ret;
}
+ return 'production';
}
public function __construct($section, $options = array())
|
don't use config_section file if it's empty
this can happen during setup
|
koala-framework_koala-framework
|
train
|
df1886f8a296222346ab826613d8c3bf55605809
|
diff --git a/index/brand.go b/index/brand.go
index <HASH>..<HASH> 100644
--- a/index/brand.go
+++ b/index/brand.go
@@ -394,17 +394,11 @@ func (self *Brand) Load(requestChan chan Command, f *Fragment) {
//log.Println("Bad mojo")
}
time.Sleep(time.Duration(rand.Intn(15)) * time.Second) //trying to avoid mass cassandra hit
- counter := 0
for _, k := range keys {
request := NewLoadRequest(k)
requestChan <- request
request.Response()
- counter++
- if counter > 100 {
- backoff := (counter / 100) * 100
-
- time.Sleep(time.Duration(backoff) * time.Millisecond) //trying to avoid mass cassandra hit
- }
+ time.Sleep(time.Duration(rand.Intn(1000)) * time.Millisecond) //trying to avoid mass cassandra hit
}
}
diff --git a/index/fragment_container.go b/index/fragment_container.go
index <HASH>..<HASH> 100644
--- a/index/fragment_container.go
+++ b/index/fragment_container.go
@@ -287,7 +287,7 @@ func (self *FragmentContainer) AddFragment(db string, frame string, slice int, i
loader := make(chan Command)
self.fragments[id] = f
go f.ServeFragment(loader)
- //go f.Load(loader)
+ go f.Load(loader)
}
}
|
added back load but with greater delay (random up to a sec between id's)
|
pilosa_pilosa
|
train
|
ec3a0a3e3e28b56dfa61c2c5e2518a65af18b0f9
|
diff --git a/neevo/Neevo.php b/neevo/Neevo.php
index <HASH>..<HASH> 100644
--- a/neevo/Neevo.php
+++ b/neevo/Neevo.php
@@ -32,7 +32,7 @@ class Neevo implements INeevoObservable, INeevoObserver {
// Neevo revision
- const REVISION = 425;
+ const REVISION = 426;
// Data types
const BOOL = 'b';
diff --git a/neevo/NeevoConnection.php b/neevo/NeevoConnection.php
index <HASH>..<HASH> 100644
--- a/neevo/NeevoConnection.php
+++ b/neevo/NeevoConnection.php
@@ -24,7 +24,7 @@
* @author Martin Srank
* @package Neevo
*/
-class NeevoConnection implements INeevoObservable {
+class NeevoConnection implements INeevoObservable, ArrayAccess {
/** @var array */
@@ -214,6 +214,26 @@ class NeevoConnection implements INeevoObservable {
}
+ /**
+ * Get configuration value.
+ * @param string $key
+ * @return mixed
+ */
+ public function offsetGet($key){
+ return $this->getConfig($key);
+ }
+
+
+ /**
+ * Check if configuration value exists.
+ * @param mixed $key
+ * @return bool
+ */
+ public function offsetExists($key){
+ return isset($this->config[$key]);
+ }
+
+
/* ************ Internal methods ************ */
@@ -231,6 +251,13 @@ class NeevoConnection implements INeevoObservable {
}
+ /** @internal */
+ public function offsetSet($offset, $value){}
+
+ /** @internal */
+ public function offsetUnset($offset){}
+
+
/**
* Set the driver and statement parser.
* @param string $driver
|
NeevoConnection implements ArrayAccess to get a config value
|
smasty_Neevo
|
train
|
185a03d157f4230bf04c92aad844224e6f29dadf
|
diff --git a/spyder/widgets/fileswitcher.py b/spyder/widgets/fileswitcher.py
index <HASH>..<HASH> 100644
--- a/spyder/widgets/fileswitcher.py
+++ b/spyder/widgets/fileswitcher.py
@@ -295,7 +295,8 @@ class FileSwitcher(QDialog):
@property
def widgets(self):
widgets = []
- for tabs, plugin in self.plugins_tabs:
+ for plugin in self.plugins_instances:
+ tabs = self.get_plugin_tabwidget(plugin)
widgets += [(tabs.widget(index), plugin) for
index in range(tabs.count())]
return widgets
@@ -321,14 +322,16 @@ class FileSwitcher(QDialog):
@property
def paths(self):
paths = []
- for da, icon in self.plugins_data:
+ for plugin in self.plugins_instances:
+ da = self.get_plugin_data(plugin)
paths += [getattr(td, 'filename', None) for td in da]
return paths
@property
def filenames(self):
filenames = []
- for da, icon in self.plugins_data:
+ for plugin in self.plugins_instances:
+ da = self.get_plugin_data(plugin)
filenames += [os.path.basename(getattr(td,
'filename',
None)) for td in da]
@@ -526,18 +529,35 @@ class FileSwitcher(QDialog):
return real_index
# --- Helper methods: Widget
+ def get_plugin_data(self, plugin):
+ # The data object is named "data" in the editor plugin while it is
+ # named "clients" in the notebook plugin.
+ try:
+ data = plugin.get_current_tab_manager().data
+ except AttributeError:
+ data = plugin.get_current_tab_manager().clients
+
+ return data
+
+ def get_plugin_tabwidget(self, plugin):
+ # The tab widget is named "tabs" in the editor plugin while it is
+ # named "tabwidget" in the notebook plugin.
+ try:
+ tabwidget = plugin.get_current_tab_manager().tabs
+ except AttributeError:
+ tabwidget = plugin.get_current_tab_manager().tabwidget
+
+ return tabwidget
+
def get_widget(self, index=None, path=None, tabs=None):
"""Get widget by index.
-
+
If no tabs and index specified the current active widget is returned.
"""
- if index and tabs:
- return tabs.widget(index)
- elif path and tabs:
+ if (index and tabs) or (path and tabs):
return tabs.widget(index)
elif self.plugin:
- index = self.plugins_instances.index(self.plugin)
- return self.plugins_tabs[index][0].currentWidget()
+ return self.get_plugin_tabwidget(self.plugin).currentWidget()
else:
return self.plugins_tabs[0][0].currentWidget()
|
Make things work for asynchonous Editorstacks
|
spyder-ide_spyder
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.