hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
dff3574956c1680cd104c85f7c2a87550024e892
|
diff --git a/src/components/Modal.js b/src/components/Modal.js
index <HASH>..<HASH> 100644
--- a/src/components/Modal.js
+++ b/src/components/Modal.js
@@ -4,7 +4,7 @@ const React = require('react');
import { withTheme } from './Theme';
const Button = require('./Button');
const Icon = require('./Icon');
-const MXFocusTrap = require('../components/MXFocusTrap');
+const RestrictFocus = require('../components/RestrictFocus');
const _merge = require('lodash/merge');
@@ -190,7 +190,7 @@ class Modal extends React.Component {
const styles = this.styles(theme);
return (
- <MXFocusTrap {...mergedFocusTrapProps}>
+ <RestrictFocus>
<div className='mx-modal' style={Object.assign({}, styles.scrim, this.props.isRelative && styles.relative)}>
<div className='mx-modal-scrim' onClick={this.props.onRequestClose} style={Object.assign({}, styles.scrim, styles.overlay, this.props.isRelative && styles.relative)} />
<div
@@ -230,7 +230,7 @@ class Modal extends React.Component {
)}
</div>
</div>
- </MXFocusTrap>
+ </RestrictFocus>
);
}
|
Replace MXFocusTrap with RestrictFocus component
|
mxenabled_mx-react-components
|
train
|
f802ab7dc7b11d1889b4b197b953b58f3444d499
|
diff --git a/sllurp/llrp.py b/sllurp/llrp.py
index <HASH>..<HASH> 100644
--- a/sllurp/llrp.py
+++ b/sllurp/llrp.py
@@ -446,7 +446,8 @@ class LLRPClient(LineReceiver):
elif self.state == LLRPClient.STATE_SENT_GET_CONFIG:
if msgName not in ('GET_READER_CONFIG_RESPONSE',
- 'DELETE_ACCESSSPEC_RESPONSE'):
+ 'DELETE_ACCESSSPEC_RESPONSE',
+ 'DELETE_ROSPEC_RESPONSE'):
logger.error('unexpected response %s getting config',
msgName)
return
@@ -467,7 +468,9 @@ class LLRPClient(LineReceiver):
self.send_SET_READER_CONFIG(onCompletion=d)
elif self.state == LLRPClient.STATE_SENT_SET_CONFIG:
- if msgName not in ('SET_READER_CONFIG_RESPONSE',):
+ if msgName not in ('SET_READER_CONFIG_RESPONSE',
+ 'GET_READER_CONFIG_RESPONSE',
+ 'DELETE_ACCESSSPEC_RESPONSE'):
logger.error('unexpected response %s setting config',
msgName)
return
diff --git a/sllurp/verb/reset.py b/sllurp/verb/reset.py
index <HASH>..<HASH> 100644
--- a/sllurp/verb/reset.py
+++ b/sllurp/verb/reset.py
@@ -12,11 +12,9 @@ logger = logging.getLogger(__name__)
def shutdown(proto):
host, port = proto.peername
logger.info('Shutting down reader %s:%d', host, port)
- return proto.stopPolitely(disconnect=True)
-
-
-def finish(*args):
- reactor.stop()
+ d = proto.stopPolitely(disconnect=True)
+ d.addCallback(lambda _: reactor.stop())
+ return d
def main(host, port):
@@ -24,12 +22,8 @@ def main(host, port):
logger.info('No readers specified.')
return 0
- onFinish = defer.Deferred()
- onFinish.addCallback(finish)
-
factory = LLRPClientFactory(reset_on_connect=False,
- start_inventory=False,
- onFinish=onFinish)
+ start_inventory=False)
factory.addStateCallback(LLRPClient.STATE_CONNECTED, shutdown)
for host in host:
|
"fix" reset CLI tool
the state machine, with all its threading of Deferreds and hacked-on abilities
to handle out-of-order messages is getting insane... this commit adds some
pretty unintuitive stuff!
|
ransford_sllurp
|
train
|
f3bac3c05a6c93ab08b1b32ca15c86a4825266fa
|
diff --git a/data/menu.glade b/data/menu.glade
index <HASH>..<HASH> 100644
--- a/data/menu.glade
+++ b/data/menu.glade
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE glade-interface SYSTEM "glade-2.0.dtd">
-<!--Generated with glade3 3.4.5 on Thu Aug 21 13:58:01 2008 -->
+<!--Generated with glade3 3.4.5 on Mon Nov 17 23:09:58 2008 -->
<glade-interface>
<widget class="GtkWindow" id="hamster-window">
<property name="can_focus">True</property>
@@ -129,11 +129,11 @@
</widget>
</child>
<child>
- <widget class="GtkLabel" id="no_facts_today">
+ <widget class="GtkLabel" id="fact_totals">
<property name="height_request">32</property>
- <property name="sensitive">False</property>
+ <property name="visible">True</property>
<property name="events">GDK_POINTER_MOTION_MASK | GDK_POINTER_MOTION_HINT_MASK | GDK_BUTTON_PRESS_MASK | GDK_BUTTON_RELEASE_MASK</property>
- <property name="xalign">0.090000003576278687</property>
+ <property name="xalign">0</property>
<property name="label" translatable="yes">No records today</property>
</widget>
<packing>
diff --git a/hamster/applet.py b/hamster/applet.py
index <HASH>..<HASH> 100755
--- a/hamster/applet.py
+++ b/hamster/applet.py
@@ -406,11 +406,19 @@ class HamsterApplet(object):
if len(day.facts) == 0:
self.last_activity = None
self.glade.get_widget("todays_scroll").hide()
- self.glade.get_widget("no_facts_today").show()
+
+ self.glade.get_widget("fact_totals").set_text(_("No records today"))
else:
self.last_activity = day.facts[len(day.facts) - 1]
self.glade.get_widget("todays_scroll").show()
- self.glade.get_widget("no_facts_today").hide()
+
+ total_string = ""
+ for total in day.totals:
+ total_string += _("%(category)s: %(duration)s, ") % ({'category': total,
+ 'duration': format_duration(day.totals[total])})
+
+ total_string = total_string.rstrip(", ") # trailing slash
+ self.glade.get_widget("fact_totals").set_text(total_string)
def refresh_menu(self):
diff --git a/hamster/stuff.py b/hamster/stuff.py
index <HASH>..<HASH> 100644
--- a/hamster/stuff.py
+++ b/hamster/stuff.py
@@ -121,13 +121,13 @@ class DayStore(object):
delta = dt.datetime.now() - fact["start_time"]
duration = 24 * delta.days + delta.seconds / 60
- fact_name = fact['name']
+ fact_category = fact['category']
- if fact_name not in self.totals:
- self.totals[fact_name] = 0
+ if fact_category not in self.totals:
+ self.totals[fact_category] = 0
if duration:
- self.totals[fact_name] += duration
+ self.totals[fact_category] += duration
current_duration = format_duration(duration)
|
show totals by category in day view
svn path=/trunk/; revision=<I>
|
projecthamster_hamster
|
train
|
40dba196d38f0eff2a5b15cc827014317cb958a0
|
diff --git a/config/karma.conf.js b/config/karma.conf.js
index <HASH>..<HASH> 100644
--- a/config/karma.conf.js
+++ b/config/karma.conf.js
@@ -29,7 +29,7 @@ module.exports = function (config) {
module: {
postLoaders: [{
test: /\.js$/,
- exclude: /(src\/dist|packages|.git|node_modules)/,
+ exclude: /(src\/dist|packages|.git|node_modules|__tests__)/,
loader: 'isparta',
include: path.join(__dirname, '../src')
}],
|
Remove __tests__ folders from code coverage report
|
infernojs_inferno
|
train
|
269afde5b7ad702154a2e0a31336d5dcb14d5867
|
diff --git a/test/test.rb b/test/test.rb
index <HASH>..<HASH> 100644
--- a/test/test.rb
+++ b/test/test.rb
@@ -67,6 +67,7 @@ describe "database functions" do
db2.set! '1', 5
@db.sync
assert_equal @db['1'], 5
+ db2.close
end
it "should be able to handle another process's call to compact" do
@@ -76,6 +77,7 @@ describe "database functions" do
@db.compact
db2.sync
assert_equal 19, db2['19']
+ db2.close
end
it "can empty the database" do
@@ -83,6 +85,7 @@ describe "database functions" do
@db.clear
db2 = Daybreak::DB.new DB_PATH
assert_equal nil, db2['19']
+ db2.close
end
it "should handle deletions" do
@@ -91,10 +94,11 @@ describe "database functions" do
@db.delete! 'two'
assert !@db.has_key?('two')
assert_equal @db['two'], nil
- db2 = Daybreak::DB.new DB_PATH
+ db2 = Daybreak::DB.new DB_PATH
assert !db2.has_key?('two')
assert_equal db2['two'], nil
+ db2.close
end
it "should close and reopen the file when clearing the database" do
|
don't forget to close all dbs
|
propublica_daybreak
|
train
|
bb345f45e7bd5fac80bbd3971eb770b8dd1f44cf
|
diff --git a/synapse/lib/ast.py b/synapse/lib/ast.py
index <HASH>..<HASH> 100644
--- a/synapse/lib/ast.py
+++ b/synapse/lib/ast.py
@@ -614,7 +614,7 @@ class PropPivotOut(PivotOper):
# :ipv4 -> *
ndef = (prop.type.name, valu)
- pivo = self.snap.getNodeByNdef(valu)
+ pivo = self.snap.getNodeByNdef(ndef)
yield pivo, path.fork(pivo)
class PropPivot(PivotOper):
diff --git a/synapse/tests/test_cortex.py b/synapse/tests/test_cortex.py
index <HASH>..<HASH> 100644
--- a/synapse/tests/test_cortex.py
+++ b/synapse/tests/test_cortex.py
@@ -19,6 +19,17 @@ class HttpTestV1(t_web.RequestHandler):
class CortexTest(s_test.SynTest):
+ def test_cortex_prop_pivout(self):
+
+ with self.getTestCore() as core:
+
+ with core.snap() as snap:
+ snap.addNode('inet:dns:a', ('woot.com', '1.2.3.4'))
+
+ nodes = list([n.pack() for n in core.eval('inet:dns:a :ipv4 -> *')])
+ self.len(1, nodes)
+ self.eq(nodes[0][0], ('inet:ipv4', 0x01020304))
+
def test_cortex_of_the_future(self):
# test "future/ongoing" time stamp.
|
fix for whippits ndef catch and test that confirms it was borked
|
vertexproject_synapse
|
train
|
0dab1d10cf28e50d9e92dd24caa003f86dedae38
|
diff --git a/Slim/App.php b/Slim/App.php
index <HASH>..<HASH> 100644
--- a/Slim/App.php
+++ b/Slim/App.php
@@ -664,6 +664,16 @@ class App
{
// Send response
if (!headers_sent()) {
+ // Headers
+ foreach ($response->getHeaders() as $name => $values) {
+ foreach ($values as $value) {
+ header(sprintf('%s: %s', $name, $value), false);
+ }
+ }
+
+ // Set the status _after_ the headers, because of PHP's "helpful" behavior with location headers.
+ // See https://github.com/slimphp/Slim/issues/1730
+
// Status
header(sprintf(
'HTTP/%s %s %s',
@@ -671,13 +681,6 @@ class App
$response->getStatusCode(),
$response->getReasonPhrase()
));
-
- // Headers
- foreach ($response->getHeaders() as $name => $values) {
- foreach ($values as $value) {
- header(sprintf('%s: %s', $name, $value), false);
- }
- }
}
// Body
|
Send correct status code regardless of location header
PHP thinks it's clever, but it isn't. Fixes #<I>.
|
slimphp_Slim
|
train
|
170db1dcfb7c4938c0615012958bbb5e36bda705
|
diff --git a/indy-repository-manager/src/main/java/org/jboss/pnc/indyrepositorymanager/RepositoryManagerDriver.java b/indy-repository-manager/src/main/java/org/jboss/pnc/indyrepositorymanager/RepositoryManagerDriver.java
index <HASH>..<HASH> 100644
--- a/indy-repository-manager/src/main/java/org/jboss/pnc/indyrepositorymanager/RepositoryManagerDriver.java
+++ b/indy-repository-manager/src/main/java/org/jboss/pnc/indyrepositorymanager/RepositoryManagerDriver.java
@@ -416,7 +416,7 @@ public class RepositoryManagerDriver implements RepositoryManager {
for (ArtifactRepository repository : repositories) {
StoreKey remoteKey = null;
for (RemoteRepository existingRepo : existingRepos) {
- if (existingRepo.getUrl().equals(repository.getUrl())) {
+ if (StringUtils.equals(existingRepo.getUrl(), repository.getUrl())) {
remoteKey = existingRepo.getKey();
break;
}
|
[NCL-<I>] Fix comparison of existing remote repos
|
project-ncl_pnc
|
train
|
898125c3dba91c1b8447e7f849c318f361ed80d3
|
diff --git a/spec/lib/action_subscriber/middleware/active_record/query_cache_spec.rb b/spec/lib/action_subscriber/middleware/active_record/query_cache_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/lib/action_subscriber/middleware/active_record/query_cache_spec.rb
+++ b/spec/lib/action_subscriber/middleware/active_record/query_cache_spec.rb
@@ -11,7 +11,8 @@ describe ActionSubscriber::Middleware::ActiveRecord::QueryCache do
allow(connection).to receive(:enable_query_cache!)
allow(ActiveRecord::Base).to receive(:connection).and_return(connection)
- allow(ActiveRecord::Base).to receive(:connection_id)
+ # Rails 5 "compat"
+ allow(ActiveRecord::Base).to receive(:connection_id) if ::ActiveRecord::Base.respond_to?(:connection_id)
end
subject { described_class.new(app) }
|
add attempted rails 5 compat for ar-base
|
mxenabled_action_subscriber
|
train
|
0d6325cfb75796483cabf738f4d45abef7c0a33c
|
diff --git a/activerecord/lib/active_record/tasks/postgresql_database_tasks.rb b/activerecord/lib/active_record/tasks/postgresql_database_tasks.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/tasks/postgresql_database_tasks.rb
+++ b/activerecord/lib/active_record/tasks/postgresql_database_tasks.rb
@@ -61,7 +61,7 @@ module ActiveRecord
ActiveRecord::Base.dump_schemas
end
- args = ["-s", "-X", "-x", "-O", "-f", filename]
+ args = ["-s", "-x", "-O", "-f", filename]
args.concat(Array(extra_flags)) if extra_flags
unless search_path.blank?
args += search_path.split(",").map do |part|
diff --git a/activerecord/test/cases/tasks/postgresql_rake_test.rb b/activerecord/test/cases/tasks/postgresql_rake_test.rb
index <HASH>..<HASH> 100644
--- a/activerecord/test/cases/tasks/postgresql_rake_test.rb
+++ b/activerecord/test/cases/tasks/postgresql_rake_test.rb
@@ -366,7 +366,7 @@ if current_adapter?(:PostgreSQLAdapter)
assert_called_with(
Kernel,
:system,
- ["pg_dump", "-s", "-X", "-x", "-O", "-f", @filename, "my-app-db"],
+ ["pg_dump", "-s", "-x", "-O", "-f", @filename, "my-app-db"],
returns: true
) do
ActiveRecord::Tasks::DatabaseTasks.structure_dump(@configuration, @filename)
@@ -383,7 +383,7 @@ if current_adapter?(:PostgreSQLAdapter)
end
def test_structure_dump_with_extra_flags
- expected_command = ["pg_dump", "-s", "-X", "-x", "-O", "-f", @filename, "--noop", "my-app-db"]
+ expected_command = ["pg_dump", "-s", "-x", "-O", "-f", @filename, "--noop", "my-app-db"]
assert_called_with(Kernel, :system, expected_command, returns: true) do
with_structure_dump_flags(["--noop"]) do
@@ -401,7 +401,7 @@ if current_adapter?(:PostgreSQLAdapter)
assert_called_with(
Kernel,
:system,
- ["pg_dump", "-s", "-X", "-x", "-O", "-f", @filename, "-T", "foo", "-T", "bar", "my-app-db"],
+ ["pg_dump", "-s", "-x", "-O", "-f", @filename, "-T", "foo", "-T", "bar", "my-app-db"],
returns: true
) do
ActiveRecord::Tasks::DatabaseTasks.structure_dump(@configuration, @filename)
@@ -415,7 +415,7 @@ if current_adapter?(:PostgreSQLAdapter)
assert_called_with(
Kernel,
:system,
- ["pg_dump", "-s", "-X", "-x", "-O", "-f", @filename, "--schema=foo", "--schema=bar", "my-app-db"],
+ ["pg_dump", "-s", "-x", "-O", "-f", @filename, "--schema=foo", "--schema=bar", "my-app-db"],
returns: true
) do
ActiveRecord::Tasks::DatabaseTasks.structure_dump(@configuration, @filename)
@@ -428,7 +428,7 @@ if current_adapter?(:PostgreSQLAdapter)
assert_called_with(
Kernel,
:system,
- ["pg_dump", "-s", "-X", "-x", "-O", "-f", @filename, "my-app-db"],
+ ["pg_dump", "-s", "-x", "-O", "-f", @filename, "my-app-db"],
returns: true
) do
with_dump_schemas(:all) do
@@ -441,7 +441,7 @@ if current_adapter?(:PostgreSQLAdapter)
assert_called_with(
Kernel,
:system,
- ["pg_dump", "-s", "-X", "-x", "-O", "-f", @filename, "--schema=foo", "--schema=bar", "my-app-db"],
+ ["pg_dump", "-s", "-x", "-O", "-f", @filename, "--schema=foo", "--schema=bar", "my-app-db"],
returns: true
) do
with_dump_schemas("foo,bar") do
@@ -455,7 +455,7 @@ if current_adapter?(:PostgreSQLAdapter)
assert_called_with(
Kernel,
:system,
- ["pg_dump", "-s", "-X", "-x", "-O", "-f", filename, "my-app-db"],
+ ["pg_dump", "-s", "-x", "-O", "-f", filename, "my-app-db"],
returns: nil
) do
e = assert_raise(RuntimeError) do
|
Removed invalid -X flag for pg_dump
|
rails_rails
|
train
|
affcce63b0826cd414643a71eb7d3c9c5994bed7
|
diff --git a/src/js/core/img.js b/src/js/core/img.js
index <HASH>..<HASH> 100644
--- a/src/js/core/img.js
+++ b/src/js/core/img.js
@@ -21,6 +21,8 @@ import {
trigger,
} from 'uikit-util';
+const nativeLazyLoad = 'loading' in HTMLImageElement.prototype;
+
export default {
args: 'dataSrc',
@@ -58,7 +60,16 @@ export default {
return;
}
- ensurePlaceholderImage(this.$el);
+ if (nativeLazyLoad && isImg(this.$el)) {
+ this.$el.loading = 'lazy';
+ setSrcAttrs(this.$el);
+
+ if (this.target.length === 1) {
+ return;
+ }
+ }
+
+ ensureSrcAttribute(this.$el);
const rootMargin = `${toPx(this.offsetTop, 'height')}px ${toPx(
this.offsetLeft,
@@ -110,6 +121,7 @@ export default {
: getImageFromElement(this.$el, this.dataSrc, this.dataSources);
this._data.image = image;
+ image.loading = 'eager';
setSrcAttrs(this.$el, image.currentSrc);
this.observer.disconnect();
@@ -234,7 +246,7 @@ function getSourceSize(srcset, sizes) {
return descriptors.filter((size) => size >= srcSize)[0] || descriptors.pop() || '';
}
-function ensurePlaceholderImage(el) {
+function ensureSrcAttribute(el) {
if (isImg(el) && !hasAttr(el, 'src')) {
attr(el, 'src', 'data:image/svg+xml;utf8,<svg xmlns="http://www.w3.org/2000/svg"></svg>');
}
|
feat: use native loading="lazy" for image tags
|
uikit_uikit
|
train
|
6949a2d5a9744f5ae298712460e5d02fb75d49c0
|
diff --git a/pkg/oc/bootstrap/docker/up.go b/pkg/oc/bootstrap/docker/up.go
index <HASH>..<HASH> 100644
--- a/pkg/oc/bootstrap/docker/up.go
+++ b/pkg/oc/bootstrap/docker/up.go
@@ -439,7 +439,7 @@ func (c *ClusterUpConfig) Check(out io.Writer) error {
// OpenShift checks
taskPrinter.StartTask("Checking if OpenShift client is configured properly")
- if err := checkOpenShiftClient(); err != nil {
+ if err := c.checkOpenShiftClient(); err != nil {
return taskPrinter.ToError(err)
}
taskPrinter.Success()
@@ -601,16 +601,33 @@ func defaultPortForwarding() bool {
// checkOpenShiftClient ensures that the client can be configured
// for the new server
-func checkOpenShiftClient() error {
+func (c *ClusterUpConfig) checkOpenShiftClient() error {
kubeConfig := os.Getenv("KUBECONFIG")
if len(kubeConfig) == 0 {
return nil
}
+
+ // if you're trying to use the kubeconfig into a subdirectory of the basedir, you're probably using a KUBECONFIG
+ // location that is going to overwrite a "real" kubeconfig, usually admin.kubeconfig which will break every other component
+ // relying on it being a full power kubeconfig
+ kubeConfigDir := filepath.Dir(kubeConfig)
+ cwd, err := os.Getwd()
+ if err != nil {
+ return err
+ }
+ absKubeConfigDir, err := cmdutil.MakeAbs(kubeConfigDir, cwd)
+ if err != nil {
+ return err
+ }
+ if strings.HasPrefix(absKubeConfigDir, c.BaseDir+"/") {
+ return fmt.Errorf("cannot choose kubeconfig in subdirectory of the --base-dir: %q", kubeConfig)
+ }
+
var (
kubeConfigError error
f *os.File
)
- _, err := os.Stat(kubeConfig)
+ _, err = os.Stat(kubeConfig)
switch {
case os.IsNotExist(err):
err = os.MkdirAll(filepath.Dir(kubeConfig), 0755)
|
prevent oc cluster up from allowing bad KUBECONFIGs
|
openshift_origin
|
train
|
7d63d95e56361f59a5cc3413dbe34b362fbbeeab
|
diff --git a/engine/xml-config/src/test/java/org/datacleaner/configuration/JaxbConfigurationReaderTest.java b/engine/xml-config/src/test/java/org/datacleaner/configuration/JaxbConfigurationReaderTest.java
index <HASH>..<HASH> 100644
--- a/engine/xml-config/src/test/java/org/datacleaner/configuration/JaxbConfigurationReaderTest.java
+++ b/engine/xml-config/src/test/java/org/datacleaner/configuration/JaxbConfigurationReaderTest.java
@@ -229,7 +229,7 @@ public class JaxbConfigurationReaderTest extends TestCase {
CassandraDatastore cassandraDatastore = (CassandraDatastore) datastoreCatalog.getDatastore("my cassandra db");
assertEquals("localhost", cassandraDatastore.getHostname());
assertEquals(9042, cassandraDatastore.getPort());
- assertEquals("my_keyspace", cassandraDatastore.getKeySpace());
+ assertEquals("my_keyspace", cassandraDatastore.getKeyspace());
assertEquals("foo", cassandraDatastore.getUsername());
assertEquals("bar", cassandraDatastore.getPassword());
assertEquals("[SimpleTableDef[name=table,columnNames=[bah, baz],columnTypes=[STRING, STRING]]]",
|
Fixes a typo in the Cassandra test
|
datacleaner_DataCleaner
|
train
|
d62a7127e2e630c37ecb4c6369c5cb7d53dfad43
|
diff --git a/modelx/core/spacecontainer.py b/modelx/core/spacecontainer.py
index <HASH>..<HASH> 100644
--- a/modelx/core/spacecontainer.py
+++ b/modelx/core/spacecontainer.py
@@ -13,6 +13,7 @@
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
import warnings
+import pathlib
from modelx.core.base import get_impls, get_interfaces, Impl, Interface
from modelx.core.util import AutoNamer, is_valid_name, get_module
@@ -447,7 +448,7 @@ class EditableSpaceContainerImpl(BaseSpaceContainerImpl):
source = {
"method": "new_space_from_excel",
- "args": [book, range_],
+ "args": [str(pathlib.Path(book).absolute()), range_],
"kwargs": {
"sheet": sheet,
"name": name,
|
ENH: Store book path in absolute form
|
fumitoh_modelx
|
train
|
dadabb2be87284222a4e4f99523101fac10650d4
|
diff --git a/lib/crummy.rb b/lib/crummy.rb
index <HASH>..<HASH> 100644
--- a/lib/crummy.rb
+++ b/lib/crummy.rb
@@ -96,7 +96,8 @@ module Crummy
crumb_string = crumbs.collect do |crumb|
crumb_to_html crumb, options[:links]
end * options[:seperator]
- crumb_string.html_safe
+ crumb_string = crumb_string.html_safe if crumb_string.respond_to?(:html_safe)
+ crumb_string
when :xml
crumbs.collect do |crumb|
crumb_to_xml crumb, options[:links], options[:seperator]
|
ignore html_safe if there isn't a method like that
|
zachinglis_crummy
|
train
|
2016869d2c11bcf62e895b4f741fcc3c532cd18e
|
diff --git a/hazelcast/src/main/java/com/hazelcast/util/ServiceLoader.java b/hazelcast/src/main/java/com/hazelcast/util/ServiceLoader.java
index <HASH>..<HASH> 100644
--- a/hazelcast/src/main/java/com/hazelcast/util/ServiceLoader.java
+++ b/hazelcast/src/main/java/com/hazelcast/util/ServiceLoader.java
@@ -426,14 +426,15 @@ public final class ServiceLoader {
// this can happen in application containers - different Hazelcast JARs are loaded
// by different classloaders.
LOGGER.fine("There appears to be a classloading conflict. "
- + "Class " + className + " loaded by " + candidate.getClassLoader() + " does not "
- + "implement " + expectedType.getClass().getName() + " loaded by "
- + expectedType.getClass().getClassLoader());
+ + "Class " + className + " loaded by " + candidate.getClassLoader() + " implements "
+ + expectedType.getName() + " from its own class loader, but it does not implement "
+ + expectedType.getName() + " loaded by " + expectedType.getClassLoader());
} else {
- // ok, the class does not implement interface with the expected name. it's probably
- // an error in hook implementation -> let's fail fast
- throw new ClassCastException("Class " + className + " does not implement "
- + expectedType.getName());
+ //the class does not implement interface with the expected name.
+ LOGGER.fine("There appears to be a classloading conflict. "
+ + "Class " + className + " loaded by " + candidate.getClassLoader() + " does not "
+ + "implement an interface with name " + expectedType.getName() + " in both class loaders."
+ + "the interface currently loaded by " + expectedType.getClassLoader());
}
}
}
diff --git a/hazelcast/src/test/java/com/hazelcast/util/ServiceLoaderTest.java b/hazelcast/src/test/java/com/hazelcast/util/ServiceLoaderTest.java
index <HASH>..<HASH> 100644
--- a/hazelcast/src/test/java/com/hazelcast/util/ServiceLoaderTest.java
+++ b/hazelcast/src/test/java/com/hazelcast/util/ServiceLoaderTest.java
@@ -67,7 +67,7 @@ public class ServiceLoaderTest extends HazelcastTestSupport {
assertFalse(iterator.hasNext());
}
- @Test(expected = ClassCastException.class)
+ @Test
public void testFailFastWhenHookDoesNotImplementExpectedInteface() {
Class<?> otherInterface = newInterface("com.hazelcast.internal.serialization.DifferentInterface");
ClassLoader otherClassloader = otherInterface.getClassLoader();
@@ -79,7 +79,7 @@ public class ServiceLoaderTest extends HazelcastTestSupport {
Set<ServiceLoader.ServiceDefinition> definitions = singleton(definition);
ServiceLoader.ClassIterator<PortableHook> iterator = new ServiceLoader.ClassIterator<PortableHook>(definitions, PortableHook.class);
- iterator.hasNext();
+ assertFalse(iterator.hasNext());
}
@Test
|
Fix for class cast exception when running on mule
ClassCastException was there to check whether the hooks listed in META-INF
are actually implements relevant interface or not.
An example to related interface is PortableHook.
Since hook classes are moved in <I>, now it is hitting this exception and
causing instance to fail fast unncessarily.
To fix the issue we had to gave up on fail fast behaviour. Exception converted
to log to warn the user.
Fixes #<I>
|
hazelcast_hazelcast
|
train
|
d8b45f064d31f3c840492d19eb26433e15292fa5
|
diff --git a/gulpfile.js b/gulpfile.js
index <HASH>..<HASH> 100644
--- a/gulpfile.js
+++ b/gulpfile.js
@@ -256,7 +256,7 @@ gulp.task('clean-vendor-assets', function () {
// gulp dist
// Copies the files to the /dist folder for distribution as simple theme
gulp.task('dist', ['clean-dist'], function() {
- return gulp.src(['**/*', '!'+paths.bower, '!'+paths.bower+'**', '!'+paths.node, '!'+paths.node+'**', '!'+paths.dev, '!'+paths.dev+'/**', '!'+paths.dist, '!'+paths.dist+'/**', '!'+paths.distprod, '!'+paths.distprod+'/**', '!'+paths.sass, '!'+paths.sass+'/**', '!readme.txt', '!readme.md', '!package.json', '!gulpfile.js', '!CHANGELOG.md', '!.travis.yml', '!jshintignore', '!codesniffer.ruleset.xml', '*'])
+ return gulp.src(['**/*', '!'+paths.bower, '!'+paths.bower+'/**', '!'+paths.node, '!'+paths.node+'/**', '!'+paths.dev, '!'+paths.dev+'/**', '!'+paths.dist, '!'+paths.dist+'/**', '!'+paths.distprod, '!'+paths.distprod+'/**', '!'+paths.sass, '!'+paths.sass+'/**', '!readme.txt', '!readme.md', '!package.json', '!package-lock.json', '!gulpfile.js', '!gulpconfig.json', '!CHANGELOG.md', '!.travis.yml', '!jshintignore', '!codesniffer.ruleset.xml', '*'], {'buffer': false})
.pipe(replace('/js/jquery.slim.min.js', '/js'+paths.vendor+'/jquery.slim.min.js', {'skipBinary': true}))
.pipe(replace('/js/popper.min.js', '/js'+paths.vendor+'/popper.min.js', {'skipBinary': true}))
.pipe(replace('/js/skip-link-focus-fix.js', '/js'+paths.vendor+'/skip-link-focus-fix.js', {'skipBinary': true}))
|
fix paths, add files to ignored dist, and fix src buffer
buffer false on gulp.src to allow large images to pass through safely.
fix slash in front of bower and node.
added package-lock and gulpconfig to ignored list.
|
understrap_understrap
|
train
|
97b32959acee268d225ad68d4af10b90dc09dabf
|
diff --git a/fastlane/lib/fastlane/actions/docs/sync_code_signing.md b/fastlane/lib/fastlane/actions/docs/sync_code_signing.md
index <HASH>..<HASH> 100644
--- a/fastlane/lib/fastlane/actions/docs/sync_code_signing.md
+++ b/fastlane/lib/fastlane/actions/docs/sync_code_signing.md
@@ -390,7 +390,7 @@ lane :beta do
end
```
-By using the `force_for_new_devices` parameter, _match_ will check if the device count has changed since the last time you ran _match_, and automatically re-generate the provisioning profile if necessary. You can also use `force: true` to re-generate the provisioning profile on each run.
+By using the `force_for_new_devices` parameter, _match_ will check if the (enabled) device count has changed since the last time you ran _match_, and automatically re-generate the provisioning profile if necessary. You can also use `force: true` to re-generate the provisioning profile on each run.
_**Important:** The `force_for_new_devices` parameter is ignored for App Store provisioning profiles since they don't contain any device information._
diff --git a/match/lib/match/runner.rb b/match/lib/match/runner.rb
index <HASH>..<HASH> 100644
--- a/match/lib/match/runner.rb
+++ b/match/lib/match/runner.rb
@@ -355,7 +355,7 @@ module Match
devices = Spaceship::ConnectAPI::Device.all
unless device_classes.empty?
devices = devices.select do |device|
- device_classes.include?(device.device_class)
+ device_classes.include?(device.device_class) && device.enabled?
end
end
diff --git a/match/spec/runner_spec.rb b/match/spec/runner_spec.rb
index <HASH>..<HASH> 100644
--- a/match/spec/runner_spec.rb
+++ b/match/spec/runner_spec.rb
@@ -333,5 +333,42 @@ describe Match do
end
end
end
+
+ describe "#device_count_different?" do
+ let(:profile_file) { double("profile file") }
+ let(:uuid) { "1234-1234-1234-1234" }
+ let(:parsed_profile) { { "UUID" => uuid } }
+ let(:profile) { double("profile") }
+ let(:profile_device) { double("profile_device") }
+
+ before do
+ allow(profile).to receive(:uuid).and_return(uuid)
+ allow(profile).to receive(:fetch_all_devices).and_return([profile_device])
+ end
+
+ it "device is enabled" do
+ expect(FastlaneCore::ProvisioningProfile).to receive(:parse).and_return(parsed_profile)
+ expect(Spaceship::ConnectAPI::Profile).to receive(:all).and_return([profile])
+ expect(Spaceship::ConnectAPI::Device).to receive(:all).and_return([profile_device])
+
+ expect(profile_device).to receive(:device_class).and_return(Spaceship::ConnectAPI::Device::DeviceClass::IPOD)
+ expect(profile_device).to receive(:enabled?).and_return(true)
+
+ runner = Match::Runner.new
+ expect(runner.device_count_different?(profile: profile_file, platform: :ios)).to be(false)
+ end
+
+ it "device is disabled" do
+ expect(FastlaneCore::ProvisioningProfile).to receive(:parse).and_return(parsed_profile)
+ expect(Spaceship::ConnectAPI::Profile).to receive(:all).and_return([profile])
+ expect(Spaceship::ConnectAPI::Device).to receive(:all).and_return([profile_device])
+
+ expect(profile_device).to receive(:device_class).and_return(Spaceship::ConnectAPI::Device::DeviceClass::IPOD)
+ expect(profile_device).to receive(:enabled?).and_return(false)
+
+ runner = Match::Runner.new
+ expect(runner.device_count_different?(profile: profile_file, platform: :ios)).to be(true)
+ end
+ end
end
end
diff --git a/spaceship/lib/spaceship/connect_api/models/device.rb b/spaceship/lib/spaceship/connect_api/models/device.rb
index <HASH>..<HASH> 100644
--- a/spaceship/lib/spaceship/connect_api/models/device.rb
+++ b/spaceship/lib/spaceship/connect_api/models/device.rb
@@ -40,6 +40,10 @@ module Spaceship
return "devices"
end
+ def enabled?
+ return status == Status::ENABLED
+ end
+
#
# API
#
diff --git a/spaceship/spec/connect_api/models/device_spec.rb b/spaceship/spec/connect_api/models/device_spec.rb
index <HASH>..<HASH> 100644
--- a/spaceship/spec/connect_api/models/device_spec.rb
+++ b/spaceship/spec/connect_api/models/device_spec.rb
@@ -20,6 +20,11 @@ describe Spaceship::ConnectAPI::Device do
expect(model.status).to eq("ENABLED")
expect(model.udid).to eq("184098239048390489012849018")
expect(model.added_date).to eq("2018-10-10T01:43:27.000+0000")
+
+ expect(model.enabled?).to eq(true)
+
+ model.status = "DISABLED"
+ expect(model.enabled?).to eq(false)
end
end
end
|
[match] Only include enabled devices for force_for_new_devices (#<I>)
* [match] Only include enabled devices when counting devices for force_for_new_devices
* Added enabled? to device in connect_api
* Added some tests
|
fastlane_fastlane
|
train
|
f3f59420f72918c496d51f3d89857785cfcf4631
|
diff --git a/tornado/web.py b/tornado/web.py
index <HASH>..<HASH> 100644
--- a/tornado/web.py
+++ b/tornado/web.py
@@ -1633,8 +1633,12 @@ class StaticFileHandler(RequestHandler):
(r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
])
- The local root directory of the content should be passed as the ``path``
- argument to the handler.
+ The handler constructor requires a ``path`` argument, which specifies the
+ local root directory of the content to be served.
+
+ Note that a capture group in the regex is required to parse the value for
+ the ``path`` argument to the get() method (different than the constructor
+ argument above); see `URLSpec` for details.
To support aggressive browser caching, if the argument ``v`` is given
with the path, we set an infinite HTTP expiration header. So, if you
|
Added note regarding regex group capture to StaticFileHandler docs.
|
tornadoweb_tornado
|
train
|
4b0edb86727fd9d158af1ad979fd68a6a88d61f2
|
diff --git a/xpdo/transport/xpdotransport.class.php b/xpdo/transport/xpdotransport.class.php
index <HASH>..<HASH> 100644
--- a/xpdo/transport/xpdotransport.class.php
+++ b/xpdo/transport/xpdotransport.class.php
@@ -153,7 +153,8 @@ class xPDOTransport {
$version = '';
$part = next($exploded);
while ($part !== false) {
- if (is_numeric(substr($part, 0, 1)) && strpos($part, '.', 1) !== false) {
+ $dotPos = strpos($part, '.');
+ if ($dotPos > 0 && is_numeric(substr($part, 0, $dotPos))) {
$version = $part;
while (($part = next($exploded)) !== false) {
$version .= '-' . $part;
|
Improve xPDOTransport::parseSignature()
Now starts version on first exploded part containing a numeric string followed by a dot, e.g.
food.beer<I>-<I>a<I>-<I>-abc would parse to:
* name = food.beer<I>-<I>a<I>
* version = <I>-abc
|
modxcms_xpdo
|
train
|
4ddd21e50815ab4a2bee2cf5b088af095445301f
|
diff --git a/lib/relations/CSSBackgroundImage.js b/lib/relations/CSSBackgroundImage.js
index <HASH>..<HASH> 100644
--- a/lib/relations/CSSBackgroundImage.js
+++ b/lib/relations/CSSBackgroundImage.js
@@ -14,7 +14,7 @@ util.inherits(CSSBackgroundImage, Base);
_.extend(CSSBackgroundImage.prototype, {
remove: function () {
var style = this.cssRule.style;
- if (this.propertyName === 'background-image' || style[this.propertyName].match(/^url\((\'|\"|)([^\'\"]*)\1\)^/)) {
+ if (this.propertyName === 'background-image' || style[this.propertyName].match(/^url\((\'|\"|)([^\'\"]*)\1\)$/)) {
style.removeProperty(this.propertyName);
} else {
// We're attached to a 'background' property with other tokens in it. Just remove the url().
|
relations.CSSBackgroundImage.remove bugfix: Fixed '^' in regexp that should have been '$'. Caused the spriting code to produce some weird CSS properties that spooked Internet Explorer.
|
assetgraph_assetgraph
|
train
|
62517fc78ef7184637fa91b31ac20c043d32826a
|
diff --git a/packages/react-ui-testing/TestPages/webpack.config.js b/packages/react-ui-testing/TestPages/webpack.config.js
index <HASH>..<HASH> 100644
--- a/packages/react-ui-testing/TestPages/webpack.config.js
+++ b/packages/react-ui-testing/TestPages/webpack.config.js
@@ -95,6 +95,7 @@ function createConfig(reactVersion, retailUIVersion) {
})
],
devServer: {
+ host: "0.0.0.0",
port: 8083,
historyApiFallback: {
rewrites: versions.map(version =>
|
build(react-ui-testing): allow to use any host for devServer
|
skbkontur_retail-ui
|
train
|
1e2a504ff041e76eebcfbf040625d5a867ea9ec6
|
diff --git a/CHANGELOG.rdoc b/CHANGELOG.rdoc
index <HASH>..<HASH> 100644
--- a/CHANGELOG.rdoc
+++ b/CHANGELOG.rdoc
@@ -1,5 +1,11 @@
= Changelog
+== master
+
+* CHANGED: Standardized whois.aero #status property,
+ it now returns symbols instead of strings.
+
+
== Release 1.3.10
* SERVER: Removed the .fed.us TLD definition.
diff --git a/lib/whois/answer/parser/whois.aero.rb b/lib/whois/answer/parser/whois.aero.rb
index <HASH>..<HASH> 100644
--- a/lib/whois/answer/parser/whois.aero.rb
+++ b/lib/whois/answer/parser/whois.aero.rb
@@ -35,34 +35,40 @@ module Whois
class WhoisAero < Base
property_supported :status do
- @status ||= if content_for_scanner =~ /Domain Status:(.*?)\n/
- $1
+ @status ||= if content_for_scanner =~ /Domain Status:(.+?)\n/
+ case $1.downcase
+ when "ok" then :registered
+ else
+ Whois.bug!(ParserError, "Unknown status `#{$1}'.")
+ end
+ else
+ :available
end
end
property_supported :available? do
- @available ||= (content_for_scanner.strip == "NOT FOUND")
+ @available ||= (content_for_scanner.strip == "NOT FOUND")
end
property_supported :registered? do
- !available?
+ @registered ||= !available?
end
property_supported :created_on do
- @created_on ||= if content_for_scanner =~ /Created On:(.*?)\n/
+ @created_on ||= if content_for_scanner =~ /Created On:(.+?)\n/
Time.parse($1)
end
end
property_supported :updated_on do
- @updated_on ||= if content_for_scanner =~ /Updated On:(.*?)\n/
+ @updated_on ||= if content_for_scanner =~ /Updated On:(.+?)\n/
Time.parse($1)
end
end
property_supported :expires_on do
- @expires_on ||= if content_for_scanner =~ /Expires On:(.*?)\n/
+ @expires_on ||= if content_for_scanner =~ /Expires On:(.+?)\n/
Time.parse($1)
end
end
diff --git a/test/whois/answer/parser/whois.aero_test.rb b/test/whois/answer/parser/whois.aero_test.rb
index <HASH>..<HASH> 100644
--- a/test/whois/answer/parser/whois.aero_test.rb
+++ b/test/whois/answer/parser/whois.aero_test.rb
@@ -11,24 +11,38 @@ class AnswerParserWhoisAeroTest < Whois::Answer::Parser::TestCase
def test_status
parser = @klass.new(load_part('/registered.txt'))
- expected = "OK"
+ expected = :registered
assert_equal expected, parser.status
assert_equal expected, parser.instance_eval { @status }
parser = @klass.new(load_part('/available.txt'))
- expected = nil
+ expected = :available
assert_equal expected, parser.status
assert_equal expected, parser.instance_eval { @status }
end
def test_available?
- assert !@klass.new(load_part('/registered.txt')).available?
- assert @klass.new(load_part('/available.txt')).available?
+ parser = @klass.new(load_part('/registered.txt'))
+ expected = false
+ assert_equal expected, parser.available?
+ assert_equal expected, parser.instance_eval { @available }
+
+ parser = @klass.new(load_part('/available.txt'))
+ expected = true
+ assert_equal expected, parser.available?
+ assert_equal expected, parser.instance_eval { @available }
end
def test_registered?
- assert @klass.new(load_part('/registered.txt')).registered?
- assert !@klass.new(load_part('/available.txt')).registered?
+ parser = @klass.new(load_part('/registered.txt'))
+ expected = true
+ assert_equal expected, parser.registered?
+ assert_equal expected, parser.instance_eval { @registered }
+
+ parser = @klass.new(load_part('/available.txt'))
+ expected = false
+ assert_equal expected, parser.registered?
+ assert_equal expected, parser.instance_eval { @registered }
end
|
Standardized whois.aero #status property,
it now returns symbols instead of strings.
|
weppos_whois
|
train
|
08a167b54e6663f449ff69b8657f16da16afdc75
|
diff --git a/lotte/src/main/java/com/airbnb/lotte/layers/LotteLayer.java b/lotte/src/main/java/com/airbnb/lotte/layers/LotteLayer.java
index <HASH>..<HASH> 100644
--- a/lotte/src/main/java/com/airbnb/lotte/layers/LotteLayer.java
+++ b/lotte/src/main/java/com/airbnb/lotte/layers/LotteLayer.java
@@ -204,7 +204,7 @@ public class LotteLayer {
keys.add(0f);
keyTimes.add(1f);
} else {
- keys.add(0f);
+ keys.add(1f);
keyTimes.add(1f);
}
diff --git a/lotte/src/main/java/com/airbnb/lotte/layers/LotteLayerView.java b/lotte/src/main/java/com/airbnb/lotte/layers/LotteLayerView.java
index <HASH>..<HASH> 100644
--- a/lotte/src/main/java/com/airbnb/lotte/layers/LotteLayerView.java
+++ b/lotte/src/main/java/com/airbnb/lotte/layers/LotteLayerView.java
@@ -10,6 +10,7 @@ import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Shader;
import android.graphics.drawable.Drawable;
+import android.support.annotation.FloatRange;
import android.support.annotation.NonNull;
import android.util.SparseArray;
@@ -73,7 +74,6 @@ public class LotteLayerView extends LotteAnimatableLayer {
}
private void setupForModel(Drawable.Callback callback) {
- setBounds(composition.getBounds());
anchorPoint = new Observable<>();
anchorPoint.setValue(new PointF());
@@ -245,6 +245,30 @@ public class LotteLayerView extends LotteAnimatableLayer {
canvas.translate(-translation.x, -translation.y);
}
+ @Override
+ public void play() {
+ super.play();
+ if (matte != null) {
+ matte.play();
+ }
+ }
+
+ @Override
+ public void setProgress(@FloatRange(from = 0f, to = 1f) float progress) {
+ super.setProgress(progress);
+ if (matte != null) {
+ matte.setProgress(progress);
+ }
+ }
+
+ @Override
+ public void setMaxDuration(long maxDuration) {
+ super.setMaxDuration(maxDuration);
+ if (matte != null) {
+ matte.setMaxDuration(maxDuration);
+ }
+ }
+
public long getId() {
return layerModel.getId();
}
|
Wired up some missing calls to matte layers
|
airbnb_lottie-android
|
train
|
b09869b7c65923b9c37b9e8bbd62e4c9db5bb255
|
diff --git a/src/Psalm/Internal/CallMap.php b/src/Psalm/Internal/CallMap.php
index <HASH>..<HASH> 100644
--- a/src/Psalm/Internal/CallMap.php
+++ b/src/Psalm/Internal/CallMap.php
@@ -1191,7 +1191,7 @@ return [
'connection_aborted' => ['int'],
'connection_status' => ['int'],
'connection_timeout' => ['int'],
-'constant' => ['mixed', 'const_name'=>'string'],
+'constant' => ['scalar|array<scalar>|null', 'const_name'=>'string'],
'convert_cyr_string' => ['string', 'str'=>'string', 'from'=>'string', 'to'=>'string'],
'convert_uudecode' => ['string', 'data'=>'string'],
'convert_uuencode' => ['string', 'data'=>'string'],
@@ -11224,7 +11224,7 @@ return [
'ReflectionClassConstant::getDocComment' => ['string|false'],
'ReflectionClassConstant::getModifiers' => ['int'],
'ReflectionClassConstant::getName' => ['string'],
-'ReflectionClassConstant::getValue' => ['mixed'],
+'ReflectionClassConstant::getValue' => ['scalar|array<scalar>|null'],
'ReflectionClassConstant::isPrivate' => ['bool'],
'ReflectionClassConstant::isProtected' => ['bool'],
'ReflectionClassConstant::isPublic' => ['bool'],
@@ -11913,7 +11913,7 @@ return [
'SessionUpdateTimestampHandler::validateId' => ['char', 'id'=>'string'],
'SessionUpdateTimestampHandlerInterface::updateTimestamp' => ['bool', 'key'=>'string', 'val'=>'string'],
'SessionUpdateTimestampHandlerInterface::validateId' => ['bool', 'key'=>'string'],
-'set_error_handler' => ['?callable', 'error_handler'=>'null|callable(int,string,string=,int=,array=):bool', 'error_types='=>'int'],
+'set_error_handler' => ['null|callable(int,string,string=,int=,array=):bool', 'error_handler'=>'null|callable(int,string,string=,int=,array=):bool', 'error_types='=>'int'],
'set_exception_handler' => ['null|callable(Throwable):void', 'exception_handler'=>'null|callable(Throwable):void'],
'set_file_buffer' => ['int', 'fp'=>'resource', 'buffer'=>'int'],
'set_include_path' => ['string|false', 'new_include_path'=>'string'],
|
callmap changes (#<I>)
* set_error_handler return the same type as its param
* a constant can only have scalars, array of scalars or null
* a constant can only have scalars, array of scalars or null
|
vimeo_psalm
|
train
|
9d915d61a69b3d2b6d11052c1376ec882779e7a6
|
diff --git a/src/Ractive/Ractive.js b/src/Ractive/Ractive.js
index <HASH>..<HASH> 100644
--- a/src/Ractive/Ractive.js
+++ b/src/Ractive/Ractive.js
@@ -154,7 +154,7 @@ Ractive = function ( options ) {
// Unpack string-based partials, if necessary
for ( key in this.partials ) {
- if ( this.partials.hasOwnProperty( key ) && this.partials[ key ].length === 1 && typeof this.partials[ key ] === 'string' ) {
+ if ( this.partials.hasOwnProperty( key ) && this.partials[ key ].length === 1 && typeof this.partials[ key ][0] === 'string' ) {
this.partials[ key ] = this.partials[ key ][0];
}
}
|
fix bug with unpacking partials
|
ractivejs_ractive
|
train
|
4c9c2149da30dcfc4424104d87c5a7ce4166e872
|
diff --git a/imdb.class.php b/imdb.class.php
index <HASH>..<HASH> 100644
--- a/imdb.class.php
+++ b/imdb.class.php
@@ -51,7 +51,7 @@ class IMDB {
const IMDB_NAME = '~href="/name/nm(\d+)/">(.*)</a>~Ui';
const IMDB_PLOT = '~<h2>Storyline</h2><p>(.*)(<em class="nobr">|</p>)~Ui';
const IMDB_POSTER = '~href="/media/(.*)"\s+><img src="(.*)"~Ui';
- const IMDB_RATING = '~<span class="rating-rating"><span class="value".*?>(\d+\.\d+)<span>~Ui';
+ const IMDB_RATING = '~<span class="rating-rating"><span class="value".*?>(\d+\.\d+)</span>~Ui';
const IMDB_REDIRECT = '~Location:\s(.*)~';
const IMDB_RELEASE_DATE = '~Release Date:</h4>(.*)(<span|</div>)~Ui';
const IMDB_RUNTIME = '~(\d+)\smin~Uis';
|
rating repaired (forgot to add slash)
|
FabianBeiner_PHP-IMDB-Grabber
|
train
|
a3dc40f5fa712ce5aa9e2f9d4d65bfc47952b0f1
|
diff --git a/landsat/landsat.py b/landsat/landsat.py
index <HASH>..<HASH> 100755
--- a/landsat/landsat.py
+++ b/landsat/landsat.py
@@ -262,6 +262,10 @@ def main(args):
stored = process_image(path, args.bands, False, args.pansharpen)
+ if args.upload:
+ u = Uploader(args.key, args.secret, args.region)
+ u.run(args.bucket, get_file(stored), stored)
+
exit("The output is stored at %s" % stored)
else:
exit('Download Completed', 0)
|
upload sequence was missing from download sub command
|
developmentseed_landsat-util
|
train
|
cbfa38680cc7d940bda8c38d762d1cc15d510115
|
diff --git a/t/napbase.py b/t/napbase.py
index <HASH>..<HASH> 100755
--- a/t/napbase.py
+++ b/t/napbase.py
@@ -82,7 +82,7 @@ class NapTest(unittest.TestCase):
attrs = {
'name': 'test-pool-wrong',
'schema': schema[0]['id'],
- 'default_type': 'assignment',
+ 'default_type': 'reservation',
'description': 'A simple test pool with incorrect name!'
}
pool_id = self.nap.add_pool(attrs)
@@ -101,6 +101,7 @@ class NapTest(unittest.TestCase):
spec = { 'name': 'test-pool-wrong' }
attrs = {
'name': 'test-pool',
+ 'default_type': 'assignment',
'description': 'A simple test pool with correct name!'
}
self.nap.edit_pool(spec, attrs)
@@ -112,6 +113,16 @@ class NapTest(unittest.TestCase):
self.assertEqual(pool[0]['description'], attrs['description'], 'Modified description differ from listed description')
+ def test_pool_remove(self):
+ """ Remove a pool
+ """
+ spec = { 'name': 'test-pool' }
+ self.nap.remove_pool(spec)
+ # check that search for old record doesn't return anything
+ pool = self.nap.list_pool(spec)
+ self.assertEqual(pool, [], 'Old entry still exists')
+
+
def test_pool(self):
|
Add unittest for remove_pool
.. and slightly modify modify_pool to also change default_type so we
know that is working!
|
SpriteLink_NIPAP
|
train
|
e4032674d7d2f77d15390cb090fa36dfc7f209ab
|
diff --git a/lib/fastly_api.php b/lib/fastly_api.php
index <HASH>..<HASH> 100644
--- a/lib/fastly_api.php
+++ b/lib/fastly_api.php
@@ -14,7 +14,7 @@ class FastlyAPI {
private $cookie_file = "./fastly.cookie";
public function __construct () {
- $this->_ch = curl_init();
+ $this->_curl_init();
}
public function __destruct () {
@@ -232,6 +232,7 @@ class FastlyAPI {
}
# =================================================================
+ # http://www.fastly.com/docs/api#Auth
/*
* POST /login
* -requires having user/pass
@@ -284,6 +285,8 @@ class FastlyAPI {
return true;
}
+ # =================================================================
+ # http://www.fastly.com/docs/api#Users
/*
* GET /current_user
* -Get the logged in user
@@ -331,27 +334,64 @@ class FastlyAPI {
/*
* GET /user
* -List users
- * -if role < superuser, will fail
- * -if role = superuser, will get users in YOUR customer
- * -if role = admin, will get ALL users in ALL customers (have fun)
- *
+ * -if role < admin, fail
+ * -if role = admin, gets ALL users in ALL customers (have fun)
+ */
+ public function API_users () {
+ $this->_lastmsg = null;
+
+ # list user mode
+ $ret = $this->_get( '/user' );
+
+ if( $ret === false ) {
+ $this->_lastmsg = 'hard_false';
+ return false;
+ }
+
+ if( !empty($ret->msg) ) {
+ $this->_lastmsg = $ret->msg;
+ }
+
+ if( $this->lasthttp != 200 ) {
+ return false;
+ }
+
+
+ return $ret;
+ }
+
+ /*
* GET /user/<id>
* -Get a specific user
- *
- * NOTE:
- * getting ANY user requires [role=ADMIN]
- * getting a user in your CUSTOMER requires [role=superuser]
- * can always get YOUR id (mimics doing /current_user ?)
+ * -if role = user, can get own id
+ * (same return as /current_user, but does not update internal user cache)
+ * -if role = superuser, can get any user in your CUSTOMER
+ * -if role = admin, can get any user
*/
- public function API_user ( $id=null ) {
+ public function API_user ( $user_id=null ) {
+ $this->_lastmsg = null;
- if( empty($id) ) {
- return $this->_get( '/user' );
- } else {
- return $this->_get( '/user/' . $id );
+ # single user mode
+ $ret = $this->_get( '/user/' . $user_id );
+
+ if( $ret === false ) {
+ $this->_lastmsg = 'hard_false';
+ return false;
}
+
+ if( !empty($ret->msg) ) {
+ $this->_lastmsg = $ret->msg;
+ }
+
+ if( $this->lasthttp != 200 ) {
+ return false;
+ }
+
+ return $ret;
}
+ # =================================================================
+ # http://www.fastly.com/docs/api#Customers
/*
* GET /current_customer
* -Get the logged in customer info
|
split the 2 modes of /user into functions, fleshed them out to do real logic
|
fastly_fastly-php
|
train
|
d56c66a5a82e07ad0c9ef513887c70e5209bfc58
|
diff --git a/bin/testUpdated.js b/bin/testUpdated.js
index <HASH>..<HASH> 100644
--- a/bin/testUpdated.js
+++ b/bin/testUpdated.js
@@ -20,13 +20,12 @@
const {execSync} = require('child_process');
let output;
+console.info('Checking for updated packages');
try {
output = execSync(`npx lerna updated`);
-} catch (error) {}
-
-if (output === undefined) {
+} catch (error) {
console.info(`No project updates - skipping tests`);
- return 0;
+ process.exit(0);
}
const updatedProjects = output
@@ -34,6 +33,9 @@ const updatedProjects = output
.replace(/- /g, '')
.match(/[^\r\n]+/g);
+console.info('Building all packages');
+execSync(`yarn clear && yarn dist`, {stdio: [0, 1]});
+
updatedProjects.forEach(project => {
console.info(`Running tests for project "${project}"`);
execSync(`npx lerna run --scope ${project} test`, {stdio: [0, 1]});
|
fix: Build all packages before testing selective packages (#<I>)
|
wireapp_wire-web-packages
|
train
|
d60505b03452aaa98cd83c5f1087fab3c050cabf
|
diff --git a/vasppy/__init__.py b/vasppy/__init__.py
index <HASH>..<HASH> 100644
--- a/vasppy/__init__.py
+++ b/vasppy/__init__.py
@@ -1 +1 @@
-__version__ = '0.4.0.11'
+__version__ = '0.4.1.0'
|
Bumped version number to <I>
|
bjmorgan_vasppy
|
train
|
5b6d66950f20d5efb2c32f5f24b388766226f1d7
|
diff --git a/cme/protocols/ldap.py b/cme/protocols/ldap.py
index <HASH>..<HASH> 100644
--- a/cme/protocols/ldap.py
+++ b/cme/protocols/ldap.py
@@ -54,7 +54,7 @@ class ldap(connection):
egroup.add_argument("--kerberoasting", help='Get TGS ticket ready to crack with hashcat')
vgroup = ldap_parser.add_argument_group("Retrieve useful information on the domain", "Options to to play with Kerberos")
- vgroup.add_argument("--trusted-for-auth", action="store_true", help="Get the list of users and computers with flag TRUSTED_FOR_DELEGATION")
+ vgroup.add_argument("--trusted-for-delegation", action="store_true", help="Get the list of users and computers with flag TRUSTED_FOR_DELEGATION")
vgroup.add_argument("--admin-count", action="store_true", help="Get objets that had the value adminCount=1")
return parser
@@ -416,7 +416,7 @@ class ldap(connection):
else:
self.logger.error("No entries found!")
- def trusted_for_auth(self):
+ def trusted_for_delegation(self):
# Building the search filter
searchFilter = "(userAccountControl:1.2.840.113556.1.4.803:=524288)"
try:
diff --git a/cme/protocols/ssh.py b/cme/protocols/ssh.py
index <HASH>..<HASH> 100644
--- a/cme/protocols/ssh.py
+++ b/cme/protocols/ssh.py
@@ -67,8 +67,8 @@ class ssh(connection):
self.conn.connect(self.host, port=self.args.port, username=username, password=password, look_for_keys=False, allow_agent=False)
self.check_if_admin()
- self.logger.success(u'{}:{} {}'.format(username.decode('utf-8'),
- password.decode('utf-8'),
+ self.logger.success(u'{}:{} {}'.format(username,
+ password,
highlight('({})'.format(self.config.get('CME', 'pwn3d_label')) if self.admin_privs else '')))
return True
|
Fix ssh authentication error and update option for unconstrainte delegation to --trusted-for-delegation
|
byt3bl33d3r_CrackMapExec
|
train
|
d416bc2e8289ec42a1fae8bfb2aba44fc365afb4
|
diff --git a/lib/iob/total.js b/lib/iob/total.js
index <HASH>..<HASH> 100644
--- a/lib/iob/total.js
+++ b/lib/iob/total.js
@@ -19,7 +19,8 @@ function iobTotal(opts, time) {
if (tIOB && tIOB.activityContrib) activity += tIOB.activityContrib;
// keep track of bolus IOB separately for snoozes, but decay it three times as fast
if (treatment.insulin >= 0.2 && treatment.started_at) {
- var bIOB = iobCalc(treatment, time, dia*2);
+ //use half the dia for double speed bolus snooze
+ var bIOB = iobCalc(treatment, time, dia / 2);
//console.log(treatment);
//console.log(bIOB);
if (bIOB && bIOB.iobContrib) bolusiob += bIOB.iobContrib;
|
fix bolus iob since it was compensating for the dia scaling bug
|
openaps_oref0
|
train
|
24b3addf1b781859f3b2dd4947c1b08a10e870a8
|
diff --git a/ruby-tools/src/main/ruby/maven/tools/gem_project.rb b/ruby-tools/src/main/ruby/maven/tools/gem_project.rb
index <HASH>..<HASH> 100644
--- a/ruby-tools/src/main/ruby/maven/tools/gem_project.rb
+++ b/ruby-tools/src/main/ruby/maven/tools/gem_project.rb
@@ -1,4 +1,4 @@
-require File.join(File.dirname(__FILE__), '..', 'model', 'model.rb')
+require File.join(File.dirname(File.dirname(__FILE__)), 'model', 'model.rb')
require File.join(File.dirname(__FILE__), 'gemfile_lock.rb')
require File.join(File.dirname(__FILE__), 'versions.rb')
|
make require work with jar-URIs
|
torquebox_jruby-maven-plugins
|
train
|
f7b48ed48888cc4171da078b3970f2eecea1300d
|
diff --git a/src/test/java/one/util/streamex/MoreCollectorsTest.java b/src/test/java/one/util/streamex/MoreCollectorsTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/one/util/streamex/MoreCollectorsTest.java
+++ b/src/test/java/one/util/streamex/MoreCollectorsTest.java
@@ -154,11 +154,34 @@ public class MoreCollectorsTest {
checkCollector("entry", new SimpleEntry<>(expectedMin.get(0), (long) expectedMin.size()), ints::stream,
MoreCollectors.minAll(downstream));
});
+
+ class MyNumber implements Comparable<MyNumber> {
+ int value;
+
+ MyNumber(int value) {
+ this.value = value;
+ }
+
+ @Override
+ public int hashCode() {
+ return value;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ return obj instanceof MyNumber && ((MyNumber)obj).value == value;
+ }
+
+ @Override
+ public int compareTo(MyNumber o) {
+ return Integer.compare(value, o.value);
+ }
+ }
- Integer a = new Integer(1), b = new Integer(1), c = new Integer(1000), d = new Integer(1000);
- List<Integer> ints = IntStreamEx.range(10, 100).boxed().append(a, c).prepend(b, d).toList();
- streamEx(ints::stream, supplier -> {
- List<Integer> list = supplier.get().collect(MoreCollectors.maxAll());
+ MyNumber a = new MyNumber(1), b = new MyNumber(1), c = new MyNumber(1000), d = new MyNumber(1000);
+ List<MyNumber> nums = IntStreamEx.range(10, 100).mapToObj(MyNumber::new).append(a, c).prepend(b, d).toList();
+ streamEx(nums::stream, supplier -> {
+ List<MyNumber> list = supplier.get().collect(MoreCollectors.maxAll());
assertEquals(2, list.size());
assertSame(d, list.get(0));
assertSame(c, list.get(1));
|
MoreCollectorsTest: custom integer used (as new Integer() is deprecated
in Java-9)
|
amaembo_streamex
|
train
|
d0a838aafe25751a39bf162893584cecea00d7b0
|
diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md
index <HASH>..<HASH> 100644
--- a/RELEASE-NOTES.md
+++ b/RELEASE-NOTES.md
@@ -24,6 +24,7 @@ Picocli follows [semantic versioning](http://semver.org/).
## <a name="4.0.0-alpha-2-fixes"></a> Fixed issues
+- [#656] Bugfix for issue where synopsis for composite groups did not expand for n..* (n > 1). Thanks to Arno Tuomainen for finding this issue.
- [#635] Bugfix in validation: did not show an error if some but not all parts of a co-occurring group were specified. Thanks to [Philipp Hanslovsky](https://github.com/hanslovsky) for the pull request.
- [#653] Bugfix: validation should be skipped if help was requested. Thanks to [Andreas Deininger](https://github.com/deining) for raising this.
- [#652] Documentation: fixes in user manual. Thanks to [Andreas Deininger](https://github.com/deining) for the pull request.
diff --git a/src/main/java/picocli/CommandLine.java b/src/main/java/picocli/CommandLine.java
index <HASH>..<HASH> 100644
--- a/src/main/java/picocli/CommandLine.java
+++ b/src/main/java/picocli/CommandLine.java
@@ -6611,13 +6611,13 @@ public class CommandLine {
String prefix = multiplicity().min > 0 ? "(" : "[";
String postfix = multiplicity().min > 0 ? ")" : "]";
Text result = colorScheme.ansi().text(prefix).concat(synopsis).concat(postfix);
+ int i = 1;
+ for (; i < multiplicity.min; i++) {
+ result = result.concat(" (").concat(synopsis).concat(")");
+ }
if (multiplicity().isVariable) {
result = result.concat("...");
} else {
- int i = 1;
- for (; i < multiplicity.min; i++) {
- result = result.concat(" (").concat(synopsis).concat(")");
- }
for (; i < multiplicity.max; i++) {
result = result.concat(" [").concat(synopsis).concat("]");
}
diff --git a/src/test/java/picocli/ArgGroupTest.java b/src/test/java/picocli/ArgGroupTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/picocli/ArgGroupTest.java
+++ b/src/test/java/picocli/ArgGroupTest.java
@@ -1886,6 +1886,45 @@ public class ArgGroupTest {
assertEquals(2, c2.dependent.c);
}
+ static class CompositeGroupSynopsisDemo {
+
+ @ArgGroup(exclusive = false, multiplicity = "2..*")
+ List<Composite> composites;
+
+ static class Composite {
+ @ArgGroup(exclusive = false, multiplicity = "1")
+ Dependent dependent;
+
+ @ArgGroup(exclusive = true, multiplicity = "1")
+ Exclusive exclusive;
+ }
+
+ static class Dependent {
+ @Option(names = "-a", required = true)
+ int a;
+ @Option(names = "-b", required = true)
+ int b;
+ @Option(names = "-c", required = true)
+ int c;
+ }
+
+ static class Exclusive {
+ @Option(names = "-x", required = true)
+ boolean x;
+ @Option(names = "-y", required = true)
+ boolean y;
+ @Option(names = "-z", required = true)
+ boolean z;
+ }
+ }
+ @Test
+ public void testCompositeSynopsisDemo() {
+ CompositeGroupSynopsisDemo example = new CompositeGroupSynopsisDemo();
+ CommandLine cmd = new CommandLine(example);
+ String synopsis = cmd.getCommandSpec().argGroups().get(0).synopsis();
+ assertEquals("((-a=<a> -b=<b> -c=<c>) (-x | -y | -z)) ((-a=<a> -b=<b> -c=<c>) (-x | -y | -z))...", synopsis);
+ }
+
// https://github.com/remkop/picocli/issues/635
@Command(name = "test-composite")
static class TestComposite {
|
[#<I>] Bugfix for issue where synopsis for composite groups did not expand for n..* (n > 1)
Closes #<I>
|
remkop_picocli
|
train
|
f7002b5150bf068de0daae450e78eee1a183f63a
|
diff --git a/src/main/java/org/junit/rules/ExpectedException.java b/src/main/java/org/junit/rules/ExpectedException.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/junit/rules/ExpectedException.java
+++ b/src/main/java/org/junit/rules/ExpectedException.java
@@ -106,18 +106,19 @@ import org.junit.runners.model.Statement;
* exception. E.g. "Test doesn't throw %s." will fail with the error message
* "Test doesn't throw an instance of foo.".
*
- * @deprecated Since 4.13
- * {@link org.junit.Assert#assertThrows(Class, org.junit.function.ThrowingRunnable)
- * Assert.assertThrows} can be used to verify that your code throws a specific
- * exception.
* @since 4.7
*/
-@Deprecated
public class ExpectedException implements TestRule {
/**
* Returns a {@linkplain TestRule rule} that expects no exception to
* be thrown (identical to behavior without this rule).
+ *
+ * @deprecated Since 4.13
+ * {@link org.junit.Assert#assertThrows(Class, org.junit.function.ThrowingRunnable)
+ * Assert.assertThrows} can be used to verify that your code throws a specific
+ * exception.
*/
+ @Deprecated
public static ExpectedException none() {
return new ExpectedException();
}
|
Only deprecate ExpectedException.none()
Instead of deprecating the whole class, now only the factory method is
deprecated and points to `assertThrows()`. This will make it easier to
temporarily suppress the exception for codebases that use it
extensively by adding an intermediate factory method of their own.
Resolves #<I>.
|
junit-team_junit4
|
train
|
cc36428260f979d78e69d86dac0e6a4ba6b17780
|
diff --git a/superset/__init__.py b/superset/__init__.py
index <HASH>..<HASH> 100644
--- a/superset/__init__.py
+++ b/superset/__init__.py
@@ -69,7 +69,7 @@ db = SQLA(app)
if conf.get('WTF_CSRF_ENABLED'):
csrf = CSRFProtect(app)
-utils.pessimistic_connection_handling(db.engine.pool)
+utils.pessimistic_connection_handling(db.engine)
cache = utils.setup_cache(app, conf.get('CACHE_CONFIG'))
tables_cache = utils.setup_cache(app, conf.get('TABLE_NAMES_CACHE_CONFIG'))
diff --git a/superset/utils.py b/superset/utils.py
index <HASH>..<HASH> 100644
--- a/superset/utils.py
+++ b/superset/utils.py
@@ -40,7 +40,7 @@ from flask_babel import gettext as __
import markdown as md
from past.builtins import basestring
from pydruid.utils.having import Having
-from sqlalchemy import event, exc
+from sqlalchemy import event, exc, select
from sqlalchemy.types import TypeDecorator, TEXT
logging.getLogger('MARKDOWN').setLevel(logging.INFO)
@@ -436,19 +436,42 @@ class timeout(object):
logging.warning("timeout can't be used in the current context")
logging.exception(e)
-def pessimistic_connection_handling(target):
- @event.listens_for(target, "checkout")
- def ping_connection(dbapi_connection, connection_record, connection_proxy):
- """
- Disconnect Handling - Pessimistic, taken from:
- http://docs.sqlalchemy.org/en/rel_0_9/core/pooling.html
- """
- cursor = dbapi_connection.cursor()
+
+def pessimistic_connection_handling(some_engine):
+ @event.listens_for(some_engine, "engine_connect")
+ def ping_connection(connection, branch):
+ if branch:
+ # "branch" refers to a sub-connection of a connection,
+ # we don't want to bother pinging on these.
+ return
+
+ # turn off "close with result". This flag is only used with
+ # "connectionless" execution, otherwise will be False in any case
+ save_should_close_with_result = connection.should_close_with_result
+ connection.should_close_with_result = False
+
try:
- cursor.execute("SELECT 1")
- except:
- raise exc.DisconnectionError()
- cursor.close()
+ # run a SELECT 1. use a core select() so that
+ # the SELECT of a scalar value without a table is
+ # appropriately formatted for the backend
+ connection.scalar(select([1]))
+ except exc.DBAPIError as err:
+ # catch SQLAlchemy's DBAPIError, which is a wrapper
+ # for the DBAPI's exception. It includes a .connection_invalidated
+ # attribute which specifies if this connection is a "disconnect"
+ # condition, which is based on inspection of the original exception
+ # by the dialect in use.
+ if err.connection_invalidated:
+ # run the same SELECT again - the connection will re-validate
+ # itself and establish a new connection. The disconnect detection
+ # here also causes the whole connection pool to be invalidated
+ # so that all stale connections are discarded.
+ connection.scalar(select([1]))
+ else:
+ raise
+ finally:
+ # restore "close with result"
+ connection.should_close_with_result = save_should_close_with_result
class QueryStatus(object):
|
Modernize SQLA pessimistic handling (#<I>)
Looks like SQLAlchemy has redefined the best practice around
pessimistic connection handling.
|
apache_incubator-superset
|
train
|
465cf5be889fcfde0b8c77aaa3414cb43dbcff75
|
diff --git a/src/Sulu/Bundle/WebsocketBundle/Controller/FallbackController.php b/src/Sulu/Bundle/WebsocketBundle/Controller/FallbackController.php
index <HASH>..<HASH> 100644
--- a/src/Sulu/Bundle/WebsocketBundle/Controller/FallbackController.php
+++ b/src/Sulu/Bundle/WebsocketBundle/Controller/FallbackController.php
@@ -49,6 +49,12 @@ class FallbackController
$app->onMessage($connection, $message);
+ // clean output buffer if there is data in it
+ // happens if a twig error occurs
+ if (ob_get_length() > 0) {
+ ob_clean();
+ }
+
return new Response($connection->getData(), 200, array('Content-Type' => 'application/json'));
}
}
|
added ob_clean if outputbuffer is not empty
|
sulu_sulu
|
train
|
808b6dd3690f344dffb34d45305073583ce182c9
|
diff --git a/facebook.py b/facebook.py
index <HASH>..<HASH> 100755
--- a/facebook.py
+++ b/facebook.py
@@ -52,6 +52,11 @@ except ImportError:
import json
_parse_json = json.loads
+# Find a query string parser
+try:
+ from urlparse import parse_qs
+except ImportError:
+ from cgi import parse_qs
class GraphAPI(object):
"""A client for the Facebook Graph API.
@@ -348,15 +353,29 @@ def get_user_from_cookie(cookies, app_id, app_secret):
http://github.com/facebook/connect-js/. Read more about Facebook
authentication at http://developers.facebook.com/docs/authentication/.
"""
- cookie = cookies.get("fbs_" + app_id, "")
+ cookie = cookies.get("fbsr_" + app_id, "")
if not cookie: return None
- args = dict((k, v[-1]) for k, v in cgi.parse_qs(cookie.strip('"')).items())
- payload = "".join(k + "=" + args[k] for k in sorted(args.keys())
- if k != "sig")
- sig = hashlib.md5(payload + app_secret).hexdigest()
- expires = int(args["expires"])
- if sig == args.get("sig") and (expires == 0 or time.time() < expires):
- return args
+ parsed_request = parse_signed_request(cookie, app_secret)
+ args = {
+ "client_id": app_id,
+ "client_secret": app_secret,
+ "code": parsed_request["code"],
+ "redirect_uri":""
+ }
+ # We would use GraphAPI.request() here, except for that the fact that the
+ # response is a key-value pair, and not JSON.
+ response = urllib.urlopen("https://graph.facebook.com/oauth/access_token" +
+ "?" + urllib.urlencode(args))
+ query_str = parse_qs(response.read())
+ if "access_token" in query_str:
+ result = {
+ "uid":parsed_request["user_id"],
+ "issued_at":parsed_request["issued_at"],
+ "access_token":query_str["access_token"][0],
+ }
+ if "expires" in query_str:
+ result["expires"] = query_str["expires"][0]
+ return result
else:
return None
|
OAuth <I> support.
Support fbsr cookies and retrieving OAuth <I> based access tokens. See
<URL>
|
mobolic_facebook-sdk
|
train
|
b477cd8aaf510db1ef9d8e0deec3ba23a457e021
|
diff --git a/SftpAdapter.php b/SftpAdapter.php
index <HASH>..<HASH> 100644
--- a/SftpAdapter.php
+++ b/SftpAdapter.php
@@ -190,7 +190,7 @@ class SftpAdapter implements FilesystemAdapter
$this->makeDirectory($path, $config->get(Config::OPTION_VISIBILITY));
}
- public function setVisibility(string $path, $visibility): void
+ public function setVisibility(string $path, string $visibility): void
{
$location = $this->prefixer->prefixPath($path);
$connection = $this->connectionProvider->provideConnection();
|
Tighten visibility typehint, always string.
|
thephpleague_flysystem-sftp
|
train
|
73d9e654f7bb19a87c9168a5df7c1028f549cabf
|
diff --git a/TODO.md b/TODO.md
index <HASH>..<HASH> 100644
--- a/TODO.md
+++ b/TODO.md
@@ -2,10 +2,6 @@
## Immediate
-- Have Rows force everything to scalars, or at least not objects, because
- the Row represents the data as it is at the database. It is the Record that
- might be allowed to do trivial modifications for the domain.
-
## Next Release Priority
- Documentation.
@@ -14,6 +10,10 @@
## Near-Term
+- (???) Have Rows force everything to scalars, or at least not objects, because
+ the Row represents the data as it is at the database. It is the Record that
+ might be allowed to do trivial modifications for the domain.
+
- Support for relation-specific joins. E.g.:
$select = $atlas->select(Mapper::CLASS)
diff --git a/src/Table/Gateway.php b/src/Table/Gateway.php
index <HASH>..<HASH> 100644
--- a/src/Table/Gateway.php
+++ b/src/Table/Gateway.php
@@ -275,7 +275,7 @@ class Gateway implements GatewayInterface
public function newSelectedRow(array $cols)
{
$row = $this->newRow($cols);
- $row->setStatus($row::IS_CLEAN);
+ $row->setStatus($row::IS_SELECTED);
$this->identityMap->setRow($row, $cols);
return $row;
}
diff --git a/src/Table/Row.php b/src/Table/Row.php
index <HASH>..<HASH> 100644
--- a/src/Table/Row.php
+++ b/src/Table/Row.php
@@ -9,10 +9,10 @@ class Row implements RowInterface
const IS_NEW = 'IS_NEW';
// selected, and not yet modified in memory
- const IS_CLEAN = 'IS_CLEAN';
+ const IS_SELECTED = 'IS_SELECTED';
// selected/inserted/updated, then modified in memory
- const IS_DIRTY = 'IS_DIRTY';
+ const IS_MODIFIED = 'IS_MODIFIED';
// marked for deletion but not deleted, modification in memory allowed
const IS_TRASH = 'IS_TRASH';
@@ -128,7 +128,7 @@ class Row implements RowInterface
throw Exception::immutableOnceDeleted($this, $col);
}
- if ($this->status == static::IS_NEW || $this->status == static::IS_TRASH) {
+ if ($this->status == static::IS_NEW) {
$this->cols[$col] = $new;
return;
}
@@ -136,7 +136,7 @@ class Row implements RowInterface
$old = $this->cols[$col];
$this->cols[$col] = $new;
if (! $this->isSameValue($old, $new)) {
- $this->setStatus(static::IS_DIRTY);
+ $this->setStatus(static::IS_MODIFIED);
}
}
diff --git a/tests/Table/RowTest.php b/tests/Table/RowTest.php
index <HASH>..<HASH> 100644
--- a/tests/Table/RowTest.php
+++ b/tests/Table/RowTest.php
@@ -69,4 +69,36 @@ class RowTest extends \PHPUnit_Framework_TestCase
$this->setExpectedException('Atlas\Orm\Exception');
unset($row->id);
}
+
+ public function testStatus()
+ {
+ $row = new Row(new Primary(['id' => '1']), ['foo' => 'bar']);
+ $this->assertSame($row::IS_NEW, $row->getStatus());
+
+ $row->setStatus($row::IS_SELECTED);
+ $this->assertSame($row::IS_SELECTED, $row->getStatus());
+
+ $this->assertTrue($row->hasStatus([
+ $row::IS_SELECTED,
+ $row::IS_MODIFIED,
+ ]));
+
+ $this->setExpectedException(
+ 'UnexpectedValueException',
+ "Expected valid row status, got 'No Such Status' instead."
+ );
+ $row->setStatus('No Such Status');
+ }
+
+ public function testCannotModifyAfterDelete()
+ {
+ $row = new Row(new Primary(['id' => '1']), ['foo' => 'bar']);
+ $row->setStatus($row::IS_DELETED);
+
+ $this->setExpectedException(
+ 'Atlas\Orm\Exception',
+ 'Row::$foo is immutable once deleted.'
+ );
+ $row->foo = 'zim';
+ }
}
|
change Row statuses, and test them
|
atlasphp_Atlas.Orm
|
train
|
babc2011ad74400f5f34375add9bd752bbc81a20
|
diff --git a/src/main/java/com/blade/jdbc/core/ActiveRecord.java b/src/main/java/com/blade/jdbc/core/ActiveRecord.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/blade/jdbc/core/ActiveRecord.java
+++ b/src/main/java/com/blade/jdbc/core/ActiveRecord.java
@@ -185,7 +185,7 @@ public class ActiveRecord implements Serializable {
}
String countSql = "select count(*) from (" + sql + ") tmp";
- long count = this.count(countSql);
+ long count = this.count(countSql, params);
List<T> list = this.queryAll(sql, params);
|
🐒 fix count and page sql
|
lets-blade_blade-jdbc
|
train
|
3d8cab95e910250411104a118dffdf84267a7a22
|
diff --git a/src/generic-provider-views/index.js b/src/generic-provider-views/index.js
index <HASH>..<HASH> 100644
--- a/src/generic-provider-views/index.js
+++ b/src/generic-provider-views/index.js
@@ -74,7 +74,7 @@ module.exports = class View {
* @param {String} id Folder id
* @return {Promise} Folders/files in folder
*/
- getFolder (id) {
+ getFolder (id, name) {
return this.Provider.list(id)
.then((res) => {
let folders = []
@@ -87,7 +87,7 @@ module.exports = class View {
if (index !== -1) {
updatedDirectories = state.directories.slice(0, index + 1)
} else {
- updatedDirectories = state.directories.concat([{id, title: this.plugin.getItemName(res)}])
+ updatedDirectories = state.directories.concat([{id, title: name || this.plugin.getItemName(res)}])
}
this.plugin.getItemSubList(res).forEach((item) => {
@@ -115,7 +115,7 @@ module.exports = class View {
*/
getNextFolder (folder) {
let id = this.plugin.getItemRequestPath(folder)
- this.getFolder(id)
+ this.getFolder(id, this.plugin.getItemName(folder))
}
addFile (file) {
diff --git a/src/plugins/GoogleDrive/index.js b/src/plugins/GoogleDrive/index.js
index <HASH>..<HASH> 100644
--- a/src/plugins/GoogleDrive/index.js
+++ b/src/plugins/GoogleDrive/index.js
@@ -59,6 +59,7 @@ module.exports = class Google extends Plugin {
const plugin = this
this.target = this.mount(target, plugin)
+ // catch error here.
this[this.id].auth().then(this.onAuth)
return
}
@@ -87,7 +88,7 @@ module.exports = class Google extends Plugin {
}
getItemName (item) {
- return item.title
+ return item.title ? item.title : '/'
}
getMimeType (item) {
|
fix: broken google drive breadcrumb
|
transloadit_uppy
|
train
|
28c3b2b40340848250d004305b3c189efa0e1899
|
diff --git a/lib/field.js b/lib/field.js
index <HASH>..<HASH> 100644
--- a/lib/field.js
+++ b/lib/field.js
@@ -10,6 +10,18 @@ function MarcVariableField(tag) {
}
/*
+ * Returns copy of the specified field.
+ */
+MarcVariableField.clone = function(field) {
+ if (field instanceof MarcControlField) {
+ return new MarcControlField(field);
+ } else if (field instanceof MarcDataField) {
+ return new MarcDataField(field);
+ }
+ return null;
+}
+
+/*
* Always returns false (stub).
*/
MarcVariableField.prototype.isControlField = function() {
@@ -90,6 +102,16 @@ function MarcControlField(tag, data) {
MarcControlField.prototype = new MarcVariableField;
/*
+ * Returns copy of the specified field.
+ */
+MarcControlField.clone = function(field) {
+ if (field instanceof MarcControlField) {
+ return new MarcControlField(field);
+ }
+ return null;
+}
+
+/*
* Always returns true.
*/
MarcControlField.prototype.isControlField = function() {
@@ -181,6 +203,16 @@ function MarcDataField(tag, ind1, ind2, subfields) {
MarcDataField.prototype = new MarcVariableField;
/*
+ * Returns copy of the specified field.
+ */
+MarcDataField.clone = function(field) {
+ if (field instanceof MarcDataField) {
+ return new MarcDataField(field);
+ }
+ return null;
+}
+
+/*
* Always returns true.
*/
MarcDataField.prototype.isDataField = function() {
@@ -680,6 +712,16 @@ function MarcSubfield(code, data) {
}
/*
+ * Returns copy of the specified subfield.
+ */
+MarcSubfield.clone = function(subfield) {
+ if (subfield instanceof MarcSubfield) {
+ return new MarcSubfield(subfield);
+ }
+ return null;
+}
+
+/*
* Returns true if the subfields are equal.
*/
MarcSubfield.equals = function(subfield1, subfield2, weakMode) {
diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
{
"name": "marcrecord",
"description": "MARC record library (inspired by marc4j)",
- "version": "0.0.21",
+ "version": "0.0.22",
"author": "Alexander Fronkin <alexander.fronkin@afronkin.net>",
"repository": {
"type": "git",
diff --git a/test/field.js b/test/field.js
index <HASH>..<HASH> 100644
--- a/test/field.js
+++ b/test/field.js
@@ -23,6 +23,18 @@ var field = new MarcVariableField();
assert(field.tag === '???');
/*
+ * MarcVariableField.clone()
+ */
+var field = MarcVariableField.clone(data.records[0].fields[0]);
+assert(field !== data.records[0].fields[0]);
+assert(field.equals(data.records[0].fields[0]));
+var field = MarcVariableField.clone(data.records[0].fields[1]);
+assert(field !== data.records[0].fields[1]);
+assert(field.equals(data.records[0].fields[1]));
+var field = MarcVariableField.clone({});
+assert(field === null);
+
+/*
* MarcVariableField.isControlField()
*/
assert(data.records[0].fields[0].isControlField());
@@ -98,6 +110,17 @@ var field = new MarcControlField(data.records[0].fields[0]);
assert(field.tag === '001' && field.data === 'ID/1');
/*
+ * MarcControlField.clone()
+ */
+var field = MarcControlField.clone(data.records[0].fields[0]);
+assert(field !== data.records[0].fields[0]);
+assert(field.equals(data.records[0].fields[0]));
+var field = MarcControlField.clone(data.records[0].fields[1]);
+assert(field === null);
+var field = MarcControlField.clone({});
+assert(field === null);
+
+/*
* MarcControlField.equals()
*/
var field1 = data.records[0].fields[0];
@@ -162,6 +185,17 @@ assert(field.tag === '950' && field.ind1 === '3' && field.ind2 === '4'
&& field.subfields.length === 5);
/*
+ * MarcDataField.clone()
+ */
+var field = MarcDataField.clone(data.records[0].fields[0]);
+assert(field === null);
+var field = MarcDataField.clone(data.records[0].fields[1]);
+assert(field !== data.records[0].fields[1]);
+assert(field.equals(data.records[0].fields[1]));
+var field = MarcDataField.clone({});
+assert(field === null);
+
+/*
* MarcDataField.equals()
*/
var field1 = data.records[0].fields[1];
@@ -449,6 +483,15 @@ var subfield = new MarcSubfield(data.records[0].fields[1].subfields[2]);
assert(subfield.data.isDataField() && subfield.data.tag === '905');
/*
+ * MarcSubfield.clone()
+ */
+var subfield = MarcSubfield.clone(data.records[0].fields[1].subfields[0]);
+assert(subfield !== data.records[0].fields[1].subfields[0]);
+assert(subfield.equals(data.records[0].fields[1].subfields[0]));
+var subfield = MarcSubfield.clone({});
+assert(subfield === null);
+
+/*
* MarcSubfield.equals()
*/
var subfield1 = data.records[0].fields[1].subfields[0];
|
Added clone() to MarcVariableField, MarcControlField, MarcDataField, MarcSubfield.
|
afronkin_node-marcrecord
|
train
|
aa27b0d87a2f2b5e78cc6eedecd2236d4ab3c88e
|
diff --git a/docs/src/modules/components/withRoot.js b/docs/src/modules/components/withRoot.js
index <HASH>..<HASH> 100644
--- a/docs/src/modules/components/withRoot.js
+++ b/docs/src/modules/components/withRoot.js
@@ -52,17 +52,17 @@ const pages = [
pathname: '/style',
children: [
{
- pathname: '/style/css-baseline',
- title: 'CSS Baseline',
+ pathname: '/style/icons',
},
{
pathname: '/style/color',
},
{
- pathname: '/style/icons',
+ pathname: '/style/typography',
},
{
- pathname: '/style/typography',
+ pathname: '/style/css-baseline',
+ title: 'CSS Baseline',
},
],
},
@@ -76,10 +76,10 @@ const pages = [
pathname: '/layout/grid',
},
{
- pathname: '/layout/hidden',
+ pathname: '/layout/breakpoints',
},
{
- pathname: '/layout/breakpoints',
+ pathname: '/layout/hidden',
},
],
},
@@ -90,6 +90,9 @@ const pages = [
pathname: '/utils/modal',
},
{
+ pathname: '/utils/transitions',
+ },
+ {
pathname: '/utils/popover',
},
{
@@ -99,9 +102,6 @@ const pages = [
pathname: '/utils/portal',
},
{
- pathname: '/utils/transitions',
- },
- {
pathname: '/utils/click-away-listener',
},
],
@@ -118,19 +118,19 @@ const pages = [
pathname: '/customization',
children: [
{
- pathname: '/customization/overrides',
- },
- {
pathname: '/customization/themes',
},
{
- pathname: '/customization/default-theme',
- title: 'Default Theme',
+ pathname: '/customization/overrides',
},
{
pathname: '/customization/css-in-js',
title: 'CSS in JS',
},
+ {
+ pathname: '/customization/default-theme',
+ title: 'Default Theme',
+ },
],
},
{
@@ -141,40 +141,40 @@ const pages = [
title: 'API Design Approach',
},
{
- pathname: '/guides/minimizing-bundle-size',
+ pathname: '/guides/typescript',
+ title: 'TypeScript',
},
{
pathname: '/guides/interoperability',
title: 'Style Library Interoperability',
},
{
- pathname: '/guides/migration-v0x',
- title: 'Migration From v0.x',
+ pathname: '/guides/minimizing-bundle-size',
+ },
+ {
+ pathname: '/guides/composition',
},
{
pathname: '/guides/server-rendering',
},
{
- pathname: '/guides/composition',
+ pathname: '/guides/migration-v0x',
+ title: 'Migration From v0.x',
},
{
pathname: '/guides/testing',
},
{
- pathname: '/guides/typescript',
- title: 'TypeScript',
+ pathname: '/guides/flow',
},
{
- pathname: '/guides/flow',
+ pathname: '/guides/right-to-left',
+ title: 'Right-to-left',
},
{
pathname: '/guides/csp',
title: 'Content Security Policy',
},
- {
- pathname: '/guides/right-to-left',
- title: 'Right-to-left',
- },
],
},
{
@@ -204,29 +204,29 @@ const pages = [
pathname: '/discover-more',
children: [
{
- pathname: '/discover-more/vision',
+ pathname: '/discover-more/showcase',
},
{
- pathname: '/discover-more/backers',
- title: 'Sponsors & Backers',
+ pathname: '/discover-more/related-projects',
},
{
- pathname: '/discover-more/community',
+ pathname: '/discover-more/roadmap',
},
{
- pathname: '/discover-more/related-projects',
+ pathname: '/discover-more/backers',
+ title: 'Sponsors & Backers',
},
{
- pathname: '/discover-more/showcase',
+ pathname: '/discover-more/vision',
},
{
- pathname: '/discover-more/roadmap',
+ pathname: '/discover-more/team',
},
{
- pathname: '/discover-more/changelog',
+ pathname: '/discover-more/community',
},
{
- pathname: '/discover-more/team',
+ pathname: '/discover-more/changelog',
},
{
pathname: '/discover-more/governance',
|
[docs] Use data to improve the ranking (#<I>)
|
mui-org_material-ui
|
train
|
289dc398bd88ea1bd9c952772e2323b64c0ec2a5
|
diff --git a/index/scorch/segment/zap/build.go b/index/scorch/segment/zap/build.go
index <HASH>..<HASH> 100644
--- a/index/scorch/segment/zap/build.go
+++ b/index/scorch/segment/zap/build.go
@@ -572,42 +572,3 @@ func persistDictionary(memSegment *mem.Segment, w *CountHashWriter, postingsLocs
return rv, nil
}
-
-// FooterSize is the size of the footer record in bytes
-// crc + ver + chunk + field offset + stored offset + num docs
-const FooterSize = 4 + 4 + 4 + 8 + 8 + 8
-
-func persistFooter(numDocs, storedIndexOffset, fieldIndexOffset uint64,
- chunkFactor uint32, w *CountHashWriter) error {
- // write out the number of docs
- err := binary.Write(w, binary.BigEndian, numDocs)
- if err != nil {
- return err
- }
- // write out the stored field index location:
- err = binary.Write(w, binary.BigEndian, storedIndexOffset)
- if err != nil {
- return err
- }
- // write out the field index location
- err = binary.Write(w, binary.BigEndian, fieldIndexOffset)
- if err != nil {
- return err
- }
- // write out 32-bit chunk factor
- err = binary.Write(w, binary.BigEndian, chunkFactor)
- if err != nil {
- return err
- }
- // write out 32-bit version
- err = binary.Write(w, binary.BigEndian, version)
- if err != nil {
- return err
- }
- // write out CRC-32 of everything upto but not including this CRC
- err = binary.Write(w, binary.BigEndian, w.Sum32())
- if err != nil {
- return err
- }
- return nil
-}
diff --git a/index/scorch/segment/zap/merge.go b/index/scorch/segment/zap/merge.go
index <HASH>..<HASH> 100644
--- a/index/scorch/segment/zap/merge.go
+++ b/index/scorch/segment/zap/merge.go
@@ -84,6 +84,7 @@ func Merge(segments []*Segment, drops []*roaring.Bitmap, path string,
return newDocNums, nil
}
+// mapFields takes the fieldsInv list and builds the map
func mapFields(fields []string) map[string]uint16 {
rv := make(map[string]uint16)
for i, fieldName := range fields {
@@ -92,6 +93,8 @@ func mapFields(fields []string) map[string]uint16 {
return rv
}
+// computeNewDocCount determines how many documents will be in the newly
+// merged segment when obsoleted docs are dropped
func computeNewDocCount(segments []*Segment, drops []*roaring.Bitmap) uint64 {
var newSegDocCount uint64
for segI, segment := range segments {
diff --git a/index/scorch/segment/zap/write.go b/index/scorch/segment/zap/write.go
index <HASH>..<HASH> 100644
--- a/index/scorch/segment/zap/write.go
+++ b/index/scorch/segment/zap/write.go
@@ -93,3 +93,42 @@ func persistFields(fieldsInv []string, w *CountHashWriter, dictLocs []uint64) (u
return rv, nil
}
+
+// FooterSize is the size of the footer record in bytes
+// crc + ver + chunk + field offset + stored offset + num docs
+const FooterSize = 4 + 4 + 4 + 8 + 8 + 8
+
+func persistFooter(numDocs, storedIndexOffset, fieldIndexOffset uint64,
+ chunkFactor uint32, w *CountHashWriter) error {
+ // write out the number of docs
+ err := binary.Write(w, binary.BigEndian, numDocs)
+ if err != nil {
+ return err
+ }
+ // write out the stored field index location:
+ err = binary.Write(w, binary.BigEndian, storedIndexOffset)
+ if err != nil {
+ return err
+ }
+ // write out the field index location
+ err = binary.Write(w, binary.BigEndian, fieldIndexOffset)
+ if err != nil {
+ return err
+ }
+ // write out 32-bit chunk factor
+ err = binary.Write(w, binary.BigEndian, chunkFactor)
+ if err != nil {
+ return err
+ }
+ // write out 32-bit version
+ err = binary.Write(w, binary.BigEndian, version)
+ if err != nil {
+ return err
+ }
+ // write out CRC-32 of everything upto but not including this CRC
+ err = binary.Write(w, binary.BigEndian, w.Sum32())
+ if err != nil {
+ return err
+ }
+ return nil
+}
|
more refacotring of build/merge
|
blevesearch_bleve
|
train
|
02bdbf6b0c871a89d6d55d657974e96d9073df4c
|
diff --git a/sonar-server/src/main/java/org/sonar/server/db/migrations/v44/MeasureDataMigration.java b/sonar-server/src/main/java/org/sonar/server/db/migrations/v44/MeasureDataMigration.java
index <HASH>..<HASH> 100644
--- a/sonar-server/src/main/java/org/sonar/server/db/migrations/v44/MeasureDataMigration.java
+++ b/sonar-server/src/main/java/org/sonar/server/db/migrations/v44/MeasureDataMigration.java
@@ -25,7 +25,6 @@ import org.sonar.server.db.migrations.DatabaseMigration;
import org.sonar.server.db.migrations.MassUpdater;
import org.sonar.server.db.migrations.SqlUtil;
-import java.sql.Blob;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
@@ -58,7 +57,8 @@ public class MeasureDataMigration implements DatabaseMigration {
public Row load(ResultSet rs) throws SQLException {
Row row = new Row();
row.measure_id = SqlUtil.getLong(rs, 1);
- row.data = rs.getBlob(2);
+ // Don't use getBlob as it fails on Postgres and mssql
+ row.data = rs.getBytes(2);
return row;
}
},
@@ -70,7 +70,7 @@ public class MeasureDataMigration implements DatabaseMigration {
@Override
public boolean convert(Row row, PreparedStatement updateStatement) throws SQLException {
- updateStatement.setBlob(1, row.data);
+ updateStatement.setBytes(1, row.data);
updateStatement.setLong(2, row.measure_id);
return true;
}
@@ -80,7 +80,7 @@ public class MeasureDataMigration implements DatabaseMigration {
private static class Row {
private Long measure_id;
- private Blob data;
+ private byte[] data;
}
}
|
SONAR-<I> Try to fix migration of measure data on PgSQL and MsSql
|
SonarSource_sonarqube
|
train
|
8fb1e230af5af114714a9e8c86e0ecf6970fe26e
|
diff --git a/Makefile.dryice.js b/Makefile.dryice.js
index <HASH>..<HASH> 100644
--- a/Makefile.dryice.js
+++ b/Makefile.dryice.js
@@ -172,8 +172,8 @@ function buildFirefox(destDir) {
project.assumeAllFilesLoaded();
var sources = copy.createDataObject();
copy({
- // This list of dependencies should be the same as in gclitest/index.js
- source: { project: project, require: [ 'gclitest/suite' ] },
+ // This list of dependencies should be the same as in suffix-test.js
+ source: { project: project, require: [ 'gclitest/index' ] },
filter: copy.filter.moduleDefines,
dest: sources
});
@@ -405,14 +405,8 @@ function test() {
}
});
- // A minimum fake dom to get us through the JS tests
- var document = { title: 'Fake DOM' };
- requirejs('gcli/types/javascript').setGlobalObject({
- window: { document: document },
- document: document
- });
-
- requirejs('gclitest/suite');
+ var gclitest = requirejs('gclitest/index');
+ gclitest.run({ useFakeDom: true, detailedResultLog: true });
}
// Now everything is defined properly, start working
diff --git a/index.html b/index.html
index <HASH>..<HASH> 100644
--- a/index.html
+++ b/index.html
@@ -45,8 +45,9 @@
});
// This list should be the same as the browser part of Makefile.dryice.js
- var deps = [ 'gcli/index', 'demo/index', 'gclitest/index' ];
- require(deps, function(gcli) {
+ var deps = [ 'gcli/index', 'gclitest/index', 'demo/index' ];
+ require(deps, function(gcli, gclitest) {
+ gclitest.run({ useFakeDom: true });
gcli.createView();
});
</script>
diff --git a/mozilla/build/suffix-test.js b/mozilla/build/suffix-test.js
index <HASH>..<HASH> 100644
--- a/mozilla/build/suffix-test.js
+++ b/mozilla/build/suffix-test.js
@@ -20,7 +20,9 @@ function onLoad() {
try {
openConsole();
- define.globalDomain.require("gclitest/index");
+
+ var gclitest = define.globalDomain.require("gclitest/index");
+ gclitest.run();
}
catch (ex) {
failed = ex;
|
Bug <I> (jumpscratch): Make calls to test suite
use the new .run() method rather than relying on autorun.
|
joewalker_gcli
|
train
|
e7d4094887306a469071c9a296b45c898ec57c5d
|
diff --git a/lib/ydim/debitor.rb b/lib/ydim/debitor.rb
index <HASH>..<HASH> 100644
--- a/lib/ydim/debitor.rb
+++ b/lib/ydim/debitor.rb
@@ -20,7 +20,7 @@ module YDIM
lns = [@name]
lns.push(["z.H.", @salutation, @contact].compact.join(' '))
lns.concat(@address_lines)
- lns.push(@email)
+ lns.push(@location, @email)
lns.compact!
lns
end
|
First Bug: @location was not included in Debitor#address
|
zdavatz_ydim
|
train
|
05d2b425c8082c2b5dfba90dd4f2db978d6d2774
|
diff --git a/paramiko/channel.py b/paramiko/channel.py
index <HASH>..<HASH> 100644
--- a/paramiko/channel.py
+++ b/paramiko/channel.py
@@ -40,7 +40,9 @@ from paramiko import pipe
# lower bound on the max packet size we'll accept from the remote host
-MIN_PACKET_SIZE = 1024
+# Minimum packet size is 32768 bytes according to
+# http://www.ietf.org/rfc/rfc4254.txt
+MIN_PACKET_SIZE = 2 ** 15
class Channel (object):
|
Bump the MIN_PACKET_SIZE to what's in the RFC.
|
paramiko_paramiko
|
train
|
2c64fdc852853d65cb4442468b4e7db103673c60
|
diff --git a/squad/core/admin.py b/squad/core/admin.py
index <HASH>..<HASH> 100644
--- a/squad/core/admin.py
+++ b/squad/core/admin.py
@@ -145,7 +145,7 @@ class TestRunAdmin(admin.ModelAdmin):
class PatchSourceForm(ModelForm):
- password = forms.CharField(max_length=128)
+ password = forms.CharField(max_length=128, required=False)
def __init__(self, *args, **kwargs):
super(PatchSourceForm, self).__init__(*args, **kwargs)
|
core: admin: mark password field as not required
Mark PatchSource's password field as not required in the
Django admin view. It's not required if the implementation
type is Github or if it's Gerrit using ssh.
|
Linaro_squad
|
train
|
60c0d29585a0a077729cf56e5e438c386b4cf255
|
diff --git a/pyprophet/export.py b/pyprophet/export.py
index <HASH>..<HASH> 100644
--- a/pyprophet/export.py
+++ b/pyprophet/export.py
@@ -57,7 +57,7 @@ def export_tsv(infile, outfile, format, outcsv, ipf, peptide, protein):
protein_present = False
if protein:
- peptide_present = _check_sqlite_table(con, "SCORE_PROTEIN")
+ protein_present = _check_sqlite_table(con, "SCORE_PROTEIN")
if protein_present and protein:
data_protein_run = pd.read_sql_query("select run_id as id_run, protein_id as id_protein, qvalue as m_score_protein_run_specific from score_protein where context == 'run-specific';", con)
|
[FIX] Export protein q-values when present
|
PyProphet_pyprophet
|
train
|
b6831d2ab2caf31510197aa12d6e4e7ee208358d
|
diff --git a/src/Symfony/Component/Form/Extension/Core/Type/DateIntervalType.php b/src/Symfony/Component/Form/Extension/Core/Type/DateIntervalType.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/Form/Extension/Core/Type/DateIntervalType.php
+++ b/src/Symfony/Component/Form/Extension/Core/Type/DateIntervalType.php
@@ -102,6 +102,7 @@ class DateIntervalType extends AbstractType
$childOptions[$part] = array();
$childOptions[$part]['error_bubbling'] = true;
if ('choice' === $options['widget']) {
+ $childOptions[$part]['choice_translation_domain'] = false;
$childOptions[$part]['choices'] = $options[$part];
$childOptions[$part]['placeholder'] = $options['placeholder'][$part];
}
diff --git a/src/Symfony/Component/Form/Tests/Extension/Core/Type/DateIntervalTypeTest.php b/src/Symfony/Component/Form/Tests/Extension/Core/Type/DateIntervalTypeTest.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/Form/Tests/Extension/Core/Type/DateIntervalTypeTest.php
+++ b/src/Symfony/Component/Form/Tests/Extension/Core/Type/DateIntervalTypeTest.php
@@ -11,6 +11,7 @@
namespace Symfony\Component\Form\Tests\Extension\Core\Type;
+use Symfony\Component\Form\Extension\Core\Type\DateIntervalType;
use Symfony\Component\Form\FormError;
use Symfony\Component\Form\Test\TypeTestCase as TestCase;
@@ -364,4 +365,23 @@ class DateIntervalTypeTest extends TestCase
$this->assertSame(array(), iterator_to_array($form['years']->getErrors()));
$this->assertSame(array($error), iterator_to_array($form->getErrors()));
}
+ public function testTranslationsAreDisabledForChoiceWidget()
+ {
+ $form = $this->factory->create(
+ DateIntervalType::class,
+ null,
+ array(
+ 'widget' => 'choice',
+ 'with_hours' => true,
+ 'with_minutes' => true,
+ 'with_seconds' => true,
+ )
+ );
+ $this->assertFalse($form->get('years')->getConfig()->getOption('choice_translation_domain'));
+ $this->assertFalse($form->get('months')->getConfig()->getOption('choice_translation_domain'));
+ $this->assertFalse($form->get('days')->getConfig()->getOption('choice_translation_domain'));
+ $this->assertFalse($form->get('hours')->getConfig()->getOption('choice_translation_domain'));
+ $this->assertFalse($form->get('minutes')->getConfig()->getOption('choice_translation_domain'));
+ $this->assertFalse($form->get('seconds')->getConfig()->getOption('choice_translation_domain'));
+ }
}
|
[Form] DateIntervalType: Do not try to translate choices
|
symfony_symfony
|
train
|
a72a1561aa6c880b80707842667442daca3ca069
|
diff --git a/src/main/java/com/sd_editions/collatex/Block.java b/src/main/java/com/sd_editions/collatex/Block.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/sd_editions/collatex/Block.java
+++ b/src/main/java/com/sd_editions/collatex/Block.java
@@ -5,6 +5,7 @@ import java.util.HashMap;
/**
* Our basic block class.
* This shouldn't be used itself, use one of it's subclasses or create your own
+ * Quick check for svn usage
*
*/
public abstract class Block {
|
Check to make use we can push to svn o.k.
|
interedition_collatex
|
train
|
79e5bb84f368df8b5a8ac039d83974024a9c9cc4
|
diff --git a/src/Executor/Result.php b/src/Executor/Result.php
index <HASH>..<HASH> 100644
--- a/src/Executor/Result.php
+++ b/src/Executor/Result.php
@@ -12,7 +12,7 @@ class Result {
$return_val = 0;
// Suppress StdErr output.
$dev_null = ' 2> /dev/null';
- if (stripos(PHP_OS, 'win') !== FALSE) {
+ if (stripos(PHP_OS, 'win') === 0) {
$dev_null = ' 2> nul';
}
$command .= $dev_null;
|
Prevent Darwin (Mac OSX) from generating a nul file.
|
drutiny_drutiny
|
train
|
76e6315473671b87f3d5fe64e4c35a79658789d3
|
diff --git a/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py b/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py
index <HASH>..<HASH> 100755
--- a/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py
+++ b/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py
@@ -119,13 +119,13 @@ def assert_sets_equal(set1, set2):
lines = []
if difference1:
- lines.append('Items in the first set but not the second:')
+ lines.append(' -- Items in the left set but not the right:')
for item in sorted(difference1):
- lines.append(repr(item))
+ lines.append(f' {item!r}')
if difference2:
- lines.append('Items in the second set but not the first:')
+ lines.append(' -- Items in the right set but not the left:')
for item in sorted(difference2):
- lines.append(repr(item))
+ lines.append(f' {item!r}')
standard_msg = '\n'.join(lines)
raise AssertionError(standard_msg)
@@ -155,6 +155,7 @@ def parse_module_data(provider_data, resource_type, yaml_file_path):
def check_completeness_of_list_of_hooks_sensors_hooks(yaml_files: Dict[str, Dict]):
print("Checking completeness of list of {sensors, hooks, operators}")
+ print(" -- {sensors, hooks, operators} - Expected modules(Left): Current Modules(Right)")
for (yaml_file_path, provider_data), resource_type in product(
yaml_files.items(), ["sensors", "operators", "hooks"]
):
@@ -193,6 +194,8 @@ def check_duplicates_in_integrations_names_of_hooks_sensors_operators(yaml_files
def check_completeness_of_list_of_transfers(yaml_files: Dict[str, Dict]):
print("Checking completeness of list of transfers")
resource_type = 'transfers'
+
+ print(" -- Expected transfers modules(Left): Current transfers Modules(Right)")
for yaml_file_path, provider_data in yaml_files.items():
expected_modules, provider_package, resource_data = parse_module_data(
provider_data, resource_type, yaml_file_path
@@ -309,7 +312,10 @@ def check_doc_files(yaml_files: Dict[str, Dict]):
}
try:
+ print(" -- Checking document urls: expected(left), current(right)")
assert_sets_equal(set(expected_doc_urls), set(current_doc_urls))
+
+ print(" -- Checking logo urls: expected(left), current(right)")
assert_sets_equal(set(expected_logo_urls), set(current_logo_urls))
except AssertionError as ex:
print(ex)
|
Added print statements for clarity in provider yaml checks (#<I>)
|
apache_airflow
|
train
|
b3e93ffadf2a56c9e82d5471072c61497a866ef4
|
diff --git a/lib/models/event.js b/lib/models/event.js
index <HASH>..<HASH> 100644
--- a/lib/models/event.js
+++ b/lib/models/event.js
@@ -78,6 +78,8 @@ module.exports.MatrixEvent = function MatrixEvent(
this.status = null;
this.forwardLooking = true;
this._pushActions = null;
+ this._date = this.event.origin_server_ts ?
+ new Date(this.event.origin_server_ts) : null;
this._clearEvent = {};
this._keysProved = {};
@@ -143,6 +145,14 @@ utils.extend(module.exports.MatrixEvent.prototype, {
},
/**
+ * Get the timestamp of this event, as a Date object.
+ * @return {Date} The event date, e.g. <code>new Date(1433502692297)</code>
+ */
+ getDate: function() {
+ return this._date;
+ },
+
+ /**
* Get the (decrypted, if necessary) event content JSON.
*
* @return {Object} The event content JSON, or an empty object.
@@ -374,6 +384,17 @@ utils.extend(module.exports.MatrixEvent.prototype, {
setPushActions: function(pushActions) {
this._pushActions = pushActions;
},
+
+ /**
+ * Replace the `event` property and recalculate any properties based on it.
+ * @param {Object} event the object to assign to the `event` property
+ */
+ handleRemoteEcho: function(event) {
+ this.event = event;
+ // successfully sent.
+ this.status = null;
+ this._date = new Date(this.event.origin_server_ts);
+ }
});
diff --git a/lib/models/room.js b/lib/models/room.js
index <HASH>..<HASH> 100644
--- a/lib/models/room.js
+++ b/lib/models/room.js
@@ -674,10 +674,7 @@ Room.prototype._handleRemoteEcho = function(remoteEvent, localEvent) {
// replace the event source (this will preserve the plaintext payload if
// any, which is good, because we don't want to try decoding it again).
- localEvent.event = remoteEvent.event;
-
- // successfully sent.
- localEvent.status = null;
+ localEvent.handleRemoteEcho(remoteEvent.event);
for (var i = 0; i < this._timelineSets.length; i++) {
var timelineSet = this._timelineSets[i];
|
Add `getDate` function to MatrixEvent (#<I>)
* Add `getDate` function to MatrixEvent
`getDate` can be used to get the timestamp of the event as a `Date` instance.
Adds handleRemoteEcho function to be called on change of internal event of MatrixEvent, so that the internal `_date` can be updated when a remote echo replaces a local one.
|
matrix-org_matrix-js-sdk
|
train
|
1380789375bb3eb75dbb4f951a44ec1876e5b5ee
|
diff --git a/src/Events/Database/ConnectionSet.php b/src/Events/Database/ConnectionSet.php
index <HASH>..<HASH> 100644
--- a/src/Events/Database/ConnectionSet.php
+++ b/src/Events/Database/ConnectionSet.php
@@ -15,7 +15,7 @@
namespace Hyn\Tenancy\Events\Database;
use Hyn\Tenancy\Abstracts\AbstractEvent;
-use Hyn\Tenancy\Models\Website;
+use Hyn\Tenancy\Contracts\Website;
class ConnectionSet extends AbstractEvent
{
|
Use Website Contract instead of Models (#<I>) (#<I>)
|
tenancy_multi-tenant
|
train
|
0f1687b42bf24c5a94b3653a10d5b4a026177cde
|
diff --git a/vent/api/menu_helpers.py b/vent/api/menu_helpers.py
index <HASH>..<HASH> 100644
--- a/vent/api/menu_helpers.py
+++ b/vent/api/menu_helpers.py
@@ -59,8 +59,9 @@ class MenuHelper:
constraint_options(constraints, [])
if not prev_installed:
tools.append((match[0], ''))
- # only add stuff not already installed
- if tools:
+ # only add stuff not already installed or repo specification
+ if ((tools) or
+ (isinstance(matches, list) and len(matches) == 0)):
status = self.plugin.add(core_repo,
tools=tools,
branch=branch,
|
allow for repo specification only in tool adding
|
CyberReboot_vent
|
train
|
3ca2861c2dd9abf94cfc8d1b760c9a3d93dee469
|
diff --git a/code/extensions/LeftAndMainSubsites.php b/code/extensions/LeftAndMainSubsites.php
index <HASH>..<HASH> 100644
--- a/code/extensions/LeftAndMainSubsites.php
+++ b/code/extensions/LeftAndMainSubsites.php
@@ -217,12 +217,10 @@ class LeftAndMainSubsites extends Extension {
// Update current subsite in session
Subsite::changeSubsite($_GET['SubsiteID']);
- if ($this->owner->canView(Member::currentUser())) {
- //Redirect to clear the current page
- return $this->owner->redirect($this->owner->Link());
+ if (!$this->owner->canView(Member::currentUser())) {
+ //Redirect to the default CMS section
+ return $this->owner->redirect('admin/');
}
- //Redirect to the default CMS section
- return $this->owner->redirect('admin/');
}
// Automatically redirect the session to appropriate subsite when requesting a record.
@@ -234,12 +232,10 @@ class LeftAndMainSubsites extends Extension {
// Update current subsite in session
Subsite::changeSubsite($record->SubsiteID);
- if ($this->owner->canView(Member::currentUser())) {
- //Redirect to clear the current page
- return $this->owner->redirect($this->owner->Link());
+ if (!$this->owner->canView(Member::currentUser())) {
+ //Redirect to the default CMS section
+ return $this->owner->redirect('admin/');
}
- //Redirect to the default CMS section
- return $this->owner->redirect('admin/');
}
}
|
FIX: Removed unnecessary redirect. This is early enough in the script that the correct subsite will be used from hereon.
|
silverstripe_silverstripe-subsites
|
train
|
c91b6640de0627628da6c8b80406c54a2eaa13f8
|
diff --git a/blueflood-dropwizard/src/main/java/com/rackspacecloud/blueflood/dw/ingest/IngestConfiguration.java b/blueflood-dropwizard/src/main/java/com/rackspacecloud/blueflood/dw/ingest/IngestConfiguration.java
index <HASH>..<HASH> 100644
--- a/blueflood-dropwizard/src/main/java/com/rackspacecloud/blueflood/dw/ingest/IngestConfiguration.java
+++ b/blueflood-dropwizard/src/main/java/com/rackspacecloud/blueflood/dw/ingest/IngestConfiguration.java
@@ -10,12 +10,6 @@ import java.util.Collections;
import java.util.List;
public class IngestConfiguration extends Configuration {
-
- private int port = 19000;
-
- @NotEmpty
- private String host = "localhost";
-
@NotEmpty
private List<String> cassandraHosts = Lists.newArrayList("127.0.0.1:19180");
@@ -34,18 +28,7 @@ public class IngestConfiguration extends Configuration {
// debug setting where the API endpoint ignore collection times passed in.
private boolean forceNewCollectionTime = false;
-
- @JsonProperty
- public String getHost() { return host; }
-
- @JsonProperty
- public void setHost(String s) { this.host = s; }
-
- @JsonProperty
- public int getPort() { return port; }
-
- @JsonProperty
- public void setPort(int i) { this.port = i; }
+
@JsonProperty
public void setCassandraHosts(List<String> l) { this.cassandraHosts = l; }
diff --git a/blueflood-dropwizard/src/main/java/com/rackspacecloud/blueflood/dw/ingest/IngestionService.java b/blueflood-dropwizard/src/main/java/com/rackspacecloud/blueflood/dw/ingest/IngestionService.java
index <HASH>..<HASH> 100644
--- a/blueflood-dropwizard/src/main/java/com/rackspacecloud/blueflood/dw/ingest/IngestionService.java
+++ b/blueflood-dropwizard/src/main/java/com/rackspacecloud/blueflood/dw/ingest/IngestionService.java
@@ -18,12 +18,18 @@ import java.util.Map;
public class IngestionService implements com.rackspacecloud.blueflood.service.IngestionService {
private static final String YAML =
- "host: %s\n"+
- "port: %d\n"+
- "cassandraHosts:[%s],\n"+
+ "server:\n"+
+ " applicationConnectors:\n"+
+ " - type: http\n"+
+ " bindHost: %s\n"+
+ " port: %s\n"+
+ " adminConnectors:\n"+
+ " - type: http\n"+
+ " bindHost: %s\n"+
+ " port: %s\n"+
+ "cassandraHosts: [%s]\n"+
"rollupKeyspace: %s\n"+
"metricsWriterClass: \"%s\"\n"+
- "scopingTenants:[%s]\n"+
"forceNewCollectionTime: %s";
public IngestionService() {
@@ -45,10 +51,11 @@ public class IngestionService implements com.rackspacecloud.blueflood.service.In
// gotta use strings, because I don't want to depend on the http module.
config.getStringProperty("HTTP_INGESTION_HOST"),
config.getStringProperty("HTTP_INGESTION_PORT"),
- config.getStringProperty(CoreConfig.CASSANDRA_HOSTS),
+ config.getStringProperty("HTTP_INGESTION_HOST"),
+ Integer.toString(Integer.parseInt(config.getStringProperty("HTTP_INGESTION_PORT"))+1),
+ makeSafeYamlList(config.getStringProperty(CoreConfig.CASSANDRA_HOSTS), ","),
config.getStringProperty(CoreConfig.ROLLUP_KEYSPACE),
config.getStringProperty(CoreConfig.IMETRICS_WRITER),
- "THERE_ARE_NONE",
"false");
return new ByteArrayInputStream(replaced.getBytes(Charsets.UTF_8));
}
@@ -61,6 +68,7 @@ public class IngestionService implements com.rackspacecloud.blueflood.service.In
//namespaceAttrs.put("file", null);
namespaceAttrs.put("command", "server");
namespaceAttrs.put("version", null);
+ namespaceAttrs.put("file", "/this/path/does/not/matter");
Namespace namespace = new Namespace(namespaceAttrs);
ServerCommand<IngestConfiguration> serverCommand = new ServerCommand<IngestConfiguration>(ingestApplication);
@@ -72,4 +80,17 @@ public class IngestionService implements com.rackspacecloud.blueflood.service.In
}
}
+
+ private static String makeSafeYamlList(String s, String delimiter) {
+ String[] parts = s.split(delimiter, -1);
+ StringBuilder sb = new StringBuilder();
+ for (String part : parts) {
+ if (sb.length() == 0) {
+ sb = sb.append(String.format("\"%s\"", part));
+ } else {
+ sb = sb.append(String.format(",\"%s\"", part));
+ }
+ }
+ return sb.toString();
+ }
}
|
Honor ingest host/port. Also, use port<I> to set DW admin port.
|
rackerlabs_blueflood
|
train
|
df892d2a7630e4bb31ae7136c9486b0f525c280e
|
diff --git a/HISTORY.rst b/HISTORY.rst
index <HASH>..<HASH> 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -4,6 +4,11 @@ Release history
development
+++++++++++
+0.3.1 (2018-01-23)
+++++++++++++++++++
+
+- fix copy issue in reusable generator ``.replace()``
+
0.3.0 (2018-01-22)
++++++++++++++++++
diff --git a/gentools/__about__.py b/gentools/__about__.py
index <HASH>..<HASH> 100644
--- a/gentools/__about__.py
+++ b/gentools/__about__.py
@@ -1,4 +1,4 @@
-__version__ = '0.3.0'
+__version__ = '0.3.1'
__author__ = 'Arie Bovenberg'
__copyright__ = '2017, Arie Bovenberg'
__description__ = ('like itertools, for generators, generator functions, '
diff --git a/gentools/types.py b/gentools/types.py
index <HASH>..<HASH> 100644
--- a/gentools/types.py
+++ b/gentools/types.py
@@ -4,7 +4,6 @@ import inspect
import sys
import typing as t
from collections import OrderedDict
-from copy import copy
from itertools import starmap
from types import GeneratorType
@@ -118,6 +117,7 @@ class ReusableGenerator(Generable[T_yield, T_send, T_return],
**kwargs
fields to replace
"""
- copied = copy(self._bound_args)
+ copied = self.__signature__.bind(*self._bound_args.args,
+ **self._bound_args.kwargs)
copied.arguments.update(**kwargs)
return self.__class__(*copied.args, **copied.kwargs)
diff --git a/tests/test_core.py b/tests/test_core.py
index <HASH>..<HASH> 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -150,7 +150,9 @@ class TestReusable:
assert gen != object()
changed = gen.replace(b=9)
+ assert gen == gentype(4, 5, d=6, e=5, foo=10)
assert changed == gentype(4, 9, d=6, foo=10)
+ assert changed.b == 9
class TestSendReturn:
|
fix issue with reusable generator .replace()
|
ariebovenberg_gentools
|
train
|
51bdff41d531a986098bda4383a0291ddc47cb1c
|
diff --git a/Tests/Command/LoadFilesCommandTest.php b/Tests/Command/LoadFilesCommandTest.php
index <HASH>..<HASH> 100644
--- a/Tests/Command/LoadFilesCommandTest.php
+++ b/Tests/Command/LoadFilesCommandTest.php
@@ -87,23 +87,7 @@ class LoadFilesCommandTest extends \PHPUnit_Framework_TestCase
$tester->execute(
array(
'command' => $this->command->getName(),
- 'files' => array(__DIR__ . '/../../testdata/part_1.yml'),
- '--manager' => 'mongodb',
- '--drop' => true
- )
- );
- }
-
- public function testLoadWihtoutDefaultManager()
- {
- $this->managerMock->expects($this->once())->method('load');
-
- $tester = new CommandTester($this->command);
-
- $tester->execute(
- array(
- 'command' => $this->command->getName(),
- 'files' => array(__DIR__ . '/../../testdata/part_1.yml'),
+ 'files' => array(__DIR__ . '/../testdata/part_1.yml'),
'--manager' => 'mongodb',
'--drop' => true
)
diff --git a/Tests/Command/LoadSetsCommandTest.php b/Tests/Command/LoadSetsCommandTest.php
index <HASH>..<HASH> 100644
--- a/Tests/Command/LoadSetsCommandTest.php
+++ b/Tests/Command/LoadSetsCommandTest.php
@@ -86,22 +86,7 @@ class LoadSetsCommandTest extends \PHPUnit_Framework_TestCase
array(
'command' => $this->command->getName(),
'--manager' => 'mongodb',
- 'sets' => array(__DIR__ . '/../../testdata/SimpleSet.php'),
- )
- );
- }
-
- public function testLoadWithoutDefaultManager()
- {
- $this->managerMock->expects($this->once())->method('load');
-
- $tester = new CommandTester($this->command);
-
- $tester->execute(
- array(
- 'command' => $this->command->getName(),
- '--manager' => 'mongodb',
- 'sets' => array(__DIR__ . '/../../testdata/SimpleSet.php'),
+ 'sets' => array(__DIR__ . '/../testdata/SimpleSet.php'),
)
);
}
diff --git a/Tests/DependencyInjection/ConfigurationTest.php b/Tests/DependencyInjection/ConfigurationTest.php
index <HASH>..<HASH> 100644
--- a/Tests/DependencyInjection/ConfigurationTest.php
+++ b/Tests/DependencyInjection/ConfigurationTest.php
@@ -48,7 +48,6 @@ class ConfigurationTest extends AbstractConfigurationTestCase
'seed' => 9876,
'do_flush' => false,
- 'object_manager' => 'doctrine_object_manager',
'schema_tool' => 'doctrine_schema_tool',
'doctrine' => 'mongodb-odm',
@@ -64,7 +63,6 @@ class ConfigurationTest extends AbstractConfigurationTestCase
'seed' => 9876,
'do_flush' => false,
- 'object_manager' => 'doctrine_object_manager',
'schema_tool' => 'doctrine_schema_tool',
'doctrine' => 'mongodb-odm',
@@ -86,7 +84,6 @@ class ConfigurationTest extends AbstractConfigurationTestCase
'seed' => 9876,
'do_flush' => false,
- 'object_manager' => 'doctrine_object_manager',
'schema_tool' => 'doctrine_schema_tool',
'doctrine' => 'mongodb-odm',
@@ -102,7 +99,6 @@ class ConfigurationTest extends AbstractConfigurationTestCase
'seed' => 9876,
'do_flush' => false,
- 'object_manager' => 'doctrine_object_manager',
'schema_tool' => 'doctrine_schema_tool',
'doctrine' => 'mongodb-odm',
|
Update PHPUnit testing, using @covers now.
|
h4cc_AliceFixturesBundle
|
train
|
fd8c21985c0f044101c11c4295931edbeca3acc0
|
diff --git a/steam/client/__init__.py b/steam/client/__init__.py
index <HASH>..<HASH> 100644
--- a/steam/client/__init__.py
+++ b/steam/client/__init__.py
@@ -565,6 +565,14 @@ class SteamClient(CMClient, BuiltinBase):
while True:
gevent.sleep(300)
+ def sleep(self, seconds):
+ """Yeild and sleep N seconds. Allows other greenlets to run"""
+ gevent.sleep(seconds)
+
+ def idle(self):
+ """Yeild in the current greenlet and let other greenlets run"""
+ gevent.idle()
+
def cli_login(self, username='', password=''):
"""Generates CLI prompts to complete the login process
|
SteamClient: added sleep & idle methods
|
ValvePython_steam
|
train
|
08500466ddf07d645fd324c7bcc1813d1371f57c
|
diff --git a/test/normalized_has_many_test.rb b/test/normalized_has_many_test.rb
index <HASH>..<HASH> 100644
--- a/test/normalized_has_many_test.rb
+++ b/test/normalized_has_many_test.rb
@@ -26,6 +26,7 @@ class NormalizedHasManyTest < IdentityCache::TestCase
def test_defining_a_denormalized_has_many_cache_caches_the_list_of_associated_ids_on_the_parent_record_during_cache_miss
fetched_record = Record.fetch(@record.id)
assert_equal [2, 1], fetched_record.cached_associated_record_ids
+ assert_equal false, fetched_record.associated_records.loaded?
end
def test_fetching_associated_ids_will_populate_the_value_if_the_record_isnt_from_the_cache
|
Assert that normalized associations are loaded on fetch.
Previously this was happening through Marshal.load calling respond_to? on
the association.
|
Shopify_identity_cache
|
train
|
1c83da2ee48f58894dfa2bd6249bc20544741951
|
diff --git a/src/main/java/org/apache/commons/jexl2/JadeJexlArithmetic.java b/src/main/java/org/apache/commons/jexl2/JadeJexlArithmetic.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/apache/commons/jexl2/JadeJexlArithmetic.java
+++ b/src/main/java/org/apache/commons/jexl2/JadeJexlArithmetic.java
@@ -12,15 +12,7 @@ public class JadeJexlArithmetic extends JexlArithmetic {
public JadeJexlArithmetic(boolean lenient) {
super(lenient);
}
- /**
- * Performs a comparison.
- * @param left the left operand
- * @param right the right operator
- * @param operator the operator
- * @return -1 if left < right; +1 if left > > right; 0 if left == right
- * @throws ArithmeticException if either left or right is null
- * @since 2.1
- */
+
protected int compare(Object left, Object right, String operator) {
if (left != null && right != null) {
if (left instanceof BigDecimal || right instanceof BigDecimal) {
|
Fixing issue <I>: NumberFormatException when comparing loop-iterator with a String
|
neuland_jade4j
|
train
|
df21300d6b83e8dc1922d5efd526658086614383
|
diff --git a/test/test.js b/test/test.js
index <HASH>..<HASH> 100644
--- a/test/test.js
+++ b/test/test.js
@@ -22,6 +22,8 @@ function byClassName(elt, cls) {
return found;
}
+var ie_lt8 = /MSIE [1-7]\b/.test(navigator.userAgent);
+
test("fromTextArea", function() {
var te = document.getElementById("code");
te.value = "CONTENT";
@@ -464,7 +466,7 @@ testCM("wrappingAndResizing", function(cm) {
var coords = cm.charCoords(pos);
eqPos(pos, cm.coordsChar({x: coords.x + 2, y: coords.y + 2}));
});
-});
+}, null, ie_lt8);
testCM("measureEndOfLine", function(cm) {
cm.setSize(null, "auto");
@@ -483,7 +485,7 @@ testCM("measureEndOfLine", function(cm) {
is(endPos.x > w - 20, "not at right");
endPos = cm.charCoords({line: 0, ch: 18});
eqPos(cm.coordsChar({x: endPos.x, y: endPos.y + 2}), {line: 0, ch: 18});
-}, {mode: "text/html", value: "<!-- foo barrr -->", lineWrapping: true});
+}, {mode: "text/html", value: "<!-- foo barrr -->", lineWrapping: true}, ie_lt8);
testCM("scrollVerticallyAndHorizontally", function(cm) {
cm.setSize(100, 100);
@@ -508,7 +510,7 @@ testCM("moveV stuck", function(cm) {
cm.setCursor({line: 0, ch: val.length - 1});
cm.moveV(-1, "line");
eqPos(cm.getCursor(), {line: 0, ch: 26});
-}, {lineWrapping: true});
+}, {lineWrapping: true}, ie_lt8);
testCM("clickTab", function(cm) {
var p0 = cm.charCoords({line: 0, ch: 0}), p1 = cm.charCoords({line: 0, ch: 1});
|
Set some tests to expect failure on IE7
|
codemirror_CodeMirror
|
train
|
5305607e95ed1c8528249dfe6196e9b15451433a
|
diff --git a/preferencesfx/src/main/java/com/dlsc/preferencesfx/PreferencesFx.java b/preferencesfx/src/main/java/com/dlsc/preferencesfx/PreferencesFx.java
index <HASH>..<HASH> 100644
--- a/preferencesfx/src/main/java/com/dlsc/preferencesfx/PreferencesFx.java
+++ b/preferencesfx/src/main/java/com/dlsc/preferencesfx/PreferencesFx.java
@@ -113,7 +113,7 @@ public class PreferencesFx {
* @param modal window or not modal, that's the question.
*/
public void show(boolean modal) {
- new PreferencesFxDialog(preferencesFxModel, preferencesFxView, modal);
+ new PreferencesFxDialog(preferencesFxModel, preferencesFxView).show(modal);
}
/**
diff --git a/preferencesfx/src/main/java/com/dlsc/preferencesfx/view/PreferencesFxDialog.java b/preferencesfx/src/main/java/com/dlsc/preferencesfx/view/PreferencesFxDialog.java
index <HASH>..<HASH> 100644
--- a/preferencesfx/src/main/java/com/dlsc/preferencesfx/view/PreferencesFxDialog.java
+++ b/preferencesfx/src/main/java/com/dlsc/preferencesfx/view/PreferencesFxDialog.java
@@ -34,7 +34,6 @@ public class PreferencesFxDialog extends DialogPane {
private StorageHandler storageHandler;
private boolean persistWindowState;
private boolean saveSettings;
- private boolean modalWindow;
private ButtonType closeWindowBtnType = ButtonType.CLOSE;
private ButtonType cancelBtnType = ButtonType.CANCEL;
@@ -43,12 +42,10 @@ public class PreferencesFxDialog extends DialogPane {
*
* @param model the model of PreferencesFX
* @param preferencesFxView the master view to be display in this {@link DialogPane}
- * @param modal flag to set the dialog as modal (true) or not
*/
- public PreferencesFxDialog(PreferencesFxModel model, PreferencesFxView preferencesFxView, boolean modal) {
+ public PreferencesFxDialog(PreferencesFxModel model, PreferencesFxView preferencesFxView) {
this.model = model;
this.preferencesFxView = preferencesFxView;
- this.modalWindow = modal;
persistWindowState = model.isPersistWindowState();
saveSettings = model.isSaveSettings();
storageHandler = model.getStorageHandler();
@@ -57,21 +54,29 @@ public class PreferencesFxDialog extends DialogPane {
setupDialogClose();
loadLastWindowState();
setupButtons();
- dialog.show();
if (model.getHistoryDebugState()) {
setupDebugHistoryTable();
}
}
- private void layoutForm() {
- dialog.setTitle("PreferencesFx");
- dialog.setResizable(true);
- getButtonTypes().addAll(closeWindowBtnType, cancelBtnType);
- if (modalWindow) {
+ public void show() {
+ show(false);
+ }
+
+ public void show(boolean modal) {
+ if(modal) {
dialog.initModality(Modality.APPLICATION_MODAL);
+ dialog.showAndWait();
} else {
dialog.initModality(Modality.NONE);
+ dialog.show();
}
+ }
+
+ private void layoutForm() {
+ dialog.setTitle("PreferencesFx");
+ dialog.setResizable(true);
+ getButtonTypes().addAll(closeWindowBtnType, cancelBtnType);
dialog.setDialogPane(this);
setContent(preferencesFxView);
}
|
Rework how modality is applied on PreferencesFXDialog
- modality is set before showing, not at construction time
- a show() method was added, opening the door for #6
|
dlemmermann_PreferencesFX
|
train
|
5b1e8589e9d2638032e5e20261ab01d58bcece71
|
diff --git a/pkg/apiserver/api_installer.go b/pkg/apiserver/api_installer.go
index <HASH>..<HASH> 100644
--- a/pkg/apiserver/api_installer.go
+++ b/pkg/apiserver/api_installer.go
@@ -22,6 +22,7 @@ import (
"net/url"
gpath "path"
"reflect"
+ "sort"
"strings"
"github.com/GoogleCloudPlatform/kubernetes/pkg/api"
@@ -65,8 +66,16 @@ func (a *APIInstaller) Install() (ws *restful.WebService, errors []error) {
redirectHandler := (&RedirectHandler{a.group.Storage, a.group.Codec, a.group.Context, a.info})
proxyHandler := (&ProxyHandler{a.prefix + "/proxy/", a.group.Storage, a.group.Codec, a.group.Context, a.info})
- for path, storage := range a.group.Storage {
- if err := a.registerResourceHandlers(path, storage, ws, watchHandler, redirectHandler, proxyHandler); err != nil {
+ // Register the paths in a deterministic (sorted) order to get a deterministic swagger spec.
+ paths := make([]string, len(a.group.Storage))
+ var i int = 0
+ for path := range a.group.Storage {
+ paths[i] = path
+ i++
+ }
+ sort.Strings(paths)
+ for _, path := range paths {
+ if err := a.registerResourceHandlers(path, a.group.Storage[path], ws, watchHandler, redirectHandler, proxyHandler); err != nil {
errors = append(errors, err)
}
}
|
Updating api_installer to register paths in a deterministic order to get a deterministic swagger spec
|
kubernetes_kubernetes
|
train
|
482e023be94de69f4ff754be50333727c24d36d7
|
diff --git a/lib/extensions/report_core/extensionHooks.js b/lib/extensions/report_core/extensionHooks.js
index <HASH>..<HASH> 100644
--- a/lib/extensions/report_core/extensionHooks.js
+++ b/lib/extensions/report_core/extensionHooks.js
@@ -26,11 +26,12 @@ module.exports = function(extOptions) {
callback({ message : "Error parsing JSON report file : "+ options.input, error : e.toString() });
}
if (report !== null) {
- var newOptions = ce.cloneextend(options, { report : report });
- $f(require("./generators/structureGenerator.js"))({name : "structureJson"}).generate(newOptions)
- .with(require("./generators/summaryGenerator.js"))({name : "summaryJson"}).generate(newOptions)
- .with(require("./generators/glossaryGenerator.js"))({name : "glossaryReport"}).generate(newOptions)
- .with(require("./generators/stepsGenerator.js"))({name : "stepsReport"}).generate(newOptions)
+ options.report = report;
+
+ $f(require("./generators/structureGenerator.js"))({name : "structureJson"}).generate(options)
+ .with(require("./generators/summaryGenerator.js"))({name : "summaryJson"}).generate(options)
+ .with(require("./generators/glossaryGenerator.js"))({name : "glossaryReport"}).generate(options)
+ .with(require("./generators/stepsGenerator.js"))({name : "stepsReport"}).generate(options)
.go(function(err, res) {
if (err) {callback(err);} else {
self.renderData = {
diff --git a/lib/extensions/report_core/generators/structureGenerator.js b/lib/extensions/report_core/generators/structureGenerator.js
index <HASH>..<HASH> 100644
--- a/lib/extensions/report_core/generators/structureGenerator.js
+++ b/lib/extensions/report_core/generators/structureGenerator.js
@@ -4,12 +4,8 @@ var statusOrder = ["passed", "skipped", "pending", "stepless", "undefined", "fai
var statusManager = require("./statusManager.js")(statusOrder);
exports.generate = function(options, callback) {
- fs.readFile(options.input, function(err, jsonResults) {
- if (err) {callback(err);} else {
- callback(null, buildStructure(JSON.parse(jsonResults.toString())));
- }
- });
-
+ callback(null, buildStructure(options.report));
+
function buildStructure(jsonReport) {
var structure = {
"id" : "root",
diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "cuke-tree",
- "version": "0.2.23",
+ "version": "0.2.24",
"description": "A hierarchical Cucumber test report generator and IDE.",
"author": {
"name": "Pete Clark",
|
Release <I>
Improved test suite performance.
|
peteclark82_cuke-tree
|
train
|
d56ccdcbbc80fb88eb39441a0afb1c7bd3c3f64a
|
diff --git a/flake8_import_order/flake8_linter.py b/flake8_import_order/flake8_linter.py
index <HASH>..<HASH> 100644
--- a/flake8_import_order/flake8_linter.py
+++ b/flake8_import_order/flake8_linter.py
@@ -26,7 +26,7 @@ class Linter(ImportOrderChecker):
default=DEFAULT_IMPORT_ORDER_STYLE,
action="store",
type="string",
- help="Style to follow. Available: cryptography, google"
+ help="Style to follow. Available: cryptography, google, smarkets"
)
parser.config_options.append("application-import-names")
parser.config_options.append("import-order-style")
|
Add smarkets style to help text
|
PyCQA_flake8-import-order
|
train
|
5dbd1b34b7e2349762bf32ac9f5f3b817039860b
|
diff --git a/listener_test.go b/listener_test.go
index <HASH>..<HASH> 100644
--- a/listener_test.go
+++ b/listener_test.go
@@ -12,17 +12,69 @@ var _ = Describe("listener", func() {
It("should store data from ApigeeSync in the database", func(done Done) {
- var event = common.ChangeList{}
- rowitemp := common.Row{}
- scv := &common.ColumnVal{
- Value: "api_product_0",
- Type: 1,
- }
- rowitem["id"] = scv
- event.Changes = []Change{
+ var event = ChangeSet{}
+ event.Changes = []ChangePayload{
+ {
+ Data: DataPayload{
+ EntityType: "apiproduct",
+ Operation: "create",
+ PldCont: Payload{
+ Organization: "test_org",
+ AppName: "Api_product_sync",
+ Resources: []string{"/**", "/test"},
+ Environments: []string{"Env_0", "Env_1"},
+ },
+ },
+ },
+ {
+ Data: DataPayload{
+ EntityType: "developer",
+ Operation: "create",
+ EntityIdentifier: "developer_id_sync",
+ PldCont: Payload{
+ Organization: "test_org",
+ Email: "person_sync@apigee.com",
+ Status: "Active",
+ UserName: "user_sync",
+ FirstName: "user_first_name_sync",
+ LastName: "user_last_name_sync",
+ },
+ },
+ },
+ {
+ Data: DataPayload{
+ EntityType: "app",
+ Operation: "create",
+ EntityIdentifier: "application_id_sync",
+ PldCont: Payload{
+ Organization: "test_org",
+ Email: "person_sync@apigee.com",
+ Status: "Approved",
+ AppName: "application_id_sync",
+ DeveloperId: "developer_id_sync",
+ CallbackUrl: "call_back_url",
+ },
+ },
+ },
{
- Table: "api_product",
- NewRow: rowitemp,
+ Data: DataPayload{
+ EntityType: "credential",
+ Operation: "create",
+ EntityIdentifier: "credential_sync",
+ PldCont: Payload{
+ Organization: "test_org",
+ AppId: "application_id_sync",
+ Status: "Approved",
+ ConsumerSecret: "consumer_secret_sync",
+ IssuedAt: 349583485,
+ ApiProducts: []Apip{
+ {
+ ApiProduct: "Api_product_sync",
+ Status: "Approved",
+ },
+ },
+ },
+ },
},
}
|
Remove change-related tests (for now).
|
apid_apidApiMetadata
|
train
|
3463e18ae05d7225a400c6a6520a853a598c1059
|
diff --git a/src/Phpro/SoapClient/Console/Command/GenerateConfigCommand.php b/src/Phpro/SoapClient/Console/Command/GenerateConfigCommand.php
index <HASH>..<HASH> 100644
--- a/src/Phpro/SoapClient/Console/Command/GenerateConfigCommand.php
+++ b/src/Phpro/SoapClient/Console/Command/GenerateConfigCommand.php
@@ -61,7 +61,7 @@ CONFIRMATION;
// Client
$this->addNonEmptySetter($context, 'setClientDestination', $baseDir);
- $this->addNonEmptySetter($context, 'setClientName', $name.'Client');
+ $this->addNonEmptySetter($context, 'setClientName', $name);
$this->addNonEmptySetter($context, 'setClientNamespace', $namespace);
// Classmap
|
Do not append Client to the Client name, let the user decide
|
phpro_soap-client
|
train
|
4f0e5455d5089df459542e0e9463d9c67bfd86a0
|
diff --git a/lib/faraday.rb b/lib/faraday.rb
index <HASH>..<HASH> 100644
--- a/lib/faraday.rb
+++ b/lib/faraday.rb
@@ -92,7 +92,7 @@ module Faraday
alias require_lib require_libs
- def respond_to_missing?(symbol, include_private = false)
+ def respond_to?(symbol, include_private = false)
default_connection.respond_to?(symbol) || super
end
|
Use `respond_to?` for compatibility.
Older versions of Ruby do not support `respond_to_missing?`. In order
to reach a wider audience, we simply define `respond_to?`.
|
lostisland_faraday
|
train
|
f31b8d1356f12105d5a76dbef989441b1c2a0a62
|
diff --git a/master/buildbot/www/authz/roles.py b/master/buildbot/www/authz/roles.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/www/authz/roles.py
+++ b/master/buildbot/www/authz/roles.py
@@ -64,7 +64,7 @@ class RolesFromEmails(RolesFromBase):
class RolesFromDomain(RolesFromEmails):
def __init__(self, **kwargs):
- super(RolesFromEmails, self).__init__()
+ RolesFromBase.__init__(self)
self.domain_roles = {}
for role, domains in iteritems(kwargs):
|
Fix pylint complaint about first argument given to super.
Ensure we follow the same pattern as rest of code.
|
buildbot_buildbot
|
train
|
9d79c8f8d5b2e0419319ce85bdcda6eb741db2de
|
diff --git a/test/test_bad_start.py b/test/test_bad_start.py
index <HASH>..<HASH> 100755
--- a/test/test_bad_start.py
+++ b/test/test_bad_start.py
@@ -71,7 +71,7 @@ HIGH_PORT = 65488
run = 0 # We will open some ports but not close them (yes it's not good) and
# so we will open a range from a high port
-class template_Test_Daemon_Bad_Start():
+class template_Daemon_Bad_Start():
def get_login_and_group(self, p):
try:
@@ -143,23 +143,23 @@ class template_Test_Daemon_Bad_Start():
os.rmdir(d1.workdir)
-class Test_Broker_Bad_Start(template_Test_Daemon_Bad_Start, unittest.TestCase):
+class Test_Broker_Bad_Start(template_Daemon_Bad_Start, unittest.TestCase):
daemon_cls = Broker
-class Test_Scheduler_Bad_Start(template_Test_Daemon_Bad_Start, unittest.TestCase):
+class Test_Scheduler_Bad_Start(template_Daemon_Bad_Start, unittest.TestCase):
daemon_cls = Shinken
-class Test_Poller_Bad_Start(template_Test_Daemon_Bad_Start, unittest.TestCase):
+class Test_Poller_Bad_Start(template_Daemon_Bad_Start, unittest.TestCase):
daemon_cls = Poller
-class Test_Reactionner_Bad_Start(template_Test_Daemon_Bad_Start, unittest.TestCase):
+class Test_Reactionner_Bad_Start(template_Daemon_Bad_Start, unittest.TestCase):
daemon_cls = Reactionner
-class Test_Arbiter_Bad_Start(template_Test_Daemon_Bad_Start, unittest.TestCase):
+class Test_Arbiter_Bad_Start(template_Daemon_Bad_Start, unittest.TestCase):
daemon_cls = Arbiter
def create_daemon(self):
|
Fix : test/test_bad_start.py : Removing Test from the template name
Unittest only runs tests that subclass unittest.TestCase, but Jenkins runs tests with nosetests, which doesn't. We have to change the template name to prevent nosetests from picking it up.
|
Alignak-monitoring_alignak
|
train
|
866709aa85768d5d0ed3e3b32464e4e9316ae201
|
diff --git a/elki-clustering/src/main/java/elki/clustering/em/MultivariateGaussianModel.java b/elki-clustering/src/main/java/elki/clustering/em/MultivariateGaussianModel.java
index <HASH>..<HASH> 100644
--- a/elki-clustering/src/main/java/elki/clustering/em/MultivariateGaussianModel.java
+++ b/elki-clustering/src/main/java/elki/clustering/em/MultivariateGaussianModel.java
@@ -200,7 +200,7 @@ public class MultivariateGaussianModel implements EMClusterModel<NumberVector, E
for(int i = 0; i < covariance.length; i++) {
s += covariance[i][i];
}
- s *= SINGULARITY_CHEAT / covariance.length;
+ s = s > 1e-100 ? (s * SINGULARITY_CHEAT / covariance.length) : 1e-50;
for(int i = 0; i < covariance.length; i++) {
covariance[i][i] += s;
}
|
Small tweak to singularity avoidance in GMM.
|
elki-project_elki
|
train
|
f7c5c79b3a388cfe1b881f584f2e1077590913ef
|
diff --git a/app/src/Bolt/Composer/CommandRunner.php b/app/src/Bolt/Composer/CommandRunner.php
index <HASH>..<HASH> 100644
--- a/app/src/Bolt/Composer/CommandRunner.php
+++ b/app/src/Bolt/Composer/CommandRunner.php
@@ -53,14 +53,15 @@ class CommandRunner
public function check()
{
- $response = $this->execute("update --dry-run -d extensions/");
- if($response[2] === "") {
- return "All packages are up to date";
- } else {
- $output = "The following operations are available to run...<br>";
- $output .= implode(array_slice($response, 2), "<br>" );
- return $output;
+ $updates = array();
+ $packages = array_filter($this->execute("show -i -N -d extensions/"));
+ foreach($packages as $package) {
+ $response = array_filter(array_slice($this->execute('update --dry-run '.$package),2));
+ if(count($response)) {
+ $updates[] = $package;
+ }
}
+ return json_encode($updates);
}
diff --git a/app/view/js/bolt-extend.js b/app/view/js/bolt-extend.js
index <HASH>..<HASH> 100644
--- a/app/view/js/bolt-extend.js
+++ b/app/view/js/bolt-extend.js
@@ -54,11 +54,16 @@ var BoltExtender = Object.extend(Object, {
var controller = this;
controller.find('.update-container').show();
- var target = controller.find(".update-output" );
+ var target = controller.find(".update-list-items" );
active_console = target;
active_console.html("Searching for available updates...");
jQuery.get(baseurl+'check', function(data) {
- target.html(data);
+ console.log(data);
+ for(var e in data) {
+ ext = data[e];
+ target.append("<tr><td class='ext-list'><strong class='title'>"+ext.title+"</strong></td><td> <a data-action='update-package' class='btn btn-sm btn-danger' href='"+baseurl+"update?package="+ext.name+"'>Install Update</a></td></tr>");
+ }
+
});
},
diff --git a/app/view/twig/extend/extend.twig b/app/view/twig/extend/extend.twig
index <HASH>..<HASH> 100644
--- a/app/view/twig/extend/extend.twig
+++ b/app/view/twig/extend/extend.twig
@@ -19,6 +19,19 @@
<div class='update-container hide-block'>
<h3>{{ __('Updates') }}</h3>
<pre class="update-output console">{{ __('Checking for available updates (this may take up to 60 seconds)')}}..</pre>
+ <div class="update-list hide-block">
+ <table class="dashboardlisting table">
+ <thead>
+ <tr>
+ <th>{{ __('Extension') }}</th>
+ <th>{{ __('Actions') }}</th>
+ </tr>
+ </thead>
+ <tbody class="installed-list-items">
+
+ </tbody>
+ </table>
+ </div>
</div>
<section class="installed-container">
<h3>{{ __('Your Currently Installed Extensions') }}</h3>
|
work on allowing running of updates individually
|
bolt_bolt
|
train
|
646468684f8c0ca28d4ab237511f274222ed324d
|
diff --git a/admin/jqadm/templates/type/price/list-standard.php b/admin/jqadm/templates/type/price/list-standard.php
index <HASH>..<HASH> 100644
--- a/admin/jqadm/templates/type/price/list-standard.php
+++ b/admin/jqadm/templates/type/price/list-standard.php
@@ -174,7 +174,6 @@ $columnList = [
'catalog' => $this->translate( 'admin', 'catalog' ),
'customer' => $this->translate( 'admin', 'customer' ),
'media' => $this->translate( 'admin', 'media' ),
- 'price' => $this->translate( 'admin', 'price' ),
'product' => $this->translate( 'admin', 'product' ),
'service' => $this->translate( 'admin', 'service' ),
'supplier' => $this->translate( 'admin', 'supplier' ),
@@ -188,6 +187,7 @@ $columnList = [
]],
'price.type.code' => [],
'price.type.label' => [],
+ 'price.type.position' => ['op' => '>=', 'type' => 'number'],
'price.type.ctime' => ['op' => '-', 'type' => 'datetime-local'],
'price.type.mtime' => ['op' => '-', 'type' => 'datetime-local'],
'price.type.editor' => [],
|
Add missing position columns (#<I>)
Add missing position columns and remove the price duplicated from the domain list
|
aimeos_ai-admin-jqadm
|
train
|
721a47e6072ccfec05774c1f867a7b286a8968f2
|
diff --git a/lib/executor/variable_looping_vus.go b/lib/executor/variable_looping_vus.go
index <HASH>..<HASH> 100644
--- a/lib/executor/variable_looping_vus.go
+++ b/lib/executor/variable_looping_vus.go
@@ -39,10 +39,6 @@ import (
const variableLoopingVUsType = "variable-looping-vus"
-// How often we can make VU adjustments when processing stages
-// TODO: make configurable, in some bounds?
-const minIntervalBetweenVUAdjustments = 100 * time.Millisecond
-
func init() {
lib.RegisterExecutorConfigType(
variableLoopingVUsType,
@@ -200,11 +196,18 @@ func (vlvc VariableLoopingVUsConfig) getRawExecutionSteps(et *lib.ExecutionTuple
}
// Reserve the scaled StartVUs at the beginning
- prevScaledVUs := et.ES.Scale(vlvc.StartVUs.Int64)
+ prevScaledVUs := et.ScaleInt64(vlvc.StartVUs.Int64)
steps := []lib.ExecutionStep{{TimeOffset: 0, PlannedVUs: uint64(prevScaledVUs)}}
timeFromStart := time.Duration(0)
totalDuration := time.Duration(0)
+ addStep := func(step lib.ExecutionStep) {
+ if len(steps) == 0 || steps[len(steps)-1].PlannedVUs != step.PlannedVUs {
+ steps = append(steps, step)
+ prevScaledVUs = int64(step.PlannedVUs)
+ }
+ }
+
for _, stage := range vlvc.Stages {
stageEndVUs := stage.Target.Int64
stageDuration := time.Duration(stage.Duration.Duration)
@@ -221,18 +224,14 @@ func (vlvc VariableLoopingVUsConfig) getRawExecutionSteps(et *lib.ExecutionTuple
// Handle 0-duration stages, i.e. instant VU jumps
if stageDuration == 0 {
fromVUs = stageEndVUs
- prevScaledVUs = et.ES.Scale(stageEndVUs)
- steps = append(steps, lib.ExecutionStep{
+ addStep(lib.ExecutionStep{
TimeOffset: timeFromStart,
- PlannedVUs: uint64(prevScaledVUs),
+ PlannedVUs: uint64(et.ScaleInt64(stageEndVUs)),
})
continue
}
- // For each stage, limit any VU adjustments between the previous
- // number of VUs and the stage's target to happen at most once
- // every minIntervalBetweenVUAdjustments. No floats or ratios,
- // since nanoseconds should be good enough for anyone... :)
+ // No floats or ratios,since nanoseconds should be good enough for anyone... :)
stepInterval := stageDuration / time.Duration(stageVUAbsDiff)
// Loop through the potential steps, adding an item to the
@@ -247,29 +246,22 @@ func (vlvc VariableLoopingVUsConfig) getRawExecutionSteps(et *lib.ExecutionTuple
stepGlobalVUs := fromVUs + int64(
math.Round((float64(t)*float64(stageEndVUs-fromVUs))/float64(stageDuration)),
)
- stepScaledVus := et.ES.Scale(stepGlobalVUs)
-
- if stepScaledVus == prevScaledVUs {
- // only add steps when there's a change in the number of VUs
- continue
- }
// VU reservation for gracefully ramping down is handled as a
// separate method: reserveVUsForGracefulRampDowns()
- steps = append(steps, lib.ExecutionStep{
+ addStep(lib.ExecutionStep{
TimeOffset: timeFromStart + t,
- PlannedVUs: uint64(stepScaledVus),
+ PlannedVUs: uint64(et.ScaleInt64(stepGlobalVUs)),
})
- prevScaledVUs = stepScaledVus
}
fromVUs = stageEndVUs
- prevScaledVUs = et.ES.Scale(stageEndVUs)
timeFromStart += stageDuration
- steps = append(steps, lib.ExecutionStep{
+
+ addStep(lib.ExecutionStep{
TimeOffset: timeFromStart,
- PlannedVUs: uint64(prevScaledVUs),
+ PlannedVUs: uint64(et.ScaleInt64(stageEndVUs)),
})
}
diff --git a/lib/executor/variable_looping_vus_test.go b/lib/executor/variable_looping_vus_test.go
index <HASH>..<HASH> 100644
--- a/lib/executor/variable_looping_vus_test.go
+++ b/lib/executor/variable_looping_vus_test.go
@@ -286,7 +286,6 @@ func TestVariableLoopingVUsConfigExecutionPlanExampleOneThird(t *testing.T) {
expRawStepsNoZeroEnd := []lib.ExecutionStep{
{TimeOffset: 0 * time.Second, PlannedVUs: 1},
{TimeOffset: 1 * time.Second, PlannedVUs: 2},
- {TimeOffset: 2 * time.Second, PlannedVUs: 2},
{TimeOffset: 4 * time.Second, PlannedVUs: 1},
{TimeOffset: 7 * time.Second, PlannedVUs: 0},
{TimeOffset: 8 * time.Second, PlannedVUs: 1},
@@ -294,7 +293,6 @@ func TestVariableLoopingVUsConfigExecutionPlanExampleOneThird(t *testing.T) {
{TimeOffset: 12 * time.Second, PlannedVUs: 1},
{TimeOffset: 15 * time.Second, PlannedVUs: 0},
{TimeOffset: 16 * time.Second, PlannedVUs: 1},
- {TimeOffset: 18 * time.Second, PlannedVUs: 1},
{TimeOffset: 20 * time.Second, PlannedVUs: 0},
}
rawStepsNoZeroEnd := conf.getRawExecutionSteps(et, false)
|
Fix not adding steps with the same amount of VUs
|
loadimpact_k6
|
train
|
11f64aec4c8fa7c8d7705c5fc82192825acdde65
|
diff --git a/components/list/ListHeader.js b/components/list/ListHeader.js
index <HASH>..<HASH> 100644
--- a/components/list/ListHeader.js
+++ b/components/list/ListHeader.js
@@ -1,8 +1,8 @@
-import React, { PropTypes, Text } from 'react-native';
+import React, { Text } from 'react-native';
const THEMES = require('./style/index.ios').ThemesList;
export default class Header extends React.Component {
render() {
return (<Text style={THEMES.Header}>{this.props.children}</Text>);
}
-});
+}
|
1. Add RN version for button&list components.
|
ant-design_ant-design-mobile
|
train
|
da1f7055011c2316faf3460428e13264ca3c3e8a
|
diff --git a/photon.py b/photon.py
index <HASH>..<HASH> 100644
--- a/photon.py
+++ b/photon.py
@@ -232,7 +232,7 @@ def requester(url):
####
def zap(url):
- response = get(url + '/robots.txt').text # makes request to robots.txt
+ response = get(url + '/robots.txt', verify=False).text # makes request to robots.txt
if '<body' not in response: # making sure robots.txt isn't some fancy 404 page
matches = findall(r'Allow: (.*)|Disallow: (.*)', response) # If you know it, you know it
if matches:
@@ -243,7 +243,7 @@ def zap(url):
storage.add(url) # add the url to storage list for crawling
robots.add(url) # add the url to robots list
print('%s URLs retrieved from robots.txt: %s' % (good, len(robots)))
- response = get(url + '/sitemap.xml').text # makes request to sitemap.xml
+ response = get(url + '/sitemap.xml',verify=False).text # makes request to sitemap.xml
if '<body' not in response: # making sure robots.txt isn't some fancy 404 page
matches = findall(r'<loc>[^<]*</loc>', response) # regex for extracting urls
if matches: # if there are any matches
|
fix the error of SSL mismatch in the certificate
|
s0md3v_Photon
|
train
|
035eb7444d529ef52e150fe0eecf87f9fbb0b800
|
diff --git a/src/info/timeline.js b/src/info/timeline.js
index <HASH>..<HASH> 100644
--- a/src/info/timeline.js
+++ b/src/info/timeline.js
@@ -149,6 +149,7 @@ d3plus.info.timeline = function(vars) {
.attr("font-family",vars.style.timeline.tick.family)
.attr("font-size",vars.style.timeline.tick.size)
.attr("text-anchor",vars.style.timeline.tick.align)
+ .attr("opacity",0)
.text(function(d){
return d
})
@@ -185,6 +186,12 @@ d3plus.info.timeline = function(vars) {
else {
var start_x = vars.width.value/2 - timeline_width/2
}
+
+ text
+ .text(function(d,i){
+ return i%step == 0 ? d : ""
+ })
+ .attr("opacity",1)
text.transition().duration(vars.style.timing.transitions)
.attr("fill",function(d){
@@ -202,9 +209,6 @@ d3plus.info.timeline = function(vars) {
.attr("x",function(d,i){
return start_x + (label_width*i) + label_width/2
})
- .text(function(d,i){
- return i%step == 0 ? d : ""
- })
.attr("y",function(d){
var diff = diff = parseFloat(d3.select(this).style("font-size"),10)/5
var y = vars.style.timeline.padding+vars.style.timeline.height/2+this.getBBox().height/2 - diff
@@ -271,8 +275,7 @@ d3plus.info.timeline = function(vars) {
.attr("opacity",1)
.call(brush)
- brush_group.selectAll("rect")
- .transition().duration(vars.style.timing.transitions)
+ brush_group.selectAll("rect.background, rect.extent")
.attr("height",vars.style.timeline.height)
brush_group.selectAll("rect.background")
|
fixed timeline brush style error with new d3
|
alexandersimoes_d3plus
|
train
|
5e357be0dc23cd08b391a60aeaba16d0bccec822
|
diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -17,6 +17,7 @@
],
"dependencies": {
"chrono-node": "^1.2.5",
+ "lodash.clonedeep": "^4.5.0",
"lodash.isequal": "^4.5.0",
"talisman": "^0.15.0"
},
diff --git a/parser/encounter.js b/parser/encounter.js
index <HASH>..<HASH> 100644
--- a/parser/encounter.js
+++ b/parser/encounter.js
@@ -1,5 +1,7 @@
'use strict';
+const cloneDeep = require('lodash.clonedeep');
+
function scorePartial(tokens, maxDepth, score) {
const distance = Math.abs(score - tokens);
@@ -171,6 +173,14 @@ class Match {
this.score = score;
this.data = data;
}
+
+ copy() {
+ const r = new Match(this.index, this.score, cloneDeep(this.data));
+ if(this.expression) {
+ r.expression = cloneDeep(r.expression);
+ }
+ return r;
+ }
}
module.exports = Encounter;
diff --git a/resolver/value.js b/resolver/value.js
index <HASH>..<HASH> 100644
--- a/resolver/value.js
+++ b/resolver/value.js
@@ -38,9 +38,17 @@ class Value extends Node {
).then(value => {
if(typeof value !== 'undefined' && value !== null) {
nextMatched.forEach(match => {
- match.data.values[this.id] = value;
+ if(encounter.partial && Array.isArray(value)) {
+ value.forEach(v => {
+ match = match.copy();
+ match.data.values[this.id] = v;
+ results.push(match);
+ });
+ } else {
+ match.data.values[this.id] = value;
+ results.push(match);
+ }
});
- results.push(...nextMatched);
return nextMatched;
}
@@ -58,7 +66,9 @@ class Value extends Node {
return encounter.next(1.0, 0);
}
- return promise.then(() => results.length > 0 ? results : null);
+ return promise.then(() => {
+ return results.length > 0 ? results : null;
+ });
}
toString() {
diff --git a/test/resolver.test.js b/test/resolver.test.js
index <HASH>..<HASH> 100644
--- a/test/resolver.test.js
+++ b/test/resolver.test.js
@@ -303,4 +303,51 @@ describe('Resolver', function() {
});
});
});
+
+ describe('Graph with custom value', function() {
+ const values = [
+ 'one',
+ 'two',
+ 'three'
+ ];
+ const resolver = new Builder(lang)
+ .value('name', function(encounter) {
+ let text = encounter.text();
+ if(encounter.partial) {
+ return values.filter(f => {
+ return f.indexOf(text) === 0;
+ });
+ } else {
+ if(values.indexOf(text) >= 0) {
+ return text;
+ }
+ }
+ })
+ .add('do {name}')
+ .build();
+
+ it('Match', function() {
+ return resolver.match('do one')
+ .then(results => {
+ expect(results.matches.length).to.equal(1);
+ expect(results.best.values.name).to.equal('one');
+ });
+ });
+
+ it('No match', function() {
+ return resolver.match('do four')
+ .then(results => {
+ expect(results.matches.length).to.equal(0);
+ });
+ });
+
+ it('Partial', function() {
+ return resolver.match('do t', {
+ partial: true
+ })
+ .then(results => {
+ expect(results.matches.length).to.equal(2);
+ });
+ });
+ })
})
|
Partial matching for custom values now support multiple results
|
aholstenson_ecolect-js
|
train
|
c2699d4855eb236050469cc90a4c18a9aad895df
|
diff --git a/plugins/Login/Controller.php b/plugins/Login/Controller.php
index <HASH>..<HASH> 100644
--- a/plugins/Login/Controller.php
+++ b/plugins/Login/Controller.php
@@ -71,7 +71,7 @@ class Controller extends \Piwik\Plugin\Controller
* @param string $messageNoAccess Access error message
* @param bool $infoMessage
* @internal param string $currentUrl Current URL
- * @return void
+ * @return string
*/
function login($messageNoAccess = null, $infoMessage = false)
{
diff --git a/plugins/Login/Login.php b/plugins/Login/Login.php
index <HASH>..<HASH> 100644
--- a/plugins/Login/Login.php
+++ b/plugins/Login/Login.php
@@ -46,7 +46,8 @@ class Login extends \Piwik\Plugin
$exceptionMessage = $exception->getMessage();
$controller = new Controller();
- $controller->login($exceptionMessage, '' /* $exception->getTraceAsString() */);
+
+ echo $controller->login($exceptionMessage, '' /* $exception->getTraceAsString() */);
}
/**
|
make sure to display login form if user has not have enough permissions
|
matomo-org_matomo
|
train
|
08ca67072bdef0780704010eddbd8d0842ffcf04
|
diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -22,8 +22,13 @@ exports.default = function (_ref3) {
return;
}
+ var param = [t.spreadProperty('arg')];
+ var body = t.binaryExpression('*', t.identifier('a'), t.identifier('b'));
+ var lambda = t.arrowFunctionExpression(param, body);
+
var args = t.isSequenceExpression(path.node.left) ? path.node.left.expressions : [path.node.left];
- path.replaceWith(t.callExpression(path.node.right, args));
+ // path.replaceWith(t.callExpression(path.node.right, args));
+ path.replaceWith(lambda);
}
}
};
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -9,8 +9,14 @@ export default ({ types: t }) => ({
return;
}
- const args = t.isSequenceExpression(path.node.left) ? path.node.left.expressions : [path.node.left];
- path.replaceWith(t.callExpression(path.node.right, args));
+ const lambda = t.arrowFunctionExpression(
+ [t.restElement(t.identifier('args'))],
+ t.callExpression(path.node.right, [
+ t.callExpression(path.node.left, [t.spreadElement(t.identifier('args'))])
+ ])
+ );
+
+ path.replaceWith(lambda);
}
}
});
|
Well, it works. I'll add more tests
|
haskellcamargo_babel-plugin-function-composition
|
train
|
1f6abeee81777fc988be90aafba6160526860749
|
diff --git a/apiserver/resource.go b/apiserver/resource.go
index <HASH>..<HASH> 100644
--- a/apiserver/resource.go
+++ b/apiserver/resource.go
@@ -35,7 +35,7 @@ func (deps resourcesHandlerDeps) ConnectForUnitAgent(req *http.Request) (*state.
unit, ok := ent.(*state.Unit)
if !ok {
- logger.Criticalf("unexpected type: %T", ent)
+ logger.Errorf("unexpected type: %T", ent)
return nil, nil, errors.Errorf("unexpected type: %T", ent)
}
return st, unit, nil
|
Downgrade a logger call.
|
juju_juju
|
train
|
05b770a93f87ac12bd40dd94318afc21dc2e6ff7
|
diff --git a/telethon/extensions/markdown.py b/telethon/extensions/markdown.py
index <HASH>..<HASH> 100644
--- a/telethon/extensions/markdown.py
+++ b/telethon/extensions/markdown.py
@@ -90,7 +90,11 @@ def parse(message, delimiters=None, url_re=None):
for ent in result:
# If the end is after our start, it is affected
if ent.offset + ent.length > i:
- ent.length -= len(delim)
+ # If the old start is also before ours, it is fully enclosed
+ if ent.offset <= i:
+ ent.length -= len(delim) * 2
+ else:
+ ent.length -= len(delim)
# Append the found entity
ent = delimiters[delim]
|
Fix directly nested markdown entities
|
LonamiWebs_Telethon
|
train
|
7dc28e1c5d9f99d7f3e65d0bdf38bd63d4f7c66f
|
diff --git a/lib/media-stream.js b/lib/media-stream.js
index <HASH>..<HASH> 100644
--- a/lib/media-stream.js
+++ b/lib/media-stream.js
@@ -23,7 +23,6 @@ function MediaStream (media, opts) {
debug('new mediastream %s %s', media, JSON.stringify(opts))
self._mediaSource = new MediaSource()
- self._playing = false
self._sourceBuffer = null
self._cb = null
@@ -61,10 +60,6 @@ MediaStream.prototype._write = function (chunk, encoding, cb) {
self._sourceBuffer.appendBuffer(chunk)
debug('appendBuffer %s', chunk.length)
self._cb = cb
- if (!self._playing) {
- self.media.play()
- self._playing = true
- }
}
MediaStream.prototype._flow = function () {
|
media-stream: don't autoplay media
autoplay can be triggered by setting autoplay property on the `video`
or `audio` tag.
```html
<video autoplay>
```
|
webtorrent_webtorrent
|
train
|
ebe3f92f7b3d102f3b2f8f70f848788cbb7b6395
|
diff --git a/liquibase-maven-plugin/src/main/java/org/liquibase/maven/plugins/LiquibaseHistoryMojo.java b/liquibase-maven-plugin/src/main/java/org/liquibase/maven/plugins/LiquibaseHistoryMojo.java
index <HASH>..<HASH> 100644
--- a/liquibase-maven-plugin/src/main/java/org/liquibase/maven/plugins/LiquibaseHistoryMojo.java
+++ b/liquibase-maven-plugin/src/main/java/org/liquibase/maven/plugins/LiquibaseHistoryMojo.java
@@ -14,7 +14,7 @@ public class LiquibaseHistoryMojo extends AbstractLiquibaseMojo {
@Override
protected void performLiquibaseTask(Liquibase liquibase) throws LiquibaseException {
- CommandScope historyCommand = new CommandScope("history");
+ CommandScope historyCommand = new CommandScope(InternalHistoryCommandStep.COMMAND_NAME);
historyCommand.addArgumentValue(InternalHistoryCommandStep.DATABASE_ARG, getLiquibase().getDatabase());
diff --git a/liquibase-maven-plugin/src/main/java/org/liquibase/maven/plugins/LiquibaseSyncHubMojo.java b/liquibase-maven-plugin/src/main/java/org/liquibase/maven/plugins/LiquibaseSyncHubMojo.java
index <HASH>..<HASH> 100644
--- a/liquibase-maven-plugin/src/main/java/org/liquibase/maven/plugins/LiquibaseSyncHubMojo.java
+++ b/liquibase-maven-plugin/src/main/java/org/liquibase/maven/plugins/LiquibaseSyncHubMojo.java
@@ -41,7 +41,7 @@ public class LiquibaseSyncHubMojo extends AbstractLiquibaseChangeLogMojo {
throws LiquibaseException {
super.performLiquibaseTask(liquibase);
Database database = liquibase.getDatabase();
- CommandScope syncHub = new CommandScope("syncHub");
+ CommandScope syncHub = new CommandScope(InternalSyncHubCommandStep.COMMAND_NAME);
syncHub
.addArgumentValue(InternalSyncHubCommandStep.CHANGELOG_FILE_ARG, changeLogFile)
.addArgumentValue(InternalSyncHubCommandStep.URL_ARG, database.getConnection().getURL())
|
Use internal commands for Maven History and SyncHub
DAT-<I>
|
liquibase_liquibase
|
train
|
183f783c6ceb35237c00df68787b3430fca44a7b
|
diff --git a/src/main/java/net/openhft/chronicle/map/AbstractChronicleMapBuilder.java b/src/main/java/net/openhft/chronicle/map/AbstractChronicleMapBuilder.java
index <HASH>..<HASH> 100644
--- a/src/main/java/net/openhft/chronicle/map/AbstractChronicleMapBuilder.java
+++ b/src/main/java/net/openhft/chronicle/map/AbstractChronicleMapBuilder.java
@@ -774,9 +774,8 @@ public abstract class AbstractChronicleMapBuilder<K, V,
ReplicationChannel channel) throws IOException {
for (int i = 0; i < 10; i++) {
if (file.exists() && file.length() > 0) {
- FileInputStream fis = new FileInputStream(file);
- ObjectInputStream ois = new ObjectInputStream(fis);
- try {
+ try (FileInputStream fis = new FileInputStream(file);
+ ObjectInputStream ois = new ObjectInputStream(fis)) {
VanillaChronicleMap<K, ?, ?, V, ?, ?> map =
(VanillaChronicleMap<K, ?, ?, V, ?, ?>) ois.readObject();
map.headerSize = roundUpMapHeaderSize(fis.getChannel().position());
@@ -784,8 +783,6 @@ public abstract class AbstractChronicleMapBuilder<K, V,
return establishReplication(map, singleHashReplication, channel);
} catch (ClassNotFoundException e) {
throw new IOException(e);
- } finally {
- ois.close();
}
}
if (file.createNewFile() || file.length() == 0) {
@@ -803,15 +800,12 @@ public abstract class AbstractChronicleMapBuilder<K, V,
VanillaChronicleMap<K, ?, ?, V, ?, ?> map = newMap(singleHashReplication, channel);
- FileOutputStream fos = new FileOutputStream(file);
- ObjectOutputStream oos = new ObjectOutputStream(fos);
- try {
+ try (FileOutputStream fos = new FileOutputStream(file);
+ ObjectOutputStream oos = new ObjectOutputStream(fos)) {
oos.writeObject(map);
oos.flush();
map.headerSize = roundUpMapHeaderSize(fos.getChannel().position());
map.createMappedStoreAndSegments(file);
- } finally {
- oos.close();
}
return establishReplication(map, singleHashReplication, channel);
|
try-with-resources in AbstractChronicleMapBuilder
|
OpenHFT_Chronicle-Map
|
train
|
3404f379b881b5b88df55b85722d28afe40eb478
|
diff --git a/lib/jekyll/locale/auto_page.rb b/lib/jekyll/locale/auto_page.rb
index <HASH>..<HASH> 100644
--- a/lib/jekyll/locale/auto_page.rb
+++ b/lib/jekyll/locale/auto_page.rb
@@ -2,17 +2,19 @@
module Jekyll
class Locale::AutoPage < Page
- extend Forwardable
include Locale::Helper
+ attr_reader :path
attr_accessor :data, :content, :output
- def_delegators :@canon, :site, :extname, :relative_path
def initialize(canon, locale)
setup(canon, locale)
@path = canon.path
@content = canon.content
@data = canon.data
+ @name = File.basename(@path)
+ @relative_path = canon.relative_path
+ process(@name)
end
def url
@@ -20,7 +22,7 @@ module Jekyll
end
def to_liquid
- @to_liquid ||= Locale::PageDrop.new(self)
+ @to_liquid ||= configure_payload(canon.to_liquid)
end
end
end
diff --git a/lib/jekyll/locale/drop.rb b/lib/jekyll/locale/drop.rb
index <HASH>..<HASH> 100644
--- a/lib/jekyll/locale/drop.rb
+++ b/lib/jekyll/locale/drop.rb
@@ -7,11 +7,4 @@ module Jekyll
mutable false
private def_delegator :@obj, :data, :fallback_data
end
-
- #
-
- class Locale::PageDrop < Locale::Drop
- def_delegator :@obj, :relative_path, :path
- def_delegators :@obj, :url
- end
end
diff --git a/lib/jekyll/locale/mixins/helper.rb b/lib/jekyll/locale/mixins/helper.rb
index <HASH>..<HASH> 100644
--- a/lib/jekyll/locale/mixins/helper.rb
+++ b/lib/jekyll/locale/mixins/helper.rb
@@ -46,5 +46,12 @@ module Jekyll
end
Jekyll::Utils.deep_merge_hashes(canon.data, @data)
end
+
+ def configure_payload(payload)
+ payload.to_h.tap do |data|
+ data["path"] = self.relative_path
+ data["url"] = self.url
+ end
+ end
end
end
diff --git a/lib/jekyll/locale/page.rb b/lib/jekyll/locale/page.rb
index <HASH>..<HASH> 100644
--- a/lib/jekyll/locale/page.rb
+++ b/lib/jekyll/locale/page.rb
@@ -18,7 +18,7 @@ module Jekyll
end
def to_liquid
- @to_liquid ||= Locale::PageDrop.new(self)
+ @to_liquid ||= configure_payload(super)
end
def template
|
Replace Locale::PageDrop with a Hash
|
ashmaroli_jekyll-locale
|
train
|
0c0b5badc5347a51604bbe329d882775946802aa
|
diff --git a/src/kids/cmd/cmd.py b/src/kids/cmd/cmd.py
index <HASH>..<HASH> 100644
--- a/src/kids/cmd/cmd.py
+++ b/src/kids/cmd/cmd.py
@@ -92,6 +92,24 @@ def get_obj_subcmds(obj):
return OrderedDict(subcmds)
+def get_module_resources(mod):
+ """Return probed sub module names from given module"""
+
+ path = os.path.dirname(os.path.realpath(mod.__file__))
+ prefix = kf.basename(mod.__file__, (".py", ".pyc"))
+
+ if not os.path.exists(mod.__file__):
+ import pkg_resources
+ for resource_name in pkg_resources.resource_listdir(mod.__name__, ''):
+ if resource_name.startswith("%s_" % prefix) and resource_name.endswith(".py"):
+ module_name, _ext = os.path.splitext(kf.basename(resource_name))
+ yield module_name
+
+ for f in glob.glob(os.path.join(path, '%s_*.py' % prefix)):
+ module_name, _ext = os.path.splitext(kf.basename(f))
+ yield module_name
+
+
def get_mod_subcmds(mod):
"""Fetch action in same directory in python module
@@ -103,17 +121,12 @@ def get_mod_subcmds(mod):
subcmds = get_obj_subcmds(mod)
- ##
-
path = os.path.dirname(os.path.realpath(mod.__file__))
- prefix = kf.basename(mod.__file__, (".py", ".pyc"))
-
if mod.__package__ is None:
sys.path.insert(0, os.path.dirname(path))
mod.__package__ = kf.basename(path)
- for f in glob.glob(os.path.join(path, '%s_*.py' % prefix)):
- module_name, _ext = os.path.splitext(kf.basename(f))
+ for module_name in get_module_resources(mod):
try:
mod = importlib.import_module(".%s" % module_name, mod.__package__)
except ImportError as e:
|
new: [cmd] support for discovering module commands in ``pkg_resources`` eggs.
Previously, command were not discovered if your command got packaged to
a single file zipped egg.
|
0k_kids.cmd
|
train
|
1caf33e9ba1f0fb41b3943b17fd34c268421fbbc
|
diff --git a/EventListener/FilterEventListener.php b/EventListener/FilterEventListener.php
index <HASH>..<HASH> 100644
--- a/EventListener/FilterEventListener.php
+++ b/EventListener/FilterEventListener.php
@@ -28,6 +28,7 @@
namespace whatwedo\TableBundle\EventListener;
use Doctrine\ORM\Query\Expr;
+use InvalidArgumentException;
use UnexpectedValueException;
use whatwedo\TableBundle\Event\DataLoadEvent;
use whatwedo\TableBundle\Extension\FilterExtension;
@@ -100,11 +101,19 @@ class FilterEventListener
}
$addedJoins[] = $joinAlias;
$method = 'join';
+ $conditionType = null;
+ $condition = null;
if (is_array($join)) {
+ if (sizeof($join) == 4) {
+ $conditionType = $join[2];
+ $condition = $join[3];
+ } else if (sizeof($join) != 2) {
+ throw new InvalidArgumentException(sprintf('Invalid join options supplied for "%s".', $joinAlias));
+ }
$method = $join[0];
$join = $join[1];
}
- $this->queryBuilder()->$method($join, $joinAlias);
+ $this->queryBuilder()->$method($join, $joinAlias, $conditionType, $condition);
}
$w = $filter->getType()->addToQueryBuilder(
|
feat(filter): allow setting join conditions (#<I>)
* feat(join_condition): implement suggestion
* feat(join_condition): improve parameter-handling
* fix(join_condition): improve parameter-handling
- hints from @lukasluecke
|
whatwedo_TableBundle
|
train
|
6559faa43f5f3f9a29551abc19f2a04ed2939cc7
|
diff --git a/benchmark/concurrent.js b/benchmark/concurrent.js
index <HASH>..<HASH> 100644
--- a/benchmark/concurrent.js
+++ b/benchmark/concurrent.js
@@ -3,7 +3,7 @@
// Benchmark comparing performance of promise setups (concurrent)
// To run it, do following in package path:
//
-// $ npm install Q jquery when
+// $ npm install Q jquery when kew
// $ node benchmark/concurrent.js
var generate = require('es5-ext/array/generate')
diff --git a/benchmark/one-after-another.js b/benchmark/one-after-another.js
index <HASH>..<HASH> 100644
--- a/benchmark/one-after-another.js
+++ b/benchmark/one-after-another.js
@@ -4,7 +4,7 @@
// (one by after another)
// To run it, do following in package path:
//
-// $ npm install Q jquery when
+// $ npm install Q jquery when kew
// $ node benchmark/one-after-another.js
var forEach = require('es5-ext/object/for-each')
|
Add `kew` to install command
|
medikoo_deferred
|
train
|
17126043138c0c829236cfe458c34e62bdee3cbc
|
diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -37,6 +37,7 @@
"karma-babel-preprocessor": "^8.0.0-beta.0",
"karma-browserify": "^5.2.0",
"karma-chrome-launcher": "^2.2.0",
+ "karma-firefox-launcher": "^1.1.0",
"karma-ie-launcher": "^1.0.0",
"karma-mocha": "^1.3.0",
"karma-mocha-reporter": "^2.2.5",
diff --git a/test/assertThrows.js b/test/assertThrows.js
index <HASH>..<HASH> 100644
--- a/test/assertThrows.js
+++ b/test/assertThrows.js
@@ -8,7 +8,11 @@ function assertThrows(fn, message, expose = () => {}) {
throw new Error('Should have thrown an error')
} catch (err) {
assert(typeof err !== 'undefined', 'threw undefined!')
- assert.equal(err.message, message)
+ if (message instanceof RegExp) {
+ assert(err.message.match(message))
+ } else {
+ assert.equal(err.message, message)
+ }
expose(err)
}
}
diff --git a/test/valueObjectTest.js b/test/valueObjectTest.js
index <HASH>..<HASH> 100644
--- a/test/valueObjectTest.js
+++ b/test/valueObjectTest.js
@@ -583,7 +583,7 @@ describe('ValueObject', () => {
assertThrows(
() => foo.dingbat = 'badger',
- "Cannot add property dingbat, object is not extensible"
+ /is not extensible/
)
})
@@ -593,7 +593,7 @@ describe('ValueObject', () => {
assertThrows(
() => foo.ok = 'badger',
- "Cannot assign to read only property 'ok' of object '#<Foo>'"
+ /read.only/
)
})
@@ -1108,7 +1108,7 @@ describe('ValueObject', () => {
const foo = new Foo({ x: 'hello' })
assertThrows(
() => { foo.z = 'yeah' },
- 'Cannot add property z, object is not extensible'
+ /not extensible/
)
} finally {
ValueObject.enableFreeze()
diff --git a/value-object.js b/value-object.js
index <HASH>..<HASH> 100644
--- a/value-object.js
+++ b/value-object.js
@@ -487,7 +487,9 @@ InvalidProperty.prototype.describe = function () {
}
function ValidationError(object, failures) {
- Error.captureStackTrace(this, ValidationError)
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, ValidationError)
+ }
this.object = object
this.failures = failures
this.message = functionName(object.constructor) + ' is invalid: ' + failures.describe()
|
Fix a few minor issues in firefox
|
cucumber-ltd_value-object.js
|
train
|
67e4c7e02b0a80b9c08a4034aa103954b2f9f2fb
|
diff --git a/lib/Doctrine/ODM/PHPCR/UnitOfWork.php b/lib/Doctrine/ODM/PHPCR/UnitOfWork.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/ODM/PHPCR/UnitOfWork.php
+++ b/lib/Doctrine/ODM/PHPCR/UnitOfWork.php
@@ -1689,7 +1689,7 @@ class UnitOfWork
if ($this->getDocumentById($childId)) {
$child = $this->merge($child);
} else {
- $this->persistNew($targetClass, $child, ClassMetadata::GENERATOR_TYPE_ASSIGNED, $parent);
+ $this->persistNew($targetClass, $child, null, $parent);
}
$this->computeChangeSet($targetClass, $child);
|
Don't override ID generator when persisting children
|
doctrine_phpcr-odm
|
train
|
726a75b9001d406d21679c9b46c9b861b723ad44
|
diff --git a/glooey/widget.py b/glooey/widget.py
index <HASH>..<HASH> 100644
--- a/glooey/widget.py
+++ b/glooey/widget.py
@@ -76,13 +76,18 @@ class Widget (pyglet.event.EventDispatcher, HoldUpdatesMixin):
widget hierarchy to make space for the widgets that need it, then calls
resize() on any widget that need to adapt to the new space allocation.
"""
+ # Guarantee that do_resize() is only called if the size of the widget
+ # actually changed. This is probably doesn't have a significant effect
+ # on performance, but hopefully it gives people reimplementing
+ # do_resize() less to worry about.
if self._rect is None or self._rect != new_rect:
self._rect = new_rect
self.do_resize()
- # Even if this widget didn't change size, it may be that the children
- # need to move or be rearranged. This came up for me with a custom
- # placement function, but I can imagine other scenarios as well.
+ # The children may need to be resized even if this widget doesn't. For
+ # example, consider a container that takes up the whole window. It's
+ # size won't change when a widget is added or removed from it, but it's
+ # children will still need to be resized.
self.do_resize_children()
# Try to redraw the widget. This won't do anything if the widget isn't
@@ -93,9 +98,12 @@ class Widget (pyglet.event.EventDispatcher, HoldUpdatesMixin):
"""
Change the pyglet graphics group associated with this widget.
"""
+ # Changing the group is often an expensive operation, so don't do
+ # anything unless we have to. It is assumed that do_regroup_children()
+ # depends only of self._group, so if self._group doesn't change,
+ # self.do_regroup_children() doesn't need to be called.
if self._group is None or self._group != new_group:
self._group = new_group
-
self.do_regroup()
self.do_regroup_children()
|
Elaborate on why resize() does what it does.
I tried to add a check resize() to only call do_resize_children() if the
size of the widget actually changed. But this caused all sorts of
problems children widgets not updating when they should, so I undid the
change and added a comment explaining why things need to be the way they
are.
|
kxgames_glooey
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.