hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
b7e7d59bac6b8672d882d6c34357ea42f289bc9b
|
diff --git a/anndata/readwrite/h5ad.py b/anndata/readwrite/h5ad.py
index <HASH>..<HASH> 100644
--- a/anndata/readwrite/h5ad.py
+++ b/anndata/readwrite/h5ad.py
@@ -193,7 +193,9 @@ def write_series(group, key, series, dataset_kwargs=MappingProxyType({})):
write_array(group, category_key, cats, dataset_kwargs)
write_array(group, key, codes, dataset_kwargs)
+
group[key].attrs["categories"] = group[category_key].ref
+ group[category_key].attrs["ordered"] = series.cat.ordered
else:
group[key] = series.values
@@ -357,7 +359,9 @@ def read_series(dataset) -> Union[np.ndarray, pd.Categorical]:
if "categories" in dataset.attrs:
categories = dataset.attrs["categories"]
if isinstance(categories, h5py.Reference):
- categories = dataset.parent[dataset.attrs["categories"]][...]
+ categories_dset = dataset.parent[dataset.attrs["categories"]]
+ categories = categories_dset[...]
+ ordered = categories_dset.attrs.get("ordered", False)
else:
# TODO: remove this code at some point post 0.7
# TODO: Add tests for this
@@ -368,7 +372,7 @@ def read_series(dataset) -> Union[np.ndarray, pd.Categorical]:
FutureWarning,
)
return pd.Categorical.from_codes(
- dataset[...], categories, ordered=False
+ dataset[...], categories, ordered=ordered
)
else:
return dataset[...]
diff --git a/anndata/readwrite/zarr.py b/anndata/readwrite/zarr.py
index <HASH>..<HASH> 100644
--- a/anndata/readwrite/zarr.py
+++ b/anndata/readwrite/zarr.py
@@ -109,7 +109,10 @@ def write_series(group, key, series, dataset_kwargs=MappingProxyType({})):
write_array(group, category_key, cats, dataset_kwargs)
write_array(group, key, codes, dataset_kwargs)
+
group[key].attrs["categories"] = category_key
+ # Must coerce np.bool_ to bool for json writing
+ group[category_key].attrs["ordered"] = bool(series.cat.ordered)
else:
group[key] = series.values
@@ -330,9 +333,12 @@ def read_series(dataset: zarr.Array) -> Union[np.ndarray, pd.Categorical]:
if "categories" in dataset.attrs:
categories = dataset.attrs["categories"]
if isinstance(categories, str):
+ categories_key = categories
parent_name = dataset.name.rstrip(dataset.basename)
parent = zarr.open(dataset.store)[parent_name]
- categories = parent[categories][...]
+ categories_dset = parent[categories_key]
+ categories = categories_dset[...]
+ ordered = categories_dset.attrs.get("ordered", False)
else:
# TODO: remove this code at some point post 0.7
# TODO: Add tests for this
@@ -343,7 +349,7 @@ def read_series(dataset: zarr.Array) -> Union[np.ndarray, pd.Categorical]:
FutureWarning,
)
return pd.Categorical.from_codes(
- dataset[...], categories, ordered=False
+ dataset[...], categories, ordered=ordered
)
else:
return dataset[...]
diff --git a/anndata/tests/helpers.py b/anndata/tests/helpers.py
index <HASH>..<HASH> 100644
--- a/anndata/tests/helpers.py
+++ b/anndata/tests/helpers.py
@@ -38,6 +38,7 @@ def gen_typed_df(n, index=None):
return pd.DataFrame(
dict(
cat=pd.Categorical(np.random.choice(letters, n)),
+ cat_ordered=pd.Categorical(np.random.choice(letters, n), ordered=True),
int64=np.random.randint(-50, 50, n),
float64=np.random.random(n),
uint8=np.random.randint(255, size=n, dtype="uint8"),
@@ -297,9 +298,6 @@ def are_equal_dataframe(a, b, exact=False, elem_name=None):
report_name(pd.testing.assert_frame_equal)(
a,
b,
- check_names=False,
- check_categorical=False, # check encoded values, but not codes
- # Should different orderings be allowed?
_elem_name=elem_name,
)
else:
@@ -308,7 +306,6 @@ def are_equal_dataframe(a, b, exact=False, elem_name=None):
b,
check_exact=True,
check_index_type=True,
- check_names=False,
_elem_name=elem_name,
)
|
Initial support for ordered categoricals.
* IO supported via attribute on categories
* Tests now check order of categoricals
|
theislab_anndata
|
train
|
057c80557d16c195e7d95f431147007857a0fcfe
|
diff --git a/stripe.go b/stripe.go
index <HASH>..<HASH> 100644
--- a/stripe.go
+++ b/stripe.go
@@ -426,6 +426,7 @@ func (s *BackendImplementation) Do(req *http.Request, body *bytes.Buffer, v inte
}
s.requestMetricsBuffer.inputChannel <- metrics
+ <-s.requestMetricsBuffer.done // wait for object to be placed in the circular buffer
}
}
@@ -866,6 +867,7 @@ type requestTelemetry struct {
type ringBuffer struct {
inputChannel chan requestMetrics
outputChannel chan requestMetrics
+ done chan struct{}
logger Printfer
}
@@ -952,6 +954,7 @@ func newBackendImplementation(backendType SupportedBackend, config *BackendConfi
// outputChannel.
inputChannel: make(chan requestMetrics),
outputChannel: make(chan requestMetrics, telemetryBufferSize),
+ done: make(chan struct{}),
logger: config.Logger,
}
@@ -983,6 +986,7 @@ func (r *ringBuffer) run() {
r.outputChannel <- v
}
r.logger.Printf("Enqueued message: %#v", v)
+ r.done <- struct{}{}
}
close(r.outputChannel)
|
add some hacky synchronization to wait for the circular buffer goroutine
|
stripe_stripe-go
|
train
|
c6645d0e1d5d91b5ab0a68d9db573db9c078ded9
|
diff --git a/jquery.uniform.js b/jquery.uniform.js
index <HASH>..<HASH> 100644
--- a/jquery.uniform.js
+++ b/jquery.uniform.js
@@ -768,7 +768,7 @@ Enjoy!
//noSelect v1.0
$.uniform.noSelect = function (elem) {
- var f = function () {
+ function f() {
return false;
};
|
Removing code
Per Crockford,
function f () {...}
is interpreted as
var f;
f = function f () {...}
Thus, there's no point in putting var in front.
|
AudithSoftworks_Uniform
|
train
|
bf33b145d910f148152b91e0ba5b490b470372c0
|
diff --git a/test/safe_test.rb b/test/safe_test.rb
index <HASH>..<HASH> 100644
--- a/test/safe_test.rb
+++ b/test/safe_test.rb
@@ -30,6 +30,21 @@ class FakeFSSafeTest < Minitest::Test
refute File.exist?(path)
end
+ def test_FakeFS_method_presents_persistent_fs
+ path = 'file.txt'
+
+ FakeFS do
+ File.open(path, 'w') { |f| f.write 'Yatta!' }
+ assert File.exist?(path)
+ end
+
+ refute File.exist?(path)
+
+ FakeFS do
+ assert File.exist?(path)
+ end
+ end
+
def test_FakeFS_method_returns_value_of_yield
result = FakeFS do
File.open('myfile.txt', 'w') { |f| f.write 'Yatta!' }
|
Codify the fact that independent FakeFS blocks share a persistent fake system
|
fakefs_fakefs
|
train
|
26917dbf250b662ae3c5891a50945f94266c3a89
|
diff --git a/recipe-client-addon/test/browser/browser_PreferenceExperiments.js b/recipe-client-addon/test/browser/browser_PreferenceExperiments.js
index <HASH>..<HASH> 100644
--- a/recipe-client-addon/test/browser/browser_PreferenceExperiments.js
+++ b/recipe-client-addon/test/browser/browser_PreferenceExperiments.js
@@ -197,26 +197,6 @@ add_task(withMockExperiments(withMockPreferences(async function(mockExperiments,
stop.restore();
})));
-// startObserver should observe changes to the default preference value.
-add_task(withMockExperiments(withMockPreferences(async function(mockExperiments, mockPreferences) {
- const stop = sinon.stub(PreferenceExperiments, "stop");
- mockPreferences.set("fake.preference", "startvalue", "default");
-
- // NOTE: startObserver does not modify the pref
- PreferenceExperiments.startObserver("test", "fake.preference", "experimentvalue");
-
- // Setting it to the experimental value should not trigger the call.
- DefaultPreferences.set("fake.preference", "experimentvalue");
- ok(!stop.called, "Changing to the experimental pref value did not trigger the observer");
-
- // Setting it to something different should trigger the call.
- DefaultPreferences.set("fake.preference", "newvalue");
- ok(stop.called, "Changing to a different value triggered the observer");
-
- PreferenceExperiments.stopAllObservers();
- stop.restore();
-})));
-
add_task(withMockExperiments(async function testHasObserver() {
PreferenceExperiments.startObserver("test", "fake.preference", "experimentValue");
|
recipe-client-addon: Remove watching default branch test; it cannot pass.
The implementation of Preferences.jsm doesn't support watching a
preference on the default branch, only on the combined set of
preferences. This test cannot pass.
|
mozilla_normandy
|
train
|
af7803703ff991ec44a74cc621a3930f289560c3
|
diff --git a/src/Pin.php b/src/Pin.php
index <HASH>..<HASH> 100644
--- a/src/Pin.php
+++ b/src/Pin.php
@@ -60,7 +60,7 @@ class Pin
*/
public function __construct($id, DateTime $time, Layout $layout)
{
- $this->time = $time->copy();
+ $this->time = clone $time;
$this->layout = $layout;
$this->id = $id;
}
diff --git a/src/Pin/Notification/Generic.php b/src/Pin/Notification/Generic.php
index <HASH>..<HASH> 100644
--- a/src/Pin/Notification/Generic.php
+++ b/src/Pin/Notification/Generic.php
@@ -52,7 +52,7 @@ class Generic extends Base
*/
public function setTime(DateTime $time)
{
- $this->time = $time->copy();
+ $this->time = clone $time;
return $this;
}
diff --git a/src/Pin/Reminder/Generic.php b/src/Pin/Reminder/Generic.php
index <HASH>..<HASH> 100644
--- a/src/Pin/Reminder/Generic.php
+++ b/src/Pin/Reminder/Generic.php
@@ -31,7 +31,7 @@ class Generic extends Base
{
$this->title = $title;
$this->tinyIcon = $tinyIcon;
- $this->time = $time->copy();
+ $this->time = clone $time;
}
/**
|
Clone not copy for base DateTime objects.
|
valorin_pinpusher
|
train
|
52cfcd25d64fec457e910903d82ee3f358a65b15
|
diff --git a/test/functional/client_side_encryption_prose_tests.js b/test/functional/client_side_encryption_prose_tests.js
index <HASH>..<HASH> 100644
--- a/test/functional/client_side_encryption_prose_tests.js
+++ b/test/functional/client_side_encryption_prose_tests.js
@@ -487,7 +487,7 @@ describe('Client Side Encryption Prose Tests', function() {
return this.client
.db(keyVaultDbName)
.collection(keyVaultCollName)
- .insertOne(limitsKey);
+ .insertOne(limitsKey, { w: 'majority' });
})
);
});
@@ -754,7 +754,7 @@ describe('Client Side Encryption Prose Tests', function() {
return this.client
.db(keyVaultDbName)
.collection(keyVaultCollName)
- .insertOne(externalKey);
+ .insertOne(externalKey, { w: 'majority' });
})
);
});
|
test(encryption): ensure that inserting to key vault uses w:majority
|
mongodb_node-mongodb-native
|
train
|
5ef143db51e7ac57286ef2bee13b9e0c2706b0ea
|
diff --git a/lib/praxis/file_group.rb b/lib/praxis/file_group.rb
index <HASH>..<HASH> 100644
--- a/lib/praxis/file_group.rb
+++ b/lib/praxis/file_group.rb
@@ -5,8 +5,14 @@ module Praxis
attr_reader :groups, :base
def initialize(base, &block)
+ if base.nil?
+ raise ArgumentError, "base must not be nil." \
+ "Are you missing a call Praxis::Application.instance.setup?"
+ end
+
+
@groups = Hash.new
- @base = base
+ @base = Pathname.new(base)
if block_given?
self.instance_eval(&block)
@@ -18,17 +24,15 @@ module Praxis
end
def map(name, pattern, &block)
+ return unless base.exist?
+
if block_given?
@groups[name] = FileGroup.new(base + pattern, &block)
else
@groups[name] ||= []
- return unless base.exist?
- file_enum = base.find.to_a
- files = file_enum.select do |file|
- path = file.relative_path_from(base)
- file.file? && path.fnmatch?(pattern, File::FNM_PATHNAME)
- end
- files.sort_by { |file| [file.to_s.split('/').size, file.to_s] }.each { |file| @groups[name] << file }
+ files = Pathname.glob(base+pattern).select { |file| file.file? }
+ files.sort_by! { |file| [file.to_s.split('/').size, file.to_s] }
+ files.each { |file| @groups[name] << file }
end
end
diff --git a/spec/praxis/file_group_spec.rb b/spec/praxis/file_group_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/praxis/file_group_spec.rb
+++ b/spec/praxis/file_group_spec.rb
@@ -4,6 +4,11 @@ describe Praxis::FileGroup do
let(:app) { Praxis::Application.instance }
let(:layout) { app.file_layout }
+ context '#initialize' do
+ it 'raises an error if given nil for the base path' do
+ expect { Praxis::FileGroup.new(nil) }.to raise_error(ArgumentError)
+ end
+ end
context '#base' do
it 'returns the base path for the group' do
expect(layout[:design].base.to_s).to eq(File.join(app.root, 'design/'))
|
Rework FileGroup to use Pathname.glob instead of Pathname#find.
Should close #<I>.
|
praxis_praxis
|
train
|
037fd00f9b76f086f84620fbd8ba919548d177a2
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,22 +1,21 @@
#! /usr/bin/env python
#
from distutils.core import setup
-import version
+from ginga.version import version
setup(
name = "Ginga",
- version = version.version,
+ version = version,
author = "Eric Jeschke",
author_email = "eric@naoj.org",
description = ("An astronomical (FITS) image viewer."),
license = "BSD",
keywords = "FITS image viewer astronomy",
url = "http://ejeschke.github.com/ginga",
- package_dir = { '': 'Ginga' },
packages = ['ginga', 'ginga.gtkw', 'ginga.gtkw.plugins', 'ginga.gtkw.tests',
'ginga.qtw', 'ginga.qtw.plugins', 'ginga.qtw.tests',
'ginga.misc', 'ginga.misc.plugins',
- 'ginga.icons', 'ginga.tests', 'ginga.util',
+ 'ginga.icons', 'ginga.util',
'ginga.doc'],
package_data = { 'icons': ['*.ppm', '*.png'], 'doc': ['manual/*.html'], },
scripts = ['ginga.py'],
|
Fixes to distutils script
|
ejeschke_ginga
|
train
|
ef593fa8d820f5ad8468b4f5f34c680923cdad15
|
diff --git a/src/Symfony/Component/Finder/Iterator/FilePathsIterator.php b/src/Symfony/Component/Finder/Iterator/FilePathsIterator.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/Finder/Iterator/FilePathsIterator.php
+++ b/src/Symfony/Component/Finder/Iterator/FilePathsIterator.php
@@ -41,6 +41,11 @@ class FilePathsIterator extends \ArrayIterator
private $subPathname;
/**
+ * @var SplFileInfo
+ */
+ private $current;
+
+ /**
* @param array $paths List of paths returned by shell command
* @param string $baseDir Base dir for relative path building
*/
@@ -70,21 +75,27 @@ class FilePathsIterator extends \ArrayIterator
*/
public function current()
{
- return new SplFileInfo(parent::current(), $this->subPath, $this->subPathname);
+ return $this->current;
+ }
+
+ /**
+ * @return string
+ */
+ public function key()
+ {
+ return $this->current->getPathname();
}
public function next()
{
parent::next();
-
- $this->buildSubPath();
+ $this->buildProperties();
}
public function rewind()
{
parent::rewind();
-
- $this->buildSubPath();
+ $this->buildProperties();
}
/**
@@ -103,7 +114,7 @@ class FilePathsIterator extends \ArrayIterator
return $this->subPathname;
}
- private function buildSubPath()
+ private function buildProperties()
{
$absolutePath = parent::current();
@@ -114,5 +125,7 @@ class FilePathsIterator extends \ArrayIterator
} else {
$this->subPath = $this->subPathname = '';
}
+
+ $this->current = new SplFileInfo(parent::current(), $this->subPath, $this->subPathname);
}
}
|
[Finder] Fixed iterator keys
|
symfony_symfony
|
train
|
ed955d51807e05d17d9546eedbc5cba129d00ed2
|
diff --git a/fake_filesystem_test.py b/fake_filesystem_test.py
index <HASH>..<HASH> 100755
--- a/fake_filesystem_test.py
+++ b/fake_filesystem_test.py
@@ -1570,19 +1570,29 @@ class FakeOsModuleTest(FakeOsModuleTestBase):
def testChangeCaseInCaseInsensitiveFileSystem(self):
"""Can use `rename()` to change filename case in a case-insensitive
file system."""
- self.skipRealFs()
- self.filesystem.is_case_sensitive = False
- directory = 'xyzzy'
- old_file_path = '/%s/fileName' % directory
- new_file_path = '/%s/FileNAME' % directory
- self.filesystem.CreateFile(old_file_path, contents='test contents')
- self.assertEqual(old_file_path,
- self.filesystem.NormalizeCase(old_file_path))
+ self.testCaseInsensitiveFs()
+ old_file_path = self.makePath('fileName')
+ new_file_path = self.makePath('FileNAME')
+ self.createFile(old_file_path, contents='test contents')
+ if not self.useRealFs():
+ self.assertEqual(old_file_path,
+ self.filesystem.NormalizeCase(old_file_path))
self.os.rename(old_file_path, new_file_path)
self.assertTrue(self.os.path.exists(old_file_path))
self.assertTrue(self.os.path.exists(new_file_path))
- self.assertEqual(new_file_path,
- self.filesystem.NormalizeCase(old_file_path))
+ if not self.useRealFs():
+ self.assertEqual(new_file_path,
+ self.filesystem.NormalizeCase(old_file_path))
+
+ def testRenameSymlinkWithChangedCase(self):
+ # Regression test for #313
+ self.testCaseInsensitiveFs()
+ self.skipIfSymlinkNotSupported()
+ link_path = self.makePath('link')
+ self.os.symlink(self.base_path, link_path)
+ link_path = self.os.path.join(link_path, 'link')
+ link_path_upper = self.makePath('link', 'LINK')
+ self.os.rename(link_path_upper, link_path)
def testRenameDirectory(self):
"""Can rename a directory to an unused name."""
@@ -4228,7 +4238,7 @@ class FakePathModuleTest(TestCase):
self.assertFalse(self.path.islink('foo'))
# An object can be both a link and a file or file, according to the
- # comments in Python!Lib!posixpath.py.
+ # comments in Python/Lib/posixpath.py.
self.assertTrue(self.path.islink('foo!link_to_file'))
self.assertTrue(self.path.isfile('foo!link_to_file'))
@@ -4237,6 +4247,8 @@ class FakePathModuleTest(TestCase):
self.assertFalse(self.path.islink('it_dont_exist'))
+ @unittest.skipIf(TestCase.is_windows and sys.version_info < (3, 3),
+ 'Links are not supported under Windows before Python 3.3')
def testIsLinkCaseSensitive(self):
# Regression test for #306
self.filesystem.is_case_sensitive = False
diff --git a/pyfakefs/fake_filesystem.py b/pyfakefs/fake_filesystem.py
index <HASH>..<HASH> 100644
--- a/pyfakefs/fake_filesystem.py
+++ b/pyfakefs/fake_filesystem.py
@@ -685,13 +685,16 @@ class FakeDirectory(FakeFile):
Raises:
KeyError: if no child exists by the specified name.
"""
+ pathname_name = self._normalized_entryname(pathname_name)
+ return self.contents[pathname_name]
+
+ def _normalized_entryname(self, pathname_name):
if not self.filesystem.is_case_sensitive:
matching_names = [name for name in self.contents
- if name.lower() == pathname_name.lower()]
+ if name.lower() == pathname_name.lower()]
if matching_names:
pathname_name = matching_names[0]
-
- return self.contents[pathname_name]
+ return pathname_name
def RemoveEntry(self, pathname_name, recursive=True):
"""Removes the specified child file or directory.
@@ -707,7 +710,8 @@ class FakeDirectory(FakeFile):
OSError: if user lacks permission to delete the file,
or (Windows only) the file is open.
"""
- entry = self.contents[pathname_name]
+ pathname_name = self._normalized_entryname(pathname_name)
+ entry = self.GetEntry(pathname_name)
if self.filesystem.is_windows_fs:
if entry.st_mode & PERM_WRITE == 0:
raise OSError(errno.EACCES, 'Trying to remove object '
|
Fixed another problem with case-insensitive filesystem
- fixes #<I>
|
jmcgeheeiv_pyfakefs
|
train
|
a0c2e6c99c6300798005bc82795f4889d83aaed7
|
diff --git a/src/plugins/CachePluginFactory.js b/src/plugins/CachePluginFactory.js
index <HASH>..<HASH> 100644
--- a/src/plugins/CachePluginFactory.js
+++ b/src/plugins/CachePluginFactory.js
@@ -12,7 +12,7 @@ class CachePluginFactory {
if (!this.plugins[target]) {
this.plugins[target] = new CachePlugin();
}
- compiler.apply(this.plugins[target]);
+ this.plugins[target].apply(compiler);
if (this.dependencies[target]) {
compiler._lastCompilationFileDependencies = this.dependencies[target].file;
compiler._lastCompilationContextDependencies = this.dependencies[target].context;
diff --git a/src/plugins/ProgressPluginFactory.js b/src/plugins/ProgressPluginFactory.js
index <HASH>..<HASH> 100644
--- a/src/plugins/ProgressPluginFactory.js
+++ b/src/plugins/ProgressPluginFactory.js
@@ -8,7 +8,7 @@ class ProgressPluginFactory {
}
addPlugin(compiler, options) {
- compiler.apply(new ProgressPlugin({ profile: options.profile }));
+ (new ProgressPlugin({ profile: options.profile })).apply(compiler);
}
}
|
Fix "Tapable.apply is deprecated" warning (fixes #<I>) (#<I>)
Fix ProgressPluginFactory and CachePluginFactory
|
webpack-contrib_grunt-webpack
|
train
|
25d6640b71eefca97dbd7a5166aee80cc9be7e31
|
diff --git a/polyaxon/db/migrations/0021_auto_20190418_1600_v05.py b/polyaxon/db/migrations/0021_auto_20190418_1600_v05.py
index <HASH>..<HASH> 100644
--- a/polyaxon/db/migrations/0021_auto_20190418_1600_v05.py
+++ b/polyaxon/db/migrations/0021_auto_20190418_1600_v05.py
@@ -55,6 +55,18 @@ def migrate_tensorboard_jobs_config(apps, schema_editor):
TensorboardJob.objects.update(content=ExpressionWrapper(F('config'), output_field=str))
+def migrate_experimentgroup_hptuning(apps, schema_editor):
+ ExperimentGroup = apps.get_model('db', 'ExperimentGroup')
+
+ groups = []
+ for group in ExperimentGroup.objects.exclude(hptuning__early_stopping=None):
+ hptuning = group.hptuning
+ [e.pop('policy', None) for e in hptuning['early_stopping']]
+ group.hptuning = hptuning
+ groups.append(group)
+ ExperimentGroup.objects.bulk_update(groups, ['hptuning'])
+
+
class Migration(migrations.Migration):
dependencies = [
('db', '0020_auto_20190307_1611'),
@@ -342,5 +354,6 @@ class Migration(migrations.Migration):
migrations.RunPython(migrate_notebook_jobs_config),
migrations.RunPython(migrate_tensorboard_jobs_config),
migrations.RunPython(migrate_experimentgroup_config),
+ migrations.RunPython(migrate_experimentgroup_hptuning),
migrations.RunPython(create_cluster_owner),
]
|
Add migration for hptuning
|
polyaxon_polyaxon
|
train
|
01cffce2d762a2e948613ae4a8aadb0904649a04
|
diff --git a/bin/MigrationRunnerCommand.php b/bin/MigrationRunnerCommand.php
index <HASH>..<HASH> 100644
--- a/bin/MigrationRunnerCommand.php
+++ b/bin/MigrationRunnerCommand.php
@@ -56,6 +56,7 @@ class MigrationRunnerCommand extends Command
->addOption('db_pass', 'S', InputOption::VALUE_REQUIRED, 'Database password')
->addOption('db_host', 'H', InputOption::VALUE_REQUIRED, 'Database host')
->addOption('db_port', 'P', InputOption::VALUE_REQUIRED, 'Database port')
+ ->addOption('db_driver', 'D', InputOption::VALUE_REQUIRED, 'Database driver')
->addOption('force', 'f', InputOption::VALUE_NONE, 'Force confirmation');
}
@@ -73,6 +74,7 @@ class MigrationRunnerCommand extends Command
$this->dbConfig['pass'] = $this->input->getOption('db_pass') ?: Config::getValue('db', 'pass');
$this->dbConfig['host'] = $this->input->getOption('db_host') ?: Config::getValue('db', 'host');
$this->dbConfig['port'] = $this->input->getOption('db_port') ?: Config::getValue('db', 'port');
+ $this->dbConfig['driver'] = $this->input->getOption('db_driver') ?: Config::getValue('db', 'driver');
$this->migrate();
}
@@ -200,8 +202,9 @@ class MigrationRunnerCommand extends Command
private function connectToDatabase(): Db
{
$dbConfig = Objects::toString($this->dbConfig);
+ $db = new Db(false);
$this->output->write("<info>Connecting to db {$dbConfig}... </info>");
- $db = Db::getInstance();
+ $db->connectDb($this->dbConfig);
$this->output->writeln('<comment>DONE</comment>');
return $db;
}
|
[migrations] Added driver db param.
|
letsdrink_ouzo
|
train
|
124d5e02b07357f64c082926508eeee326a53731
|
diff --git a/lib/jets/build/linux_ruby.rb b/lib/jets/build/linux_ruby.rb
index <HASH>..<HASH> 100644
--- a/lib/jets/build/linux_ruby.rb
+++ b/lib/jets/build/linux_ruby.rb
@@ -194,7 +194,7 @@ class Jets::Build
end
def excludes
- excludes = %w[.git tmp log]
+ excludes = %w[.git tmp log spec]
excludes += get_excludes("#{full(tmp_app_root)}/.gitignore")
excludes += get_excludes("#{full(tmp_app_root)}/.dockerignore")
excludes = excludes.reject do |p|
|
exclude spec folder from zipfile packaging
|
tongueroo_jets
|
train
|
2f449587d9e0f05170813ef786e380ad151d4cc6
|
diff --git a/nodeApps/generateSignedFile.js b/nodeApps/generateSignedFile.js
index <HASH>..<HASH> 100755
--- a/nodeApps/generateSignedFile.js
+++ b/nodeApps/generateSignedFile.js
@@ -9,6 +9,7 @@ const git = require('simple-git')()
const releaseBranch = process.env.releaseBranch || 'staging'
const name = process.env.npm_package_name
const version = process.env.npm_package_version
+const distPath = path.resolve(process.cwd(), '/dist')
const checkPrerequisites = callback => {
if (!process.env.npm_package_name) return callback('ERROR: run this as an npm script (npm run release)')
@@ -43,7 +44,7 @@ const checkBranch = callback => {
}
const checkAlreadyReleased = callback => {
- const fullPath = path.resolve(__dirname, `../dist/${name}.min.${version}.js`)
+ const fullPath = path.resolve(`${distPath}/${name}.min.${version}.js`)
if (fs.existsSync(fullPath)) {
return callback(`Already exported ${name}.min.${version}.js`)
}
@@ -67,8 +68,8 @@ const getSignature = (file, callback) => {
})
}
-const getSignedStagingFile = getSignature.bind(null, `${name}.staging.min.js`)
-const getSignedProductionFile = getSignature.bind(null, `${name}.min.js`)
+const getSignedStagingFile = getSignature.bind(null, `${distPath}/${name}.staging.min.js`)
+const getSignedProductionFile = getSignature.bind(null, `${distPath}/${name}.min.js`)
const updateChangelog = (versionedFile, signature, callback) => {
fs.readFile('CHANGELOG.md', 'utf-8', (readErr, contents) => {
diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "deployment-helpers",
- "version": "1.7.0",
+ "version": "1.7.1",
"description": "A collection of scripts that can be used as part of a deployment process.",
"main": "src/utils",
"bin": {
diff --git a/src/utils.js b/src/utils.js
index <HASH>..<HASH> 100644
--- a/src/utils.js
+++ b/src/utils.js
@@ -1,6 +1,5 @@
// utils for building, not needed in the built output
-const path = require('path')
const childProcess = require('child_process')
const devUtils = module.exports = {}
@@ -15,15 +14,13 @@ devUtils.getIntegrity = (file, callback) => {
devUtils.createVersionedDistFile = (file, callback) => {
if (!process.env.npm_package_version) return callback('Version missing - must run this as an npm script')
const versionedFile = file.replace('.js', `.${process.env.npm_package_version}.js`)
- const distPath = path.resolve(__dirname, '../dist')
- const cmd = `cp ${distPath}/${file} ${distPath}/${versionedFile}`
+ const cmd = `cp ${file} ${versionedFile}`
childProcess.exec(cmd, err => {
callback(err, versionedFile)
})
}
devUtils.getSignature = (file, callback) => {
- const distPath = path.resolve(__dirname, '../dist')
- const cmd = `cat ${distPath}/${file} | openssl dgst -sha256 -binary | openssl enc -base64 -A`
+ const cmd = `cat ${file} | openssl dgst -sha256 -binary | openssl enc -base64 -A`
childProcess.exec(cmd, callback)
}
diff --git a/test/unit/utilsTest.js b/test/unit/utilsTest.js
index <HASH>..<HASH> 100644
--- a/test/unit/utilsTest.js
+++ b/test/unit/utilsTest.js
@@ -1,5 +1,4 @@
const utils = require('../../src/utils')
-const path = require('path')
const childProcess = require('child_process')
describe('utils', function () {
@@ -16,7 +15,6 @@ describe('utils', function () {
beforeEach(function () {
callback = sandbox.stub()
sandbox.stub(childProcess, 'exec')
- sandbox.stub(path, 'resolve').returns('PATH')
})
describe('when we have version', function () {
@@ -28,7 +26,7 @@ describe('utils', function () {
it('copies the file', function () {
expect(childProcess.exec).to.have.been.calledOnce()
- .and.calledWith('cp PATH/foo.js PATH/foo.VERSION.js')
+ .and.calledWith('cp foo.js foo.VERSION.js')
})
it('yields versioned file name', function () {
@@ -93,7 +91,6 @@ describe('utils', function () {
beforeEach(function () {
callback = sandbox.stub()
- sandbox.stub(path, 'resolve')
sandbox.stub(childProcess, 'exec').yields(null, 'SIGNATURE')
})
|
:wrench: fix relative path
left in __dir when I copied this from payframe, which made files relative to this
instead of relative to cwd
|
holidayextras_deployment-helpers
|
train
|
030fa37bdcff945696719e6710fdfd38f5b28345
|
diff --git a/macroeco/models/test_distributions.py b/macroeco/models/test_distributions.py
index <HASH>..<HASH> 100644
--- a/macroeco/models/test_distributions.py
+++ b/macroeco/models/test_distributions.py
@@ -369,6 +369,40 @@ class TestPlnorm(TestCase):
test = plnorm.pmf([0, 50, 1000], 2.34, 5)
assert_array_almost_equal(md_res, test)
+ # Unit test from test_macroeco_distributions
+
+ # Test values for Poisson lognomal are chosen from Table 1 and Table 2
+ # in Grundy Biometrika 38:427-434.
+ # In Table 1 the values are deducted from 1 which give p(0).
+ pln_table1 = [[-2.0, 2, '0.9749'],
+ [-2.0, 8, '0.9022'],
+ [-2.0, 16, '0.8317'],
+ [0.5, 2, '0.1792'],
+ [0.5, 8, '0.2908'],
+ [0.5, 16, '0.3416'],
+ [3, 2, '0.0000'],
+ [3, 8, '0.0069'],
+ [3, 16, '0.0365']]
+
+ pln_table2 = [[-2.0, 2, '0.0234'],
+ [-2.0, 8, '0.0538'],
+ [-2.0, 16, '0.0593'],
+ [0.5, 2, '0.1512'],
+ [0.5, 8, '0.1123'],
+ [0.5, 16, '0.0879'],
+ [3, 2, '0.0000'],
+ [3, 8, '0.0065'],
+ [3, 16, '0.0193']]
+
+ for vals in pln_table1:
+ test = plnorm.pmf(0, np.log(10 ** vals[0]), vals[1] ** .5)
+ assert_almost_equal(test, float(vals[2]), decimal=4)
+
+ for vals in pln_table2:
+ test = plnorm.pmf(1, np.log(10 ** vals[0]), vals[1] ** .5)
+ assert_almost_equal(test, float(vals[2]), decimal=4)
+
+
def test_cdf(self):
# Test against R VGAM fxn: ppolono(c(0, 15, 10000), .1, 2)
@@ -396,6 +430,7 @@ class TestPlnorm(TestCase):
md_res = (1.3195580310886075, 1.1876019842774048)
assert_array_almost_equal(md_res, fits, decimal=4)
+
def test_rank(self):
pass
|
Unit testing plnorm against Grundy values
|
jkitzes_macroeco
|
train
|
d401ccef51be93401194e86d866993851a8cddf6
|
diff --git a/mockserver-core/src/main/java/org/mockserver/mock/MockServer.java b/mockserver-core/src/main/java/org/mockserver/mock/MockServer.java
index <HASH>..<HASH> 100644
--- a/mockserver-core/src/main/java/org/mockserver/mock/MockServer.java
+++ b/mockserver-core/src/main/java/org/mockserver/mock/MockServer.java
@@ -51,7 +51,11 @@ public class MockServer extends EqualsHashCodeToString {
for (Expectation expectation : expectations) {
if (expectation.matches(httpRequest)) {
if (!expectation.getTimes().greaterThenZero()) {
- this.expectations.remove(expectation);
+ synchronized (this.expectations) {
+ if (this.expectations.contains(expectation)) {
+ this.expectations.remove(expectation);
+ }
+ }
}
return expectation.getHttpResponse();
}
@@ -63,7 +67,11 @@ public class MockServer extends EqualsHashCodeToString {
if (httpRequest != null) {
for (Expectation expectation : new ArrayList<>(expectations)) {
if (expectation.matches(httpRequest)) {
- expectations.remove(expectation);
+ synchronized (this.expectations) {
+ if (this.expectations.contains(expectation)) {
+ this.expectations.remove(expectation);
+ }
+ }
}
}
} else {
@@ -72,7 +80,9 @@ public class MockServer extends EqualsHashCodeToString {
}
public void reset() {
- expectations.clear();
+ synchronized (this.expectations) {
+ this.expectations.clear();
+ }
}
public void dumpToLog(HttpRequest httpRequest) {
diff --git a/mockserver-core/src/main/java/org/mockserver/proxy/filters/LogFilter.java b/mockserver-core/src/main/java/org/mockserver/proxy/filters/LogFilter.java
index <HASH>..<HASH> 100644
--- a/mockserver-core/src/main/java/org/mockserver/proxy/filters/LogFilter.java
+++ b/mockserver-core/src/main/java/org/mockserver/proxy/filters/LogFilter.java
@@ -53,7 +53,9 @@ public class LogFilter implements ProxyResponseFilter {
}
public void reset() {
- requestResponseLog.clear();
+ synchronized (this.requestResponseLog) {
+ requestResponseLog.clear();
+ }
}
public void clear(HttpRequest httpRequest) {
@@ -61,7 +63,9 @@ public class LogFilter implements ProxyResponseFilter {
HttpRequestMatcher httpRequestMatcher = matcherBuilder.transformsToMatcher(httpRequest);
for (HttpRequest key : new LinkedHashSet<>(requestResponseLog.keySet())) {
if (httpRequestMatcher.matches(key)) {
- requestResponseLog.removeAll(key);
+ synchronized (this.requestResponseLog) {
+ requestResponseLog.removeAll(key);
+ }
}
}
} else {
|
fixing issue where two tests are both resetting of deleting entries from the mock server which causes an exception
|
jamesdbloom_mockserver
|
train
|
da5d251dac01b36eaf20fd7db018a50c37ae4e8e
|
diff --git a/core/src/main/java/com/github/srec/jemmy/JemmyDSL.java b/core/src/main/java/com/github/srec/jemmy/JemmyDSL.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/github/srec/jemmy/JemmyDSL.java
+++ b/core/src/main/java/com/github/srec/jemmy/JemmyDSL.java
@@ -42,6 +42,7 @@ import org.netbeans.jemmy.operators.JTextComponentOperator;
import org.netbeans.jemmy.operators.JTextFieldOperator;
import org.netbeans.jemmy.operators.JToggleButtonOperator;
import org.netbeans.jemmy.operators.Operator;
+import org.netbeans.jemmy.operators.Operator.StringComparator;
import org.netbeans.jemmy.util.NameComponentChooser;
import java.awt.FontMetrics;
@@ -89,6 +90,7 @@ import static org.apache.commons.lang.StringUtils.isBlank;
*/
public class JemmyDSL {
private static final Logger logger = Logger.getLogger(JemmyDSL.class);
+ private static final StringComparator comparator = new Operator.DefaultStringComparator(true,false);
public enum ComponentType {
text_field(JTextFieldOperator.class, JTextField.class),
@@ -1500,16 +1502,21 @@ public class JemmyDSL {
clickMenu(texts);
return this;
}
-
+
public MenuBar clickMenu(String... texts) {
if (texts.length == 0)
return this;
component.showMenuItem(texts[0]);
for (int i = 1; i < texts.length; i++) {
String text = texts[i];
- new JMenuOperator(currentWindow().getComponent(), texts[i - 1]).showMenuItem(new String[]{text});
+ JMenuOperator jmenu = new JMenuOperator(currentWindow().getComponent(), texts[i - 1]);
+ jmenu.setComparator(comparator);
+ jmenu.showMenuItem(new String[]{text});
}
- new JMenuItemOperator(currentWindow().getComponent(), texts[texts.length - 1]).clickMouse();
+ String text = texts[texts.length - 1];
+ ComponentChooser chooser = new JMenuItemOperator.JMenuItemByLabelFinder(text, comparator);
+ new JMenuItemOperator(currentWindow().getComponent(), chooser).clickMouse();
+
return this;
}
|
Compare Menu's texts using equals
|
vtatai_srec
|
train
|
b7eb8161a6374a651018a5e1e0952e661edccd94
|
diff --git a/src/AdldapAuthUserProvider.php b/src/AdldapAuthUserProvider.php
index <HASH>..<HASH> 100644
--- a/src/AdldapAuthUserProvider.php
+++ b/src/AdldapAuthUserProvider.php
@@ -67,14 +67,14 @@ class AdldapAuthUserProvider extends EloquentUserProvider
$username = Arr::get($username, 0);
}
- // Get the password input array key.
- $key = $this->getPasswordKey();
+ // Retrieve the password from the submitted credentials.
+ $password = Arr::get($credentials, $this->getPasswordKey());
// Try to log the user in.
- if ($this->authenticate($username, $credentials[$key])) {
+ if (! is_null($password) && $this->authenticate($username, $password)) {
// Login was successful, we'll create a new
// Laravel model with the Adldap user.
- return $this->getModelFromAdldap($user, $credentials[$key]);
+ return $this->getModelFromAdldap($user, $password);
}
}
}
|
Validate that the password isn't null.
|
Adldap2_Adldap2-Laravel
|
train
|
8b259b4f1474c535fe9f206559a8f7e7165486b2
|
diff --git a/packages/bonde-styleguide/src/layout/Footer/Footer.js b/packages/bonde-styleguide/src/layout/Footer/Footer.js
index <HASH>..<HASH> 100644
--- a/packages/bonde-styleguide/src/layout/Footer/Footer.js
+++ b/packages/bonde-styleguide/src/layout/Footer/Footer.js
@@ -29,17 +29,17 @@ const Footer = styled(({ children, className, btnHelpLabel, btnHelpClick }) => (
<FooterContent>
{children}
</FooterContent>
- {btnHelpLabel && btnHelpClick && (
- <Button dark onClick={btnHelpClick}>{btnHelpLabel}</Button>
- )}
</div>
))`{
+ position: fixed;
display: flex;
align-items: center;
height: 94px;
background: #000;
- padding: 0 150px 0;
+ padding: 0 150px;
overflow: hidden;
+ bottom: 0;
+ width: 100%;
}`
const { oneOfType, node, func, string } = PropTypes
diff --git a/packages/bonde-styleguide/src/layout/Header/Header.js b/packages/bonde-styleguide/src/layout/Header/Header.js
index <HASH>..<HASH> 100644
--- a/packages/bonde-styleguide/src/layout/Header/Header.js
+++ b/packages/bonde-styleguide/src/layout/Header/Header.js
@@ -1,5 +1,13 @@
+import React from 'react'
import styled from 'styled-components'
+const Fixed = styled.div`{
+ position: 'fixed';
+ top: '0';
+ width: '100%';
+ zIndex: '2';
+}`
+
const Header = styled.div`{
position: relative;
width: inherit;
@@ -9,4 +17,8 @@ const Header = styled.div`{
}`
/* @component */
-export default Header
+export default ({ id, ...props }) => (
+ <Fixed id={id}>
+ <Header {...props} />
+ </Fixed>
+)
diff --git a/packages/bonde-styleguide/src/layout/Page/Page.js b/packages/bonde-styleguide/src/layout/Page/Page.js
index <HASH>..<HASH> 100644
--- a/packages/bonde-styleguide/src/layout/Page/Page.js
+++ b/packages/bonde-styleguide/src/layout/Page/Page.js
@@ -4,18 +4,20 @@ import styled from 'styled-components'
const PageContainer = styled.div`{
position: relative;
- padding: 32px 155px 0;
+ padding-top: ${props => props.top ? `calc(${props.top}px + 32px)` : '32px'};
+ padding-left: 155px;
+ padding-bottom: 172px;
+ padding-right: 155px;
background-color: ${props => props.bgColor || '#fff'};
}`
const PageContent = styled.div`{
position: relative;
display: flex;
- min-height: 100vh;
}`
-const Page = ({ children, menuComponent: MenuComponent, bgColor }) => (
- <PageContainer bgColor={bgColor}>
+const Page = ({ children, menuComponent: MenuComponent, bgColor, top }) => (
+ <PageContainer bgColor={bgColor} top={top}>
{MenuComponent && <MenuComponent />}
<PageContent>
{children}
|
feat(styleguide): prepare header footer and page to work with fixed layout
|
nossas_bonde-client
|
train
|
da606544820ec0c71757f2d1caf766989e164027
|
diff --git a/docs/conf.py b/docs/conf.py
index <HASH>..<HASH> 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -48,9 +48,9 @@ copyright = u'2013, Chris Cornutt'
# built documents.
#
# The short X.Y version.
-version = '1.5'
+version = '1.6'
# The full version, including alpha/beta/rc tags.
-release = '1.5'
+release = '1.6'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
|
bumping docs version up to <I>
|
enygma_expose
|
train
|
8a83d0eea8d9350cd518cf814f4465abb066e754
|
diff --git a/bower.json b/bower.json
index <HASH>..<HASH> 100644
--- a/bower.json
+++ b/bower.json
@@ -1,7 +1,7 @@
{
"name": "taboo",
"main": "taboo.js",
- "version": "0.0.0",
+ "version": "0.0.1",
"homepage": "https://github.com/mrmagooey/taboo",
"description": "tabular data",
"license": "MIT",
@@ -14,6 +14,6 @@
"tests"
],
"devDependencies": {
- "jasmine": "~2.1.3"
+ "Faker": "~2.1.1"
}
}
diff --git a/jasmine/spec/tabooSpec.js b/jasmine/spec/tabooSpec.js
index <HASH>..<HASH> 100644
--- a/jasmine/spec/tabooSpec.js
+++ b/jasmine/spec/tabooSpec.js
@@ -25,7 +25,6 @@ describe("Table", function() {
});
it("should be able to add row objects", function(){
- table.addColumns(['name'], ['color']);
table.addRows(dogs);
expect(table.getRows().length).toEqual(3);
});
@@ -35,6 +34,15 @@ describe("Table", function() {
table.addRows(_.values(dogs));
expect(table.getRows().length).toEqual(3);
});
+
+ it("should be able to add new columns", function(){
+ table.addRows(dogs);
+ expect(table.getRows().length).toEqual(3);
+ table.addRows([{newColumnName:'blah'}]);
+ console.log(table.print());
+ expect(table.getColumnHeaders().length).toEqual(3);
+ expect(table.getRows().length).toEqual(4);
+ });
});
diff --git a/taboo.js b/taboo.js
index <HASH>..<HASH> 100644
--- a/taboo.js
+++ b/taboo.js
@@ -70,15 +70,31 @@ function Table(tableName){
@params {Array} rows Takes an array of either objects or arrays.
*/
this.addRows = function(rows){
- var headers = this.getColumnHeaders();
+
var _this = this;
+ // add data
rows.forEach(function(row, index){
+ var currentHeaders = _this.getColumnHeaders();
if (_.isArray(row)){
row.forEach(function(cell, i){
- _this._addCell(headers[i], cell);
+ // ignore array elements out of table column range
+ if (i < currentHeaders.length){
+ _this._addCell(currentHeaders[i], cell);
+ } else {
+ // ignore
+ }
});
_this._clean();
} else if (_.isObject(row)){
+ // add any new columns
+ var rowHeaders = _.keys(row);
+ _this._addHeaders(rowHeaders);
+ // newHeaders = _.difference(rowHeaders, currentHeaders);
+ // newHeaders.forEach(function(header, index){
+ // _this._data.push({header: header, data: []});
+ // });
+ // _this._clean();
+ // add data
_.pairs(row).forEach(function(pair, index){
_this._addCell(pair[0], pair[1]);
});
@@ -405,13 +421,23 @@ function Table(tableName){
this._addRowCellObjects = function(row){
var _this = this;
var headers = _.pluck(row, 'header');
- var uniqueHeaders = _.unique(headers);
+ this._addHeaders(headers);
+ row.forEach(function(cell){
+ _this._addCell(cell['header'], cell['data']);
+ });
+ this._clean();
+ };
+
+ this._addHeaders = function(headers){
+ var _this = this,
+ currentHeaders = this.getColumnHeaders(),
+ uniqueHeaders = _.unique(headers);
if (uniqueHeaders.length !== headers.length){
throw "Can\'t add a row with duplicate headers";
}
- this._clean();
- row.forEach(function(cell){
- _this._addCell(cell['header'], cell['data']);
+ var newHeaders = _.difference(headers, currentHeaders);
+ newHeaders.forEach(function(header, index){
+ _this._data.push({header: header, data: []});
});
this._clean();
};
@@ -424,16 +450,6 @@ function Table(tableName){
var column = _.find(this._data, function(column){
return column.header === colName;
});
- // check if this is a new column
- if (typeof column === 'undefined'){
- // add if column doesn't exist
- column = {
- header: colName,
- data: [],
- };
- this._data.push(column);
- }
- // finally add data to column
column["data"].push(cellValue);
};
|
Bugfix for new columns being added to existing tables, internal cleanup
of header code
|
mrmagooey_taboo
|
train
|
d526fdadc1de0f612dfc291f7d391f02a6d3a056
|
diff --git a/lib/Cake/Model/Model.php b/lib/Cake/Model/Model.php
index <HASH>..<HASH> 100644
--- a/lib/Cake/Model/Model.php
+++ b/lib/Cake/Model/Model.php
@@ -3485,7 +3485,7 @@ class Model extends Object implements CakeEventListener {
$this->tablePrefix = $db->config['prefix'];
}
- $this->schemaName = $db->getSchemaName();
+ $this->schemaName = (empty($this->schemaName) ? $db->getSchemaName() : $this->schemaName);
}
/**
|
Base model class no longer changes an already defined schemaName value
|
cakephp_cakephp
|
train
|
c2c7aee70c03a20250d9eac2b379f9e8e4b6dd5f
|
diff --git a/pandas/core/series.py b/pandas/core/series.py
index <HASH>..<HASH> 100644
--- a/pandas/core/series.py
+++ b/pandas/core/series.py
@@ -2094,21 +2094,8 @@ class Series(generic.NDFrame):
----------
values : list-like
The sequence of values to test. Passing in a single string will
- raise a ``TypeError``:
-
- .. code-block:: python
-
- from pandas import Series
- s = Series(list('abc'))
- s.isin('a')
-
- Instead, turn a single string into a ``list`` of one element:
-
- .. code-block:: python
-
- from pandas import Series
- s = Series(list('abc'))
- s.isin(['a'])
+ raise a ``TypeError``. Instead, turn a single string into a
+ ``list`` of one element.
Returns
-------
@@ -2122,6 +2109,26 @@ class Series(generic.NDFrame):
See Also
--------
pandas.DataFrame.isin
+
+ Examples
+ --------
+
+ >>> s = pd.Series(list('abc'))
+ >>> s.isin(['a', 'c', 'e'])
+ 0 True
+ 1 False
+ 2 True
+ dtype: bool
+
+ Passing a single string as ``s.isin('a')`` will raise an error. Use
+ a list of one element instead:
+
+ >>> s.isin(['a'])
+ 0 True
+ 1 False
+ 2 False
+ dtype: bool
+
"""
if not com.is_list_like(values):
raise TypeError("only list-like objects are allowed to be passed"
diff --git a/pandas/io/html.py b/pandas/io/html.py
index <HASH>..<HASH> 100644
--- a/pandas/io/html.py
+++ b/pandas/io/html.py
@@ -759,20 +759,16 @@ def read_html(io, match='.+', flavor=None, header=None, index_col=None,
This is a dictionary of attributes that you can pass to use to identify
the table in the HTML. These are not checked for validity before being
passed to lxml or Beautiful Soup. However, these attributes must be
- valid HTML table attributes to work correctly. For example,
-
- .. code-block:: python
-
- attrs = {'id': 'table'}
-
+ valid HTML table attributes to work correctly. For example, ::
+
+ attrs = {'id': 'table'}
+
is a valid attribute dictionary because the 'id' HTML tag attribute is
a valid HTML attribute for *any* HTML tag as per `this document
- <http://www.w3.org/TR/html-markup/global-attributes.html>`__.
-
- .. code-block:: python
-
- attrs = {'asdf': 'table'}
-
+ <http://www.w3.org/TR/html-markup/global-attributes.html>`__. ::
+
+ attrs = {'asdf': 'table'}
+
is *not* a valid attribute dictionary because 'asdf' is not a valid
HTML attribute even if it is a valid XML attribute. Valid HTML 4.01
table attributes can be found `here
|
DOC: remove usage of code-block in docstrings (#<I>)
|
pandas-dev_pandas
|
train
|
b9b206bab3f23c8580aa09fe45affeaf3cd5bfe7
|
diff --git a/Parser/Crawler/CrawlerManager.php b/Parser/Crawler/CrawlerManager.php
index <HASH>..<HASH> 100755
--- a/Parser/Crawler/CrawlerManager.php
+++ b/Parser/Crawler/CrawlerManager.php
@@ -16,15 +16,27 @@ use Symfony\Component\DomCrawler\Crawler;
class CrawlerManager
{
+
+ /**
+ * @var Crawler
+ */
private $crawler;
+ /**
+ * @param Crawler $crawler
+ */
public function __construct(Crawler $crawler)
{
$this->crawler = $crawler;
}
+ /**
+ * @param $url
+ * @return Crawler
+ */
public function getCrawler($url)
{
+ $this->crawler->clear();
$this->crawler->addContent($this->getContent($url), 'text/html');
return $this->crawler;
}
@@ -49,6 +61,10 @@ class CrawlerManager
return $this->crawler;
}
+ /**
+ * @param $feed
+ * @return string
+ */
private function getContent($feed)
{
$content = file_get_contents($feed);
diff --git a/Parser/RecentRaces/RecentRacesParser.php b/Parser/RecentRaces/RecentRacesParser.php
index <HASH>..<HASH> 100644
--- a/Parser/RecentRaces/RecentRacesParser.php
+++ b/Parser/RecentRaces/RecentRacesParser.php
@@ -61,7 +61,7 @@ class RecentRacesParser
$row->category = $td->nodeValue;
}
if (7 === $index) {
- $row->name = utf8_encode($td->nodeValue);
+ $row->name = $td->nodeValue;
$aEl = $td->getElementsByTagName('a')->item(0);
//$a = 'http://cqranking.com/men/asp/gen/' . ;
$row->url = $aEl->getAttribute('href');
|
reset crawlercontent when doing a 'get'
do not encode stuff here, leave it as is
|
ErikTrapman_CQRankingParserBundle
|
train
|
2241d713137e398f07e327a201e086cc7a24d714
|
diff --git a/pyemma/coordinates/api.py b/pyemma/coordinates/api.py
index <HASH>..<HASH> 100644
--- a/pyemma/coordinates/api.py
+++ b/pyemma/coordinates/api.py
@@ -52,6 +52,9 @@ from pyemma.coordinates.clustering.uniform_time import UniformTimeClustering as
from pyemma.coordinates.clustering.regspace import RegularSpaceClustering as _RegularSpaceClustering
from pyemma.coordinates.clustering.assign import AssignCenters as _AssignCenters
+# stat
+from pyemma.coordinates.util.stat import histogram
+
_logger = _getLogger('coordinates.api')
__author__ = "Frank Noe, Martin Scherer"
diff --git a/pyemma/coordinates/util/stat.py b/pyemma/coordinates/util/stat.py
index <HASH>..<HASH> 100644
--- a/pyemma/coordinates/util/stat.py
+++ b/pyemma/coordinates/util/stat.py
@@ -1,4 +1,3 @@
-
# Copyright (c) 2015, 2014 Computational Molecular Biology Group, Free University
# Berlin, 14195 Berlin, Germany.
# All rights reserved.
@@ -24,12 +23,18 @@
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import numpy as np
+from pyemma.util.annotators import deprecated
__author__ = 'Fabian Paul'
-__all__ = ['hist']
+__all__ = ['histogram']
+@deprecated("Please use pyemma.coordinates.histogram()")
def hist(transform, dimensions, nbins):
+ return histogram(transform, dimensions, nbins)
+
+
+def histogram(transform, dimensions, nbins):
'''Computes the N-dimensional histogram of the transformed data.
Parameters
@@ -55,10 +60,10 @@ def hist(transform, dimensions, nbins):
>>> import matplotlib.pyplot as plt
>>> %matplotlib inline # only for ipython notebook
- >>> counts, edges=hist(transform, dimensions=(0,1), nbins=(20, 30))
+ >>> counts, edges=histogram(transform, dimensions=(0,1), nbins=(20, 30))
>>> plt.pcolormesh(edges[0], edges[1], counts.T)
- >>> counts, edges=hist(transform, dimensions=(1,), nbins=(50,))
+ >>> counts, edges=histogram(transform, dimensions=(1,), nbins=(50,))
>>> plt.bar(edges[0][:-1], counts, width=edges[0][1:]-edges[0][:-1])
'''
maximum = np.ones(len(dimensions)) * (-np.inf)
@@ -83,4 +88,4 @@ def hist(transform, dimensions, nbins):
for _, chunk in transform:
part, _ = np.histogramdd(chunk[:, dimensions], bins=bins)
res += part
- return res, bins
\ No newline at end of file
+ return res, bins
|
[coor/api] imported histogram function to API and deprecated hist().
|
markovmodel_PyEMMA
|
train
|
fba3f9bb6552281788adadefec7e7f947a362040
|
diff --git a/jre_emul/android/platform/libcore/luni/src/test/java/libcore/java/text/NumberFormatTest.java b/jre_emul/android/platform/libcore/luni/src/test/java/libcore/java/text/NumberFormatTest.java
index <HASH>..<HASH> 100644
--- a/jre_emul/android/platform/libcore/luni/src/test/java/libcore/java/text/NumberFormatTest.java
+++ b/jre_emul/android/platform/libcore/luni/src/test/java/libcore/java/text/NumberFormatTest.java
@@ -249,9 +249,11 @@ public class NumberFormatTest extends junit.framework.TestCase {
// Allow either full-width (0xFFE5) or regular width yen sign (0xA5).
assertTrue(result.equals("¥50") || result.equals("¥50"));
- // Armenian Dram 2 fractional digits.
+ // Armenian Dram 0 fractional digits.
nf = NumberFormat.getCurrencyInstance(Locale.forLanguageTag("hy-AM"));
- assertEquals("50,00\u00a0֏", nf.format(50.00));
+ result = nf.format(50.00);
+ // Allow different versions of the ICU CLDR.
+ assertTrue(result.equals("֏\u00a050") || result.equals("50\u00a0֏"));
// Swiss Francs 2 fractional digits.
nf = NumberFormat.getCurrencyInstance(Locale.forLanguageTag("de-CH"));
|
Allow multiple ICU results in number format test.
PiperOrigin-RevId: <I>
|
google_j2objc
|
train
|
f3871241d648434f39310e95fcf1e442b28280a7
|
diff --git a/Command/ClearChunkCommand.php b/Command/ClearChunkCommand.php
index <HASH>..<HASH> 100644
--- a/Command/ClearChunkCommand.php
+++ b/Command/ClearChunkCommand.php
@@ -18,7 +18,7 @@ class ClearChunkCommand extends ContainerAwareCommand
protected function execute(InputInterface $input, OutputInterface $output)
{
- $manager = $this->getContainer()->get('oneup_uploader.chunks.manager');
+ $manager = $this->getContainer()->get('oneup_uploader.chunk_manager');
$manager->clear();
}
}
\ No newline at end of file
diff --git a/Controller/UploaderController.php b/Controller/UploaderController.php
index <HASH>..<HASH> 100644
--- a/Controller/UploaderController.php
+++ b/Controller/UploaderController.php
@@ -102,7 +102,7 @@ class UploaderController implements UploadControllerInterface
// we'll take the first chunk and append the others to it
// this way we don't need another file in temporary space for assembling
$chunks = $chunkManager->getChunks($uuid);
-
+ die();
// assemble parts
$assembled = $chunkManager->assembleChunks($chunks);
$path = $assembled->getPath();
diff --git a/Resources/config/uploader.xml b/Resources/config/uploader.xml
index <HASH>..<HASH> 100644
--- a/Resources/config/uploader.xml
+++ b/Resources/config/uploader.xml
@@ -23,6 +23,7 @@
<service id="oneup_uploader.orphanage_manager" class="%oneup_uploader.orphanage.manager.class%">
<argument type="service" id="service_container" />
+ <argument>%oneup_uploader.orphanage%</argument>
</service>
<!-- namer -->
diff --git a/Uploader/Chunk/ChunkManager.php b/Uploader/Chunk/ChunkManager.php
index <HASH>..<HASH> 100644
--- a/Uploader/Chunk/ChunkManager.php
+++ b/Uploader/Chunk/ChunkManager.php
@@ -29,7 +29,7 @@ class ChunkManager implements ChunkManagerInterface
try
{
- $finder->in($this->configuration['directory'])->date('<=' . -1 * (int) $this->configuration['maxage'] . 'seconds');
+ $finder->in($this->configuration['directory'])->date('<=' . -1 * (int) $this->configuration['maxage'] . 'seconds')->files();
}
catch(\InvalidArgumentException $e)
{
diff --git a/Uploader/Orphanage/OrphanageManager.php b/Uploader/Orphanage/OrphanageManager.php
index <HASH>..<HASH> 100644
--- a/Uploader/Orphanage/OrphanageManager.php
+++ b/Uploader/Orphanage/OrphanageManager.php
@@ -2,17 +2,46 @@
namespace Oneup\UploaderBundle\Uploader\Orphanage;
+use Symfony\Component\Finder\Finder;
+use Symfony\Component\Filesystem\Filesystem;
use Symfony\Component\DependencyInjection\ContainerInterface;
class OrphanageManager
{
- public function __construct(ContainerInterface $container)
+ protected $config;
+ protected $container;
+
+ public function __construct(ContainerInterface $container, array $config)
{
$this->container = $container;
+ $this->config = $config;
}
public function get($key)
{
return $this->container->get(sprintf('oneup_uploader.orphanage.%s', $key));
}
+
+ public function clear()
+ {
+ $system = new Filesystem();
+ $finder = new Finder();
+
+ try
+ {
+ $finder->in($this->config['directory'])->date('<=' . -1 * (int) $this->config['maxage'] . 'seconds')->files();
+ }
+ catch(\InvalidArgumentException $e)
+ {
+ // the finder will throw an exception of type InvalidArgumentException
+ // if the directory he should search in does not exist
+ // in that case we don't have anything to clean
+ return;
+ }
+
+ foreach($finder as $file)
+ {
+ $system->remove($file);
+ }
+ }
}
\ No newline at end of file
|
Fixed the two bundled commands for clearing chunks and orphans.
|
1up-lab_OneupUploaderBundle
|
train
|
4f46bd19d0b123327596785dc3975d5e4008d761
|
diff --git a/cmd/geth/chaincmd.go b/cmd/geth/chaincmd.go
index <HASH>..<HASH> 100644
--- a/cmd/geth/chaincmd.go
+++ b/cmd/geth/chaincmd.go
@@ -99,6 +99,7 @@ func importChain(ctx *cli.Context) error {
utils.Fatalf("Failed to read database stats: %v", err)
}
fmt.Println(stats)
+ fmt.Printf("Trie cache misses: %d\n\n", trie.CacheMisses())
// Compact the entire database to more accurately measure disk io and print the stats
start = time.Now()
@@ -113,7 +114,6 @@ func importChain(ctx *cli.Context) error {
utils.Fatalf("Failed to read database stats: %v", err)
}
fmt.Println(stats)
- fmt.Println("Trie cache misses:", trie.CacheMisses())
}
return nil
}
diff --git a/cmd/geth/main.go b/cmd/geth/main.go
index <HASH>..<HASH> 100644
--- a/cmd/geth/main.go
+++ b/cmd/geth/main.go
@@ -134,8 +134,9 @@ participating.
utils.KeyStoreDirFlag,
utils.OlympicFlag,
utils.FastSyncFlag,
- utils.CacheFlag,
utils.LightKDFFlag,
+ utils.CacheFlag,
+ utils.TrieCacheGenFlag,
utils.JSpathFlag,
utils.ListenPortFlag,
utils.MaxPeersFlag,
diff --git a/cmd/geth/usage.go b/cmd/geth/usage.go
index <HASH>..<HASH> 100644
--- a/cmd/geth/usage.go
+++ b/cmd/geth/usage.go
@@ -73,7 +73,13 @@ var AppHelpFlagGroups = []flagGroup{
utils.IdentityFlag,
utils.FastSyncFlag,
utils.LightKDFFlag,
+ },
+ },
+ {
+ Name: "PERFORMANCE TUNING",
+ Flags: []cli.Flag{
utils.CacheFlag,
+ utils.TrieCacheGenFlag,
},
},
{
diff --git a/cmd/utils/flags.go b/cmd/utils/flags.go
index <HASH>..<HASH> 100644
--- a/cmd/utils/flags.go
+++ b/cmd/utils/flags.go
@@ -141,11 +141,6 @@ var (
Usage: "Document Root for HTTPClient file scheme",
Value: DirectoryString{homeDir()},
}
- CacheFlag = cli.IntFlag{
- Name: "cache",
- Usage: "Megabytes of memory allocated to internal caching (min 16MB / database forced)",
- Value: 128,
- }
FastSyncFlag = cli.BoolFlag{
Name: "fast",
Usage: "Enable fast syncing through state downloads",
@@ -154,6 +149,17 @@ var (
Name: "lightkdf",
Usage: "Reduce key-derivation RAM & CPU usage at some expense of KDF strength",
}
+ // Performance tuning settings
+ CacheFlag = cli.IntFlag{
+ Name: "cache",
+ Usage: "Megabytes of memory allocated to internal caching (min 16MB / database forced)",
+ Value: 128,
+ }
+ TrieCacheGenFlag = cli.IntFlag{
+ Name: "trie-cache-gens",
+ Usage: "Number of trie node generations to keep in memory",
+ Value: int(state.MaxTrieCacheGen),
+ }
// Fork settings
SupportDAOFork = cli.BoolFlag{
Name: "support-dao-fork",
@@ -721,6 +727,10 @@ func RegisterEthService(ctx *cli.Context, stack *node.Node, extra []byte) {
}
ethConf.PowTest = true
}
+ // Override any global options pertaining to the Ethereum protocol
+ if gen := ctx.GlobalInt(TrieCacheGenFlag.Name); gen > 0 {
+ state.MaxTrieCacheGen = uint16(gen)
+ }
if err := stack.Register(func(ctx *node.ServiceContext) (node.Service, error) {
return eth.New(ctx, ethConf)
diff --git a/core/state/statedb.go b/core/state/statedb.go
index <HASH>..<HASH> 100644
--- a/core/state/statedb.go
+++ b/core/state/statedb.go
@@ -38,14 +38,14 @@ import (
// created.
var StartingNonce uint64
+// Trie cache generation limit after which to evic trie nodes from memory.
+var MaxTrieCacheGen = uint16(120)
+
const (
// Number of past tries to keep. This value is chosen such that
// reasonable chain reorg depths will hit an existing trie.
maxPastTries = 12
- // Trie cache generation limit.
- maxTrieCacheGen = 120
-
// Number of codehash->size associations to keep.
codeSizeCacheSize = 100000
)
@@ -89,7 +89,7 @@ type StateDB struct {
// Create a new state from a given trie
func New(root common.Hash, db ethdb.Database) (*StateDB, error) {
- tr, err := trie.NewSecure(root, db, maxTrieCacheGen)
+ tr, err := trie.NewSecure(root, db, MaxTrieCacheGen)
if err != nil {
return nil, err
}
@@ -158,7 +158,7 @@ func (self *StateDB) openTrie(root common.Hash) (*trie.SecureTrie, error) {
return &tr, nil
}
}
- return trie.NewSecure(root, self.db, maxTrieCacheGen)
+ return trie.NewSecure(root, self.db, MaxTrieCacheGen)
}
func (self *StateDB) pushTrie(t *trie.SecureTrie) {
|
cmd, core/state: allow configurable trie cache generations
|
ethereum_go-ethereum
|
train
|
979f7270f662a72a414edc60693db1986ddbd690
|
diff --git a/cnxepub/scripts/collated_single_html/main.py b/cnxepub/scripts/collated_single_html/main.py
index <HASH>..<HASH> 100644
--- a/cnxepub/scripts/collated_single_html/main.py
+++ b/cnxepub/scripts/collated_single_html/main.py
@@ -40,7 +40,7 @@ def main(argv=None):
binder = reconstitute(args.collated_html)
if args.dump_tree:
- print(pformat(cnxepub.model_to_tree(binder)).encode('utf-8'),
+ print(pformat(cnxepub.model_to_tree(binder)),
file=sys.stdout)
# TODO Check for documents that have no identifier.
diff --git a/cnxepub/tests/scripts/test_collated_single_html.py b/cnxepub/tests/scripts/test_collated_single_html.py
index <HASH>..<HASH> 100644
--- a/cnxepub/tests/scripts/test_collated_single_html.py
+++ b/cnxepub/tests/scripts/test_collated_single_html.py
@@ -17,6 +17,8 @@ from lxml import etree
from ...html_parsers import HTML_DOCUMENT_NAMESPACES
from ...testing import TEST_DATA_DIR, captured_output
+IS_PY3 = sys.version_info.major == 3
+
class CollatedSingleHTMLTestCase(unittest.TestCase):
maxDiff = None
@@ -38,10 +40,13 @@ class CollatedSingleHTMLTestCase(unittest.TestCase):
# Capture stdout
orig_stdout = sys.stdout
self.addCleanup(setattr, sys, 'stdout', orig_stdout)
- stdout = sys.stdout = io.BytesIO()
+ if IS_PY3:
+ stdout = sys.stdout = io.TextIOWrapper(io.BytesIO())
+ else:
+ stdout = sys.stdout = io.BytesIO()
return_code = self.target([self.path_to_xhtml, '-d'])
self.assertEqual(return_code, 0)
stdout.seek(0)
- self.assertIn('Fruity', stdout.read())
+ self.assertIn("'title': 'Fruity'", stdout.read())
|
fixup! Revised validate-collated script to print to sys.stdout
|
openstax_cnx-epub
|
train
|
9a1373dabf314b0142470c757207c9e9ff1a7cf3
|
diff --git a/doc/conf.py b/doc/conf.py
index <HASH>..<HASH> 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -152,6 +152,7 @@ numpydoc_validation_checks = {
"YD01", # Yields: No plan to enforce
}
numpydoc_validation_exclude = { # set of regex
+ r'\.PointSet$', # necessary for this abstract class
r'\.Plotter$', # Issue with class parameter documentation
r'\.from_dict$',
r'\.to_dict$',
diff --git a/pyvista/core/pointset.py b/pyvista/core/pointset.py
index <HASH>..<HASH> 100644
--- a/pyvista/core/pointset.py
+++ b/pyvista/core/pointset.py
@@ -390,11 +390,22 @@ class PointSet(_vtk.vtkPointSet, _PointSet):
"""
- def __init__(self, points=None, deep=False, force_float=True):
- """Initialize the pointset."""
- if pyvista.vtk_version_info < (9, 1, 0): # pragma: no cover
+ def __new__(cls, *args, **kwargs):
+ """Construct a new PointSet object.
+
+ Wrapping this is necessary for us to show an informative error
+ message when the VTK version is too old, causing PointSet to be
+ an abstract class. Since we inherit the ``__new__()`` method of
+ ``vtk.vtkPointSet``, we would otherwise see a generic error about
+ the class being abstract.
+
+ """
+ if pyvista.vtk_version_info < (9, 1, 0):
raise VTKVersionError("pyvista.PointSet requires VTK >= 9.1.0")
+ return super().__new__(cls, *args, **kwargs)
+ def __init__(self, points=None, deep=False, force_float=True):
+ """Initialize the pointset."""
super().__init__()
if points is not None:
self.SetPoints(pyvista.vtk_points(points, deep=deep, force_float=force_float))
|
Make VTK version error clear when PointSet is still abstract (#<I>)
* Move vtk version error from PointSet.__init__ to __new__
* Minor docstring tweak
* ignore documentation validation for pointset
|
vtkiorg_vtki
|
train
|
5f439de97c363213af3fed743d6ed9243c2a6007
|
diff --git a/resilience4j-circuitbreaker/src/main/java/io/github/resilience4j/circuitbreaker/CircuitBreakerConfig.java b/resilience4j-circuitbreaker/src/main/java/io/github/resilience4j/circuitbreaker/CircuitBreakerConfig.java
index <HASH>..<HASH> 100644
--- a/resilience4j-circuitbreaker/src/main/java/io/github/resilience4j/circuitbreaker/CircuitBreakerConfig.java
+++ b/resilience4j-circuitbreaker/src/main/java/io/github/resilience4j/circuitbreaker/CircuitBreakerConfig.java
@@ -125,8 +125,8 @@ public class CircuitBreakerConfig {
* @return the CircuitBreakerConfig.Builder
*/
public Builder waitDurationInOpenState(Duration waitDurationInOpenState) {
- if (waitDurationInOpenState.getSeconds() < 1) {
- throw new IllegalArgumentException("waitDurationInOpenState must be at least 1000[ms]");
+ if (waitDurationInOpenState.toMillis() < 1) {
+ throw new IllegalArgumentException("waitDurationInOpenState must be at least 1[ms]");
}
this.waitDurationInOpenState = waitDurationInOpenState;
return this;
|
support waitDurationInOpenState with milliseconds
|
resilience4j_resilience4j
|
train
|
5312bc7954b2056a782a3c64952a72354f6c3149
|
diff --git a/src/module-elasticsuite-catalog/Model/Product/Indexer/Fulltext/Action/Full.php b/src/module-elasticsuite-catalog/Model/Product/Indexer/Fulltext/Action/Full.php
index <HASH>..<HASH> 100644
--- a/src/module-elasticsuite-catalog/Model/Product/Indexer/Fulltext/Action/Full.php
+++ b/src/module-elasticsuite-catalog/Model/Product/Indexer/Fulltext/Action/Full.php
@@ -53,6 +53,11 @@ class Full
{
$productId = 0;
+ // Magento is only sending children ids here. Ensure to reindex also the parents product ids, if any.
+ if (!empty($productIds)) {
+ $productIds = array_unique(array_merge($productIds, $this->resourceModel->getRelationsByChild($productIds)));
+ }
+
do {
$products = $this->getSearchableProducts($storeId, $productIds, $productId);
|
Ensure to process parent product when reindexing with a list of product ids.
|
Smile-SA_elasticsuite
|
train
|
3fa2615c01e87d260e9d2814a52c32bf4c23b61d
|
diff --git a/spec/support/vcr.rb b/spec/support/vcr.rb
index <HASH>..<HASH> 100644
--- a/spec/support/vcr.rb
+++ b/spec/support/vcr.rb
@@ -11,8 +11,8 @@ VCR.configure do |vcr|
vcr.configure_rspec_metadata!
vcr.default_cassette_options = {
- :record => :new_episodes,
- #:record => :none,
+ #:record => :new_episodes,
+ :record => :none,
:match_requests_on => [:method, :path, :body],
:update_content_length_header => true
}
|
Ensure VCR is not recording on normal spec runs
|
brightbox_brightbox-cli
|
train
|
72f71762a0ad804cb539811190227deda0284d9a
|
diff --git a/lib/JsonpMainTemplatePlugin.js b/lib/JsonpMainTemplatePlugin.js
index <HASH>..<HASH> 100644
--- a/lib/JsonpMainTemplatePlugin.js
+++ b/lib/JsonpMainTemplatePlugin.js
@@ -29,6 +29,23 @@ class JsonpMainTemplatePlugin {
const chunkMaps = chunk.getChunkMaps();
const crossOriginLoading = this.outputOptions.crossOriginLoading;
const chunkLoadTimeout = this.outputOptions.chunkLoadTimeout || 120000;
+ const scriptSrcPath = this.applyPluginsWaterfall("asset-path", JSON.stringify(chunkFilename), {
+ hash: `" + ${this.renderCurrentHashCode(hash)} + "`,
+ hashWithLength: length => `" + ${this.renderCurrentHashCode(hash, length)} + "`,
+ chunk: {
+ id: "\" + chunkId + \"",
+ hash: `" + ${JSON.stringify(chunkMaps.hash)}[chunkId] + "`,
+ hashWithLength(length) {
+ const shortChunkHashMap = Object.create(null);
+ Object.keys(chunkMaps.hash).forEach(chunkId => {
+ if(typeof chunkMaps.hash[chunkId] === "string")
+ shortChunkHashMap[chunkId] = chunkMaps.hash[chunkId].substr(0, length);
+ });
+ return `" + ${JSON.stringify(shortChunkHashMap)}[chunkId] + "`;
+ },
+ name: `" + (${JSON.stringify(chunkMaps.name)}[chunkId]||chunkId) + "`
+ }
+ });
return this.asString([
"var script = document.createElement('script');",
"script.type = 'text/javascript';",
@@ -39,23 +56,7 @@ class JsonpMainTemplatePlugin {
`if (${this.requireFn}.nc) {`,
this.indent(`script.setAttribute("nonce", ${this.requireFn}.nc);`),
"}",
- `script.src = ${this.requireFn}.p + ${this.applyPluginsWaterfall("asset-path", JSON.stringify(chunkFilename), {
- hash: `" + ${this.renderCurrentHashCode(hash)} + "`,
- hashWithLength: length => `" + ${this.renderCurrentHashCode(hash, length)} + "`,
- chunk: {
- id: "\" + chunkId + \"",
- hash: `" + ${JSON.stringify(chunkMaps.hash)}[chunkId] + "`,
- hashWithLength(length) {
- const shortChunkHashMap = Object.create(null);
- Object.keys(chunkMaps.hash).forEach(chunkId => {
- if(typeof chunkMaps.hash[chunkId] === "string")
- shortChunkHashMap[chunkId] = chunkMaps.hash[chunkId].substr(0, length);
- });
- return `" + ${JSON.stringify(shortChunkHashMap)}[chunkId] + "`;
- },
- name: `" + (${JSON.stringify(chunkMaps.name)}[chunkId]||chunkId) + "`
- }
- })};`,
+ `script.src = ${this.requireFn}.p + ${scriptSrcPath};`,
`var timeout = setTimeout(onScriptComplete, ${chunkLoadTimeout});`,
"script.onerror = script.onload = onScriptComplete;",
"function onScriptComplete() {",
|
remove nested template string
linter seem to have a problem with it
|
webpack_webpack
|
train
|
a56586e48bf3a88dd9d19068d53fdb67f3e2faac
|
diff --git a/lib/crispy/crispy_internal/class_spy.rb b/lib/crispy/crispy_internal/class_spy.rb
index <HASH>..<HASH> 100644
--- a/lib/crispy/crispy_internal/class_spy.rb
+++ b/lib/crispy/crispy_internal/class_spy.rb
@@ -17,8 +17,12 @@ module Crispy
end
def define_wrapper method_name
+ #return method_name if method_name == :initialize
+ return method_name if method_name == :method_missing
+ p method_name
define_method method_name do|*arguments, &attached_block|
- ::Crispy::CrispyInternal::ClassSpy.of_class(self.class).received_messages <<
+ #::Kernel.print "e" # <= with this statement, prepend-ing :method_missing doesn't cause the segfault.
+ ::Crispy::CrispyInternal::ClassSpy.of_class(::Kernel.p self.class).received_messages <<
::Crispy::CrispyReceivedMessageWithReceiver.new(self, method_name, *arguments, &attached_block)
super(*arguments, &attached_block)
end
|
add more codes to reproduce segfault more.
|
igrep_crispy
|
train
|
c036092ae274b772826ab36e4f61a45c382726a2
|
diff --git a/lib/wice_grid.rb b/lib/wice_grid.rb
index <HASH>..<HASH> 100644
--- a/lib/wice_grid.rb
+++ b/lib/wice_grid.rb
@@ -311,7 +311,15 @@ module Wice
v = ar.deep_send(*messages)
uniq_vals << v unless v.nil?
end
- return uniq_vals.to_a.map{|i|[i,i]}
+ return uniq_vals.to_a.map{|i|
+ if i.is_a?(Array) && i.size == 2
+ i
+ elsif i.is_a?(Hash) && i.size == 1
+ i.to_a.flatten
+ else
+ [i,i]
+ end
+ }
end
def output_csv?
|
Improvement to custom filters, namely to
:custom => :symbol
and
:custom => [:symbol1, :symbol2, symbol3]
flavors: if the last method returns an array of 2 elements, the first element becomes
the select option label and the second - the select option value (usually id)
|
leikind_wice_grid
|
train
|
711fb32302db344604c660942429cbbf958daaab
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -124,8 +124,8 @@ def get_version_info():
vinfo = _version_helper.generate_git_version_info()
except:
vinfo = vdummy()
- vinfo.version = '1.16.6'
- vinfo.release = 'True'
+ vinfo.version = '1.16.dev7'
+ vinfo.release = 'False'
with open('pycbc/version.py', 'w') as f:
f.write("# coding: utf-8\n")
|
Set back to development (#<I>)
|
gwastro_pycbc
|
train
|
9dbcc91e00942b4616454f86adf323c5f35542e7
|
diff --git a/advanced_filters/tests/test_get_field_choices_view.py b/advanced_filters/tests/test_get_field_choices_view.py
index <HASH>..<HASH> 100644
--- a/advanced_filters/tests/test_get_field_choices_view.py
+++ b/advanced_filters/tests/test_get_field_choices_view.py
@@ -47,7 +47,10 @@ if sys.version_info >= (3, 5):
else:
ARGUMENT_LENGTH_ERROR = "need more than 1 value to unpack"
-MISSING_FIELD_ERROR = "SalesRep has no field named 'baz'"
+if sys.version_info < (3, ) and django.VERSION < (1, 11):
+ MISSING_FIELD_ERROR = "SalesRep has no field named u'baz'"
+else:
+ MISSING_FIELD_ERROR = "SalesRep has no field named 'baz'"
def test_invalid_view_kwargs(client):
|
fixup! test: refactor unittest test cases to pytest
|
modlinltd_django-advanced-filters
|
train
|
edcde28ecdfa56cd55ed68dd4e2a6a0566e5aaa7
|
diff --git a/smack-core/src/main/java/org/jivesoftware/smack/AbstractXMPPConnection.java b/smack-core/src/main/java/org/jivesoftware/smack/AbstractXMPPConnection.java
index <HASH>..<HASH> 100644
--- a/smack-core/src/main/java/org/jivesoftware/smack/AbstractXMPPConnection.java
+++ b/smack-core/src/main/java/org/jivesoftware/smack/AbstractXMPPConnection.java
@@ -294,6 +294,12 @@ public abstract class AbstractXMPPConnection implements XMPPConnection {
Thread thread = new Thread(runnable);
thread.setName("Smack Cached Executor");
thread.setDaemon(true);
+ thread.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
+ @Override
+ public void uncaughtException(Thread t, Throwable e) {
+ LOGGER.log(Level.WARNING, t + " encountered uncaught exception", e);
+ }
+ });
return thread;
}
});
|
Set UncaughtExceptionHandler for CACHED_EXECUTOR_SERVICE
In order to avoid uncaught exceptions from terminating the
program (SMACK-<I>).
|
igniterealtime_Smack
|
train
|
7e4ebbce11fe1365fd37bbc5212f090fdde7a880
|
diff --git a/altimetry/tools/nctools.py b/altimetry/tools/nctools.py
index <HASH>..<HASH> 100755
--- a/altimetry/tools/nctools.py
+++ b/altimetry/tools/nctools.py
@@ -597,10 +597,10 @@ class nc :
root_grp.setncatts(attrStr)
#Get dimensions
- dimStr=data.pop('_dimensions')
- ndims=dimStr.pop('_ndims')
- dimlist = dimStr.keys()
- dimVal = dimStr.values()
+ dimDict=data.pop('_dimensions')
+ ndims=dimDict.pop('_ndims')
+ dimlist = dimDict.keys()
+ dimVal = dimDict.values()
#Get variables
parlist = data.keys()
@@ -610,8 +610,8 @@ class nc :
#Put dimensions
for d in dimlist :
- self.message(2, 'Adding D {0}={1}'.format(d,dimStr[d]))
- root_grp.createDimension(d, dimStr[d])
+ self.message(2, 'Adding D {0}={1}'.format(d,dimDict[d]))
+ root_grp.createDimension(d, dimDict[d])
# if data.has_key('p') : self.Error('Variable name \'p\' is not available for use as NetCDF variable name')
@@ -621,7 +621,8 @@ class nc :
#Get dimensions for current variable
if not data[p].has_key('_dimensions') : self.Error('_dimension attribute is not set for variable'+p)
pardim=data[p].pop('_dimensions')
- if isinstance(pardim,dict) :pardim=tuple(pardim.keys()[1:]) if pardim.has_key("_ndims") else tuple(pardim.keys())
+ if isinstance(pardim,dimDict) : pardim=pardim.keys()
+ elif isinstance(pardim,dict) :pardim=tuple(pardim.keys()[1:]) if pardim.has_key("_ndims") else tuple(pardim.keys())
elif isinstance(pardim,list) : pardim = tuple(pardim)
elif isinstance(pardim,tuple) : pass
else : self.Error('_dimensions must be dict, list or tuple - not {0}'.type(pardim))
@@ -635,7 +636,7 @@ class nc :
# if not (data[p]['data'].dtype == '|S6') and not (data[p]['data'].dtype == '|S2') :
self.message(2, 'Adding V {0} (dims={{{1}}},attr={{{2}}})'.
format(p,
- ', '.join(['\'{0}\':{1}'.format(d,dimStr[d]) for d in pardim]),
+ ', '.join(['\'{0}\':{1}'.format(d,dimDict[d]) for d in pardim]),
', '.join(['\'{0}\':{1}'.format(d,data[p][d]) for d in data[p].keys() if (d != '_dimensions') and (d != 'data')]) )
)
|
RD - corrected bug when writing NC files using dataStr
|
rdussurget_py-altimetry
|
train
|
3d9eafc4bd877282d289969c1b00a87323f1f5cf
|
diff --git a/tests/integration/modules/test_file.py b/tests/integration/modules/test_file.py
index <HASH>..<HASH> 100644
--- a/tests/integration/modules/test_file.py
+++ b/tests/integration/modules/test_file.py
@@ -211,4 +211,3 @@ class FileModuleTest(ModuleCase):
with salt.utils.fopen(self.myfile, 'r') as fp:
content = fp.read()
self.assertEqual(content, 'Hello' + os.linesep + 'Goodbye' + os.linesep)
-
diff --git a/tests/integration/output/test_output.py b/tests/integration/output/test_output.py
index <HASH>..<HASH> 100644
--- a/tests/integration/output/test_output.py
+++ b/tests/integration/output/test_output.py
@@ -165,4 +165,3 @@ class OutputReturnTest(ShellCase):
expected = ['minion:', ' True']
ret = self.run_salt('"minion" test.ping --static')
self.assertEqual(ret, expected)
-
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index <HASH>..<HASH> 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -596,4 +596,3 @@ PATCHLEVEL = 3
MagicMock(return_value=resolv_mock)):
get_dns = core.dns()
self.assertEqual(get_dns, ret)
-
|
Lint: Remove extra empty lines at end of files
|
saltstack_salt
|
train
|
4c0fcda20c33b78d77fddee18cf7a07b6da65fe7
|
diff --git a/src/php/tests/generated_code/AbstractGeneratedCodeTest.php b/src/php/tests/generated_code/AbstractGeneratedCodeTest.php
index <HASH>..<HASH> 100644
--- a/src/php/tests/generated_code/AbstractGeneratedCodeTest.php
+++ b/src/php/tests/generated_code/AbstractGeneratedCodeTest.php
@@ -39,6 +39,14 @@ abstract class AbstractGeneratedCodeTest extends PHPUnit_Framework_TestCase {
protected static $client;
protected static $timeout;
+ public function testWaitForNotReady() {
+ $this->assertFalse(self::$client->waitForReady(1));
+ }
+
+ public function testWaitForReady() {
+ $this->assertTrue(self::$client->waitForReady(250000));
+ }
+
public function testSimpleRequest() {
$div_arg = new math\DivArgs();
$div_arg->setDividend(7);
|
php: add tests for waitForReady
|
grpc_grpc
|
train
|
1db82083cf48a53f6d78e48e7df1faac7f6743dc
|
diff --git a/lib/uservoice/user_voice.rb b/lib/uservoice/user_voice.rb
index <HASH>..<HASH> 100644
--- a/lib/uservoice/user_voice.rb
+++ b/lib/uservoice/user_voice.rb
@@ -14,6 +14,7 @@ module UserVoice
end
Unauthorized = Class.new(APIError)
NotFound = Class.new(APIError)
+ ApplicationError = Class.new(APIError)
def self.generate_sso_token(subdomain_key, sso_key, user_hash, valid_for = 5 * 60)
user_hash[:expires] ||= (Time.now.utc + valid_for).to_s unless valid_for.nil?
@@ -141,7 +142,7 @@ module UserVoice
when 'record_not_found'
raise NotFound.new(attrs)
when 'application_error'
- raise NotFound.new(attrs)
+ raise ApplicationError.new(attrs)
else
raise APIError.new(attrs)
end
|
Differentiate ApplicationError.
|
uservoice_uservoice-ruby
|
train
|
0184a3fd45d3a4e8f59cdd396ec1c567374e9e8b
|
diff --git a/lib/blocklib.php b/lib/blocklib.php
index <HASH>..<HASH> 100644
--- a/lib/blocklib.php
+++ b/lib/blocklib.php
@@ -1785,5 +1785,5 @@ function blocks_add_default_system_blocks() {
$subpagepattern = null;
}
- $page->blocks->add_blocks(array(BLOCK_POS_RIGHT => array('myprofile', 'private_files', 'online_users'), 'content' => array('course_overview')), 'my-index', $subpagepattern, false);
+ $page->blocks->add_blocks(array(BLOCK_POS_RIGHT => array('private_files', 'online_users'), 'content' => array('course_overview')), 'my-index', $subpagepattern, false);
}
diff --git a/lib/db/upgrade.php b/lib/db/upgrade.php
index <HASH>..<HASH> 100644
--- a/lib/db/upgrade.php
+++ b/lib/db/upgrade.php
@@ -3928,7 +3928,7 @@ AND EXISTS (SELECT 'x'
}
- if ($result && $oldversion < 2010050402) { // my_pages for My Moodle and Public Profile pages
+ if ($result && $oldversion < 2010050403) { // my_pages for My Moodle and Public Profile pages
/// Define table my_pages to be created
$table = new xmldb_table('my_pages');
@@ -3952,7 +3952,7 @@ AND EXISTS (SELECT 'x'
$dbman->create_table($table);
}
- /// Add two lines of data into this new table
+ /// Add two lines of data into this new table. These are the default pages.
$mypage = new object();
$mypage->userid = NULL;
$mypage->name = '__default';
@@ -3965,9 +3965,45 @@ AND EXISTS (SELECT 'x'
if (!$DB->record_exists('my_pages', array('userid'=>NULL, 'private'=>1))) {
$result = $result && $DB->insert_record('my_pages', $mypage);
}
+
+ /// This bit is a "illegal" hack, unfortunately, but there is not a better way to install default
+ /// blocks right now, since the upgrade function need to be called after core AND plugins upgrade,
+ /// and there is no such hook yet. Sigh.
+
+ if ($mypage = $DB->get_record('my_pages', array('userid'=>NULL, 'private'=>1))) {
+ if (!$DB->record_exists('block_instances', array('pagetypepattern'=>'my-index', 'parentcontextid'=>SITEID, 'subpagepattern'=>$mypage->id))) {
+
+ // No default exist there yet, let's put a few into My Moodle so it's useful.
+
+ $blockinstance = new stdClass;
+ $blockinstance->parentcontextid = SITEID;
+ $blockinstance->showinsubcontexts = 0;
+ $blockinstance->pagetypepattern = 'my-index';
+ $blockinstance->subpagepattern = $mypage->id;
+ $blockinstance->configdata = '';
+
+ $blockinstance->blockname = 'private_files';
+ $blockinstance->defaultregion = 'side-post';
+ $blockinstance->defaultweight = 0;
+ $blockinstanceid = $DB->insert_record('block_instances', $blockinstance);
+ get_context_instance(CONTEXT_BLOCK, $blockinstanceid);
+
+ $blockinstance->blockname = 'online_users';
+ $blockinstance->defaultregion = 'side-post';
+ $blockinstance->defaultweight = 1;
+ $blockinstanceid = $DB->insert_record('block_instances', $blockinstance);
+ get_context_instance(CONTEXT_BLOCK, $blockinstanceid);
+
+ $blockinstance->blockname = 'course_overview';
+ $blockinstance->defaultregion = 'content';
+ $blockinstance->defaultweight = 0;
+ $blockinstanceid = $DB->insert_record('block_instances', $blockinstance);
+ get_context_instance(CONTEXT_BLOCK, $blockinstanceid);
+ }
+ }
/// Main savepoint reached
- upgrade_main_savepoint($result, 2010050402);
+ upgrade_main_savepoint($result, 2010050403);
}
diff --git a/version.php b/version.php
index <HASH>..<HASH> 100644
--- a/version.php
+++ b/version.php
@@ -6,7 +6,7 @@
// This is compared against the values stored in the database to determine
// whether upgrades should be performed (see lib/db/*.php)
- $version = 2010050402; // YYYYMMDD = date of the last version bump
+ $version = 2010050403; // YYYYMMDD = date of the last version bump
// XX = daily increments
$release = '2.0 dev (Build: 20100504)'; // Human-friendly version name
|
MDL-<I> Reimplementing the My Moodle default blocks for upgraded sites, to match new installs
|
moodle_moodle
|
train
|
bd2a25d0d28f134f4803f3fd7acdf0d1096a5165
|
diff --git a/pysat/tests/test_files.py b/pysat/tests/test_files.py
index <HASH>..<HASH> 100644
--- a/pysat/tests/test_files.py
+++ b/pysat/tests/test_files.py
@@ -1024,7 +1024,7 @@ class TestFilesRaceCondition():
# create a test instrument, make sure it is getting files from
# filesystem
- re_load(pysat.instruments.pysat_testing)
+ reload(pysat.instruments.pysat_testing)
pysat.instruments.pysat_testing.list_files = list_versioned_files
# create a bunch of files by year and doy
self.testInst = \
|
STY: update new syntax from merge
|
rstoneback_pysat
|
train
|
5bc64b19bb428c5e8ac442e1a1ed7b781ca38d48
|
diff --git a/packages/selenium-ide/src/neo/stores/view/UiState.js b/packages/selenium-ide/src/neo/stores/view/UiState.js
index <HASH>..<HASH> 100644
--- a/packages/selenium-ide/src/neo/stores/view/UiState.js
+++ b/packages/selenium-ide/src/neo/stores/view/UiState.js
@@ -173,8 +173,12 @@ class UiState {
}
@action.bound resizeConsole(height) {
+ var maxConsoleHeight = this.windowHeight - this.minContentHeight;
+ var tmpHeight = height > maxConsoleHeight ? maxConsoleHeight : height;
+
this.storedConsoleHeight = height > this.minConsoleHeight + 20 ? height : this.storedConsoleHeight;
- this.consoleHeight = height > this.minConsoleHeight ? height : this.minConsoleHeight;
+ this.consoleHeight = height > this.minConsoleHeight ? tmpHeight : this.minConsoleHeight;
+
storage.set({
consoleSize: this.consoleHeight
});
|
console height bug fix when changing window size with console minimized
|
SeleniumHQ_selenium-ide
|
train
|
3f9a4f74427cdb3e8d51d3a82963cf3beaf72055
|
diff --git a/app/app.js b/app/app.js
index <HASH>..<HASH> 100644
--- a/app/app.js
+++ b/app/app.js
@@ -25,7 +25,6 @@ const {
dialog,
shell,
ipcMain,
- ipcRenderer,
autoUpdater,
Menu,
BrowserWindow,
@@ -106,8 +105,8 @@ function showUpdateModal () {
],
defaultId: 0,
cancelId: 1,
- title: 'New Update Available!',
- message: 'Exciting news!\n\nA fresh new update has been downloaded and is ready to install\n\n\n~Gregory'
+ title: 'Insomnia Update Available',
+ message: 'Exciting news!\n\nA new version of Insomnia has been downloaded and is ready to install\n\n\n~Gregory'
}, id => {
if (id === 0) {
console.log('-- Installing Update --');
@@ -127,8 +126,8 @@ function showDownloadModal (version) {
],
defaultId: 0,
cancelId: 1,
- title: 'New Update Available!',
- message: `Exciting news!\n\nVersion ${version} is available.\n\n\n~Gregory`
+ title: 'Insomnia Update Available',
+ message: `Exciting news!\n\nVersion ${version} of Insomnia is now available.\n\n\n~Gregory`
}, id => {
if (id === 0) {
console.log('-- Installing Update --');
|
Added reference to Insomnia in update notification
|
getinsomnia_insomnia
|
train
|
4b3b023854485d0d8edc28b36c3faed69efbaf4f
|
diff --git a/lib/yelp.rb b/lib/yelp.rb
index <HASH>..<HASH> 100644
--- a/lib/yelp.rb
+++ b/lib/yelp.rb
@@ -2,6 +2,4 @@ require "yelp/version"
require 'yelp/client'
module Yelp
- class << self
- end
end
diff --git a/lib/yelp/client.rb b/lib/yelp/client.rb
index <HASH>..<HASH> 100644
--- a/lib/yelp/client.rb
+++ b/lib/yelp/client.rb
@@ -1,7 +1,7 @@
-require 'yelp/deep_struct'
require 'faraday'
require 'faraday_middleware'
+require 'yelp/deep_struct'
require 'yelp/client/business'
require 'yelp/client/search'
@@ -15,6 +15,8 @@ module Yelp
attr_reader *AUTH_KEYS, :connection
+ # Creates an instance of the Yelp client
+ # Takes a hash then creates instance variables for each key, value pair passed
def initialize(options = {})
AUTH_KEYS.each do |key|
instance_variable_set("@#{key}", options[key])
@@ -23,6 +25,7 @@ module Yelp
configure
end
+ # Configure Faraday for the API connection
def configure
keys = { consumer_key: @consumer_key,
consumer_secret: @consumer_secret,
@@ -30,7 +33,10 @@ module Yelp
token_secret: @token_secret }
@connection = Faraday.new API_HOST do |conn|
+ # Use the Faraday OAuth middleware for OAuth 1.0 requests
conn.request :oauth, keys
+
+ # Using default http library, had to specify to get working
conn.adapter :net_http
end
end
diff --git a/lib/yelp/client/business.rb b/lib/yelp/client/business.rb
index <HASH>..<HASH> 100644
--- a/lib/yelp/client/business.rb
+++ b/lib/yelp/client/business.rb
@@ -1,15 +1,19 @@
require 'json'
-require 'pry'
module Yelp
class Client
module Business
PATH = '/v2/business/'
+ # Return a formatted/structured response from a request
+ # to the business endpoint at the API
def business(id)
DeepStruct.new(JSON.parse(business_request(id).body))
end
+ # Make a request to the business endpoint of the API
+ # The endpoint requires a format of /v2/business/{business-id}
+ # so the primary request parameter is concatenated
def business_request(id)
@connection.get PATH + id
end
diff --git a/lib/yelp/client/search.rb b/lib/yelp/client/search.rb
index <HASH>..<HASH> 100644
--- a/lib/yelp/client/search.rb
+++ b/lib/yelp/client/search.rb
@@ -5,6 +5,8 @@ module Yelp
module Search
PATH = '/v2/search'
+ # Take a search_request and return the formatted/structured
+ # response from the API
def search(location, params = {}, locale = {})
params.merge!(locale)
params.merge!({location: location})
@@ -12,6 +14,8 @@ module Yelp
DeepStruct.new(JSON.parse(search_request(params).body))
end
+ # Make a request against the search endpoint from the API
+ # and return the raw response
def search_request(params)
@connection.get PATH, params
end
|
Clean up a little bit and add comments
|
Yelp_yelp-ruby
|
train
|
819810f946a964e20504270b28a3532ee893f1f5
|
diff --git a/state/txns.go b/state/txns.go
index <HASH>..<HASH> 100644
--- a/state/txns.go
+++ b/state/txns.go
@@ -52,8 +52,8 @@ func (st *State) ResumeTransactions() error {
func (st *State) MaybePruneTransactions() error {
runner, closer := st.database.TransactionRunner()
defer closer()
- // Prune txns only when txn count has doubled since last prune.
- return runner.MaybePruneTransactions(2.0)
+ // Prune txns when txn count has increased by 10% since last prune.
+ return runner.MaybePruneTransactions(1.1)
}
type multiModelRunner struct {
|
state: Prune transactions after <I>% growth since last prune
Previously we would only prune after 2x growth which wasn't aggressive
enough and made the prune process impact system resources much more
when it did run. Now that the pruning process is much more efficient
running it more often is ok.
|
juju_juju
|
train
|
fd6f9ac45fb44e2af7b91931d4afdb34203f161d
|
diff --git a/fastlane/spec/plugins_specs/plugin_generator_spec.rb b/fastlane/spec/plugins_specs/plugin_generator_spec.rb
index <HASH>..<HASH> 100644
--- a/fastlane/spec/plugins_specs/plugin_generator_spec.rb
+++ b/fastlane/spec/plugins_specs/plugin_generator_spec.rb
@@ -273,36 +273,37 @@ describe Fastlane::PluginGenerator do
end
describe "All tests and style validation of the new plugin are passing" do
+ before (:all) do
+ # let(:gem_name) is not available in before(:all), so pass the directory
+ # in explicitly once instead of making this a before(:each)
+ plugin_sh 'bundle install', 'fastlane-plugin-tester_thing'
+ end
+
it "rspec tests are passing" do
# Actually run our generated spec as part of this spec #yodawg
- Dir.chdir(gem_name) do
- Bundler.setup do
- `rspec &> /dev/null`
- expect($?.exitstatus).to be(0)
- end
- end
+ plugin_sh 'bundle exec rspec'
+ expect($?.exitstatus).to eq(0)
end
it "rubocop validations are passing" do
# Actually run our generated spec as part of this spec #yodawg
- Dir.chdir(gem_name) do
- Bundler.setup do
- `rubocop &> /dev/null`
- expect($?.exitstatus).to be(0)
- end
- end
+ plugin_sh 'bundle exec rubocop'
+ expect($?.exitstatus).to eq(0)
end
it "`rake` runs both rspec and rubocop" do
- Dir.chdir(gem_name) do
- Bundler.setup do
- result = `rake`
- expect($?.exitstatus).to be(0)
- expect(result).to include("no offenses detected") # rubocop
- expect(result).to include("example, 0 failures") # rspec
- end
- end
+ # Actually run our generated spec as part of this spec #yodawg
+ result = plugin_sh 'bundle exec rake'
+ expect($?.exitstatus).to eq(0)
+ expect(result).to include("no offenses detected") # rubocop
+ expect(result).to include("example, 0 failures") # rspec
end
end
end
+
+ private
+
+ def plugin_sh(command, plugin_path = gem_name)
+ Dir.chdir(plugin_path) { |path| `#{command}` }
+ end
end
|
Remove Bundler.setup in favor of shelling out to the Bundler in a new plugin directory in unit tests. (#<I>)
|
fastlane_fastlane
|
train
|
e784fbcdad673ae24a6454e4e041cc8b2bbb63b2
|
diff --git a/fullstop-jobs/src/main/java/org/zalando/stups/fullstop/jobs/iam/NoPasswordViolationWriter.java b/fullstop-jobs/src/main/java/org/zalando/stups/fullstop/jobs/iam/NoPasswordViolationWriter.java
index <HASH>..<HASH> 100644
--- a/fullstop-jobs/src/main/java/org/zalando/stups/fullstop/jobs/iam/NoPasswordViolationWriter.java
+++ b/fullstop-jobs/src/main/java/org/zalando/stups/fullstop/jobs/iam/NoPasswordViolationWriter.java
@@ -3,7 +3,7 @@ package org.zalando.stups.fullstop.jobs.iam;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
-import org.zalando.stups.fullstop.jobs.iam.csv.User;
+import org.zalando.stups.fullstop.jobs.iam.csv.CSVReportEntry;
import org.zalando.stups.fullstop.violation.ViolationBuilder;
import org.zalando.stups.fullstop.violation.ViolationSink;
@@ -25,16 +25,16 @@ public class NoPasswordViolationWriter {
this.violationSink = violationSink;
}
- public void writeViolation(String accountId, User CSVReportEntry) {
- log.info("Found IAM user {} that has a password in account {}", user.getName(), accountId);
+ public void writeViolation(String accountId, CSVReportEntry csvReportEntry) {
+ log.info("Found IAM user {} that has a password in account {}", csvReportEntry.getUser(), accountId);
violationSink.put(
new ViolationBuilder()
- .withEventId("check-iam-user_" + user.getName())
+ .withEventId("check-iam-user_" + csvReportEntry.getUser())
.withAccountId(accountId)
.withRegion(NO_REGION)
.withPluginFullyQualifiedClassName(NoPasswordsJob.class)
.withType(PASSWORD_USED)
- .withMetaInfo(singletonMap("user_name", user.getName()))
+ .withMetaInfo(singletonMap("user_name", csvReportEntry.getUser()))
.build());
}
}
|
#<I> adapt csv report parser and entry
|
zalando-stups_fullstop
|
train
|
a1740ab124f17ef0fc44c616d431c1a173584902
|
diff --git a/lib/appsignal/transaction/params_sanitizer.rb b/lib/appsignal/transaction/params_sanitizer.rb
index <HASH>..<HASH> 100644
--- a/lib/appsignal/transaction/params_sanitizer.rb
+++ b/lib/appsignal/transaction/params_sanitizer.rb
@@ -53,6 +53,9 @@ module Appsignal
def inspected(value)
value.inspect
+ rescue
+ # It turns out that sometimes inspect can fail
+ "#<#{value.class.to_s}/>"
end
end
end
diff --git a/spec/lib/appsignal/transaction/params_sanitizer_spec.rb b/spec/lib/appsignal/transaction/params_sanitizer_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/lib/appsignal/transaction/params_sanitizer_spec.rb
+++ b/spec/lib/appsignal/transaction/params_sanitizer_spec.rb
@@ -1,5 +1,11 @@
require 'spec_helper'
+class ErrorOnInspect
+ def inspect
+ raise 'Error'
+ end
+end
+
describe Appsignal::Transaction::ParamsSanitizer do
let(:klass) { Appsignal::Transaction::ParamsSanitizer }
let(:file) { uploaded_file }
@@ -16,7 +22,8 @@ describe Appsignal::Transaction::ParamsSanitizer do
{
:key => 'value',
:file => file,
- }
+ },
+ ErrorOnInspect.new
]
}
}
@@ -47,6 +54,7 @@ describe Appsignal::Transaction::ParamsSanitizer do
its([1]) { should == 'else' }
its([2]) { should be_instance_of String }
its([2]) { should include '::UploadedFile' }
+ its([4]) { should == '#<ErrorOnInspect/>' }
context "nested hash" do
subject { params[:hash][:nested_array][3] }
|
Rescue failing inspects in param sanitizer
|
appsignal_appsignal-ruby
|
train
|
6aa64d9e4824f782b379bf8177a5d521dc814930
|
diff --git a/calendar-bundle/contao/classes/Calendar.php b/calendar-bundle/contao/classes/Calendar.php
index <HASH>..<HASH> 100644
--- a/calendar-bundle/contao/classes/Calendar.php
+++ b/calendar-bundle/contao/classes/Calendar.php
@@ -234,9 +234,7 @@ class Calendar extends \Frontend
}
// Create the file
- $objRss = new \File('share/' . $strFile . '.xml', true);
- $objRss->write($this->replaceInsertTags($objFeed->$strType()));
- $objRss->close();
+ \File::putContent('share/' . $strFile . '.xml', $this->replaceInsertTags($objFeed->$strType()));
}
|
[Calendar] Add the `File::putContent()` method (see #<I>)
|
contao_contao
|
train
|
416eec7c4ba99178fbb1b3414cacb3b9e41ef0b1
|
diff --git a/app/models/task_manager/plan.rb b/app/models/task_manager/plan.rb
index <HASH>..<HASH> 100644
--- a/app/models/task_manager/plan.rb
+++ b/app/models/task_manager/plan.rb
@@ -15,10 +15,13 @@ module TaskManager
default_value_for :ahead_of_time, 0
attr_accessible :autocompletable, :data, :last_task_created_at,
- :name, :plan_type, :ahead_of_time
+ :name, :plan_type, :ahead_of_time, :begin_to_remind, :enabled_at
validates :name, presence: true, uniqueness: true
validates :plan_type, presence: true
validates :ahead_of_time, numericality: { greater_than_or_equal_to: 0 }
+ validates :begin_to_remind, presence: true,
+ numericality: { less_than_or_equal_to: 0 }
+ validates :enabled_at, presence: true
end
end
diff --git a/db/migrate/20121102055137_create_task_manager_plans.rb b/db/migrate/20121102055137_create_task_manager_plans.rb
index <HASH>..<HASH> 100644
--- a/db/migrate/20121102055137_create_task_manager_plans.rb
+++ b/db/migrate/20121102055137_create_task_manager_plans.rb
@@ -3,10 +3,13 @@ class CreateTaskManagerPlans < ActiveRecord::Migration
create_table :task_manager_plans do |t|
t.string :name
t.hstore :data
- t.timestamp :last_task_created_at
t.boolean :autocompletable
t.string :plan_type
t.integer :ahead_of_time
+ t.integer :begin_to_remind
+
+ t.timestamp :enabled_at
+ t.timestamp :last_task_created_at
t.timestamps
end
diff --git a/spec/dummy/db/migrate/20121102055631_create_task_manager_plans.task_manager.rb b/spec/dummy/db/migrate/20121102055631_create_task_manager_plans.task_manager.rb
index <HASH>..<HASH> 100644
--- a/spec/dummy/db/migrate/20121102055631_create_task_manager_plans.task_manager.rb
+++ b/spec/dummy/db/migrate/20121102055631_create_task_manager_plans.task_manager.rb
@@ -4,10 +4,13 @@ class CreateTaskManagerPlans < ActiveRecord::Migration
create_table :task_manager_plans do |t|
t.string :name
t.hstore :data
- t.timestamp :last_task_created_at
t.boolean :autocompletable
t.string :plan_type
t.integer :ahead_of_time
+ t.integer :begin_to_remind
+
+ t.timestamp :enabled_at
+ t.timestamp :last_task_created_at
t.timestamps
end
diff --git a/spec/dummy/db/schema.rb b/spec/dummy/db/schema.rb
index <HASH>..<HASH> 100644
--- a/spec/dummy/db/schema.rb
+++ b/spec/dummy/db/schema.rb
@@ -11,7 +11,7 @@
#
# It's strongly recommended to check this file into your version control system.
-ActiveRecord::Schema.define(:version => 20121102183150) do
+ActiveRecord::Schema.define(:version => 20121102151448) do
create_table "task_manager_assignables", :force => true do |t|
t.integer "plan_id"
@@ -35,10 +35,12 @@ ActiveRecord::Schema.define(:version => 20121102183150) do
create_table "task_manager_plans", :force => true do |t|
t.string "name"
t.hstore "data"
- t.datetime "last_task_created_at"
t.boolean "autocompletable"
t.string "plan_type"
t.integer "ahead_of_time"
+ t.integer "begin_to_remind"
+ t.datetime "enabled_at"
+ t.datetime "last_task_created_at"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
diff --git a/spec/factories/plans_factory.rb b/spec/factories/plans_factory.rb
index <HASH>..<HASH> 100644
--- a/spec/factories/plans_factory.rb
+++ b/spec/factories/plans_factory.rb
@@ -5,5 +5,7 @@ FactoryGirl.define do
data {{ x: ['A', 'B', 'C'], y: [1, 2, 3] }}
last_task_created_at 1.day.ago
autocompletable false
+ enabled_at { Time.now }
+ begin_to_remind -24
end
end
diff --git a/spec/models/plan_spec.rb b/spec/models/plan_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/models/plan_spec.rb
+++ b/spec/models/plan_spec.rb
@@ -16,5 +16,9 @@ describe TaskManager::Plan do
it { should ensure_inclusion_of(:plan_type).in_array(TaskManager::Plan.plan_type.values) }
it { should validate_numericality_of :ahead_of_time }
it { should_not allow_value(-1).for(:ahead_of_time) }
+ it { should validate_presence_of :enabled_at }
+ it { should validate_presence_of :begin_to_remind }
+ it { should validate_numericality_of :begin_to_remind }
+ it { should_not allow_value(1).for(:begin_to_remind) }
end
end
|
Added `begin_to_remind` and `enabled_at` to Plan
|
menglifang_task-manager
|
train
|
f39a335cbe78595427fa6b50a353543719d8faf7
|
diff --git a/py3status/modules/sysdata.py b/py3status/modules/sysdata.py
index <HASH>..<HASH> 100644
--- a/py3status/modules/sysdata.py
+++ b/py3status/modules/sysdata.py
@@ -5,8 +5,8 @@ Display system RAM, SWAP and CPU utilization.
Configuration parameters:
cache_timeout: how often we refresh this module in seconds (default 10)
format: output format string
- *(default '[\?color=cpu CPU: {cpu_usage:.2f}%], '
- '[\?color=mem Mem: {mem_used:.2f}/{mem_total:.2f} GB ({mem_used_percent:.2f}%)]')*
+ *(default '[\?color=cpu CPU: {cpu_usage}%], '
+ '[\?color=mem Mem: {mem_used}/{mem_total} GB ({mem_used_percent}%)]')*
mem_unit: the unit of memory to use in report, case insensitive.
['dynamic', 'KiB', 'MiB', 'GiB'] (default 'GiB')
swap_unit: the unit of swap to use in report, case insensitive.
@@ -216,12 +216,6 @@ class Py3status:
def deprecate_function(config):
# support old thresholds
- padding = config.get('padding', 0)
- precision = config.get('precision', 2)
- format_vals = '[\?min_length={padding} {{value:.{precision}f}}]'.format(
- padding=padding, precision=precision)
- format_temp = '[\?min_length={padding} {{value:.{precision}f}}{{unit}}]'.format(
- padding=padding, precision=precision)
return {
'thresholds': [
(0, 'good'),
|
Attempt to use `update_placeholder_format`
|
ultrabug_py3status
|
train
|
2e0846743086537323a74d7acb08476cb27bdc77
|
diff --git a/packages/react-admin/src/mui/input/AutocompleteInput.js b/packages/react-admin/src/mui/input/AutocompleteInput.js
index <HASH>..<HASH> 100644
--- a/packages/react-admin/src/mui/input/AutocompleteInput.js
+++ b/packages/react-admin/src/mui/input/AutocompleteInput.js
@@ -92,6 +92,7 @@ const styles = theme => ({
export class AutocompleteInput extends React.Component {
state = {
dirty: false,
+ inputValue: null,
searchText: '',
selectedItem: null,
suggestions: [],
@@ -101,6 +102,7 @@ export class AutocompleteInput extends React.Component {
const selectedItem = this.getSelectedItem();
this.setState({
selectedItem,
+ inputValue: this.props.input.value,
searchText: this.getSuggestionText(selectedItem),
suggestions: selectedItem ? [selectedItem] : this.props.choices,
});
@@ -108,10 +110,11 @@ export class AutocompleteInput extends React.Component {
componentWillReceiveProps(nextProps) {
const { choices, input } = nextProps;
- if (input.value !== this.props.input.value) {
+ if (input.value !== this.state.inputValue) {
const selectedItem = this.getSelectedItem(nextProps);
this.setState({
selectedItem,
+ inputValue: input.value,
searchText: this.getSuggestionText(selectedItem),
dirty: false,
suggestions: selectedItem ? [selectedItem] : this.props.choices,
@@ -132,12 +135,14 @@ export class AutocompleteInput extends React.Component {
}
}
- getSelectedItem = ({ selectedItem, choices, input } = this.props) =>
+ getSelectedItem = ({ selectedItem, choices } = this.props) =>
selectedItem
? selectedItem
- : choices && input.value
+ : choices && this.state.inputValue
? choices.find(
- choice => this.getSuggestionValue(choice) === input.value
+ choice =>
+ this.getSuggestionValue(choice) ===
+ this.state.inputValue
)
: null;
@@ -159,16 +164,17 @@ export class AutocompleteInput extends React.Component {
handleSuggestionSelected = (event, { suggestion, method }) => {
const { input } = this.props;
- input &&
- input.onChange &&
- input.onChange(this.getSuggestionValue(suggestion));
-
+ const inputValue = this.getSuggestionValue(suggestion);
this.setState({
dirty: false,
+ inputValue,
selectedItem: suggestion,
searchText: this.getSuggestionText(suggestion),
suggestions: [suggestion],
});
+
+ input && input.onChange && input.onChange(inputValue);
+
if (method === 'enter') {
event.preventDefault();
}
@@ -197,7 +203,7 @@ export class AutocompleteInput extends React.Component {
);
if (match) {
const nextId = this.getSuggestionValue(match);
- if (input.value !== nextId) {
+ if (this.state.inputValue !== nextId) {
input.onChange(this.getSuggestionValue(match));
this.setState({
suggestions: [match],
@@ -345,14 +351,14 @@ export class AutocompleteInput extends React.Component {
if (searchText === '' && allowEmpty) {
input && input.onBlur && input.onBlur(null);
} else {
- input && input.onBlur && input.onBlur(input.value);
+ input && input.onBlur && input.onBlur(this.state.inputValue);
this.setState({
dirty: false,
searchText: this.getSuggestionText(selectedItem),
});
}
} else {
- input && input.onBlur && input.onBlur(input.value);
+ input && input.onBlur && input.onBlur(this.state.inputValue);
}
};
|
Make inputValue a state variable so no rerender will happen after the value has changed.
|
marmelab_react-admin
|
train
|
b3676994b6c9ce715c7ea2a6e2124020af7af805
|
diff --git a/squad/api/rest.py b/squad/api/rest.py
index <HASH>..<HASH> 100644
--- a/squad/api/rest.py
+++ b/squad/api/rest.py
@@ -75,7 +75,12 @@ class ProjectFilter(filters.FilterSet):
def filter_full_name(self, queryset, field_name, value):
if value:
- queryset = queryset.annotate(fullname=Concat(F('group__slug'), V('/'), F('slug'),
+ group_slug = 'group__slug'
+ project_slug = 'slug'
+ if queryset.model is not Project:
+ group_slug = 'project__%s' % group_slug
+ project_slug = 'project__%s' % project_slug
+ queryset = queryset.annotate(fullname=Concat(F(group_slug), V('/'), F(project_slug),
output_field=CharField())).filter(fullname__startswith=value)
return queryset
diff --git a/test/api/test_rest.py b/test/api/test_rest.py
index <HASH>..<HASH> 100644
--- a/test/api/test_rest.py
+++ b/test/api/test_rest.py
@@ -504,6 +504,11 @@ class RestApiTest(APITestCase):
data = self.hit('/api/builds/?created_at=%s' % created_at)
self.assertEqual(1, len(data['results']))
+ def test_build_filter_by_project(self):
+ project_full_name = self.build3.project.full_name
+ data = self.hit('/api/builds/?project__full_name=%s' % project_full_name)
+ self.assertEqual(6, len(data['results']))
+
def test_testjob(self):
data = self.hit('/api/testjobs/%d/' % self.testjob.id)
self.assertEqual('myenv', data['environment'])
|
api: rest: fix project filter
An update in django filters made ProjectFilter
switch queryset's model from Project to nested, crashing
requests filtering builds by project's full_name.
This patch makes sure to use correct field name
based on queryset model.
|
Linaro_squad
|
train
|
46bee075e88908275ad382fd8bef2c7e857d3a0e
|
diff --git a/src/sync.js b/src/sync.js
index <HASH>..<HASH> 100644
--- a/src/sync.js
+++ b/src/sync.js
@@ -330,73 +330,78 @@
return promise;
},
- autoMerge: function(obj) {
- var newValue, oldValue, i;
-
- if (!obj.remote) {
- return obj;
- }
- if (!obj.local) {
- if (obj.remote) {
- if (obj.path.substr(-1) === '/') {
- newValue = (typeof(obj.remote.itemsMap) === 'object' && Object.keys(obj.remote.itemsMap).length ? obj.remote.itemsMap : undefined);
- oldValue = (typeof(obj.common.itemsMap) === 'object' && Object.keys(obj.common.itemsMap).length ? obj.common.itemsMap : undefined);
- haveRemote = (obj.remote.itemsMap !== undefined);
+ autoMerge: function(node) {
+ var newValue, oldValue;
+
+ if (!node.remote) {
+ return node;
+ }
+
+ if (!node.local) {
+ if (node.remote) {
+ if (node.path.substr(-1) === '/') {
+ newValue = (typeof(node.remote.itemsMap) === 'object' && Object.keys(node.remote.itemsMap).length ? node.remote.itemsMap : undefined);
+ oldValue = (typeof(node.common.itemsMap) === 'object' && Object.keys(node.common.itemsMap).length ? node.common.itemsMap : undefined);
+ haveRemote = (node.remote.itemsMap !== undefined);
} else {
- newValue = (obj.remote.body === false ? undefined : obj.remote.body);
- oldValue = (obj.common.body === false ? undefined : obj.common.body);
- haveRemote = (obj.remote.body !== undefined);
+ newValue = (node.remote.body === false ? undefined : node.remote.body);
+ oldValue = (node.common.body === false ? undefined : node.common.body);
+ haveRemote = (node.remote.body !== undefined);
}
if (haveRemote) {
this.local._emit('change', {
- origin: 'remote',
- path: obj.path,
+ origin: 'remote',
+ path: node.path,
oldValue: oldValue,
newValue: newValue
});
- obj.common = obj.remote;
- delete obj.remote;
+ node.common = node.remote;
+ delete node.remote;
}
}
- return obj;
+ return node;
}
- if (obj.path.substr(-1) === '/') {
+
+ if (node.path.substr(-1) === '/') {
//auto merge folder once remote was fetched:
- if (obj.remote.itemsMap) {
- obj.common = obj.remote;
- delete obj.remote;
- if (obj.common.itemsMap) {
- for (i in obj.common.itemsMap) {
- if (!obj.local.itemsMap[i]) {
- //indicates the node is either newly being fetched
- //has been deleted locally (whether or not leading to conflict);
- //before listing it in local listings, check if a local deletion
- //exists.
- obj.local.itemsMap[i] = false;
+ if (node.remote.itemsMap) {
+ node.common = node.remote;
+ delete node.remote;
+
+ if (node.common.itemsMap) {
+ for (var i in node.common.itemsMap) {
+ if (!node.local.itemsMap[i]) {
+ // Indicates the node is either newly being fetched
+ // has been deleted locally (whether or not leading to conflict);
+ // before listing it in local listings, check if a local deletion
+ // exists.
+ node.local.itemsMap[i] = false;
}
}
}
}
- return obj;
+ return node;
} else {
- if (obj.remote.body !== undefined) {
+ if (node.remote.body !== undefined) {
//keep/revert:
- RemoteStorage.log('emitting keep/revert');
+ RemoteStorage.log('Emitting keep/revert');
+
this.local._emit('change', {
- origin: 'conflict',
- path: obj.path,
- oldValue: obj.local.body,
- newValue: obj.remote.body,
- oldContentType: obj.local.contentType,
- newContentType: obj.remote.contentType
+ origin: 'conflict',
+ path: node.path,
+ oldValue: node.local.body,
+ newValue: node.remote.body,
+ oldContentType: node.local.contentType,
+ newContentType: node.remote.contentType
});
- obj.common = obj.remote;
- delete obj.remote;
- delete obj.local;
+
+ node.common = node.remote;
+ delete node.remote;
+ delete node.local;
}
- delete obj.push;
- return obj;
+ delete node.push;
+ return node;
}
},
|
Use more expressive naming for autoMerge method
|
remotestorage_remotestorage.js
|
train
|
3fd72f979b7c1f5cc8e79961272f74b064c6ab58
|
diff --git a/python/src/nnabla/core/variable_batch_size.py b/python/src/nnabla/core/variable_batch_size.py
index <HASH>..<HASH> 100644
--- a/python/src/nnabla/core/variable_batch_size.py
+++ b/python/src/nnabla/core/variable_batch_size.py
@@ -82,7 +82,8 @@ def variable_batch_size(network):
pf.args['shape'] = [-1] + arg_shape[1:]
for var in network.variables.values():
- if var.name not in special_variable and var.shape[0] == expect_batch_size:
- var.shape = (-1,) + var.shape[1:]
+ if var.shape:
+ if var.name not in special_variable and var.shape[0] == expect_batch_size:
+ var.shape = (-1,) + var.shape[1:]
network.batch_size = expect_batch_size
diff --git a/python/test/core/test_variable_batch_size.py b/python/test/core/test_variable_batch_size.py
index <HASH>..<HASH> 100644
--- a/python/test/core/test_variable_batch_size.py
+++ b/python/test/core/test_variable_batch_size.py
@@ -20,7 +20,7 @@ import nnabla.parametric_functions as PF
from nnabla.utils import load
from nnabla.utils import save
from nnabla.utils import nnp_graph
-from helper import forward_variable
+from helper import forward_variable, create_temp_with_dir
def base_axis_0_reshape_with_neg_1(x):
@@ -174,3 +174,22 @@ def test_variable_batch_size(tmpdir, batch_size, variable_batch_size, model_def,
if expect_batch_size != -1:
batch_size = expect_batch_size
assert (batch_size == out.shape[0])
+
+
+def test_scalar_save_variable_batch_size():
+ x = nn.Variable((128, 1, 28, 28))
+ h = PF.convolution(x, 32, kernel=(3, 3))
+ loss = F.mean(h)
+ with create_temp_with_dir("tmp.nnp") as temp_nnp_file_name:
+ runtime_contents = {
+ 'networks': [
+ {'name': 'runtime',
+ 'batch_size': 1,
+ 'outputs': {'loss': loss},
+ 'names': {'x': x}}],
+ 'executors': [
+ {'name': 'runtime',
+ 'network': 'runtime',
+ 'data': ['x'],
+ 'output': ['loss']}]}
+ nn.utils.save.save(temp_nnp_file_name, runtime_contents)
|
fix error when save network with F.mean()
|
sony_nnabla
|
train
|
80004497d92318e066fb0c4392c9d2ba6b4755b2
|
diff --git a/src/internal/ppsutil/util.go b/src/internal/ppsutil/util.go
index <HASH>..<HASH> 100644
--- a/src/internal/ppsutil/util.go
+++ b/src/internal/ppsutil/util.go
@@ -310,7 +310,7 @@ func UpdateJobState(pipelines col.PostgresReadWriteCollection, jobs col.ReadWrit
// Update job info
var err error
- if state == pps.JobState_JOB_RUNNING {
+ if jobInfo.State == pps.JobState_JOB_STARTING && state == pps.JobState_JOB_RUNNING {
jobInfo.Started, err = types.TimestampProto(time.Now())
if err != nil {
return err
|
Fix job start time being reset during update (#<I>)
|
pachyderm_pachyderm
|
train
|
ec620747c71501e65f6f4073d1ade00d557427fa
|
diff --git a/html/pfappserver/root/static.alt/src/views/Configuration/_api/index.js b/html/pfappserver/root/static.alt/src/views/Configuration/_api/index.js
index <HASH>..<HASH> 100644
--- a/html/pfappserver/root/static.alt/src/views/Configuration/_api/index.js
+++ b/html/pfappserver/root/static.alt/src/views/Configuration/_api/index.js
@@ -1076,6 +1076,11 @@ export default {
return response.data
})
},
+ createLetsEncryptCertificate: data => {
+ return apiCall.put(`config/certificate/${data.id}/lets_encrypt`, data).then(response => {
+ return response.data
+ })
+ },
generateCertificateSigningRequest: data => {
return apiCall.post(`config/certificate/${data.id}/generate_csr`, data).then(response => {
return response.data
diff --git a/html/pfappserver/root/static.alt/src/views/Configuration/_components/CertificatesView.vue b/html/pfappserver/root/static.alt/src/views/Configuration/_components/CertificatesView.vue
index <HASH>..<HASH> 100644
--- a/html/pfappserver/root/static.alt/src/views/Configuration/_components/CertificatesView.vue
+++ b/html/pfappserver/root/static.alt/src/views/Configuration/_components/CertificatesView.vue
@@ -65,11 +65,11 @@
:column-label="$t('Validate certificate chain')"
></pf-form-range-toggle>
<pf-form-range-toggle
- v-model="find_intermediate_cas"
+ v-model="findIntermediateCAs"
:column-label="$t('Find intermediate CA certificates automatically')"
></pf-form-range-toggle>
<pf-form-fields
- v-if="!find_intermediate_cas"
+ v-if="!findIntermediateCAs"
v-model="certs[id].intermediate_cas"
:column-label="$t('Intermediate CA certificate(s)')"
:button-label="$t('Add certificate')"
@@ -215,7 +215,7 @@ export default {
state: ''
},
csr: '',
- find_intermediate_cas: false,
+ findIntermediateCAs: false,
caCertificateField: {
component: pfField,
attrs: {
@@ -311,18 +311,16 @@ export default {
})
},
save (id) {
- if (this.find_intermediate_cas) {
- delete this.certs[id].intermediate_cas
- }
+ let creationPromise
if (this.isEnabled(this.certs[id].lets_encrypt)) {
- this.certs[id] = {
- id,
- lets_encrypt: this.certs[id].lets_encrypt,
- common_name: this.certs[id].common_name,
- check_chain: 'disabled'
+ creationPromise = this.$store.dispatch(`${this.storeName}/createLetsEncryptCertificate`, this.certs[id])
+ } else {
+ if (this.findIntermediateCAs) {
+ delete this.certs[id].intermediate_cas
}
+ creationPromise = this.$store.dispatch(`${this.storeName}/createCertificate`, this.certs[id])
}
- this.$store.dispatch(`${this.storeName}/createCertificate`, this.certs[id]).then(() => {
+ creationPromise.then(() => {
this.$store.dispatch('notification/info', { message: this.$i18n.t('{certificate} certificate saved', { certificate: id.toUpperCase() }) })
}).finally(() => window.scrollTo(0, 0))
},
diff --git a/html/pfappserver/root/static.alt/src/views/Configuration/_store/certificates.js b/html/pfappserver/root/static.alt/src/views/Configuration/_store/certificates.js
index <HASH>..<HASH> 100644
--- a/html/pfappserver/root/static.alt/src/views/Configuration/_store/certificates.js
+++ b/html/pfappserver/root/static.alt/src/views/Configuration/_store/certificates.js
@@ -62,6 +62,21 @@ const actions = {
throw err
})
},
+ createLetsEncryptCertificate: ({ commit }, data) => {
+ const request = {
+ id: data.id,
+ lets_encrypt: data.lets_encrypt,
+ common_name: data.common_name
+ }
+ commit('ITEM_REQUEST')
+ return api.createLetsEncryptCertificate(request).then(response => {
+ commit('ITEM_REPLACED', data)
+ return response
+ }).catch(err => {
+ commit('ITEM_ERROR', err.response)
+ throw err
+ })
+ },
generateCertificateSigningRequest: ({ commit }, data) => {
commit('ITEM_REQUEST')
return api.generateCertificateSigningRequest(data).then(response => {
|
(web admin) Fix Let's Encrypt support
|
inverse-inc_packetfence
|
train
|
10a521d27029f00981051920c1ca8fc91d061876
|
diff --git a/Operations/Create.php b/Operations/Create.php
index <HASH>..<HASH> 100644
--- a/Operations/Create.php
+++ b/Operations/Create.php
@@ -22,26 +22,28 @@ use \Phramework\Exceptions\RequestExceptionException;
use \Phramework\Exceptions\NotFoundException;
/**
- * create model
+ * Create operation for databases
* @license https://www.apache.org/licenses/LICENSE-2.0 Apache-2.0
* @author Xenofon Spafaridis <nohponex@gmail.com>
* @since 0
-
-
*/
class Create
{
- //const RETURN_RECORDS = 1;
- const RETURN_ID = 0;
- const RETURN_NUMBER_OF_RECORDS = 2;
+
+ const RETURN_ID = 1;
+ const RETURN_RECORDS = 2;
+ const RETURN_NUMBER_OF_RECORDS = 4;
/**
* Create a new record in database
- * @param array $attributes Key-value array with records's attributes
+ * @param array|object $attributes Key-value array or object with records's attributes
* @param string $table Table's name
- * @param string|null $schema [Optional] Table's schema, default is null for no schema
- * @param [type] $return Return method type
- * @return int|array
+ * @param string|null $schema [Optional] Table's schema, default is null for no schema
+ * @param integer $return Return method type
+ * - if RETURN_ID will return the id of last inserted record
+ * - if RETURN_RECORDS will return the inserted record
+ * - if RETURN_NUMBER_OF_RECORDS will return the number of records affected
+ * @return integer|array
* @todo Check RETURNING id for another primary key attribute
*/
public static function create(
@@ -50,6 +52,10 @@ class Create
$schema = null,
$return = self::RETURN_ID
) {
+ if (is_object($attributes)) {
+ $attributes = (array)$attributes;
+ }
+
//prepare query
$query_keys = implode('" , "', array_keys($attributes));
$query_parameter_string = trim(str_repeat('?,', count($attributes)), ',');
@@ -81,13 +87,18 @@ class Create
}
return Database::executeLastInsertId($query, $query_values);
- } else {
+ } elseif ($return == self::RETURN_RECORDS) {
//Return number of records affected
- if ($driver == 'postgresql') {
- $query .= 'RETURNING ' . '*';
+ if ($driver != 'postgresql') {
+ throw new \Phramework\Excetpions\ServerExcetion(
+ 'RETURN_RECORDS works only with postgresql adapter'
+ );
}
+ $query .= 'RETURNING *';
return Database::executeAndFetch($query, $query_values);
+ } else {
+ return Database::execute($query, $query_values);
}
}
}
diff --git a/Operations/Delete.php b/Operations/Delete.php
index <HASH>..<HASH> 100644
--- a/Operations/Delete.php
+++ b/Operations/Delete.php
@@ -19,7 +19,7 @@ namespace Phramework\Database\Operations;
use \Phramework\Database\Database;
/**
- * Delete model
+ * Delete operation for databases
* @license https://www.apache.org/licenses/LICENSE-2.0 Apache-2.0
* @author Xenofon Spafaridis <nohponex@gmail.com>
* @since 1
diff --git a/Operations/Update.php b/Operations/Update.php
index <HASH>..<HASH> 100644
--- a/Operations/Update.php
+++ b/Operations/Update.php
@@ -23,7 +23,7 @@ use \Phramework\Exceptions\NotFoundException;
// @codingStandardsIgnoreStart
/**
- * update model
+ * Update operation for databases
* @license https://www.apache.org/licenses/LICENSE-2.0 Apache-2.0
* @author Xenofon Spafaridis <nohponex@gmail.com>
* @since 0
|
Improve Create operation
Add RETURN_RECORDS
|
phramework_database
|
train
|
476903eaaebac0351a37c65f62013f0b29a230dc
|
diff --git a/lib/mini_fb.rb b/lib/mini_fb.rb
index <HASH>..<HASH> 100644
--- a/lib/mini_fb.rb
+++ b/lib/mini_fb.rb
@@ -540,13 +540,13 @@ module MiniFB
resp = RestClient.get url
end
- puts 'resp=' + resp.body.to_s if @@logging
+ puts 'resp=' + resp.to_s if @@logging
begin
- res_hash = JSON.parse(resp.body)
+ res_hash = JSON.parse(resp.to_s)
rescue
# quick fix for things like stream.publish that don't return json
- res_hash = JSON.parse("{\"response\": #{resp.body.to_s}}")
+ res_hash = JSON.parse("{\"response\": #{resp.to_s}}")
end
if res_hash.is_a? Array # fql return this
|
Changed resp.body to resp.to_s, apparently it was a mistake in a previous version of rest_client: <URL>
|
appoxy_mini_fb
|
train
|
b4bca5496df592686ed039b3c595c136f9408c07
|
diff --git a/hystrix-core/src/main/java/com/netflix/hystrix/strategy/concurrency/HystrixRequestContext.java b/hystrix-core/src/main/java/com/netflix/hystrix/strategy/concurrency/HystrixRequestContext.java
index <HASH>..<HASH> 100644
--- a/hystrix-core/src/main/java/com/netflix/hystrix/strategy/concurrency/HystrixRequestContext.java
+++ b/hystrix-core/src/main/java/com/netflix/hystrix/strategy/concurrency/HystrixRequestContext.java
@@ -56,7 +56,7 @@ import com.netflix.hystrix.HystrixRequestLog;
*
* </blockquote>
* <p>
- * You can find an implementation at <a href="https://github.com/Netflix/Hystrix/tree/master/hystrix-contrib/hystrix-request-servlet">hystrix-contrib/hystrix-request-servlet</a> on GitHub.
+ * You can find an implementation at <a target="_top" href="https://github.com/Netflix/Hystrix/tree/master/hystrix-contrib/hystrix-request-servlet">hystrix-contrib/hystrix-request-servlet</a> on GitHub.
* <p>
* <b>NOTE:</b> If <code>initializeContext()</code> is called then <code>shutdown()</code> must also be called or a memory leak will occur.
*/
|
href target so it opens from javadoc frame
|
Netflix_Hystrix
|
train
|
25579f794844ad22613c27f425d77d63b45d3df1
|
diff --git a/niworkflows/viz/utils.py b/niworkflows/viz/utils.py
index <HASH>..<HASH> 100644
--- a/niworkflows/viz/utils.py
+++ b/niworkflows/viz/utils.py
@@ -291,6 +291,8 @@ def compose_view(bg_svgs, fg_svgs, ref=0, out_file='report.svg'):
sizes.append((width, height))
nsvgs = len(bg_svgs)
+ sizes = np.array(sizes)
+
# Calculate the scale to fit all widths
width = sizes[ref, 0]
scales = width / sizes[:, 0]
|
convert sizes list to numpy array in compose view, new scale calculations depend on it.
|
poldracklab_niworkflows
|
train
|
b166cab9a252f4093af1f33cb178a86f6047d08a
|
diff --git a/Parsedown.php b/Parsedown.php
index <HASH>..<HASH> 100644
--- a/Parsedown.php
+++ b/Parsedown.php
@@ -115,7 +115,7 @@ class Parsedown
# Blocks
#
- private function lines(array $lines)
+ protected function lines(array $lines)
{
$CurrentBlock = null;
|
Make `lines` protected to allow for extendability
|
erusev_parsedown
|
train
|
f22696bff8715d388b4b51a6e349db98feb4eaca
|
diff --git a/src/components/VSelect/VSelect.js b/src/components/VSelect/VSelect.js
index <HASH>..<HASH> 100644
--- a/src/components/VSelect/VSelect.js
+++ b/src/components/VSelect/VSelect.js
@@ -59,6 +59,7 @@ export default {
return {
cachedItems: [],
content: {},
+ defaultColor: 'primary',
inputValue: (this.multiple || this.tags) && !this.value ? [] : this.value,
isBooted: false,
lastItem: 20,
@@ -80,22 +81,19 @@ export default {
appendIconCb: Function,
auto: Boolean,
autocomplete: Boolean,
+ browserAutocomplete: {
+ type: String,
+ default: 'on'
+ },
cacheItems: Boolean,
chips: Boolean,
clearable: Boolean,
- color: {
- type: String,
- default: 'primary'
- },
combobox: Boolean,
debounceSearch: {
type: [Number, String],
default: 200
},
- browserAutocomplete: {
- type: String,
- default: 'on'
- },
+ editable: Boolean,
items: {
type: Array,
default: () => []
@@ -104,6 +102,10 @@ export default {
type: String,
default: 'avatar'
},
+ itemDisabled: {
+ type: String,
+ default: 'disabled'
+ },
itemText: {
type: String,
default: 'text'
@@ -112,10 +114,6 @@ export default {
type: String,
default: 'value'
},
- itemDisabled: {
- type: String,
- default: 'disabled'
- },
maxHeight: {
type: [Number, String],
default: 300
@@ -126,16 +124,15 @@ export default {
},
multiple: Boolean,
multiLine: Boolean,
- solo: Boolean,
+ overflow: Boolean,
+ returnObject: Boolean,
searchInput: {
default: null
},
- singleLine: Boolean,
- tags: Boolean,
- returnObject: Boolean,
- overflow: Boolean,
segmented: Boolean,
- editable: Boolean
+ singleLine: Boolean,
+ solo: Boolean,
+ tags: Boolean
},
computed: {
diff --git a/src/components/VSelect/mixins/select-generators.js b/src/components/VSelect/mixins/select-generators.js
index <HASH>..<HASH> 100644
--- a/src/components/VSelect/mixins/select-generators.js
+++ b/src/components/VSelect/mixins/select-generators.js
@@ -258,9 +258,7 @@ export default {
data.props.disabled = disabled
}
- if (this.color && this.addTextColorClassChecks) {
- data.props.activeClass = Object.keys(this.addTextColorClassChecks()).join(' ')
- }
+ data.props.activeClass = Object.keys(this.addTextColorClassChecks()).join(' ')
if (this.$scopedSlots.item) {
return this.$createElement('v-list-tile', data,
@@ -288,7 +286,7 @@ export default {
return this.$createElement('v-list-tile-action', data, [
this.$createElement('v-checkbox', {
props: {
- color: this.color,
+ color: this.computedColor,
inputValue: active
}
})
|
refactor: order of props
|
vuetifyjs_vuetify
|
train
|
912c5c0dc18abf8491e9d70b6057a60176d31143
|
diff --git a/remix-simulator/src/methods/blocks.js b/remix-simulator/src/methods/blocks.js
index <HASH>..<HASH> 100644
--- a/remix-simulator/src/methods/blocks.js
+++ b/remix-simulator/src/methods/blocks.js
@@ -1,6 +1,7 @@
var Web3 = require("web3")
-var Blocks = function (options) {
+var Blocks = function (_options) {
+ const options = _options || {}
this.coinbase = options.coinbase || "0x0000000000000000000000000000000000000000"
this.blockNumber = 0
}
@@ -9,8 +10,8 @@ Blocks.prototype.methods = function () {
return {
eth_getBlockByNumber: this.eth_getBlockByNumber.bind(this),
eth_gasPrice: this.eth_gasPrice.bind(this),
- eth_coinbase: this.coinbase.bind(this),
- eth_blockNumber: this.blockNumber.bind(this)
+ eth_coinbase: this.eth_coinbase.bind(this),
+ eth_blockNumber: this.eth_blockNumber.bind(this)
}
}
|
fix calls to coinbase and blockNumber method
|
ethereum_remix
|
train
|
675f967d4a56874792f19bab4601d7d14da6983d
|
diff --git a/util/btcctl/btcctl.go b/util/btcctl/btcctl.go
index <HASH>..<HASH> 100644
--- a/util/btcctl/btcctl.go
+++ b/util/btcctl/btcctl.go
@@ -7,6 +7,7 @@ import (
"fmt"
"github.com/conformal/btcjson"
"github.com/conformal/btcutil"
+ "github.com/conformal/btcws"
"github.com/conformal/go-flags"
"github.com/davecgh/go-spew/spew"
"io/ioutil"
@@ -46,6 +47,7 @@ var (
// to validate correctness and perform the command.
var commandHandlers = map[string]*handlerData{
"addnode": {2, 0, displayJSONDump, nil, makeAddNode, "<ip> <add/remove/onetry>"},
+ "createencryptedwallet": {1, 0, displayGeneric, nil, makeCreateEncryptedWallet, "<passphrase>"},
"createrawtransaction": {2, 0, displayGeneric, nil, makeCreateRawTransaction, "\"[{\"txid\":\"id\",\"vout\":n},...]\" \"{\"address\":amount,...}\""},
"debuglevel": {1, 0, displayGeneric, nil, makeDebugLevel, "<levelspec>"},
"decoderawtransaction": {1, 0, displayJSONDump, nil, makeDecodeRawTransaction, "<txhash>"},
@@ -202,6 +204,12 @@ func makeAddNode(args []interface{}) (btcjson.Cmd, error) {
args[1].(string))
}
+// makeCreateEncryptedWallet generates the cmd structure for
+// createencryptedwallet commands.
+func makeCreateEncryptedWallet(args []interface{}) (btcjson.Cmd, error) {
+ return btcws.NewCreateEncryptedWalletCmd("btcctl", args[0].(string)), nil
+}
+
// makeCreateRawTransaction generates the cmd structure for createrawtransaction
// commands.
func makeCreateRawTransaction(args []interface{}) (btcjson.Cmd, error) {
|
Add createencryptedwallet support to btcctl.
This is a btcwallet extension and will not work when talking to btcd
or bitcoind.
|
btcsuite_btcd
|
train
|
37538b374cab7494098c8837fa84dca8a4f3e700
|
diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -20,4 +20,6 @@
*/
module.exports = require('./exec.js');
module.exports.spawn = require('./spawn.js');
+module.exports.util = require('./util.js');
module.exports.progress = require('./util.js').progress;
+module.exports.tilda = require('./util.js').tilda;
|
Expose utilities and tilda function from index
|
timthesinner_exec-promised
|
train
|
617d2891c843f2a94db33bc0a049fa9c4e8c9538
|
diff --git a/lib/danger_plugin.rb b/lib/danger_plugin.rb
index <HASH>..<HASH> 100755
--- a/lib/danger_plugin.rb
+++ b/lib/danger_plugin.rb
@@ -47,12 +47,10 @@ module Danger
raise 'swiftlint is not installed' unless swiftlint.installed?
config = if config_file
- File.expand_path(config_file)
- elsif File.file?('.swiftlint.yml')
- File.expand_path('.swiftlint.yml')
- else
- nil
- end
+ File.expand_path(config_file)
+ elsif File.file?('.swiftlint.yml')
+ File.expand_path('.swiftlint.yml')
+ end
log "Using config file: #{config}"
dir_selected = directory ? File.expand_path(directory) : Dir.pwd
@@ -95,7 +93,7 @@ module Danger
# Fail Danger on errors
if fail_on_error && errors.count.positive?
- fail 'Failed due to SwiftLint errors'
+ raise 'Failed due to SwiftLint errors'
end
end
end
diff --git a/spec/danger_plugin_spec.rb b/spec/danger_plugin_spec.rb
index <HASH>..<HASH> 100755
--- a/spec/danger_plugin_spec.rb
+++ b/spec/danger_plugin_spec.rb
@@ -160,11 +160,11 @@ module Danger
it 'default config is nil, unspecified' do
allow(@swiftlint.git).to receive(:added_files).and_return([])
allow(@swiftlint.git).to receive(:modified_files).and_return([
- 'spec/fixtures/SwiftFile.swift',
- ])
+ 'spec/fixtures/SwiftFile.swift'
+ ])
expect_any_instance_of(Swiftlint).to receive(:lint)
- .with(hash_including(:config => nil), '')
+ .with(hash_including(config: nil), '')
.once
.and_return(@swiftlint_response)
@@ -174,13 +174,13 @@ module Danger
it 'expands default config file (if present) to absolute path' do
allow(@swiftlint.git).to receive(:added_files).and_return([])
allow(@swiftlint.git).to receive(:modified_files).and_return([
- 'spec/fixtures/SwiftFile.swift',
- ])
+ 'spec/fixtures/SwiftFile.swift'
+ ])
expect(File).to receive(:file?).and_return(true)
expect(YAML).to receive(:load_file).and_return({})
expect_any_instance_of(Swiftlint).to receive(:lint)
- .with(hash_including(:config => File.expand_path('.swiftlint.yml')), '')
+ .with(hash_including(config: File.expand_path('.swiftlint.yml')), '')
.once
.and_return(@swiftlint_response)
@@ -190,11 +190,11 @@ module Danger
it 'expands specified config file to absolute path' do
allow(@swiftlint.git).to receive(:added_files).and_return([])
allow(@swiftlint.git).to receive(:modified_files).and_return([
- 'spec/fixtures/SwiftFile.swift',
- ])
+ 'spec/fixtures/SwiftFile.swift'
+ ])
expect_any_instance_of(Swiftlint).to receive(:lint)
- .with(hash_including(:config => File.expand_path('spec/fixtures/some_config.yml')), '')
+ .with(hash_including(config: File.expand_path('spec/fixtures/some_config.yml')), '')
.once
.and_return(@swiftlint_response)
|
New run of autocorrect after rebase
|
ashfurrow_danger-ruby-swiftlint
|
train
|
167666dc8ec69ae4f8522c7fe97ef34121333fff
|
diff --git a/elasticsearch-client/elasticsearch-client-v7/src/main/java/fr/pilato/elasticsearch/crawler/fs/client/v7/WorkplaceSearchClientV7.java b/elasticsearch-client/elasticsearch-client-v7/src/main/java/fr/pilato/elasticsearch/crawler/fs/client/v7/WorkplaceSearchClientV7.java
index <HASH>..<HASH> 100644
--- a/elasticsearch-client/elasticsearch-client-v7/src/main/java/fr/pilato/elasticsearch/crawler/fs/client/v7/WorkplaceSearchClientV7.java
+++ b/elasticsearch-client/elasticsearch-client-v7/src/main/java/fr/pilato/elasticsearch/crawler/fs/client/v7/WorkplaceSearchClientV7.java
@@ -27,6 +27,7 @@ import fr.pilato.elasticsearch.crawler.fs.client.ESSearchResponse;
import fr.pilato.elasticsearch.crawler.fs.client.ElasticsearchClient;
import fr.pilato.elasticsearch.crawler.fs.client.ElasticsearchClientUtil;
import fr.pilato.elasticsearch.crawler.fs.client.WorkplaceSearchClient;
+import fr.pilato.elasticsearch.crawler.fs.framework.FsCrawlerUtil;
import fr.pilato.elasticsearch.crawler.fs.settings.FsSettings;
import fr.pilato.elasticsearch.crawler.fs.thirdparty.wpsearch.WPSearchClient;
import org.apache.logging.log4j.LogManager;
@@ -150,7 +151,8 @@ public class WorkplaceSearchClientV7 implements WorkplaceSearchClient {
document.put("body", doc.getContent());
// Index main metadata
- document.put("title", doc.getMeta().getTitle());
+ // We use the name of the file if no title has been found in the document metadata
+ document.put("title", FsCrawlerUtil.isNullOrEmpty(doc.getMeta().getTitle()) ? doc.getFile().getFilename() : doc.getMeta().getTitle());
document.put("author", doc.getMeta().getAuthor());
document.put("keywords", doc.getMeta().getKeywords());
document.put("language", doc.getMeta().getLanguage());
|
Use filename as title when no title is available
For Workplace Search, we use the name of the file if no title has been found in the document metadata.
|
dadoonet_fscrawler
|
train
|
b04adc4a5b6c8535aa0d8ce1abe198f001ca48ac
|
diff --git a/spec/oauth2/strategy/assertion_spec.rb b/spec/oauth2/strategy/assertion_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/oauth2/strategy/assertion_spec.rb
+++ b/spec/oauth2/strategy/assertion_spec.rb
@@ -1,3 +1,5 @@
+require 'jwt'
+
RSpec.describe OAuth2::Strategy::Assertion do
subject { client.assertion }
|
Include JWT for spec assertions
|
oauth-xx_oauth2
|
train
|
3e9b10f562009fcf2bf6e9b1318f00154f2fb849
|
diff --git a/rig/machine_control/scp_connection.py b/rig/machine_control/scp_connection.py
index <HASH>..<HASH> 100644
--- a/rig/machine_control/scp_connection.py
+++ b/rig/machine_control/scp_connection.py
@@ -1,11 +1,8 @@
"""A blocking implementation of the SCP protocol.
"""
-import logging
import socket
from . import consts, packets
-logger = logging.getLogger(__name__)
-
class SCPConnection(object):
"""Implements the SCP protocol for communicating with a SpiNNaker chip.
@@ -82,15 +79,6 @@ class SCPConnection(object):
The packet that was received in acknowledgement of the transmitted
packet.
"""
- logger.debug(
- "SCP transmit cmd={}, arg1={}, arg2={}, arg3={} [{}]".format(
- cmd,
- "NA" if arg1 is None else hex(arg1),
- "NA" if arg2 is None else hex(arg2),
- "NA" if arg3 is None else hex(arg3),
- len(data)
- )
- )
self.sock.settimeout(self.default_timeout + timeout)
# Construct the packet that will be sent
diff --git a/rig/tests/conftest.py b/rig/tests/conftest.py
index <HASH>..<HASH> 100644
--- a/rig/tests/conftest.py
+++ b/rig/tests/conftest.py
@@ -1,12 +1,9 @@
import pytest
import _pytest
-import logging
from collections import defaultdict
from toposort import toposort
-logging.basicConfig(level=logging.DEBUG)
-
@pytest.fixture(scope='session')
def spinnaker_ip(request):
|
@mossblaser suggestions.
Removes logging from tests.
|
project-rig_rig
|
train
|
c8034603eae3d532c464808eef980c25f2f37b92
|
diff --git a/sdk/python/sawtooth_sdk/client/stream.py b/sdk/python/sawtooth_sdk/client/stream.py
index <HASH>..<HASH> 100644
--- a/sdk/python/sawtooth_sdk/client/stream.py
+++ b/sdk/python/sawtooth_sdk/client/stream.py
@@ -73,13 +73,16 @@ class _SendReceiveThread(Thread):
message_list = validator_pb2.MessageList()
message_list.ParseFromString(msg_bytes)
for message in message_list.messages:
- try:
- self._futures.set_result(
- message.correlation_id,
- FutureResult(message_type=message.message_type,
- content=message.content))
- except FutureCollectionKeyError:
- # if we are getting an initial message, not a response
+ if message.correlation_id:
+ try:
+ self._futures.set_result(
+ message.correlation_id,
+ FutureResult(message_type=message.message_type,
+ content=message.content))
+ except FutureCollectionKeyError:
+ # if we are getting an initial message, not a response
+ self._recv_queue.put_nowait(message)
+ else:
self._recv_queue.put_nowait(message)
@asyncio.coroutine
@@ -151,8 +154,8 @@ class _SendReceiveThread(Thread):
self._sock.identity = "{}-{}".format(self.__class__.__name__,
os.getpid()).encode('ascii')
self._sock.connect('tcp://' + self._url)
- self._send_queue = asyncio.Queue()
- self._recv_queue = asyncio.Queue()
+ self._send_queue = asyncio.Queue(loop=self._event_loop)
+ self._recv_queue = asyncio.Queue(loop=self._event_loop)
with self._condition:
self._condition.notify_all()
asyncio.ensure_future(self._send_message(), loop=self._event_loop)
|
Ensure same event loop and avoid exceptions
|
hyperledger_sawtooth-core
|
train
|
8704bf3f3e3b30a4f00d9efe41ffd70801f78dec
|
diff --git a/inginious/frontend/pages/course_admin/task_list.py b/inginious/frontend/pages/course_admin/task_list.py
index <HASH>..<HASH> 100644
--- a/inginious/frontend/pages/course_admin/task_list.py
+++ b/inginious/frontend/pages/course_admin/task_list.py
@@ -9,7 +9,7 @@ from collections import OrderedDict
import bson
import web
-from inginious.common.toc import check_toc
+
from inginious.frontend.pages.course_admin.utils import INGIniousAdminPage
class CourseTaskListPage(INGIniousAdminPage):
@@ -27,15 +27,15 @@ class CourseTaskListPage(INGIniousAdminPage):
errors = []
user_input = web.input()
try:
- new_toc = json.loads(user_input["course_structure"])
- valid, message = check_toc(new_toc)
- if valid:
- self.course_factory.update_course_descriptor_element(courseid, 'toc', new_toc)
+ task_dispenser = course.get_task_dispenser()
+ data, msg = task_dispenser.check_dispenser_data(user_input["course_structure"])
+ if data:
+ self.course_factory.update_course_descriptor_element(course.get_id(), 'toc', data)
course, __ = self.get_course_and_check_rights(courseid, allow_all_staff=False) # don't forget to reload the modified course
else:
- errors.append(_("Invalid table of content: ") + message)
- except:
- errors.append(_("Something wrong happened"))
+ errors.append(_("Invalid table of content: ") + msg)
+ except Exception as e:
+ errors.append(_("Something wrong happened: ") + str(e))
for taskid in json.loads(user_input["deleted_tasks"]):
try:
@@ -86,4 +86,4 @@ class CourseTaskListPage(INGIniousAdminPage):
for taskid in tasks:
tasks_data[taskid] = {"name": tasks[taskid].get_name(self.user_manager.session_language()),
"url": self.submission_url_generator(taskid)}
- return self.template_helper.get_renderer().course_admin.task_list(course, course.get_task_dispenser().get_dispenser_data(), tasks_data, errors, validated, self.webdav_host)
\ No newline at end of file
+ return self.template_helper.get_renderer().course_admin.task_list(course, tasks_data, errors, validated, self.webdav_host)
\ No newline at end of file
diff --git a/inginious/frontend/task_dispensers/toc.py b/inginious/frontend/task_dispensers/toc.py
index <HASH>..<HASH> 100644
--- a/inginious/frontend/task_dispensers/toc.py
+++ b/inginious/frontend/task_dispensers/toc.py
@@ -1,5 +1,7 @@
+import json
from collections import OrderedDict
+from inginious.common.toc import check_toc
from inginious.common.toc import SectionsList
from inginious.frontend.task_dispensers import TaskDispenser
@@ -25,15 +27,14 @@ class TableOfContents(TaskDispenser):
return template_helper.get_renderer(with_layout=False).task_dispensers.toc(
course, self._task_list, tasks_data, tag_list, self._toc)
- def update_data(self):
- pass
+ def check_dispenser_data(self, dispenser_data):
+ new_toc = json.loads(dispenser_data)
+ valid, errors = check_toc(new_toc)
+ return new_toc if valid else None, errors
def is_task_accessible(self, username):
pass
- def save_data(self):
- pass
-
def get_ordered_tasks(self):
return OrderedDict(sorted(list(self._task_list.items()), key=lambda t: (self.get_task_order(t[1].get_id()), t[1].get_id())))
diff --git a/inginious/frontend/templates/course_admin/task_list.html b/inginious/frontend/templates/course_admin/task_list.html
index <HASH>..<HASH> 100644
--- a/inginious/frontend/templates/course_admin/task_list.html
+++ b/inginious/frontend/templates/course_admin/task_list.html
@@ -1,4 +1,4 @@
-$def with (course, course_structure, tasks, errors, validated, webdav_host)
+$def with (course, tasks, errors, validated, webdav_host)
$#
$# This file is part of INGInious. See the LICENSE and the COPYRIGHTS files for
|
Move task list structure check to task dispenser
|
UCL-INGI_INGInious
|
train
|
0abeeed4db10ba6789aa6ba83c673cf4b848eec5
|
diff --git a/.gitignore b/.gitignore
index <HASH>..<HASH> 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,3 +15,4 @@ spec/reports
test/tmp
test/version_tmp
tmp
+*.swp
diff --git a/lib/rack/route.rb b/lib/rack/route.rb
index <HASH>..<HASH> 100644
--- a/lib/rack/route.rb
+++ b/lib/rack/route.rb
@@ -37,10 +37,13 @@ module Rack
pattern_match[1].to_sym
end
pattern.gsub(WILDCARD_PATTERN,'(?:/(.*)|)')
- elsif pattern_match = pattern.match(NAMED_SEGMENTS_PATTERN)
- pattern.gsub(NAMED_SEGMENTS_REPLACEMENT_PATTERN, '/(?<\1>[^$/]+)')
else
- pattern
+ p = if pattern_match = pattern.match(NAMED_SEGMENTS_PATTERN)
+ pattern.gsub(NAMED_SEGMENTS_REPLACEMENT_PATTERN, '/(?<\1>[^.$/]+)')
+ else
+ pattern
+ end
+ p + '(?:\.(?<format>.*))?'
end
Regexp.new("\\A#{src}\\Z")
end
@@ -50,13 +53,15 @@ module Rack
raise ArgumentError.new("path is required")
end
- if path_match = path.split(DOT).first.match(regexp)
+ if path_match = path.match(regexp)
params = if @wildcard_name
{ @wildcard_name => path_match[1].to_s.split('/') }
else
Hash[path_match.names.map(&:to_sym).zip(path_match.captures)]
end
+ params.delete(:format) if params.has_key?(:format) && params[:format].nil?
+
if meets_constraints(params)
params
end
diff --git a/lib/rack/version.rb b/lib/rack/version.rb
index <HASH>..<HASH> 100644
--- a/lib/rack/version.rb
+++ b/lib/rack/version.rb
@@ -1,5 +1,5 @@
module Rack
class Router
- VERSION = "0.2.1"
+ VERSION = "0.3.0"
end
end
diff --git a/rack-router.gemspec b/rack-router.gemspec
index <HASH>..<HASH> 100644
--- a/rack-router.gemspec
+++ b/rack-router.gemspec
@@ -3,7 +3,7 @@ Gem::Specification.new do |gem|
gem.authors = ["Paul Barry"]
gem.email = ["mail@paulbarry.com"]
gem.description = %q{A simple router for Rack apps}
- gem.summary = %q{A simple router for rack apps}
+ gem.summary = %q{A simple router for Rack apps}
gem.homepage = "https://github.com/pjb3/rack-router"
gem.files = `git ls-files`.split($\)
@@ -11,5 +11,5 @@ Gem::Specification.new do |gem|
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.name = "rack-router"
gem.require_paths = ["lib"]
- gem.version = "0.2.1"
+ gem.version = "0.3.0"
end
diff --git a/test/route_test.rb b/test/route_test.rb
index <HASH>..<HASH> 100644
--- a/test/route_test.rb
+++ b/test/route_test.rb
@@ -25,7 +25,8 @@ class RouteTest < Test::Unit::TestCase
match "/posts/:id" , "/posts/42" , { :id => "42" }
match "/posts/:id" , "/posts" , nil
match "/:x/:y" , "/a/b" , { :x => "a" , :y => "b" }
- match "/posts/:id" , "/posts/42.html", { :id => "42" }
+ match "/posts" , "/posts.json" , { :format => "json" }
+ match "/posts/:id" , "/posts/42.json", { :id => "42", :format => "json" }
end
def test_match_with_constraints
|
Added support for an optional format param based on the extension of the path
|
pjb3_rack-router
|
train
|
bfcbc5e741a4d7c8fc2d478262c8dcbe3d07143d
|
diff --git a/lib/devices/air-purifier.js b/lib/devices/air-purifier.js
index <HASH>..<HASH> 100644
--- a/lib/devices/air-purifier.js
+++ b/lib/devices/air-purifier.js
@@ -2,6 +2,13 @@
const Device = require('../device');
+/**
+ * Abstraction over a Mi Air Purifier.
+ *
+ * Air Purifiers have a mode that indicates if is on or not. Changing the mode
+ * to `idle` will power off the device, all other modes will power on the
+ * device.
+ */
class AirPurifier extends Device {
static get TYPE() { return 'air-purifier' }
@@ -10,19 +17,32 @@ class AirPurifier extends Device {
this.type = AirPurifier.TYPE;
+ // Properties related to if the device is on and in which mode
this.defineProperty('power', v => v === 'on');
this.defineProperty('mode');
- this.defineProperty('temp_dec', v => v / 10.0);
+
+ // Sensor values reported by the device
+ this.defineProperty('temp_dec', {
+ name: 'temperature',
+ mapper: v => v / 10.0
+ });
this.defineProperty('aqi');
this.defineProperty('humidity');
this.monitor();
}
+ /**
+ * Get if the device is powered on or not.
+ */
get power() {
return this.property('power');
}
+ /**
+ * Switch the power of the device, either turning it off or turning it on
+ * in the last mode used.
+ */
setPower(on) {
return this.call('set_power', [ on ? 'on' : 'off '], {
refresh: true
@@ -30,14 +50,26 @@ class AirPurifier extends Device {
.then(() => on);
}
+ /**
+ * Get the mode that the device is in.
+ */
get mode() {
return this.property('mode');
}
+ /**
+ * Get the modes that are available to set.
+ *
+ * TODO: Does this change with the model?
+ */
get modes() {
return [ 'idle', 'auto', 'silent', 'low', 'medium', 'high' ];
}
+ /**
+ * Set the mode of the device. Setting this to `idle` will power off the
+ * device and any other supported mode will power it on.
+ */
setMode(mode) {
return this.call('set_mode', [ mode ], {
refresh: true
@@ -48,14 +80,23 @@ class AirPurifier extends Device {
});
}
+ /**
+ * Get the current reported temperature in degrees Celsius.
+ */
get temperature() {
- return this.property('temp_dec');
+ return this.property('temperature');
}
+ /**
+ * Get the relative humidity reported by the device.
+ */
get humidity() {
return this.property('humidity');
}
+ /**
+ * Get the calculated Air Quality Index (PM2.5 sensor).
+ */
get aqi() {
return this.property('aqi');
}
|
Updating the air purifier device to map temp_dec differently
|
aholstenson_miio
|
train
|
05197bd861728d37c26e8372589f15c6688043a1
|
diff --git a/app/models/tag.rb b/app/models/tag.rb
index <HASH>..<HASH> 100644
--- a/app/models/tag.rb
+++ b/app/models/tag.rb
@@ -18,6 +18,7 @@ class Tag
index :tag_type
validates_presence_of :tag_id, :title, :tag_type
+ validates_uniqueness_of :tag_id, scope: :tag_type
validates_with TagIdValidator
validates_with SafeHtml
diff --git a/test/models/tag_test.rb b/test/models/tag_test.rb
index <HASH>..<HASH> 100644
--- a/test/models/tag_test.rb
+++ b/test/models/tag_test.rb
@@ -102,6 +102,16 @@ class TagTest < ActiveSupport::TestCase
assert_equal %w{Business Chips Crime Pie}, tags.map(&:title).sort
end
+ test "should be invalid when tag id already exists for the tag type" do
+ Tag.create!(tag_id: "cars", tag_type: "vehicles", title: "Cars")
+ Tag.create!(tag_id: "cars", tag_type: "gary-numan-songs", title: "Cars")
+
+ tag = Tag.new(tag_id: "cars", tag_type: "vehicles")
+
+ refute tag.valid?
+ assert tag.errors.has_key?(:tag_id)
+ end
+
test "should validate with TagIdValidator" do
assert_includes Tag.validators.map(&:class), TagIdValidator
end
|
Validate tag ids are unique within their tag type
We already have a uniqueness index on this, but we should catch this
at validation time too.
|
alphagov_govuk_content_models
|
train
|
84cb89d12bcaf6ddef2047b17a893d3fca0a6239
|
diff --git a/lib/exceptions/backends/rollbar.rb b/lib/exceptions/backends/rollbar.rb
index <HASH>..<HASH> 100644
--- a/lib/exceptions/backends/rollbar.rb
+++ b/lib/exceptions/backends/rollbar.rb
@@ -51,12 +51,9 @@ module Exceptions
if error_class_or_exception_or_string.is_a?(Exception)
error_class_or_exception_or_string
else
- Class.new(StandardError) do
- define_singleton_method(:name) do
- error_class_or_exception_or_string.to_s
- end
- define_method(:message) { error_message.to_s }
- end.new
+ PlaceholderError.new(
+ class_name: error_class_or_exception_or_string.to_s,
+ error_message: error_message.to_s)
end
end
@@ -75,6 +72,21 @@ module Exceptions
end
RollbarExtractor = Object.new.extend(::Rollbar::RequestDataExtractor)
+
+ class PlaceholderError < StandardError
+ def initialize(class_name:, error_message:)
+ @class_name = class_name
+ @error_message = error_message
+ end
+
+ def class
+ OpenStruct.new(name: @class_name)
+ end
+
+ def message
+ @error_message
+ end
+ end
end
end
end
diff --git a/spec/exceptions/backends/rollbar_spec.rb b/spec/exceptions/backends/rollbar_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/exceptions/backends/rollbar_spec.rb
+++ b/spec/exceptions/backends/rollbar_spec.rb
@@ -97,5 +97,20 @@ describe Exceptions::Backends::Rollbar do
wrapped.call(env)
expect(rollbar).to have_received(:scoped).with(request: request_data)
end
+
+ it "passes along the error_class and error_message params" do
+ allow(app).to receive(:call) do
+ backend.notify(error_class: "MyError", error_message: "The message")
+ response
+ end
+ expect(rollbar).to receive(:log) do |level, error, description, extra|
+ expect(level).to eq("error")
+ expect(error.class.name).to eq("MyError")
+ expect(description).to eq("The message")
+ expect(extra).to eq(use_exception_level_filters: true)
+ end
+ wrapped.call(env)
+ expect(rollbar).to have_received(:scoped).with(request: request_data)
+ end
end
end
|
don't gen anonymous classes when reporting manual errors
we were seeing an uptick in OOMs in r<I>-api around the same time we
released <URL>
|
remind101_exceptions
|
train
|
f42c4357fdc4f00d548361f212b687a705ad6774
|
diff --git a/src/modules/abstract-row-col-model/abstract-row-col-model.spec.js b/src/modules/abstract-row-col-model/abstract-row-col-model.spec.js
index <HASH>..<HASH> 100644
--- a/src/modules/abstract-row-col-model/abstract-row-col-model.spec.js
+++ b/src/modules/abstract-row-col-model/abstract-row-col-model.spec.js
@@ -168,6 +168,68 @@ function testAbstractModel(modelCreatorFn, name, lengthName, defaultLength) {
});
});
+ describe('selection', function () {
+ beforeEach(function () {
+ model.add(model.create());
+ });
+
+ it('should be able to select ' + name + 's', function () {
+ model.select(0);
+ expect(model.getSelected()).toEqual([0]);
+ });
+
+ it('should be idempotent', function () {
+ model.select(0);
+ model.select(0);
+ expect(model.getSelected()).toEqual([0]);
+ });
+
+ it('should be able to clear', function () {
+ model.select(0);
+ model.clearSelected();
+ expect(model.getSelected()).toEqual([]);
+ });
+
+ it('should be able to deselect', function () {
+ model.select(0);
+ model.deselect(0);
+ expect(model.getSelected()).toEqual([]);
+ });
+
+ it('should be able to toggle select', function () {
+ model.toggleSelect(0);
+ expect(model.getSelected()).toEqual([0]);
+ model.toggleSelect(0);
+ expect(model.getSelected()).toEqual([]);
+ });
+
+ it('should set a selected flag on the descriptor', function () {
+ model.select(0);
+ expect(model.get(0).selected).toBe(true);
+ });
+
+ it('should fire an event on change', function () {
+ var spy = jasmine.createSpy('selection change');
+ grid.eventLoop.bind('grid-' + name + 'selection-change', spy);
+ model.select(0);
+ expect(spy).toHaveBeenCalled();
+ spy.reset();
+ model.deselect(0);
+ expect(spy).toHaveBeenCalled();
+ spy.reset();
+ model.toggleSelect(0);
+ expect(spy).toHaveBeenCalled();
+ spy.reset();
+ //select two so we can ensure it only gets called once
+ model.add(model.create());
+ model.select(1);
+ spy.reset();
+ model.clearSelected();
+ expect(spy).toHaveBeenCalled();
+ expect(spy.callCount).toBe(1);
+ });
+ });
+
}
diff --git a/src/modules/abstract-row-col-model/index.js b/src/modules/abstract-row-col-model/index.js
index <HASH>..<HASH> 100644
--- a/src/modules/abstract-row-col-model/index.js
+++ b/src/modules/abstract-row-col-model/index.js
@@ -9,12 +9,17 @@ module.exports = function (_grid, name, lengthName, defaultLength) {
var numFixed = 0;
var numHeaders = 0;
var dirtyClean = require('@grid/dirty-clean')(grid);
+ var selected = [];
function setDescriptorsDirty() {
grid.eventLoop.fire('grid-' + name + '-change');
dirtyClean.setDirty();
}
+ function fireSelectionChange() {
+ grid.eventLoop.fire('grid-' + name + 'selection-change');
+ }
+
var api = {
isDirty: dirtyClean.isDirty,
add: function (toAdd) {
@@ -71,6 +76,44 @@ module.exports = function (_grid, name, lengthName, defaultLength) {
numFixed: function () {
return numFixed;
},
+ select: function (index) {
+ var descriptor = api.get(index);
+ if (!descriptor.selected) {
+ descriptor.selected = true;
+ selected.push(index);
+ fireSelectionChange();
+ }
+ },
+ deselect: function (index, dontNotify) {
+ var descriptor = api.get(index);
+ if (descriptor.selected) {
+ descriptor.selected = false;
+ selected.splice(selected.indexOf(index), 1);
+ if (!dontNotify) {
+ fireSelectionChange();
+ }
+ }
+ },
+ toggleSelect: function (index) {
+ var descriptor = api.get(index);
+ if (descriptor.selected) {
+ api.deselect(index);
+ } else {
+ api.select(index);
+ }
+ },
+ clearSelected: function () {
+ var length = selected.length;
+ selected.slice(0).forEach(function (index) {
+ api.deselect(index, true);
+ });
+ if (length) {
+ fireSelectionChange();
+ }
+ },
+ getSelected: function () {
+ return selected;
+ },
create: function () {
var descriptor = {};
var fixed = false;
|
have selection model for rows and cols
|
gridgrid_grid
|
train
|
03c050ddde273de0b2d9cd32d78486b1613a338b
|
diff --git a/test/signal-exit-test.js b/test/signal-exit-test.js
index <HASH>..<HASH> 100644
--- a/test/signal-exit-test.js
+++ b/test/signal-exit-test.js
@@ -20,7 +20,7 @@ describe('signal-exit', function () {
it('receives an exit event when a process is terminated with sigint', function (done) {
exec(process.execPath + ' ./test/fixtures/sigint.js', function (err, stdout, stderr) {
- expect(err.code).to.equal(null)
+ assert(err)
stdout.should.match(/exited with sigint, 130, SIGINT/)
done()
})
@@ -28,7 +28,7 @@ describe('signal-exit', function () {
it('receives an exit event when a process is terminated with sigterm', function (done) {
exec(process.execPath + ' ./test/fixtures/sigterm.js', function (err, stdout, stderr) {
- expect(err.code).to.equal(null)
+ assert(err)
stdout.should.match(/exited with sigterm, 143, SIGTERM/)
done()
})
@@ -44,7 +44,7 @@ describe('signal-exit', function () {
it('does not exit if user handles signal', function (done) {
exec(process.execPath + ' ./test/fixtures/signal-listener.js', function (err, stdout, stderr) {
- assert.equal(err.code, null)
+ assert(err)
assert.equal(stdout, 'exited calledListener=4, code=129, signal="SIGHUP"\n')
done()
})
@@ -52,7 +52,7 @@ describe('signal-exit', function () {
it('ensures that if alwaysLast=true, the handler is run last', function (done) {
exec(process.execPath + ' ./test/fixtures/signal-last.js', function (err, stdout, stderr) {
- assert.equal(err.code, null)
+ assert(err)
stdout.should.match(/first counter=1/)
stdout.should.match(/last counter=2/)
done()
|
interesting how error object differs on OSX vs. linux
|
tapjs_signal-exit
|
train
|
110d040d888b26aec133b327d8d16d674e2610d5
|
diff --git a/conf/global_settings.py b/conf/global_settings.py
index <HASH>..<HASH> 100644
--- a/conf/global_settings.py
+++ b/conf/global_settings.py
@@ -220,7 +220,7 @@ ACCOUNTS_ORG_NAME = 'Example'
#
# default: ALLOW_REGISTRATIONS = False
#
-# ALLOW_REGISTRATIONS = False
+# ALLOW_REGISTRATIONS = True
# Do we allow any logged in user to access all usage information?
#
|
Change commented out value of ALLOW_REGISTRATIONS.
Makes no sense to have it the default value.
Change-Id: I7c0b<I>ebd7c<I>fce<I>a3f1f<I>ceb<I>a9b5b6
|
Karaage-Cluster_karaage
|
train
|
103136d05cea7977e5c2e653e174e91e137892da
|
diff --git a/luigi/scheduler.py b/luigi/scheduler.py
index <HASH>..<HASH> 100644
--- a/luigi/scheduler.py
+++ b/luigi/scheduler.py
@@ -474,6 +474,12 @@ class SimpleTaskState(object):
def get_active_tasks_by_status(self, *statuses):
return itertools.chain.from_iterable(six.itervalues(self._status_tasks[status]) for status in statuses)
+ def get_active_task_count_for_status(self, status):
+ if status:
+ return len(self._status_tasks[status])
+ else:
+ return len(self._tasks)
+
def get_batch_running_tasks(self, batch_id):
assert batch_id is not None
return [
@@ -1335,7 +1341,13 @@ class Scheduler(object):
"""
Query for a subset of tasks by status.
"""
+ if not search:
+ count_limit = max_shown_tasks or self._config.max_shown_tasks
+ pre_count = self._state.get_active_task_count_for_status(status)
+ if limit and pre_count > count_limit:
+ return {'num_tasks': -1 if upstream_status else pre_count}
self.prune()
+
result = {}
upstream_status_table = {} # used to memoize upstream status
if search is None:
diff --git a/luigi/static/visualiser/js/visualiserApp.js b/luigi/static/visualiser/js/visualiserApp.js
index <HASH>..<HASH> 100644
--- a/luigi/static/visualiser/js/visualiserApp.js
+++ b/luigi/static/visualiser/js/visualiserApp.js
@@ -743,7 +743,7 @@ function visualiserApp(luigi) {
var taskCount;
/* Check for integers in tasks. This indicates max-shown-tasks was exceeded */
if (tasks.length === 1 && typeof(tasks[0]) === 'number') {
- taskCount = tasks[0];
+ taskCount = tasks[0] === -1 ? 'unknown' : tasks[0];
missingCategories[category] = {name: category, count: taskCount};
}
else {
diff --git a/test/scheduler_api_test.py b/test/scheduler_api_test.py
index <HASH>..<HASH> 100644
--- a/test/scheduler_api_test.py
+++ b/test/scheduler_api_test.py
@@ -1767,6 +1767,21 @@ class SchedulerApiTest(unittest.TestCase):
self.search_pending('ClassA 2016-02-01 num', {expected})
+ def test_upstream_beyond_limit(self):
+ sch = Scheduler(max_shown_tasks=3)
+ for i in range(4):
+ sch.add_task(worker=WORKER, family='Test', params={'p': str(i)}, task_id='Test_%i' % i)
+ self.assertEqual({'num_tasks': -1}, sch.task_list('PENDING', 'FAILED'))
+ self.assertEqual({'num_tasks': 4}, sch.task_list('PENDING', ''))
+
+ def test_do_not_prune_on_beyond_limit_check(self):
+ sch = Scheduler(max_shown_tasks=3)
+ sch.prune = mock.Mock()
+ for i in range(4):
+ sch.add_task(worker=WORKER, family='Test', params={'p': str(i)}, task_id='Test_%i' % i)
+ self.assertEqual({'num_tasks': 4}, sch.task_list('PENDING', ''))
+ sch.prune.assert_not_called()
+
def test_search_results_beyond_limit(self):
sch = Scheduler(max_shown_tasks=3)
for i in range(4):
|
Speed up task_list when beyond limit (#<I>)
When the number of tasks get into the millions, even refreshing the
visualizer can take a minute or more, causing havoc in the pipeline.
Since all we really want in these situations is the counts, we can skip
the more expensive bits of computation and just return the sizes. This
prevents doing upstream checks, but saves a lot of time.
We may want to institute a higher threshold so we can get upstream
numbers if you're only a little above the limit for returning all
tasks.
|
spotify_luigi
|
train
|
12a18a0321c90e8ffe33be22a8072d30450a70d7
|
diff --git a/resources/views/admin/shop/edit.php b/resources/views/admin/shop/edit.php
index <HASH>..<HASH> 100644
--- a/resources/views/admin/shop/edit.php
+++ b/resources/views/admin/shop/edit.php
@@ -225,7 +225,7 @@ $view->layout();
</label>
<div class="col-lg-4">
- <p class="js-link-to form-control-static" id="link-to"></p>
+ <p class="js-link-to form-control-plaintext" id="link-to"></p>
</div>
<label class="col-lg-6 help-text" for="link-to">
diff --git a/resources/views/admin/shop/popupPicker.php b/resources/views/admin/shop/popupPicker.php
index <HASH>..<HASH> 100644
--- a/resources/views/admin/shop/popupPicker.php
+++ b/resources/views/admin/shop/popupPicker.php
@@ -49,7 +49,7 @@
</label>
</div>
<div class="form-group">
- <p class="form-control-static">
+ <p class="form-control-plaintext">
已选 <span class="js-shop-popup-picker-selected-num">0</span> 个,
可选 <span class="js-shop-popup-picker-max-num">...</span> 个
</p>
|
refactoring: form-control-static => form-control-plaintext
|
miaoxing_shop
|
train
|
9fca7d690fbb1a3018762eb736eced966b121f26
|
diff --git a/src/Storage/EntityManager.php b/src/Storage/EntityManager.php
index <HASH>..<HASH> 100644
--- a/src/Storage/EntityManager.php
+++ b/src/Storage/EntityManager.php
@@ -211,6 +211,16 @@ class EntityManager
}
/**
+ * Gets the DBAL Driver Connection.
+ *
+ * @return Connection
+ */
+ public function getConnection()
+ {
+ return $this->conn;
+ }
+
+ /**
* Gets the Event Manager.
*
* @return EventDispatcherInterface
|
Getter for the DBAL driver connection
|
bolt_bolt
|
train
|
fa5cfa3fa5377f2a8e0af2541dfc5a3a8f9e4b4f
|
diff --git a/src/main/java/me/normanmaurer/niosmtp/transport/DeliveryMode.java b/src/main/java/me/normanmaurer/niosmtp/transport/DeliveryMode.java
index <HASH>..<HASH> 100644
--- a/src/main/java/me/normanmaurer/niosmtp/transport/DeliveryMode.java
+++ b/src/main/java/me/normanmaurer/niosmtp/transport/DeliveryMode.java
@@ -1,3 +1,19 @@
+/**
+* Licensed to niosmtp developers ('niosmtp') under one or more
+* contributor license agreements. See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* niosmtp licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
package me.normanmaurer.niosmtp.transport;
diff --git a/src/main/java/me/normanmaurer/niosmtp/transport/impl/internal/SMTPCallbackHandlerAdapter.java b/src/main/java/me/normanmaurer/niosmtp/transport/impl/internal/SMTPCallbackHandlerAdapter.java
index <HASH>..<HASH> 100644
--- a/src/main/java/me/normanmaurer/niosmtp/transport/impl/internal/SMTPCallbackHandlerAdapter.java
+++ b/src/main/java/me/normanmaurer/niosmtp/transport/impl/internal/SMTPCallbackHandlerAdapter.java
@@ -34,6 +34,9 @@ import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
*/
public class SMTPCallbackHandlerAdapter extends SimpleChannelUpstreamHandler {
+ // Attachment which will get set once we handled the response or exception
+ private final static Object HANDLED = new Object();
+
private final SMTPResponseCallback callback;
private final SMTPClientSession session;
@@ -45,11 +48,14 @@ public class SMTPCallbackHandlerAdapter extends SimpleChannelUpstreamHandler {
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) throws Exception {
Object msg = e.getMessage();
- if (msg instanceof SMTPResponse) {
- callback.onResponse(session, (SMTPResponse) msg);
+ if (msg instanceof SMTPResponse && ctx.getAttachment() == null) {
+
+ ctx.setAttachment(HANDLED);
+ callback.onResponse(session, (SMTPResponse) msg);
// Remove this handler once we handed over the response to the callback
ctx.getChannel().getPipeline().remove(this);
+
} else {
super.messageReceived(ctx, e);
}
@@ -62,8 +68,9 @@ public class SMTPCallbackHandlerAdapter extends SimpleChannelUpstreamHandler {
// See:
//
// https://issues.jboss.org/browse/NETTY-430
- if ((e.getCause() instanceof NullPointerException) == false) {
- e.getCause().printStackTrace();
+ if ((e.getCause() instanceof NullPointerException) == false && ctx.getAttachment() == null) {
+ ctx.setAttachment(HANDLED);
+
callback.onException(session, e.getCause());
// Remove this handler once we handed over the exception to the callback
ctx.getChannel().getPipeline().remove(this);
|
Make sure we only handle one response per callback in all cases
|
normanmaurer_niosmtp
|
train
|
0247ddff18806ad8ea653424e8bf7123c4fbbea2
|
diff --git a/database/ldb/block.go b/database/ldb/block.go
index <HASH>..<HASH> 100644
--- a/database/ldb/block.go
+++ b/database/ldb/block.go
@@ -202,14 +202,9 @@ func (db *LevelDb) ExistsSha(sha *btcwire.ShaHash) (bool, error) {
// returns true if it is present in the database.
// CALLED WITH LOCK HELD
func (db *LevelDb) blkExistsSha(sha *btcwire.ShaHash) (bool, error) {
- _, err := db.getBlkLoc(sha)
- switch err {
- case nil:
- return true, nil
- case leveldb.ErrNotFound, database.ErrBlockShaMissing:
- return false, nil
- }
- return false, err
+ key := shaBlkToKey(sha)
+
+ return db.lDb.Has(key, db.ro)
}
// FetchBlockShaByHeight returns a block hash based on its height in the
diff --git a/database/ldb/tx.go b/database/ldb/tx.go
index <HASH>..<HASH> 100644
--- a/database/ldb/tx.go
+++ b/database/ldb/tx.go
@@ -187,14 +187,9 @@ func (db *LevelDb) ExistsTxSha(txsha *btcwire.ShaHash) (bool, error) {
// existsTxSha returns if the given tx sha exists in the database.o
// Must be called with the db lock held.
func (db *LevelDb) existsTxSha(txSha *btcwire.ShaHash) (bool, error) {
- _, _, _, _, err := db.getTxData(txSha)
- switch err {
- case nil:
- return true, nil
- case leveldb.ErrNotFound:
- return false, nil
- }
- return false, err
+ key := shaTxToKey(txSha)
+
+ return db.lDb.Has(key, db.ro)
}
// FetchTxByShaList returns the most recent tx of the name fully spent or not
|
Use the goleveldb Has() API.
This change converts the leveldb database's ExistsSha() and
ExistsTxSha to use the goleveldb API. Has() only returns if
the key exists and does not need to read the entire value into
memory resulting in less disk i/o and much less GC.
|
btcsuite_btcd
|
train
|
1fdd09896ff14c4c8f6cefa68f55dbd4e77df0d8
|
diff --git a/lib/multirepo/git/git.rb b/lib/multirepo/git/git.rb
index <HASH>..<HASH> 100644
--- a/lib/multirepo/git/git.rb
+++ b/lib/multirepo/git/git.rb
@@ -15,7 +15,12 @@ module MultiRepo
def self.run_in_working_dir(path, git_command, show_output)
full_command = "git -C \"#{path}\" #{git_command}";
+
+ # True fix for the -C flag issue in pre-commit hook where the status command would
+ # fail to provide correct results if a pathspec was provided when performing a commit.
+ # http://thread.gmane.org/gmane.comp.version-control.git/263319/focus=263323
full_command = "unset $(git rev-parse --local-env-vars); " + full_command if Config.instance.running_git_hook
+
run(full_command, show_output)
end
|
Added a comment describing the pre-commit hook bug with a link to the git mailing list reply that provided the correct fix.
|
fortinmike_git-multirepo
|
train
|
6c9bd6be743b836d60fcd6df0b19da42f91cb4d4
|
diff --git a/src/ploneintranet/workspace/tests/base.py b/src/ploneintranet/workspace/tests/base.py
index <HASH>..<HASH> 100644
--- a/src/ploneintranet/workspace/tests/base.py
+++ b/src/ploneintranet/workspace/tests/base.py
@@ -34,7 +34,7 @@ class BaseTestCase(unittest.TestCase):
def logout(self):
z2.logout()
- def add_user_to_workspace(self, username, workspace, groups):
+ def add_user_to_workspace(self, username, workspace, groups=[]):
"""
helper method which adds a user to team and then clears the cache
@@ -43,7 +43,7 @@ class BaseTestCase(unittest.TestCase):
:param workspace: the workspace to add this user
:type workspace: ploneintranet.workspace.workspacefolder
:param groups: the groups to which this user should be added
- :type groups: set
+ :type groups: iterable
:rtype: None
"""
|
we do not have to pass in a group to add a user
|
ploneintranet_ploneintranet.workspace
|
train
|
1eef3bd900a488de39ecdb1dc36d9f66c0c3a017
|
diff --git a/src/org/opencms/db/CmsSecurityManager.java b/src/org/opencms/db/CmsSecurityManager.java
index <HASH>..<HASH> 100644
--- a/src/org/opencms/db/CmsSecurityManager.java
+++ b/src/org/opencms/db/CmsSecurityManager.java
@@ -1096,7 +1096,11 @@ public final class CmsSecurityManager {
byte[] content,
List<CmsProperty> properties) throws CmsException {
- if (existsResource(context, resourcename, CmsResourceFilter.IGNORE_EXPIRATION)) {
+ String checkExistsPath = "/".equals(resourcename) ? "/" : CmsFileUtil.removeTrailingSeparator(resourcename);
+ // We use checkExistsPath instead of resourcename because when creating a folder /foo/bar/, we want to fail
+ // if a file /foo/bar already exists.
+
+ if (existsResource(context, checkExistsPath, CmsResourceFilter.ALL)) {
// check if the resource already exists by name
throw new CmsVfsResourceAlreadyExistsException(org.opencms.db.generic.Messages.get().container(
org.opencms.db.generic.Messages.ERR_RESOURCE_WITH_NAME_ALREADY_EXISTS_1,
@@ -1136,7 +1140,6 @@ public final class CmsSecurityManager {
List<CmsProperty> properties) throws CmsException {
CmsDbContext dbc = m_dbContextFactory.getDbContext(context);
-
CmsResource sibling = null;
try {
checkOfflineProject(dbc);
|
Fixed DB corruption problem with creating folders having the same name
as deleted files.
|
alkacon_opencms-core
|
train
|
c51f9b579664d17e285d08b1930bd8f49987a99b
|
diff --git a/src/web/org/codehaus/groovy/grails/web/metaclass/TagLibMetaClass.java b/src/web/org/codehaus/groovy/grails/web/metaclass/TagLibMetaClass.java
index <HASH>..<HASH> 100644
--- a/src/web/org/codehaus/groovy/grails/web/metaclass/TagLibMetaClass.java
+++ b/src/web/org/codehaus/groovy/grails/web/metaclass/TagLibMetaClass.java
@@ -38,10 +38,10 @@ import org.codehaus.groovy.grails.web.servlet.GrailsApplicationAttributes;
*/
public class TagLibMetaClass extends DelegatingMetaClass implements AdapterMetaClass {
private MetaClass adaptee;
+ private static final Closure EMPTY_TAG_BODY = new Closure(DelegatingMetaClass.class){};
-
- public TagLibMetaClass(MetaClass adaptee) {
+ public TagLibMetaClass(MetaClass adaptee) {
super(adaptee);
this.adaptee = adaptee;
}
@@ -56,7 +56,10 @@ public class TagLibMetaClass extends DelegatingMetaClass implements AdapterMetaC
public MetaClass getAdaptee() {
return adaptee;
}
-
+
+ public void setAdaptee(MetaClass newAdaptee) {
+ this.adaptee = newAdaptee;
+ }
/* (non-Javadoc)
* @see groovy.lang.ProxyMetaClass#invokeMethod(java.lang.Object, java.lang.String, java.lang.Object[])
*/
@@ -80,11 +83,27 @@ public class TagLibMetaClass extends DelegatingMetaClass implements AdapterMetaC
Closure tag = (Closure)original.clone();
tagLibrary.setProperty(TagLibDynamicMethods.OUT_PROPERTY,taglib.getProperty(TagLibDynamicMethods.OUT_PROPERTY));
- return tag.call(arguments);
- }
+
+ switch(tag.getParameterTypes().length) {
+ case 1:
+ switch(arguments.length) {
+ case 1: return tag.call(arguments);
+ case 2: return tag.call(new Object[]{arguments[0]});
+ }
+ break;
+ case 2:
+ switch(arguments.length) {
+ case 1: return tag.call(new Object[]{arguments[0], EMPTY_TAG_BODY});
+ case 2: return tag.call(arguments);
+ }
+ }
+ return null;
+ }
}
- /* (non-Javadoc)
+
+
+ /* (non-Javadoc)
* @see org.codehaus.groovy.grails.commons.metaclass.PropertyAccessProxyMetaClass#getProperty(java.lang.Object, java.lang.String)
*/
public Object getProperty(Object object, String property) {
@@ -106,7 +125,10 @@ public class TagLibMetaClass extends DelegatingMetaClass implements AdapterMetaC
return original.clone();
}
- }
-
-
+ }
+
+
+ public String toString() {
+ return "[TagLibMetaClass (Adapter): "+super.toString()+"]";
+ }
}
|
fixed problem where you have to pass an empty closure when invoking another tag that has no body
git-svn-id: <URL>
|
grails_grails-core
|
train
|
69f4fec007989f2ce47a59abf84a98bd02ed8fac
|
diff --git a/src/java/org/apache/cassandra/tools/NodeCmd.java b/src/java/org/apache/cassandra/tools/NodeCmd.java
index <HASH>..<HASH> 100644
--- a/src/java/org/apache/cassandra/tools/NodeCmd.java
+++ b/src/java/org/apache/cassandra/tools/NodeCmd.java
@@ -539,19 +539,19 @@ public class NodeCmd
switch (command)
{
- case RING : nodeCmd.printRing(System.out); break;
- case INFO : nodeCmd.printInfo(System.out); break;
- case CFSTATS : nodeCmd.printColumnFamilyStats(System.out); break;
- case DECOMMISSION : probe.decommission(); break;
- case LOADBALANCE : probe.loadBalance(); break;
- case CLEARSNAPSHOT : probe.clearSnapshot(); break;
- case TPSTATS : nodeCmd.printThreadPoolStats(System.out); break;
- case VERSION : nodeCmd.printReleaseVersion(System.out); break;
- case COMPACTIONSTATS : nodeCmd.printCompactionStats(System.out); break;
- case DISABLEGOSSIP : probe.stopGossiping(); break;
- case ENABLEGOSSIP : probe.startGossiping(); break;
- case DISABLETHRIFT : probe.stopThriftServer(); break;
- case ENABLETHRIFT : probe.startThriftServer(); break;
+ case RING : complainNonzeroArgs(arguments, command); nodeCmd.printRing(System.out); break;
+ case INFO : complainNonzeroArgs(arguments, command); nodeCmd.printInfo(System.out); break;
+ case CFSTATS : complainNonzeroArgs(arguments, command); nodeCmd.printColumnFamilyStats(System.out); break;
+ case DECOMMISSION : complainNonzeroArgs(arguments, command); probe.decommission(); break;
+ case LOADBALANCE : complainNonzeroArgs(arguments, command); probe.loadBalance(); break;
+ case CLEARSNAPSHOT : complainNonzeroArgs(arguments, command); probe.clearSnapshot(); break;
+ case TPSTATS : complainNonzeroArgs(arguments, command); nodeCmd.printThreadPoolStats(System.out); break;
+ case VERSION : complainNonzeroArgs(arguments, command); nodeCmd.printReleaseVersion(System.out); break;
+ case COMPACTIONSTATS : complainNonzeroArgs(arguments, command); nodeCmd.printCompactionStats(System.out); break;
+ case DISABLEGOSSIP : complainNonzeroArgs(arguments, command); probe.stopGossiping(); break;
+ case ENABLEGOSSIP : complainNonzeroArgs(arguments, command); probe.startGossiping(); break;
+ case DISABLETHRIFT : complainNonzeroArgs(arguments, command); probe.stopThriftServer(); break;
+ case ENABLETHRIFT : complainNonzeroArgs(arguments, command); probe.startThriftServer(); break;
case DRAIN :
try { probe.drain(); }
@@ -647,6 +647,15 @@ public class NodeCmd
System.exit(3);
}
+ private static void complainNonzeroArgs(String[] args, NodeCommand cmd)
+ {
+ if (args.length > 0) {
+ System.err.println("Too many arguments for command '"+cmd.toString()+"'.");
+ printUsage();
+ System.exit(1);
+ }
+ }
+
private static void optionalKSandCFs(NodeCommand nc, String[] cmdArgs, NodeProbe probe) throws InterruptedException, IOException
{
// if there is one additional arg, it's the keyspace; more are columnfamilies
|
Do not allow extra params to nodetool commands to prevent confusion.
Patch by Jon Hermes, reviewed by brandonwilliams for CASSANDRA-<I>
git-svn-id: <URL>
|
Stratio_stratio-cassandra
|
train
|
0bea680c6de7953ded739f68d4a21dc86c6bfe5e
|
diff --git a/src/Illuminate/Database/Eloquent/Relations/HasManyThrough.php b/src/Illuminate/Database/Eloquent/Relations/HasManyThrough.php
index <HASH>..<HASH> 100644
--- a/src/Illuminate/Database/Eloquent/Relations/HasManyThrough.php
+++ b/src/Illuminate/Database/Eloquent/Relations/HasManyThrough.php
@@ -138,7 +138,7 @@ class HasManyThrough extends Relation
{
$table = $this->parent->getTable();
- $this->query->whereIn($table.'.'.$this->firstKey, $this->getKeys($models));
+ $this->query->whereIn($table.'.'.$this->firstKey, $this->getKeys($models, $this->localKey));
}
/**
diff --git a/tests/Database/DatabaseEloquentHasManyThroughTest.php b/tests/Database/DatabaseEloquentHasManyThroughTest.php
index <HASH>..<HASH> 100644
--- a/tests/Database/DatabaseEloquentHasManyThroughTest.php
+++ b/tests/Database/DatabaseEloquentHasManyThroughTest.php
@@ -36,6 +36,36 @@ class DatabaseEloquentHasManyThroughTest extends PHPUnit_Framework_TestCase
$relation->addEagerConstraints([$model1, $model2]);
}
+ public function testEagerConstraintsAreProperlyAddedWithCustomKey()
+ {
+ $builder = m::mock('Illuminate\Database\Eloquent\Builder');
+ $builder->shouldReceive('join')->once()->with('users', 'users.id', '=', 'posts.user_id');
+ $builder->shouldReceive('where')->with('users.country_id', '=', 1);
+
+ $country = m::mock('Illuminate\Database\Eloquent\Model');
+ $country->shouldReceive('getKeyName')->andReturn('id');
+ $country->shouldReceive('offsetGet')->andReturn(1);
+ $country->shouldReceive('getForeignKey')->andReturn('country_id');
+
+ $user = m::mock('Illuminate\Database\Eloquent\Model');
+ $user->shouldReceive('getTable')->andReturn('users');
+ $user->shouldReceive('getQualifiedKeyName')->andReturn('users.id');
+ $post = m::mock('Illuminate\Database\Eloquent\Model');
+ $post->shouldReceive('getTable')->andReturn('posts');
+
+ $builder->shouldReceive('getModel')->andReturn($post);
+
+ $relation = new HasManyThrough($builder, $country, $user, 'country_id', 'user_id', 'not_id');
+ $relation->getQuery()->shouldReceive('whereIn')->once()->with('users.country_id', [3, 4]);
+ $model1 = new EloquentHasManyThroughModelStub;
+ $model1->id = 1;
+ $model1->not_id = 3;
+ $model2 = new EloquentHasManyThroughModelStub;
+ $model2->id = 2;
+ $model2->not_id = 4;
+ $relation->addEagerConstraints([$model1, $model2]);
+ }
+
public function testModelsAreProperlyMatchedToParents()
{
$relation = $this->getRelation();
|
Fix #<I> by considering local key in the relation (#<I>)
|
laravel_framework
|
train
|
15692c71e082bae098ffad8da9d3a86f1ba2d165
|
diff --git a/raven/contrib/django/client.py b/raven/contrib/django/client.py
index <HASH>..<HASH> 100644
--- a/raven/contrib/django/client.py
+++ b/raven/contrib/django/client.py
@@ -18,6 +18,7 @@ from django.template.loader import LoaderOrigin
from raven.base import Client
from raven.contrib.django.utils import get_data_from_template, get_host
+from raven.contrib.django.middleware import SentryLogMiddleware
from raven.utils.wsgi import get_headers, get_environ
__all__ = ('DjangoClient',)
@@ -122,6 +123,9 @@ class DjangoClient(Client):
else:
data = kwargs['data']
+ if request is None:
+ request = getattr(SentryLogMiddleware.thread, 'request', None)
+
is_http_request = isinstance(request, HttpRequest)
if is_http_request:
data.update(self.get_data_from_request(request))
diff --git a/raven/contrib/django/handlers.py b/raven/contrib/django/handlers.py
index <HASH>..<HASH> 100644
--- a/raven/contrib/django/handlers.py
+++ b/raven/contrib/django/handlers.py
@@ -24,9 +24,6 @@ class SentryHandler(BaseSentryHandler):
client = property(_get_client)
def _emit(self, record):
- from raven.contrib.django.middleware import SentryLogMiddleware
-
- # Fetch the request from a threadlocal variable, if available
- request = getattr(record, 'request', getattr(SentryLogMiddleware.thread, 'request', None))
+ request = getattr(record, 'request', None)
return super(SentryHandler, self)._emit(record, request=request)
|
Move request threadlocal extraction for Django into Client
|
getsentry_raven-python
|
train
|
6c5baac11e9a58d8802c27101c53414f7ed218d8
|
diff --git a/gromacs/utilities.py b/gromacs/utilities.py
index <HASH>..<HASH> 100644
--- a/gromacs/utilities.py
+++ b/gromacs/utilities.py
@@ -119,7 +119,13 @@ class AttributeDict(dict):
try:
super(AttributeDict,self).__setitem__(name, value)
except KeyError:
- super(AttributeDict,self).__setattr__(name, value)
+ super(AttributeDict,self).__setattr__(name, value)
+
+ def __getstate__(self):
+ return self
+
+ def __setstate__(self, state):
+ self.update(state)
@contextmanager
def openany(datasource, mode='r'):
@@ -490,7 +496,7 @@ class Timedelta(datetime.timedelta):
def strftime(self, fmt="%d:%H:%M:%S"):
"""Primitive string formatter.
- The only dirctives understood are the following:
+ The only directives understood are the following:
============ ==========================
Directive meaning
============ ==========================
|
AttributeDict can be pickled
|
Becksteinlab_GromacsWrapper
|
train
|
264bb030ef9291c620678b1cced015ebfdf12d7c
|
diff --git a/lib/drizzlepac/adriz_versions.py b/lib/drizzlepac/adriz_versions.py
index <HASH>..<HASH> 100644
--- a/lib/drizzlepac/adriz_versions.py
+++ b/lib/drizzlepac/adriz_versions.py
@@ -10,7 +10,7 @@ else:
__version__ = '1.1.5dev'
__full_version__ = __version__+sversion
-__vdate__ = '23-Oct-2012'
+__vdate__ = '30-Nov-2012'
def main():
print '%s(%s)'%(__version__,__vdate__)
diff --git a/lib/drizzlepac/adrizzle.py b/lib/drizzlepac/adrizzle.py
index <HASH>..<HASH> 100644
--- a/lib/drizzlepac/adrizzle.py
+++ b/lib/drizzlepac/adrizzle.py
@@ -422,6 +422,8 @@ def updateInputDQArray(dqfile,dq_extn,chip, crmaskname,cr_bits_value):
def buildDrizParamDict(configObj,single=True):
chip_pars = ['units','wt_scl','pixfrac','kernel','fillval','bits']
+ cfunc_pars = {'pixfrac':float}
+
# Initialize paramDict with global parameter(s)
paramDict = {'build':configObj['build'],'stepsize':configObj['stepsize'],
'coeffs':configObj['coeffs'],'wcskey':configObj['wcskey']}
@@ -443,7 +445,10 @@ def buildDrizParamDict(configObj,single=True):
else:
paramDict[par] = configObj[section_name][driz_prefix+par]
else:
- paramDict[par] = configObj[section_name][driz_prefix+par]
+ val = configObj[section_name][driz_prefix+par]
+ if par in cfunc_pars:
+ val = cfunc_pars[par](val)
+ paramDict[par] = val
return paramDict
def _setDefaults(configObj={}):
@@ -863,6 +868,9 @@ def run_driz_chip(img,virtual_outputs,chip,output_wcs,outwcs,template,paramDict,
if kw[:3] == 'out':
outputvals[kw] = img.outputNames[kw]
outputvals['exptime'] = chip._exptime
+ outputvals['expstart'] = chip._expstart
+ outputvals['expend'] = chip._expend
+
outputvals['wt_scl_val'] = chip._wtscl
_hdrlist.append(outputvals)
@@ -899,7 +907,6 @@ def run_driz_chip(img,virtual_outputs,chip,output_wcs,outwcs,template,paramDict,
else:
_expscale = img.outputValues['texptime']
np.multiply(_outsci, _expscale, _outsci)
-
#
# Write output arrays to FITS file(s)
#
diff --git a/lib/drizzlepac/outputimage.py b/lib/drizzlepac/outputimage.py
index <HASH>..<HASH> 100644
--- a/lib/drizzlepac/outputimage.py
+++ b/lib/drizzlepac/outputimage.py
@@ -116,9 +116,9 @@ class OutputImage:
_outweight = plist[0]['outSWeight']
_outcontext = plist[0]['outSContext']
# Only report values appropriate for single exposure
- self.texptime = plist[0]['texptime']
- self.expstart = plist[0]['texpstart']
- self.expend = plist[0]['texpend']
+ self.texptime = plist[0]['exptime']
+ self.expstart = plist[0]['expstart']
+ self.expend = plist[0]['expend']
else:
if self.build:
_outdata = plist[0]['outFinal']
|
Update astrodrizzle to correctly report the exposure time, exposure start, and exposure end for the single drizzle products, in addition to insuring the final drizzle values remain correct.
This revision also includes initial changes to safeguard the C code from getting improperly cast values from the configObj input.
git-svn-id: <URL>
|
spacetelescope_drizzlepac
|
train
|
d2528dd03522c553e5d22d6f7ea73460483ecd75
|
diff --git a/xwiki-commons-tools/xwiki-commons-tool-xar/xwiki-commons-tool-xar-plugin/src/main/java/org/xwiki/tool/xar/AbstractVerifyMojo.java b/xwiki-commons-tools/xwiki-commons-tool-xar/xwiki-commons-tool-xar-plugin/src/main/java/org/xwiki/tool/xar/AbstractVerifyMojo.java
index <HASH>..<HASH> 100644
--- a/xwiki-commons-tools/xwiki-commons-tool-xar/xwiki-commons-tool-xar-plugin/src/main/java/org/xwiki/tool/xar/AbstractVerifyMojo.java
+++ b/xwiki-commons-tools/xwiki-commons-tool-xar/xwiki-commons-tool-xar-plugin/src/main/java/org/xwiki/tool/xar/AbstractVerifyMojo.java
@@ -237,7 +237,7 @@ public abstract class AbstractVerifyMojo extends AbstractXARMojo
* <li>If the page name matches one of the regexes defined by the user as content pages then check that the
* default language is {@link #defaultLanguage}.</li>
* <li>If the page name matches one of the regexes defined by the user as technial pages then check that the
- * default language is empty. Matching technical pages have precedence over matching content pages.</li>
+ * default language is empty. Matching content pages have precedence over matching technical pages.</li>
* <li>If there's no other translation of the file then consider default language to be empty to signify that
* it's a technical document. </li>
* <li>If there are other translations ("(prefix).(language).xml" format) then the default language should be
|
XCOMMONS-<I>: Ensure that technical pages are hidden
* Updated the javadoc to reflect the change in priority between content pages and technical pages for determining the language that a page should have
|
xwiki_xwiki-commons
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.