hash (stringlengths: 40 to 40)
diff (stringlengths: 131 to 114k)
message (stringlengths: 7 to 980)
project (stringlengths: 5 to 67)
split (stringclasses: 1 value)
0ca8d3c4c5d823db692af234a162a430ee35f311
diff --git a/src/AspectMock/Intercept/BeforeMockTransformer.php b/src/AspectMock/Intercept/BeforeMockTransformer.php index <HASH>..<HASH> 100644 --- a/src/AspectMock/Intercept/BeforeMockTransformer.php +++ b/src/AspectMock/Intercept/BeforeMockTransformer.php @@ -11,7 +11,7 @@ class BeforeMockTransformer extends WeavingTransformer protected $before = " if ((\$__am_res = __amock_before(\$this, __CLASS__, __FUNCTION__, array(%s), false)) !== __AM_CONTINUE__) return \$__am_res; "; protected $beforeStatic = " if ((\$__am_res = __amock_before(get_called_class(), __CLASS__, __FUNCTION__, array(%s), true)) !== __AM_CONTINUE__) return \$__am_res; "; - public function transform(StreamMetaData $metadata) + public function transform(StreamMetaData $metadata): string { $result = self::RESULT_ABSTAIN; $reflectedFile = new ReflectionFile($metadata->uri, $metadata->syntaxTree); diff --git a/src/AspectMock/Kernel.php b/src/AspectMock/Kernel.php index <HASH>..<HASH> 100644 --- a/src/AspectMock/Kernel.php +++ b/src/AspectMock/Kernel.php @@ -58,7 +58,7 @@ class Kernel extends AspectKernel include FilterInjectorTransformer::rewrite($file); } - protected function registerTransformers() + protected function registerTransformers(): array { $cachePathManager = $this->getContainer()->get('aspect.cache.path.manager');;
Forward compatibility with goaop/framework <I>.
Codeception_AspectMock
train
cd3c2fd0b8414c029e48b4bd3cc86bd12797096c
diff --git a/test.py b/test.py index <HASH>..<HASH> 100755 --- a/test.py +++ b/test.py @@ -26,6 +26,7 @@ import os import os.path import platform import sys +import tempfile import textwrap import threading import unittest @@ -2447,9 +2448,11 @@ class PgnTestCase(unittest.TestCase): class CraftyTestCase(unittest.TestCase): def setUp(self): + self.tmpdir = tempfile.TemporaryDirectory(prefix="crafty") try: - self.engine = chess.xboard.popen_engine("crafty") + self.engine = chess.xboard.popen_engine("crafty", cwd=self.tmpdir.name) except OSError: + self.tmpdir.cleanup() self.skipTest("need crafty") self.engine.xboard() @@ -2459,6 +2462,8 @@ class CraftyTestCase(unittest.TestCase): if self.engine.is_alive(): self.engine.quit() + self.tmpdir.cleanup() + def test_undo(self): self.engine.new() board = chess.Board() @@ -3243,14 +3248,15 @@ class EngineTestCase(unittest.TestCase): def test_crafty_play_to_mate(self): logging.disable(logging.WARNING) try: - with chess.engine.SimpleEngine.popen_xboard("crafty") as engine: - board = chess.Board("2bqkbn1/2pppp2/np2N3/r3P1p1/p2N2B1/5Q2/PPPPKPP1/RNB2r2 w KQkq - 0 1") - limit = chess.engine.Limit(depth=10) - while not board.is_game_over() and len(board.move_stack) < 5: - result = engine.play(board, limit, ponder=True) - board.push(result.move) - self.assertTrue(board.is_checkmate()) - engine.quit() + with tempfile.TemporaryDirectory(prefix="crafty") as tmpdir: + with chess.engine.SimpleEngine.popen_xboard("crafty", cwd=tmpdir) as engine: + board = chess.Board("2bqkbn1/2pppp2/np2N3/r3P1p1/p2N2B1/5Q2/PPPPKPP1/RNB2r2 w KQkq - 0 1") + limit = chess.engine.Limit(depth=10) + while not board.is_game_over() and len(board.move_stack) < 5: + result = engine.play(board, limit, ponder=True) + board.push(result.move) + self.assertTrue(board.is_checkmate()) + engine.quit() finally: logging.disable(logging.NOTSET) @@ -3258,20 +3264,22 @@ class EngineTestCase(unittest.TestCase): def test_crafty_analyse(self): logging.disable(logging.WARNING) try: - with chess.engine.SimpleEngine.popen_xboard("crafty") as engine: - board = chess.Board("2bqkbn1/2pppp2/np2N3/r3P1p1/p2N2B1/5Q2/PPPPKPP1/RNB2r2 w KQkq - 0 1") - limit = chess.engine.Limit(depth=7, time=2.0) - info = engine.analyse(board, limit) - self.assertTrue(info["score"] > chess.engine.Cp(1000)) - engine.quit() + with tempfile.TemporaryDirectory(prefix="crafty") as tmpdir: + with chess.engine.SimpleEngine.popen_xboard("crafty", cwd=tmpdir) as engine: + board = chess.Board("2bqkbn1/2pppp2/np2N3/r3P1p1/p2N2B1/5Q2/PPPPKPP1/RNB2r2 w KQkq - 0 1") + limit = chess.engine.Limit(depth=7, time=2.0) + info = engine.analyse(board, limit) + self.assertTrue(info["score"] > chess.engine.Cp(1000)) + engine.quit() finally: logging.disable(logging.NOTSET) @catchAndSkip(FileNotFoundError, "need crafty") def test_crafty_ping(self): - with chess.engine.SimpleEngine.popen_xboard("crafty") as engine: - engine.ping() - engine.quit() + with tempfile.TemporaryDirectory(prefix="crafty") as tmpdir: + with chess.engine.SimpleEngine.popen_xboard("crafty", cwd=tmpdir) as engine: + engine.ping() + engine.quit() def test_uci_ping(self): @asyncio.coroutine
Run crafty tests in a temporary directory
niklasf_python-chess
train
9b641ef7cda17abdfce8994bfae970b303706d0f
diff --git a/lib/ohai/plugins/linux/platform.rb b/lib/ohai/plugins/linux/platform.rb index <HASH>..<HASH> 100644 --- a/lib/ohai/plugins/linux/platform.rb +++ b/lib/ohai/plugins/linux/platform.rb @@ -105,7 +105,7 @@ Ohai.plugin(:Platform) do platform_family "debian" when /fedora/ platform_family "fedora" - when /oracle/, /centos/, /redhat/, /scientific/, /enterpriseenterprise/, /amazon/, /xenserver/, /cloudlinux/ # Note that 'enterpriseenterprise' is oracle's LSB "distributor ID" + when /oracle/, /centos/, /redhat/, /scientific/, /enterpriseenterprise/, /amazon/, /xenserver/, /cloudlinux/, /ibm_powerkvm/ # Note that 'enterpriseenterprise' is oracle's LSB "distributor ID" platform_family "rhel" when /suse/ platform_family "suse" diff --git a/spec/unit/plugins/linux/platform_spec.rb b/spec/unit/plugins/linux/platform_spec.rb index <HASH>..<HASH> 100644 --- a/spec/unit/plugins/linux/platform_spec.rb +++ b/spec/unit/plugins/linux/platform_spec.rb @@ -93,6 +93,14 @@ describe Ohai::System, "Linux plugin platform" do @plugin.run @plugin[:platform].should == "scientific" end + + it "should set platform to ibm_powerkvm and platform_family to rhel when [:lsb][:id] contains IBM_PowerKVM" do + @plugin[:lsb][:id] = "IBM_PowerKVM" + @plugin[:lsb][:release] = "2.1" + @plugin.run + @plugin[:platform].should == "ibm_powerkvm" + @plugin[:platform_family].should == "rhel" + end end describe "on debian" do @@ -218,6 +226,12 @@ describe Ohai::System, "Linux plugin platform" do @plugin.run @plugin[:platform_family].should == "rhel" end + + it "should set the platform_family to redhat if the LSB name is ibm-ish" do + @plugin[:lsb][:id] = "IBM_PowerKVM" + @plugin.run + @plugin[:platform_family].should == "rhel" + end end describe "without lsb_release results" do
Add 'ibm_powerkvm' platform This change will allow Ohai to detect IBM PowerKVM specific operating systems and map them to RHEL platform families. Fixes: OHAI-<I> Obvious Fix
chef_ohai
train
89879676a19b2096e931be11fc5555bd7859d1d2
diff --git a/rapidoid-http/src/main/java/com/rapidoid/http/WebExchange.java b/rapidoid-http/src/main/java/com/rapidoid/http/WebExchange.java index <HASH>..<HASH> 100644 --- a/rapidoid-http/src/main/java/com/rapidoid/http/WebExchange.java +++ b/rapidoid-http/src/main/java/com/rapidoid/http/WebExchange.java @@ -28,6 +28,8 @@ public interface WebExchange extends Ctx { Data verb(); + Data uri(); + Data path(); Data subpath(); diff --git a/rapidoid-http/src/main/java/com/rapidoid/http/WebExchangeImpl.java b/rapidoid-http/src/main/java/com/rapidoid/http/WebExchangeImpl.java index <HASH>..<HASH> 100644 --- a/rapidoid-http/src/main/java/com/rapidoid/http/WebExchangeImpl.java +++ b/rapidoid-http/src/main/java/com/rapidoid/http/WebExchangeImpl.java @@ -35,6 +35,7 @@ public class WebExchangeImpl extends Exchange implements WebExchange { static final int HEADER = 2; static final int BODY_PART = 3; + final Range uri = new Range(); final Range verb = new Range(); final Range path = new Range(); final Range query = new Range(); @@ -60,6 +61,7 @@ public class WebExchangeImpl extends Exchange implements WebExchange { private final Range subpathRange = new Range(); private final Data _body; + private final Data _uri; private final Data _verb; private final Data _path; private final Data _subpath; @@ -72,6 +74,7 @@ public class WebExchangeImpl extends Exchange implements WebExchange { reset(); this._body = data(body); + this._uri = data(uri); this._verb = data(verb); this._path = decodedData(path); this._subpath = decodedData(subpathRange); @@ -88,6 +91,7 @@ public class WebExchangeImpl extends Exchange implements WebExchange { isKeepAlive = false; verb.reset(); + uri.reset(); path.reset(); query.reset(); protocol.reset(); @@ -129,6 +133,11 @@ public class WebExchangeImpl extends Exchange implements WebExchange { } @Override + public Data uri() { + return _uri; + } + + @Override public Data verb() { return _verb; }
Added URI to web request structure.
rapidoid_rapidoid
train
442f40dab6b1e0dbe50283c0aad083e5323ca401
diff --git a/telemetry/telemetry/core/platform/profiler/netlog_profiler.py b/telemetry/telemetry/core/platform/profiler/netlog_profiler.py index <HASH>..<HASH> 100644 --- a/telemetry/telemetry/core/platform/profiler/netlog_profiler.py +++ b/telemetry/telemetry/core/platform/profiler/netlog_profiler.py @@ -36,6 +36,8 @@ class NetLogProfiler(profiler.Profiler): if self._platform_backend.GetOSName() == 'android': host_output_file = '%s.json' % self._output_path self._browser_backend.adb.Adb().Adb().Pull(output_file, host_output_file) + # Clean the device + self._browser_backend.adb.Adb().RunShellCommand('rm %s' % output_file) output_file = host_output_file print 'Net-internals log saved as %s' % output_file print 'To view, open in chrome://net-internals'
[Telemetry] Remove the log file from the android device after pulling it. Review URL: <URL>
catapult-project_catapult
train
abc24af2c3611fae5628a22360dd59ab9ef56292
diff --git a/cerberus/__init__.py b/cerberus/__init__.py index <HASH>..<HASH> 100644 --- a/cerberus/__init__.py +++ b/cerberus/__init__.py @@ -8,6 +8,6 @@ """ -__version__ = "0.0.3" +__version__ = "0.0.4" from cerberus import Validator, ValidationError, SchemaError
Bumped version to <I>
pyeve_cerberus
train
80fa9c4c3e74d09807f71606d993b6d9fb45137f
diff --git a/tests/test_select.py b/tests/test_select.py index <HASH>..<HASH> 100644 --- a/tests/test_select.py +++ b/tests/test_select.py @@ -74,7 +74,7 @@ class TheModel(RawlBase): res = self.select( "SELECT {0}" - " FROM rawl"RawlConnection + " FROM rawl" " WHERE rawl_id={1}", self.columns, rawl_id)
Some stray text in the source.
mikeshultz_rawl
train
f17222c13dc54fd4485ee66562e8efd76de1eeb5
diff --git a/browserscripts/pageinfo/longTask.js b/browserscripts/pageinfo/longTask.js index <HASH>..<HASH> 100644 --- a/browserscripts/pageinfo/longTask.js +++ b/browserscripts/pageinfo/longTask.js @@ -1,28 +1,25 @@ (function(minLength) { - const supported = PerformanceObserver.supportedEntryTypes; - if (!supported || supported.indexOf('longtask') === -1) { - return; - } - const longTaskObserver = new PerformanceObserver(list => {}); - longTaskObserver.observe({type: 'longtask', buffered: true}); - const cleaned = []; - for (let entry of longTaskObserver.takeRecords()) { - if (entry.duration >= minLength) { - const e = {}; - e.duration = entry.duration; - e.name = entry.name; - e.startTime = entry.startTime; - e.attribution = []; - for (let at of entry.attribution) { - const a = {}; - a.containerId = at.containerId; - a.containerName = at.containerName; - a.containerSrc = at.containerSrc; - a.containerType = at.containerType; - e.attribution.push(a); + if (window.__bt_longtask) { + const cleaned = []; + for (let event of window.__bt_longtask.e) { + if (event.duration >= minLength) { + const e = {}; + e.duration = event.duration; + e.name = event.name; + e.startTime = event.startTime; + e.attribution = []; + for (let at of event.attribution) { + const a = {}; + a.containerId = at.containerId; + a.containerName = at.containerName; + a.containerSrc = at.containerSrc; + a.containerType = at.containerType; + e.attribution.push(a); + } + cleaned.push(e); } - cleaned.push(e); - } + } + window.__bt_longtask.e = []; + return cleaned; } - return cleaned; -})(arguments[arguments.length - 1]); +})(arguments[arguments.length - 1]); \ No newline at end of file diff --git a/lib/chrome/chromeDevtoolsProtocol.js b/lib/chrome/chromeDevtoolsProtocol.js index <HASH>..<HASH> 100644 --- a/lib/chrome/chromeDevtoolsProtocol.js +++ b/lib/chrome/chromeDevtoolsProtocol.js @@ -102,6 +102,19 @@ class ChromeDevtoolsProtocol { }); } + async setupLongTask() { + const source = ` + !function() { + let lt = window.__bt_longtask={e:[]}; + lt.o = new PerformanceObserver(function(a) { + lt.e=lt.e.concat(a.getEntries()); + }); + lt.o.observe({entryTypes:['longtask']}); + }();`; + + return this.cdpClient.Page.addScriptToEvaluateOnNewDocument({ source }); + } + async setBasicAuth(basicAuth) { const parts = basicAuth.split('@'); const basic = 'Basic ' + btoa(parts[0] + ':' + parts[1]); diff --git a/lib/chrome/webdriver/chromium.js b/lib/chrome/webdriver/chromium.js index <HASH>..<HASH> 100644 --- a/lib/chrome/webdriver/chromium.js +++ b/lib/chrome/webdriver/chromium.js @@ -97,6 +97,8 @@ class Chromium { await this.cdpClient.setupCPUThrottling(this.chrome.CPUThrottlingRate); } + await this.cdpClient.setupLongTask(); + // Make sure we clear the console log // Hopefully one time is enough? return runner.getLogs(Type.BROWSER);
Revert to the old way of getting long tasks #<I> (#<I>)
sitespeedio_browsertime
train
dc56e7db638955fea252e30417fd09982ce842d6
diff --git a/tests/jsgrid.tests.js b/tests/jsgrid.tests.js index <HASH>..<HASH> 100755 --- a/tests/jsgrid.tests.js +++ b/tests/jsgrid.tests.js @@ -1489,8 +1489,12 @@ $(function() { test("pager functionality", function() { var $element = $("#jsGrid"), pager, + pageChangedArgs, grid = new Grid($element, { data: [{}, {}, {}, {}, {}, {}, {}, {}, {}], + onPageChanged: function(args) { + pageChangedArgs = args; + }, paging: true, pageSize: 2, pageButtonCount: 3 @@ -1512,6 +1516,7 @@ $(function() { ok(pager.find("." + grid.pageClass).eq(1).hasClass(grid.currentPageClass), "second page is current"); equal(pager.find("." + grid.pageClass).length, 3, "three pages displayed"); equal(pager.find("." + grid.pagerNavButtonClass).length, 5, "five nav buttons displayed: First Prev Next Last and ..."); + equal(pageChangedArgs.pageIndex, 2, "onPageChanged callback provides pageIndex in arguments"); grid.showNextPages(); equal(grid._firstDisplayingPage, 3, "navigate by pages forward");
Tests: Add test for onPageChanged callback
tabalinas_jsgrid
train
92da4ab33421fba278d3f43503349ef7980e0dce
diff --git a/src/PHPMailer.php b/src/PHPMailer.php index <HASH>..<HASH> 100644 --- a/src/PHPMailer.php +++ b/src/PHPMailer.php @@ -4147,7 +4147,7 @@ class PHPMailer { $ret = ['dirname' => '', 'basename' => '', 'extension' => '', 'filename' => '']; $pathinfo = []; - if (preg_match('#^(.*?)[\\\\/]*(([^/\\\\]*?)(\.([^\.\\\\/]+?)|))[\\\\/\.]*$#im', $path, $pathinfo)) { + if (preg_match('#^(.*?)[\\\\/]*(([^/\\\\]*?)(\.([^.\\\\/]+?)|))[\\\\/.]*$#m', $path, $pathinfo)) { if (array_key_exists(1, $pathinfo)) { $ret['dirname'] = $pathinfo[1]; }
Don't need case insensitivity, don't need to escape . in char classes
PHPMailer_PHPMailer
train
14af1fcb5325c37c71b37543ffbb65b976f35774
diff --git a/dicelib.rb b/dicelib.rb index <HASH>..<HASH> 100644 --- a/dicelib.rb +++ b/dicelib.rb @@ -247,13 +247,14 @@ module Dice attr :count attr :sides attr :parts + attr :options def initialize(part) @total = nil @tally = [] @value = part - @count = part[:xdx][:count] - @sides = part[:xdx][:sides] + @count = part[:count] + @sides = part[:sides] # Our Default Options @options = { @@ -263,18 +264,7 @@ module Dice :reroll => 0 } - if part.has_key?(:options) - @options.update(part[:options]) - - # Negate :drop if it's non-zero, since - # in #roll, it's used as a negative index - # for an array slice. - @options[:drop] = -(@options[:drop]) if @options[:drop] > 0 - - # Check for nil :explode and set it - # to @sides. - @options[:explode] = @sides if @options[:explode].nil? - end + @options.update(part[:options]) if part.has_key?(:options) end # Checks to see if this instance has rolled yet @@ -286,7 +276,7 @@ module Dice # Rolls a single die from the xDx string. def roll_die() num = 0 - reroll = (@options[:reroll] >= self.sides) ? 0 : @options[:reroll] + reroll = @options[:reroll] while num <= reroll num = rand(self.sides) + 1 @@ -297,20 +287,17 @@ module Dice def roll results = [] + explode = @options[:explode] self.count.times do roll = self.roll_die() results.push(roll) - if @options[:explode] > 0 - explode_limit = 0 - - while roll >= @options[:explode] + unless explode.zero? + while roll >= explode roll = self.roll_die() results.push(roll) - explode_limit += 1 - break if explode_limit >= ExplodeLimit end end end @@ -512,7 +499,15 @@ module Dice end val = part.last - val = val.is_a?(Hash) ? RollPart.new(val) : StaticPart.new(val) + + # If the value is a hash, it's an :xdx hash. + # Normalize it. + if val.is_a?(Hash) + xdx = normalize_xdx(val) + val = RollPart.new(xdx) + else + val = StaticPart.new(val) + end part = [op, val] end @@ -524,10 +519,57 @@ module Dice # This further massages the xDx hashes. Mostly, # this now just deletes empty :options values. def self.normalize_xdx(xdx) - if xdx[:options].to_s.strip.empty? + count = xdx[:xdx][:count] + sides = xdx[:xdx][:sides] + notes = [] + + # Default to at least 1 die. + count = 1 if count.zero? or count.nil? + + # Set the :count and :sides keys directly + # and get ride of the :xdx sub-hash. + xdx[:count] = count + xdx[:sides] = sides + xdx.delete(:xdx) + + if xdx[:options].empty? xdx.delete(:options) + else + # VALIDATE ALL THE OPTIONS!!! + + # Prevent Explosion abuse. + if xdx[:options].has_key?(:explode) + explode = xdx[:options][:explode] + if explode.nil? or explode.zero? or explode == 1 + xdx[:options][:explode] = sides + notes.push("Explode set to #{sides}") + end + end + + # Prevent Reroll abuse. + if xdx[:options].has_key?(:reroll) and xdx[:options][:reroll] >= sides + xdx[:options][:reroll] = 0 + notes.push("Reroll reset to 0.") + end + + # Make sure there are enough dice to + # handle both Drop and Keep values. + # If not, both are reset to 0. Harsh. + drop = xdx[:options][:drop] || 0 + keep = xdx[:options][:keep] || 0 + + if (drop + keep) >= count + xdx[:options][:drop] = 0 + xdx[:options][:keep] = 0 + notes.push("Drop and Keep Conflict. Both reset to 0.") + end + + # Negate :drop. See why in RollPart#roll. + xdx[:options][:drop] = -(drop) end + xdx[:options][:notes] = notes unless notes.empty? + return xdx end
Option validations and a couple minor bug fixes.
syntruth_Dice-Bag
train
c4bf821768cbb8ac74e48c39c18d3dac092b4656
diff --git a/tests/lib/Persistence/Legacy/Content/FieldValue/Converter/XmlTextTest.php b/tests/lib/Persistence/Legacy/Content/FieldValue/Converter/XmlTextTest.php index <HASH>..<HASH> 100644 --- a/tests/lib/Persistence/Legacy/Content/FieldValue/Converter/XmlTextTest.php +++ b/tests/lib/Persistence/Legacy/Content/FieldValue/Converter/XmlTextTest.php @@ -7,14 +7,14 @@ * @version //autogentag// */ -namespace ezp\Persistence\Storage\Legacy\Tests\Content\FieldValue\Converter; +namespace eZ\Publish\Core\Persistence\Legacy\Tests\Content\FieldValue\Converter; use ezp\Content\FieldType\XmlText\Value as XmlTextValue, ezp\Content\FieldType\FieldSettings, - ezp\Persistence\Content\FieldValue, - ezp\Persistence\Storage\Legacy\Content\StorageFieldValue, - ezp\Persistence\Storage\Legacy\Content\StorageFieldDefinition, - ezp\Persistence\Storage\Legacy\Content\FieldValue\Converter\XmlText as XmlTextConverter, - ezp\Persistence\Content\Type\FieldDefinition as PersistenceFieldDefinition, + eZ\Publish\SPI\Persistence\Content\FieldValue, + eZ\Publish\Core\Persistence\Legacy\Content\StorageFieldValue, + eZ\Publish\Core\Persistence\Legacy\Content\StorageFieldDefinition, + eZ\Publish\Core\Persistence\Legacy\Content\FieldValue\Converter\XmlText as XmlTextConverter, + eZ\Publish\SPI\Persistence\Content\Type\FieldDefinition as PersistenceFieldDefinition, PHPUnit_Framework_TestCase, DOMDocument; @@ -27,7 +27,7 @@ use ezp\Content\FieldType\XmlText\Value as XmlTextValue, class XmlTextTest extends PHPUnit_Framework_TestCase { /** - * @var \ezp\Persistence\Storage\Legacy\Content\FieldValue\Converter\Author + * @var \eZ\Publish\Core\Persistence\Legacy\Content\FieldValue\Converter\Author */ protected $converter; @@ -55,7 +55,7 @@ class XmlTextTest extends PHPUnit_Framework_TestCase } /** - * @covers \ezp\Persistence\Storage\Legacy\Content\FieldValue\Converter\XmlText::toStorageValue + * @covers \eZ\Publish\Core\Persistence\Legacy\Content\FieldValue\Converter\XmlText::toStorageValue */ public function testToStorageValue() { @@ -91,7 +91,7 @@ class XmlTextTest extends PHPUnit_Framework_TestCase } /** - * @covers \ezp\Persistence\Storage\Legacy\Content\FieldValue\Converter\XmlText::toFieldValue + * @covers \eZ\Publish\Core\Persistence\Legacy\Content\FieldValue\Converter\XmlText::toFieldValue */ public function testToFieldValue() {
Refactor Persistence code to new structure
ezsystems_ezplatform-xmltext-fieldtype
train
bcbe2678b1f653866f56e23ae1d0acb2b77e6e08
diff --git a/library/src/main/java/android/view/IconicsButton.java b/library/src/main/java/android/view/IconicsButton.java index <HASH>..<HASH> 100644 --- a/library/src/main/java/android/view/IconicsButton.java +++ b/library/src/main/java/android/view/IconicsButton.java @@ -21,19 +21,23 @@ import android.util.AttributeSet; import android.widget.Button; import com.mikpenz.iconics.Iconics; +import com.mikpenz.iconics.typeface.FontAwesome; public class IconicsButton extends Button { public IconicsButton(Context context) { super(context); + setTypeface(new FontAwesome().getTypeface(context)); } public IconicsButton(Context context, AttributeSet attrs) { super(context, attrs); + setTypeface(new FontAwesome().getTypeface(context)); } public IconicsButton(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); + setTypeface(new FontAwesome().getTypeface(context)); } @Override
* It seems buttons only allow a single font. * temp fix #1
mikepenz_Android-Iconics
train
b132c1b3d35d1dbe2f61fd9b6c0f148afafc1c57
diff --git a/tools/nj_config.py b/tools/nj_config.py index <HASH>..<HASH> 100644 --- a/tools/nj_config.py +++ b/tools/nj_config.py @@ -19,10 +19,10 @@ def julia_base_from_where_julia(): DEVNULL = open(os.devnull, 'w') where_julia = subprocess.Popen(["where","julia.exe"],stdout=subprocess.PIPE,stderr=DEVNULL).communicate()[0]; if len(where_julia) > 0: - real_path = os.path.realpath(where_julia.split()[0]) + real_path = os.path.realpath(where_julia.rstrip(os.linesep)) if real_path: - head,tail = os.path.split(real_path) - path,tail = os.path.split(head) + dirname = os.path.dirname(real_path) + path = os.path.split(dirname)[0] return path def julia_base_from_home_directory():
Allow spaces in path to julia on Windows
waTeim_node-julia
train
20a2bae1d37fda07b41f869d702325eb13a1406b
diff --git a/packages/bazel/src/modify_tsconfig.js b/packages/bazel/src/modify_tsconfig.js index <HASH>..<HASH> 100644 --- a/packages/bazel/src/modify_tsconfig.js +++ b/packages/bazel/src/modify_tsconfig.js @@ -18,24 +18,33 @@ function main(args) { if (args.length < 3) { console.error('Usage: $0 input.tsconfig.json output.tsconfig.json newRoot binDir'); } - [input, output, newRoot, binDir] = args; + const [input, output, newRoot, binDir] = args; const data = JSON.parse(fs.readFileSync(input, {encoding: 'utf-8'})); - data['compilerOptions']['target'] = 'es5'; - data['bazelOptions']['es5Mode'] = true; - data['compilerOptions']['outDir'] = path.posix.join(data['compilerOptions']['outDir'], newRoot); + const {compilerOptions, bazelOptions} = data; + + // Relative path to the execroot that refers to the directory for the ES5 output files. + const newOutputBase = path.posix.join(binDir, newRoot); + + // Update the compiler options to produce ES5 output. Also ensure that the new ES5 output + // directory is used. + compilerOptions['target'] = 'es5'; + compilerOptions['outDir'] = path.posix.join(compilerOptions['outDir'], newRoot); + + bazelOptions['es5Mode'] = true; + bazelOptions['tsickleExternsPath'] = + bazelOptions['tsickleExternsPath'].replace(binDir, newOutputBase); + if (data['angularCompilerOptions']) { - // Relative path to the execroot that refers to the directory for the ES5 output files. - const newOutputBase = path.posix.join(binDir, newRoot); + const {angularCompilerOptions} = data; // Don't enable tsickle's closure conversions - data['angularCompilerOptions']['annotateForClosureCompiler'] = false; + angularCompilerOptions['annotateForClosureCompiler'] = false; // Note: It's important that the "expectedOut" is only modified in a way that still // keeps posix normalized paths. Otherwise this could cause unexpected behavior because // ngc-wrapped is expecting POSIX paths and the TypeScript Bazel rules by default only pass // POSIX paths as well. - data['angularCompilerOptions']['expectedOut'] = - data['angularCompilerOptions']['expectedOut'].map( - f => f.replace(/\.closure\.js$/, '.js').replace(binDir, newOutputBase)); + angularCompilerOptions['expectedOut'] = angularCompilerOptions['expectedOut'].map( + f => f.replace(/\.closure\.js$/, '.js').replace(binDir, newOutputBase)); } fs.writeFileSync(output, JSON.stringify(data)); }
fix(bazel): do not throw error when writing tsickle externs (#<I>) * Currently when building the ES5 and ES<I> output, `ngc_wrapped` will fail because it tries to `fs.openSync` the tsickle output file at the same time. This causes a runtime exception on Windows and can be fixed by just writing the externs for ES5 mode to the proper ES5 "output root". PR Close #<I>
angular_angular
train
c9564f188d97d61e4204ba81da86587a21045dc0
diff --git a/lib/readers/elasticsearch_date_range/slicer.js b/lib/readers/elasticsearch_date_range/slicer.js index <HASH>..<HASH> 100644 --- a/lib/readers/elasticsearch_date_range/slicer.js +++ b/lib/readers/elasticsearch_date_range/slicer.js @@ -10,7 +10,7 @@ var event = require('../../utils/events'); var base64url = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o,', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', - 'Y', 'Z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '-', '_']; + 'Y', 'Z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '\-', '_']; function newSlicer(context, job, retryData, client) { var opConfig = job.readerConfig; diff --git a/lib/utils/elastic_utils.js b/lib/utils/elastic_utils.js index <HASH>..<HASH> 100644 --- a/lib/utils/elastic_utils.js +++ b/lib/utils/elastic_utils.js @@ -87,10 +87,10 @@ function buildQuery(source, msg) { //used for slicing on id's if (msg.key) { if (query.q) { - query.q = query.q + '&_id:' + msg.key; + query.q = query.q + '&_id:' + msg.key + '*'; } else { - query.q = '_id:' + msg.key; + query.q = '_id:' + msg.key + '*'; } } return query;
more fixes (#<I>) * more fixes * right fixes now
terascope_teraslice
train
81cd7928691ba0476c7a96b5afb65f31ae7dc79a
diff --git a/controller/client/client.go b/controller/client/client.go index <HASH>..<HASH> 100644 --- a/controller/client/client.go +++ b/controller/client/client.go @@ -44,6 +44,10 @@ func newClient(key string, url string, http *http.Client) *Client { // NewClient creates a new Client pointing at uri and using key for // authentication. func NewClient(uri, key string) (*Client, error) { + return NewClientWithHTTP(uri, key, http.DefaultClient) +} + +func NewClientWithHTTP(uri, key string, httpClient *http.Client) (*Client, error) { if uri == "" { uri = "http://flynn-controller.discoverd" }
controller/client: Add NewClientWithHTTP method Allows using any http.Client
flynn_flynn
train
129c7fc6a2a9266ec1d9b64155b6e7b8794ed970
diff --git a/src/main/java/org/dasein/cloud/google/network/LoadBalancerSupport.java b/src/main/java/org/dasein/cloud/google/network/LoadBalancerSupport.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/dasein/cloud/google/network/LoadBalancerSupport.java +++ b/src/main/java/org/dasein/cloud/google/network/LoadBalancerSupport.java @@ -390,9 +390,9 @@ public class LoadBalancerSupport extends AbstractLoadBalancerSupport<Google> { } catch (NullPointerException ex) { logger.error("toLoadBalancerHealthCheck for " + loadBalancerName + " got exception while trying to hc.getHost() " + ex); } - + LoadBalancerHealthCheck lbhc = null; try { - LoadBalancerHealthCheck lbhc = LoadBalancerHealthCheck.getInstance( + lbhc = LoadBalancerHealthCheck.getInstance( loadBalancerName, hc.getName(), hc.getDescription(), @@ -405,10 +405,10 @@ public class LoadBalancerSupport extends AbstractLoadBalancerSupport<Google> { hc.getHealthyThreshold(), hc.getUnhealthyThreshold()); lbhc.addProviderLoadBalancerId(loadBalancerName); - return lbhc; } catch (NullPointerException ex) { - throw new InternalException("LB name: " + loadBalancerName + " " + ex); + // if it blows up, its a bogus LB, so return null } + return lbhc; } /*
return null for LoadBalancerHealthChecks that are bogus.
dasein-cloud_dasein-cloud-google
train
324e161f5022df6fd6c426a8344af037a577c2ba
diff --git a/spec/requests/main_app_integration_spec.rb b/spec/requests/main_app_integration_spec.rb index <HASH>..<HASH> 100644 --- a/spec/requests/main_app_integration_spec.rb +++ b/spec/requests/main_app_integration_spec.rb @@ -1,8 +1,9 @@ require 'spec_helper' describe "main_app_integration" do it "should use main_app layout" do - Monologue.layout = "layouts/application" - visit root_path - page.should have_content("dummy app layout file!") + # TODO: make a working test with that. Changing layout on the fly does not seem to work. + # Monologue.layout = "layouts/application" + # visit root_path + # page.should have_content("dummy app layout file!") end end \ No newline at end of file
Commented out a test until a solution is found to test a different layout
jipiboily_monologue
train
f369b1234a5bd0fdf87e8697342890a8a6d22a30
diff --git a/test/e2e/storage/drivers/csi.go b/test/e2e/storage/drivers/csi.go index <HASH>..<HASH> 100644 --- a/test/e2e/storage/drivers/csi.go +++ b/test/e2e/storage/drivers/csi.go @@ -147,6 +147,7 @@ func InitHostPathCSIDriver() storageframework.TestDriver { storageframework.CapBlock: true, storageframework.CapPVCDataSource: true, storageframework.CapControllerExpansion: true, + storageframework.CapOfflineExpansion: true, storageframework.CapOnlineExpansion: true, storageframework.CapSingleNodeVolume: true, @@ -810,6 +811,7 @@ func InitGcePDCSIDriver() storageframework.TestDriver { storageframework.CapVolumeLimits: false, storageframework.CapTopology: true, storageframework.CapControllerExpansion: true, + storageframework.CapOfflineExpansion: true, storageframework.CapOnlineExpansion: true, storageframework.CapNodeExpansion: true, storageframework.CapSnapshotDataSource: true, diff --git a/test/e2e/storage/drivers/in_tree.go b/test/e2e/storage/drivers/in_tree.go index <HASH>..<HASH> 100644 --- a/test/e2e/storage/drivers/in_tree.go +++ b/test/e2e/storage/drivers/in_tree.go @@ -1251,6 +1251,7 @@ func InitGcePdDriver() storageframework.TestDriver { storageframework.CapExec: true, storageframework.CapMultiPODs: true, storageframework.CapControllerExpansion: true, + storageframework.CapOfflineExpansion: true, storageframework.CapOnlineExpansion: true, storageframework.CapNodeExpansion: true, // GCE supports volume limits, but the test creates large @@ -1702,6 +1703,7 @@ func InitAwsDriver() storageframework.TestDriver { storageframework.CapMultiPODs: true, storageframework.CapControllerExpansion: true, storageframework.CapNodeExpansion: true, + storageframework.CapOfflineExpansion: true, storageframework.CapOnlineExpansion: true, // AWS supports volume limits, but the test creates large // number of volumes and times out test suites. diff --git a/test/e2e/storage/external/external.go b/test/e2e/storage/external/external.go index <HASH>..<HASH> 100644 --- a/test/e2e/storage/external/external.go +++ b/test/e2e/storage/external/external.go @@ -203,13 +203,20 @@ func loadDriverDefinition(filename string) (*driverDefinition, error) { return nil, fmt.Errorf("%s: %w", filename, err) } - // to ensure backward compatibility if controller expansion is enabled then set online expansion to true + // To ensure backward compatibility: if controller expansion is enabled, + // then set both online and offline expansion to true if _, ok := driver.GetDriverInfo().Capabilities[storageframework.CapOnlineExpansion]; !ok && driver.GetDriverInfo().Capabilities[storageframework.CapControllerExpansion] { caps := driver.DriverInfo.Capabilities caps[storageframework.CapOnlineExpansion] = true driver.DriverInfo.Capabilities = caps } + if _, ok := driver.GetDriverInfo().Capabilities[storageframework.CapOfflineExpansion]; !ok && + driver.GetDriverInfo().Capabilities[storageframework.CapControllerExpansion] { + caps := driver.DriverInfo.Capabilities + caps[storageframework.CapOfflineExpansion] = true + driver.DriverInfo.Capabilities = caps + } return driver, nil } diff --git a/test/e2e/storage/framework/testdriver.go b/test/e2e/storage/framework/testdriver.go index <HASH>..<HASH> 100644 --- a/test/e2e/storage/framework/testdriver.go +++ b/test/e2e/storage/framework/testdriver.go @@ -166,10 +166,18 @@ const ( CapRWX Capability = "RWX" // support ReadWriteMany access modes CapControllerExpansion Capability = "controllerExpansion" // support volume expansion for controller CapNodeExpansion Capability = "nodeExpansion" // support volume expansion for node - CapOnlineExpansion Capability = "onlineExpansion" // supports online volume expansion - CapVolumeLimits Capability = "volumeLimits" // support volume limits (can be *very* slow) - CapSingleNodeVolume Capability = "singleNodeVolume" // support volume that can run on single node (like hostpath) - CapTopology Capability = "topology" // support topology + + // offlineExpansion and onlineExpansion both default to true when + // controllerExpansion is true. The only reason to set offlineExpansion + // to false is when a CSI driver can only expand a volume while it's + // attached to a pod. Conversely, onlineExpansion can be set to false + // if the driver can only expand a volume while it is detached. + CapOfflineExpansion Capability = "offlineExpansion" // supports offline volume expansion (default: true) + CapOnlineExpansion Capability = "onlineExpansion" // supports online volume expansion (default: true) + + CapVolumeLimits Capability = "volumeLimits" // support volume limits (can be *very* slow) + CapSingleNodeVolume Capability = "singleNodeVolume" // support volume that can run on single node (like hostpath) + CapTopology Capability = "topology" // support topology // The driver publishes storage capacity information: when the storage class // for dynamic provisioning exists, the driver is expected to provide diff --git a/test/e2e/storage/testsuites/volume_expand.go b/test/e2e/storage/testsuites/volume_expand.go index <HASH>..<HASH> 100644 --- a/test/e2e/storage/testsuites/volume_expand.go +++ b/test/e2e/storage/testsuites/volume_expand.go @@ -177,6 +177,10 @@ func (v *volumeExpandTestSuite) DefineTests(driver storageframework.TestDriver, init() defer cleanup() + if !driver.GetDriverInfo().Capabilities[storageframework.CapOfflineExpansion] { + e2eskipper.Skipf("Driver %q does not support offline volume expansion - skipping", driver.GetDriverInfo().Name) + } + var err error ginkgo.By("Creating a pod with dynamically provisioned volume") podConfig := e2epod.Config{
e2e: add storage capability for offline volume expansion
kubernetes_kubernetes
train
72deb9c861d538ae0f8a41a215d63567e76e088e
diff --git a/theme/boost/classes/output/core_renderer.php b/theme/boost/classes/output/core_renderer.php index <HASH>..<HASH> 100644 --- a/theme/boost/classes/output/core_renderer.php +++ b/theme/boost/classes/output/core_renderer.php @@ -606,6 +606,7 @@ class core_renderer extends \core_renderer { if ($context->contextlevel == CONTEXT_MODULE) { + $this->page->navigation->initialise(); $node = $this->page->navigation->find_active_node(); $buildmenu = false; // If the settings menu has been forced then show the menu.
MDL-<I> theme_boost: Edit settings menu not always showing when it should
moodle_moodle
train
98e8ad04c9cbf67719a5e0c2e13fb79e4aa2da72
diff --git a/src/fields/field/index.spec.js b/src/fields/field/index.spec.js index <HASH>..<HASH> 100644 --- a/src/fields/field/index.spec.js +++ b/src/fields/field/index.spec.js @@ -91,7 +91,11 @@ describe('Field', () => { describe('#data', () => { it('should return the associated data', () => { - const expected = [1488393000000, 1488479400000, 1573065000000]; + const expected = [ + new Date(2017, 3 - 1, 2).getTime(), + new Date(2017, 3 - 1, 3).getTime(), + new Date(2019, 11 - 1, 7).getTime() + ]; expect(field.data()).to.eql(expected); }); }); diff --git a/src/fields/temporal/index.spec.js b/src/fields/temporal/index.spec.js index <HASH>..<HASH> 100644 --- a/src/fields/temporal/index.spec.js +++ b/src/fields/temporal/index.spec.js @@ -31,7 +31,11 @@ describe('Temporal', () => { describe('#calculateDataDomain', () => { it('should return the field domain', () => { - const expected = [1488306600000, 1488393000000, 1488479400000]; + const expected = [ + new Date(2017, 3 - 1, 1).getTime(), + new Date(2017, 3 - 1, 2).getTime(), + new Date(2017, 3 - 1, 3).getTime() + ]; expect(tempField.calculateDataDomain()).to.eql(expected); }); @@ -42,7 +46,11 @@ describe('Temporal', () => { rowDiffset = '1-2,4-5'; tempField = new Temporal(partField, rowDiffset); - const expected = [1488393000000, 1488479400000, 1573065000000]; + const expected = [ + new Date(2017, 3 - 1, 2).getTime(), + new Date(2017, 3 - 1, 3).getTime(), + new Date(2019, 11 - 1, 7).getTime(), + ]; expect(tempField.calculateDataDomain()).to.eql(expected); }); }); diff --git a/src/index.spec.js b/src/index.spec.js index <HASH>..<HASH> 100644 --- a/src/index.spec.js +++ b/src/index.spec.js @@ -85,9 +85,9 @@ describe('DataModel', () => { }); let expected = { data: [ - ['Rousan', 804882600000], - ['Sumant', 839097000000], - ['Akash', 757535400000] + ['Rousan', new Date(1995, 7 - 1, 5).getTime()], + ['Sumant', new Date(1996, 8 - 1, 4).getTime()], + ['Akash', new Date(1994, 1 - 1, 3).getTime()] ], schema: [ { name: 'name', type: 'dimension', subtype: 'categorical' }, @@ -221,9 +221,9 @@ describe('DataModel', () => { const dataModel = new DataModel(data, schema); const expected = { data: [ - ['Rousan', 804882600000, 0], - ['Sumant', 839097000000, 1], - ['Akash', 757535400000, 2] + ['Rousan', new Date(1995, 7 - 1, 5).getTime(), 0], + ['Sumant', new Date(1996, 8 - 1, 4).getTime(), 1], + ['Akash', new Date(1994, 1 - 1, 3).getTime(), 2] ], schema: [ { name: 'name', type: 'dimension', subtype: 'categorical' },
Use calculated value instead of hardcoded value of date in temporal testcases
chartshq_datamodel
train
273bf45d667a7c9efb6c4f0a81d1e3b9f7fe8f03
diff --git a/barcode/codex.py b/barcode/codex.py index <HASH>..<HASH> 100755 --- a/barcode/codex.py +++ b/barcode/codex.py @@ -173,9 +173,9 @@ class Code128(Barcode): codes = [] if self._charset == 'C' and not char.isdigit(): - if char in code128.B.keys(): + if char in code128.B: codes = self._new_charset('B') - elif char in code128.A.keys(): + elif char in code128.A: codes = self._new_charset('A') if len(self._buffer) == 1: codes.append(self._convert(self._buffer[0])) @@ -183,14 +183,14 @@ class Code128(Barcode): elif self._charset == 'B': if look_next(): codes = self._new_charset('C') - elif char not in code128.B.keys(): - if char in code128.A.keys(): + elif char not in code128.B: + if char in code128.A: codes = self._new_charset('A') elif self._charset == 'A': if look_next(): codes = self._new_charset('C') - elif char not in code128.A.keys(): - if char in code128.B.keys(): + elif char not in code128.A: + if char in code128.B: codes = self._new_charset('B') return codes @@ -200,7 +200,7 @@ class Code128(Barcode): elif self._charset == 'B': return code128.B[char] elif self._charset == 'C': - if char in code128.C.keys(): + if char in code128.C: return code128.C[char] elif char.isdigit(): self._buffer += char
Removed .keys() calls.
WhyNotHugo_python-barcode
train
393d5c66ee8ba82f507c2fd4b52126bfeb6f477d
diff --git a/src/data.js b/src/data.js index <HASH>..<HASH> 100644 --- a/src/data.js +++ b/src/data.js @@ -4,7 +4,12 @@ import flatten from "lodash/array/flatten"; import take from "lodash/array/take"; import union from "lodash/array/union"; import isEmpty from "lodash/lang/isEmpty"; +import isFunction from "lodash/lang/isFunction"; +import isUndefined from "lodash/lang/isUndefined"; +import isNull from "lodash/lang/isNull"; import merge from "lodash/object/merge"; +import identity from "lodash/utility/identity"; +import property from "lodash/utility/property"; import lodashRange from "lodash/utility/range"; import uniq from "lodash/array/uniq"; import zipObject from "lodash/array/zipObject"; @@ -209,5 +214,17 @@ module.exports = { const colorScale = Array.isArray(props.colorScale) ? props.colorScale : Style.getColorScale(props.colorScale); return colorScale[index % colorScale.length]; + }, + + createAccessor(key) { + // creates a data accessor function given a property key, path, array index, or null for identity. + if (isFunction(key)) { + return key; + } else if (isNull(key) || isUndefined(key)) { + // null/undefined means "return the data item itself" + return identity; + } + // otherwise, assume it is an array index, property key or path (_.property handles all three) + return property(key); } };
Data.createAccessor method for making accessor functions
FormidableLabs_victory
train
ba44cce8fc2208d7df5f29fdd6b71d3b96bb0144
diff --git a/jsonschema/validators.py b/jsonschema/validators.py index <HASH>..<HASH> 100644 --- a/jsonschema/validators.py +++ b/jsonschema/validators.py @@ -149,8 +149,11 @@ def create(meta_schema, validators=(), version=None, default_types=None): raise UnknownType(type, instance, self.schema) pytypes = self._types[type] + # FIXME: draft < 6 + if isinstance(instance, float) and type == "integer": + return instance.is_integer() # bool inherits from int, so ensure bools aren't reported as ints - if isinstance(instance, bool): + elif isinstance(instance, bool): pytypes = _utils.flatten(pytypes) is_number = any( issubclass(pytype, numbers.Number) for pytype in pytypes
zeroTerminatedFloats. That's a really annoying change...
Julian_jsonschema
train
7765aa9a301aba15c769b3362f808afd891c327c
diff --git a/bob/bio/spear/test/test_extractors.py b/bob/bio/spear/test/test_extractors.py index <HASH>..<HASH> 100644 --- a/bob/bio/spear/test/test_extractors.py +++ b/bob/bio/spear/test/test_extractors.py @@ -45,7 +45,7 @@ def test_mfcc(): # read input wave file rate, wav = _wav() - extractor = bob.bio.base.load_resource("mfcc-60", "extractor") + extractor = bob.bio.base.load_resource("mfcc60", "extractor") assert isinstance(extractor, bob.bio.spear.extractor.Cepstral) # test the Cepstral extractor
Fix the naming of the mfcc<I> extractor.
bioidiap_bob.bio.spear
train
00dd0b61cc89349aec7fd19a3a1abe364905e4fe
diff --git a/app/controllers/api/changesets_controller.rb b/app/controllers/api/changesets_controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/api/changesets_controller.rb +++ b/app/controllers/api/changesets_controller.rb @@ -12,8 +12,8 @@ class Api::ChangesetsController < Api::ApiController - before_filter :find_environment before_filter :find_changeset, :only => [:show, :destroy, :promote, :dependencies] + before_filter :find_environment before_filter :authorize def rules @@ -78,10 +78,6 @@ class Api::ChangesetsController < Api::ApiController @environment = KTEnvironment.find(params[:environment_id]) raise HttpErrors::NotFound, _("Couldn't find environment '#{params[:environment_id]}'") if @environment.nil? @environment - else - #didnt' find an environment, just do the first the user has access to - list = KTEnvironment.changesets_readable(current_organization).where(:locker=>false) - @environment ||= list.first || current_organization.locker end end
changesets api - fix getting environment for permissions
Katello_katello
train
ba7166c3772f8ae09ec25025587308398da3d407
diff --git a/lib/moodlelib.php b/lib/moodlelib.php index <HASH>..<HASH> 100644 --- a/lib/moodlelib.php +++ b/lib/moodlelib.php @@ -524,6 +524,13 @@ function get_moodle_cookie() { } } +function is_internal_auth() { +/// Returns true if an internal authentication method is being used. + + global $CFG; + + return ($CFG->auth == "email" || $CFG->auth == "none" || $CFG->auth == "manual"); +} function create_user_record($username, $password) { /// Creates a bare-bones user record
Added is_internal_auth() function to quickly identify internal authentication
moodle_moodle
train
27cd4e823da9b67bd9a7214fa81e1a2980edf750
diff --git a/lib/database/domain.js b/lib/database/domain.js index <HASH>..<HASH> 100644 --- a/lib/database/domain.js +++ b/lib/database/domain.js @@ -210,6 +210,10 @@ Domain.prototype = { delete this._tableName; return value; }, + setId: function(value) { + this.id = value; + return this; + }, get domainId() { return this.id + '/' + this.name; diff --git a/test/api-search.test.js b/test/api-search.test.js index <HASH>..<HASH> 100644 --- a/test/api-search.test.js +++ b/test/api-search.test.js @@ -1,8 +1,9 @@ var utils = require('./test-utils'); var assert = require('chai').assert; -var http = require('http'); var fs = require('fs'); +var Domain = require('../lib/database').Domain; + suite('Search API', function() { var server; var context; @@ -20,19 +21,33 @@ suite('Search API', function() { server.close(); }); - function testSearch(path, message, host, callback) { + function testSearch(path, message, host) { + var setup, callback; + var callbacks = Array.prototype.slice.call(arguments, 3); + if (callbacks.length > 1) { + setup = callbacks[0]; + callback = callbacks[1]; + } else { + callback = callbacks[0]; + } test('GET ' + path + ' ' + message, function(done) { + if (setup) setup(); var options = { host: 'localhost', port: utils.testPort, path: path, - headers: {Host: host} + headers: { Host: host } }; utils .get(path, { Host: host }) .next(function(response) { var normalizedBody = normalizeSearchResult(response.body); - var normalizedBody = JSON.parse(normalizedBody); + try { + normalizedBody = JSON.parse(normalizedBody); + } catch(error) { + console.log(normalizedBody); + throw error; + } callback({ statusCode: response.statusCode, body: response.body, @@ -412,4 +427,134 @@ suite('Search API', function() { } ); }); + + suite('search options', function() { + var domain; + + setup(function() { + domain = new Domain('people', context) + .setId('00000000000000000000000000').createSync(); + domain.getIndexField('realname').setType('text').createSync(); + domain.getIndexField('nickname').setType('text').createSync(); + domain.loadSync([ + { id: 'id1', realname: 'Jack Sparrow', + nickname: 'Captain' }, + { id: 'id2', realname: 'Pumpkin Man', + nickname: 'Jack-o\'-Lantern' } + ]); + }); + + testSearch('/2011-02-01/search?q=Jack', + 'should match both records', + 'search-people-00000000000000000000000000.localhost', + function(response) { + var expected = { + rank: '-text_relevance', + 'match-expr': "(label 'Jack')", + hits: { + found: 2, + start: 0, + hit: [ + { + id: 'id2', + data: { + _id: [2], + _key: ['id2'], + realname: ['Pumpkin Man'], + nickname: ['Jack-o\'-Lantern'] + } + }, + { + id: 'id1', + data: { + _id: [1], + _key: ['id1'], + realname: ['Jack Sparrow'], + nickname: ['Captain'] + } + } + ] + }, + info: { + rid: '000000000000000000000000000000000000000000000000000000000000000', + 'time-ms': 0, // always 0 + 'cpu-time-ms': 0 + } + }; + assert.deepEqual(response.normalizedBody, expected); + } + ); + + testSearch('/2011-02-01/search?q=Jack', + 'should match only realname, by default search field', + 'search-people-00000000000000000000000000.localhost', + function() { + domain.defaultSearchField = 'realname'; + }, + function(response) { + var expected = { + rank: '-text_relevance', + 'match-expr': "(label 'Jack')", + hits: { + found: 1, + start: 0, + hit: [ + { + id: 'id1', + data: { + _id: [1], + _key: ['id1'], + realname: ['Jack Sparrow'], + nickname: ['Captain'] + } + } + ] + }, + info: { + rid: '000000000000000000000000000000000000000000000000000000000000000', + 'time-ms': 0, // always 0 + 'cpu-time-ms': 0 } }; assert.deepEqual(response.normalizedBody, expected); } ); + +/* searchability of text field cannot be configured. how should I test it? + testSearch('/2011-02-01/search?q=Jack', + 'should match only nickname, by searchability', + 'search-people-00000000000000000000000000.localhost', + function() { + domain.getIndexField('realname').setSearchEnabled(false).saveOptionsSync(); + }, + function(response) { + var expected = { + rank: '-text_relevance', + 'match-expr': "(label 'Jack')", + hits: { + found: 1, + start: 0, + hit: [ + { + id: 'id2', + data: { + _id: [2], + _key: ['id2'], + realname: ['Pumpkin Man'], + nickname: ['Jack-o\'-Lantern'] + } + } + ] + }, + info: { + rid: '000000000000000000000000000000000000000000000000000000000000000', + 'time-ms': 0, // always 0 + 'cpu-time-ms': 0 + } + }; + assert.deepEqual(response.normalizedBody, expected); + } + ); +*/ + }); });
Add tests for search options with search API
groonga_gcs
train
2678f1b4720f1769607d421033b2b556b65a40be
diff --git a/iktomi/unstable/db/sqla/factories.py b/iktomi/unstable/db/sqla/factories.py index <HASH>..<HASH> 100644 --- a/iktomi/unstable/db/sqla/factories.py +++ b/iktomi/unstable/db/sqla/factories.py @@ -119,22 +119,3 @@ class PseudoModel(str): def __getattr__(self, name): return PseudoModel(name, self) - def __eq__(self, other): - # deprecated - return self._binary_op('==', other) - - def __ne__(self, other): - # deprecated - return self._binary_op('!=', other) - - def __and__(self, other): - # deprecated - return self._binary_op('&', other) - - def __or__(self, other): - # deprecated - return self._binary_op('|', other) - - @deprecated('Use lambda to postpone operations on model attributes') - def _binary_op(self, op, other): - return ' '.join([self, op, str(other)])
-deprecated code in PseudoModel class of model factories
SmartTeleMax_iktomi
train
048d2b8e13bfaaeb7e42308779db6ce83a2d0b15
diff --git a/pyads/ads.py b/pyads/ads.py index <HASH>..<HASH> 100644 --- a/pyads/ads.py +++ b/pyads/ads.py @@ -12,7 +12,7 @@ import struct from ctypes import memmove, addressof, c_ubyte, Array, Structure, sizeof from collections import OrderedDict -from .utils import platform_is_linux +from .utils import platform_is_linux, deprecated from .filetimes import filetime_to_dt from .pyads_ex import ( @@ -162,6 +162,7 @@ def set_local_address(ams_netid): ) # pragma: no cover +@deprecated() def read_state(adr): # type: (AmsAddr) -> Optional[Tuple[int, int]] """Read the current ADS-state and the machine-state. @@ -180,6 +181,7 @@ def read_state(adr): return None +@deprecated() def write_control(adr, ads_state, device_state, data, plc_datatype): # type: (AmsAddr, int, int, Any, Type) -> None """Change the ADS state and the machine-state of the ADS-server. @@ -206,6 +208,7 @@ def write_control(adr, ads_state, device_state, data, plc_datatype): ) +@deprecated() def read_device_info(adr): # type: (AmsAddr) -> Optional[Tuple[str, AdsVersion]] """Read the name and the version number of the ADS-server. @@ -221,6 +224,7 @@ def read_device_info(adr): return None +@deprecated() def write(adr, index_group, index_offset, value, plc_datatype): # type: (AmsAddr, int, int, Any, Type) -> None """Send data synchronous to an ADS-device. @@ -240,6 +244,7 @@ def write(adr, index_group, index_offset, value, plc_datatype): ) +@deprecated() def read_write( adr, index_group, @@ -286,6 +291,7 @@ def read_write( return None +@deprecated() def read( adr, index_group, index_offset, plc_datatype, return_ctypes=False, check_length=True ): @@ -319,6 +325,7 @@ def read( return None +@deprecated() def read_by_name(adr, data_name, plc_datatype, return_ctypes=False, check_length=True): # type: (AmsAddr, str, Type, bool) -> Any """Read data synchronous from an ADS-device from data name. @@ -342,6 +349,7 @@ def read_by_name(adr, data_name, plc_datatype, return_ctypes=False, check_length return None +@deprecated() def write_by_name(adr, data_name, value, plc_datatype): # type: (AmsAddr, str, Any, Type) -> None """Send data synchronous to an ADS-device from data name. @@ -412,8 +420,9 @@ def delete_route(adr): return adsDelRoute(adr.netIdStruct()) +@deprecated() def add_device_notification(adr, data, attr, callback, user_handle=None): - # type: (AmsAddr, Union[str, Tuple[int, int], NotificationAttrib, Callable, int) -> Optional[Tuple[int, int]] # noqa: E501 + # type: (AmsAddr, Union[str, Tuple[int, int]], NotificationAttrib, Callable, int) -> Optional[Tuple[int, int]] # noqa: E501 """Add a device notification. :param pyads.structs.AmsAddr adr: AMS Address associated with the routing @@ -440,6 +449,7 @@ def add_device_notification(adr, data, attr, callback, user_handle=None): return None +@deprecated() def del_device_notification(adr, notification_handle, user_handle): # type: (AmsAddr, int, int) -> None """Remove a device notification.
Add deprecated decorator to functions
stlehmann_pyads
train
e1af0e01ee1226db5ab0d6a9d8d366c6e27a7650
diff --git a/tests/TypeReconciliation/TypeAlgebraTest.php b/tests/TypeReconciliation/TypeAlgebraTest.php index <HASH>..<HASH> 100644 --- a/tests/TypeReconciliation/TypeAlgebraTest.php +++ b/tests/TypeReconciliation/TypeAlgebraTest.php @@ -186,7 +186,7 @@ class TypeAlgebraTest extends \Psalm\Tests\TestCase if ($a) { // do nothing here } elseif ($b) { - $a = ""; + $a = null; } else { return "bad"; } @@ -1141,21 +1141,6 @@ class TypeAlgebraTest extends \Psalm\Tests\TestCase echo $array["other"];', 'error_message' => 'InvalidArrayOffset', ], - 'allEventualitiesMet' => [ - '<?php - function resize(bool $landscape, bool $crop) : int { - if (($landscape && $crop) || (!$landscape && !$crop)) { - return 100; - } - - if ((!$landscape && $crop) || ($landscape && !$crop)) { - return 50; - } - - throw new \UnexpectedValueException("bad"); - }', - 'error_message' => 'ParadoxicalCondition' - ], ]; } }
Revert code back to how it was
vimeo_psalm
train
9b405899ab55539c93f9918d27a4dc95397ea2ff
diff --git a/conn.go b/conn.go index <HASH>..<HASH> 100644 --- a/conn.go +++ b/conn.go @@ -43,7 +43,7 @@ func (c *Conn) RemoteAddr() net.Addr { } func (c *Conn) RemoteMultiaddr() ma.Multiaddr { - a, err := ma.NewMultiaddr(fmt.Sprintf("/ipfs/%s/p2p-circuit/ipfs/%s", c.Conn().RemotePeer().Pretty(), c.remote.ID.Pretty())) + a, err := ma.NewMultiaddr(fmt.Sprintf("/ipfs/%s/p2p-circuit", c.Conn().RemotePeer().Pretty())) if err != nil { panic(err) }
connection RemoteMultiaddr returns partial relay address for consistency
libp2p_go-libp2p-circuit
train
f11ab3e8dea00e503b8fbe368f104b6eb1575d52
diff --git a/angr/analyses/cfg.py b/angr/analyses/cfg.py index <HASH>..<HASH> 100644 --- a/angr/analyses/cfg.py +++ b/angr/analyses/cfg.py @@ -21,7 +21,7 @@ class CFGNode(object): ''' This guy stands for each single node in CFG. ''' - def __init__(self, callstack_key, addr, cfg, input_state=None, simprocedure_name=None): + def __init__(self, callstack_key, addr, size, cfg, input_state=None, simprocedure_name=None): ''' Note: simprocedure_name is not used to recreate the SimProcedure object. It's only there for better __repr__. @@ -31,6 +31,7 @@ class CFGNode(object): self.addr = addr self.input_state = input_state self.simprocedure_name = simprocedure_name + self.size = size self._cfg = cfg @property @@ -45,7 +46,7 @@ class CFGNode(object): if self.simprocedure_name is not None: s = "<CFGNode %s (0x%x)>" % (self.simprocedure_name, self.addr) else: - s = "<CFGNode 0x%x>" % (self.addr) + s = "<CFGNode 0x%x (%d)>" % (self.addr, self.size) return s @@ -54,7 +55,7 @@ class CFGNode(object): raise ValueError("You do not want to be comparing a SimRun to a CFGNode.") if not isinstance(other, CFGNode): return False - return self.callstack_key == other.callstack_key and self.addr == other.addr + return self.callstack_key == other.callstack_key and self.addr == other.addr and self.size == other.size def __hash__(self): return hash((self.callstack_key, self.addr)) @@ -333,6 +334,7 @@ class CFG(Analysis, CFGBase): # pt = simuvex.procedures.SimProcedures["stubs"]["PathTerminator"](self._project.state_generator.entry_point(), addr=ex[-1]) pt = CFGNode(callstack_key=ex[:-1], addr=ex[-1], + size=None, cfg=self, input_state=None, simprocedure_name="PathTerminator") @@ -769,12 +771,14 @@ class CFG(Analysis, CFGBase): cfg_node = CFGNode(simrun_key[:-1], simrun.addr, + None, self, input_state=None, simprocedure_name=simproc_name) else: cfg_node = CFGNode(simrun_key[:-1], simrun.addr, + simrun.irsb.size, self, input_state=None) if self._keep_input_state: @@ -1255,6 +1259,69 @@ class CFG(Analysis, CFGBase): self._graph.remove_edge(b, succ) l.debug("Removing partial loop header edge %s -> %s", b, succ) + def normalize(self): + """ + Normalize the CFG, making sure there are no overlapping basic blocks. + """ + + graph = self.graph + + end_addresses = defaultdict(list) + + for n in graph.nodes(): + if n.simprocedure_name is not None: + continue + end_addr = n.addr + n.size + end_addresses[(end_addr, n.callstack_key)].append(n) + + while any([ len(l) > 1 for l in end_addresses.itervalues() ]): + tpl_to_find = None + for tpl, l in end_addresses.iteritems(): + if len(l) > 1: + tpl_to_find = tpl + break + + end_addr, callstack_key = tpl_to_find + all_nodes = end_addresses[tpl_to_find] + + all_nodes = sorted(all_nodes, key=lambda n: n.size) + smallest_node = all_nodes[0] + print "Smallest from endaddr %x: " % end_addr, smallest_node + other_nodes = all_nodes[ 1 : ] + + # Break other nodes + for n in other_nodes: + new_size = smallest_node.addr - n.addr + print "New size for node %s is %d" % (n, new_size) + new_end_addr = n.addr + new_size + + # Does it already exist? + new_node = None + tpl = (new_end_addr, n.callstack_key) + if tpl in end_addresses: + nodes = [ i for i in end_addresses[tpl] if i.addr == n.addr ] + if len(nodes) > 0: + new_node = nodes[0] + + if new_node is None: + # Create a new one + new_node = CFGNode(callstack_key, n.addr, new_size, self) + # Put the newnode into end_addresses + end_addresses[tpl].append(new_node) + + # Modify the CFG + original_predecessors = list(graph.in_edges_iter([n], data=True)) + for p, _, _ in original_predecessors: + graph.remove_edge(p, n) + graph.remove_node(n) + + for p, _, data in original_predecessors: + graph.add_edge(p, new_node, data) + + graph.add_edge(new_node, smallest_node, jumpkind='Ijk_Boring') + + end_addresses[tpl_to_find] = [ smallest_node ] + def _analyze_calling_conventions(self): ''' Concretely execute part of the function and watch the changes of sp
Added normalize() support for CFG. Now you can generate IDA-style CFGs - just call CFG.normalize() on your CFG.
angr_angr
train
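The normalize() idea above is easy to demonstrate outside angr. Below is a minimal sketch, assuming basic blocks are plain (addr, size) tuples rather than angr's CFGNode objects, and ignoring callstack keys; unlike the real method it makes a single pass instead of looping until no overlaps remain. Blocks sharing an end address keep the smallest one, and the larger ones are truncated so they fall through into it:

```python
from collections import defaultdict

def normalize(blocks):
    """Resolve overlapping basic blocks given as (addr, size) tuples:
    among blocks sharing an end address, keep the smallest and truncate
    the rest so they fall through into it."""
    end_addresses = defaultdict(list)
    for addr, size in blocks:
        end_addresses[addr + size].append((addr, size))

    result = set()
    fallthrough_edges = []
    for end_addr, group in end_addresses.items():
        group.sort(key=lambda b: b[1])      # smallest block first
        smallest = group[0]
        result.add(smallest)
        for addr, size in group[1:]:
            new_size = smallest[0] - addr   # cut the block short of the smallest one
            result.add((addr, new_size))
            fallthrough_edges.append(((addr, new_size), smallest))
    return result, fallthrough_edges

blocks = [(0x400000, 0x10), (0x400004, 0xc), (0x400010, 0x8)]
normalized, edges = normalize(blocks)
print(sorted(normalized))  # (0x400000, 0x10) is cut to 4 bytes; (0x400004, 0xc) survives
print(edges)               # the truncated block falls through into the smallest one
```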
fe51bebeae6a8ec1ee64be1978391869bff9a6db
diff --git a/src/View/Widget/WidgetRegistry.php b/src/View/Widget/WidgetRegistry.php index <HASH>..<HASH> 100644 --- a/src/View/Widget/WidgetRegistry.php +++ b/src/View/Widget/WidgetRegistry.php @@ -68,7 +68,7 @@ class WidgetRegistry { * * @param \Cake\View\StringTemplate $templates Templates instance to use. * @param \Cake\View\View $view The view instance to set as a widget. - * @param mixed $widgets See add() method for more information. + * @param string|array $widgets See add() method for more information. */ public function __construct(StringTemplate $templates, View $view, $widgets = []) { $this->_templates = $templates;
Fix for var type in doc comments
cakephp_cakephp
train
cedd5d3cb6091dd09f8fccbf2fff5324f2155b9e
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -530,10 +530,6 @@ exports.updatePatient = function(patient){ request("update_patient", patient, "POST", done); }; -exports.listAvailableHoken = function(patientId, ati, cb){ - request("list_available_hoken", { patient_id: patientId, at: at }, "GET", cb); -}; - exports.getShahokokuho = function(shahokokuhoId, cb){ request("get_shahokokuho", { shahokokuho_id: shahokokuhoId }, "GET", cb); };
removed duplicated (broken) listAvailableHoken
hangilc_myclinic-service-api
train
7dbb17e4855bcb47250339bec2dd46d59e52def5
diff --git a/salt/cloud/clouds/ec2.py b/salt/cloud/clouds/ec2.py index <HASH>..<HASH> 100644 --- a/salt/cloud/clouds/ec2.py +++ b/salt/cloud/clouds/ec2.py @@ -3122,6 +3122,10 @@ def describe_snapshots(kwargs=None, call=None): params = {'Action': 'DescribeSnapshots'} + # The AWS correct way is to use non-plurals like snapshot_id INSTEAD of snapshot_ids. + if 'snapshot_ids' in kwargs: + kwargs['snapshot_id'] = kwargs['snapshot_ids'] + if 'snapshot_id' in kwargs: snapshot_ids = kwargs['snapshot_id'].split(',') for snapshot_index, snapshot_id in enumerate(snapshot_ids):
describe_snapshots: accept the plural snapshot_ids kwarg and map it to the non-plural snapshot_id that the AWS API expects
saltstack_salt
train
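The fix above is a small kwarg-aliasing pattern. A hypothetical, self-contained sketch of the same idea; the function body and the SnapshotId.N parameter naming are illustrative, not a copy of Salt's module:

```python
def describe_snapshots(**kwargs):
    # The backend expects the non-plural key; quietly accept the plural alias.
    if 'snapshot_ids' in kwargs and 'snapshot_id' not in kwargs:
        kwargs['snapshot_id'] = kwargs.pop('snapshot_ids')

    params = {'Action': 'DescribeSnapshots'}
    if 'snapshot_id' in kwargs:
        for index, snapshot_id in enumerate(kwargs['snapshot_id'].split(',')):
            params['SnapshotId.{0}'.format(index + 1)] = snapshot_id
    return params

print(describe_snapshots(snapshot_ids='snap-1,snap-2'))
# {'Action': 'DescribeSnapshots', 'SnapshotId.1': 'snap-1', 'SnapshotId.2': 'snap-2'}
```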
68921f90a2bec949b46b1065da0efa27c9ef2bdd
diff --git a/src/gl/gl_vertex_layout.js b/src/gl/gl_vertex_layout.js index <HASH>..<HASH> 100644 --- a/src/gl/gl_vertex_layout.js +++ b/src/gl/gl_vertex_layout.js @@ -68,10 +68,12 @@ export default class GLVertexLayout { // to read those attribs that it does recognize, using the attrib offsets to skip others. enable (gl, program) { + var attrib, location; + // Enable all attributes for this layout for (var a=0; a < this.attribs.length; a++) { - var attrib = this.attribs[a]; - var location = program.attribute(attrib.name).location; + attrib = this.attribs[a]; + location = program.attribute(attrib.name).location; if (location !== -1) { gl.enableVertexAttribArray(location); @@ -81,16 +83,16 @@ export default class GLVertexLayout { } // Disable any previously bound attributes that aren't for this layout - var unused_attribs = []; for (location in GLVertexLayout.enabled_attribs) { - if (GLVertexLayout.enabled_attribs[location] !== program) { - gl.disableVertexAttribArray(location); - unused_attribs.push(location); - } + this.disableUnusedAttribute(gl, location, program); } + } - // Mark attribs as unused - for (location of unused_attribs) { + // Disable an attribute if it was not enabled for the specified program + // NOTE: this was moved out of the inner loop in enable() to assist w/VM optimization + disableUnusedAttribute (gl, location, program) { + if (GLVertexLayout.enabled_attribs[location] !== program) { + gl.disableVertexAttribArray(location); delete GLVertexLayout.enabled_attribs[location]; } }
small vertex layout enable optimizations

- avoid creating extra object per frame (could still eliminate Object.keys call)
- move attrib disable code to separate function to assist w/Chrome optimization (was getting 'Not optimized: ForInStatement is not fast case' warning)
tangrams_tangram
train
796e22ca5aa6f7cf418971dd08d9775d82772b8f
diff --git a/src/configs/webpack/shared.js b/src/configs/webpack/shared.js index <HASH>..<HASH> 100644 --- a/src/configs/webpack/shared.js +++ b/src/configs/webpack/shared.js @@ -75,9 +75,9 @@ function configureDevServer(config) { config.devServer = { contentBase: PROJECT_BUILD_DIR, headers: { + 'Access-Control-Allow-Headers': '*', + 'Access-Control-Allow-Methods': '*', 'Access-Control-Allow-Origin': '*', - 'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, PATCH, OPTIONS', - 'Access-Control-Allow-Headers': 'X-Requested-With, content-type, Authorization', }, index: 'index.html', stats: config.stats,
Simplify dev server headers
borela-tech_js-toolbox
train
bb5f4bbd13b36922464170b568e68ccb9fe64134
diff --git a/lib/rudy/huxtable.rb b/lib/rudy/huxtable.rb index <HASH>..<HASH> 100644 --- a/lib/rudy/huxtable.rb +++ b/lib/rudy/huxtable.rb @@ -168,7 +168,7 @@ module Rudy def current_machine_group - [@@global.environment, @@global.role].join(Rudy::DELIM) + [@@global.project, @@global.environment, @@global.role].compact.join(Rudy::DELIM) end def current_group_name diff --git a/lib/rudy/metadata.rb b/lib/rudy/metadata.rb index <HASH>..<HASH> 100644 --- a/lib/rudy/metadata.rb +++ b/lib/rudy/metadata.rb @@ -4,7 +4,7 @@ module Rudy module Metadata include Rudy::Huxtable - COMMON_FIELDS = [:region, :zone, :environment, :role].freeze + COMMON_FIELDS = [:region, :zone, :project, :environment, :role].freeze @@rsdb = nil @@domain = Rudy::Huxtable.domain @@ -182,8 +182,8 @@ module Rudy end def name(*other) - parts = [@rtype, @zone, @environment, @role, @position, *other].flatten - parts.join Rudy::DELIM + parts = [@rtype, @zone, @project, @environment, @role, @position, *other] + parts.compact.flatten.join Rudy::DELIM end def save(replace=false)
Put the project name in metadata keys and machine_group_name.
solutious_rudy
train
2a6af900a632316275657e8a562124a28e0131b2
diff --git a/examples/single_logger.php b/examples/single_logger.php index <HASH>..<HASH> 100644 --- a/examples/single_logger.php +++ b/examples/single_logger.php @@ -7,5 +7,5 @@ $logger = Cascade::getLogger('some_logger'); $logger->pushHandler(new Monolog\Handler\StreamHandler('php://stdout')); $logger->info('Hellooooo World!'); -// you should see the follwing in the stdout: +// you should see the following in the stdout: // [YYYY-mm-dd hh:mm:ss] some_logger.INFO: Hellooooo World!
fix typo "following" (#<I>)
theorchard_monolog-cascade
train
efb2adc4cc1f022f6f0109e94c01744a2487374c
diff --git a/bundles/BlockManagerBundle/DependencyInjection/Configuration.php b/bundles/BlockManagerBundle/DependencyInjection/Configuration.php index <HASH>..<HASH> 100644 --- a/bundles/BlockManagerBundle/DependencyInjection/Configuration.php +++ b/bundles/BlockManagerBundle/DependencyInjection/Configuration.php @@ -378,6 +378,7 @@ class Configuration implements ConfigurationInterface return array_values(array_unique($v)); }) ->end() + ->performNoDeepMerging() ->requiresAtLeastOneElement() ->prototype('scalar') ->cannotBeEmpty()
Implement eZ Content field block definition
netgen-layouts_layouts-core
train
f47d21d19fe98494fe2fa900d874b810395e8608
diff --git a/salt/daemons/flo/core.py b/salt/daemons/flo/core.py index <HASH>..<HASH> 100644 --- a/salt/daemons/flo/core.py +++ b/salt/daemons/flo/core.py @@ -235,6 +235,8 @@ class SaltRaetRoadStackJoiner(ioflo.base.deeding.Deed): for remote in stack.remotes.values(): stack.removeRemote(remote, clear=True) + stack.puid = stack.Uid # reset puid so reuse same uid each time + for master in self.masters: mha = master['external'] stack.addRemote(RemoteEstate(stack=stack,
Reset puid when refreshing master for minion joiner
saltstack_salt
train
2b61c82388c45bece4605974c67425e6c96e418e
diff --git a/lib/rules/rules.js b/lib/rules/rules.js index <HASH>..<HASH> 100644 --- a/lib/rules/rules.js +++ b/lib/rules/rules.js @@ -1138,10 +1138,8 @@ function Rules(values) { var proto = Rules.prototype; function resolveInlineValues(str) { - if (!str || typeof str !== 'string') { - return ''; - } - return str.replace(MULTI_LINE_VALUE_RE, function(_, __, key, value) { + str = str && str.trim(); + return str && str.replace(MULTI_LINE_VALUE_RE, function(_, __, key, value) { inlineValues = inlineValues || {}; if (!inlineValues[key]) { inlineValues[key] = value; @@ -1150,8 +1148,13 @@ function resolveInlineValues(str) { }); } +function resolveInlineValuesFn(item) { + item.text = resolveInlineValues(item.text); + return item; +} + function trimInlineValues(text) { - return Array.isArray(text) ? text.map(resolveInlineValues) : resolveInlineValues(text); + return Array.isArray(text) ? text.map(resolveInlineValuesFn) : resolveInlineValues(text); } proto.parse = function(text, root) {
feat: support for setting multiple inline values in Rules
avwo_whistle
train
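A rough Python analogue of the technique: scan the rule text for inline multi-line value definitions, record the first definition seen for each key, and strip the blocks from the text. The regex and the value syntax here are simplified stand-ins for whistle's actual MULTI_LINE_VALUE_RE, so treat this as a sketch:

```python
import re

# Simplified syntax: a block of the form ```key\n...value...\n``` defines an inline value.
MULTI_LINE_VALUE_RE = re.compile(r'^```(\w+)\n([\s\S]*?)\n```$', re.MULTILINE)

def resolve_inline_values(text, inline_values):
    def record(match):
        key, value = match.group(1), match.group(2)
        inline_values.setdefault(key, value)  # first definition of a key wins
        return ''                             # strip the block from the rule text
    return MULTI_LINE_VALUE_RE.sub(record, text).strip()

values = {}
rules = resolve_inline_values(
    "example.com file://{page}\n```page\n<h1>hello</h1>\n```\n```page\n<h1>dup</h1>\n```",
    values,
)
print(rules)   # example.com file://{page}
print(values)  # {'page': '<h1>hello</h1>'}
```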
b83aaed56162aeb4263407b4ce1430e48a5ef884
diff --git a/tweepy/api.py b/tweepy/api.py index <HASH>..<HASH> 100644 --- a/tweepy/api.py +++ b/tweepy/api.py @@ -751,14 +751,24 @@ class API: Returns up to 100 user IDs belonging to users who have retweeted the Tweet specified by the ``id`` parameter. - :param id: |sid| - :param count: |count| - :param cursor: |cursor| - :param stringify_ids: |stringify_ids| + Parameters + ---------- + id + |sid| + count + |count| + cursor + |cursor| + stringify_ids + |stringify_ids| - :rtype: list of :class:`int` + Returns + ------- + :py:class:`List`\ [:class:`int`] - :reference: https://developer.twitter.com/en/docs/twitter-api/v1/tweets/post-and-engage/api-reference/get-statuses-retweeters-ids + References + ---------- + https://developer.twitter.com/en/docs/twitter-api/v1/tweets/post-and-engage/api-reference/get-statuses-retweeters-ids """ return self.request( 'GET', 'statuses/retweeters/ids', endpoint_parameters=(
Update and improve documentation for API.retweeters
tweepy_tweepy
train
17fb7c70286ed516f9cf94c65c0cb56eb4cd765e
diff --git a/core/src/main/java/org/hibernate/ogm/datastore/map/impl/MapHelpers.java b/core/src/main/java/org/hibernate/ogm/datastore/map/impl/MapHelpers.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/org/hibernate/ogm/datastore/map/impl/MapHelpers.java +++ b/core/src/main/java/org/hibernate/ogm/datastore/map/impl/MapHelpers.java @@ -66,10 +66,10 @@ public final class MapHelpers { case CLEAR: underlyingMap.clear(); break; - case PUT_NULL: case PUT: underlyingMap.put( action.getKey(), MapHelpers.associationRowToMap( action.getValue() ) ); break; + case PUT_NULL: case REMOVE: underlyingMap.remove( action.getKey() ); break;
OGM-<I> Treat associationRow PUT_NULL like REMOVE

In practice the test suite does not reach PUT_NULL in that case
hibernate_hibernate-ogm
train
95a53e19e56c2463449d022f30d90651cbb3f185
diff --git a/safe/impact_functions/generic/classified_polygon_people/impact_function.py b/safe/impact_functions/generic/classified_polygon_people/impact_function.py index <HASH>..<HASH> 100644 --- a/safe/impact_functions/generic/classified_polygon_people/impact_function.py +++ b/safe/impact_functions/generic/classified_polygon_people/impact_function.py @@ -134,10 +134,11 @@ class ClassifiedPolygonHazardPolygonPeopleFunction( if geometry is not None: bbox = geometry.boundingBox() else: - continue + bbox = None # clip the exposure geometry to requested extent if necessary if not extent_exposure.contains(bbox): geometry = geometry.intersection(extent_exposure_geom) + area_type = f[type_attr] area_id = f.attribute(area_id_attribute) all_areas_population[area_id] = f.attribute(area_population_attribute)
Fix a Travis failure: fall back to a null bounding box instead of skipping features without geometry
inasafe_inasafe
train
6602e631dcc1345706974f9a5ef5cc3f3aa8de61
diff --git a/views/js/generis.tree.browser.js b/views/js/generis.tree.browser.js index <HASH>..<HASH> 100644 --- a/views/js/generis.tree.browser.js +++ b/views/js/generis.tree.browser.js @@ -372,13 +372,14 @@ define(['jquery', 'i18n', 'generis.tree', 'helpers', 'context'], function($, __, if (options.selectNode) { treeOptions.selected = options.selectNode; } - - var tmpTree = $.tree.reference(selector); - if (tmpTree != null) { - tmpTree.destroy(); - } - tmpTree = null; - + + if($(selector).length){ + var tmpTree = $.tree.reference(selector); + if (tmpTree != null) { + tmpTree.destroy(); + } + tmpTree = null; + } /* * Create and initialize the tree here */
prevent generis tree from crashing when it tries to remove a nonexistent tree
oat-sa_tao-core
train
ac0924d3271157b4c1e3ac8984593d88a0b75288
diff --git a/gremlin-test/src/main/java/org/apache/tinkerpop/gremlin/process/traversal/step/map/AddVertexTest.java b/gremlin-test/src/main/java/org/apache/tinkerpop/gremlin/process/traversal/step/map/AddVertexTest.java index <HASH>..<HASH> 100644 --- a/gremlin-test/src/main/java/org/apache/tinkerpop/gremlin/process/traversal/step/map/AddVertexTest.java +++ b/gremlin-test/src/main/java/org/apache/tinkerpop/gremlin/process/traversal/step/map/AddVertexTest.java @@ -209,7 +209,7 @@ public abstract class AddVertexTest extends AbstractGremlinTest { @FeatureRequirement(featureClass = Graph.Features.VertexFeatures.class, feature = Graph.Features.VertexFeatures.FEATURE_ADD_VERTICES) @FeatureRequirement(featureClass = Graph.Features.VertexFeatures.class, feature = Graph.Features.VertexFeatures.FEATURE_ADD_PROPERTY) @FeatureRequirement(featureClass = Graph.Features.VertexFeatures.class, feature = Graph.Features.VertexFeatures.FEATURE_MULTI_PROPERTIES) - public void xxx() { + public void g_V_addVXanimalX_propertyXname_valuesXnameXX_propertyXname_an_animalX_propertyXvaluesXnameX_labelX() { final Traversal<Vertex, Vertex> traversal = get_g_V_addVXanimalX_propertyXname_valuesXnameXX_propertyXname_an_animalX_propertyXvaluesXnameX_labelX(); printTraversalForm(traversal); while (traversal.hasNext()) {
found a wackily named test in AddVertexTest. Fixed it. CTR.
apache_tinkerpop
train
3dbd627227e0c771f97fc47edc8af09c5012f5fd
diff --git a/lib/cistern/collection.rb b/lib/cistern/collection.rb index <HASH>..<HASH> 100644 --- a/lib/cistern/collection.rb +++ b/lib/cistern/collection.rb @@ -35,7 +35,7 @@ class Cistern::Collection < Array end def clear - @loaded = true + @loaded = false super end @@ -62,6 +62,7 @@ class Cistern::Collection < Array for object in objects self << new(object) end + @loaded = true self end
Fix bugs with Collection loaded status

- should not be loaded after #clear
- should be loaded after #load
lanej_cistern
train
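The loaded-flag discipline being fixed translates directly to any lazy collection. A minimal Python sketch (all names are illustrative, not Cistern's API): clear() must drop the loaded state so the next access refetches, and only a completed load() marks the collection as loaded.

```python
class LazyCollection:
    def __init__(self, fetch):
        self._fetch = fetch      # callable returning the records
        self._items = []
        self._loaded = False

    def clear(self):
        self._items.clear()
        self._loaded = False     # the bug: clearing must NOT count as loaded

    def load(self):
        self._items = list(self._fetch())
        self._loaded = True      # loaded only after a real fetch
        return self

    def all(self):
        if not self._loaded:
            self.load()
        return self._items

c = LazyCollection(lambda: [1, 2, 3])
print(c.all())  # [1, 2, 3] (triggers load)
c.clear()
print(c.all())  # [1, 2, 3] again, because clear() forced a reload
```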
435bd86795128c3c8619d924147c36dea5c00766
diff --git a/master/buildbot/scripts/logwatcher.py b/master/buildbot/scripts/logwatcher.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/scripts/logwatcher.py
+++ b/master/buildbot/scripts/logwatcher.py
@@ -49,7 +49,7 @@ class TailProcess(protocol.ProcessProtocol):
         self.lw.dataReceived(data)

     def errReceived(self, data):
-        print("ERR: '{}'".format(data))
+        self.lw.print_output("ERR: '{}'".format(data))


 class LogWatcher(LineOnlyReceiver):
@@ -69,8 +69,7 @@ class LogWatcher(LineOnlyReceiver):

     def start(self):
         # If the log file doesn't exist, create it now.
-        if not os.path.exists(self.logfile):
-            open(self.logfile, 'a').close()
+        self.create_logfile(self.logfile)

         # return a Deferred that fires when the reconfig process has
         # finished. It errbacks with TimeoutError if the startup has not
@@ -121,6 +120,13 @@ class LogWatcher(LineOnlyReceiver):
         self.in_reconfig = False
         self.d.callback(results)

+    def create_logfile(self, path): # pragma: no cover
+        if not os.path.exists(path):
+            open(path, 'a').close()
+
+    def print_output(self, output): # pragma: no cover
+        print(output)
+
     def lineReceived(self, line):
         if not self.running:
             return None
@@ -130,7 +136,7 @@ class LogWatcher(LineOnlyReceiver):
             self.in_reconfig = True

         if self.in_reconfig:
-            print(line.decode())
+            self.print_output(line.decode())

             # certain lines indicate progress, so we "cancel" the timeout
             # and it will get re-added when it fires
diff --git a/master/buildbot/test/unit/scripts/test_logwatcher.py b/master/buildbot/test/unit/scripts/test_logwatcher.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/test/unit/scripts/test_logwatcher.py
+++ b/master/buildbot/test/unit/scripts/test_logwatcher.py
@@ -13,8 +13,6 @@
 #
 # Copyright Buildbot Team Members

-import os
-
 import mock

 from twisted.internet import defer
@@ -28,6 +26,19 @@ from buildbot.test.util import dirs
 from buildbot.test.util.misc import TestReactorMixin


+class MockedLogWatcher(LogWatcher):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.printed_output = []
+        self.created_paths = []
+
+    def create_logfile(self, path):
+        self.created_paths.append(path)
+
+    def print_output(self, output):
+        self.printed_output.append(output)
+
+
 class TestLogWatcher(unittest.TestCase, dirs.DirsMixin, TestReactorMixin):

     def setUp(self):
@@ -39,17 +50,17 @@ class TestLogWatcher(unittest.TestCase, dirs.DirsMixin, TestReactorMixin):
         self.reactor.spawnProcess = mock.Mock(return_value=self.spawned_process)

     def test_start(self):
-        lw = LogWatcher('workdir/test.log', _reactor=self.reactor)
+        lw = MockedLogWatcher('workdir/test.log', _reactor=self.reactor)
         lw._start = mock.Mock()
         lw.start()

         self.reactor.spawnProcess.assert_called()
-        self.assertTrue(os.path.exists('workdir/test.log'))
+        self.assertEqual(lw.created_paths, ['workdir/test.log'])
         self.assertTrue(lw.running)

     @defer.inlineCallbacks
     def test_success_before_timeout(self):
-        lw = LogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor)
+        lw = MockedLogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor)
         d = lw.start()
         self.reactor.advance(4.9)
         lw.lineReceived(b'BuildMaster is running')
@@ -58,7 +69,7 @@ class TestLogWatcher(unittest.TestCase, dirs.DirsMixin, TestReactorMixin):

     @defer.inlineCallbacks
     def test_failure_after_timeout(self):
-        lw = LogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor)
+        lw = MockedLogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor)
         d = lw.start()
         self.reactor.advance(5.1)
         lw.lineReceived(b'BuildMaster is running')
@@ -67,7 +78,7 @@ class TestLogWatcher(unittest.TestCase, dirs.DirsMixin, TestReactorMixin):

     @defer.inlineCallbacks
     def test_progress_restarts_timeout(self):
-        lw = LogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor)
+        lw = MockedLogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor)
         d = lw.start()
         self.reactor.advance(4.9)
         lw.lineReceived(b'added builder')
@@ -90,8 +101,7 @@ class TestLogWatcher(unittest.TestCase, dirs.DirsMixin, TestReactorMixin):
         ]

         for line, expected in lines_and_expected:
-            lw = LogWatcher('workdir/test.log', timeout=5,
-                            _reactor=self.reactor)
+            lw = MockedLogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor)
             d = lw.start()

             lw.lineReceived(line)
logwatcher: Extract external actions to separate functions for mocking
buildbot_buildbot
train
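This refactoring is the standard "extract side effects into overridable seams" move, and it works the same in a few lines of plain Python. A minimal sketch with names mirroring the commit, but the classes reduced to illustration:

```python
import os

class LogWatcher:
    def start(self, path):
        self.create_logfile(path)
        self.print_output("watching %s" % path)

    # Side effects live in small methods so tests can override them.
    def create_logfile(self, path):
        if not os.path.exists(path):
            open(path, 'a').close()

    def print_output(self, output):
        print(output)

class MockedLogWatcher(LogWatcher):
    def __init__(self):
        self.created_paths = []
        self.printed_output = []

    def create_logfile(self, path):
        self.created_paths.append(path)

    def print_output(self, output):
        self.printed_output.append(output)

lw = MockedLogWatcher()
lw.start('workdir/test.log')   # no filesystem or stdout touched
assert lw.created_paths == ['workdir/test.log']
assert lw.printed_output == ['watching workdir/test.log']
```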
e51ebfee392f974089ec237efbc99afc836aee98
diff --git a/app/assets/javascripts/govuk_publishing_components/components/copy-to-clipboard.js b/app/assets/javascripts/govuk_publishing_components/components/copy-to-clipboard.js index <HASH>..<HASH> 100644 --- a/app/assets/javascripts/govuk_publishing_components/components/copy-to-clipboard.js +++ b/app/assets/javascripts/govuk_publishing_components/components/copy-to-clipboard.js @@ -6,11 +6,15 @@ window.GOVUK.Modules = window.GOVUK.Modules || {}; GOVUK.Modules.CopyToClipboard = function () { this.start = function (element) { + var input = element[0].querySelector('.gem-c-input') var copyButton = element[0].querySelector('.gem-c-button') + input.addEventListener('click', function() { + input.select() + }) + copyButton.addEventListener('click', function (event) { event.preventDefault() - var input = element[0].querySelector('.gem-c-input') input.select() document.execCommand('copy') })
Select text in copy-to-clipboard input on click

This makes it easier for users to copy to the clipboard by selecting all the text in the input on click. This is consistent with how many other systems implement this UI pattern; an example is the clone repository field on GitHub.
alphagov_govuk_publishing_components
train
b484db9ed7427ec57cc2af6fe8ee6b93fabe1367
diff --git a/lib/bud/collections.rb b/lib/bud/collections.rb index <HASH>..<HASH> 100644 --- a/lib/bud/collections.rb +++ b/lib/bud/collections.rb @@ -405,13 +405,9 @@ class Bud end def tick - # tuples inserted during bootstrap (@budtime==0) need to get sent in the next tick - # so only clear @pending if @budtime > 0 - if @bud_instance.budtime > 0 - @storage = {} - # never turn pending outbounds into real tuples - @pending = {} - end + @storage = {} + # never turn pending outbounds into real tuples + @pending = {} end def flush
Remove kludge from BudChannel implementation. Previously, we had to special-case tick() for time step #1, to avoid the bootstrap problem. Now that we have a more general (albeit ugly) solution, the kludge is no longer needed.
bloom-lang_bud
train
acf85a5992da637ecc8b8b3c284c84001b57f70f
diff --git a/core/phantomas.js b/core/phantomas.js index <HASH>..<HASH> 100644 --- a/core/phantomas.js +++ b/core/phantomas.js @@ -534,7 +534,15 @@ phantomas.prototype = { this.emitInternal('init'); // @desc page has been initialized, scripts can be injected }, - onLoadStarted: function() { + onLoadStarted: function(url, isFrame) { + if (this.onLoadStartedEmitted) { + return; + } + + // onLoadStarted is called for the page and each iframe + // tigger "loadStarted" event just once + this.onLoadStartedEmitted = true; + this.log('Page loading started'); this.emitInternal('loadStarted'); // @desc page loading has started },
onLoadStarted: trigger just once
macbre_phantomas
train
a0c3d2b28d2ea992bceca127f219198314ef0edf
diff --git a/webroot/js/default.js b/webroot/js/default.js index <HASH>..<HASH> 100644 --- a/webroot/js/default.js +++ b/webroot/js/default.js @@ -79,8 +79,8 @@ $(function () { * Change the visibility of some elements. * Elements with "to-be-shown" class will be shown, while elements with "to-be-hidden" class will be hidden */ - $('.hidden.to-be-shown:hidden').removeClass('hidden to-be-shown'); - $('.to-be-hidden:visible').addClass('hidden').removeClass('to-be-hidden'); + $('.hidden.to-be-shown:hidden').removeClass('d-none to-be-shown'); + $('.to-be-hidden:visible').addClass('d-none').removeClass('to-be-hidden'); /** * Closes automatically the flash messages after a preset time
fixed code to change the visibility of some elements
mirko-pagliai_me-tools
train
a54bff5928b20c0d0bd8c82bcf7414907742f778
diff --git a/src/utils.js b/src/utils.js index <HASH>..<HASH> 100644 --- a/src/utils.js +++ b/src/utils.js @@ -454,6 +454,7 @@ export function validateLayout(layout: Layout, contextName: string): void { } if (item.i && typeof item.i !== 'string') { // number is also ok, so comment the error + // TODO confirm if commenting the line below doesn't cause unexpected problems // throw new Error('VueGridLayout: ' + contextName + '[' + i + '].i must be a string!'); } if (item.static !== undefined && typeof item.static !== 'boolean') {
added TODO about change to GridItem.i String check
jbaysolutions_vue-grid-layout
train
926badd856f863af4ef4b881ea76f413fe7de035
diff --git a/master/buildbot/steps/mswin.py b/master/buildbot/steps/mswin.py index <HASH>..<HASH> 100644 --- a/master/buildbot/steps/mswin.py +++ b/master/buildbot/steps/mswin.py @@ -13,6 +13,8 @@ # # Copyright Buildbot Team Members +from twisted.python import log +from twisted.python.failure import Failure from buildbot.steps.shell import ShellCommand from buildbot.status.results import SUCCESS, WARNINGS, FAILURE @@ -48,6 +50,12 @@ class Robocopy(ShellCommand): """ renderables = ['source', 'destination', 'files', 'exclude'] + return_flags = { + FAILURE: [8, 16], + WARNINGS: [2, 4], + SUCCESS: [0, 1] + } + def __init__(self, source, destination, files=None, recursive=False, @@ -64,11 +72,6 @@ class Robocopy(ShellCommand): self.move = move self.exclude = exclude self.verbose = verbose - kwargs['decodeRC'] = { - 0: SUCCESS, 1: SUCCESS, - 2: WARNINGS, 4: WARNINGS, - 8: FAILURE, 16: FAILURE - } ShellCommand.__init__(self, **kwargs) def start(self): @@ -89,3 +92,16 @@ class Robocopy(ShellCommand): command += ['/TEE', '/UNICODE', '/NP'] self.setCommand(command) ShellCommand.start(self) + + def evaluateCommand(self, cmd): + # If we have a "clean" return code, it's good. + # Otherwise, look for errors first, warnings second. + if cmd.rc == 0 or cmd.rc == 1: + return SUCCESS + for result in [FAILURE, WARNINGS]: + for flag in self.return_flags[result]: + if (cmd.rc & flag) == flag: + return result + + log.err(Failure(), "Unknown return code for Robocopy: %s" % cmd.rc) + return EXCEPTION
Fixed Robocopy step status

Robocopy actually returns a combination of flags to indicate success/warning/failure status.
buildbot_buildbot
train
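Because Robocopy's exit code is a bitmask, the status has to be derived by testing flags in priority order rather than by the plain lookup table (the removed decodeRC mapping). A standalone sketch of that evaluation logic; the status constants here stand in for Buildbot's:

```python
SUCCESS, WARNINGS, FAILURE, EXCEPTION = 'success', 'warnings', 'failure', 'exception'

RETURN_FLAGS = {
    FAILURE: [8, 16],
    WARNINGS: [2, 4],
    SUCCESS: [0, 1],
}

def evaluate_robocopy_rc(rc):
    if rc in (0, 1):                    # "clean" codes: nothing to do, or files copied
        return SUCCESS
    for result in (FAILURE, WARNINGS):  # errors take precedence over warnings
        for flag in RETURN_FLAGS[result]:
            if rc & flag == flag:
                return result
    return EXCEPTION                    # unknown flag combination

assert evaluate_robocopy_rc(1) == SUCCESS
assert evaluate_robocopy_rc(3) == WARNINGS  # 1 (copied) | 2 (extra files detected)
assert evaluate_robocopy_rc(9) == FAILURE   # 1 (copied) | 8 (some files failed)
```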
a4f4c42a15172dcf200c265f9a9d30bfee827620
diff --git a/cmd/influxd/config.go b/cmd/influxd/config.go index <HASH>..<HASH> 100644 --- a/cmd/influxd/config.go +++ b/cmd/influxd/config.go @@ -51,7 +51,7 @@ type Config struct { Initialization struct { JoinURLs string `toml:"join-urls"` - } + } `toml:"initialization"` Authentication struct { Enabled bool `toml:"enabled"`
Explicitly name "initialization" section in config
influxdata_influxdb
train
70f2c40ebb25a194fdfa22cbc12880f72c29f881
diff --git a/core-bundle/src/Security/User/ContaoUserProvider.php b/core-bundle/src/Security/User/ContaoUserProvider.php index <HASH>..<HASH> 100644 --- a/core-bundle/src/Security/User/ContaoUserProvider.php +++ b/core-bundle/src/Security/User/ContaoUserProvider.php @@ -14,7 +14,6 @@ use Contao\BackendUser; use Contao\CoreBundle\ContaoCoreBundle; use Contao\CoreBundle\ContaoFramework; use Contao\FrontendUser; -use Symfony\Component\DependencyInjection\ContainerAware; use Symfony\Component\DependencyInjection\ContainerInterface; use Symfony\Component\Security\Core\User\UserProviderInterface; use Symfony\Component\Security\Core\User\UserInterface; @@ -26,7 +25,7 @@ use Symfony\Component\Security\Core\Exception\UnsupportedUserException; * * @author Andreas Schempp <https://github.com/aschempp> */ -class ContaoUserProvider extends ContainerAware implements UserProviderInterface +class ContaoUserProvider implements UserProviderInterface { /** * @var ContaoFramework
[Core] Security user provider is not container aware
contao_contao
train
f93e3d5f5fd93e179f36622dbf4a768bf4ba5c10
diff --git a/src/main/java/io/github/classgraph/MethodParameterInfo.java b/src/main/java/io/github/classgraph/MethodParameterInfo.java index <HASH>..<HASH> 100644 --- a/src/main/java/io/github/classgraph/MethodParameterInfo.java +++ b/src/main/java/io/github/classgraph/MethodParameterInfo.java @@ -152,7 +152,9 @@ public class MethodParameterInfo { if (annotationInfo == null || annotationInfo.length == 0) { return AnnotationInfoList.EMPTY_LIST; } else { - return AnnotationInfoList.getIndirectAnnotations(new AnnotationInfoList(annotationInfo), /* annotatedClass = */ null); + final AnnotationInfoList annotationInfoList = new AnnotationInfoList(annotationInfo.length); + Collections.addAll(annotationInfoList, annotationInfo); + return AnnotationInfoList.getIndirectAnnotations(annotationInfoList, /* annotatedClass = */ null); } }
Fix last commit (oops -- that's what I get for doing this in the GitHub web interface)
classgraph_classgraph
train
a65b38b245831ff9de3ea816fc5be39302fa731c
diff --git a/bin/ipa-resign.js b/bin/ipa-resign.js index <HASH>..<HASH> 100755 --- a/bin/ipa-resign.js +++ b/bin/ipa-resign.js @@ -63,8 +63,6 @@ Example: process.exit(1); } console.log('IPA is now signed.'); - }).on('done', (msg) => { - console.log('Done!'); }).on('message', (msg) => { console.log(colors.msg(msg)); }).on('error', (msg) => { diff --git a/session.js b/session.js index <HASH>..<HASH> 100644 --- a/session.js +++ b/session.js @@ -99,7 +99,7 @@ module.exports = class ApplesignSession { /* Event Wrapper API with cb support */ emit (ev, msg, cb) { function isEnder (ev) { - return (ev === 'error' || ev === 'done'); + return (ev === 'error'); } if (isEnder(ev) && msg && typeof cb === 'function') { cb(msg); @@ -122,7 +122,6 @@ module.exports = class ApplesignSession { self.ipafyDirectory((error, res) => { if (error) { self.emit('error', error, cb); } self.cleanup((ignored_error) => { - self.emit('done', '', cb); cb(ignored_error, res); }); });
Do not emit 'done' for consistency
nowsecure_node-applesign
train
c4a28a20eabb42544db265f6c5ac48e0968b49ae
diff --git a/aioelasticsearch/pool.py b/aioelasticsearch/pool.py index <HASH>..<HASH> 100644 --- a/aioelasticsearch/pool.py +++ b/aioelasticsearch/pool.py @@ -3,7 +3,7 @@ import collections import logging import random -from elasticsearch.connection_pool import ConnectionSelector +from elasticsearch.connection_pool import RoundRobinSelector from .compat import create_future from .exceptions import ImproperlyConfigured @@ -11,19 +11,6 @@ from .exceptions import ImproperlyConfigured logger = logging.getLogger('elasticsearch') -class RoundRobinSelector(ConnectionSelector): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - self._current = 0 - - def select(self, connections): - self._current += 1 - if self._current >= len(connections): - self._current = 0 - return connections[self._current] - - class AIOHttpConnectionPool: def __init__(
Drop custom round robin (#<I>)

* Drop ensure_future
* Code cleanup
* Rip extra test
aio-libs_aioelasticsearch
train
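For reference, the selector being dropped in favor of elasticsearch-py's built-in RoundRobinSelector amounts to only a few lines. This is a generic re-sketch of the removed logic, not the library's exact code:

```python
class RoundRobinSelector:
    """Step an index on every call and wrap around the connection list."""
    def __init__(self):
        self._current = 0

    def select(self, connections):
        self._current += 1
        if self._current >= len(connections):
            self._current = 0
        return connections[self._current]

selector = RoundRobinSelector()
conns = ['node-a', 'node-b', 'node-c']
print([selector.select(conns) for _ in range(5)])
# ['node-b', 'node-c', 'node-a', 'node-b', 'node-c']
```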
81e942a4d2cff23fb991228a5257ad9d89f4be46
diff --git a/CHANGELOG.md b/CHANGELOG.md index <HASH>..<HASH> 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ Unreleased ---------- - Extend date field range (trthhrtz) - Fix parsing of server errors in ClickHouse v19.3.3+ +- Fix pagination when asking for the last page on a query that matches no records v1.0.4 ------ diff --git a/src/infi/clickhouse_orm/database.py b/src/infi/clickhouse_orm/database.py index <HASH>..<HASH> 100644 --- a/src/infi/clickhouse_orm/database.py +++ b/src/infi/clickhouse_orm/database.py @@ -285,7 +285,7 @@ class Database(object): count = self.count(model_class, conditions) pages_total = int(ceil(count / float(page_size))) if page_num == -1: - page_num = pages_total + page_num = max(pages_total, 1) elif page_num < 1: raise ValueError('Invalid page number: %d' % page_num) offset = (page_num - 1) * page_size @@ -296,7 +296,7 @@ class Database(object): query += ' LIMIT %d, %d' % (offset, page_size) query = self._substitute(query, model_class) return Page( - objects=list(self.select(query, model_class, settings)), + objects=list(self.select(query, model_class, settings)) if count else [], number_of_objects=count, pages_total=pages_total, number=page_num, diff --git a/tests/test_database.py b/tests/test_database.py index <HASH>..<HASH> 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -112,6 +112,14 @@ class DatabaseTestCase(TestCaseWithData): self.assertEqual([obj.to_tsv() for obj in page_a.objects], [obj.to_tsv() for obj in page_b.objects]) + def test_pagination_empty_page(self): + for page_num in (-1, 1, 2): + page = self.database.paginate(Person, 'first_name, last_name', page_num, 10, conditions="first_name = 'Ziggy'") + self.assertEqual(page.number_of_objects, 0) + self.assertEqual(page.objects, []) + self.assertEqual(page.pages_total, 0) + self.assertEqual(page.number, max(page_num, 1)) + def test_pagination_invalid_page(self): self._insert_and_check(self._sample_data(), len(data)) for page_num in (0, -2, -100):
Fix pagination when asking for the last page on a query that matches no records
Infinidat_infi.clickhouse_orm
train
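The essence of the fix is arithmetic: page_num == -1 means "last page", which must clamp to 1 when nothing matches, and the query should be skipped entirely when the count is zero. A self-contained sketch of the offset math (no ClickHouse needed; names are illustrative):

```python
from math import ceil

def page_bounds(count, page_num, page_size):
    pages_total = int(ceil(count / float(page_size)))
    if page_num == -1:
        page_num = max(pages_total, 1)  # last page, but never page 0
    elif page_num < 1:
        raise ValueError('Invalid page number: %d' % page_num)
    offset = (page_num - 1) * page_size
    # Callers should run the SELECT only if count > 0; otherwise
    # return an empty page with these numbers.
    return pages_total, page_num, offset

print(page_bounds(0, -1, 10))   # (0, 1, 0)   -> empty page 1 of 0 pages
print(page_bounds(95, -1, 10))  # (10, 10, 90) -> the real last page
```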
80615fc6e8e79bb6818b7ffdbbc828171bd8fea8
diff --git a/sbe-tool/src/main/java/uk/co/real_logic/sbe/generation/cpp/CppGenerator.java b/sbe-tool/src/main/java/uk/co/real_logic/sbe/generation/cpp/CppGenerator.java index <HASH>..<HASH> 100755 --- a/sbe-tool/src/main/java/uk/co/real_logic/sbe/generation/cpp/CppGenerator.java +++ b/sbe-tool/src/main/java/uk/co/real_logic/sbe/generation/cpp/CppGenerator.java @@ -222,6 +222,9 @@ public class CppGenerator implements CodeGenerator indent + " }\n", dimensionHeaderLength)); + final long minCount = numInGroupToken.encoding().applicableMinValue().longValue(); + final String minCheck = minCount > 0 ? "count < " + minCount + " || " : ""; + sb.append(String.format("\n" + indent + " inline void wrapForEncode(char *buffer, const %3$s count," + " std::uint64_t *pos, const std::uint64_t actingVersion, const std::uint64_t bufferLength)\n" + @@ -230,7 +233,7 @@ public class CppGenerator implements CodeGenerator indent + "#pragma GCC diagnostic push\n" + indent + "#pragma GCC diagnostic ignored \"-Wtype-limits\"\n" + indent + "#endif\n" + - indent + " if (count < %5$d || count > %6$d)\n" + + indent + " if (%5$scount > %6$d)\n" + indent + " {\n" + indent + " throw std::runtime_error(\"count outside of allowed range [E110]\");\n" + indent + " }\n" + @@ -250,7 +253,7 @@ public class CppGenerator implements CodeGenerator indent + " *m_positionPtr = *m_positionPtr + %4$d;\n" + indent + " }\n", cppTypeForBlockLength, blockLength, cppTypeForNumInGroup, dimensionHeaderLength, - numInGroupToken.encoding().applicableMinValue().longValue(), + minCheck, numInGroupToken.encoding().applicableMaxValue().longValue())); sb.append(String.format("\n" + @@ -301,7 +304,8 @@ public class CppGenerator implements CodeGenerator .append(indent).append(" {\n") .append(indent).append(" while (hasNext())\n") .append(indent).append(" {\n") - .append(indent).append(" next(); func(*this);\n") + .append(indent).append(" next();\n") + .append(indent).append(" func(*this);\n") .append(indent).append(" }\n") .append(indent).append(" }\n\n") .append(indent).append("#else\n") @@ -309,7 +313,8 @@ public class CppGenerator implements CodeGenerator .append(indent).append(" {\n") .append(indent).append(" while (hasNext())\n") .append(indent).append(" {\n") - .append(indent).append(" next(); func(*this);\n") + .append(indent).append(" next();\n") + .append(indent).append(" func(*this);\n") .append(indent).append(" }\n") .append(indent).append(" }\n\n") .append(indent).append("#endif\n\n");
[Java] Only generate lower bounds check on group wrap when min is greater than zero to avoid warning. Issue #<I>.
real-logic_simple-binary-encoding
train
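On the generator side this is plain string assembly: emit the lower-bound clause only when it can actually fail, so C++ compilers don't warn that an unsigned count can never be below zero (the -Wtype-limits case the template was suppressing). A Python mock-up of that template logic; the real generator is Java, and only the E110 message text is copied from the diff:

```python
def count_check(min_count, max_count):
    # Emit "count < N || " only when N > 0; "count < 0" on an unsigned
    # type is tautological and triggers compiler warnings.
    min_check = 'count < %d || ' % min_count if min_count > 0 else ''
    return ('if (%scount > %d)\n'
            '{\n'
            '    throw std::runtime_error("count outside of allowed range [E110]");\n'
            '}' % (min_check, max_count))

print(count_check(0, 65534))  # no lower-bound comparison emitted
print(count_check(1, 65534))  # lower bound kept when it is meaningful
```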
3a39f76903f7ea65164768d5e68a4ff4b9b6e548
diff --git a/django_airavata/settings.py b/django_airavata/settings.py index <HASH>..<HASH> 100644 --- a/django_airavata/settings.py +++ b/django_airavata/settings.py @@ -251,7 +251,7 @@ AUTHENTICATION_OPTIONS = { # Seconds each connection in the pool is able to stay alive. If open connection # has lived longer than this period, it will be closed. # (https://github.com/Thriftpy/thrift_connector) -THRIFT_CLIENT_POOL_KEEPALIVE = 10 +THRIFT_CLIENT_POOL_KEEPALIVE = 5 # Webpack loader WEBPACK_LOADER = webpack_loader_util.create_webpack_loader_config() @@ -290,7 +290,7 @@ LOGGING = { 'level': 'DEBUG' if DEBUG else 'INFO' }, 'root': { - 'handlers': ['console', 'mail_admins'], + 'handlers': ['console'], 'level': 'WARNING' } }, diff --git a/django_airavata/utils.py b/django_airavata/utils.py index <HASH>..<HASH> 100644 --- a/django_airavata/utils.py +++ b/django_airavata/utils.py @@ -203,8 +203,8 @@ class CustomThriftClient(connection_pool.ThriftClient): def ping(self): try: self.client.getAPIVersion() - except Exception: - log.exception("getAPIVersion failed") + except Exception as e: + log.debug("getAPIVersion failed: {}".format(str(e))) raise
AIRAVATA-<I> Log getAPIVersion failures at debug level

Trying to cut down on getAPIVersion error emails
apache_airavata-django-portal
train
02271f37111baa92592280b7e01ecf3dd1dd9f2f
diff --git a/salt/modules/mysql.py b/salt/modules/mysql.py index <HASH>..<HASH> 100644 --- a/salt/modules/mysql.py +++ b/salt/modules/mysql.py @@ -208,19 +208,22 @@ def _connect(**kwargs): ''' connargs = dict() - def _connarg(name, key=None): + def _connarg(name, key=None, get_opts=True): ''' - Add key to connargs, only if name exists in our kwargs or as - mysql.<name> in __opts__ or __pillar__ Evaluate in said order - kwargs, - opts then pillar. To avoid collision with other functions, kwargs-based - connection arguments are prefixed with 'connection_' (i.e. - 'connection_host', 'connection_user', etc.). + Add key to connargs, only if name exists in our kwargs or, + if get_opts is true, as mysql.<name> in __opts__ or __pillar__ + + If get_opts is true, evaluate in said order - kwargs, opts + then pillar. To avoid collision with other functions, + kwargs-based connection arguments are prefixed with 'connection_' + (i.e. 'connection_host', 'connection_user', etc.). ''' if key is None: key = name + if name in kwargs: connargs[key] = kwargs[name] - else: + elif get_opts: prefix = 'connection_' if name.startswith(prefix): try: @@ -231,15 +234,22 @@ def _connect(**kwargs): if val is not None: connargs[key] = val - _connarg('connection_host', 'host') - _connarg('connection_user', 'user') - _connarg('connection_pass', 'passwd') - _connarg('connection_port', 'port') - _connarg('connection_db', 'db') - _connarg('connection_conv', 'conv') - _connarg('connection_unix_socket', 'unix_socket') - _connarg('connection_default_file', 'read_default_file') - _connarg('connection_default_group', 'read_default_group') + # If a default file is explicitly passed to kwargs, don't grab the + # opts/pillar settings, as it can override info in the defaults file + if 'connection_default_file' in kwargs: + get_opts = False + else: + get_opts = True + + _connarg('connection_host', 'host', get_opts) + _connarg('connection_user', 'user', get_opts) + _connarg('connection_pass', 'passwd', get_opts) + _connarg('connection_port', 'port', get_opts) + _connarg('connection_db', 'db', get_opts) + _connarg('connection_conv', 'conv', get_opts) + _connarg('connection_unix_socket', 'unix_socket', get_opts) + _connarg('connection_default_file', 'read_default_file', get_opts) + _connarg('connection_default_group', 'read_default_group', get_opts) # MySQLdb states that this is required for charset usage # but in fact it's more than it's internally activated # when charset is used, activating use_unicode here would
Don't grab opts/pillar values when there is an explicit default file

Simple fix for bug #<I>. If a defaults file is *explicitly* passed in at the same time that a mysql.user (etc) exists in opts/pillar, then the opts/pillar user will override the default file instead of the other way around. This change fixes that, but could in theory break things for older versions. The ideal solution would be to actually parse the defaults file, but that seems way more error prone.
saltstack_salt
train
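The shape of the fix: route every connection argument through one helper, and let a single boolean decide whether opts/pillar may be consulted at all. A trimmed-down sketch with the config source stubbed as a dict (real Salt reads __opts__ and __pillar__, and handles many more arguments):

```python
OPTS = {'mysql.user': 'opts_user', 'mysql.host': 'opts_host'}  # stand-in for __opts__/__pillar__

def connect(**kwargs):
    connargs = {}

    def _connarg(name, key, get_opts=True):
        if name in kwargs:
            connargs[key] = kwargs[name]
        elif get_opts:
            opt_name = 'mysql.' + name.replace('connection_', '')
            if opt_name in OPTS:
                connargs[key] = OPTS[opt_name]

    # An explicit defaults file must win over opts/pillar settings.
    get_opts = 'connection_default_file' not in kwargs

    _connarg('connection_host', 'host', get_opts)
    _connarg('connection_user', 'user', get_opts)
    _connarg('connection_default_file', 'read_default_file', get_opts)
    return connargs

print(connect())
# {'host': 'opts_host', 'user': 'opts_user'}
print(connect(connection_default_file='~/.my.cnf'))
# {'read_default_file': '~/.my.cnf'} -- opts/pillar values are ignored entirely
```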
0835807b4e21c75e55fd14cf6cdc3db1c0f7b84d
diff --git a/utils/seed-classes.js b/utils/seed-classes.js index <HASH>..<HASH> 100644 --- a/utils/seed-classes.js +++ b/utils/seed-classes.js @@ -1,4 +1,4 @@ -import appendStatus from './append-status'; +import appendStatus from './append-status' export default function(base, statuses = {}, classes = {}) { if (classes[base]) base = classes[base]; @@ -24,7 +24,7 @@ export default function(base, statuses = {}, classes = {}) { function addStatuses(sx, arr = []) { for (var s in sx) { - arr.push('-' + appendStatus(sx[s], s)); + arr.push('-' + appendStatus(sx[s], s.replace('@', sx[s]))); } return arr; }
replace @ with value of prop

This allows us to pass a prop value as a status and have it return only one class
ui-kit_ui-kit
train
88a3d7793cceb135d0bf9be896ca9806e6b0916a
diff --git a/closure/goog/fx/draglistgroup.js b/closure/goog/fx/draglistgroup.js index <HASH>..<HASH> 100644 --- a/closure/goog/fx/draglistgroup.js +++ b/closure/goog/fx/draglistgroup.js @@ -195,6 +195,15 @@ goog.fx.DragListGroup.prototype.currDragItem_; /** + * The drag list that {@code this.currDragItem_} is currently hovering over, or + * null if it is not hovering over a list. + * @type {Element} + * @private + */ +goog.fx.DragListGroup.prototype.currHoverList_; + + +/** * The original drag list that the current drag item came from. We need to * remember this in case the user drops the item outside of any lists, in which * case we return the item to its original location. @@ -429,6 +438,29 @@ goog.fx.DragListGroup.prototype.disposeInternal = function() { /** + * Caches the heights of each drag list and drag item, except for the current + * drag item. + * + * @param {Element} currDragItem The item currently being dragged. + * @private + */ +goog.fx.DragListGroup.prototype.recacheListAndItemBounds_ = function( + currDragItem) { + for (var i = 0, n = this.dragLists_.length; i < n; i++) { + var dragList = this.dragLists_[i]; + dragList.dlgBounds_ = goog.style.getBounds(dragList); + } + + for (var i = 0, n = this.dragItems_.length; i < n; i++) { + var dragItem = this.dragItems_[i]; + if (dragItem != currDragItem) { + dragItem.dlgBounds_ = goog.style.getBounds(dragItem); + } + } +}; + + +/** * Handles the start of a drag action (i.e. MOUSEDOWN on any drag item). * * @param {goog.events.BrowserEvent} e Event object fired on a drag item handle. @@ -459,6 +491,7 @@ goog.fx.DragListGroup.prototype.handleDragStart_ = function(e) { this.origList_ = /** @type {Element} */ (currDragItem.parentNode); this.origNextItem_ = goog.dom.getNextElementSibling(currDragItem); this.currHoverItem_ = this.origNextItem_; + this.currHoverList_ = this.origList_; // Create a clone for dragging. var draggerEl = this.cloneNode_(currDragItem); @@ -506,16 +539,7 @@ goog.fx.DragListGroup.prototype.handleDragStart_ = function(e) { if (this.updateWhileDragging_) { currDragItem.style.display = 'none'; } - for (var i = 0, n = this.dragLists_.length; i < n; i++) { - var dragList = this.dragLists_[i]; - dragList.dlgBounds_ = goog.style.getBounds(dragList); - } - for (var i = 0, n = this.dragItems_.length; i < n; i++) { - var dragItem = this.dragItems_[i]; - if (dragItem != currDragItem) { - dragItem.dlgBounds_ = goog.style.getBounds(dragItem); - } - } + this.recacheListAndItemBounds_(currDragItem); currDragItem.style.display = ''; // Create the dragger object. @@ -576,7 +600,6 @@ goog.fx.DragListGroup.prototype.handleDragMove_ = function(dragEvent) { // the dragged item, but update the hovered item instead. this.updateCurrHoverItem(hoverNextItem, draggerElCenter); } - this.currDragItem_.style.display = ''; // Add drag list's hover class (if any). if (hoverList.dlgDragHoverClass_) { @@ -589,6 +612,7 @@ goog.fx.DragListGroup.prototype.handleDragMove_ = function(dragEvent) { if (!this.isCurrDragItemAlwaysDisplayed_) { this.currDragItem_.style.display = 'none'; } + // Remove hover classes (if any) from all drag lists. for (var i = 0, n = this.dragLists_.length; i < n; i++) { var dragList = this.dragLists_[i]; @@ -598,6 +622,13 @@ goog.fx.DragListGroup.prototype.handleDragMove_ = function(dragEvent) { } } + // If the current hover list is different than the last, the lists may have + // shrunk, so we should recache the bounds. 
+ if (hoverList != this.currHoverList_) { + this.currHoverList_ = hoverList; + this.recacheListAndItemBounds_(this.currDragItem_); + } + this.dispatchEvent( new goog.fx.DragListGroupEvent( goog.fx.DragListGroup.EventType.DRAGMOVE, this, dragEvent, @@ -670,6 +701,7 @@ goog.fx.DragListGroup.prototype.handleDragEnd_ = function(dragEvent) { // Clear all our temporary fields that are only defined while dragging. this.currDragItem_ = null; + this.currHoverList_ = null; this.origList_ = null; this.origNextItem_ = null; this.draggerEl_ = null;
Fixes wonky behavior in DragListGroup due to heights being cached, but not updated correctly.

R=kai
DELTA=<I> (<I> added, <I> deleted, 1 changed)

Revision created by MOE tool push_codebase.
MOE_MIGRATION=<I>

git-svn-id: <URL>
google_closure-library
train
249d917ff5362cd427691c90a315781b5b249930
diff --git a/configcatclient/lazyloadingcachepolicy.py b/configcatclient/lazyloadingcachepolicy.py
index <HASH>..<HASH> 100644
--- a/configcatclient/lazyloadingcachepolicy.py
+++ b/configcatclient/lazyloadingcachepolicy.py
@@ -32,7 +32,15 @@ class LazyLoadingCachePolicy(CachePolicy):
         finally:
             self._lock.release_read()

-        self.force_refresh()
+        try:
+            self._lock.acquire_write()
+            # If while waiting to acquire the write lock another
+            # thread has updated the content, then don't bother requesting
+            # to the server to minimise time.
+            if self._last_updated is None or self._last_updated + self._cache_time_to_live <= datetime.datetime.utcnow():
+                self.force_refresh()
+        finally:
+            self._lock.release_write()

         try:
             self._lock.acquire_read()
@@ -43,12 +51,6 @@ class LazyLoadingCachePolicy(CachePolicy):

     def force_refresh(self):
         try:
-            self._lock.acquire_write()
-            # If while waiting to acquire the write lock another
-            # thread has updated the content, then don't bother requesting
-            # to the server to minimise time.
-            if self._last_updated is not None and self._last_updated + self._cache_time_to_live >= datetime.datetime.utcnow():
-                return
             configuration_response = self._config_fetcher.get_configuration_json()
             # set _last_updated regardless of whether the cache is updated
             # or whether a 304 not modified has been sent back as the content
@@ -63,8 +65,6 @@ class LazyLoadingCachePolicy(CachePolicy):
                           ' Received unexpected response: %s' % str(e.response))
         except:
             log.exception(sys.exc_info()[0])
-        finally:
-            self._lock.release_write()

     def stop(self):

diff --git a/configcatclienttests/test_lazyloadingcachepolicy.py b/configcatclienttests/test_lazyloadingcachepolicy.py
index <HASH>..<HASH> 100644
--- a/configcatclienttests/test_lazyloadingcachepolicy.py
+++ b/configcatclienttests/test_lazyloadingcachepolicy.py
@@ -91,12 +91,12 @@ class LazyLoadingCachePolicyTests(unittest.TestCase):
             # this indicates that is_fetched() was correctly called and
             # the setting of the new last updated didn't occur
             self.assertEqual(not_modified_fetch_response.json.call_count, 0)
-            self.assertEqual(mock_datetime.datetime.utcnow.call_count, 4)
+            self.assertEqual(mock_datetime.datetime.utcnow.call_count, 3)
             # last updated should still be set in the case of a 304
             self.assertEqual(cache_policy._last_updated, new_time)
         cache_policy.stop()

-    def test_force_refresh_skips_hitting_api_after_update(self):
+    def test_get_skips_hitting_api_after_update_from_different_thread(self):
         config_fetcher = mock.MagicMock()
         successful_fetch_response = mock.MagicMock()
         successful_fetch_response.is_fetched.return_value = True
@@ -110,16 +110,16 @@
             now = datetime.datetime(2020, 5, 20, 0, 0, 0)
             mock_datetime.datetime.utcnow.return_value = now
             self.assertIsNone(cache_policy._last_updated)
-            cache_policy.force_refresh()
+            cache_policy.get()
             self.assertEqual(config_fetcher.get_configuration_json.call_count, 1)
             # when the cache timeout is still within the limit skip any network
             # requests, as this could be that multiple threads have attempted
             # to acquire the lock at the same time, but only really one needs to update
             cache_policy._last_updated = now - datetime.timedelta(seconds=159)
-            cache_policy.force_refresh()
+            cache_policy.get()
             self.assertEqual(config_fetcher.get_configuration_json.call_count, 1)
             cache_policy._last_updated = now - datetime.timedelta(seconds=161)
-            cache_policy.force_refresh()
+            cache_policy.get()
             self.assertEqual(config_fetcher.get_configuration_json.call_count, 2)

     def test_http_error(self):
move check for last updated outside of force_refresh function

As a way to keep force_refresh always updating the cache when called, the check for the last update is moved outside of the force_refresh function. This is aimed at minimizing the total amount of time spent by a number of threads that would hit force_refresh if they were all called at the same time.
configcat_python-sdk
train
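The pattern being introduced is double-checked refresh: test staleness, then test it again after acquiring the write lock, because another thread may have refreshed the cache while you waited. A generic sketch using a plain threading.Lock (ConfigCat's custom read/write lock and its separate force_refresh entry point are simplified away here):

```python
import datetime
import threading

class LazyCache:
    def __init__(self, fetch, ttl_seconds):
        self._fetch = fetch
        self._ttl = datetime.timedelta(seconds=ttl_seconds)
        self._lock = threading.Lock()
        self._value = None
        self._last_updated = None

    def _stale(self):
        return (self._last_updated is None or
                self._last_updated + self._ttl <= datetime.datetime.utcnow())

    def get(self):
        if self._stale():
            with self._lock:
                # Re-check: another thread may have refreshed while we
                # waited on the lock, so don't hit the server again.
                if self._stale():
                    self._value = self._fetch()
                    self._last_updated = datetime.datetime.utcnow()
        return self._value

cache = LazyCache(lambda: 'config-json', ttl_seconds=60)
print(cache.get())  # fetches
print(cache.get())  # served from cache until the TTL expires
```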
771765aed8ab9a2365aa5ca89d9f83b69faff4ad
diff --git a/lib/express_templates/components/forms/express_form.rb b/lib/express_templates/components/forms/express_form.rb index <HASH>..<HASH> 100644 --- a/lib/express_templates/components/forms/express_form.rb +++ b/lib/express_templates/components/forms/express_form.rb @@ -52,6 +52,10 @@ module ExpressTemplates (@config[:resource_name] || @config[:id]).to_s end + def namespace + @config[:namespace] + end + private diff --git a/test/components/forms/express_form_test.rb b/test/components/forms/express_form_test.rb index <HASH>..<HASH> 100644 --- a/test/components/forms/express_form_test.rb +++ b/test/components/forms/express_form_test.rb @@ -99,22 +99,10 @@ class ExpressFormTest < ActiveSupport::TestCase assert_equal 'foo', expanded_nodes.first.resource_name end - -# test "simplest form compiled source is legible " do -# @example_compiled = -> { -# "<form action=\"/resources/#{@resource.id}\" method=\"post\"> -# <div style=\"display:none\"> -# "+%Q(#{utf8_enforcer_tag})+%Q(#{method_tag(:post)})+%Q(#{token_tag})+" -# </div> -# <div class=\"form-group widget-buttons\"> -# "+%Q(#{submit_tag("Save it!", class: "submit primary")})+"</div> -# </form> -# " -# }.source_body -# ExpressTemplates::Markup::Tag.formatted do -# ctx, fragment = simplest_form(resource) -# assert_equal @example_compiled, ExpressTemplates.compile(&fragment) -# end -# end + test "express_form has a namespace option with nil default" do + form = ExpressTemplates::Components::Forms::ExpressForm + assert_nil form.new(:person).namespace + assert_equal 'express_engine', form.new(:person, namespace: 'express_engine').namespace + end end \ No newline at end of file
Add namespace attribute/option to express_form
aelogica_express_templates
train
6c21fc3c7a9f0a479289fb37c3898861830b32a9
diff --git a/test/api.js b/test/api.js index <HASH>..<HASH> 100644 --- a/test/api.js +++ b/test/api.js @@ -3,7 +3,7 @@ var CSSselect = require(".."), bools = require("boolbase"), assert = require("assert"); -var dom = makeDom("<div id=foo><p></p></div>")[0]; +var dom = makeDom("<div id=foo><p>foo</p></div>")[0]; describe("API", function() { describe("removes duplicates", function() { @@ -60,4 +60,16 @@ describe("API", function() { assert.equal(func, bools.trueFunc); }); }); + + describe("should have a functional parent selector (<)", function() { + it("should select the right element", function() { + var matches = CSSselect.selectAll("p < div", [dom]); + assert.equal(matches.length, 1); + assert.equal(matches[0], dom); + }); + it("should not select nodes without children", function() { + var matches = CSSselect.selectAll("p < div", [dom]); + assert.deepEqual(matches, CSSselect.selectAll("* < *", [dom])); + }); + }); });
test: added test cases for the parent selector * < *
fb55_css-select
train
ff4c75c72e5ef1699b90faaa23d83a29de2dfb73
diff --git a/lib/fetcheable_on_api.rb b/lib/fetcheable_on_api.rb index <HASH>..<HASH> 100644 --- a/lib/fetcheable_on_api.rb +++ b/lib/fetcheable_on_api.rb @@ -53,6 +53,11 @@ module FetcheableOnApi apply_pagination(collection) end + + def valid_parameters?(parameters) + parameters.is_a?(ActionController::Parameters) || + parameters.is_a?(Hash) + end end ActiveSupport.on_load :action_controller do diff --git a/lib/fetcheable_on_api/filtreable.rb b/lib/fetcheable_on_api/filtreable.rb index <HASH>..<HASH> 100644 --- a/lib/fetcheable_on_api/filtreable.rb +++ b/lib/fetcheable_on_api/filtreable.rb @@ -45,6 +45,9 @@ module FetcheableOnApi protected def apply_filters(collection) + return collection unless valid_parameters?(params) + return collection unless valid_parameters?(params[:filter]) + return collection if params[:filter].blank? filter_params = params.require(:filter) diff --git a/lib/fetcheable_on_api/pagineable.rb b/lib/fetcheable_on_api/pagineable.rb index <HASH>..<HASH> 100644 --- a/lib/fetcheable_on_api/pagineable.rb +++ b/lib/fetcheable_on_api/pagineable.rb @@ -39,10 +39,5 @@ module FetcheableOnApi collection.limit(limit).offset(offset) end - - def valid_parameters?(parameters) - parameters.is_a?(ActionController::Parameters) || - parameters.is_a?(Hash) - end end end diff --git a/lib/fetcheable_on_api/sortable.rb b/lib/fetcheable_on_api/sortable.rb index <HASH>..<HASH> 100644 --- a/lib/fetcheable_on_api/sortable.rb +++ b/lib/fetcheable_on_api/sortable.rb @@ -48,6 +48,9 @@ module FetcheableOnApi protected def apply_sort(collection) + return collection unless valid_parameters?(params) + return collection unless valid_parameters?(params[:sort]) + return collection if params[:sort].blank? ordering = {}
Add params type checks for sort and filter
fabienpiette_fetcheable_on_api
train
e8254194cd199c3039a61a58c65039eadecfb562
diff --git a/text-to-speech/src/main/java/com/ibm/watson/text_to_speech/v1/TextToSpeech.java b/text-to-speech/src/main/java/com/ibm/watson/text_to_speech/v1/TextToSpeech.java index <HASH>..<HASH> 100644 --- a/text-to-speech/src/main/java/com/ibm/watson/text_to_speech/v1/TextToSpeech.java +++ b/text-to-speech/src/main/java/com/ibm/watson/text_to_speech/v1/TextToSpeech.java @@ -1071,7 +1071,7 @@ public class TextToSpeech extends BaseService { multipartBuilder.addFormDataPart("metadata", addCustomPromptOptions.metadata().toString()); okhttp3.RequestBody fileBody = RequestUtils.inputStreamBody(addCustomPromptOptions.file(), "audio/wav"); - multipartBuilder.addFormDataPart("file", addCustomPromptOptions.filename(), fileBody); + multipartBuilder.addFormDataPart("file", "filename", fileBody); builder.body(multipartBuilder.build()); ResponseConverter<Prompt> responseConverter = ResponseConverterUtils.getValue( diff --git a/text-to-speech/src/main/java/com/ibm/watson/text_to_speech/v1/model/AddCustomPromptOptions.java b/text-to-speech/src/main/java/com/ibm/watson/text_to_speech/v1/model/AddCustomPromptOptions.java index <HASH>..<HASH> 100644 --- a/text-to-speech/src/main/java/com/ibm/watson/text_to_speech/v1/model/AddCustomPromptOptions.java +++ b/text-to-speech/src/main/java/com/ibm/watson/text_to_speech/v1/model/AddCustomPromptOptions.java @@ -25,7 +25,6 @@ public class AddCustomPromptOptions extends GenericModel { protected String promptId; protected PromptMetadata metadata; protected InputStream file; - protected String filename; /** Builder. */ public static class Builder { @@ -33,14 +32,12 @@ public class AddCustomPromptOptions extends GenericModel { private String promptId; private PromptMetadata metadata; private InputStream file; - private String filename; private Builder(AddCustomPromptOptions addCustomPromptOptions) { this.customizationId = addCustomPromptOptions.customizationId; this.promptId = addCustomPromptOptions.promptId; this.metadata = addCustomPromptOptions.metadata; this.file = addCustomPromptOptions.file; - this.filename = addCustomPromptOptions.filename; } /** Instantiates a new builder. */ @@ -53,19 +50,13 @@ public class AddCustomPromptOptions extends GenericModel { * @param promptId the promptId * @param metadata the metadata * @param file the file - * @param filename the filename */ public Builder( - String customizationId, - String promptId, - PromptMetadata metadata, - InputStream file, - String filename) { + String customizationId, String promptId, PromptMetadata metadata, InputStream file) { this.customizationId = customizationId; this.promptId = promptId; this.metadata = metadata; this.file = file; - this.filename = filename; } /** @@ -122,17 +113,6 @@ public class AddCustomPromptOptions extends GenericModel { } /** - * Set the filename. - * - * @param filename the filename - * @return the AddCustomPromptOptions builder - */ - public Builder filename(String filename) { - this.filename = filename; - return this; - } - - /** * Set the file. 
* * @param file the file @@ -141,7 +121,6 @@ public class AddCustomPromptOptions extends GenericModel { */ public Builder file(File file) throws FileNotFoundException { this.file = new FileInputStream(file); - this.filename = file.getName(); return this; } } @@ -152,12 +131,10 @@ public class AddCustomPromptOptions extends GenericModel { com.ibm.cloud.sdk.core.util.Validator.notEmpty(builder.promptId, "promptId cannot be empty"); com.ibm.cloud.sdk.core.util.Validator.notNull(builder.metadata, "metadata cannot be null"); com.ibm.cloud.sdk.core.util.Validator.notNull(builder.file, "file cannot be null"); - com.ibm.cloud.sdk.core.util.Validator.notNull(builder.filename, "filename cannot be null"); customizationId = builder.customizationId; promptId = builder.promptId; metadata = builder.metadata; file = builder.file; - filename = builder.filename; } /** @@ -227,15 +204,4 @@ public class AddCustomPromptOptions extends GenericModel { public InputStream file() { return file; } - - /** - * Gets the filename. - * - * <p>The filename for file. - * - * @return the filename - */ - public String filename() { - return filename; - } }
fix(text-to-speech-v1): generated using api def sdk-<I>-<I>-<I>-rerelease and gen <I>
watson-developer-cloud_java-sdk
train
d5073c598a38fee1f5ad2ede53fdfc836e8d127b
diff --git a/lib/mini_magick.rb b/lib/mini_magick.rb index <HASH>..<HASH> 100755 --- a/lib/mini_magick.rb +++ b/lib/mini_magick.rb @@ -224,19 +224,24 @@ module MiniMagick run_command("mogrify", "-quality", "100", "#{path}[0]") end + # Writes the temporary file out to either a file location (by passing in a String) or by + # passing in a Stream that you can #write(chunk) to repeatedly + # + # @param output_to [IOStream, String] Some kind of stream object that needs to be read or a file path as a String + # @return [IOStream, Boolean] If you pass in a file location [String] then you get a success boolean. If its a stream, you get it back. # Writes the temporary image that we are using for processing to the output path - def write(output) - if output.kind_of?(String) || !output.respond_to?(:write) - FileUtils.copy_file @path, output - run_command "identify", output # Verify that we have a good image + def write(output_to) + if output_to.kind_of?(String) || !output_to.respond_to?(:write) + FileUtils.copy_file @path, output_to + run_command "identify", output_to # Verify that we have a good image else # stream File.open(@path, "rb", ) do |f| f.binmode while chunk = f.read(8192) - output.write(chunk) + output_to.write(chunk) end end - output + output_to end end diff --git a/test/image_test.rb b/test/image_test.rb index <HASH>..<HASH> 100755 --- a/test/image_test.rb +++ b/test/image_test.rb @@ -1,6 +1,7 @@ require 'rubygems' require 'test/unit' require 'pathname' +require 'stringio' require File.expand_path('../../lib/mini_magick', __FILE__) #MiniMagick.processor = :gm
merging, fixing some bugs, and adding in some documentation
minimagick_minimagick
train
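The write method in the diff above accepts either a file path or anything that responds to write, and hand-rolls an 8 KiB chunked copy for the stream case. Below is a minimal Go sketch of the same path-or-stream dispatch; the names (writeTo, srcPath) are illustrative and not part of the mini_magick API, and io.Copy stands in for the manual chunk loop:

```go
package main

import (
	"fmt"
	"io"
	"os"
)

// writeTo copies a source file either to a destination path (string)
// or to any io.Writer; io.Copy replaces the hand-written 8 KiB read
// loop in the Ruby version. Names here are illustrative only.
func writeTo(srcPath string, dst interface{}) error {
	src, err := os.Open(srcPath)
	if err != nil {
		return err
	}
	defer src.Close()

	switch out := dst.(type) {
	case string: // a plain string is treated as a file path
		f, err := os.Create(out)
		if err != nil {
			return err
		}
		defer f.Close()
		_, err = io.Copy(f, src)
		return err
	case io.Writer: // a stream: write straight through
		_, err := io.Copy(out, src)
		return err
	default:
		return fmt.Errorf("unsupported destination type %T", dst)
	}
}

func main() {
	// Copy a (hypothetical) temp image to stdout; the error is ignored
	// here only to keep the sketch short.
	_ = writeTo("/tmp/image.png", os.Stdout)
}
```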
e3e8321075e22180dc057a98741c6661abb40d52
diff --git a/lib/yard/parser/ruby/ruby_parser.rb b/lib/yard/parser/ruby/ruby_parser.rb index <HASH>..<HASH> 100644 --- a/lib/yard/parser/ruby/ruby_parser.rb +++ b/lib/yard/parser/ruby/ruby_parser.rb @@ -202,8 +202,7 @@ module YARD begin; undef on_#{event}; rescue NameError; end def on_#{event}(tok) unless @last_ns_token == [:kw, "def"] || - (@tokens.last && @tokens.last[0] == :symbeg) || - (!@newline && %w(if while until unless).include?(tok)) + (@tokens.last && @tokens.last[0] == :symbeg) (@map[tok] ||= []) << [lineno, charno] end visit_ns_token(:#{event}, tok, true) @@ -442,6 +441,8 @@ module YARD module_eval(<<-eof, __FILE__, __LINE__ + 1) begin; undef on_#{kw}; rescue NameError; end def on_#{kw}(*args) + mapping = @map[#{kw.to_s.sub(/_mod$/, '').inspect}] + mapping.pop if mapping sr = args.last.source_range.first..args.first.source_range.last lr = args.last.line_range.first..args.first.line_range.last #{node_class}.new(:#{kw}, args, :line => lr, :char => sr) diff --git a/spec/parser/ruby/ruby_parser_spec.rb b/spec/parser/ruby/ruby_parser_spec.rb index <HASH>..<HASH> 100644 --- a/spec/parser/ruby/ruby_parser_spec.rb +++ b/spec/parser/ruby/ruby_parser_spec.rb @@ -529,5 +529,15 @@ eof eof expect(ast.jump(:class).docstring).to eq "comment 1\ncomment 2" end + + %w(if unless).each do |type| + let(:condition_type) { type } + let(:ast) { stmt '"#{' + type + ' condition?; 42; end}" ' + type + ' verbose?' } + let(:subject) { ast.jump(:string_embexpr)[0][0].source } + + it "returns correct source for interpolated non-ternary '#{type}' conditionals" do + is_expected.to eq "#{condition_type} condition?; 42; end" + end + end end end if HAVE_RIPPER
Fix handling of interpolated if/unless blocks. Resolves "beginless range" error. Fixes #<I>, #<I>, #<I>, #<I>
lsegal_yard
train
64d9a584a3d30e36914f98016f431375fe4ebf41
diff --git a/src/article/shared/FigureComponent.js b/src/article/shared/FigureComponent.js index <HASH>..<HASH> 100644 --- a/src/article/shared/FigureComponent.js +++ b/src/article/shared/FigureComponent.js @@ -43,6 +43,8 @@ export default class FigureComponent extends Component { el.addClass(`sm-${contentModel.type}`) if (mode === PREVIEW_MODE) { + // TODO: We could return the PreviewComponent directly. + // However this yields an error we need to investigate. el.append( $$(PreviewComponent, { id: this.props.model.id, diff --git a/src/article/shared/ReferenceComponent.js b/src/article/shared/ReferenceComponent.js index <HASH>..<HASH> 100644 --- a/src/article/shared/ReferenceComponent.js +++ b/src/article/shared/ReferenceComponent.js @@ -5,28 +5,26 @@ import PreviewComponent from './PreviewComponent' export default class ReferenceComponent extends NodeComponent { render ($$) { const refNode = this.getNode() - let el = $$('div').addClass('sc-reference') let label = _getReferenceLabel(refNode) let html = this.context.api.renderEntity(refNode) // TODO: do we want to display something like this // if so, use the label provider html = html || '<i>Not available</i>' if (this.props.mode === PREVIEW_MODE) { - el.append( - $$(PreviewComponent, { - id: this.props.model.id, - selected: this.props.selected, - label: label, - description: $$('div').html(html) - }) - ) + // NOTE: We return PreviewComponent directly, to prevent inheriting styles from .sc-reference + return $$(PreviewComponent, { + id: this.props.model.id, + label: label, + description: $$('div').html(html) + }) } else { + let el = $$('div').addClass('sc-reference') el.append( $$('div').addClass('se-label').append(label), $$('div').addClass('se-text').html(html) ).attr('data-id', refNode.id) + return el } - return el } }
Prevent previews from inheriting top level styles.
substance_texture
train
d88f686a91a57e46db742e2340c18212a1f38ab4
diff --git a/proxy/grpcproxy/cache/store.go b/proxy/grpcproxy/cache/store.go index <HASH>..<HASH> 100644 --- a/proxy/grpcproxy/cache/store.go +++ b/proxy/grpcproxy/cache/store.go @@ -99,9 +99,12 @@ func (c *cache) Add(req *pb.RangeRequest, resp *pb.RangeResponse) { iv = c.cachedRanges.Find(ivl) if iv == nil { - c.cachedRanges.Insert(ivl, []string{key}) + val := map[string]struct{}{key: {}} + c.cachedRanges.Insert(ivl, val) } else { - iv.Val = append(iv.Val.([]string), key) + val := iv.Val.(map[string]struct{}) + val[key] = struct{}{} + iv.Val = val } } @@ -141,8 +144,8 @@ func (c *cache) Invalidate(key, endkey []byte) { ivs = c.cachedRanges.Stab(ivl) for _, iv := range ivs { - keys := iv.Val.([]string) - for _, key := range keys { + keys := iv.Val.(map[string]struct{}) + for key := range keys { c.lru.Remove(key) } }
grpcproxy: fix memory leak. Use set instead of slice as interval value. Fixes #<I>
etcd-io_etcd
train
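The fix above swaps a []string for a map[string]struct{} so that re-caching the same key under an interval no longer appends a duplicate slice entry that the LRU eviction would never reclaim. A minimal sketch of that set idiom, with hypothetical names (interval, add):

```go
package main

import "fmt"

// interval maps a cached range onto the set of LRU keys under it.
// A map[string]struct{} behaves as a set: re-adding an existing key
// is a no-op, whereas append on a []string grows without bound.
type interval struct {
	keys map[string]struct{}
}

func (iv *interval) add(key string) {
	if iv.keys == nil {
		iv.keys = make(map[string]struct{})
	}
	iv.keys[key] = struct{}{} // idempotent
}

func main() {
	iv := &interval{}
	iv.add("range-a")
	iv.add("range-a") // before the fix, a slice would now hold two entries
	fmt.Println(len(iv.keys)) // prints 1

	for key := range iv.keys { // invalidation walks each key exactly once
		fmt.Println("evict", key)
	}
}
```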
2fa1cb126e6a1989c1629ff6355ae4ccabee1f8e
diff --git a/Consumer.js b/Consumer.js index <HASH>..<HASH> 100644 --- a/Consumer.js +++ b/Consumer.js @@ -21,7 +21,7 @@ module.exports = std.Class(Client, function(supr) { } this._onConnected = function(callback) { - callback() + if (callback) { callback() } this._intervalID = setInterval(std.bind(this, '_pollForMessages'), this._pollInterval) }
The callback may be null if the connector did not care about when we get connected (it might simply be listening for the "message" event being emitted).
marcuswestin_node-kafka
train
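The one-line fix above guards an optional callback before invoking it. The same defensive pattern in Go, where an unset callback is a nil function value and calling it would panic (function names here are illustrative):

```go
package main

import "fmt"

// onConnected starts polling whether or not the caller supplied a
// callback; invoking a nil func would panic, so guard it first.
func onConnected(callback func()) {
	if callback != nil {
		callback()
	}
	fmt.Println("polling started")
}

func main() {
	onConnected(nil) // caller only listens for "message" events; no panic
	onConnected(func() { fmt.Println("connected") })
}
```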
b0666e97f0f4e7f04badbed20bb67ea89c7e3d5e
diff --git a/fake.py b/fake.py index <HASH>..<HASH> 100644 --- a/fake.py +++ b/fake.py @@ -70,7 +70,8 @@ the run method in a subthread, with locking of critical regions. import os, sys, time, signal, pty, termios # fcntl, array, struct import threading, socket, select import gps -import packet as sniffer +# TODO: Need to fix this import! +#import packet as sniffer # The two magic numbers below have to be derived from observation. If # they're too high you'll slow the tests down a lot. If they're too low
Comment out and add a TODO so that I can successfully run "import fake" at a python3 prompt
wadda_gps3
train
c505f235cec17e49bc2af741c4330059f5d13310
diff --git a/source/rafcon/gui/models/abstract_state.py b/source/rafcon/gui/models/abstract_state.py index <HASH>..<HASH> 100644 --- a/source/rafcon/gui/models/abstract_state.py +++ b/source/rafcon/gui/models/abstract_state.py @@ -395,7 +395,8 @@ class AbstractStateModel(MetaModel, Hashable): # TODO: Should be removed with next minor release if not os.path.exists(path_meta_data): - # print "use backup because {0} is not found".format(path_meta_data) + logger.debug("Because meta data was not found in {0} use backup option {1}" + "".format(path_meta_data, os.path.join(path, storage.FILE_NAME_META_DATA_OLD))) path_meta_data = os.path.join(path, storage.FILE_NAME_META_DATA_OLD) try: @@ -406,7 +407,7 @@ class AbstractStateModel(MetaModel, Hashable): # if os.path.exists(os.path.dirname(path)): # logger.debug("Because '{1}' meta data of {0} was not loaded properly.".format(self, e)) if not path.startswith(constants.RAFCON_TEMP_PATH_STORAGE) and not os.path.exists(os.path.dirname(path)): - logger.warning("Because '{1}' meta data of {0} was not loaded properly.".format(self, e)) + logger.debug("Because '{1}' meta data of {0} was not loaded properly.".format(self, e)) tmp_meta = {} # JSON returns a dict, which must be converted to a Vividict
change warning in abstract meta data load to debug logs
DLR-RM_RAFCON
train
c6e37b2a3a1ebd5a07d103bd22857d532ea45ea6
diff --git a/lib/jrubyfx/utils/common_converters.rb b/lib/jrubyfx/utils/common_converters.rb index <HASH>..<HASH> 100644 --- a/lib/jrubyfx/utils/common_converters.rb +++ b/lib/jrubyfx/utils/common_converters.rb @@ -140,6 +140,9 @@ module JRubyFX }, :color => lambda { |value| new_value = NAME_TO_COLORS[value.to_s.gsub(/_/, "")] + if !new_value && value.kind_of?(Symbol) + raise ArgumentError.new("No such color: #{value.to_s}") + end new_value ? new_value : value }, :rectangle2d => lambda { |value|
Sample of better error reporting for converters (#<I>)
jruby_jrubyfx
train
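The converter change above raises an ArgumentError when a symbol does not name a known color, instead of silently passing the bad value through. Note the Ruby version only raises for symbols; this Go sketch simplifies that to a plain lookup with the comma-ok idiom, and the palette contents are hypothetical:

```go
package main

import "fmt"

// nameToColor is a stand-in palette; the real mapping lives in JavaFX.
var nameToColor = map[string]uint32{
	"red":  0xff0000,
	"blue": 0x0000ff,
}

// lookupColor fails fast on an unknown name rather than handing the
// caller an unusable value, mirroring the converter fix above.
func lookupColor(name string) (uint32, error) {
	c, ok := nameToColor[name]
	if !ok {
		return 0, fmt.Errorf("no such color: %s", name)
	}
	return c, nil
}

func main() {
	if _, err := lookupColor("chartreuse"); err != nil {
		fmt.Println(err) // prints: no such color: chartreuse
	}
}
```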
555800fe91334487e892209fb632c71dc75dff70
diff --git a/src/Graviton/ProxyBundle/Controller/ProxyController.php b/src/Graviton/ProxyBundle/Controller/ProxyController.php index <HASH>..<HASH> 100644 --- a/src/Graviton/ProxyBundle/Controller/ProxyController.php +++ b/src/Graviton/ProxyBundle/Controller/ProxyController.php @@ -131,7 +131,6 @@ class ProxyController $psrRequest = $this->diactorosFactory->createRequest($newRequest); $psrResponse = $this->proxy->forward($psrRequest)->to($this->getHostWithScheme($url)); - $response = $this->httpFoundationFactory->createResponse($psrResponse); $this->transformationHandler->transformResponse( $api['apiName'], @@ -220,6 +219,10 @@ class ProxyController /** * get host, scheme and port + * + * @param string $url the url + * + * @return string */ private function getHostWithScheme($url) { diff --git a/src/Graviton/ProxyBundle/Service/ApiDefinitionLoader.php b/src/Graviton/ProxyBundle/Service/ApiDefinitionLoader.php index <HASH>..<HASH> 100644 --- a/src/Graviton/ProxyBundle/Service/ApiDefinitionLoader.php +++ b/src/Graviton/ProxyBundle/Service/ApiDefinitionLoader.php @@ -68,11 +68,14 @@ class ApiDefinitionLoader /** * get the origin service definition * + * @param bool $forceReload Switch to force a new api definition object will be provided. + * * @return mixed the origin service definition (type depends on dispersal strategy) */ public function getOriginDefinition($forceReload = false) { $this->loadApiDefinition($forceReload); + return $this->definition->getOrigin(); } diff --git a/src/Graviton/ProxyBundle/Tests/Definition/Loader/DispersalStrategy/SwaggerStrategyTest.php b/src/Graviton/ProxyBundle/Tests/Definition/Loader/DispersalStrategy/SwaggerStrategyTest.php index <HASH>..<HASH> 100644 --- a/src/Graviton/ProxyBundle/Tests/Definition/Loader/DispersalStrategy/SwaggerStrategyTest.php +++ b/src/Graviton/ProxyBundle/Tests/Definition/Loader/DispersalStrategy/SwaggerStrategyTest.php @@ -23,11 +23,6 @@ class SwaggerStrategyTest extends \PHPUnit_Framework_TestCase private $sut; /** - * @var /stdClass - */ - private $swagger; - - /** * @inheritDoc * * @return void @@ -39,17 +34,6 @@ class SwaggerStrategyTest extends \PHPUnit_Framework_TestCase ->setMethods(['setDocument']) ->getMock(); $this->sut = new SwaggerStrategy($swaggerParserMock); - - - /*$this->swagger = new \stdClass(); - $this->swagger->swagger = "2.0"; - $this->swagger->paths = new \stdClass(); - $this->swagger->definitions = new \stdClass(); - $this->swagger->info = new \stdClass(); - $this->swagger->info->title = "test swagger"; - $this->swagger->info->version = "1.0.0"; - $this->swagger->basePath = "/api/prefix"; - $this->swagger->host = "testapi.local";*/ } /**
fix cs and scrutinizer bug
libgraviton_graviton
train
9c1cfc282cc411eb17f8553ff66a5a1cbdc909b0
diff --git a/elasticsearch-api/lib/elasticsearch/api/actions/bulk.rb b/elasticsearch-api/lib/elasticsearch/api/actions/bulk.rb index <HASH>..<HASH> 100644 --- a/elasticsearch-api/lib/elasticsearch/api/actions/bulk.rb +++ b/elasticsearch-api/lib/elasticsearch/api/actions/bulk.rb @@ -25,6 +25,8 @@ module Elasticsearch # or the conveniency "combined" format where data is passed along with the header # in a single item in a custom `:data` key. # + # @note The body argument is required and cannot be empty. + # # @example Perform three operations in a single request, passing actions and data as an array of hashes # # client.bulk body: [
[API] Note that a non-empty body argument is required for the bulk api
elastic_elasticsearch-ruby
train
20a5e7ae5089549509ce528204580f6373661b57
diff --git a/holoviews/plotting/plot.py b/holoviews/plotting/plot.py index <HASH>..<HASH> 100644 --- a/holoviews/plotting/plot.py +++ b/holoviews/plotting/plot.py @@ -8,7 +8,7 @@ from matplotlib import pyplot as plt from matplotlib import gridspec, animation import param -from ..core import UniformNdMapping, ViewableElement, HoloMap, \ +from ..core import ViewableElement, HoloMap, \ AdjointLayout, NdLayout, AxisLayout, LayoutTree, Element, CompositeOverlay from ..core.options import Store, Compositor from ..core import traversal
Removed unused import from plot.py
pyviz_holoviews
train
6eb25f256a86b93dcb4e4944eff286fee6866a31
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -2,7 +2,6 @@ let Botkit = require('botkit'); let _ = require('lodash'); -let express = require('express'); module.exports = (config) => { _.defaults(config, {debug: false, plugins: []}); @@ -20,19 +19,15 @@ module.exports = (config) => { } let controller = Botkit.slackbot(slackbotConfig); - let server; addHelpListeners(controller, config.plugins); - - if (config.port) { - server = startServer(config, controller); - } + startServer(config, controller); let bot = controller.spawn({ token: config.slackToken }); - startRtm(config, controller, bot, server); + startRtm(config, controller, bot); // restart if disconnected controller.on('rtm_close', (bot) => { @@ -54,11 +49,10 @@ module.exports = (config) => { * @param controller */ function startServer(config, controller) { - let expressApp = express(); - - expressApp.listen(config.port); - controller.log('listening on port ' + config.port); - return expressApp; + // TODO port will be required for multi-team auth + if (config.port) { + controller.setupWebserver(config.port); + } } /** @@ -110,7 +104,7 @@ function registerHelpListener(controller, helpInfo) { }); } -function startRtm(config, controller, bot, server) { +function startRtm(config, controller, bot) { bot.startRTM((err, connectedBot) => { if (err) { logError(controller, err, 'Error connecting to RTM'); @@ -118,7 +112,7 @@ function startRtm(config, controller, bot, server) { } _.forEach(config.plugins, (plugin) => { - plugin.init(controller, connectedBot, server); + plugin.init(controller, connectedBot, controller.webserver); }); }); } diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -24,7 +24,6 @@ "homepage": "https://github.com/colestrode/skellington#readme", "dependencies": { "botkit": "0.2.2", - "express": "^4.13.4", "lodash": "^4.6.1" }, "devDependencies": { diff --git a/test/index.spec.js b/test/index.spec.js index <HASH>..<HASH> 100644 --- a/test/index.spec.js +++ b/test/index.spec.js @@ -12,7 +12,6 @@ describe('Skellington', function() { let skellington; let botkitMock; let controllerMock; - let expressMock; let expressAppMock; let botMock; let connectedBotMock; @@ -30,28 +29,26 @@ describe('Skellington', function() { } }; + expressAppMock = {}; + controllerMock = { hears: sinon.stub(), spawn: sinon.stub().returns(botMock), log: sinon.stub(), - on: sinon.stub() + on: sinon.stub(), + setupWebserver: sinon.stub(), + webserver: expressAppMock }; botkitMock = { slackbot: sinon.stub().returns(controllerMock) }; - expressAppMock = { - listen: sinon.stub() - }; - expressMock = sinon.stub().returns(expressAppMock); - exitOrig = process.exit; process.exit = sinon.stub(); skellington = proxyquire('../index', { - 'botkit': botkitMock, - 'express': expressMock + 'botkit': botkitMock }); }); @@ -65,9 +62,9 @@ describe('Skellington', function() { skellington({slackToken: 'abc123'}); expect(botkitMock.slackbot).to.have.been.calledWith({debug: false}); - expect(expressMock).not.to.have.been.called; expect(botMock.startRTM).to.be.called; expect(controllerMock.on).to.be.calledWithMatch('rtm_close'); + expect(controllerMock.setupWebserver).not.to.be.called; }); it('should allow passed in configs', function() { @@ -82,7 +79,7 @@ describe('Skellington', function() { expect(botkitMock.slackbot).to.have.been.calledWith({debug: true, storage: storageMock}); expect(controllerMock.spawn).to.have.been.calledWith({token: 'abc123'}); - 
expect(expressAppMock.listen).to.have.been.calledWith(1234); + expect(controllerMock.setupWebserver).to.be.calledWith(1234); }); }); @@ -138,15 +135,7 @@ describe('Skellington', function() { expect(plugin.init).to.have.been.calledWith(controllerMock, connectedBotMock, expressAppMock); expect(anotherExternalBot.init).to.have.been.calledWith(controllerMock, connectedBotMock, expressAppMock); }); - - it('should not pass an express app if port is not set', function() { - skellington({plugins: [plugin]}); - botMock.startRTM.args[0][0](null, connectedBotMock); - - expect(plugin.init).to.have.been.calledWith(controllerMock, connectedBotMock, undefined); - }); }); - }); describe('register help listeners', function() {
uses built-in botkit webserver (#<I>)
Skellington-Closet_skellington
train
abb791c07cb1c20bbd8199aeccc86cc01cb401c5
diff --git a/doc/source/conf.py b/doc/source/conf.py index <HASH>..<HASH> 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -32,6 +32,7 @@ with warnings.catch_warnings() : extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.napoleon', + 'sphinx.ext.inheritance_diagram', 'sphinx.ext.intersphinx', 'sphinx.ext.pngmath']
Added sphinx extension for inheritance diagram
spacetelescope_pysynphot
train
0b6bf28dcd3686a907d04e73a090e17204f96141
diff --git a/src/main/java/org/mariadb/jdbc/internal/queryresults/resultset/MariaSelectResultSet.java b/src/main/java/org/mariadb/jdbc/internal/queryresults/resultset/MariaSelectResultSet.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/mariadb/jdbc/internal/queryresults/resultset/MariaSelectResultSet.java +++ b/src/main/java/org/mariadb/jdbc/internal/queryresults/resultset/MariaSelectResultSet.java @@ -198,7 +198,6 @@ public class MariaSelectResultSet implements ResultSet { int resultSetScrollType) { this.statement = null; this.isClosed = false; - this.protocol = protocol; if (protocol != null) { this.options = protocol.getOptions(); this.cal = protocol.getCalendar(); @@ -210,10 +209,11 @@ public class MariaSelectResultSet implements ResultSet { this.dataTypeMappingFlags = 3; this.returnTableAlias = false; } + this.protocol = null; this.columnsInformation = columnInformation; this.columnNameMap = new ColumnNameMap(columnsInformation); this.columnInformationLength = columnInformation.length; - this.isEof = false; + this.isEof = true; this.isBinaryEncoded = false; this.fetchSize = 1; this.resultSetScrollType = resultSetScrollType; diff --git a/src/test/java/org/mariadb/jdbc/ResultSetTest.java b/src/test/java/org/mariadb/jdbc/ResultSetTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/org/mariadb/jdbc/ResultSetTest.java +++ b/src/test/java/org/mariadb/jdbc/ResultSetTest.java @@ -276,4 +276,18 @@ public class ResultSetTest extends BaseTest { + ", 'row" + i + "')"); } } + + /** + * CONJ-403: NPE in getGenerated keys. + * + * @throws SQLException if error occur + */ + @Test + public void generatedKeyNpe() throws SQLException { + createTable("generatedKeyNpe", "id int not null primary key auto_increment, val int"); + Statement statement = sharedConnection.createStatement(); + statement.execute("INSERT INTO generatedKeyNpe(val) values (0)"); + ResultSet rs = statement.getGeneratedKeys(); + rs.close(); + } }
[CONJ-<I>] Clear Calendar instance before using it - part 2 +add tests
MariaDB_mariadb-connector-j
train
661daac473801f6774e08eca385dc0d7c7c0dda6
diff --git a/lib/couchrest/mixins/design_doc.rb b/lib/couchrest/mixins/design_doc.rb index <HASH>..<HASH> 100644 --- a/lib/couchrest/mixins/design_doc.rb +++ b/lib/couchrest/mixins/design_doc.rb @@ -6,9 +6,6 @@ module CouchRest def self.included(base) base.extend(ClassMethods) - base.send(:extlib_inheritable_accessor, :design_doc) - base.send(:extlib_inheritable_accessor, :design_doc_slug_cache) - base.send(:extlib_inheritable_accessor, :design_doc_fresh) end module ClassMethods diff --git a/lib/couchrest/mixins/properties.rb b/lib/couchrest/mixins/properties.rb index <HASH>..<HASH> 100644 --- a/lib/couchrest/mixins/properties.rb +++ b/lib/couchrest/mixins/properties.rb @@ -9,7 +9,7 @@ module CouchRest def self.included(base) base.class_eval <<-EOS, __FILE__, __LINE__ - extlib_inheritable_accessor(:properties) + extlib_inheritable_accessor(:properties) unless self.respond_to?(:properties) self.properties ||= [] EOS base.extend(ClassMethods)
fixed an inheritance problem with ExtendedDocument subclasses
couchrest_couchrest
train
1d3696b81b239bb41d5e44896b49a187813082fe
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -84,7 +84,7 @@ setup( long_description=readme, url='https://github.com/tell-k/goolabs', author='tell-k', - author_email='ffk2005 at gmail.com', + author_email='ffk2005@gmail.com', classifiers=classifiers, keywords=['goolabs', 'web', 'api', 'client'], install_requires=requires,
Update author_email in setup.py
tell-k_goolabs
train
74c6c86b3eecc7b89afc19456c207ccf709a69cb
diff --git a/lib/capybara/driver/rack_test_driver.rb b/lib/capybara/driver/rack_test_driver.rb index <HASH>..<HASH> 100644 --- a/lib/capybara/driver/rack_test_driver.rb +++ b/lib/capybara/driver/rack_test_driver.rb @@ -244,6 +244,13 @@ class Capybara::Driver::RackTest < Capybara::Driver::Base def put(*args, &block); reset_cache; super; end def delete(*args, &block); reset_cache; super; end + def follow_redirects! + 5.times do + follow_redirect! if response.redirect? + end + raise Capybara::InfiniteRedirectError, "infinite redirect detected!" if response.redirect? + end + private def reset_cache @@ -259,16 +266,6 @@ private request.path rescue "" end - def follow_redirects! - 5.times do - follow_redirect! if response.redirect? - end - - if response.redirect? - raise Capybara::InfiniteRedirectError, "infinite redirect detected!" - end - end - def env env = {} begin
Make follow_redirects! public
teamcapybara_capybara
train
79b598d7827d3b2a6ebe084e4b3f016e72da7b86
diff --git a/lib/puppet-lint/lexer.rb b/lib/puppet-lint/lexer.rb index <HASH>..<HASH> 100644 --- a/lib/puppet-lint/lexer.rb +++ b/lib/puppet-lint/lexer.rb @@ -204,9 +204,13 @@ class PuppetLint end def new_token(type, value, chunk) - lines = chunk.split("\n") - line_no = lines.empty? ? 1 : lines.count - column = lines.empty? ? 1 : lines.last.length + line_no = chunk.count("\n") + 1 + if line_no == 1 + column = chunk.length + else + column = chunk.length - chunk.rindex("\n") - 1 + end + column += 1 if column == 0 token = Token.new(type, value, line_no, column) unless tokens.last.nil?
Faster way to calculate line & column
rodjek_puppet-lint
train
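The lexer change above computes line and column by counting newlines and locating the last one, instead of splitting the chunk into an array on every token. A Go rendering of the same arithmetic; the function name and sample input are illustrative:

```go
package main

import (
	"fmt"
	"strings"
)

// position returns the 1-based line and column at the end of chunk.
// strings.Count plus LastIndexByte avoids allocating a slice of all
// lines, which is the same constant-allocation trick as the Ruby fix.
func position(chunk string) (line, column int) {
	line = strings.Count(chunk, "\n") + 1
	if i := strings.LastIndexByte(chunk, '\n'); i >= 0 {
		column = len(chunk) - i - 1
	} else {
		column = len(chunk)
	}
	if column == 0 { // token starting right after a newline
		column = 1
	}
	return line, column
}

func main() {
	fmt.Println(position("class foo {\n  $bar")) // prints: 2 6
}
```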
f310cfa96762c37e37cdce81efb2f5a487b6aea0
diff --git a/go_agent/src/bosh/agent/action/apply.go b/go_agent/src/bosh/agent/action/apply.go index <HASH>..<HASH> 100644 --- a/go_agent/src/bosh/agent/action/apply.go +++ b/go_agent/src/bosh/agent/action/apply.go @@ -18,11 +18,9 @@ func newApply(fs boshsys.FileSystem) (apply applyAction) { } func (a applyAction) Run(payloadBytes []byte) (value interface{}, err error) { - type payloadType struct { + var payload struct { Arguments []interface{} } - - var payload payloadType err = json.Unmarshal(payloadBytes, &payload) if err != nil { return
anonymize payload struct on apply action. This is idiomatic when the type is only used to facilitate unmarshaling.
cloudfoundry_bosh
train
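The bosh change above inlines the payload type as an anonymous struct, since the type exists only to shape a single json.Unmarshal call. A minimal standalone sketch of the idiom; the Arguments field matches the diff, the rest is scaffolding:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func run(payloadBytes []byte) error {
	// Anonymous struct: no named, package-level type is needed when
	// the shape is used exactly once, at the unmarshal site.
	var payload struct {
		Arguments []interface{}
	}
	if err := json.Unmarshal(payloadBytes, &payload); err != nil {
		return err
	}
	fmt.Println(payload.Arguments)
	return nil
}

func main() {
	_ = run([]byte(`{"Arguments": ["first", 2]}`)) // prints: [first 2]
}
```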
99e62d67447361300510304ddef27c4466c6873f
diff --git a/vfsgen_test.go b/vfsgen_test.go index <HASH>..<HASH> 100644 --- a/vfsgen_test.go +++ b/vfsgen_test.go @@ -13,7 +13,8 @@ import ( //go:generate go run vfsgen_test_generate.go -func Example() { +// Basic functionality test. +func ExampleBasic() { var fs http.FileSystem = assets walkFn := func(path string, fi os.FileInfo, err error) error {
Hide Example test from godoc. It's an internal package test (implemented as an example), not an actual example of usage, so it's better to hide it to avoid confusion.
shurcooL_vfsgen
train
5dea7eff0cb86cd57e7d29f7e0d45af1c308ee38
diff --git a/tensorboard/compat/__init__.py b/tensorboard/compat/__init__.py index <HASH>..<HASH> 100644 --- a/tensorboard/compat/__init__.py +++ b/tensorboard/compat/__init__.py @@ -63,7 +63,6 @@ def import_tf_v2(): if USING_TF: # Check if this is TF 2.0 by looking for a known 2.0-only tf.summary symbol. # TODO(nickfelt): determine a cleaner way to do this. - # DO NOT SUBMIT - replicate to GitHub if hasattr(tf, 'summary') and hasattr(tf.summary, 'write'): return tf else:
remove an accidentally submitted DO NOT SUBMIT (#<I>)
tensorflow_tensorboard
train
da228f0fa8d0ab6aeb2fd7db2e753447b94eeafa
diff --git a/changelog.markdown b/changelog.markdown index <HASH>..<HASH> 100644 --- a/changelog.markdown +++ b/changelog.markdown @@ -1,5 +1,8 @@ # Changelog ## 3.5 +### 3.5.2 +Persist whitelist across Config instances + ### 3.5.1 Correctly set version diff --git a/lib/sauce/config.rb b/lib/sauce/config.rb index <HASH>..<HASH> 100644 --- a/lib/sauce/config.rb +++ b/lib/sauce/config.rb @@ -100,7 +100,10 @@ module Sauce @undefaulted_opts.merge! load_options_from_yaml @undefaulted_opts.merge! load_options_from_environment @undefaulted_opts.merge! load_options_from_heroku unless ENV["SAUCE_DISABLE_HEROKU_CONFIG"] - @undefaulted_opts.merge! Sauce.get_config.opts rescue {} + + global_config = Sauce.get_config + @undefaulted_opts.merge! global_config.opts if global_config.opts + @whitelisted_capabilities = global_config.whitelisted_capabilities end @undefaulted_opts.merge! opts diff --git a/lib/sauce/version.rb b/lib/sauce/version.rb index <HASH>..<HASH> 100644 --- a/lib/sauce/version.rb +++ b/lib/sauce/version.rb @@ -1,6 +1,6 @@ module Sauce MAJOR_VERSION = '3.5' - PATCH_VERSION = '1' + PATCH_VERSION = '2' def version "#{MAJOR_VERSION}.#{PATCH_VERSION}" diff --git a/spec/sauce/config/config_spec.rb b/spec/sauce/config/config_spec.rb index <HASH>..<HASH> 100644 --- a/spec/sauce/config/config_spec.rb +++ b/spec/sauce/config/config_spec.rb @@ -269,6 +269,13 @@ describe Sauce::Config do config.to_desired_capabilities[:new_option].should include 'elderflower' end + it 'should include them when created anew' do + config.whitelist :new_option + config_two = Sauce::Config.new({}) + config_two[:new_option] = 'elderflower' + config_two.to_desired_capabilities[:new_option].should include 'elderflower' + end + it 'should allow multiple exceptions' do config[:new_option] = 'elderflower' config[:another_option] = 'mint'
Have whitelist persist across all instances of Sauce.config
saucelabs_sauce_ruby
train
ac82798d0aa32079656e6ac0a84c55fe29db7dba
diff --git a/cmd/fs-v1-helpers.go b/cmd/fs-v1-helpers.go index <HASH>..<HASH> 100644 --- a/cmd/fs-v1-helpers.go +++ b/cmd/fs-v1-helpers.go @@ -228,15 +228,6 @@ func fsStatDir(ctx context.Context, statDir string) (os.FileInfo, error) { return fi, nil } -// Returns if the dirPath is a directory. -func fsIsDir(ctx context.Context, dirPath string) bool { - fi, err := fsStat(ctx, dirPath) - if err != nil { - return false - } - return fi.IsDir() -} - // Lookup if file exists, returns file attributes upon success. func fsStatFile(ctx context.Context, statFile string) (os.FileInfo, error) { fi, err := fsStat(ctx, statFile) @@ -418,11 +409,6 @@ func fsRenameFile(ctx context.Context, sourcePath, destPath string) error { return err } - // Verify if source path exists. - if _, err := os.Stat(sourcePath); err != nil { - return osErrToFSFileErr(err) - } - if err := renameAll(sourcePath, destPath); err != nil { logger.LogIf(ctx, err) return err diff --git a/cmd/fs-v1-helpers_test.go b/cmd/fs-v1-helpers_test.go index <HASH>..<HASH> 100644 --- a/cmd/fs-v1-helpers_test.go +++ b/cmd/fs-v1-helpers_test.go @@ -548,18 +548,6 @@ func TestFSRemoveMeta(t *testing.T) { } } -func TestFSIsDir(t *testing.T) { - dirPath, err := ioutil.TempDir(globalTestTmpDir, "minio-") - if err != nil { - t.Fatalf("Unable to create tmp directory %s", err) - } - defer os.RemoveAll(dirPath) - - if !fsIsDir(context.Background(), dirPath) { - t.Fatalf("Expected %s to be a directory", dirPath) - } -} - func TestFSIsFile(t *testing.T) { dirPath, err := ioutil.TempDir(globalTestTmpDir, "minio-") if err != nil { diff --git a/cmd/fs-v1.go b/cmd/fs-v1.go index <HASH>..<HASH> 100644 --- a/cmd/fs-v1.go +++ b/cmd/fs-v1.go @@ -691,10 +691,6 @@ func (fs *FSObjects) defaultFsJSON(object string) fsMetaV1 { func (fs *FSObjects) getObjectInfo(ctx context.Context, bucket, object string) (oi ObjectInfo, e error) { fsMeta := fsMetaV1{} if hasSuffix(object, slashSeparator) { - // Since we support PUT of a "directory" object, we allow HEAD. - if !fsIsDir(ctx, pathJoin(fs.fsPath, bucket, object)) { - return oi, errFileNotFound - } fi, err := fsStatDir(ctx, pathJoin(fs.fsPath, bucket, object)) if err != nil { return oi, err
Remove unneeded calls on FS (#<I>)
minio_minio
train
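The minio commit above drops the os.Stat that preceded the rename: the rename itself already reports a missing source, so the extra stat was both redundant and a time-of-check/time-of-use race. A sketch of the leaner shape, with hypothetical names:

```go
package main

import (
	"fmt"
	"os"
)

// renameFile lets os.Rename itself surface a missing source instead
// of stat-ing first; os.IsNotExist unwraps the *os.LinkError that
// Rename returns.
func renameFile(sourcePath, destPath string) error {
	if err := os.Rename(sourcePath, destPath); err != nil {
		if os.IsNotExist(err) {
			return fmt.Errorf("file not found: %s", sourcePath)
		}
		return err
	}
	return nil
}

func main() {
	fmt.Println(renameFile("/no/such/file", "/tmp/dest"))
}
```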
b3b2b604b8b4d55ea6a1285b35b8071f03928785
diff --git a/lwr/lwr_client/client.py b/lwr/lwr_client/client.py index <HASH>..<HASH> 100644 --- a/lwr/lwr_client/client.py +++ b/lwr/lwr_client/client.py @@ -259,22 +259,15 @@ class JobClient(BaseJobClient): check_complete_response = self._raw_execute("check_complete", {"job_id": self.job_id}) return check_complete_response - def check_complete(self, response=None): - """ - Return boolean indicating whether the job is complete. - """ - if response is None: - response = self.raw_check_complete() - return response["complete"] == "true" - def get_status(self): check_complete_response = self.raw_check_complete() # Older LWR instances won't set status so use 'complete', at some # point drop backward compatibility. status = check_complete_response.get("status", None) - # Bug in certains older LWR instances returned literal "status". if status in ["status", None]: - complete = self.check_complete(check_complete_response) + # LEGACY: Bug in certains older LWR instances returned literal + # "status". + complete = self.raw_check_complete()["complete"] == "true" old_status = "complete" if complete else "running" status = old_status return status
Eliminate another unneeded method in JobClient.
galaxyproject_pulsar
train
a66181c36e465ee37dc9cbbc4144d28e60e404aa
diff --git a/holoviews/__init__.py b/holoviews/__init__.py index <HASH>..<HASH> 100644 --- a/holoviews/__init__.py +++ b/holoviews/__init__.py @@ -9,7 +9,7 @@ sys.path.insert(0, os.path.join(_cwd, '..', 'param')) import param -__version__ = param.Version(release=(1,8,2), fpath=__file__, +__version__ = param.Version(release=(1,8,3), fpath=__file__, commit="$Format:%h$", reponame='holoviews') from .core import archive, config # noqa (API import) diff --git a/meta.yaml b/meta.yaml index <HASH>..<HASH> 100644 --- a/meta.yaml +++ b/meta.yaml @@ -1,6 +1,6 @@ package: name: holoviews - version: 1.8.2 + version: 1.8.3 source: path: . diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -36,7 +36,7 @@ Check out the `HoloViews web site <http://holoviews.org>`_ for extensive example setup_args.update(dict( name='holoviews', - version="1.8.2", + version="1.8.3", install_requires = install_requires, extras_require = extras_require, description='Stop plotting your data - annotate your data and let it visualize itself.',
Bumped version to <I>
pyviz_holoviews
train
bbdd9b5017181deea6dd51bcc9165e892ad5d5d7
diff --git a/lib/openapi/mongoid/spec_builder.rb b/lib/openapi/mongoid/spec_builder.rb index <HASH>..<HASH> 100644 --- a/lib/openapi/mongoid/spec_builder.rb +++ b/lib/openapi/mongoid/spec_builder.rb @@ -48,31 +48,32 @@ module Openapi class_methods do def spec_params(options) - self.spec_collection_name = options[:collection_name] - self.spec_resource_name = options[:resource_name] self.spec_resource_class = options[:resource_class] - self.spec_except_actions = options[:except_actions] + self.spec_resource_name = options[:resource_name] + self.spec_collection_name = options[:collection_name] self.spec_relative_path = options[:relative_path] + self.spec_except_actions = options[:except_actions] end def build_openapi_specification(options) - self.spec_base_path = options[:base_path] + self.spec_except_actions ||= + [] + + self.spec_base_path = + options[:base_path] self.spec_relative_path ||= ('/' + to_s.remove(/Controller$/).gsub('::', '/').underscore). remove(spec_base_path) - self.spec_except_actions ||= [] - - self.spec_collection_name ||= - to_s.split('::').last.sub(/Controller$/, '') + self.spec_resource_class ||= + self.try(:crud_resource_class) self.spec_resource_name ||= - self.spec_collection_name.singularize + self.spec_resource_class.to_s.remove('::') - self.spec_resource_class ||= self.try(:crud_resource_class) - self.spec_resource_class ||= - self.spec_resource_name.constantize + self.spec_collection_name ||= + self.spec_resource_name.pluralize build_openapi_definitions build_openapi_paths @@ -88,9 +89,9 @@ module Openapi end def build_openapi_definitions - collection_name = spec_collection_name - resource_class = spec_resource_class - resource_name = spec_resource_name + collection_name = spec_collection_name + resource_class = spec_resource_class + resource_name = spec_resource_name resource_property_name = resource_name.underscore.to_sym swagger_schema resource_name do @@ -169,7 +170,7 @@ module Openapi if include_index operation :get do key :tags, [plural_name] - key :summary, 'Index' + key :summary, "index#{plural_name}" key :operationId, "index#{plural_name}" key :produces, json_mime @@ -257,7 +258,7 @@ module Openapi if include_create operation :post do key :tags, [plural_name] - key :summary, 'Create' + key :summary, "create#{plural_name}" key :operationId, "create#{plural_name}" key :produces, json_mime @@ -309,7 +310,7 @@ module Openapi if include_show operation :get do key :tags, [plural_name] - key :summary, 'Show' + key :summary, "show#{name}ById" key :operationId, "show#{name}ById" key :produces, json_mime @@ -354,7 +355,7 @@ module Openapi if include_update operation :put do key :tags, [plural_name] - key :summary, 'Update' + key :summary, "update#{name}" key :operationId, "update#{name}" key :produces, json_mime @@ -408,7 +409,7 @@ module Openapi if include_destroy operation :delete do key :tags, [plural_name] - key :summary, 'Destroy' + key :summary, "destroy#{name}" key :operationId, "destroy#{name}" parameter do diff --git a/lib/openapi/version.rb b/lib/openapi/version.rb index <HASH>..<HASH> 100644 --- a/lib/openapi/version.rb +++ b/lib/openapi/version.rb @@ -1,3 +1,3 @@ module Openapi - VERSION = '0.3.5'.freeze + VERSION = '0.3.6'.freeze end
Slight change on how resource class is identified in spec
slate-studio_openapi-rails
train
f2c292a4f15d4033db005f67579823aefbd21d7a
diff --git a/src/Context/ApiContext.php b/src/Context/ApiContext.php index <HASH>..<HASH> 100644 --- a/src/Context/ApiContext.php +++ b/src/Context/ApiContext.php @@ -205,6 +205,27 @@ class ApiContext implements ApiClientAwareContext, ArrayContainsComparatorAwareC } /** + * Set multipart form parameters + * + * @param TableNode $table Table with name / value pairs + * @return self + * + * @Given the following multipart are set: + */ + public function setRequestMultipart(TableNode $table) { + if (!isset($this->requestOptions['multipart'])) { + $this->requestOptions['multipart'] = []; + } + foreach ($table as $row) { + $this->requestOptions['multipart'][] = [ + 'name' => $row['name'], + 'contents' => $row['value'], + ]; + } + return $this; + } + + /** * Set the request body to a string * * @param resource|string|PyStringNode $string The content to set as the request body
Add helper method to submit data for multipart/form-data.
imbo_behat-api-extension
train
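The new step above accumulates name/value pairs into Guzzle's multipart request option, one entry per table row. For comparison, a Go sketch building the same kind of multipart/form-data body with the standard library; buildMultipart is an illustrative name, not part of any of these projects:

```go
package main

import (
	"bytes"
	"fmt"
	"mime/multipart"
)

// buildMultipart writes each name/value pair as a form-data field and
// returns the encoded body plus the Content-Type header (which carries
// the generated boundary).
func buildMultipart(fields map[string]string) (*bytes.Buffer, string, error) {
	body := &bytes.Buffer{}
	w := multipart.NewWriter(body)
	for name, value := range fields {
		if err := w.WriteField(name, value); err != nil {
			return nil, "", err
		}
	}
	if err := w.Close(); err != nil { // writes the closing boundary
		return nil, "", err
	}
	return body, w.FormDataContentType(), nil
}

func main() {
	body, contentType, err := buildMultipart(map[string]string{"field": "value"})
	if err != nil {
		panic(err)
	}
	fmt.Println(contentType, body.Len())
}
```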