hash: stringlengths (40 to 40)
diff: stringlengths (131 to 114k)
message: stringlengths (7 to 980)
project: stringlengths (5 to 67)
split: stringclasses (1 value)
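Each record below carries these five fields in order (hash, diff, message, project, split), one value per line. As a rough sketch only, the snippet below shows one way such a flattened dump could be regrouped into records. It assumes every field value sits on exactly one line (a few long diff values in this dump actually wrap onto a second line, and the final record is truncated), and the file name commits_dump.txt is a hypothetical placeholder, not part of the dataset.

```python
# Sketch: regroup the flattened dump into dict records.
# Assumptions (not from the dump itself): each field value occupies exactly
# one line, fields appear in the fixed order below, and "commits_dump.txt"
# is a hypothetical file holding the rows shown here without the schema header.
FIELDS = ("hash", "diff", "message", "project", "split")

def read_records(path):
    with open(path, encoding="utf-8") as fh:
        values = [line.rstrip("\n") for line in fh]
    # Walk the flat value list five entries at a time.
    return [
        dict(zip(FIELDS, values[i:i + len(FIELDS)]))
        for i in range(0, len(values) - len(FIELDS) + 1, len(FIELDS))
    ]

if __name__ == "__main__":
    for rec in read_records("commits_dump.txt"):
        print(rec["project"], rec["hash"][:8], rec["message"][:60])
```

A real loader would also need to handle multi-line diff values, for example by reading the data from its original structured source (JSON or CSV) rather than from this flattened text.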
3ee6e53bcf155e8e1d3ced2a73dda3a398b792a4
diff --git a/unit_tests.py b/unit_tests.py index <HASH>..<HASH> 100644 --- a/unit_tests.py +++ b/unit_tests.py @@ -518,4 +518,8 @@ class TablePivotTests_Slotted(unittest.TestCase, TablePivotTests, UsingSlottedOb if __name__ == '__main__': + if sys.version_info[:2] == (2, 6): + print('unit_tests.py only runs on Python 2.7 or later') + sys.exit(0) + unittest.main()
disable unit_tests on Py<I>, incompatible unittest features
ptmcg_littletable
train
60850510fd75c28243940af5cc0bb5b060f4ece3
diff --git a/src/Aggregation/Metric/ScriptedMetricAggregation.php b/src/Aggregation/Metric/ScriptedMetricAggregation.php index <HASH>..<HASH> 100644 --- a/src/Aggregation/Metric/ScriptedMetricAggregation.php +++ b/src/Aggregation/Metric/ScriptedMetricAggregation.php @@ -72,7 +72,7 @@ class ScriptedMetricAggregation extends AbstractAggregation */ public function getType() { - return 'stats'; + return 'scripted_metric'; } /**
Scripted metric type mismatch
ongr-io_ElasticsearchDSL
train
0dd7b0599059e992eff68faffc417bafca7db935
diff --git a/vcr/cassette.py b/vcr/cassette.py index <HASH>..<HASH> 100644 --- a/vcr/cassette.py +++ b/vcr/cassette.py @@ -84,23 +84,22 @@ class Cassette(object): return CassetteContextDecorator.from_args(cls, *args, **kwargs) def __init__(self, path, serializer=yamlserializer, record_mode='once', - match_on=(uri, method), filter_headers=(), - filter_query_parameters=(), before_record_request=None, - before_record_response=None, ignore_hosts=(), - ignore_localhost=(), custom_patches=()): + match_on=(uri, method), before_record_request=None, + before_record_response=None, custom_patches=()): + self._path = path self._serializer = serializer self._match_on = match_on self._before_record_request = before_record_request or (lambda x: x) self._before_record_response = before_record_response or (lambda x: x) + self.record_mode = record_mode + self.custom_patches = custom_patches # self.data is the list of (req, resp) tuples self.data = [] self.play_counts = Counter() self.dirty = False self.rewound = False - self.record_mode = record_mode - self.custom_patches = custom_patches @property def play_count(self):
Get rid of all the constructor parameters that were removed in <I>c3b<I>c<I>a3a<I>b<I>fac8fcf6fde<I>e<I>
kevin1024_vcrpy
train
868bbedacdab50a178681efceb9bba79b3e93435
diff --git a/src/Location/WorkingSchedule.php b/src/Location/WorkingSchedule.php index <HASH>..<HASH> 100644 --- a/src/Location/WorkingSchedule.php +++ b/src/Location/WorkingSchedule.php @@ -325,7 +325,8 @@ class WorkingSchedule $timeslots = []; $datePeriod = $this->createPeriodForDays($dateTime); - foreach ($datePeriod as $date) { + + foreach ($datePeriod ?: [] as $date) { $dateString = $date->toDateString(); $periodTimeslot = $this->forDate($date) @@ -450,9 +451,9 @@ class WorkingSchedule protected function createPeriodForDays($dateTime) { - $startDate = $this->nextOpenAt( - $dateTime->copy()->startOfDay()->subDays(2) - ); + $startDate = $dateTime->copy()->startOfDay()->subDays(2); + if (!$startDate = $this->nextOpenAt($startDate)) + return FALSE; $endDate = $dateTime->copy()->endOfDay()->addDays($this->days); if ($this->forDate($endDate)->closesLate())
Fix bug where startDate is null when closed (#<I>) * Fix bug where startDate is null when closed * refactor
tastyigniter_flame
train
f5b94e311f3fd994af5a45ffea47f8161c3ba34e
diff --git a/core/src/main/java/com/google/errorprone/bugpatterns/MathAbsoluteRandom.java b/core/src/main/java/com/google/errorprone/bugpatterns/MathAbsoluteRandom.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/com/google/errorprone/bugpatterns/MathAbsoluteRandom.java +++ b/core/src/main/java/com/google/errorprone/bugpatterns/MathAbsoluteRandom.java @@ -18,7 +18,6 @@ package com.google.errorprone.bugpatterns; import static com.google.errorprone.BugPattern.SeverityLevel.WARNING; import static com.google.errorprone.matchers.Matchers.allOf; -import static com.google.errorprone.matchers.Matchers.anyOf; import static com.google.errorprone.matchers.Matchers.argument; import static com.google.errorprone.matchers.Matchers.instanceMethod; import static com.google.errorprone.matchers.Matchers.staticMethod; @@ -44,13 +43,13 @@ public class MathAbsoluteRandom extends BugChecker implements MethodInvocationTr staticMethod().onClass("java.lang.Math").named("abs"), argument( 0, - anyOf( - instanceMethod().onDescendantOf("java.util.Random"), - staticMethod().onClass("java.lang.Math").named("random")))); + instanceMethod() + .onDescendantOf("java.util.Random") + .namedAnyOf("nextInt", "nextLong") + .withParameters())); @Override public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) { - if (RANDOM_ABS_VAL.matches(tree, state)) { return describeMatch(tree); } diff --git a/core/src/test/java/com/google/errorprone/bugpatterns/MathAbsoluteRandomTest.java b/core/src/test/java/com/google/errorprone/bugpatterns/MathAbsoluteRandomTest.java index <HASH>..<HASH> 100644 --- a/core/src/test/java/com/google/errorprone/bugpatterns/MathAbsoluteRandomTest.java +++ b/core/src/test/java/com/google/errorprone/bugpatterns/MathAbsoluteRandomTest.java @@ -29,21 +29,23 @@ public class MathAbsoluteRandomTest { CompilationTestHelper.newInstance(MathAbsoluteRandom.class, getClass()); @Test - public void math() { + public void random() { helper .addSourceLines( "Test.java", + "import java.util.Random;", "class Test {", + "private static final Random random = new Random();", " void f() {", " // BUG: Diagnostic contains: MathAbsoluteRandom", - " Math.abs(Math.random()); ", + " Math.abs(random.nextInt()); ", " }", "}") .doTest(); } @Test - public void random() { + public void randomWithBounds() { helper .addSourceLines( "Test.java", @@ -51,8 +53,7 @@ public class MathAbsoluteRandomTest { "class Test {", "private static final Random random = new Random();", " void f() {", - " // BUG: Diagnostic contains: MathAbsoluteRandom", - " Math.abs(random.nextInt()); ", + " Math.abs(random.nextInt(10)); ", " }", "}") .doTest(); @@ -97,4 +98,18 @@ public class MathAbsoluteRandomTest { "}") .doTest(); } + + @Test + public void negativeDouble() { + helper + .addSourceLines( + "Test.java", + "import java.util.Random;", + "class Test {", + " void f() {", + " double random = Math.abs(new Random().nextDouble());", + " }", + "}") + .doTest(); + } } diff --git a/docs/bugpattern/MathAbsoluteRandom.md b/docs/bugpattern/MathAbsoluteRandom.md index <HASH>..<HASH> 100644 --- a/docs/bugpattern/MathAbsoluteRandom.md +++ b/docs/bugpattern/MathAbsoluteRandom.md @@ -9,8 +9,12 @@ long veryNegativeLong = Math.abs(Long.MIN_VALUE); ``` When trying to generate positive random numbers by using `Math.abs` around a -random positive-or-negative number, there will be (very infrequent) occasions -where the random number will be negative. 
+random positive-or-negative integer (or long), there will a rare edge case where +the returned value will be negative. + +This is because there is no positive integer with the same magnitude as +`Integer.MIN_VALUE`, which is equal to `-Integer.MAX_VALUE - 1`. Floating point +numbers don't suffer from this problem, as the sign is stored in a separate bit. Instead, one should use random number generation functions that are guaranteed to generate positive numbers:
MathAbsoluteRandom: don't complain about Math.abs(double|float) ------------- Created by MOE: <URL>
google_error-prone
train
7bb158e00e1c91268fd4a9547179b8c443b14dd7
diff --git a/app/models/redirect.rb b/app/models/redirect.rb index <HASH>..<HASH> 100644 --- a/app/models/redirect.rb +++ b/app/models/redirect.rb @@ -8,7 +8,8 @@ class Redirect < ApplicationRecord before_validation do # Adding a leading slash at this point makes sure the uniqueness validation # keeps working. - self.source_uri = source_uri.gsub(/^(?!\/)/, '/') if source_uri.present? + self.source_uri = add_leading_slash(source_uri) + self.destination_uri = add_leading_slash(destination_uri) end def enabled? @@ -29,4 +30,10 @@ class Redirect < ApplicationRecord return next_in_chain.works?(base_url: base_url) if next_in_chain.present? response.redirect_works?(base_url + destination_uri) end + + private + + def add_leading_slash(value) + value.to_s.gsub(/^(?!\/)/, '/') if value.present? + end end diff --git a/spec/decorators/redirect_decorator_spec.rb b/spec/decorators/redirect_decorator_spec.rb index <HASH>..<HASH> 100644 --- a/spec/decorators/redirect_decorator_spec.rb +++ b/spec/decorators/redirect_decorator_spec.rb @@ -1,10 +1,10 @@ require 'rails_helper' describe RedirectDecorator do - let(:instance) { build(:redirect).decorate } + subject { build(:redirect).decorate } it '#status_code_collection' do - expect(instance.status_code_collection).to eq [ + expect(subject.status_code_collection).to eq [ ['301 (Moved Permanently)', '301'], ['303 (See Other)', '303'], ['307 (Temporary Redirect)', '307'] @@ -15,11 +15,11 @@ describe RedirectDecorator do redirect = create(:redirect, source_uri: 'foo', destination_uri: 'bar').decorate I18n.with_locale :nl do - expect(redirect.summary).to eq 'Van foo naar bar' + expect(redirect.summary).to eq 'Van /foo naar /bar' end end it '#respond_to?' do - expect(instance).to respond_to(:status_code_collection, :summary) + expect(subject).to respond_to(:status_code_collection, :summary) end end
add leading slash functionality to Redirect#destination_uri as well
udongo_udongo
train
4030824d26845239355868ffa1fa855c9144fd28
diff --git a/scripts/pixel-integration.py b/scripts/pixel-integration.py index <HASH>..<HASH> 100644 --- a/scripts/pixel-integration.py +++ b/scripts/pixel-integration.py @@ -2,45 +2,76 @@ import numpy as np import scipy as sp import scipy.ndimage as nd +import matplotlib.pyplot as pl + from cbamf import const, runner from cbamf.test import init from cbamf.states import prepare_image -# the factor of coarse-graining, goal particle size, and larger size -f = 8 -sigma = 1e-5 +def set_image(state, cg, sigma): + image = cg + np.random.randn(*cg.shape)*sigma + image = np.pad(image, const.PAD, mode='constant', constant_values=const.PADVAL) + state.set_image(image) + state.sigma = sigma + state.reset() + +def pxint(factor=8, dx=np.array([0,0,0])): + # the factor of coarse-graining, goal particle size, and larger size + f = factor + + goalsize = 8 + goalpsf = np.array([2.0, 1.0, 3.0]) + + bigsize = goalsize * f + bigpsf = goalpsf * np.array([f,f,1]) + + s0 = init.create_single_particle_state( + imsize=np.array((4*goalsize, 4*bigsize, 4*bigsize)), + radius=bigsize, psfargs={'params': bigpsf, 'error': 1e-6}, + stateargs={'zscale': 1.0*f}) + s0.obj.pos += np.array([0,1,1]) * (f-1.0)/2.0 + s0.obj.pos += np.array([1,f,f]) * dx + s0.reset() + + # coarse-grained image + sl = np.s_[s0.pad:-s0.pad,s0.pad:-s0.pad,s0.pad:-s0.pad] + m = s0.get_model_image()[sl] -goalsize = 8 -goalpsf = np.array([2.0, 1.0, 3.0/f]) + # indices for coarse-graining + e = m.shape[1] + i = np.linspace(0, e/f, e, endpoint=False).astype('int') + j = np.linspace(0, e/f, e/f, endpoint=False).astype('int') + z,y,x = np.meshgrid(*(j,i,i), indexing='ij') + ind = x + e*y + e*e*z -bigsize = goalsize * f -bigpsf = goalpsf * f + # finally, c-g'ed image + cg = nd.mean(m, labels=ind, index=np.unique(ind)).reshape(e/f, e/f, e/f) -s0 = init.create_single_particle_state(imsize=np.array((4*goalsize, 4*bigsize, 4*bigsize)), - radius=bigsize, psfargs={'params': bigpsf, 'error': 1e-6}, stateargs={'zscale': 1.0*f}) -s0.obj.pos[:,1:] += (f-1.0)/2.0 -s0.reset() + # place that into a new image at the expected parameters + s = init.create_single_particle_state(imsize=4*goalsize, sigma=0.05, + radius=goalsize, psfargs={'params': goalpsf, 'error': 1e-6}) + s.obj.pos += dx + s.reset() -# coarse-grained image -sl = np.s_[s0.pad:-s0.pad,s0.pad:-s0.pad,s0.pad:-s0.pad] + # measure the true inferred parameters + return s, cg -m = s0.get_model_image()[sl] -e = m.shape[1] +def doplot(): + """ + we want to display the errors introduced by pixelation so we plot: + * zero noise, cg image, fit + * SNR 20, cg image, fit + * CRB for both + """ + s,im = pxint() -# indices for coarse-graining -i = np.linspace(0, e/f, e, endpoint=False).astype('int') -j = np.linspace(0, e/f, e/f, endpoint=False).astype('int') -z,y,x = np.meshgrid(*(j,i,i), indexing='ij') -ind = x + e*y + e*e*z + labels = [ + 'pos-z', 'pos-y', 'pos-x', 'rad', + 'psf-x', 'psf-y', 'psf-z', 'ilm', + 'off', 'rscale', 'zscale', 'sigma' + ] -# finally, c-g'ed image -cg = nd.mean(m, labels=ind, index=np.unique(ind)).reshape(e/f, e/f, e/f) -image = cg + np.random.randn(*cg.shape)*sigma -image = np.pad(image, const.PAD, mode='constant', constant_values=const.PADVAL) + set_image(s, im, 1e-6) + set_image(s, im, 5e-2) -s = init.create_single_particle_state(imsize=4*goalsize, sigma=sigma, - radius=goalsize, psfargs={'params': goalpsf, 'error': 1e-6}) -s.set_image(image) -diff = s.image - s.get_model_image() -h,l = runner.do_samples(s, 15, 0) -diff2 = s.image - s.get_model_image() + fig = pl.figure()
verified pixel integration to the 8bit level
peri-source_peri
train
8f37825225c1c8466495ea7ec993440d9e449684
diff --git a/tcconfig/shaper/_interface.py b/tcconfig/shaper/_interface.py index <HASH>..<HASH> 100644 --- a/tcconfig/shaper/_interface.py +++ b/tcconfig/shaper/_interface.py @@ -33,6 +33,10 @@ class ShaperInterface(object): pass @abc.abstractmethod + def get_netem_qdisc_major_id(self, base_id): # pragma: no cover + pass + + @abc.abstractmethod def make_qdisc(self): # pragma: no cover pass @@ -64,7 +68,7 @@ class AbstractShaper(ShaperInterface): parent = "{:s}:{:d}".format( self._tc_obj.qdisc_major_id_str, self.get_qdisc_minor_id()) handle = "{:x}".format( - self._tc_obj.get_netem_qdisc_major_id(self._tc_obj.qdisc_major_id)) + self.get_netem_qdisc_major_id(self._tc_obj.qdisc_major_id)) command_list = [ "tc qdisc add", "dev {:s}".format(self._tc_obj.get_tc_device()), diff --git a/tcconfig/shaper/tbf.py b/tcconfig/shaper/tbf.py index <HASH>..<HASH> 100644 --- a/tcconfig/shaper/tbf.py +++ b/tcconfig/shaper/tbf.py @@ -40,6 +40,17 @@ class TbfShaper(AbstractShaper): raise ValueError("unknown direction: {}".format(self.direction)) + def get_netem_qdisc_major_id(self, base_id): + if self.direction == TrafficDirection.OUTGOING: + direction_offset = 0 + elif self.direction == TrafficDirection.INCOMING: + direction_offset = 1 + + return ( + base_id + + self.__NETEM_QDISC_MAJOR_ID_OFFSET + + direction_offset) + def make_qdisc(self): handle = "{:s}:".format(self._tc_obj.qdisc_major_id_str) command = " ".join([ @@ -63,7 +74,7 @@ class TbfShaper(AbstractShaper): return 0 parent = "{:x}:{:d}".format( - self._tc_obj.get_netem_qdisc_major_id(self._tc_obj.qdisc_major_id), + self.get_netem_qdisc_major_id(self._tc_obj.qdisc_major_id), self.get_qdisc_minor_id()) handle = "{:d}:".format(20) diff --git a/tcconfig/traffic_control.py b/tcconfig/traffic_control.py index <HASH>..<HASH> 100644 --- a/tcconfig/traffic_control.py +++ b/tcconfig/traffic_control.py @@ -179,17 +179,6 @@ class TrafficControl(object): raise ValueError("unknown direction: " + self.direction) - def get_netem_qdisc_major_id(self, base_id): - if self.direction == TrafficDirection.OUTGOING: - direction_offset = 0 - elif self.direction == TrafficDirection.INCOMING: - direction_offset = 1 - - return ( - base_id + - self.__NETEM_QDISC_MAJOR_ID_OFFSET + - direction_offset) - def set_tc(self): self.__setup_ifb() self.__shaper.set_shaping()
Move a method from tc class to shaper class
thombashi_tcconfig
train
45acad53ce7bbb5d5706d6ca86b8081d16a551e7
diff --git a/assets/src/scripts/charcoal/admin/widget.js b/assets/src/scripts/charcoal/admin/widget.js index <HASH>..<HASH> 100644 --- a/assets/src/scripts/charcoal/admin/widget.js +++ b/assets/src/scripts/charcoal/admin/widget.js @@ -233,8 +233,8 @@ Charcoal.Admin.Widget.prototype.dialog = function (dialog_opts, callback) { showHeader: showHeader, showFooter: showFooter, onshown: function () { - Charcoal.Admin.manager().render(); - } + Charcoal.Admin.manager().render(); + } }; var dialogOptions = $.extend({}, defaultOptions, userOptions); @@ -336,7 +336,7 @@ Charcoal.Admin.Widget.prototype.dialog = function (dialog_opts, callback) { return $message; }; - BootstrapDialog.show(dialogOptions); + return new BootstrapDialog.show(dialogOptions); }; Charcoal.Admin.Widget.prototype.confirm = function (dialog_opts, confirmed_callback, cancel_callback) { diff --git a/assets/src/scripts/charcoal/admin/widget/attachment.js b/assets/src/scripts/charcoal/admin/widget/attachment.js index <HASH>..<HASH> 100644 --- a/assets/src/scripts/charcoal/admin/widget/attachment.js +++ b/assets/src/scripts/charcoal/admin/widget/attachment.js @@ -280,10 +280,11 @@ Charcoal.Admin.Widget_Attachment.prototype.create_attachment = function (type, i } } }; + var immutableOpts = {}; var dialogOpts = $.extend({}, defaultOpts, customOpts, immutableOpts); - this.dialog(dialogOpts, function (response) { + var dialog = this.dialog(data, function (response) { if (response.success) { // Call the quickForm widget js. // Really not a good place to do that. @@ -300,7 +301,7 @@ Charcoal.Admin.Widget_Attachment.prototype.create_attachment = function (type, i obj_id: id, save_callback: function (response) { callback(response); - BootstrapDialog.closeAll(); + dialog.close(); } });
widget.js : Return the BootstrapDialog upon creation to ease manipulation attachment.js: only close the concerned BootstrapDialog
locomotivemtl_charcoal-admin
train
b679e3bb75be5a243f68109ccebf8cc0f1db61f6
diff --git a/test/integration/connection/test-execute-cached.js b/test/integration/connection/test-execute-cached.js index <HASH>..<HASH> 100644 --- a/test/integration/connection/test-execute-cached.js +++ b/test/integration/connection/test-execute-cached.js @@ -24,9 +24,9 @@ connection.execute(q, [123], function (err, _rows, _fields) { throw err; } rows2 = _rows; - assert(Object.keys(connection._statements).length == 1); - assert(connection._statements[key].query == q); - assert(connection._statements[key].parameters.length == 1); + assert(connection._statements.length == 1); + assert(connection._statements.get(key).query == q); + assert(connection._statements.get(key).parameters.length == 1); connection.end(); }); }); @@ -38,4 +38,3 @@ process.on('exit', function () { assert.deepEqual(rows1, [{'test': 125}]); assert.deepEqual(rows2, [{'test': 126}]); }); -
fixed test cases to use lru
sidorares_node-mysql2
train
19607b8a374572c0795ad80a7ef37edc599c80ce
diff --git a/lib/agent/index.js b/lib/agent/index.js index <HASH>..<HASH> 100644 --- a/lib/agent/index.js +++ b/lib/agent/index.js @@ -103,7 +103,7 @@ var boot = function() { commands.start_watching(); // add/remove from list when started or stopped if (config.get('auto_update')) - updater.keep_checking(); // check every one hour for new releases + updater.check_every(60 * 60 * 1000); // check every one hour for new releases logger.info('Initialized.'); }); @@ -223,6 +223,7 @@ var shutdown = function(cb) { running = false; commands.stop_watching(); + updater.stop_checking(); logger.debug('Unloading plugins.'); unload_plugins(cb); diff --git a/lib/agent/updater.js b/lib/agent/updater.js index <HASH>..<HASH> 100644 --- a/lib/agent/updater.js +++ b/lib/agent/updater.js @@ -5,6 +5,8 @@ var join = require('path').join, system = common.system, child_process = require('child_process'); // need to use child_process for stubbing to work in test +var timer; // for interval check + var no_versions_support_error = function() { var err = new Error('No versions support.'); err.code = 'NO_VERSIONS_SUPPORT'; @@ -105,9 +107,15 @@ exports.check = function(cb){ check_for_update(cb); }; -exports.keep_checking = function(cb) { +exports.check_every = function(interval, cb) { if (!system.paths.versions) return cb && cb(no_versions_support_error()); - setInterval(check_for_update, 60 * 60 * 1000); // check every one hour + var interval = interval || 60 * 60 * 1000; // one hour by default + timer = setInterval(check_for_update, interval); } + +exports.stop_checking = function() { + if (timer) clearInterval(timer); + timer = null; +} \ No newline at end of file
Export updater.stop_checking() so that we can cancel the one-hour interval check for new releases.
prey_prey-node-client
train
b450c41a423bd3cfebfe6439dcc55eba698140f7
diff --git a/src/iogi/ClassConstructor.java b/src/iogi/ClassConstructor.java index <HASH>..<HASH> 100644 --- a/src/iogi/ClassConstructor.java +++ b/src/iogi/ClassConstructor.java @@ -11,6 +11,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import com.google.common.base.Joiner; import com.thoughtworks.paranamer.BytecodeReadingParanamer; import com.thoughtworks.paranamer.CachingParanamer; @@ -115,4 +116,9 @@ public class ClassConstructor { throw new IogiException(e); } } + + @Override + public String toString() { + return "(" + Joiner.on(",").join(names) + ")"; + } } diff --git a/src/iogi/Instantiatior.java b/src/iogi/Instantiatior.java index <HASH>..<HASH> 100644 --- a/src/iogi/Instantiatior.java +++ b/src/iogi/Instantiatior.java @@ -18,10 +18,12 @@ import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; +import java.util.ListIterator; import java.util.NoSuchElementException; import java.util.Set; import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; import com.google.common.collect.Sets; @@ -69,7 +71,7 @@ public class Instantiatior { ClassConstructor desiredConstructor = desiredConstructor(relevantParameters); Set<ClassConstructor> matchingConstructors = findMatchingConstructors(candidateConstructors, desiredConstructor); - signalErrorIfNoMatchingConstructorFound(target, matchingConstructors); + signalErrorIfNoMatchingConstructorFound(target, matchingConstructors, desiredConstructor); ClassConstructor firstMatchingConstructor = matchingConstructors.iterator().next(); @@ -82,10 +84,11 @@ public class Instantiatior { } private <T> void signalErrorIfNoMatchingConstructorFound(Target<T> target, - Set<ClassConstructor> matchingConstructors) { + Set<ClassConstructor> matchingConstructors, ClassConstructor desiredConstructor) { if (matchingConstructors.isEmpty()) - throw new NoConstructorFoundException("No constructor found for to instantiate a %s named %s", - target.getClassType(), target.getName()); + throw new NoConstructorFoundException("No constructor found to instantiate a %s named %s " + + "with parameter names %s", + target.getClassType(), target.getName(), desiredConstructor); } private List<Parameter> relevantParameters(List<Parameter> parameters, Target<?> target) { @@ -130,14 +133,38 @@ public class Instantiatior { ParameterizedType listType = (ParameterizedType)target.getType(); Type typeArgument = listType.getActualTypeArguments()[0]; Target<Object> listElementTarget = new Target<Object>(typeArgument, target.getName()); + Collection<List<Parameter>> parameterLists = breakList(parameters); + System.out.println(parameterLists); ArrayList<Object> newList = new ArrayList<Object>(); - for (Parameter parameter : parameters) { - Object listElement = instantiate(listElementTarget, parameter); + for (List<Parameter> parameterListForAnElement : parameterLists) { + Object listElement = instantiate(listElementTarget, parameterListForAnElement); newList.add(listElement); } return newList; } + + private int countToFirstRepeatedParameterName(List<Parameter> parameters) { + if (parameters.isEmpty()) + return 0; + + int count = 1; + ListIterator<Parameter> parametersIterator = parameters.listIterator(); + String firstParameterName = parametersIterator.next().getName(); + + while (parametersIterator.hasNext()) { + if (parametersIterator.next().getName().equals(firstParameterName)) { + break; + } + count++; + } + + return count; + } + private Collection<List<Parameter>> breakList(List<Parameter> 
parameters) { + int listSize = countToFirstRepeatedParameterName(parameters); + return Lists.partition(parameters, listSize); + } } diff --git a/test/iogi/InstantiatorTests.java b/test/iogi/InstantiatorTests.java index <HASH>..<HASH> 100644 --- a/test/iogi/InstantiatorTests.java +++ b/test/iogi/InstantiatorTests.java @@ -176,6 +176,27 @@ public class InstantiatorTests { instantiator.instantiate(target, parameter); } + @SuppressWarnings("unchecked") + @Test + public void canInstantiateAListWhoseElementsHaveMoreThanOneConstructorParameter() throws Exception { + Parameter p1 = new Parameter("root.one", "1"); + Parameter p2 = new Parameter("root.two", "2"); + Parameter p3 = new Parameter("root.one", "11"); + Parameter p4 = new Parameter("root.two", "22"); + + Type parameterizedListType = ContainsParameterizedList.class.getDeclaredField("listOfTwoArguments").getGenericType(); + + Target<List> target = new Target(parameterizedListType, "root"); + List objects = instantiator.instantiate(target, p1, p2, p3, p4); + + assertEquals(2, objects.size()); + TwoArguments first = (TwoArguments)objects.get(0); + assertEquals(1, first.getOne()); + assertEquals(2, first.getTwo()); + TwoArguments second = (TwoArguments)objects.get(1); + assertEquals(11, second.getOne()); + assertEquals(22, second.getTwo()); + } abstract static class AbstractClass { } @@ -304,5 +325,6 @@ public class InstantiatorTests { static class ContainsParameterizedList { List<OneString> listOfOneString; + List<TwoArguments> listOfTwoArguments; } }
Can handle lists whose elements have more than one constructor parameter.
rafaeldff_Iogi
train
9223e6576787e5b2a90110ed0fd524b2c5a854df
diff --git a/tests/utils.py b/tests/utils.py index <HASH>..<HASH> 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -21,7 +21,7 @@ def is_reachable(api_host): s = socket.create_connection((host, port), timeout=1) s.close() return True - except Exception: + except Exception as err: return False @@ -36,9 +36,7 @@ def create_container(model=None, name=None, image="redis"): def create_config(): try: config = K8sConfig(kubeconfig=kubeconfig_fallback) - except SyntaxError: - config = K8sConfig() - except IOError: + except Exception: config = K8sConfig() return config @@ -72,7 +70,10 @@ def create_secret(config=None, name=None): def cleanup_objects(): - cleanup_pods() + config = K8sConfig(kubeconfig=kubeconfig_fallback) + if is_reachable(config.api_host): + cleanup_pods() + cleanup_rcs() def cleanup_pods(): @@ -80,9 +81,22 @@ def cleanup_pods(): if is_reachable(pod.config.api_host): pods = pod.list() for p in pods: - result = K8sPod.get_by_name(name=p['metadata']['name']) + _list = K8sPod.get_by_name(name=p['metadata']['name']) + try: + [x.delete() for x in _list] + except NotFoundException: + continue + time.sleep(2) # let the pods die + + +def cleanup_rcs(): + rc = create_rc(name="throwaway") + if is_reachable(rc.config.api_host): + rcs = rc.list() + for rc in rcs: + _list = K8sReplicationController.get_by_name(name=rc['metadata']['name']) try: - [x.delete() for x in result] + [x.delete() for x in _list] except NotFoundException: continue - time.sleep(3) # let the pods die \ No newline at end of file + time.sleep(2) # let the replication controllers die
test/utils: add method for cleanup of replication controllers
mnubo_kubernetes-py
train
9473c036cf125b25bc23839f127db29fdb530700
diff --git a/openquake/calculators/hazard/uhs/ath.py b/openquake/calculators/hazard/uhs/ath.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/hazard/uhs/ath.py +++ b/openquake/calculators/hazard/uhs/ath.py @@ -80,17 +80,3 @@ def remaining_tasks_in_block(job_id, num_tasks, start_count): target = start_count + num_tasks while running_total() < target: yield target - running_total() # remaining - - -def uhs_task_handler(job_id, num_tasks, start_count): - """Async task handler for counting calculation results and determining when - a batch of tasks is complete.""" - remaining_gen = remaining_tasks_in_block(job_id, num_tasks, start_count) - - while True: - time.sleep(0.5) - try: - remaining_gen.next() - except StopIteration: - # No more tasks remaining in this batch. - break diff --git a/openquake/calculators/hazard/uhs/core.py b/openquake/calculators/hazard/uhs/core.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/hazard/uhs/core.py +++ b/openquake/calculators/hazard/uhs/core.py @@ -35,7 +35,6 @@ from openquake.calculators.hazard.general import set_gmpe_params from openquake.calculators.hazard.general import store_gmpe_map from openquake.calculators.hazard.general import store_source_model from openquake.calculators.hazard.uhs.ath import completed_task_count -from openquake.calculators.hazard.uhs.ath import uhs_task_handler from openquake.db.models import Output from openquake.db.models import UhSpectra from openquake.db.models import UhSpectrum @@ -250,42 +249,3 @@ class UHSCalculator(Calculator): for sampling source model and gmpe logic trees """ write_uh_spectra(self.calc_proxy) - - source_model_lt = self.calc_proxy.params.get( - 'SOURCE_MODEL_LOGIC_TREE_FILE') - gmpe_lt = self.calc_proxy.params.get('GMPE_LOGIC_TREE_FILE') - basepath = self.calc_proxy.params.get('BASE_PATH') - self.lt_processor = logictree.LogicTreeProcessor( - basepath, source_model_lt, gmpe_lt) - - def execute(self): - - calc_proxy = self.calc_proxy - job_profile = calc_proxy.oq_job_profile - - src_model_rnd = random.Random(job_profile.source_model_lt_random_seed) - gmpe_rnd = random.Random(job_profile.gmpe_lt_random_seed) - - for rlz in xrange(calc_proxy.oq_job_profile.realizations): - - # Sample the gmpe and source models: - store_source_model( - calc_proxy.job_id, src_model_rnd.getrandbits(32), - calc_proxy.params, self.lt_processor) - store_gmpe_map( - calc_proxy.job_id, gmpe_rnd.getrandbits(32), self.lt_processor) - - tf_args = dict(job_id=calc_proxy.job_id, realization=rlz) - - num_tasks_completed = completed_task_count(self.calc_proxy.job_id) - ath_args = dict(job_id=self.calc_proxy.job_id, num_tasks=None, - start_count=num_tasks_completed) - - distribute( - compute_uhs_task, ('site', calc_proxy.sites_to_compute()), - tf_args=tf_args, ath=uhs_task_handler, ath_args=ath_args) - # Notes: the async task handler could probably just operate by - # checking counters. - - def post_execute(self): - stats.delete_job_counters(self.calc_proxy.job_id)
temporarily cleaning up some incomplete pieces for an initial pull request Former-commit-id: <I>ca<I>fde<I>ba6b5c<I>c<I>b [formerly <I>f6aeffa<I>b<I>a5d9e<I>f<I>af9] Former-commit-id: cc<I>b<I>e<I>a<I>ddb<I>a<I>b<I>b6d1cf
gem_oq-engine
train
2b507bf3ccd8e6f3aef9e97d31d1d2f2d0c3c41e
diff --git a/lib/disposable/twin/setup.rb b/lib/disposable/twin/setup.rb index <HASH>..<HASH> 100644 --- a/lib/disposable/twin/setup.rb +++ b/lib/disposable/twin/setup.rb @@ -38,7 +38,7 @@ module Disposable def setup_value_for(dfn, options) name = dfn.name - options[name.to_sym] || mapper.send(name) # model.title. + mapper.send(name) # model.title. end def setup_write!(dfn, value)
simplify #setup_value_for.
apotonick_disposable
train
fb43e4e21ae767309a522129f7315612aa8be0c9
diff --git a/salt/netapi/rest_cherrypy/app.py b/salt/netapi/rest_cherrypy/app.py index <HASH>..<HASH> 100644 --- a/salt/netapi/rest_cherrypy/app.py +++ b/salt/netapi/rest_cherrypy/app.py @@ -372,10 +372,10 @@ def salt_api_acl_tool(username, request): # ACL Config. acl = cherrypy_conf.get('api_acl', None) + ip = request.remote.ip if acl: users = acl.get('users', {}) if users: - ip = request.remote.ip if username in users: if ip in users[username]: logger.info(success_str.format(username, ip))
Fixing problem with variable assignment being too late.
saltstack_salt
train
b5e33313a0dfa97ab2f81c7dc406f0718ae9c821
diff --git a/tests/lib/Item/Serializer/ItemSerializerTest.php b/tests/lib/Item/Serializer/ItemSerializerTest.php index <HASH>..<HASH> 100644 --- a/tests/lib/Item/Serializer/ItemSerializerTest.php +++ b/tests/lib/Item/Serializer/ItemSerializerTest.php @@ -50,7 +50,7 @@ class ItemSerializerTest extends TestCase $this->itemRendererMock = $this->createMock(ItemRendererInterface::class); $this->config = new Configuration( - 'ezcontent', + 'item_type', array( 'preview' => array( 'enabled' => true,
Remove unneeded mentions of ezcontent from tests
netgen-layouts_content-browser
train
0cbfa7017846dd6e33dc1c8507e57115926476cb
diff --git a/src/lib/Supra/Controller/Pages/Configuration/BlockControllerConfiguration.php b/src/lib/Supra/Controller/Pages/Configuration/BlockControllerConfiguration.php index <HASH>..<HASH> 100644 --- a/src/lib/Supra/Controller/Pages/Configuration/BlockControllerConfiguration.php +++ b/src/lib/Supra/Controller/Pages/Configuration/BlockControllerConfiguration.php @@ -116,15 +116,19 @@ class BlockControllerConfiguration extends ComponentConfiguration if (is_array($this->propertyGroups) && ! empty($this->propertyGroups)) { $propertyGroups = array(); - foreach ($this->propertyGroups as $group) { /* @var $group BlockPropertyGroupConfiguration */ - if (isset($propertyGroups[$group->id])) { - \Log::warn('Property group with id "' . $group->id . '" already exist in property group list. Skipping group. Configuration: ', $group); - continue; - } + if ($group instanceof BlockPropertyGroupConfiguration) { - $propertyGroups[$group->id] = $group; + if (isset($propertyGroups[$group->id])) { + \Log::warn('Property group with id "' . $group->id . '" already exist in property group list. Skipping group. Configuration: ', $group); + continue; + } + + $propertyGroups[$group->id] = $group; + } else { + \Log::warn('Group should be instance of BlockPropertyGroupConfiguration ', $group); + } } $this->propertyGroups = array_values($propertyGroups); diff --git a/src/lib/Supra/Controller/Pages/Configuration/BlockPropertyConfiguration.php b/src/lib/Supra/Controller/Pages/Configuration/BlockPropertyConfiguration.php index <HASH>..<HASH> 100644 --- a/src/lib/Supra/Controller/Pages/Configuration/BlockPropertyConfiguration.php +++ b/src/lib/Supra/Controller/Pages/Configuration/BlockPropertyConfiguration.php @@ -59,7 +59,7 @@ class BlockPropertyConfiguration implements ConfigurationInterface * @var array */ public $properties = array(); - + /** * Hash table for editable additional properties * @var array @@ -91,6 +91,10 @@ class BlockPropertyConfiguration implements ConfigurationInterface \Log::warn("No additional parameter setter found for editable {$this->editable} with name {$name}"); } } + + if ( ! empty($this->group)) { + $this->editableInstance->setGroupId($this->group); + } } public function fillFromEditable(EditableInterface $editable, $name) diff --git a/src/lib/Supra/Controller/Pages/Configuration/BlockPropertyGroupConfiguration.php b/src/lib/Supra/Controller/Pages/Configuration/BlockPropertyGroupConfiguration.php index <HASH>..<HASH> 100644 --- a/src/lib/Supra/Controller/Pages/Configuration/BlockPropertyGroupConfiguration.php +++ b/src/lib/Supra/Controller/Pages/Configuration/BlockPropertyGroupConfiguration.php @@ -12,8 +12,8 @@ use Supra\Controller\Pages\BlockPropertyGroupCollection; */ class BlockPropertyGroupConfiguration implements ConfigurationInterface { - const TYPE_TOP = 1, - TYPE_SIDEBAR = 2; + const TYPE_TOP = 'top', + TYPE_SIDEBAR = 'sidebar'; /** * @var string @@ -53,11 +53,8 @@ class BlockPropertyGroupConfiguration implements ConfigurationInterface $type = mb_strtolower(trim($this->type)); switch ($type) { - case 'top': - $this->type = self::TYPE_TOP; - break; - case 'sidebar': - $this->type = self::TYPE_SIDEBAR; + case self::TYPE_TOP: + case self::TYPE_SIDEBAR: break; default: \Log::warn('Property group type is not "top" or "sidebar", will use default type "sidebar". ', $this);
Task #<I> Property groups should be configurable
sitesupra_sitesupra
train
122d22ff6595a8a993ccb41ec0b95d4cb5d78cab
diff --git a/spec/helper-spec.js b/spec/helper-spec.js index <HASH>..<HASH> 100644 --- a/spec/helper-spec.js +++ b/spec/helper-spec.js @@ -23,9 +23,9 @@ describe('linter helpers', function () { waitsForAsync(async function () { await atom.workspace.open(somethingFile) const textEditor = atom.workspace.getActiveTextEditor() - expect(helpers.rangeFromLineNumber(textEditor)).toEqual([[0, 0], [0, 30]]) - expect(helpers.rangeFromLineNumber(textEditor, -1)).toEqual([[0, 0], [0, 30]]) - expect(helpers.rangeFromLineNumber(textEditor, 'a')).toEqual([[0, 0], [0, 30]]) + expect(helpers.rangeFromLineNumber(textEditor).serialize()).toEqual([[0, 0], [0, 30]]) + expect(helpers.rangeFromLineNumber(textEditor, -1).serialize()).toEqual([[0, 0], [0, 30]]) + expect(helpers.rangeFromLineNumber(textEditor, 'a').serialize()).toEqual([[0, 0], [0, 30]]) }) ) @@ -33,8 +33,8 @@ describe('linter helpers', function () { waitsForAsync(async function () { await atom.workspace.open(somethingFile) const textEditor = atom.workspace.getActiveTextEditor() - expect(helpers.rangeFromLineNumber(textEditor, 7, -1)).toEqual([[7, 0], [7, 43]]) - expect(helpers.rangeFromLineNumber(textEditor, 7, 'a')).toEqual([[7, 0], [7, 43]]) + expect(helpers.rangeFromLineNumber(textEditor, 7, -1).serialize()).toEqual([[7, 0], [7, 43]]) + expect(helpers.rangeFromLineNumber(textEditor, 7, 'a').serialize()).toEqual([[7, 0], [7, 43]]) }) ) @@ -42,7 +42,7 @@ describe('linter helpers', function () { waitsForAsync(async function () { await atom.workspace.open(somethingFile) const textEditor = atom.workspace.getActiveTextEditor() - const range = helpers.rangeFromLineNumber(textEditor, 7) + const range = helpers.rangeFromLineNumber(textEditor, 7).serialize() expect(range).toEqual([[7, 0], [7, 43]]) }) ) @@ -51,7 +51,7 @@ describe('linter helpers', function () { waitsForAsync(async function () { await atom.workspace.open(somethingFile) const textEditor = atom.workspace.getActiveTextEditor() - const range = helpers.rangeFromLineNumber(textEditor, 7, 4) + const range = helpers.rangeFromLineNumber(textEditor, 7, 4).serialize() expect(range).toEqual([[7, 4], [7, 11]]) }) ) @@ -80,10 +80,10 @@ describe('linter helpers', function () { waitsForAsync(async function () { await atom.workspace.open(mixedIndentFile) const textEditor = atom.workspace.getActiveTextEditor() - expect(helpers.rangeFromLineNumber(textEditor, 0)).toEqual([[0, 0], [0, 3]]) - expect(helpers.rangeFromLineNumber(textEditor, 1)).toEqual([[1, 2], [1, 5]]) - expect(helpers.rangeFromLineNumber(textEditor, 2)).toEqual([[2, 1], [2, 4]]) - expect(helpers.rangeFromLineNumber(textEditor, 3)).toEqual([[3, 2], [3, 5]]) + expect(helpers.rangeFromLineNumber(textEditor, 0).serialize()).toEqual([[0, 0], [0, 3]]) + expect(helpers.rangeFromLineNumber(textEditor, 1).serialize()).toEqual([[1, 2], [1, 5]]) + expect(helpers.rangeFromLineNumber(textEditor, 2).serialize()).toEqual([[2, 1], [2, 4]]) + expect(helpers.rangeFromLineNumber(textEditor, 3).serialize()).toEqual([[3, 2], [3, 5]]) }) ) @@ -91,10 +91,10 @@ describe('linter helpers', function () { waitsForAsync(async function() { await atom.workspace.open(mixedIndentFile) const textEditor = atom.workspace.getActiveTextEditor() - expect(helpers.rangeFromLineNumber(textEditor, 0, 0)).toEqual([[0, 0], [0, 3]]) - expect(helpers.rangeFromLineNumber(textEditor, 1, 0)).toEqual([[1, 0], [1, 5]]) - expect(helpers.rangeFromLineNumber(textEditor, 2, 0)).toEqual([[2, 0], [2, 4]]) - expect(helpers.rangeFromLineNumber(textEditor, 3, 0)).toEqual([[3, 0], [3, 5]]) + 
expect(helpers.rangeFromLineNumber(textEditor, 0, 0).serialize()).toEqual([[0, 0], [0, 3]]) + expect(helpers.rangeFromLineNumber(textEditor, 1, 0).serialize()).toEqual([[1, 0], [1, 5]]) + expect(helpers.rangeFromLineNumber(textEditor, 2, 0).serialize()).toEqual([[2, 0], [2, 4]]) + expect(helpers.rangeFromLineNumber(textEditor, 3, 0).serialize()).toEqual([[3, 0], [3, 5]]) }) }) })
:arrow_up: Upgrade specs to match latest behavior
steelbrain_atom-linter
train
ca45f4dde671f5fedecce82886f74fd09eb7963e
diff --git a/app/controllers/humpyard/elements_controller.rb b/app/controllers/humpyard/elements_controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/humpyard/elements_controller.rb +++ b/app/controllers/humpyard/elements_controller.rb @@ -8,7 +8,8 @@ module Humpyard @element = Humpyard::config.element_types[params[:type]].new( :page_id => params[:page_id], :container_id => params[:container_id].to_i > 0 ? params[:container_id].to_i : nil, - :page_yield_name => params[:yield_name].blank? ? 'main' : params[:yield_name]) + :page_yield_name => params[:yield_name].blank? ? 'main' : params[:yield_name], + :shared_state => 0) authorize! :create, @element.element
Set shared_status as not shared for new Elements
humpyard_humpyard
train
9ea088eb4a6e2a43bbad1b46405dff206a53564d
diff --git a/ast/term.go b/ast/term.go index <HASH>..<HASH> 100644 --- a/ast/term.go +++ b/ast/term.go @@ -71,6 +71,32 @@ func (loc *Location) String() string { return fmt.Sprintf("%v:%v", loc.Row, loc.Col) } +// Compare returns -1, 0, or 1 to indicate if this loc is less than, equal to, +// or greater than the other. Comparison is performed on the file, row, and +// column of the Location (but not on the text.) +func (loc *Location) Compare(other *Location) int { + if loc == nil && other == nil { + return 0 + } else if loc == nil { + return 1 + } else if other == nil { + return -1 + } else if loc.File < other.File { + return -1 + } else if loc.File > other.File { + return 1 + } else if loc.Row < other.Row { + return -1 + } else if loc.Row > other.Row { + return 1 + } else if loc.Col < other.Col { + return -1 + } else if loc.Col > other.Col { + return 1 + } + return 0 +} + // Value declares the common interface for all Term values. Every kind of Term value // in the language is represented as a type that implements this interface: //
Add comparison function for ast.Location
open-policy-agent_opa
train
b41aee176ca451c25f3b04d19f428b5951caab63
diff --git a/go/kbfs/libfuse/dir.go b/go/kbfs/libfuse/dir.go index <HASH>..<HASH> 100644 --- a/go/kbfs/libfuse/dir.go +++ b/go/kbfs/libfuse/dir.go @@ -375,6 +375,7 @@ func (f *Folder) fillAttrWithUIDAndWritePerm( ctx context.Context, node libkbfs.Node, ei *data.EntryInfo, a *fuse.Attr) (err error) { a.Valid = 1 * time.Minute + node.FillCacheDuration(&a.Valid) a.Size = ei.Size a.Blocks = getNumBlocksFromSize(ei.Size) @@ -584,6 +585,8 @@ func (d *Dir) Lookup(ctx context.Context, req *fuse.LookupRequest, resp *fuse.Lo if n, ok := d.folder.nodes[newNode.GetID()]; ok { return n, nil } + + newNode.FillCacheDuration(&resp.EntryValid) } switch de.Type { diff --git a/go/kbfs/libfuse/file.go b/go/kbfs/libfuse/file.go index <HASH>..<HASH> 100644 --- a/go/kbfs/libfuse/file.go +++ b/go/kbfs/libfuse/file.go @@ -119,6 +119,8 @@ func (f *File) attr(ctx context.Context, a *fuse.Attr) (err error) { return err } + f.node.FillCacheDuration(&a.Valid) + return f.fillAttrWithMode(ctx, &de, a) } diff --git a/go/kbfs/libkbfs/interfaces.go b/go/kbfs/libkbfs/interfaces.go index <HASH>..<HASH> 100644 --- a/go/kbfs/libkbfs/interfaces.go +++ b/go/kbfs/libkbfs/interfaces.go @@ -219,6 +219,9 @@ type Node interface { EntryType() data.EntryType // GetBlockID returns the block ID of the node. GetBlockID() kbfsblock.ID + // FillCacheDuration sets `d` to the suggested cache time for this + // node, if desired. + FillCacheDuration(d *time.Duration) } // KBFSOps handles all file system operations. Expands all indirect diff --git a/go/kbfs/libkbfs/libkbfs_mocks_test.go b/go/kbfs/libkbfs/libkbfs_mocks_test.go index <HASH>..<HASH> 100644 --- a/go/kbfs/libkbfs/libkbfs_mocks_test.go +++ b/go/kbfs/libkbfs/libkbfs_mocks_test.go @@ -3320,6 +3320,18 @@ func (mr *MockNodeMockRecorder) EntryType() *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "EntryType", reflect.TypeOf((*MockNode)(nil).EntryType)) } +// FillCacheDuration mocks base method +func (m *MockNode) FillCacheDuration(arg0 *time.Duration) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "FillCacheDuration", arg0) +} + +// FillCacheDuration indicates an expected call of FillCacheDuration +func (mr *MockNodeMockRecorder) FillCacheDuration(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FillCacheDuration", reflect.TypeOf((*MockNode)(nil).FillCacheDuration), arg0) +} + // GetBasename mocks base method func (m *MockNode) GetBasename() string { m.ctrl.T.Helper() diff --git a/go/kbfs/libkbfs/node.go b/go/kbfs/libkbfs/node.go index <HASH>..<HASH> 100644 --- a/go/kbfs/libkbfs/node.go +++ b/go/kbfs/libkbfs/node.go @@ -9,6 +9,7 @@ import ( "fmt" "os" "runtime" + "time" "github.com/keybase/client/go/kbfs/data" "github.com/keybase/client/go/kbfs/kbfsblock" @@ -131,3 +132,5 @@ func (n *nodeStandard) GetFile(_ context.Context) billy.File { func (n *nodeStandard) EntryType() data.EntryType { return n.core.entryType } + +func (n *nodeStandard) FillCacheDuration(d *time.Duration) {}
libfs: fill in cache valid time for special files Special files that can change without proper kernel invalidations should always fill in their cache time to 0. Issue: KBFS-<I>
keybase_client
train
c7ceb6e22fadd5637e0ab70baeb129efef9198bf
diff --git a/lib/fortitude/rendering_context.rb b/lib/fortitude/rendering_context.rb index <HASH>..<HASH> 100644 --- a/lib/fortitude/rendering_context.rb +++ b/lib/fortitude/rendering_context.rb @@ -75,12 +75,16 @@ module Fortitude @newline_needed = true end + def current_indent + (" " * @indent).freeze + end + def about_to_output_non_whitespace! if @newline_needed if @have_output o = @output_buffer_holder.output_buffer o.original_concat(NEWLINE) - o.original_concat(" " * @indent) + o.original_concat(current_indent) end @newline_needed = false diff --git a/lib/fortitude/widget.rb b/lib/fortitude/widget.rb index <HASH>..<HASH> 100644 --- a/lib/fortitude/widget.rb +++ b/lib/fortitude/widget.rb @@ -234,6 +234,9 @@ module Fortitude @_fortitude_widget_nesting_depth ||= @_fortitude_rendering_context.current_widget_depth end + MAX_ASSIGNS_LENGTH_BEFORE_MULTIPLE_LINES = 200 + START_COMMENT_EXTRA_INDENT_FOR_NEXT_LINE = " " * 5 + def start_and_end_comments if self.class.start_and_end_comments fo = self.class.format_output @@ -242,18 +245,34 @@ module Fortitude assign_keys = assigns.keys if assign_keys.length > 0 - comment_text << ": " - assign_keys.each_with_index do |assign, index| - comment_text << ", " unless index == 0 + + assign_text = assign_keys.map do |assign| value = assigns[assign] - comment_text << ":#{assign} => " - comment_text << "(DEFAULT) " if assigns.is_default?(assign) + out = ":#{assign} => " + out << "(DEFAULT) " if assigns.is_default?(assign) value_string = if value.respond_to?(:to_fortitude_comment_string) then value.to_fortitude_comment_string else value.inspect end if value_string.length > MAX_START_COMMENT_VALUE_STRING_LENGTH value_string = value_string[0..(MAX_START_COMMENT_VALUE_STRING_LENGTH - START_COMMENT_VALUE_STRING_TOO_LONG_ELLIPSIS.length)] + START_COMMENT_VALUE_STRING_TOO_LONG_ELLIPSIS end - comment_text << value_string + out << value_string + out + end + + total_length = assign_text.map(&:length).inject(0, &:+) + if total_length > MAX_ASSIGNS_LENGTH_BEFORE_MULTIPLE_LINES + newline_and_indent = "\n#{@_fortitude_rendering_context.current_indent}" + newline_and_extra_indent = newline_and_indent + START_COMMENT_EXTRA_INDENT_FOR_NEXT_LINE + + comment_text << ":" + assign_text.each do |at| + comment_text << newline_and_extra_indent + comment_text << at + end + comment_text << newline_and_indent + else + comment_text << ": " + comment_text << assign_text.join(", ") end end comment comment_text diff --git a/spec/system/start_end_comments_system_spec.rb b/spec/system/start_end_comments_system_spec.rb index <HASH>..<HASH> 100644 --- a/spec/system/start_end_comments_system_spec.rb +++ b/spec/system/start_end_comments_system_spec.rb @@ -324,9 +324,9 @@ hi expect(render(outer)).to eq(%{<div> <div> <!-- #{eb(inner,1 )}: - :foo => \"#{"a" * 47}..., - :bar => \"#{"b" * 47}... - --> + :foo => \"#{"a" * 97}... + :bar => \"#{"b" * 97}... + --> hi <!-- #{ee(inner, 1)} --> </div>
[place_page=<I>ms] Split up BEGIN comments to multiple lines if their data is long enough.
ageweke_fortitude
train
d6a55a2d58f2abb8e685c221a9df97af98d6f9f2
diff --git a/test/require.js b/test/require.js index <HASH>..<HASH> 100644 --- a/test/require.js +++ b/test/require.js @@ -455,4 +455,48 @@ describe("require", function () { }); + describe("preloading", function () { + + function preloadTest () {/*CODE*/ + gpf.require.define({ + data: "data.js" + }, function (require) { + return require.data; + }); /*CODE*/ + } + + function preloadData () {/*CODE*/ + module.exports = "Hello World!"; /*CODE*/ + } + + function _extract (container) { + return container.toString().split("/*CODE*/")[1]; + } + + beforeEach(function () { + gpf.require.configure({ + clearCache: true, + base: "", + preload: { + "preload/test.js": _extract(preloadTest), + "preload/data.js": _extract(preloadData) + } + }); + }); + + it("loads everything recursively", function (done) { + gpf.require.define({ + result: "preload/test.js" + }, function (require) { + try { + assert(require.result === "Hello World!"); + done(); + } catch (e) { + done(e); + } + }); + }); + + }); + });
Testing preload (#<I>)
ArnaudBuchholz_gpf-js
train
1fe46e480f93a2bb3723191f2f0f0ad9f3926dc9
diff --git a/test/test-plugin-api.js b/test/test-plugin-api.js index <HASH>..<HASH> 100644 --- a/test/test-plugin-api.js +++ b/test/test-plugin-api.js @@ -370,8 +370,15 @@ exports['task add events'] = function (test) { test.equal(task.type, '$email'); test.equal(task.from, 'from'); test.equal(task.to, 'to'); - setImmediate(function () { - manager.stop(test.done); + + hoodie.task.success(dbname, task, function (err) { + if (err) { + return test.done(err); + } + // give events from the finish call time to fire + setTimeout(function () { + manager.stop(test.done); + }, 200); }); }); hoodie.database.add('testdb', function (err, db) {
add test to make sure processed tasks don't cause add events to fire This commit was sponsored by Coding Robin. Robin, a Berlin-based freelance dev, loves open source, programming & organizing events. He's excited to see the promising future of web development with tools like Hoodie. Robin is always looking for interesting new projects: <URL>
hoodiehq-archive_hoodie-plugins-manager
train
4865ba39496d72c951e783cebdfd00cc4ce571b4
diff --git a/packages/kittik-parser/src/index.js b/packages/kittik-parser/src/index.js index <HASH>..<HASH> 100644 --- a/packages/kittik-parser/src/index.js +++ b/packages/kittik-parser/src/index.js @@ -1,7 +1,7 @@ const nearley = require('nearley'); const grammar = require('./grammar/grammar'); -const parser = new nearley.Parser(nearley.Grammar.fromCompiled(grammar)); module.exports = function parse(input) { - return parser.feed(input); -} + const parser = new nearley.Parser(nearley.Grammar.fromCompiled(grammar)); + return parser.feed(input).finish().results[0]; +};
Fix issue with sharing states between different parsings
ghaiklor_kittik
train
86372c2248e1992741a3bbc607020283295ba054
diff --git a/lib/bmc-daemon-lib/logger_pool.rb b/lib/bmc-daemon-lib/logger_pool.rb index <HASH>..<HASH> 100644 --- a/lib/bmc-daemon-lib/logger_pool.rb +++ b/lib/bmc-daemon-lib/logger_pool.rb @@ -6,7 +6,9 @@ module BmcDaemonLib class LoggerPool include Singleton - def get pipe + def get pipe = nil + pipe = :default if pipe.to_s.blank? + @loggers ||= {} @loggers[pipe] ||= create(pipe) end @@ -31,7 +33,7 @@ module BmcDaemonLib def logfile pipe # Disabled if no valid config - return nil unless Conf[:logs].is_a?(Hash) + return nil unless Conf[:logs].is_a?(Hash) && Conf.at(:logs, pipe) # Compute logfile and check if we can write there logfile = File.expand_path(Conf[:logs][pipe].to_s, Conf[:logs][:path].to_s)
logger: disable logging if no path given for that pipe
bmedici_bmc-daemon-lib
train
3a7d1b269948e6ce3ea9578d49f803c584d8f045
diff --git a/lib/active_record_upsert/active_record.rb b/lib/active_record_upsert/active_record.rb index <HASH>..<HASH> 100644 --- a/lib/active_record_upsert/active_record.rb +++ b/lib/active_record_upsert/active_record.rb @@ -1,4 +1,6 @@ -require 'active_record/connection_adapters/postgresql_adapter' +unless defined?(::ActiveRecord::ConnectionAdapters::PostgreSQLAdapter) + require 'active_record/connection_adapters/postgresql_adapter' +end Dir.glob(File.join(__dir__, 'active_record/**/*.rb')) do |f| require f
Avoid loading pg gem by mistake
jesjos_active_record_upsert
train
68d4c5ddd059c033356585948c4994bc7afe589c
diff --git a/src/RedisServiceProvider.php b/src/RedisServiceProvider.php index <HASH>..<HASH> 100644 --- a/src/RedisServiceProvider.php +++ b/src/RedisServiceProvider.php @@ -9,6 +9,11 @@ use Illuminate\Redis\RedisServiceProvider as ServiceProvider; class RedisServiceProvider extends ServiceProvider { /** + * Don't defer the loadig of the service provider. + */ + protected $defer = false; + + /** * Register custom Redis cache driver. * * @return void
Don't defer the loading of the service provider.
tillkruss_laravel-phpredis
train
0709be6ae2b04efd359b9fb5f1c494095724b561
diff --git a/lib/rails_exception_handler/parser.rb b/lib/rails_exception_handler/parser.rb index <HASH>..<HASH> 100644 --- a/lib/rails_exception_handler/parser.rb +++ b/lib/rails_exception_handler/parser.rb @@ -49,7 +49,11 @@ class RailsExceptionHandler::Parser def user_info config = RailsExceptionHandler.configuration.store_user_info return nil unless(config) - user_object = @controller.send(config[:method]) + begin + user_object = @controller.send(config[:method]) + rescue + user_object = nil + end user_object ? user_object.send(config[:field]) : 'Anonymous' end
rescue exceptions during calls to current_user
bjorntrondsen_rails_exception_handler
train
0b5c5169ee8532bd41c49a466cdbe470f25b675f
diff --git a/packages/perspective-viewer-d3fc/src/js/d3fcChart.js b/packages/perspective-viewer-d3fc/src/js/d3fcChart.js index <HASH>..<HASH> 100644 --- a/packages/perspective-viewer-d3fc/src/js/d3fcChart.js +++ b/packages/perspective-viewer-d3fc/src/js/d3fcChart.js @@ -61,7 +61,7 @@ function renderBar(config, container, horizontal, hiddenElements, update) { let orientation = horizontal ? "horizontal" : "vertical"; let labels = interpretLabels(config); - let isSplitBy = labels.splitLabel != null; + let isSplitBy = labels.splitLabel != ""; let [dataset, stackedBarData, color] = interpretDataset(isSplitBy, config, hiddenElements);
corrected isSplitBy assessment logic.
finos_perspective
train
ecfdb04e4d761bdf0bb7c127f43372ae914d1664
diff --git a/src/android/test/io/jxcore/node/ConnectivityMonitorTest.java b/src/android/test/io/jxcore/node/ConnectivityMonitorTest.java index <HASH>..<HASH> 100644 --- a/src/android/test/io/jxcore/node/ConnectivityMonitorTest.java +++ b/src/android/test/io/jxcore/node/ConnectivityMonitorTest.java @@ -161,6 +161,8 @@ public class ConnectivityMonitorTest { currentBTState = !currentBTState; mBluetoothManager.setBluetoothEnabled(currentBTState); + Thread.sleep(5000); + assertThat("Proper state of BT is set when switched on", mConnectivityMonitor.isBluetoothEnabled(), is(mBluetoothManager.isBluetoothEnabled()));
Added timeout in ConnectivityMonitorTest
thaliproject_Thali_CordovaPlugin
train
aa6a6af3272a74a22ad860f5cc6766e7517eaeb3
diff --git a/src/main/java/com/google/cloud/tools/project/AppYaml.java b/src/main/java/com/google/cloud/tools/project/AppYaml.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/google/cloud/tools/project/AppYaml.java +++ b/src/main/java/com/google/cloud/tools/project/AppYaml.java @@ -39,6 +39,10 @@ public class AppYaml { /** * @param appYaml the app.yaml file * @throws IOException if reading app.yaml fails due to I/O errors + * @throws org.yaml.snakeyaml.scanner.ScannerException if reading app.yaml fails while scanning + * due to malformed YAML (undocumented {@link RuntimeException} from {@link Yaml#load}) + * @throws org.yaml.snakeyaml.parser.ParserException if reading app.yaml fails while parsing + * due to malformed YAML (undocumented {@link RuntimeException} from {@link Yaml#load}) */ public AppYaml(Path appYaml) throws IOException { try (InputStream in = Files.newInputStream(appYaml)) {
Update AppYaml constructor Javadoc (#<I>) * Update AppYaml constructor Javadoc * Add ParserException too
GoogleCloudPlatform_appengine-plugins-core
train
ffb170183435df21b38bc24ba1fd20da153cd52e
diff --git a/src/Monolog/Formatter/LogstashFormatter.php b/src/Monolog/Formatter/LogstashFormatter.php index <HASH>..<HASH> 100644 --- a/src/Monolog/Formatter/LogstashFormatter.php +++ b/src/Monolog/Formatter/LogstashFormatter.php @@ -90,19 +90,22 @@ class LogstashFormatter extends NormalizerFormatter } $message = array( '@timestamp' => $record['datetime'], - '@message' => @$record['message'], - '@tags' => array(@$record['channel']), '@source' => $this->systemName, - '@fields' => array( - 'channel' => @$record['channel'], - 'level' => @$record['level'] - ) + '@fields' => array() ); - + if (isset($record['message'])) { + $message['@message'] = $record['message']; + } + if (isset($record['channel'])) { + $message['@tags'] = array($record['channel']); + $message['@fields']['channel'] = $record['channel']; + } + if (isset($record['level'])) { + $message['@fields']['level'] = $record['level']; + } if ($this->applicationName) { $message['@type'] = $this->applicationName; } - if (isset($record['extra']['server'])) { $message['@source_host'] = $record['extra']['server']; } @@ -131,13 +134,18 @@ class LogstashFormatter extends NormalizerFormatter $message = array( '@timestamp' => $record['datetime'], '@version' => 1, - 'message' => @$record['message'], 'host' => $this->systemName, - 'type' => @$record['channel'], - 'channel' => @$record['channel'], - 'level' => @$record['level_name'] ); - + if (isset($record['message'])) { + $message['message'] = $record['message']; + } + if (isset($record['channel'])) { + $message['type'] = $record['channel']; + $message['channel'] = $record['channel']; + } + if (isset($record['level_name'])) { + $message['level'] = $record['level_name']; + } if ($this->applicationName) { $message['type'] = $this->applicationName; }
Checking each entry individually instead of using @.
Seldaek_monolog
train
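The Monolog commit above replaces PHP's `@` error-suppression operator with explicit existence checks, so optional record fields are only copied into the Logstash message when they are actually present. A minimal Python sketch of the same pattern, using the field names from the diff (not Monolog's real API):

```python
def build_logstash_message(record, system_name):
    """Copy only the fields that are actually present in the log record."""
    message = {
        "@timestamp": record["datetime"],  # always required
        "@source": system_name,
        "@fields": {},
    }
    # Each optional field is checked individually instead of being copied
    # blindly (the equivalent of dropping PHP's @ suppression).
    if "message" in record:
        message["@message"] = record["message"]
    if "channel" in record:
        message["@tags"] = [record["channel"]]
        message["@fields"]["channel"] = record["channel"]
    if "level" in record:
        message["@fields"]["level"] = record["level"]
    return message
```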
6dedb1080ff84861392797b7fc094845821b7c9b
diff --git a/bigtable-client-core-parent/bigtable-client-core/src/main/java/com/google/cloud/bigtable/config/RetryOptions.java b/bigtable-client-core-parent/bigtable-client-core/src/main/java/com/google/cloud/bigtable/config/RetryOptions.java index <HASH>..<HASH> 100644 --- a/bigtable-client-core-parent/bigtable-client-core/src/main/java/com/google/cloud/bigtable/config/RetryOptions.java +++ b/bigtable-client-core-parent/bigtable-client-core/src/main/java/com/google/cloud/bigtable/config/RetryOptions.java @@ -229,7 +229,6 @@ public class RetryOptions implements Serializable { * @param backoffMultiplier a double. * @param maxElaspedBackoffMillis a int. * @param streamingBufferSize a int. - * @param streamingBatchSize a int. * @param readPartialRowTimeoutMillis a int. * @param maxScanTimeoutRetries a int. * @param statusToRetryOn a Set.
Fixing a RetryOptions comment. (#<I>)
googleapis_cloud-bigtable-client
train
dc223d5f5c167c3128753b53e7b103ce1d89611d
diff --git a/molmod/minimizer.py b/molmod/minimizer.py index <HASH>..<HASH> 100644 --- a/molmod/minimizer.py +++ b/molmod/minimizer.py @@ -828,7 +828,7 @@ class ConvergenceCondition(object): class StopLossCondition(object): """Callable object that checks if minimizer has lost track""" - def __init__(self, max_iter=None, fun_margin=None, grad_margin=None): + def __init__(self, max_iter=None, fun_margin=None, grad_margin=None, step_min=None): """ Optional arguments: | ``max_iter`` -- the maximum number of iterations allowed @@ -838,6 +838,8 @@ class StopLossCondition(object): | ``grad_margin`` -- if the RMS value of the gradient components goes above the lowest value plus this threshold, the minimization is aborted + | ``step_min`` -- If the RMS step size drops below this margin, the + optimization is interrupted. Only the present arguments define when the minimization has lost track. @@ -845,6 +847,7 @@ class StopLossCondition(object): self.max_iter = max_iter self.fun_margin = fun_margin self.grad_margin = grad_margin + self.step_min = step_min self.reset() @@ -852,7 +855,7 @@ class StopLossCondition(object): self.fn_lowest = None self.grad_rms_lowest = None - def __call__(self, counter, fn, gradient): + def __call__(self, counter, fn, gradient, step): """Return True when the minimizer has lost track""" if self.max_iter is not None and counter >= self.max_iter: return True @@ -870,6 +873,11 @@ class StopLossCondition(object): elif grad_rms > self.grad_rms_lowest + self.grad_margin: return True + if self.step_min is not None: + step_rms = numpy.sqrt((step**2).mean()) + if self.step_min > step_rms: + return True + # all is fine return False @@ -1090,7 +1098,7 @@ class Minimizer(object): self._screen("CONVERGED", newline=True) return True # check stop loss on the gradient in original basis - lost = self.stop_loss_condition(self.counter, self.f, gradient_orig) + lost = self.stop_loss_condition(self.counter, self.f, gradient_orig, step_orig) if lost: self._screen("LOST", newline=True) return False diff --git a/test/test_minimizer.py b/test/test_minimizer.py index <HASH>..<HASH> 100644 --- a/test/test_minimizer.py +++ b/test/test_minimizer.py @@ -221,3 +221,15 @@ class MinimizerTestCase(BaseTestCase): self.assertArraysAlmostEqual(orig, check) check = prec_fun.undo(prec_fun.do(orig)) self.assertArraysAlmostEqual(orig, check) + + def test_stop_loss_step(self): + x_init = numpy.zeros(2, float) + search_direction = ConjugateGradient() + line_search = NewtonLineSearch() + convergence = ConvergenceCondition(grad_rms=1e-6, step_rms=1e-6, grad_max=3e-6, step_max=3e-6) + stop_loss = StopLossCondition(max_iter=50, step_min=1e-2) + minimizer = Minimizer( + x_init, fun, search_direction, line_search, convergence, stop_loss, + anagrad=True, verbose=False, + ) + assert numpy.sqrt((minimizer.step**2).mean()) < 1e-2
step_min in StopLossCondition
molmod_molmod
train
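The molmod commit above adds a `step_min` threshold to `StopLossCondition`: when the RMS of the proposed optimization step drops below that value, the minimizer is treated as stuck and interrupted. A standalone sketch of just that check (not the library's full class):

```python
import numpy as np

def step_too_small(step, step_min):
    """Return True when the RMS step size has dropped below step_min."""
    if step_min is None:
        return False
    step_rms = np.sqrt((np.asarray(step) ** 2).mean())
    return step_rms < step_min

# A tiny step triggers the stop-loss, a normal-sized step does not.
assert step_too_small([1e-4, -2e-4], step_min=1e-2)
assert not step_too_small([0.5, 0.3], step_min=1e-2)
```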
233922e7c5d0aa92bc2d094ad0fbb5b2cae94265
diff --git a/benchmark.js b/benchmark.js index <HASH>..<HASH> 100644 --- a/benchmark.js +++ b/benchmark.js @@ -63,19 +63,7 @@ var run = bench([ function fastSafeStringifyDeepCircBench (cb) { fastSafeStringify(deepCirc) setImmediate(cb) - }, - function jsonStringifyDeepTryFirstBench (cb) { - tryStringify(deep) || jsonStringifySafe(deep) - setImmediate(cb) - }, - function fastSafeStringifyDeepTryFirstBench (cb) { - tryStringify(deep) || fastSafeStringify(deep) - setImmediate(cb) } ], 10000) -function tryStringify (obj) { - try { return JSON.stringify(obj) } catch (_) {} -} - run(run)
benchmarks: remove obsolete benchmarks This code only tests JSON.stringify itself, so it is obsolete.
davidmarkclements_fast-safe-stringify
train
b8b7049f39b2037981652a43ab911164acb4d009
diff --git a/zipline/finance/performance/period.py b/zipline/finance/performance/period.py index <HASH>..<HASH> 100644 --- a/zipline/finance/performance/period.py +++ b/zipline/finance/performance/period.py @@ -457,7 +457,7 @@ class PerformancePeriod(object): self.ending_cash + self.ending_value) account.total_positions_value = \ getattr(self, 'total_positions_value', self.ending_value) - account.total_positions_value = \ + account.total_positions_exposure = \ getattr(self, 'total_positions_exposure', self.ending_exposure) account.regt_equity = \ getattr(self, 'regt_equity', self.ending_cash)
BUG: Fixes incorrect value assignment in perf period
quantopian_zipline
train
912a678eccb385ba754529c48457a087b1817ab7
diff --git a/keyrings/alt/file_base.py b/keyrings/alt/file_base.py index <HASH>..<HASH> 100644 --- a/keyrings/alt/file_base.py +++ b/keyrings/alt/file_base.py @@ -96,9 +96,9 @@ class Keyring(FileBacked, KeyringBackend): """ Read the password from the file. """ + assoc = self._generate_assoc(service, username) service = escape_for_ini(service) username = escape_for_ini(username) - assoc = (service + '\0' + username).encode() # load the passwords from the file config = configparser.RawConfigParser() @@ -123,8 +123,7 @@ class Keyring(FileBacked, KeyringBackend): def set_password(self, service, username, password): """Write the password in the file. """ - assoc = (escape_for_ini(service) + '\0' + - escape_for_ini(username)).encode() + assoc = self._generate_assoc(service, username) # encrypt the password password_encrypted = self.encrypt(password.encode('utf-8'), assoc) # encode with base64 and add line break to untangle config file @@ -132,6 +131,12 @@ class Keyring(FileBacked, KeyringBackend): self._write_config_value(service, username, password_base64) + def _generate_assoc(self, service, username): + """Generate tamper resistant bytestring of associated data + """ + return (escape_for_ini(service) + '\0' + + escape_for_ini(username)).encode() + def _write_config_value(self, service, key, value): # ensure the file exists self._ensure_file_path()
file_base.py: consolidate generation of associated data
jaraco_keyrings.alt
train
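The keyrings.alt commit above removes the duplicated construction of the associated-data bytestring from `get_password`/`set_password` and moves it into a single `_generate_assoc` helper. A hedged sketch of the same refactoring shape; `_escape` is a simplified stand-in, not the real `escape_for_ini`:

```python
def _escape(value):
    # Stand-in for keyrings' escape_for_ini(); the real function escapes
    # characters that are unsafe in INI files.
    return value.replace("\0", "")

def generate_assoc(service, username):
    """Build the tamper-resistant associated-data bytestring in one place."""
    return (_escape(service) + "\0" + _escape(username)).encode()

def set_password(store, service, username, password):
    assoc = generate_assoc(service, username)   # shared helper
    store[assoc] = password

def get_password(store, service, username):
    assoc = generate_assoc(service, username)   # same bytes on both paths
    return store.get(assoc)
```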
a0904aac5e940f92ee5c9a20e83bca605f61dab0
diff --git a/LDAP.js b/LDAP.js index <HASH>..<HASH> 100644 --- a/LDAP.js +++ b/LDAP.js @@ -7,10 +7,11 @@ try { LDAPConnection = require('./build/Release/LDAP').LDAPConnection; } -function LDAPError(message, msgid) { +function LDAPError(message, msgid, errcode) { this.name = 'LDAPError'; this.message = message || 'Default Message'; this.msgid = msgid; + this.code = errcode; } LDAPError.prototype = new Error(); LDAPError.prototype.constructor = LDAPError; @@ -80,12 +81,12 @@ var LDAP = function(opts) { args: args, tm: setTimeout(function() { delete callbacks[msgid]; - fn(new LDAPError('Timeout', msgid)); + fn(new LDAPError('Timeout', msgid, -2)); }, opts.timeout) } } } else { - fn(new Error('LDAP Error ' + binding.err2string(), msgid)); + fn(new LDAPError('LDAP Error ' + binding.err2string(), msgid, msgid)); reconnect(); } return msgid; @@ -319,11 +320,11 @@ var LDAP = function(opts) { },function(msgid, errcode, data, cookie) { //searchresult callback stats.searchresults++; - handleCallback(msgid, (errcode?new Error(binding.err2string(errcode)):undefined), data, cookie); + handleCallback(msgid, (errcode?new LDAPError(binding.err2string(errcode), msgid, errcode):undefined), data, cookie); }, function(msgid, errcode, data) { //result callback stats.results++; - handleCallback(msgid, (errcode?new Error(binding.err2string(errcode)):undefined), data); + handleCallback(msgid, (errcode?new LDAPError(binding.err2string(errcode), msgid, errcode):undefined), data); }, function() { //error callback stats.errors++;
Return ldap result code with error object.
jeremycx_node-LDAP
train
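The node-LDAP commit above extends the custom `LDAPError` so callers receive the LDAP result code alongside the message and message id. A minimal Python sketch of the same idea — an exception class carrying a machine-readable code (names are illustrative, not node-LDAP's API):

```python
class LDAPError(Exception):
    """Error that exposes the LDAP result code to callers."""

    def __init__(self, message, msgid=None, code=None):
        super().__init__(message)
        self.msgid = msgid
        self.code = code   # e.g. -2 for a client-side timeout

try:
    raise LDAPError("Timeout", msgid=42, code=-2)
except LDAPError as err:
    # Callers can branch on the code instead of parsing the message text.
    if err.code == -2:
        print(f"request {err.msgid} timed out")
```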
a1f340b4d1ed10c778a24f22b6d6ee52ab1178df
diff --git a/jwe/jwe_test.go b/jwe/jwe_test.go index <HASH>..<HASH> 100644 --- a/jwe/jwe_test.go +++ b/jwe/jwe_test.go @@ -379,8 +379,6 @@ func TestEncode_ECDH(t *testing.T) { for _, alg := range algorithms { alg := alg t.Run(alg.String(), func(t *testing.T) { - t.Parallel() - encrypted, err := jwe.Encrypt(plaintext, alg, &privkey.PublicKey, jwa.A256GCM, jwa.NoCompress) if !assert.NoError(t, err, "Encrypt succeeds") { return
Ah, right. -tags debug0 gives that race condition
lestrrat-go_jwx
train
4c6fd2207713bd25adcad60d13d8d812c751a2f0
diff --git a/tests/bootstrap.php b/tests/bootstrap.php index <HASH>..<HASH> 100644 --- a/tests/bootstrap.php +++ b/tests/bootstrap.php @@ -8,11 +8,6 @@ * @package PageTypeBuilder */ -// Activates this plugin in WordPress so it can be tested. -$GLOBALS['wp_tests_options'] = array( - 'active_plugins' => array('page-type-builder/ptb-loader.php'), -); - // If the develop repo location is defined (as WP_DEVELOP_DIR), use that // location. Otherwise, we'll just assume that this plugin is installed in a // WordPress develop SVN checkout. @@ -20,15 +15,23 @@ $GLOBALS['wp_tests_options'] = array( if (getenv('WP_DEVELOP_DIR') !== false) { $test_root = getenv('WP_DEVELOP_DIR'); } else if (file_exists('../../../../tests/phpunit/includes/bootstrap.php')) { - $test_root = '../../../../includes'; + $test_root = '../../../../'; } else if (file_exists('/tmp/wordpress-tests-lib/includes/bootstrap.php')) { - $test_root = '/tmp/wordpress-tests-lib/includes'; + $test_root = '/tmp/wordpress-tests-lib/'; } else if (file_exists('../../../develop/tests/phpunit/includes/bootstrap.php')) { - $test_root = '../../../develop/tests/phpunit/includes'; + $test_root = '../../../develop/tests/phpunit/'; } +require $test_root . '/includes/functions.php'; + +function _manually_load_plugin() { + require dirname(__FILE__) . '/../ptb-loader.php'; +} + +tests_add_filter('muplugins_loaded', '_manually_load_plugin'); + // Load phpunit -require $test_root . '/bootstrap.php'; +require $test_root . '/includes/bootstrap.php'; // Load utilities file for testing. require 'lib/utilities.php'; \ No newline at end of file diff --git a/tests/test-ptb-plugin.php b/tests/test-ptb-plugin.php index <HASH>..<HASH> 100644 --- a/tests/test-ptb-plugin.php +++ b/tests/test-ptb-plugin.php @@ -16,8 +16,7 @@ class WP_PTB_Plugin extends WP_UnitTestCase { */ public function test_plugin_activated () { - $directory = basename(dirname(dirname(__FILE__))); - $this->assertTrue(is_plugin_active($directory . '/ptb-loader.php')); + $this->assertTrue(class_exists('PTB_Core') && class_exists('PTB_Admin')); } /**
Changing how tests are loaded and changing activation test
wp-papi_papi
train
c6d85b34d877306d70a0b9bd55bbc8488fcf3f30
diff --git a/dougrain/builder.py b/dougrain/builder.py index <HASH>..<HASH> 100644 --- a/dougrain/builder.py +++ b/dougrain/builder.py @@ -34,6 +34,10 @@ class Builder(object): def embed(self, rel, target, wrap=False): new_embed = target.as_object() self.add_rel('_embedded', rel, new_embed, wrap) + + if self.draft.automatic_link: + self.add_link(rel, target, wrap) + return self def add_rel(self, key, rel, thing, wrap): diff --git a/test/test_builder.py b/test/test_builder.py index <HASH>..<HASH> 100644 --- a/test/test_builder.py +++ b/test/test_builder.py @@ -291,6 +291,85 @@ class EmbedBuilderTests(BuilderTests): names = [embedded.properties['name'] for embedded in item_embeds] self.assertSequenceEqual(names, ['first', 'second', 'third']) + def testEmbedAlsoAddsLinkWithDraft5(self): + self.builder = Builder(self.uri, draft=drafts.DRAFT_5) + self.builder.embed('item', self.make_target('first')) + doc = Document.from_object(self.builder.as_object(), + base_uri="http://localhost") + item_links = doc.links['item'] + urls = [link.url() for link in item_links] + item_embeds = doc.embedded['item'] + expected_urls = [embedded.url() for embedded in item_embeds] + self.assertSequenceEqual(urls, expected_urls) + + def testEmbedDoesNotAddLinkWithDraft4(self): + self.builder = Builder(self.uri, draft=drafts.DRAFT_4) + self.builder.embed('item', self.make_target('first')) + doc = Document.from_object(self.builder.as_object(), + base_uri="http://localhost") + item_links = doc.links.get('item', []) + urls = [link.url() for link in item_links] + item_embeds = doc.embedded['item'] + expected_urls = [] + self.assertSequenceEqual(urls, expected_urls) + + def testEmbedDoesNotAddLinkWithDraft3(self): + self.builder = Builder(self.uri, draft=drafts.DRAFT_3) + self.builder.embed('item', self.make_target('first')) + doc = Document.from_object(self.builder.as_object(), + base_uri="http://localhost") + item_links = doc.links.get('item', []) + urls = [link.url() for link in item_links] + item_embeds = doc.embedded['item'] + expected_urls = [] + self.assertSequenceEqual(urls, expected_urls) + + def testEmbedAddsUnnecessaryLinkWithDraft5(self): + self.builder = Builder(self.uri, draft=drafts.DRAFT_5) + target = self.make_target('first') + + self.builder.add_link('item', target, wrap=True) + doc = Document.from_object(self.builder.as_object(), + base_uri="http://localhost") + urls_before_embed = [link.url() for link in doc.links['item']] + + self.builder.embed('item', target, wrap=True) + doc = Document.from_object(self.builder.as_object(), + base_uri="http://localhost") + urls_after_embed = [link.url() for link in doc.links['item']] + + item_embeds = doc.embedded['item'] + embedded_urls = [embedded.url() for embedded in item_embeds] + expected_urls = urls_before_embed + embedded_urls + self.assertSequenceEqual(urls_after_embed, expected_urls) + + def testAutomaticLinkIsNotWrappedDefault(self): + target = self.make_target("first") + self.builder.embed('item', target) + + doc = Document.from_object(self.builder.as_object(), + base_uri="http://localhost") + self.assertIsInstance(self.builder.as_object()['_links']['item'], + dict) + + def testAutomaticLinkIsNotWrappedWithUnwrappedEmbed(self): + target = self.make_target("first") + self.builder.embed('item', target, wrap=False) + + doc = Document.from_object(self.builder.as_object(), + base_uri="http://localhost") + self.assertIsInstance(self.builder.as_object()['_links']['item'], + dict) + + def testAutomaticLinkIsWrappedWithWrappedEmbed(self): + target = 
self.make_target("first") + self.builder.embed('item', target, wrap=True) + + doc = Document.from_object(self.builder.as_object(), + base_uri="http://localhost") + self.assertIsInstance(self.builder.as_object()['_links']['item'], + list) + class EmbedBuilderDocumentTests(EmbedBuilderTests): def make_target(self, name):
Make the draft 5 builder's embed automatically add a link.
wharris_dougrain
train
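The dougrain commit above makes `Builder.embed()` also register a link for the same relation when the document uses draft 5, so embedded resources stay reachable through `_links`. A small usage sketch based only on the names that appear in the diff and its tests; the exact import path for `drafts` and the target construction are assumptions:

```python
from dougrain import Builder
from dougrain import drafts   # assumption: module exposing DRAFT_5 etc.

target = Builder("/items/first")            # resource to embed

doc = Builder("/collection", draft=drafts.DRAFT_5)
doc.embed("item", target)                   # draft 5: also adds an 'item' link

obj = doc.as_object()
assert "item" in obj["_embedded"]           # embedded representation
assert "item" in obj["_links"]              # automatic companion link (draft 5 only)
```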
7b40246a5971f66c7d07eec5c586715cfd2205b0
diff --git a/packages/node-pico-engine-core/src/DB.js b/packages/node-pico-engine-core/src/DB.js index <HASH>..<HASH> 100644 --- a/packages/node-pico-engine-core/src/DB.js +++ b/packages/node-pico-engine-core/src/DB.js @@ -98,11 +98,11 @@ module.exports = function(opts){ removeChannel: function(pico_id, eci, callback){ ldb.del(["pico", pico_id, "channel", eci], callback); }, - putEntVar: function(pico_id, var_name, val, callback){ - ldb.put(["pico", pico_id, "vars", var_name], val, callback); + putEntVar: function(pico_id, rid, var_name, val, callback){ + ldb.put(["pico", pico_id, rid, "vars", var_name], val, callback); }, - getEntVar: function(pico_id, var_name, callback){ - ldb.get(["pico", pico_id, "vars", var_name], callback); + getEntVar: function(pico_id, rid, var_name, callback){ + ldb.get(["pico", pico_id, rid, "vars", var_name], callback); }, putAppVar: function(rid, var_name, val, callback){ ldb.put(["resultset", rid, "vars", var_name], val, callback); diff --git a/packages/node-pico-engine-core/src/index.js b/packages/node-pico-engine-core/src/index.js index <HASH>..<HASH> 100644 --- a/packages/node-pico-engine-core/src/index.js +++ b/packages/node-pico-engine-core/src/index.js @@ -71,10 +71,10 @@ module.exports = function(conf){ var mkPersistent = function(pico_id, rid){ return { getEnt: function(key){ - return db.getEntVarFuture(pico_id, key).wait(); + return db.getEntVarFuture(pico_id, rid, key).wait(); }, putEnt: function(key, value){ - db.putEntVarFuture(pico_id, key, value).wait(); + db.putEntVarFuture(pico_id, rid, key, value).wait(); }, getApp: function(key, value){ return db.getAppVarFuture(rid, key).wait();
ent vars need pico_id and rid in path
Picolab_pico-engine
train
671559d05126baa762b6c5b4210e54366be81b9c
diff --git a/CHANGELOG.md b/CHANGELOG.md index <HASH>..<HASH> 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,11 @@ # Changelog All notable changes to this project will be documented in this file. +## [0.6.0] 2018-03-28 +### Changed +- Removed verbose code from `#__actual_context__` and `#method` methods of + `Symbiont::PublicTrigger` and `Symbiont::PrivateTrigger` classes. + ## [0.5.0] 2018-03-28 ### Added - Support for method dispatching for `BasicObject` instances (which does not support `#respond_to?` method); diff --git a/lib/symbiont/private_trigger.rb b/lib/symbiont/private_trigger.rb index <HASH>..<HASH> 100644 --- a/lib/symbiont/private_trigger.rb +++ b/lib/symbiont/private_trigger.rb @@ -33,12 +33,10 @@ module Symbiont # this situation is caused when the context object does not respodond to # #resond_to? method (BasicObject instances for example) - context_singleton = __extract_singleton_object__(context) + context_singleton = __extract_singleton_class__(context) - context_singleton.private_methods(true).include?(method_name) || - context_singleton.methods(true).include?(method_name) || - context_singleton.superclass.private_instance_methods(true).include?(method_name) || - context_singleton.superclass.instance_methods(true).include?(method_name) + context_singleton.private_instance_methods(true).include?(method_name) || + context_singleton.instance_methods(true).include?(method_name) end end || super end @@ -66,8 +64,8 @@ module Symbiont # to #method method (BasicObject instances for example). We can extract # method objects via it's singleton class. - __context_singleton__ = __extract_singleton_object__(__context__) - __context_singleton__.superclass.instance_method(method_name).bind(__context__) + __context_singleton__ = __extract_singleton_class__(__context__) + __context_singleton__.instance_method(method_name).bind(__context__) end end end diff --git a/lib/symbiont/public_trigger.rb b/lib/symbiont/public_trigger.rb index <HASH>..<HASH> 100644 --- a/lib/symbiont/public_trigger.rb +++ b/lib/symbiont/public_trigger.rb @@ -33,10 +33,8 @@ module Symbiont # this situation is caused when the context object does not respodond to # #resond_to? method (BasicObject instances for example) - context_singleton = __extract_singleton_object__(context) - - context_singleton.public_methods(false).include?(method_name) || - context_singleton.superclass.public_instance_methods(false).include?(method_name) + context_singleton = __extract_singleton_class__(context) + context_singleton.public_instance_methods(true).include?(method_name) end end || super end @@ -44,6 +42,8 @@ module Symbiont # Returns a corresponding public method object of the actual context. # # @param method_name [String,Symbol] Method name + # @raise [::NameError] + # @raise [Symbiont::Trigger::ContextNoMethodError, ::NoMethodError] # @return [Method] # # @see [Symbiont::Trigger#method] @@ -64,8 +64,8 @@ module Symbiont # to #method method (BasicObject instances for example). We can extract # method objects via it's singleton class. 
- __context_singleton__ = __extract_singleton_object__(__context__) - __context_singleton__.superclass.public_instance_method(method_name).bind(__context__) + __context_singleton__ = __extract_singleton_class__(__context__) + __context_singleton__.public_instance_method(method_name).bind(__context__) end end end diff --git a/lib/symbiont/trigger.rb b/lib/symbiont/trigger.rb index <HASH>..<HASH> 100644 --- a/lib/symbiont/trigger.rb +++ b/lib/symbiont/trigger.rb @@ -280,7 +280,7 @@ class Symbiont::Trigger < BasicObject # # @api private # @sionce 0.5.0 - def __extract_singleton_object__(object) + def __extract_singleton_class__(object) # NOTE: `<<` approach is used cuz BasicObject does not support #singleton_class method. class << object; self; end end
[trigger] removed verbose code
0exp_symbiont-ruby
train
cb014a87a20b91ad44025918115d8f90bd5da4e8
diff --git a/plugins/roc-plugin-style-css/src/roc/index.js b/plugins/roc-plugin-style-css/src/roc/index.js index <HASH>..<HASH> 100644 --- a/plugins/roc-plugin-style-css/src/roc/index.js +++ b/plugins/roc-plugin-style-css/src/roc/index.js @@ -1,14 +1,19 @@ import { isArray, isObject, isString, oneOf } from 'roc/validators'; -import { lazyFunctionRequire } from 'roc'; +import { lazyFunctionRequire, generateDependencies } from 'roc'; import config from '../config/roc.config.js'; import meta from '../config/roc.config.meta.js'; +import { packageJSON } from './util'; + const lazyRequire = lazyFunctionRequire(require); export default { config, meta, + dependencies: { + exports: generateDependencies(packageJSON, ['extract-text-webpack-plugin']), + }, actions: [{ hook: 'build-webpack', description: 'Adds CSS support.', diff --git a/plugins/roc-plugin-style-css/src/roc/util.js b/plugins/roc-plugin-style-css/src/roc/util.js index <HASH>..<HASH> 100644 --- a/plugins/roc-plugin-style-css/src/roc/util.js +++ b/plugins/roc-plugin-style-css/src/roc/util.js @@ -1,6 +1,6 @@ import { runHook } from 'roc'; -const packageJSON = require('../../package.json'); +export const packageJSON = require('../../package.json'); /** * Helper function for invoking/running a hook, pre-configured for the current package.
feat(roc-plugin-style-css): Export the ExtractText plugin as a dependency
rocjs_extensions
train
10ece08e80061ac0eeb52a87052a716fdc808f6a
diff --git a/tests/functional/test.py b/tests/functional/test.py index <HASH>..<HASH> 100755 --- a/tests/functional/test.py +++ b/tests/functional/test.py @@ -36,6 +36,7 @@ VIM_DIFF = False # Will show visual diff using (g?)vimdiff on test failure UPDATE = False # True and test will be updated on failure FOUT = sys.stdout # Output file. By default stdout but can be captured changing this TEMP_DIR = None +QUIET = False # True so suppress output (useful for testing) class TempTestFile(object): @@ -84,6 +85,13 @@ def _error(msg, exit_code=None): exit(exit_code) +def _msg(msg, force=False): + """ Shows a msg to the FOUT output if not in QUIET mode or force == True + """ + if not QUIET or force: + FOUT.write(msg) + + def get_file_lines(filename, ignore_regexp=None, replace_regexp=None, replace_what='.', replace_with='.'): """ Opens source file <filename> and load its lines, @@ -276,17 +284,17 @@ def testFiles(file_list): result = None COUNTER += 1 - FOUT.write(("%4i " % COUNTER) + fname + ':') + _msg(("%4i " % COUNTER) + fname + ':') if result: - FOUT.write('ok \r') + _msg('ok \r') FOUT.flush() elif result is None: - FOUT.write('?\r') + _msg('?\r') else: FAILED += 1 EXIT_CODE = 1 - FOUT.write('FAIL\n') + _msg('FAIL\n') def upgradeTest(fileList, f3diff): @@ -354,13 +362,13 @@ def upgradeTest(fileList, f3diff): x = x.strip() y = y.strip() c = '=' if x == y else '!' - print('"%s"%s"%s"' % (x.strip(), c, y.strip())) + _msg('"%s"%s"%s"\n' % (x.strip(), c, y.strip())) os.unlink(tfname) continue # Not the same diff os.unlink(fname1) os.rename(tfname, fname1) - print("\rTest: %s (%s) updated" % (fname, fname1)) + _msg("\rTest: %s (%s) updated\n" % (fname, fname1)) def set_temp_dir(tmp_dir): @@ -391,6 +399,7 @@ def main(argv=None): global VIM_DIFF global UPDATE global TEMP_DIR + global QUIET parser = argparse.ArgumentParser(description='Test compiler output against source code samples') parser.add_argument('-d', '--show-diff', action='store_true', help='Shows output difference on failure') @@ -401,33 +410,38 @@ def main(argv=None): parser.add_argument('-U', '--force-update', action='store_true', help='Updates all failed test with the new output') parser.add_argument('--tmp-dir', type=str, default=TEMP_DIR, help='Temporary directory for tests generation') parser.add_argument('FILES', nargs='+', type=str, help='List of files to be processed') + parser.add_argument('-q', '--quiet', action='store_true', help='Run quietly, suppressing normal output') args = parser.parse_args(argv) temp_dir_created = False try: + QUIET = args.quiet + PRINT_DIFF = args.show_diff + VIM_DIFF = args.show_visual_diff + UPDATE = args.force_update + temp_dir_created = set_temp_dir(args.tmp_dir) if args.update: upgradeTest(args.FILES, args.update) exit(EXIT_CODE) - PRINT_DIFF = args.show_diff - VIM_DIFF = args.show_visual_diff - UPDATE = args.force_update testFiles(args.FILES) finally: if temp_dir_created: os.rmdir(TEMP_DIR) + TEMP_DIR = None + if __name__ == '__main__': CLOSE_STDERR = True main() if COUNTER: - print("Total: %i, Failed: %i (%3.2f%%)" % (COUNTER, FAILED, 100.0 * FAILED / float(COUNTER))) + _msg("Total: %i, Failed: %i (%3.2f%%)\n" % (COUNTER, FAILED, 100.0 * FAILED / float(COUNTER))) else: - print('No tests found') + _msg("No tests found\n") EXIT_CODE = 1 exit(EXIT_CODE)
chore: allow --quiet flag This will allow executing test.py silently and determining whether the tests passed just from the returned error level.
boriel_zxbasic
train
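The zxbasic commit above adds a `--quiet` flag and routes all progress output through a `_msg` helper, so the test runner can be scripted purely on its exit code. A generic sketch of the same pattern (not the project's actual code):

```python
import argparse
import sys

QUIET = False

def _msg(text, force=False):
    """Print progress output unless running in quiet mode."""
    if not QUIET or force:
        sys.stdout.write(text)

def main(argv=None):
    global QUIET
    parser = argparse.ArgumentParser()
    parser.add_argument("-q", "--quiet", action="store_true",
                        help="suppress normal output")
    args = parser.parse_args(argv)
    QUIET = args.quiet
    _msg("running tests...\n")
    return 0   # callers in quiet mode inspect only the exit code

if __name__ == "__main__":
    sys.exit(main())
```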
e77361107ef9e8d9e425a4f61bc592fd904b5066
diff --git a/tests/spec/fs.rmdir.spec.js b/tests/spec/fs.rmdir.spec.js index <HASH>..<HASH> 100644 --- a/tests/spec/fs.rmdir.spec.js +++ b/tests/spec/fs.rmdir.spec.js @@ -97,4 +97,27 @@ describe('fs.rmdir', function() { }); }); }); + + it('(promise) should be a function', function() { + var fsPromises = util.fs().promises; + expect(fsPromises.rmdir).to.be.a('function'); + }); + + it('(promise) should return an error if the path does not exist', function() { + var fsPromises = util.fs().promises; + return fsPromises.rmdir('/tmp/mydir') + .catch(error => { + expect(error).to.exist; + expect(error.code).to.equal('ENOENT'); + }); + }); + + it('(promise) should return an error if attempting to remove the root directory', function() { + var fsPromises = util.fs().promises; + return fsPromises.rmdir('/') + .catch(error => { + expect(error).to.exist; + expect(error.code).to.equal('EBUSY'); + }); + }); });
Issue <I> - Add tests to fsPromises.rmdir when directory doesn't exist and when trying to delete root directory (#<I>) * Fix #<I> - Fixed indentation and ENOENT -> EBUSY on line <I> * Deleted extra line at the end of document * Fix #<I> - Added tests for fsPromises.rmdir when trying to remove a nonexistent directory and when trying to remove root directory
filerjs_filer
train
537bf028eb28f1614c3f113cc6facf428757236c
diff --git a/lib/reveal-ck/builders/create_slides_html.rb b/lib/reveal-ck/builders/create_slides_html.rb index <HASH>..<HASH> 100644 --- a/lib/reveal-ck/builders/create_slides_html.rb +++ b/lib/reveal-ck/builders/create_slides_html.rb @@ -35,12 +35,19 @@ module RevealCK end def apply_filters_to(html) - pipeline = HTML::Pipeline.new([HTML::Pipeline::RevealCKEmojiFilter]) + filters = get_classes_from_array(config.filters) + pipeline = HTML::Pipeline.new(filters) filtered_html_string = FilteredHtmlString.new(html: html, config: config.to_h, pipeline: pipeline) filtered_html_string.render end + + def get_classes_from_array(array_of_names) + array_of_names.map do |name| + name.split('::').reduce(Object) { |a, e| a.const_get(e) } + end + end end end end diff --git a/spec/lib/reveal-ck/builders/create_slides_html_spec.rb b/spec/lib/reveal-ck/builders/create_slides_html_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/reveal-ck/builders/create_slides_html_spec.rb +++ b/spec/lib/reveal-ck/builders/create_slides_html_spec.rb @@ -31,7 +31,7 @@ module RevealCK end end - it 'can handle transforming emoji' do + it 'can transform emoji' do Dir.mktmpdir do |dir| Dir.chdir(dir) do @@ -40,11 +40,13 @@ module RevealCK file.puts('# I :heart: Slides') end + config = Config.new + config.filters = ['HTML::Pipeline::RevealCKEmojiFilter'] application = Rake::Application.new slides_html = CreateSlidesHtml.new(slides_file: slides_file_initial, output_dir: dir, - config: Config.new, + config: config, application: application) slides_html.prepare
[configurable-filters] List of filters is now fetched from Config
jedcn_reveal-ck
train
b826e479b957b5127b73be580809ea639eb1e212
diff --git a/openshift/dynamic/client.py b/openshift/dynamic/client.py index <HASH>..<HASH> 100644 --- a/openshift/dynamic/client.py +++ b/openshift/dynamic/client.py @@ -14,6 +14,7 @@ from kubernetes import watch from kubernetes.client.rest import ApiException from openshift.dynamic.exceptions import ResourceNotFoundError, ResourceNotUniqueError, api_exception, KubernetesValidateMissing +from urllib3.exceptions import ProtocolError, MaxRetryError try: import kubernetes_validate @@ -671,7 +672,16 @@ class Discoverer(object): def _load_server_info(self): if not self._cache.get('version'): - self._cache['version'] = {'kubernetes': load_json(self.client.request('get', '/version'))} + try: + self._cache['version'] = {'kubernetes': load_json(self.client.request('get', '/version'))} + except (ValueError, MaxRetryError) as e: + if isinstance(e, MaxRetryError) and not isinstance(e.reason, ProtocolError): + raise + if not self.client.configuration.host.startswith("https://"): + raise ValueError("Host value %s should start with https:// when talking to HTTPS endpoint" % + self.client.configuration.host) + else: + raise try: self._cache['version']['openshift'] = load_json(self.client.request('get', '/version/openshift')) except ApiException:
Improve error message on protocol mismatch Catch attempts to talk HTTP to HTTPS services
openshift_openshift-restclient-python
train
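The openshift-restclient commit above catches the low-level `ProtocolError`/`MaxRetryError` raised when the client speaks plain HTTP to an HTTPS endpoint and re-raises a clearer hint about the host URL. A simplified sketch of that pattern, using only the urllib3 exceptions the diff itself imports (the surrounding client object is invented for illustration):

```python
from urllib3.exceptions import MaxRetryError, ProtocolError

def load_server_version(client):
    try:
        return client.request("get", "/version")
    except (ValueError, MaxRetryError) as exc:
        # Only translate the "garbage response" case; other retry errors
        # keep their original meaning.
        if isinstance(exc, MaxRetryError) and not isinstance(exc.reason, ProtocolError):
            raise
        if not client.configuration.host.startswith("https://"):
            raise ValueError(
                "Host value %s should start with https:// when talking to "
                "an HTTPS endpoint" % client.configuration.host
            )
        raise
```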
6594adaf0d3c71e61cb8f7568fef9360b2ce5cda
diff --git a/app/models/katello/subscription.rb b/app/models/katello/subscription.rb index <HASH>..<HASH> 100644 --- a/app/models/katello/subscription.rb +++ b/app/models/katello/subscription.rb @@ -42,6 +42,10 @@ module Katello pools.any?(&:recently_expired?) end + def multi_entitlement? + pools.where("#{Katello::Pool.table_name}.multi_entitlement" => true).any? + end + def virt_who_pools pools.where("#{Katello::Pool.table_name}.virt_who" => true) end diff --git a/app/views/katello/api/v2/subscriptions/base.json.rabl b/app/views/katello/api/v2/subscriptions/base.json.rabl index <HASH>..<HASH> 100644 --- a/app/views/katello/api/v2/subscriptions/base.json.rabl +++ b/app/views/katello/api/v2/subscriptions/base.json.rabl @@ -10,7 +10,8 @@ attributes :account_number, :contract_number attributes :support_level attributes :product_id attributes :sockets, :cores, :ram -attributes :instance_multiplier, :stacking_id, :multi_entitlement +attributes :instance_multiplier, :stacking_id +attributes :multi_entitlement? => :multi_entitlement attributes :type attributes :name => :product_name attributes :unmapped_guest diff --git a/webpack/scenes/Subscriptions/Details/SubscriptionAttributes.js b/webpack/scenes/Subscriptions/Details/SubscriptionAttributes.js index <HASH>..<HASH> 100644 --- a/webpack/scenes/Subscriptions/Details/SubscriptionAttributes.js +++ b/webpack/scenes/Subscriptions/Details/SubscriptionAttributes.js @@ -14,6 +14,6 @@ export default { support_type: __('Support Type'), arch: __('Architecture(s)'), type: __('Type'), - mutli_entitlement: __('Multi-entitlement'), + multi_entitlement: __('Multi-entitlement'), stacking_id: __('Stacking ID'), };
Fixes #<I> - Multi-entitlement missing on sub. details
Katello_katello
train
9cd09874c2ab9ab8962e919fd2cd3b19731fe9e0
diff --git a/backend/sublime/testdata/view_test.py b/backend/sublime/testdata/view_test.py index <HASH>..<HASH> 100644 --- a/backend/sublime/testdata/view_test.py +++ b/backend/sublime/testdata/view_test.py @@ -26,9 +26,9 @@ hocus pocus v.settings().set("test", 10) assert v.settings().get("test") == 10 assert v.sel()[0] == (46, 46) - v.run_command("move", {"by": "characters", "forward": True}) - assert v.sel()[0] == (47, 47) v.run_command("move", {"by": "characters", "forward": False}) + assert v.sel()[0] == (45, 45) + v.run_command("move", {"by": "characters", "forward": True}) assert v.sel()[0] == (46, 46) except: print(sys.exc_info()[1])
backend/sublime: Fix broken test. ST3 doesn't allow moving outside of the buffer.
limetext_backend
train
6dfd34399af136e4a688b6c0f23ae3f181376627
diff --git a/test/test_quaternion.py b/test/test_quaternion.py index <HASH>..<HASH> 100644 --- a/test/test_quaternion.py +++ b/test/test_quaternion.py @@ -266,8 +266,9 @@ def test_quaternion_subtract(Qs): for p in Qs[Qs_finite]: assert q-p==quaternion.quaternion(q.w-p.w,q.x-p.x,q.y-p.y,q.z-p.z) strict_assert(False) # Check non-finite +@pytest.mark.xfail def test_quaternion_copysign(Qs): - strict_assert(False) + assert False ## Quaternion-quaternion or quaternion-scalar binary quaternion returners @@ -504,6 +505,7 @@ def test_squad(Rs): assert np.all( np.abs( R_out_squad - R_out_slerp ) < squad_precision ) # assert False # Test unequal input time steps, and correct squad output [0,-2,-1] +@pytest.mark.xfail def test_arrfuncs(): # nonzero # copyswap @@ -513,7 +515,7 @@ def test_arrfuncs(): # compare # argmax # fillwithscalar - pass + assert False def test_setitem_quat(Qs): Ps = Qs[:] @@ -550,32 +552,35 @@ def test_setitem_quat(Qs): with pytest.raises(TypeError): Ps[0] = 's' -# def test_arraydescr(): -# # new -# # richcompare -# # hash -# # repr -# # str - - -# def test_casts(): -# # FLOAT, npy_float -# # DOUBLE, npy_double -# # LONGDOUBLE, npy_longdouble -# # BOOL, npy_bool -# # BYTE, npy_byte -# # UBYTE, npy_ubyte -# # SHORT, npy_short -# # USHORT, npy_ushort -# # INT, npy_int -# # UINT, npy_uint -# # LONG, npy_long -# # ULONG, npy_ulong -# # LONGLONG, npy_longlong -# # ULONGLONG, npy_ulonglong -# # CFLOAT, npy_float -# # CDOUBLE, npy_double -# # CLONGDOUBLE, npy_longdouble +@pytest.mark.xfail +def test_arraydescr(): + # new + # richcompare + # hash + # repr + # str + assert False + +@pytest.mark.xfail +def test_casts(): + # FLOAT, npy_float + # DOUBLE, npy_double + # LONGDOUBLE, npy_longdouble + # BOOL, npy_bool + # BYTE, npy_byte + # UBYTE, npy_ubyte + # SHORT, npy_short + # USHORT, npy_ushort + # INT, npy_int + # UINT, npy_uint + # LONG, npy_long + # ULONG, npy_ulong + # LONGLONG, npy_longlong + # ULONGLONG, npy_ulonglong + # CFLOAT, npy_float + # CDOUBLE, npy_double + # CLONGDOUBLE, npy_longdouble + assert False def test_ufuncs(Rs, Qs): assert np.allclose( np.abs(Rs), np.ones(Rs.shape), atol=1.e-14, rtol=1.e-15) @@ -645,18 +650,4 @@ def test_numpy_array_conversion(Qs): if __name__=='__main__': - print("quaternion_members") - test_quaternion_members() - print("quaternion_methods") - test_quaternion_methods() - print("qaternion_getset") - test_quaternion_getset() - print("arrfuncs") - test_arrfuncs() - print("arraydescr") - test_arraydescr() - print("casts") - test_casts() - print("numpy_array_conversion") - test_numpy_array_conversion() - print("Finished") + print("The tests should be run automatically via py.test (pip install pytest)")
Add xfails to note the tests that still need work
moble_quaternion
train
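The quaternion commit above converts placeholder tests into explicit failures marked with `pytest.mark.xfail`, so the suite records them as expected failures instead of silently passing. A minimal sketch of that idiom:

```python
import pytest

@pytest.mark.xfail
def test_quaternion_copysign():
    # Not implemented yet: recorded as an expected failure ("x") in the
    # report rather than passing silently.
    assert False
```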
2daa1ea321a1fea106dfd5716bac720101f27fa1
diff --git a/oscrypto/_osx/_security.py b/oscrypto/_osx/_security.py index <HASH>..<HASH> 100644 --- a/oscrypto/_osx/_security.py +++ b/oscrypto/_osx/_security.py @@ -64,6 +64,9 @@ class security_const(): errSSLClosedGraceful = -9805 errSSLClosedNoNotify = -9816 errSSLClosedAbort = -9806 + errSSLXCertChainInvalid = -9807 + errSSLCertExpired = -9814 + errSSLCertNotYetValid = -9815 errSecVerifyFailed = -67808 errSecNoTrustSettings = -25263 diff --git a/oscrypto/_osx/tls.py b/oscrypto/_osx/tls.py index <HASH>..<HASH> 100644 --- a/oscrypto/_osx/tls.py +++ b/oscrypto/_osx/tls.py @@ -351,6 +351,8 @@ class TLSSocket(object): result = Security.SSLHandshake(session_context) while result == security_const.errSSLWouldBlock: result = Security.SSLHandshake(session_context) + if result in set([security_const.errSSLXCertChainInvalid, security_const.errSSLCertExpired, security_const.errSSLCertNotYetValid]): + raise TLSError('Server certificate verification failed') if result != security_const.errSSLWouldBlock: handle_sec_error(result)
Add an explicit TLSError exception on OS X when certificate validation fails
wbond_oscrypto
train
affaf1bffb623aa205f766c8b12d16dabea9e2ce
diff --git a/src/main/java/org/javamoney/moneta/spi/LoaderService.java b/src/main/java/org/javamoney/moneta/spi/LoaderService.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/javamoney/moneta/spi/LoaderService.java +++ b/src/main/java/org/javamoney/moneta/spi/LoaderService.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2012, 2013, Credit Suisse (Anatole Tresch), Werner Keil. Licensed under the Apache + * Copyright (c) 2012, 2014, Credit Suisse (Anatole Tresch), Werner Keil. Licensed under the Apache * License, Version 2.0 (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software distributed under the License @@ -13,7 +13,6 @@ import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.Map; -import java.util.Properties; import java.util.Set; import java.util.concurrent.Future;
CLOSED - removed unused import
JavaMoney_jsr354-ri
train
b2dff75b3b9cc24e8b8a57413ee068c77c6bd49a
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -52,6 +52,7 @@ api.boards.getBoard('[repoId]', callback); - [getIssue](https://github.com/ZenHubIO/API#get-issue-data) - [getIssueEvents](https://github.com/ZenHubIO/API#get-issue-events) - [setEstimateForIssue](https://github.com/ZenHubIO/API#set-estimate-for-issue) +- [moveIssueBetweenPipelines](https://github.com/ZenHubIO/API#move-an-issue-between-pipelines) **Boards** - [getBoard](https://github.com/ZenHubIO/API#get-the-zenhub-board-data-for-a-repository) diff --git a/lib/issues.js b/lib/issues.js index <HASH>..<HASH> 100644 --- a/lib/issues.js +++ b/lib/issues.js @@ -36,14 +36,30 @@ Issues.prototype.getIssueEvents = function (repoId, issueNumber, callback) { * Set estimate in issue * This method set estimate for an issue on Issues. * @param int repoId github id of repository - * @param int issueId github id of issue to convert + * @param int issueNumber github id of issue * @param object payload contains estimate to set for the issue, see https://github.com/IssuesIO/API#set-estimate-for-issue for payload format * @callback complete * @memberof Issues * @method setEstimateForIssue */ -Issues.prototype.setEstimateForIssue = function (repoId, issueId, payload, callback) { - this._http._put('repositories/' + repoId + '/issues/' + issueId + '/estimate', {}, payload, function (error, body) { +Issues.prototype.setEstimateForIssue = function (repoId, issueNumber, payload, callback) { + this._http._put('repositories/' + repoId + '/issues/' + issueNumber + '/estimate', {}, payload, function (error, body) { + callback(error, body); + }); +}; + +/** + * Moves an issue between the Pipelines in your repository. + * + * @param int repoId github id of repository + * @param int issue_number github id of issue to convert + * @param object payload see https://github.com/ZenHubIO/API#move-an-issue-between-pipelines for payload format + * @callback complete + * @memberof Issues + * @method moveIssueBetweenPipelines + */ +Issues.prototype.moveIssueBetweenPipelines = function (repoId, issueNumber, payload, callback) { + this._http._post('repositories/' + repoId + '/issues/' + issueNumber + '/moves', {}, payload, function (error, body) { callback(error, body); }); }; diff --git a/test/zenhubWriteTest.js b/test/zenhubWriteTest.js index <HASH>..<HASH> 100644 --- a/test/zenhubWriteTest.js +++ b/test/zenhubWriteTest.js @@ -13,6 +13,31 @@ describe('ZenHub Write API', function() { assert(nock.isDone(), 'not all expected HTTP requests were made'); }); + describe('Issues test', function() { + var issueId = 457; + + it('Set estimate for issue test', function(done) { + var payload = { + estimate: 8 + }; + nock('https://api.zenhub.io/p1') + .put('/repositories/' + repoId + '/issues/' + issueId + '/estimate' + tokenQueryString, payload) + .reply(200, { status: 'OK' }); + api.issues.setEstimateForIssue(repoId, issueId, payload, done); + }); + + it('Move issue between pipelines test', function(done) { + var payload = { + pipeline_id: '595d430add03f01d32460080', + position: 1 + }; + nock('https://api.zenhub.io/p1') + .post('/repositories/' + repoId + '/issues/' + issueId + '/moves' + tokenQueryString, payload) + .reply(200, { status: 'OK' }); + api.issues.moveIssueBetweenPipelines(repoId, issueId, payload, done); + }); + }); + describe('Add/remove issues to epic test', function() { var epicId = 123; @@ -77,20 +102,6 @@ describe('ZenHub Write API', function() { }); }); - describe('Set estimate for issue test', function() { - 
var issueId = 457; - - it('should send payload to the ZenHub API', function(done) { - var payload = { - estimate: 8 - }; - nock('https://api.zenhub.io/p1') - .put('/repositories/' + repoId + '/issues/' + issueId + '/estimate' + tokenQueryString, payload) - .reply(200, { status: 'OK' }); - api.issues.setEstimateForIssue(repoId, issueId, payload, done); - }); - }); - describe('Set start_date for milestone test', function() { var milestoneNumber = 457;
feat (Lib) added move issue between pipelines
ilbonzo_node-zenhub
train
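The node-zenhub commit above wraps ZenHub's "move an issue between pipelines" endpoint: a POST to `repositories/{repoId}/issues/{issueNumber}/moves` with a `pipeline_id` and `position` payload. A hedged Python sketch of the same call with `requests`; the URL and payload come from the diff, while the token handling is an assumption:

```python
import requests

def move_issue_between_pipelines(repo_id, issue_number, pipeline_id,
                                 position, token):
    """POST the move request to the ZenHub API."""
    url = (f"https://api.zenhub.io/p1/repositories/{repo_id}"
           f"/issues/{issue_number}/moves")
    payload = {"pipeline_id": pipeline_id, "position": position}
    # The node wrapper passes the token as a query string; exact auth
    # details may differ depending on the API version (assumption).
    response = requests.post(url, json=payload, params={"access_token": token})
    response.raise_for_status()
    return response.json()
```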
a8656df4e66f169f347f13942a37b502f2367a81
diff --git a/dynamic_dynamodb/core/circuit_breaker.py b/dynamic_dynamodb/core/circuit_breaker.py index <HASH>..<HASH> 100644 --- a/dynamic_dynamodb/core/circuit_breaker.py +++ b/dynamic_dynamodb/core/circuit_breaker.py @@ -57,7 +57,9 @@ def is_open(table_name=None, table_key=None, gsi_name=None, gsi_key=None): url=match.group('url')) auth = (match.group('username'), match.group('password')) - headers = {"x-table-name": table_name} + headers = {} + if table_name: + headers["x-table-name"] = table_name if gsi_name: headers["x-gsi-name"] = gsi_name
Wrap some code in an if statement for consistency
sebdah_dynamic-dynamodb
train
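The dynamic-dynamodb commit above makes the circuit-breaker request add the `x-table-name` header only when a table name is actually given, mirroring how the GSI header was already handled. A tiny sketch of the pattern:

```python
def build_headers(table_name=None, gsi_name=None):
    """Only include headers whose values are actually set."""
    headers = {}
    if table_name:
        headers["x-table-name"] = table_name
    if gsi_name:
        headers["x-gsi-name"] = gsi_name
    return headers

assert build_headers() == {}
assert build_headers(table_name="users") == {"x-table-name": "users"}
```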
381579477ce0a8fde2e74854c3f570e26680dcb0
diff --git a/utxonursery.go b/utxonursery.go index <HASH>..<HASH> 100644 --- a/utxonursery.go +++ b/utxonursery.go @@ -8,14 +8,14 @@ import ( "sync" "sync/atomic" - "github.com/davecgh/go-spew/spew" - "github.com/lightningnetwork/lnd/chainntnfs" - "github.com/lightningnetwork/lnd/channeldb" - "github.com/lightningnetwork/lnd/lnwallet" "github.com/btcsuite/btcd/blockchain" "github.com/btcsuite/btcd/txscript" "github.com/btcsuite/btcd/wire" "github.com/btcsuite/btcutil" + "github.com/davecgh/go-spew/spew" + "github.com/lightningnetwork/lnd/chainntnfs" + "github.com/lightningnetwork/lnd/channeldb" + "github.com/lightningnetwork/lnd/lnwallet" ) // SUMMARY OF OUTPUT STATES @@ -962,7 +962,7 @@ func (u *utxoNursery) createSweepTx(kgtnOutputs []kidOutput, // sweep. case lnwallet.HtlcOfferedTimeoutSecondLevel: weightEstimate.AddWitnessInput( - lnwallet.SecondLevelHtlcSuccessWitnessSize, + lnwallet.ToLocalTimeoutWitnessSize, ) csvOutputs = append(csvOutputs, input) @@ -971,7 +971,7 @@ func (u *utxoNursery) createSweepTx(kgtnOutputs []kidOutput, // sweep. case lnwallet.HtlcAcceptedSuccessSecondLevel: weightEstimate.AddWitnessInput( - lnwallet.SecondLevelHtlcSuccessWitnessSize, + lnwallet.ToLocalTimeoutWitnessSize, ) csvOutputs = append(csvOutputs, input)
utxonursery: use symmetric second level htlc witness size This commit switches over the estimates for htlc success/timeout witness sizes to use a symmetric variable, highlighting their equivalence in size.
lightningnetwork_lnd
train
c566c4c3e05f7c65d5fd13dc2c897fb930544ea7
diff --git a/Concentrate/Concentrator.php b/Concentrate/Concentrator.php index <HASH>..<HASH> 100644 --- a/Concentrate/Concentrator.php +++ b/Concentrate/Concentrator.php @@ -52,9 +52,35 @@ class Concentrate_Concentrator return $this; } - public function sort() + public function compareFiles($file1, $file2) { - return $this->getFileSortOrder(); + if ($file1 == $file2) { + return 0; + } + + $sortOrder = $this->getFileSortOrder(); + + if (!isset($sortOrder[$file1]) && !isset($sortOrder[$file2])) { + return 0; + } + + if (isset($sortOrder[$file1]) && !isset($sortOrder[$file2])) { + return -1; + } + + if (!isset($sortOrder[$file1]) && isset($sortOrder[$file2])) { + return 1; + } + + if ($sortOrder[$file1] < $sortOrder[$file2]) { + return -1; + } + + if ($sortOrder[$file1] > $sortOrder[$file2]) { + return 1; + } + + return 0; } public function getConflicts(array $files) @@ -159,6 +185,7 @@ class Concentrate_Concentrator // get flat list of file dependencies for each file $fileDependencies = array(); + if (isset($info['Provides']) && is_array($info['Provides'])) { foreach ($info['Provides'] as $file => $fileInfo) { if (!isset($fileDependencies[$file])) { @@ -203,6 +230,39 @@ class Concentrate_Concentrator ); } + $fileSortOrder = array_flip($fileSortOrder); + + // add combines as dependencies of all contained files + $combines = false; + foreach ($data as $package_id => $info) { + if (isset($info['Combines']) && is_array($info['Combines'])) { + foreach ($info['Combines'] as $combine => $combineInfo) { + if ( isset($combineInfo['Contains']) + && is_array($combineInfo['Contains']) + ) { + foreach ($combineInfo['Contains'] as $file) { + if ( !isset($fileSortOrder[$file]) + || !is_array($fileSortOrder[$file]) + ) { + $fileSortOrder[$file] = array(); + } + $fileSortOrder[$file][$combine] = array(); + $combines = true; + } + } + } + } + } + + // re-traverse to get dependency order of combines + if ($combines) { + $temp = array(); + $fileSortOrder = $this->filterTree( + $fileSortOrder, + $temp + ); + } + // return indexed by file, with values being the relative sort // order $this->fileSortOrder = array_flip($fileSortOrder); @@ -260,7 +320,9 @@ class Concentrate_Concentrator protected function filterTree(array $nodes, array &$visited) { foreach ($nodes as $node => $childNodes) { - $this->filterTree($childNodes, $visited); + if (is_array($childNodes)) { + $this->filterTree($childNodes, $visited); + } if (!in_array($node, $visited)) { $visited[] = $node; }
Include combined files in the sort order.
silverorange_Concentrate
train
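The Concentrate commit above adds a `compareFiles()` method that orders two files by a precomputed dependency sort order, with unknown files sorting after known ones. A Python sketch of the same comparator logic, with the PHP array replaced by a dict:

```python
def compare_files(file1, file2, sort_order):
    """Classic -1/0/1 comparator driven by a dependency sort-order map."""
    if file1 == file2:
        return 0
    in1, in2 = file1 in sort_order, file2 in sort_order
    if not in1 and not in2:
        return 0
    if in1 and not in2:
        return -1          # known files come before unknown ones
    if not in1 and in2:
        return 1
    if sort_order[file1] < sort_order[file2]:
        return -1
    if sort_order[file1] > sort_order[file2]:
        return 1
    return 0

order = {"jquery.js": 0, "plugin.js": 1}
assert compare_files("jquery.js", "plugin.js", order) == -1
assert compare_files("unknown.js", "plugin.js", order) == 1
```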
84303c3205bb4dbcd9afd79ba1280d631c4ccb46
diff --git a/volume_services/volume_services.go b/volume_services/volume_services.go index <HASH>..<HASH> 100644 --- a/volume_services/volume_services.go +++ b/volume_services/volume_services.go @@ -36,7 +36,10 @@ var _ = VolumeServicesDescribe("Volume Services", func() { appName = random_name.CATSRandomName("APP") workflowhelpers.AsUser(TestSetup.AdminUserContext(), TestSetup.ShortTimeout(), func() { - session := cf.Cf("curl", "/routing/v1/router_groups").Wait() + session := cf.Cf("enable-feature-flag", "diego_docker").Wait() + Expect(session).To(Exit(0), "cannot enable diego_docker feature flag") + + session = cf.Cf("curl", "/routing/v1/router_groups").Wait() Expect(session).To(Exit(0), "cannot retrieve current router groups") routerGroupGuid, reservablePorts = routerGroupIdAndPorts(session.Out.Contents()) @@ -131,6 +134,9 @@ var _ = VolumeServicesDescribe("Volume Services", func() { payload := fmt.Sprintf(`{ "reservable_ports":"%s", "name":"default-tcp", "type": "tcp"}`, reservablePorts) session := cf.Cf("curl", fmt.Sprintf("/routing/v1/router_groups/%s", routerGroupGuid), "-X", "PUT", "-d", payload).Wait() Expect(session).To(Exit(0), "cannot retrieve current router groups") + + session = cf.Cf("disable-feature-flag", "diego_docker").Wait() + Expect(session).To(Exit(0), "cannot disable diego_docker feature flag") }) })
Enable docker before tests - and disable again afterwards [#<I>](<URL>)
cloudfoundry_cf-acceptance-tests
train
5a4fa3759512b5828249769bc4ded827d933010a
diff --git a/src/ducks/connections/index.js b/src/ducks/connections/index.js index <HASH>..<HASH> 100644 --- a/src/ducks/connections/index.js +++ b/src/ducks/connections/index.js @@ -100,7 +100,11 @@ const reducer = (state = {}, action) => { triggers: { ...get(newState, [konnectorSlug, 'triggers'], []), data: [ - ...get(newState, [konnectorSlug, 'triggers', 'data'], []), + ...get( + newState, + [konnectorSlug, 'triggers', 'data'], + [] + ).filter(({ _id }) => _id !== doc._id), doc ], [triggerId]: {
fix: Don't duplicate connections trigger data
cozy_cozy-home
train
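The cozy-home commit above fixes duplicated trigger entries by filtering out any existing trigger with the same `_id` before appending the fresh document in the reducer. A Python sketch of that upsert pattern (the Redux-style state shape is simplified):

```python
def upsert_trigger(triggers, doc):
    """Replace any trigger with the same _id instead of appending a duplicate."""
    return [t for t in triggers if t["_id"] != doc["_id"]] + [doc]

state = [{"_id": "t1", "status": "old"}]
state = upsert_trigger(state, {"_id": "t1", "status": "new"})
assert state == [{"_id": "t1", "status": "new"}]   # no duplicate t1
```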
35815e1c316e8d30b153e027a4f89f11512f452a
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocalTxExecuter.java b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocalTxExecuter.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocalTxExecuter.java +++ b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocalTxExecuter.java @@ -27,7 +27,6 @@ import com.orientechnologies.orient.core.hook.ORecordHook; import com.orientechnologies.orient.core.id.ORecordId; import com.orientechnologies.orient.core.storage.OCluster; import com.orientechnologies.orient.core.storage.OPhysicalPosition; -import com.orientechnologies.orient.core.storage.ORawBuffer; import com.orientechnologies.orient.core.storage.OStorage; import com.orientechnologies.orient.core.tx.OTransaction; import com.orientechnologies.orient.core.tx.OTransactionAbstract; @@ -108,16 +107,13 @@ public class OStorageLocalTxExecuter { // DELETED throw new OTransactionException("Can't retrieve the updated record #" + iRid); - final ORawBuffer buffer = new ORawBuffer(storage.getDataSegment(ppos.dataSegment).getRecord(ppos.dataPosition), ppos.version, - ppos.type); - // MVCC TRANSACTION: CHECK IF VERSION IS THE SAME - if (iVersion > -1 && buffer.version != iVersion) + if (iVersion > -1 && ppos.version != iVersion) throw new OConcurrentModificationException( "Can't update the record " + iRid + " because the version is not the latest one. Probably you are updating an old record or it has been modified by another user (db=v" - + buffer.version + " your=v" + iVersion + ")"); + + ppos.version + " your=v" + iVersion + ")"); final ODataLocal dataSegment = storage.getDataSegment(storage.getDataSegmentForRecord(iClusterSegment, iContent));
Fix by Sylvain to improve record updates in transaction context
orientechnologies_orientdb
train
1a17513a4d8793838ac729b1feb186fad06af775
diff --git a/lib/page_title_helper.rb b/lib/page_title_helper.rb index <HASH>..<HASH> 100644 --- a/lib/page_title_helper.rb +++ b/lib/page_title_helper.rb @@ -7,7 +7,7 @@ module PageTitleHelper def self.all self.instance_methods(false).sort end - + def self.interpolate(pattern, *args) all.reverse.inject(pattern.dup) do |result, tag| result.gsub(/:#{tag}/) do |match| @@ -25,6 +25,11 @@ module PageTitleHelper end end + # Add new, custom, interpolation. + def self.interpolates(key, &block) + Interpolations.send(:define_method, key, &block) + end + def page_title(options = nil, &block) if block_given? page_title = yield
changed behaviour so that interpolations can be added more easily
lwe_page_title_helper
train
aace022ee2da8922b425724b81476393e3d0a7cb
diff --git a/sonar-core/src/test/java/org/sonar/core/issue/db/IssueDaoTest.java b/sonar-core/src/test/java/org/sonar/core/issue/db/IssueDaoTest.java index <HASH>..<HASH> 100644 --- a/sonar-core/src/test/java/org/sonar/core/issue/db/IssueDaoTest.java +++ b/sonar-core/src/test/java/org/sonar/core/issue/db/IssueDaoTest.java @@ -24,6 +24,7 @@ import com.google.common.base.Function; import com.google.common.collect.Iterables; import org.apache.ibatis.executor.result.DefaultResultHandler; import org.junit.Before; +import org.junit.Ignore; import org.junit.Test; import org.sonar.api.issue.IssueQuery; import org.sonar.api.rule.RuleKey; @@ -332,6 +333,7 @@ public class IssueDaoTest extends AbstractDaoTestCase { } @Test + @Ignore public void should_select_issues_all_columns() { setupData("shared", "should_return_all_columns");
Temporary ignore failing unit test on Oracle
SonarSource_sonarqube
train
65324f434bceca29ca57a08f204b45d216d95443
diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -27,7 +27,7 @@ }, "peerDependencies": { "jquery": "3.3.1", - "ol": "5.2.0" + "ol": "5.3.0" }, "dependencies": { "better-scroll": "1.13.2", @@ -54,7 +54,7 @@ "http-server": "0.11.1", "jquery": "3.3.1", "mocha": "5.2.0", - "ol": "5.2.0", + "ol": "5.3.0", "phantomjs-prebuilt": "2.1.14", "selenium-webdriver": "3.6.0", "wait-on": "3.1.0" diff --git a/src/configurators/LayerConfigurator.js b/src/configurators/LayerConfigurator.js index <HASH>..<HASH> 100644 --- a/src/configurators/LayerConfigurator.js +++ b/src/configurators/LayerConfigurator.js @@ -301,9 +301,11 @@ import { Attributions } from '../Attributions' /** * A vector source config. * @typedef {SourceConfig} VectorSourceConfig - * @property {string} [loadingStrategy] "BBOX" or "ALL" - * @property {number} [bboxRatio] only applies if loadingStrategy is BBOX. If bigger than 1 this much more will be - * loaded around a bbox. + * @property {string} [loadingStrategy='ALL'] Either 'BBOX', 'ALL' or 'TILE' + * If BBOX or TILE the given url has to contain the parameters {minx}, {miny}, {maxx}, {maxy}. + * @property {number} [bboxRatio=1] If set the bbox loading strategy will increase the load extent by this factor + * @property {module:ol/proj~ProjectionLike} [urlProjection] coordinates will be inserted into the url in this format. + * defaults to the sourceProjection * @property {boolean} [localised=false] if set to true the loader will send accept-language headers. */ diff --git a/src/configurators/LayerFactory.js b/src/configurators/LayerFactory.js index <HASH>..<HASH> 100644 --- a/src/configurators/LayerFactory.js +++ b/src/configurators/LayerFactory.js @@ -296,11 +296,7 @@ export class LayerFactory extends Observable { this.configureLayerSourceLoadingStrategy_(optionsCopy.source) optionsCopy.source.url = URL.extractFromConfig(optionsCopy.source, 'url', undefined, this.map_) // not finalized - if (superType === SuperType.QUERYLAYER) { - this.superTypeNotSupported(layerType, superType) - } else { - optionsCopy.source = new ArcGISRESTFeatureSource(optionsCopy.source) - } + optionsCopy.source = new ArcGISRESTFeatureSource(optionsCopy.source) layer = new VectorLayer(optionsCopy) break diff --git a/src/search/connectors/G4UServerSearchConnector.js b/src/search/connectors/G4UServerSearchConnector.js index <HASH>..<HASH> 100644 --- a/src/search/connectors/G4UServerSearchConnector.js +++ b/src/search/connectors/G4UServerSearchConnector.js @@ -41,7 +41,6 @@ export class G4UServerSearchConnector extends SearchConnector { .expandTemplate('layerconfigid', layerConfigId) .finalize() - $.ajax({ url: finalUrl, dataType: 'text', diff --git a/tests/measurementButton_spec.js b/tests/measurementButton_spec.js index <HASH>..<HASH> 100644 --- a/tests/measurementButton_spec.js +++ b/tests/measurementButton_spec.js @@ -16,11 +16,10 @@ function getPixelFromCoordinate (coordinate) { describe('measurementButton', function () { // before and after /////////////////////////////////////////////////////// - + this.timeout(config.mochaTimeout) let driver before(function () { - this.timeout(config.mochaTimeout) driver = customDriver() driver.manage().window().setSize(1200, 800) driver.manage().setTimeouts(config.seleniumTimeouts)
bumped ol version and linting
KlausBenndorf_guide4you
train
1b85e6005e520dabfba01dece3202afa0d608d19
diff --git a/switchboard.py b/switchboard.py index <HASH>..<HASH> 100644 --- a/switchboard.py +++ b/switchboard.py @@ -1,4 +1,4 @@ class switchboard(): def __init__(self, filelist): self.filelist = filelist - print 'created switchboard' + print 'created switchboard' diff --git a/torrent.py b/torrent.py index <HASH>..<HASH> 100644 --- a/torrent.py +++ b/torrent.py @@ -32,10 +32,16 @@ class torrent(): raise ValueError('Torrent file has bad hash') self.bitfield.setall(False) self.reactor = reactor.Reactor() - self.multifile = True if self.torrent_dict['info']['files'] else False - print self.multifile - self.outfile = open('{}'.format(self.torrent_dict['info']['name']), - 'w') + if 'files' in self.torrent_dict['info']: + self.multifile = True + else: + self.multifile = False + + if self.multifile: + self.outfile = switchboard(self.torrent_dict['info']['files']) + else: + outfile = open('{}'.format(self.torrent_dict['info']['name']), 'w') + self.outfile = outfile @property def piece_length(self):
Specify outfile based on multifile t/f
jefflovejapan_drench
train
7204b84b9d67ef820ee582b97c5baba0b63330d9
diff --git a/tests/avro/test_cached_client.py b/tests/avro/test_cached_client.py index <HASH>..<HASH> 100644 --- a/tests/avro/test_cached_client.py +++ b/tests/avro/test_cached_client.py @@ -138,16 +138,16 @@ class TestCacheSchemaRegistryClient(unittest.TestCase): self.assertTupleEqual(('/path/to/cert', '/path/to/key'), self.client._session.cert) def test_cert_path(self): - self.client = CachedSchemaRegistryClient(url='https://127.0.0.1:65534', - ca_location='/path/to/ca') - self.assertEqual('/path/to/ca', self.client._session.verify) + self.client = CachedSchemaRegistryClient(url='https://127.0.0.1:65534', + ca_location='/path/to/ca') + self.assertEqual('/path/to/ca', self.client._session.verify) def test_context(self): - with self.client as c: - parsed = avro.loads(data_gen.BASIC_SCHEMA) - schema_id = c.register('test', parsed) - self.assertTrue(schema_id > 0) - self.assertEqual(len(c.id_to_schema), 1) + with self.client as c: + parsed = avro.loads(data_gen.BASIC_SCHEMA) + schema_id = c.register('test', parsed) + self.assertTrue(schema_id > 0) + self.assertEqual(len(c.id_to_schema), 1) def test_init_with_dict(self): self.client = CachedSchemaRegistryClient({
Fix flake8 over-indentation
confluentinc_confluent-kafka-python
train
eb1016363235a7dea130bfcd05ecd93c9bbb951d
diff --git a/h2o-py/tests/testdir_algos/automl/pyunit_automl_args.py b/h2o-py/tests/testdir_algos/automl/pyunit_automl_args.py index <HASH>..<HASH> 100644 --- a/h2o-py/tests/testdir_algos/automl/pyunit_automl_args.py +++ b/h2o-py/tests/testdir_algos/automl/pyunit_automl_args.py @@ -259,14 +259,14 @@ def test_stacked_ensembles_are_trained_after_timeout(): print("Check that Stacked Ensembles are still trained after timeout") max_runtime_secs = 20 ds = import_dataset() - aml = H2OAutoML(project_name="py_aml_SE_after_timeout", seed=1, max_runtime_secs=max_runtime_secs, exclude_algos=['DeepLearning']) + aml = H2OAutoML(project_name="py_aml_SE_after_timeout", seed=1, max_runtime_secs=max_runtime_secs, exclude_algos=['XGBoost', 'DeepLearning']) start = time.time() aml.train(y=ds['target'], training_frame=ds['train']) end = time.time() assert end-start - max_runtime_secs > 0 _, _, se = get_partitioned_model_names(aml.leaderboard) - assert len(se) == 2, "StackedEnsemble should still be trained after timeout" + assert len(se) > 0, "StackedEnsemble should still be trained after timeout" # we don't need to test if all SEs are built, there may be only one if just one model type was built. def test_automl_stops_after_max_models():
fixing SE in AutoML test failing intermittently (probably when only XGB models were built in the allocated time) (#<I>)
h2oai_h2o-3
train
86227552140a3db0e8be61d9d27166d20b85fa84
diff --git a/lxd/db/db_internal_test.go b/lxd/db/db_internal_test.go index <HASH>..<HASH> 100644 --- a/lxd/db/db_internal_test.go +++ b/lxd/db/db_internal_test.go @@ -63,7 +63,7 @@ func (s *dbTestSuite) CreateTestDb() (*Cluster, func()) { s.Nil(err) } - db, cleanup := NewTestCluster(s.T().(*testing.T)) + db, cleanup := NewTestCluster(s.T()) return db, cleanup }
lxd/db: Revert 3da5aea1 fix, since in turn testify reverted the change
lxc_lxd
train
2443bbf9f81b61b3b05ce34000e6e2a76f3f6406
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -25,6 +25,12 @@ So far, the following subpackages are available: * distance-to-polygon border calculation. +* **eobox.ml** contains machine learning related tools, e.g. + + * plotting a confusion matrix including with precision and recall + + * extended predict function which returns prediction, confidences, and probabilities. + ## Installation diff --git a/eobox/__init__.py b/eobox/__init__.py index <HASH>..<HASH> 100644 --- a/eobox/__init__.py +++ b/eobox/__init__.py @@ -1,15 +1,11 @@ -__version__ = "0.3.8" +__version__ = "0.3.9" from . import sampledata -from .sampledata import get_dataset from . import raster -from .raster import extract -from .raster import load_extracted -from .raster import MultiRasterIO from . import vector -from .vector import convert_polygons_to_lines -from .vector import calc_distance_to_border + +from . import ml __path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/eobox/raster/__init__.py b/eobox/raster/__init__.py index <HASH>..<HASH> 100644 --- a/eobox/raster/__init__.py +++ b/eobox/raster/__init__.py @@ -13,6 +13,26 @@ For more information on the package content, visit [readthedocs](https://eo-box. """ from .extraction import extract +from .extraction import get_paths_of_extracted from .extraction import load_extracted +from .extraction import add_vector_data_attributes_to_extracted +from .extraction import load_extracted_partitions +from .extraction import convert_df_to_geodf +from .extraction import load_extracted_dask +from .extraction import load_extracted_partitions_dask + from .rasterprocessing import MultiRasterIO from .rasterprocessing import windows_from_blocksize +from .rasterprocessing import window_from_window +from .rasterprocessing import create_distance_to_raster_border + +from .gdalutils import reproject_on_template_raster +from .gdalutils import rasterize + +from .cube import EOCube +from .cube import EOCubeSceneCollection +from .cube import create_virtual_time_series +from .cube import create_statistical_metrics + +from .utils import dtype_checker_df +from .utils import cleanup_df_values_for_given_dtype
adjusted imports in inits + readme ext
benmack_eo-box
train
9314ae8e8080fc09a5b31244271a192ee355c9b5
diff --git a/influxql/functions.go b/influxql/functions.go index <HASH>..<HASH> 100644 --- a/influxql/functions.go +++ b/influxql/functions.go @@ -339,6 +339,8 @@ type FloatHoltWintersReducer struct { // Interval between points interval int64 + // interval / 2 -- used to perform rounding + halfInterval int64 // Whether to include all data or only future values includeFitData bool @@ -376,6 +378,7 @@ func NewFloatHoltWintersReducer(h, m int, includeFitData bool, interval time.Dur seasonal: seasonal, includeFitData: includeFitData, interval: int64(interval), + halfInterval: int64(interval) / 2, optim: neldermead.New(), epsilon: defaultEpsilon, } @@ -399,7 +402,15 @@ func (r *FloatHoltWintersReducer) AggregateInteger(p *IntegerPoint) { } func (r *FloatHoltWintersReducer) roundTime(t int64) int64 { - return r.interval * ((t + r.interval/2) / r.interval) + // Overflow safe round function + remainder := t % r.interval + if remainder > r.halfInterval { + // Round up + return (t/r.interval + 1) * r.interval + } else { + // Round down + return (t / r.interval) * r.interval + } } func (r *FloatHoltWintersReducer) Emit() []FloatPoint { diff --git a/influxql/functions_test.go b/influxql/functions_test.go index <HASH>..<HASH> 100644 --- a/influxql/functions_test.go +++ b/influxql/functions_test.go @@ -3,6 +3,7 @@ package influxql_test import ( "math" "testing" + "time" "github.com/influxdata/influxdb/influxql" ) @@ -318,3 +319,68 @@ func TestHoltWinters_USPopulation_Missing(t *testing.T) { } } } +func TestHoltWinters_RoundTime(t *testing.T) { + maxTime := time.Unix(0, influxql.MaxTime).Round(time.Second).UnixNano() + data := []influxql.FloatPoint{ + {Time: maxTime - int64(5*time.Second+50*time.Millisecond), Value: 1}, + {Time: maxTime - int64(4*time.Second+103*time.Millisecond), Value: 10}, + {Time: maxTime - int64(3*time.Second+223*time.Millisecond), Value: 2}, + {Time: maxTime - int64(2*time.Second+481*time.Millisecond), Value: 11}, + } + hw := influxql.NewFloatHoltWintersReducer(2, 2, true, time.Second) + for _, p := range data { + hw.AggregateFloat(&p) + } + points := hw.Emit() + + forecasted := []influxql.FloatPoint{ + {Time: maxTime - int64(5*time.Second), Value: 1}, + {Time: maxTime - int64(4*time.Second), Value: 10.499068390422073}, + {Time: maxTime - int64(3*time.Second), Value: 2.002458220927272}, + {Time: maxTime - int64(2*time.Second), Value: 10.499826428426315}, + {Time: maxTime - int64(1*time.Second), Value: 2.898110014107811}, + {Time: maxTime - int64(0*time.Second), Value: 10.499786614238138}, + } + + if exp, got := len(forecasted), len(points); exp != got { + t.Fatalf("unexpected number of points emitted: got %d exp %d", got, exp) + } + for i := range forecasted { + if exp, got := forecasted[i].Time, points[i].Time; got != exp { + t.Errorf("unexpected time on points[%d] got %v exp %v", i, got, exp) + } + if exp, got := forecasted[i].Value, points[i].Value; !almostEqual(got, exp) { + t.Errorf("unexpected value on points[%d] got %v exp %v", i, got, exp) + } + } +} + +func TestHoltWinters_MaxTime(t *testing.T) { + data := []influxql.FloatPoint{ + {Time: influxql.MaxTime - 1, Value: 1}, + {Time: influxql.MaxTime, Value: 2}, + } + hw := influxql.NewFloatHoltWintersReducer(1, 0, true, 1) + for _, p := range data { + hw.AggregateFloat(&p) + } + points := hw.Emit() + + forecasted := []influxql.FloatPoint{ + {Time: influxql.MaxTime - 1, Value: 1}, + {Time: influxql.MaxTime, Value: 2.0058478778784132}, + {Time: influxql.MaxTime + 1, Value: 3.9399400964478106}, + } + + if exp, got := 
len(forecasted), len(points); exp != got { + t.Fatalf("unexpected number of points emitted: got %d exp %d", got, exp) + } + for i := range forecasted { + if exp, got := forecasted[i].Time, points[i].Time; got != exp { + t.Errorf("unexpected time on points[%d] got %v exp %v", i, got, exp) + } + if exp, got := forecasted[i].Value, points[i].Value; !almostEqual(got, exp) { + t.Errorf("unexpected value on points[%d] got %v exp %v", i, got, exp) + } + } +} diff --git a/influxql/iterator.go b/influxql/iterator.go index <HASH>..<HASH> 100644 --- a/influxql/iterator.go +++ b/influxql/iterator.go @@ -23,7 +23,7 @@ const ( // MaxTime is used as the maximum time value when computing an unbounded range. // This time is Jan 1, 2050 at midnight UTC. - MaxTime = models.MaxNanoTime + MaxTime = models.MaxNanoTime - 1 ) // Iterator represents a generic interface for all Iterators.
fix overflow in window iterator and holt winters roundTime
influxdata_influxdb
train
926f6e23c29167ff71585b89a3e4a7b6771759c8
diff --git a/rabird/windows_fix.py b/rabird/windows_fix.py index <HASH>..<HASH> 100644 --- a/rabird/windows_fix.py +++ b/rabird/windows_fix.py @@ -87,8 +87,13 @@ class stdout_thread_t(threading.Thread): self.screen_buffer.WriteConsole(s) except win32console.error: self.screen_buffer = None + + # If we could not output through screen buffer, we + # convert the unicode string to python native string, + # and write directly to old stndard output. + self.old_stdout.write(str(s)) else: - self.old_stdout.write(s) + self.old_stdout.write(str(s)) else : self.old_stdout.write(s)
Fixed unicode strings not being output after an error is thrown from the screen buffer object.
starofrainnight_rabird.core
train
632f2842d94cc8128701f230442f619030d7ce8e
diff --git a/src/Domains/Resource/Relation/Types/MorphMany.php b/src/Domains/Resource/Relation/Types/MorphMany.php index <HASH>..<HASH> 100644 --- a/src/Domains/Resource/Relation/Types/MorphMany.php +++ b/src/Domains/Resource/Relation/Types/MorphMany.php @@ -5,11 +5,15 @@ namespace SuperV\Platform\Domains\Resource\Relation\Types; use Illuminate\Database\Eloquent\Relations\MorphMany as EloquentMorphMany; use Illuminate\Database\Eloquent\Relations\Relation as EloquentRelation; use SuperV\Platform\Domains\Database\Model\Contracts\EntryContract; +use SuperV\Platform\Domains\Resource\Action\ModalAction; +use SuperV\Platform\Domains\Resource\Contracts\ProvidesForm; use SuperV\Platform\Domains\Resource\Contracts\ProvidesTable; +use SuperV\Platform\Domains\Resource\Form\Form; +use SuperV\Platform\Domains\Resource\Form\FormConfig; use SuperV\Platform\Domains\Resource\Relation\Relation; use SuperV\Platform\Domains\Resource\Table\ResourceTable; -class MorphMany extends Relation implements ProvidesTable +class MorphMany extends Relation implements ProvidesTable, ProvidesForm { protected function newRelationQuery(EntryContract $relatedEntryInstance): EloquentRelation { @@ -28,6 +32,21 @@ class MorphMany extends Relation implements ProvidesTable ->setResource($this->getRelatedResource()) ->setQuery($this) ->setDataUrl(url()->current().'/data') - ->mergeFields($this->getPivotFields()); + ->addContextAction( + ModalAction::make('New '.str_singular(str_unslug($this->getName()))) + ->setModalUrl($this->route('create', $this->parentEntry)) + ); +// ->mergeFields($this->getPivotFields()); + } + + public function makeForm(): Form + { + return FormConfig::make($this->newQuery()->make()) + ->hideField(sv_resource($this->parentEntry)->getResourceKey().'_id') + ->makeForm(); + } + + public function getFormTitle(): string + { } } \ No newline at end of file
Add form and table action for morphmany
superv_platform
train
e440017f52448e1a587c534a802ade012b1e0948
diff --git a/variant_lexer.go b/variant_lexer.go index <HASH>..<HASH> 100644 --- a/variant_lexer.go +++ b/variant_lexer.go @@ -155,7 +155,8 @@ func varLexNormal(l *varLexer) lexState { l.emit(tokBool) continue } - } else if l.pos+5 <= len(l.input) { + } + if l.pos+5 <= len(l.input) { if l.input[l.pos:l.pos+5] == "false" { l.pos += 5 l.emit(tokBool) diff --git a/variant_parser.go b/variant_parser.go index <HASH>..<HASH> 100644 --- a/variant_parser.go +++ b/variant_parser.go @@ -220,11 +220,13 @@ func varNumAs(s string, sig Signature) (interface{}, error) { return nil, varTypeError{s, sig} } base := 10 - if strings.HasPrefix(s, "0") { - base = 8 - } if strings.HasPrefix(s, "0x") { base = 16 + s = s[2:] + } + if strings.HasPrefix(s, "0") && len(s) != 1 { + base = 8 + s = s[1:] } if isUnsigned { i, err := strconv.ParseUint(s, base, size) @@ -615,8 +617,8 @@ func (n dictNode) Sigs() sigSet { func (n dictNode) Value(sig Signature) (interface{}, error) { set := n.Sigs() if set.Empty() { - // no type intofmation -> empty dict - return reflect.MakeMap(typeFor(sig.str)), nil + // no type information -> empty dict + return reflect.MakeMap(typeFor(sig.str)).Interface(), nil } if !set[sig] { return nil, varTypeError{n.String(), sig} diff --git a/variant_test.go b/variant_test.go index <HASH>..<HASH> 100644 --- a/variant_test.go +++ b/variant_test.go @@ -34,9 +34,14 @@ var variantParseTests = []struct { v interface{} }{ {"1", int32(1)}, - {`"foo"`, "foo"}, {"true", true}, + {"false", false}, {"1.0", float64(1.0)}, + {"0x10", int32(16)}, + {"1e1", float64(10)}, + {`"foo"`, "foo"}, + {`"\a\b\f\n\r\t"`, "\x07\x08\x0c\n\r\t"}, + {`"\u00e4\U0001f603"`, "\u00e4\U0001f603"}, {"[1]", []int32{1}}, {"[1, 2, 3]", []int32{1, 2, 3}}, {"@ai []", []int32{}}, @@ -48,11 +53,12 @@ var variantParseTests = []struct { {`[[], [""]]`, [][]string{{}, {""}}}, {`@a{ss} {}`, map[string]string{}}, {`{"foo": 1}`, map[string]int32{"foo": 1}}, + {`[{}, {"foo": "bar"}]`, []map[string]string{{}, {"foo": "bar"}}}, {`{"a": <1>, "b": <"foo">}`, map[string]Variant{"a": MakeVariant(int32(1)), "b": MakeVariant("foo")}}, {`b''`, []byte{0}}, {`b"abc"`, []byte{'a', 'b', 'c', 0}}, - {`b"\x01\0002\n"`, []byte{1, 2, '\n', 0}}, + {`b"\x01\0002\a\b\f\n\r\t"`, []byte{1, 2, 0x7, 0x8, 0xc, '\n', '\r', '\t', 0}}, {`[[0], b""]`, [][]byte{{0}, {0}}}, {"int16 0", int16(0)}, {"byte 0", byte(0)},
Fix bugs and add tests for variant parsing The following cases are now handled properly: - "false" (yeah, really) - integers in octal / hexadecimal format - empty dictionaries with inferred type information
guelfey_go.dbus
train
9ce8d788351533b0c8ac9d456ff466b5cd56c533
diff --git a/CHANGELOG.md b/CHANGELOG.md index <HASH>..<HASH> 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ - [#694](https://github.com/influxdata/telegraf/pull/694): DNS Query input, thanks @mjasion! ### Bugfixes +- [#701](https://github.com/influxdata/telegraf/pull/701): output write count shouldnt print in quiet mode. ## v0.10.3 [2016-02-18] diff --git a/agent/agent.go b/agent/agent.go index <HASH>..<HASH> 100644 --- a/agent/agent.go +++ b/agent/agent.go @@ -44,6 +44,8 @@ func NewAgent(config *config.Config) (*Agent, error) { // Connect connects to all configured outputs func (a *Agent) Connect() error { for _, o := range a.Config.Outputs { + o.Quiet = a.Config.Agent.Quiet + switch ot := o.Output.(type) { case telegraf.ServiceOutput: if err := ot.Start(); err != nil { diff --git a/internal/config/config.go b/internal/config/config.go index <HASH>..<HASH> 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -429,7 +429,6 @@ func (c *Config) addOutput(name string, table *ast.Table) error { ro.MetricBufferLimit = c.Agent.MetricBufferLimit } ro.FlushBufferWhenFull = c.Agent.FlushBufferWhenFull - ro.Quiet = c.Agent.Quiet c.Outputs = append(c.Outputs, ro) return nil }
Set running output quiet mode in agent connect func closes #<I>
influxdata_telegraf
train
38d0a0ee43492548da950ce85eaf7fbe5722a809
diff --git a/plugins/API/ProcessedReport.php b/plugins/API/ProcessedReport.php index <HASH>..<HASH> 100644 --- a/plugins/API/ProcessedReport.php +++ b/plugins/API/ProcessedReport.php @@ -356,9 +356,25 @@ class ProcessedReport $order[] = Piwik::translate($category); } } - return ($category = strcmp(array_search($a['category'], $order), array_search($b['category'], $order))) == 0 - ? (@$a['order'] < @$b['order'] ? -1 : 1) - : $category; + + $posA = array_search($a['category'], $order); + $posB = array_search($b['category'], $order); + + if ($posA === false && $posB === false) { + return strcmp($a['category'], $b['category']); + } elseif ($posA === false) { + return 1; + } elseif ($posB === false) { + return -1; + } + + $category = strcmp($posA, $posB); + + if ($category == 0) { + return (@$a['order'] < @$b['order'] ? -1 : 1); + } + + return $category; } public function getProcessedReport($idSite, $period, $date, $apiModule, $apiAction, $segment = false,
make sure to sort 3rd party reports correctly
matomo-org_matomo
train
d100fd02fcf3ec96e6b79eabae732955121eda38
diff --git a/demos/console.php b/demos/console.php index <HASH>..<HASH> 100644 --- a/demos/console.php +++ b/demos/console.php @@ -5,7 +5,6 @@ require 'init.php'; class Test extends \atk4\data\Model { use \atk4\core\DebugTrait; - use \atk4\core\AppScopeTrait; public function generateReport() {
hotfix <I> issue with console demo
atk4_ui
train
ddc08eaf99f679c59244439414702861774afa62
diff --git a/client/extensions/woocommerce/app/products/product-header.js b/client/extensions/woocommerce/app/products/product-header.js index <HASH>..<HASH> 100644 --- a/client/extensions/woocommerce/app/products/product-header.js +++ b/client/extensions/woocommerce/app/products/product-header.js @@ -27,7 +27,7 @@ function renderTrashButton( onTrash, product, isBusy, translate ) { return onTrash && ( <Button borderless scary onClick={ onTrash }> <Gridicon icon="trash" /> - <span>{ translate( 'Trash' ) } </span> + <span>{ translate( 'Delete' ) } </span> </Button> ); } diff --git a/client/extensions/woocommerce/app/products/product-update.js b/client/extensions/woocommerce/app/products/product-update.js index <HASH>..<HASH> 100644 --- a/client/extensions/woocommerce/app/products/product-update.js +++ b/client/extensions/woocommerce/app/products/product-update.js @@ -4,16 +4,19 @@ import React, { PropTypes } from 'react'; import { connect } from 'react-redux'; import { bindActionCreators } from 'redux'; +import { debounce } from 'lodash'; import { localize } from 'i18n-calypso'; /** * Internal dependencies */ +import accept from 'lib/accept'; import Main from 'components/main'; import SidebarNavigation from 'my-sites/sidebar-navigation'; +import { getLink } from 'woocommerce/lib/nav-utils'; import { successNotice, errorNotice } from 'state/notices/actions'; import { getActionList } from 'woocommerce/state/action-list/selectors'; -import { createProduct, fetchProduct } from 'woocommerce/state/sites/products/actions'; +import { createProduct, fetchProduct, deleteProduct as deleteProductAction } from 'woocommerce/state/sites/products/actions'; import { fetchProductCategories } from 'woocommerce/state/sites/product-categories/actions'; import { fetchProductVariations } from 'woocommerce/state/sites/product-variations/actions'; import { getSelectedSiteWithFallback } from 'woocommerce/state/sites/selectors'; @@ -27,6 +30,7 @@ import { editProductVariation } from 'woocommerce/state/ui/products/variations/a import { getProductVariationsWithLocalEdits } from 'woocommerce/state/ui/products/variations/selectors'; import { editProductCategory } from 'woocommerce/state/ui/product-categories/actions'; import { getProductCategoriesWithLocalEdits } from 'woocommerce/state/ui/product-categories/selectors'; +import page from 'page'; import ProductForm from './product-form'; import ProductHeader from './product-header'; @@ -77,8 +81,36 @@ class ProductUpdate extends React.Component { // TODO: Remove the product we added here from the edit state. } + // TODO: In v1, this deletes a product, as we don't have trash management. + // Once we have trashing management, we can introduce 'trash' instead. onTrash = () => { - // TODO: Add action dispatch to trash this product. + const { translate, site, product, deleteProduct } = this.props; + const areYouSure = translate( 'Are you sure you want to permanently delete \'%(name)s\'?', { + args: { name: product.name } + } ); + accept( areYouSure, function( accepted ) { + if ( ! accepted ) { + return; + } + const successAction = () => { + debounce( () => { + page.redirect( getLink( '/store/products/:site/', site ) ); + }, 1000 )(); + return successNotice( + translate( '%(product)s successfully deleted.', { + args: { product: product.name }, + } ) + ); + }; + const failureAction = () => { + return errorNotice( + translate( 'There was a problem deleting %(product)s. 
Please try again.', { + args: { product: product.name }, + } ) + ); + }; + deleteProduct( site.ID, product.id, successAction, failureAction ); + } ); } onSave = () => { @@ -160,6 +192,7 @@ function mapDispatchToProps( dispatch ) { { createProduct, createProductActionList, + deleteProduct: deleteProductAction, editProduct, editProductCategory, editProductAttribute, diff --git a/client/extensions/woocommerce/state/sites/products/actions.js b/client/extensions/woocommerce/state/sites/products/actions.js index <HASH>..<HASH> 100644 --- a/client/extensions/woocommerce/state/sites/products/actions.js +++ b/client/extensions/woocommerce/state/sites/products/actions.js @@ -118,7 +118,9 @@ export const deleteProduct = ( dispatch( deleteAction ); - return request( siteId ).del( `products/${ productId }` ) + // ?force=true deletes a product instead of trashing + // In v1, we don't have trash management. Later we can trash instead. + return request( siteId ).del( `products/${ productId }?force=true` ) .then( ( data ) => { dispatch( deleteProductSuccess( siteId, data ) ); if ( successAction ) {
WooCommerce: Connect the delete icon to the delete action. (#<I>)
Automattic_wp-calypso
train
ccde261bbb2aacc9eb8e481e3382697b1a450f19
diff --git a/lib/nmap/host.rb b/lib/nmap/host.rb index <HASH>..<HASH> 100644 --- a/lib/nmap/host.rb +++ b/lib/nmap/host.rb @@ -548,12 +548,12 @@ module Nmap # Converts the host to a String. # # @return [String] - # The address of the host. + # The hostname or address of the host. # # @see address # def to_s - address.to_s + (hostname || address).to_s end # diff --git a/spec/host_spec.rb b/spec/host_spec.rb index <HASH>..<HASH> 100644 --- a/spec/host_spec.rb +++ b/spec/host_spec.rb @@ -183,13 +183,23 @@ describe Host do end end - it "should convert to a String" do - expect(subject.to_s).to eq('74.207.244.221') + describe "#to_s" do + it "should return the first hostname" do + expect(subject.to_s).to eq('scanme.nmap.org') + end + + context "when #hostname returns nil" do + before { expect(subject).to receive(:hostname).and_return(nil) } + + it "should return the first address" do + expect(subject.to_s).to eq('74.207.244.221') + end + end end describe "#inspect" do - it "should include the address" do - expect(subject.inspect).to include(subject.address) + it "should include the String representation of the host" do + expect(subject.inspect).to include(subject.to_s) end end
Adjusted Host#to_s to use #hostname.to_s first, then #address.to_s.
sophsec_ruby-nmap
train
6e6250d49feb32a002b91d02322bdcd6dc9ec1e9
diff --git a/src/Validation/Validator.php b/src/Validation/Validator.php index <HASH>..<HASH> 100644 --- a/src/Validation/Validator.php +++ b/src/Validation/Validator.php @@ -362,7 +362,7 @@ class Validator implements \ArrayAccess, \IteratorAggregate, \Countable { * $message = 'This field cannot be empty'; * $validator->notEmpty('email'); // Email cannot be empty * $validator->notEmpty('email', $message, 'create'); // Email can be empty on update - * $validator->notEmpty('email', $message, update); // Email can be empty on create + * $validator->notEmpty('email', $message, 'update'); // Email can be empty on create * }}} * * It is possible to conditionally disallow emptiness on a field by passing a callback
Fix example code in docblock.
cakephp_cakephp
train
b4b4aacda409340c770055e20f56dad1c1df45c3
diff --git a/CHANGELOG.md b/CHANGELOG.md index <HASH>..<HASH> 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,9 @@ +Version 0.4.1 +============= + +* Option to pass *allowed tags* to the `bleach` template filter added by Rafał Selewońko. +* Moved project to Github. + Version 0.4.0 ============= diff --git a/README.rst b/README.rst index <HASH>..<HASH> 100644 --- a/README.rst +++ b/README.rst @@ -117,7 +117,7 @@ where most of the work is done. In your templates ***************** -If you have a peice of content from somewhere that needs to be printed in a +If you have a piece of content from somewhere that needs to be printed in a template, you can use the ``bleach`` filter:: {% load bleach_tags %} @@ -130,8 +130,18 @@ as a parameter to the filter:: {{ some_unsafe_content|bleach:"p,span" }} +There is also ``bleach_linkify`` which uses the linkify_ function of bleach +which converts URL-like strings in an HTML fragment to links + +This function converts strings that look like URLs, domain names and email +addresses in text that may be an HTML fragment to links, while preserving: + +1. links already in the string +2. urls found in attributes +3. email addresses .. _bleach: https://github.com/jsocol/bleach "Bleach" .. _bleach documentation: https://github.com/jsocol/bleach/blob/master/README.rst "Bleach documentation - parameters" .. _django-ckeditor: https://github.com/shaunsephton/django-ckeditor "Django CKEditor widget" +.. _linkify: https://bleach.readthedocs.io/en/latest/linkify.html?highlight=linkify#bleach.linkify "linkify" diff --git a/django_bleach/templatetags/bleach_tags.py b/django_bleach/templatetags/bleach_tags.py index <HASH>..<HASH> 100644 --- a/django_bleach/templatetags/bleach_tags.py +++ b/django_bleach/templatetags/bleach_tags.py @@ -22,6 +22,7 @@ for setting, kwarg in possible_settings.items(): bleach_args[kwarg] = getattr(settings, setting) +@register.filter(name='bleach') def bleach_value(value, tags=None): if tags is not None: args = bleach_args.copy() @@ -31,9 +32,17 @@ def bleach_value(value, tags=None): bleached_value = bleach.clean(value, **args) return mark_safe(bleached_value) -register.filter('bleach', bleach_value) - @register.filter def bleach_linkify(value): + """ + Convert URL-like strings in an HTML fragment to links + + This function converts strings that look like URLs, domain names and email + addresses in text that may be an HTML fragment to links, while preserving: + + 1. links already in the string + 2. urls found in attributes + 3. email addresses + """ return bleach.linkify(value, parse_email=True) diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -48,9 +48,9 @@ copyright = u'2012, Tim Heap' # built documents. # # The short X.Y version. -version = '0.3' +version = '0.4' # The full version, including alpha/beta/rc tags. -release = '0.3.0' +release = '0.4.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ with open(path.join(this_directory, 'README.rst'), encoding='utf-8') as f: setup( name='django-bleach', - version="0.4.0", + version="0.4.1", description='Easily use bleach with Django models and templates', long_description=long_description, author='Mark Walker',
Preparing <I> release
marksweb_django-bleach
train
2193124455497f7f80fb0f4b8ca711f5a374caf6
diff --git a/src/jdk.compiler/share/classes/com/sun/tools/javac/code/Scope.java b/src/jdk.compiler/share/classes/com/sun/tools/javac/code/Scope.java index <HASH>..<HASH> 100644 --- a/src/jdk.compiler/share/classes/com/sun/tools/javac/code/Scope.java +++ b/src/jdk.compiler/share/classes/com/sun/tools/javac/code/Scope.java @@ -29,8 +29,6 @@ import com.sun.tools.javac.code.Kinds.Kind; import java.lang.ref.WeakReference; import java.util.*; import java.util.function.BiConsumer; -import java.util.stream.Stream; -import java.util.stream.StreamSupport; import com.sun.tools.javac.code.Symbol.CompletionFailure; import com.sun.tools.javac.code.Symbol.TypeSymbol; @@ -40,6 +38,8 @@ import com.sun.tools.javac.util.List; import static com.sun.tools.javac.code.Scope.LookupKind.NON_RECURSIVE; import static com.sun.tools.javac.code.Scope.LookupKind.RECURSIVE; +import static com.sun.tools.javac.util.Iterators.createCompoundIterator; +import static com.sun.tools.javac.util.Iterators.createFilterIterator; /** A scope represents an area of visibility in a Java program. The * Scope class is a container for symbols which provides @@ -898,7 +898,11 @@ public abstract class Scope { return tsym.members().getSymbols(sf, lookupKind); } }; - return si.importFrom((TypeSymbol) origin.owner) :: iterator; + List<Iterable<Symbol>> results = + si.importFrom((TypeSymbol) origin.owner, List.nil()); + return () -> createFilterIterator(createCompoundIterator(results, + Iterable::iterator), + s -> filter.accepts(origin, s)); } catch (CompletionFailure cf) { cfHandler.accept(imp, cf); return Collections.emptyList(); @@ -918,7 +922,11 @@ public abstract class Scope { return tsym.members().getSymbolsByName(name, sf, lookupKind); } }; - return si.importFrom((TypeSymbol) origin.owner) :: iterator; + List<Iterable<Symbol>> results = + si.importFrom((TypeSymbol) origin.owner, List.nil()); + return () -> createFilterIterator(createCompoundIterator(results, + Iterable::iterator), + s -> filter.accepts(origin, s)); } catch (CompletionFailure cf) { cfHandler.accept(imp, cf); return Collections.emptyList(); @@ -942,22 +950,19 @@ public abstract class Scope { public SymbolImporter(boolean inspectSuperTypes) { this.inspectSuperTypes = inspectSuperTypes; } - Stream<Symbol> importFrom(TypeSymbol tsym) { + List<Iterable<Symbol>> importFrom(TypeSymbol tsym, List<Iterable<Symbol>> results) { if (tsym == null || !processed.add(tsym)) - return Stream.empty(); + return results; - Stream<Symbol> result = Stream.empty(); if (inspectSuperTypes) { // also import inherited names - result = importFrom(types.supertype(tsym.type).tsym); + results = importFrom(types.supertype(tsym.type).tsym, results); for (Type t : types.interfaces(tsym.type)) - result = Stream.concat(importFrom(t.tsym), result); + results = importFrom(t.tsym, results); } - return Stream.concat(StreamSupport.stream(doLookup(tsym).spliterator(), false) - .filter(s -> filter.accepts(origin, s)), - result); + return results.prepend(doLookup(tsym)); } abstract Iterable<Symbol> doLookup(TypeSymbol tsym); } diff --git a/src/jdk.compiler/share/classes/com/sun/tools/javac/util/Iterators.java b/src/jdk.compiler/share/classes/com/sun/tools/javac/util/Iterators.java index <HASH>..<HASH> 100644 --- a/src/jdk.compiler/share/classes/com/sun/tools/javac/util/Iterators.java +++ b/src/jdk.compiler/share/classes/com/sun/tools/javac/util/Iterators.java @@ -28,6 +28,7 @@ package com.sun.tools.javac.util; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.function.Function; 
+import java.util.function.Predicate; /** Utilities for Iterators. * @@ -92,4 +93,32 @@ public class Iterators { return null; } }; + + public static <E> Iterator<E> createFilterIterator(Iterator<E> input, Predicate<E> test) { + return new Iterator<E>() { + private E current = update(); + private E update () { + while (input.hasNext()) { + E sym = input.next(); + if (test.test(sym)) { + return sym; + } + } + + return null; + } + @Override + public boolean hasNext() { + return current != null; + } + + @Override + public E next() { + E res = current; + current = update(); + return res; + } + }; + } + }
<I>: javac performance should be improved Summary: Avoiding unnecessary use of Stream.empty(). Reviewed-by: mcimadamore
google_error-prone-javac
train
a5b15833bf09cd5f3885a2e3d9e4d63fcd480d01
diff --git a/src/ConfigLoader.php b/src/ConfigLoader.php index <HASH>..<HASH> 100644 --- a/src/ConfigLoader.php +++ b/src/ConfigLoader.php @@ -26,6 +26,8 @@ class ConfigLoader implements DIContainerIncludeInterface protected $configDirs = []; + protected $settedPaths = []; + /** @var Config[] */ protected $config = []; @@ -42,7 +44,6 @@ class ConfigLoader implements DIContainerIncludeInterface } } - public function setConfigDirs(array $paths, $level = null) { $this->levels = []; @@ -56,10 +57,19 @@ class ConfigLoader implements DIContainerIncludeInterface public function addConfigDir($path, $level = ConfigDir::LEVEL_DEFAULT, array $params = null) { + if (array_key_exists($path, $this->settedPaths)) { + if ($this->settedPaths[$path] >= $level) { + return; + } else { + unset($this->settedPaths[$path]); + } + } + $this->paths[$level][$path] = $path; $this->levels[$level] = $level; $this->params[$level][$path] = $params; $this->config = []; + $this->settedPaths[$path] = $level; } public function attachConfigDir(ConfigDir $dir, $level = null) @@ -67,8 +77,21 @@ class ConfigLoader implements DIContainerIncludeInterface if (null !== $level) { $dir->setLevel($level); } - $this->configDirs[$dir->getLevel()][$dir->getPath()] = $dir; + + $level = $dir->getLevel(); + $path = $dir->getPath(); + + if (array_key_exists($path, $this->settedPaths)) { + if ($this->settedPaths[$path] >= $level) { + return; + } else { + unset($this->settedPaths[$path]); + } + } + + $this->configDirs[$level][$path] = $dir; $this->levels[$level] = $level; + $this->settedPaths[$path] = $level; } public function getLevels() @@ -116,7 +139,8 @@ class ConfigLoader implements DIContainerIncludeInterface if (!FileSystem::inDir($this->getRootDir(), $path)) { throw new PathRestrictException('Path %s not in Root Path', $path); } - $this->configDirs[$level][$path] = new ConfigDir($path, $level, $params); + $dir = new ConfigDir($path, $level, $params); + $this->configDirs[$level][$dir->getPath()] = $dir; } } unset($this->paths[$level]);
Do not add config dirs with same paths (only add if the new level is bigger).
mrdatamapper_akademiano-config
train
14eaa69a375f45765e510b0f8371e9f1c49845e1
diff --git a/exrex.py b/exrex.py index <HASH>..<HASH> 100644 --- a/exrex.py +++ b/exrex.py @@ -153,9 +153,6 @@ def _gen(d, limit=20, count=False): strings = (strings or 1) * _gen(items, limit, True) * (r2-r1) ret = subprods(ret, ran, items, limit) elif i[0] == 'branch': - # TODO - # for y in _gen(list(x), limit): - # ret = mappend(ret, y) if count: for x in i[1][1]: strings += _gen(x, limit, True) @@ -308,13 +305,6 @@ def argparser(): def __main__(): from sys import exit, stderr - # 'as(d|f)qw(e|r|s)[a-zA-Z]{2,3}' - # 'as(QWE|Z([XC]|Y|U)V){2,3}asdf' - # '.?' - # '.+' - # 'asdf.{1,4}qwer{2,5}' - # 'a(b)?(c)?(d)?' - # 'a[b][c][d]?[e]? args = argparser() if args['verbose']: args['output'].write('%r%s' % (parse(args['regex']), args['delimiter'])) @@ -329,11 +319,13 @@ def __main__(): except Exception as e: stderr.write('[!] Error: %s\n' % e) exit(1) - for s in g: - try: - args['output'].write(s+args['delimiter']) - except: - break + try: + args['output'].write(g.next()) + for s in g: + args['output'].write(args['delimiter']) + args['output'].write(s) + except: + pass if __name__ == '__main__': __main__()
[enh] command line print mods, some comments removed
asciimoo_exrex
train
235b9084526d735d3a5ed7a66fcde2317a85b326
diff --git a/seaglass/trunk/seaglass/src/main/java/com/seaglasslookandfeel/SeaGlassLookAndFeel.java b/seaglass/trunk/seaglass/src/main/java/com/seaglasslookandfeel/SeaGlassLookAndFeel.java index <HASH>..<HASH> 100644 --- a/seaglass/trunk/seaglass/src/main/java/com/seaglasslookandfeel/SeaGlassLookAndFeel.java +++ b/seaglass/trunk/seaglass/src/main/java/com/seaglasslookandfeel/SeaGlassLookAndFeel.java @@ -721,7 +721,7 @@ public class SeaGlassLookAndFeel extends NimbusLookAndFeel { * */ private void defineDesktopPanes(UIDefaults d) { - d.put("nimbusBase.DesktopPane", new ColorUIResource(90, 120, 200)); + d.put("seaGlassDesktopPane", new ColorUIResource(0x556ba6)); String c = PAINTER_DIRECTORY + ".DesktopPanePainter"; d.put("DesktopPane[Enabled].backgroundPainter", new LazyPainter(c, DesktopPanePainter.Which.BACKGROUND_ENABLED)); diff --git a/seaglass/trunk/seaglass/src/main/java/com/seaglasslookandfeel/painter/DesktopPanePainter.java b/seaglass/trunk/seaglass/src/main/java/com/seaglasslookandfeel/painter/DesktopPanePainter.java index <HASH>..<HASH> 100644 --- a/seaglass/trunk/seaglass/src/main/java/com/seaglasslookandfeel/painter/DesktopPanePainter.java +++ b/seaglass/trunk/seaglass/src/main/java/com/seaglasslookandfeel/painter/DesktopPanePainter.java @@ -27,7 +27,7 @@ import javax.swing.JComponent; import com.seaglasslookandfeel.painter.AbstractRegionPainter.PaintContext.CacheMode; /** - * Nimbus's DesktopPanePainter. + * Sea Glass's DesktopPanePainter. */ public final class DesktopPanePainter extends AbstractRegionPainter { public static enum Which { @@ -36,7 +36,7 @@ public final class DesktopPanePainter extends AbstractRegionPainter { private PaintContext ctx; - private Color desktopPaneColor = decodeColor("nimbusBase.DesktopPane", 0f, 0f, 0f, 0); + private Color desktopPaneColor = decodeColor("seaGlassDesktopPane"); public DesktopPanePainter(Which state) { super();
Regularize desktop pane background color.
khuxtable_seaglass
train
27bffcca6ff7aa61380f63fa526a05c438ca7492
diff --git a/lib/flor/punit/on.rb b/lib/flor/punit/on.rb index <HASH>..<HASH> 100644 --- a/lib/flor/punit/on.rb +++ b/lib/flor/punit/on.rb @@ -61,6 +61,7 @@ class Flor::Pro::On < Flor::Macro th = [ 'trap', [], l, *tree[3] ] th[1] << [ '_att', [ [ 'point', [], l ], [ '_sqs', 'signal', l ] ], l ] th[1] << [ '_att', [ [ 'name', [], l ], tname ], l ] + th[1] << [ '_att', [ [ 'payload', [], l ], [ '_sqs', 'event', l ] ], l ] atts.each { |ac| th[1] << Flor.dup(ac) } th[1] << [ 'set', [ diff --git a/spec/punit/on_spec.rb b/spec/punit/on_spec.rb index <HASH>..<HASH> 100644 --- a/spec/punit/on_spec.rb +++ b/spec/punit/on_spec.rb @@ -51,6 +51,8 @@ describe 'Flor punit' do on 'approve' push l sig push l msg.payload.ret + push l f.color + set f.color 'blue' signal 'approve' 'b' push l 'c'
Switch "on" to 'payload: "event"'
floraison_flor
train
8a19a4d43209fa5999aacd73b4f686fd17b4ed5f
diff --git a/src/main/java/com/metamx/emitter/core/HttpPostEmitter.java b/src/main/java/com/metamx/emitter/core/HttpPostEmitter.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/metamx/emitter/core/HttpPostEmitter.java +++ b/src/main/java/com/metamx/emitter/core/HttpPostEmitter.java @@ -166,7 +166,8 @@ public class HttpPostEmitter implements Flushable, Closeable, Emitter } emittingThread = new EmittingThread(config); concurrentBatch.set(new Batch(this, acquireBuffer(), 0)); - lastFillTimeMillis = config.minHttpTimeoutMillis; + // lastFillTimeMillis must not be 0, minHttpTimeoutMillis could be. + lastFillTimeMillis = Math.max(config.minHttpTimeoutMillis, 1); } @Override
lastFillTimeMillis must not be zero
metamx_java-util
train
fce4ff3cbaa966e3cc88cd019e0391e4ed5a42e4
diff --git a/src/actions.js b/src/actions.js index <HASH>..<HASH> 100644 --- a/src/actions.js +++ b/src/actions.js @@ -91,7 +91,6 @@ export const cut = (state: Types.StoreState<*>) => ({ }); export const paste = (state: Types.StoreState<*>) => { - console.log("paste"); const minPoint = PointSet.min(state.copied); type Accumulator = {| diff --git a/src/util.js b/src/util.js index <HASH>..<HASH> 100755 --- a/src/util.js +++ b/src/util.js @@ -103,7 +103,6 @@ export const getCellDimensions = ( ): ?Types.Dimensions => { const rowDimensions = state.rowDimensions[point.row]; const columnDimensions = state.columnDimensions[point.column]; - console.log(point, rowDimensions, columnDimensions) return ( rowDimensions && columnDimensions && { ...rowDimensions, ...columnDimensions }
Remove debugging console.logs
iddan_react-spreadsheet
train
db775859ec7ae823b731bfb04cb544ef3ae7ad3a
diff --git a/dygraph-utils.js b/dygraph-utils.js index <HASH>..<HASH> 100644 --- a/dygraph-utils.js +++ b/dygraph-utils.js @@ -1246,10 +1246,11 @@ Dygraph.isElementContainedBy = function(containee, container) { if (container === null || containee === null) { return false; } - while (containee && containee !== container) { - containee = containee.parentNode; + var containeeNode = /** @type {Node} */ (containee); + while (containeeNode && containeeNode !== container) { + containeeNode = containeeNode.parentNode; } - return (containee === container); + return (containeeNode === container); };
Closure fix; force cast from Element to Node.
danvk_dygraphs
train
d0dd0b75e74452495d9fcdf98ddaa3a2af09a0ed
diff --git a/shardingsphere-scaling/shardingsphere-scaling-core/src/test/java/org/apache/shardingsphere/scaling/core/job/position/resume/RepositoryResumeBreakPointManagerTest.java b/shardingsphere-scaling/shardingsphere-scaling-core/src/test/java/org/apache/shardingsphere/scaling/core/job/position/resume/RepositoryResumeBreakPointManagerTest.java index <HASH>..<HASH> 100644 --- a/shardingsphere-scaling/shardingsphere-scaling-core/src/test/java/org/apache/shardingsphere/scaling/core/job/position/resume/RepositoryResumeBreakPointManagerTest.java +++ b/shardingsphere-scaling/shardingsphere-scaling-core/src/test/java/org/apache/shardingsphere/scaling/core/job/position/resume/RepositoryResumeBreakPointManagerTest.java @@ -28,6 +28,7 @@ import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; +import static org.mockito.Mockito.atLeastOnce; import static org.mockito.Mockito.verify; @RunWith(MockitoJUnitRunner.class) @@ -53,12 +54,12 @@ public final class RepositoryResumeBreakPointManagerTest { @Test public void assertPersistIncrementalPosition() { repositoryResumeBreakPointManager.persistIncrementalPosition(); - verify(registryRepository).persist("/base/incremental", "{}"); + verify(registryRepository, atLeastOnce()).persist("/base/incremental", "{}"); } @Test public void assertPersistInventoryPosition() { repositoryResumeBreakPointManager.persistInventoryPosition(); - verify(registryRepository).persist("/base/inventory", "{\"unfinished\":{},\"finished\":[]}"); + verify(registryRepository, atLeastOnce()).persist("/base/inventory", "{\"unfinished\":{},\"finished\":[]}"); } }
Optimize scaling unit test. (#<I>) Co-authored-by: qiulu3 <Lucas<I>>
apache_incubator-shardingsphere
train
e421c1ed1b32ae2db9eba5ce7139a2aef3db63fd
diff --git a/src/main/java/pro/zackpollard/telegrambot/api/chat/message/send/SendableTextMessage.java b/src/main/java/pro/zackpollard/telegrambot/api/chat/message/send/SendableTextMessage.java index <HASH>..<HASH> 100644 --- a/src/main/java/pro/zackpollard/telegrambot/api/chat/message/send/SendableTextMessage.java +++ b/src/main/java/pro/zackpollard/telegrambot/api/chat/message/send/SendableTextMessage.java @@ -124,6 +124,7 @@ public class SendableTextMessage implements SendableMessage, ReplyingOptions, No /** * Appends text and makes it bold. Text is escaped. + * * @param text Text to append */ public SendableTextBuilder bold(String text) { @@ -132,6 +133,7 @@ public class SendableTextMessage implements SendableMessage, ReplyingOptions, No /** * Appends text and makes it italic. Text is escaped. + * * @param text Text to append */ public SendableTextBuilder italics(String text) { @@ -140,6 +142,7 @@ public class SendableTextMessage implements SendableMessage, ReplyingOptions, No /** * Appends an inline URL in the text + * * @param text Text to link. Text is escaped * @param link Link to reference */ @@ -149,6 +152,7 @@ public class SendableTextMessage implements SendableMessage, ReplyingOptions, No /** * Appends inline-code to the text + * * @param text Text to format. Escaped. */ public SendableTextBuilder code(String text) { @@ -157,6 +161,7 @@ public class SendableTextMessage implements SendableMessage, ReplyingOptions, No /** * Appends pre-formatted fixed-width code block. + * * @param text Text to format. Escaped */ public SendableTextBuilder preformatted(String text) {
Fix javadocs for text formatting additions
zackpollard_JavaTelegramBot-API
train
ae0f39d4abe15ed9690ac51c7e8e8a22fcf4a2db
diff --git a/gwtquery-core/src/main/java/com/google/gwt/query/client/plugins/widgets/WidgetsUtils.java b/gwtquery-core/src/main/java/com/google/gwt/query/client/plugins/widgets/WidgetsUtils.java index <HASH>..<HASH> 100644 --- a/gwtquery-core/src/main/java/com/google/gwt/query/client/plugins/widgets/WidgetsUtils.java +++ b/gwtquery-core/src/main/java/com/google/gwt/query/client/plugins/widgets/WidgetsUtils.java @@ -76,6 +76,16 @@ public class WidgetsUtils { } } + private static void hideAndAppend(Element oldElement, Element newElement) { + assert oldElement != null && newElement != null; + GQuery.$(oldElement).hide().after(newElement); + String c = oldElement.getClassName(); + if (!c.isEmpty()) { + newElement.addClassName(c); + } + } + + /** * Replace a dom element by a widget. * Old element classes will be copied to the new widget. @@ -86,5 +96,16 @@ public class WidgetsUtils { replaceOrAppend(e, widget.getElement()); GqUi.attachWidget(widget); } + + /** + * Append a widget to a dom element, and hide it. + * Element classes will be copied to the new widget. + */ + public static void hideAndAppend(Element e, Widget widget) { + assert e != null && widget != null; + GqUi.detachWidget(widget); + hideAndAppend(e, widget.getElement()); + GqUi.attachWidget(widget); + } }
adding a new method to widgets utils
ArcBees_gwtquery
train
c7e8e886760d14dac0bae17c928a8e4da2dcd92b
diff --git a/banks/santander/index.js b/banks/santander/index.js index <HASH>..<HASH> 100644 --- a/banks/santander/index.js +++ b/banks/santander/index.js @@ -120,7 +120,7 @@ exports.parseEDIFile = function(fileContent){ } else if(segmento == 'U') { parsedFile.boletos[currentNossoNumero]['valor_pago'] = formatters.removeTrailingZeros(line.substring(77, 92)); - var paid = parsedFile.boletos[currentNossoNumero]['valor_pago'] == parsedFile.boletos[currentNossoNumero]['valor']; + var paid = parsedFile.boletos[currentNossoNumero]['valor_pago'] >= parsedFile.boletos[currentNossoNumero]['valor']; paid = paid && parsedFile.boletos[currentNossoNumero]['codigo_ocorrencia'] == '17'; parsedFile.boletos[currentNossoNumero]['pago'] = paid;
Mark as paid when paying more than the boleto's amount.
pagarme_node-boleto
train
32f404058016603d7ab5cd10c511b63f638a160d
diff --git a/cauldron/resources/examples/time-gender/S05-export-data.py b/cauldron/resources/examples/time-gender/S05-export-data.py index <HASH>..<HASH> 100644 --- a/cauldron/resources/examples/time-gender/S05-export-data.py +++ b/cauldron/resources/examples/time-gender/S05-export-data.py @@ -6,30 +6,30 @@ percentages = df['Female'] / df['Total'] smoothed = cd.shared.smooth_data(percentages, 4) normalized = [s - min(smoothed) for s in smoothed] -normalized /= max(normalized) +normalized = [n / max(normalized) for n in normalized] cd.display.json(percentages=normalized) cd.display.markdown( - """ - ## HTML &amp; JavaScript - - Cauldron is capable of working directly with HTML and JavaScript. - This is useful if you want to create custom displays. In this example - we'll create a custom graphic for the data above using D3 directly - from JavaScript. - - First we need to save some data from Python into our notebook page so - that JavaScript has access to it. To do that we use the JSON display - function: - - `` - cd.display.json(data=my_data) - `` - - where we set a key and a value. In this case we map the key "data" to the - the variable "my_data". It is important to understand that this data - is going to be serialized in JavaScript, which requires mapping only - data types that JSON serialization supports. - """ + """ + ## HTML &amp; JavaScript + + Cauldron is capable of working directly with HTML and JavaScript. + This is useful if you want to create custom displays. In this example + we'll create a custom graphic for the data above using D3 directly + from JavaScript. + + First we need to save some data from Python into our notebook page so + that JavaScript has access to it. To do that we use the JSON display + function: + + `` + cd.display.json(data=my_data) + `` + + where we set a key and a value. In this case we map the key "data" to the + the variable "my_data". It is important to understand that this data + is going to be serialized in JavaScript, which requires mapping only + data types that JSON serialization supports. + """ ) \ No newline at end of file
Example Updates Compatibility updates to examples for unit testing reliability.
sernst_cauldron
train
28b2b6fa8bc050e0dbc832c992aaf4860e81055f
diff --git a/usb1.py b/usb1.py index <HASH>..<HASH> 100644 --- a/usb1.py +++ b/usb1.py @@ -65,7 +65,7 @@ USBError = libusb1.USBError def __bindConstants(): global_dict = globals() PREFIX = 'LIBUSB_' - for name, value in libusb1.__dict__.iteritems(): + for name, value in libusb1.__dict__.items(): if name.startswith(PREFIX): name = name[len(PREFIX):] # Gah.
Fix python 3 support.
vpelletier_python-libusb1
train
456cbaee7655adfaae7f962ff611c7845fb4db4e
diff --git a/tools/run_tests/distribtest_targets.py b/tools/run_tests/distribtest_targets.py index <HASH>..<HASH> 100644 --- a/tools/run_tests/distribtest_targets.py +++ b/tools/run_tests/distribtest_targets.py @@ -106,14 +106,15 @@ class NodeDistribTest(object): """Tests Node package""" def __init__(self, platform, arch, docker_suffix, node_version): - self.name = 'node_npm_%s_%s_%s_%s' % (platform, arch, - docker_suffix, node_version) + self.name = 'node_npm_%s_%s_%s' % (platform, arch, node_version) self.platform = platform self.arch = arch - self.docker_suffix = docker_suffix self.node_version = node_version self.labels = ['distribtest', 'node', platform, arch, docker_suffix, 'node-%s' % node_version] + if docker_suffix is not None: + self.name += '_%s' % docker_suffix + self.docker_suffix = docker_suffix def pre_build_jobspecs(self): return [] @@ -234,6 +235,11 @@ def targets(): RubyDistribTest('linux', 'x64', 'ubuntu1504'), RubyDistribTest('linux', 'x64', 'ubuntu1510'), RubyDistribTest('linux', 'x64', 'ubuntu1604'), + NodeDistribTest('macos', 'x64', None, '0.10'), + NodeDistribTest('macos', 'x64', None, '0.12'), + NodeDistribTest('macos', 'x64', None, '3'), + NodeDistribTest('macos', 'x64', None, '4'), + NodeDistribTest('macos', 'x64', None, '5'), NodeDistribTest('linux', 'x86', 'jessie', '4') ] + [ NodeDistribTest('linux', 'x64', os, version)
Fixed node mac distrib tests
grpc_grpc
train
9dd36c1e78ffa0511e6e676953e2143322b50e48
diff --git a/src/soundjs/AbstractSoundInstance.js b/src/soundjs/AbstractSoundInstance.js index <HASH>..<HASH> 100644 --- a/src/soundjs/AbstractSoundInstance.js +++ b/src/soundjs/AbstractSoundInstance.js @@ -477,7 +477,7 @@ this.createjs = this.createjs || {}; * DEPRECATED, please use {{#crossLink "AbstractSoundInstance/muted:property"}}{{/crossLink}} directly as a property * * @deprecated - * @method setMute + * @method setMuted * @param {Boolean} value If the sound should be muted. * @return {AbstractSoundInstance} A reference to itself, intended for chaining calls. * @since 0.6.0 @@ -493,7 +493,7 @@ this.createjs = this.createjs || {}; * DEPRECATED, please use {{#crossLink "AbstractSoundInstance/muted:property"}}{{/crossLink}} directly as a property * * @deprecated - * @method getMute + * @method getMuted * @return {Boolean} If the sound is muted. * @since 0.6.0 */
setMuted method was doc'd as setMute.
CreateJS_SoundJS
train
3066640443170c03d0f59a131b11e190b164743b
diff --git a/bucket_collectionsmgr.go b/bucket_collectionsmgr.go index <HASH>..<HASH> 100644 --- a/bucket_collectionsmgr.go +++ b/bucket_collectionsmgr.go @@ -199,7 +199,7 @@ func (cm *CollectionManager) CreateCollection(spec CollectionSpec, opts *CreateC posts.Add("name", spec.Name) if spec.MaxExpiry > 0 { - posts.Add("maxTTL", fmt.Sprintf("%f", spec.MaxExpiry.Seconds())) + posts.Add("maxTTL", fmt.Sprintf("%d", int(spec.MaxExpiry.Seconds()))) } req := mgmtRequest{ diff --git a/testcluster_test.go b/testcluster_test.go index <HASH>..<HASH> 100644 --- a/testcluster_test.go +++ b/testcluster_test.go @@ -169,7 +169,7 @@ func (c *testCluster) SupportsFeature(feature FeatureCode) bool { case AnalyticsFeature: supported = !c.Version.Lower(srvVer600) && !c.Version.Equal(srvVer650DP) case CollectionsFeature: - supported = !c.Version.Lower(srvVer700) + supported = c.Version.Equal(srvVer650DP) || !c.Version.Lower(srvVer700) case SubdocMockBugFeature: supported = true case ExpandMacrosFeature: diff --git a/testsuite_test.go b/testsuite_test.go index <HASH>..<HASH> 100644 --- a/testsuite_test.go +++ b/testsuite_test.go @@ -94,7 +94,7 @@ func (suite *IntegrationTestSuite) SetupSuite() { globalBucket = globalCluster.Bucket(globalConfig.Bucket) - err = globalBucket.WaitUntilReady(1*time.Second, &WaitUntilReadyOptions{DesiredState: ClusterStateOnline}) + err = globalBucket.WaitUntilReady(5*time.Second, &WaitUntilReadyOptions{DesiredState: ClusterStateOnline}) if err != nil { panic(err.Error()) }
GOCBC-<I>: Send MaxTTL as int not float on collection creation Motivation ---------- When MaxExpiry is set for create collection then we send it as a float, it should be an int. Changes ------- Update MaxTTL to be sent as int not float. Change-Id: Iba<I>f<I>c<I>efa<I>fac6ff1a<I>fe9ee<I>e Reviewed-on: <URL>
couchbase_gocb
train
a161f7c325dff72e94b38b12d5c52730ebed248f
diff --git a/test/test_page.rb b/test/test_page.rb index <HASH>..<HASH> 100644 --- a/test/test_page.rb +++ b/test/test_page.rb @@ -12,13 +12,8 @@ describe "Page" do end it "renders its contents" do - expected_output = <<OUTPUT -<h2>About this site</h2> - -<p>Hello! I&#39;m an about page.</p> -OUTPUT - - subject.render.must_equal(expected_output.strip) + expected_output = render_fixture('page.erb') + subject.render.must_equal(expected_output) end it "publishes to a file" do diff --git a/test/test_post.rb b/test/test_post.rb index <HASH>..<HASH> 100644 --- a/test/test_post.rb +++ b/test/test_post.rb @@ -12,13 +12,8 @@ describe "Post" do end it "renders its contents" do - expected_output = <<OUTPUT -<h3>Hello</h3> - -<p>Welcome to my first post. This is <em>awesome</em>.</p> -OUTPUT - - subject.render.must_equal(expected_output.strip) + expected_output = render_fixture('post.erb') + subject.render.must_equal(expected_output) end it "publishes to a file" do
Switched to using the render_fixture method.
waferbaby_dimples
train
41e2d32065ba11086365613705c232e5aa05669c
diff --git a/cltk/corpus/greek/corpora.py b/cltk/corpus/greek/corpora.py index <HASH>..<HASH> 100644 --- a/cltk/corpus/greek/corpora.py +++ b/cltk/corpus/greek/corpora.py @@ -42,6 +42,12 @@ GREEK_CORPORA = [ 'name': 'greek_treebank_perseus', 'location': 'remote', 'type': 'treebank'}, + {'encoding': 'utf-8', + 'markup': 'xml', + 'origin': 'https://github.com/vgorman1/Greek-Dependency-Trees.git', + 'name': 'greek_treebank_gorman', + 'location': 'remote', + 'type': 'treebank'}, {'encoding': 'xml', 'markup': 'plaintext', 'origin': 'https://github.com/cltk/greek_lexica_perseus.git',
Add "Gorman Trees" corpus for Greek (fixes #<I>) (#<I>)
cltk_cltk
train