hash
stringlengths
40
40
diff
stringlengths
131
114k
message
stringlengths
7
980
project
stringlengths
5
67
split
stringclasses
1 value
333c3105815031d14eca7ae180f44184fd36a4c2
diff --git a/RT.py b/RT.py index <HASH>..<HASH> 100644 --- a/RT.py +++ b/RT.py @@ -632,7 +632,7 @@ def set_pipeline(filename, scan, fileroot='', paramfile='', **kwargs): logger.info('\t Search with %s and threshold %.1f.' % (d['searchtype'], d['sigma_image1'])) logger.info('\t Using %d DMs from %.1f to %.1f and dts %s.' % (len(d['dmarr']), min(d['dmarr']), max(d['dmarr']), d['dtarr'])) - logger.info('\t Using uvgrid npix=(%d,%d) and res=%d (%.1fx oversample).' % (d['npixx'], d['npixy'], d['uvres'], d['uvoversample'])) + logger.info('\t Using uvgrid npix=(%d,%d) and res=%d.' % (d['npixx'], d['npixy'], d['uvres'])) logger.info('\t Expect %d thermal false positives per segment.' % nfalse) (vismem, immem) = calc_memory_footprint(d) diff --git a/parsesdm.py b/parsesdm.py index <HASH>..<HASH> 100644 --- a/parsesdm.py +++ b/parsesdm.py @@ -150,7 +150,7 @@ def get_metadata(filename, scan, spw=[], chans=[], read_fdownsample=1, params='' logger.info('\t Freq range (%.3f -- %.3f). %d spw with %d chans.' % (d['freq'].min(), d['freq'].max(), d['nspw'], d['nchan'])) logger.info('\t Scan has %d ints (%.1f s) and inttime %.3f s' % (d['nints'], d['nints']*d['inttime'], d['inttime'])) logger.info('\t %d polarizations: %s' % (d['npol'], d['pols'])) - logger.info('\t Ideal uvgrid npix=(%d,%d) and res=%d' % (d['npixx_full'], d['npixy_full'], d['uvres_full'])) + logger.info('\t Ideal uvgrid npix=(%d,%d) and res=%d (oversample %.1f)' % (d['npixx_full'], d['npixy_full'], d['uvres_full'], d['uvoversample'])) return d
moved oversample to ideal uv calc area (parsesdm.py)
caseyjlaw_rtpipe
train
ab465e89960ff104276f0f0eea88b73892910d9a
diff --git a/src/app/Http/Controllers/Owner/OwnerController.php b/src/app/Http/Controllers/Owner/OwnerController.php index <HASH>..<HASH> 100644 --- a/src/app/Http/Controllers/Owner/OwnerController.php +++ b/src/app/Http/Controllers/Owner/OwnerController.php @@ -16,7 +16,10 @@ class OwnerController extends Controller public function store(ValidateOwnerRequest $request, Owner $owner) { - $owner->storeWithRoles($request->all(), $request->get('roleList')); + $owner = $owner->storeWithRoles( + $request->all(), + $request->get('roleList') + ); return [ 'message' => __('The entity was created!'), @@ -35,7 +38,10 @@ class OwnerController extends Controller public function update(ValidateOwnerRequest $request, Owner $owner) { - $owner->updateWithRoles($request->all(), $request->get('roleList')); + $owner->updateWithRoles( + $request->all(), + $request->get('roleList') + ); return ['message' => __(config('enso.labels.savedChanges'))]; } diff --git a/tests/features/OwnerTest.php b/tests/features/OwnerTest.php index <HASH>..<HASH> 100644 --- a/tests/features/OwnerTest.php +++ b/tests/features/OwnerTest.php @@ -58,9 +58,8 @@ class OwnerTest extends TestCase public function update() { $postParams = $this->postParams(); - $owner = Owner::create($postParams); + $owner = Owner::create($postParams)->append(['roleList']); $owner->name = 'edited'; - $owner->roleList = []; $this->patch(route('administration.owners.update', $owner->id, false), $owner->toArray()) ->assertStatus(200)
fixes store; refactors test
laravel-enso_Core
train
c876a0bdaa9542122ef27f2703d2944d2b9b207e
diff --git a/spec/bitbucket_rest_api/team_spec.rb b/spec/bitbucket_rest_api/team_spec.rb index <HASH>..<HASH> 100644 --- a/spec/bitbucket_rest_api/team_spec.rb +++ b/spec/bitbucket_rest_api/team_spec.rb @@ -14,15 +14,30 @@ describe BitBucket::Team do end context 'without a block' do - it 'should send a GET request for the teams of which the user is a member' do + it 'sends a GET request for the teams of which the user is a member' do team.list(:member) end end context 'with a block' do - it 'should send a GET request for the teams of which the user is a member' do + it 'sends a GET request for the teams of which the user is a member' do team.list(:member) { |team| team } end end end + + describe '.profile' do + before do + expect(team).to receive(:request).with( + :get, + '/2.0/teams/team_name', + {}, + {} + ) + end + + it 'sends a GET request for the profile for the team' do + team.profile('team_name') + end + end end
Add test for .profile to team_spec
bitbucket-rest-api_bitbucket
train
c39727117ce67baae397a48681cce40687efb99b
diff --git a/renku/core/management/repository.py b/renku/core/management/repository.py index <HASH>..<HASH> 100644 --- a/renku/core/management/repository.py +++ b/renku/core/management/repository.py @@ -423,6 +423,7 @@ class RepositoryApiMixin(GitCore): # parse file and process it template = Template(file.read_text()) rendered_content = template.render(metadata) + destination = Path(Template(str(destination)).render(metadata)) destination.write_text(rendered_content) except IsADirectoryError: destination.mkdir(parents=True, exist_ok=True) diff --git a/tests/core/commands/test_init.py b/tests/core/commands/test_init.py index <HASH>..<HASH> 100644 --- a/tests/core/commands/test_init.py +++ b/tests/core/commands/test_init.py @@ -193,3 +193,21 @@ def test_create_from_template(local_client): local_client.path ) assert expected_file.exists() + + +def test_template_filename(local_client): + """Test using a template with dynamic filenames. + """ + with TemporaryDirectory() as tempdir: + template_folder = Path(tempdir) / 'first' + + template_folder.mkdir(parents=True) + + template_file = template_folder / '{{ name }}.r' + template_file.write_text('{{ name }}') + + (local_client.path / '.renku').mkdir() + + create_from_template(template_folder, local_client, name='test') + + assert (local_client.path / 'test.r').exists()
Adds functionality to allow filenames to be templated (#<I>)
SwissDataScienceCenter_renku-python
train
467d02807a288afcf17b1f52283ff56928927ee8
diff --git a/src/navigation-commands.js b/src/navigation-commands.js index <HASH>..<HASH> 100644 --- a/src/navigation-commands.js +++ b/src/navigation-commands.js @@ -1,14 +1,31 @@ +/** + * A function returns true if passed parameter is of class NavigationCommand and + * false otherwise. To define if passed parameter is NavigationCommand function + * will check if parameter has a navigate method. + * @param {object} obj - the item to check. + * @return {boolean} + */ export function isNavigationCommand(obj){ return obj && typeof obj.navigate === 'function'; } +/** + * This class represents a redirection command. + */ export class Redirect{ + /** + * @param {String} url - the url to redirect to. + */ constructor(url) { this.url = url; this.shouldContinueProcessing = false; } + /** + * Runs a navigation proccess. + * @param {Router} appRouter - a router which should execute redirection. + */ navigate(appRouter){ (this.router || appRouter).navigate(this.url, { trigger: true, replace: true }); } -} \ No newline at end of file +}
doc(navigation-commands): add API docs Add API docs for Redirect and isNavigationCommand
aurelia_router
train
acc8d48b11890ec8a4f725e8e330ea56950a43d6
diff --git a/src/Dev/SapphireTest.php b/src/Dev/SapphireTest.php index <HASH>..<HASH> 100644 --- a/src/Dev/SapphireTest.php +++ b/src/Dev/SapphireTest.php @@ -1191,15 +1191,9 @@ class SapphireTest extends PHPUnit_Framework_TestCase implements TestOnly // Support fixture paths relative to the test class, rather than relative to webroot // String checking is faster than file_exists() calls. - $isRelativeToFile - = (strpos('/', $fixtureFilePath) === false) - || preg_match('/^(\.){1,2}/', $fixtureFilePath); - - if ($isRelativeToFile) { - $resolvedPath = realpath($this->getCurrentAbsolutePath() . '/' . $fixtureFilePath); - if ($resolvedPath) { - return $resolvedPath; - } + $resolvedPath = realpath($this->getCurrentAbsolutePath() . '/' . $fixtureFilePath); + if ($resolvedPath) { + return $resolvedPath; } // Check if file exists relative to base dir diff --git a/src/Dev/State/FixtureTestState.php b/src/Dev/State/FixtureTestState.php index <HASH>..<HASH> 100644 --- a/src/Dev/State/FixtureTestState.php +++ b/src/Dev/State/FixtureTestState.php @@ -155,15 +155,9 @@ class FixtureTestState implements TestState { // Support fixture paths relative to the test class, rather than relative to webroot // String checking is faster than file_exists() calls. - $isRelativeToFile - = (strpos($fixtureFilePath, '/') === false) - || preg_match('/^(\.){1,2}/', $fixtureFilePath); - - if ($isRelativeToFile) { - $resolvedPath = realpath($this->getTestAbsolutePath($test) . '/' . $fixtureFilePath); - if ($resolvedPath) { - return $resolvedPath; - } + $resolvedPath = realpath($this->getTestAbsolutePath($test) . '/' . 
$fixtureFilePath); + if ($resolvedPath) { + return $resolvedPath; } // Check if file exists relative to base dir @@ -199,6 +193,17 @@ class FixtureTestState implements TestState */ protected function testNeedsDB(SapphireTest $test) { + // test class explicitly enables DB + if ($test->getUsesDatabase()) { + return true; + } + + // presence of fixture file implicitly enables DB + $fixtures = $test::get_fixture_file(); + if (!empty($fixtures)) { + return true; + } + $annotations = $test->getAnnotations(); // annotation explicitly disables the DB @@ -213,17 +218,6 @@ class FixtureTestState implements TestState return true; } - // test class explicitly enables DB - if ($test->getUsesDatabase()) { - return true; - } - - // presence of fixture file implicitly enables DB - $fixtures = $test::get_fixture_file(); - if (!empty($fixtures)) { - return true; - } - return false; } }
FIX SapphireTest can load relative fixtures in subfolders, switch "needs db" priority check order A minute performance gain by checking instance properties for "uses database" before loading and parsing PHPDoc annotations for the same thing, rather than doing it afterwards.
silverstripe_silverstripe-framework
train
d6008c21ced99ae9353fa3fc4272df07e1486bf8
diff --git a/src/tlsAnalyser/tlsAnalyser.go b/src/tlsAnalyser/tlsAnalyser.go index <HASH>..<HASH> 100644 --- a/src/tlsAnalyser/tlsAnalyser.go +++ b/src/tlsAnalyser/tlsAnalyser.go @@ -238,6 +238,10 @@ func main() { msgs, err := broker.Consume(rxQueue) + if err != nil { + failOnError(err, "Failed to Consume from receiving queue") + } + for i := 0; i < cores; i++ { wg.Add(1) go worker(msgs)
Added error handling when trying to consume from queue.
mozilla_tls-observatory
train
f8ed0c8cc42a2c51749d2ed85ef521e0ee124911
diff --git a/base.php b/base.php index <HASH>..<HASH> 100644 --- a/base.php +++ b/base.php @@ -1564,14 +1564,6 @@ final class Base { register_shutdown_function(array($this,'unload')); } - /** - * Wrap-up - * @return NULL - **/ - function __destruct() { - Registry::clear(__CLASS__); - } - } //! Prefab for classes with constructors and static factory methods @@ -1591,18 +1583,10 @@ abstract class Prefab { return Registry::get($class); } - /** - * Wrap-up - * @return NULL - **/ - function __destruct() { - Registry::clear(get_called_class()); - } - } //! Cache engine -final class Cache extends Prefab { +class Cache extends Prefab { private //! Cache DSN @@ -1610,9 +1594,7 @@ final class Cache extends Prefab { //! Prefix for cache entries $prefix, //! MemCache object - $ref, - //! Built-in cache flag - $flag; + $ref; /** * Return timestamp and TTL of cache entry or FALSE if not found @@ -1804,14 +1786,6 @@ final class Cache extends Prefab { if (preg_match('/^folder\h*=\h*(.+)/',$dsn,$parts) && !is_dir($parts[1])) mkdir($parts[1],Base::MODE,TRUE); - $this->flag=(bool) - array_filter( - debug_backtrace(FALSE), - function($frame) { - return isset($frame['args'][0]) && - $frame['args'][0]=='CACHE'; - } - ); } $this->prefix=$fw->hash($fw->get('ROOT').$fw->get('BASE')); return $this->dsn=$dsn; @@ -1827,15 +1801,6 @@ final class Cache extends Prefab { $this->load($dsn); } - /** - * Wrap-up - * @return NULL - **/ - function __destruct() { - if ($this->flag) - parent::__destruct(); - } - } //! View handler @@ -2298,15 +2263,6 @@ final class Registry { return self::$table[$key]; } - /** - * Remove object from catalog - * @return NULL - * @param $key string - **/ - static function clear($key) { - unset(self::$table[$key]); - } - //! Prohibit cloning private function __clone() { }
Remove destructors (Issue #<I>)
bcosca_fatfree-core
train
2d1201a942668a1b61ac9c840ea337dbe06fafc1
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -9,14 +9,14 @@ setup( install_requires=[ 'requests', ], - version='0.8', + version='0.9', description='SteamSpy API on PyPI', long_description='SteamSpyPI: an API for SteamSpy, written in Python 3.', long_description_content_type='text/x-rst', author='Wok', author_email='wok@tuta.io', url='https://github.com/woctezuma/steamspypi', - download_url='https://github.com/woctezuma/steamspypi/archive/0.8.tar.gz', + download_url='https://github.com/woctezuma/steamspypi/archive/0.9.tar.gz', keywords=['steam', 'steamspy', 'api'], classifiers=[ 'Development Status :: 5 - Production/Stable',
Increase version number to <I>
woctezuma_steamspypi
train
0ff1e39c1fb1406566c65c60220dee2cadfcb8ce
diff --git a/src/main/java/org/osgl/Osgl.java b/src/main/java/org/osgl/Osgl.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/osgl/Osgl.java +++ b/src/main/java/org/osgl/Osgl.java @@ -2611,6 +2611,9 @@ public class Osgl implements Serializable { * the same value, and the last one is the winner and it's {@code _2} * will be put into the map * </p> + * @param <K> the key type + * @param <V> the value type + * @param list the list of tuples to be transformed into map * @return the map as described */ @SuppressWarnings("unused") @@ -5141,7 +5144,7 @@ public class Osgl implements Serializable { * 2. Wrapper type of primitive types, e.g. Integer.class * 3. String.class * 4. Any class extends `Enum.class` - * @param c + * @param c the class to be checked * @return `true` if the give type `c` is simple type as described above */ public static boolean isSimpleType(Class<?> c) { @@ -5464,6 +5467,7 @@ public class Osgl implements Serializable { * the class specified * @param c the class * @param name the name of the field + * @param noStatic specify if static fields shall be included * @return the field instance of `null` if not found */ public static Field fieldOf(Class<?> c, String name, boolean noStatic) { @@ -5560,6 +5564,7 @@ public class Osgl implements Serializable { * * After invocation, the method will be cached into the method bag supplied * + * @param methodBag A function to cache the method found by name and arguments * @param o the instance on which the virtual method will be invoked * @param methodName the method name * @param pa the arguments diff --git a/src/main/java/org/osgl/util/C.java b/src/main/java/org/osgl/util/C.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/osgl/util/C.java +++ b/src/main/java/org/osgl/util/C.java @@ -349,12 +349,15 @@ public enum C { Traversable<T> accept($.Function<? 
super T, ?> visitor); /** - * Alias of {@link #accept($.Function)} + * Alias of {@link #accept(Osgl.Function)} + * @param visitor the visitor to tranverse the elements + * @return this {@code Traversable} instance */ Traversable<T> each($.Function<? super T, ?> visitor); /** - * Alias of {@link #accept($.Function)} + * Alias of {@link #accept(Osgl.Function)} + * @param visitor the visitor function */ Traversable<T> forEach($.Function<? super T, ?> visitor); } @@ -413,6 +416,7 @@ public enum C { /** * Alias of {@link #take(int)} * + * @param n the number of elements to be taken into the return sequence * @return the first {@code n} element in the sequence * @since 0.2 */ @@ -719,8 +723,8 @@ public enum C { * * @param identity the identity value for the accumulating function * @param accumulator the function to accumulate two values + * @param <R> the aggregation result type * @return the reduced result - * @see #reduce(Object, $.Func2) * @since 0.2 */ <R> R reduceLeft(R identity, $.Func2<R, T, R> accumulator); @@ -783,8 +787,8 @@ public enum C { * * @param visitor the function to visit elements in this sequence * @return this sequence - * @see Traversable#accept($.Function) - * @see ReversibleSequence#acceptRight($.Function) + * @see Traversable#accept(Osgl.Function) + * @see ReversibleSequence#acceptRight(Osgl.Function) * @since 0.2 */ Sequence<T> acceptLeft($.Function<? super T, ?> visitor); @@ -1050,6 +1054,7 @@ public enum C { * @param identity the initial value * @param accumulator the function performs accumulation from {@code T} an {@code R} to anthoer {@code R} * @param <R> the accumulation result + * @return the aggregation result * @see #reduce(Object, Osgl.Func2) * @since 0.2 */
workaround the issue that release get blocked due to javadoc error
osglworks_java-tool
train
44801b9fed0ffd2e8706d4cea6197057d9ae321b
diff --git a/moco-core/src/test/java/com/github/dreamhead/moco/MocoWebsocketTest.java b/moco-core/src/test/java/com/github/dreamhead/moco/MocoWebsocketTest.java index <HASH>..<HASH> 100644 --- a/moco-core/src/test/java/com/github/dreamhead/moco/MocoWebsocketTest.java +++ b/moco-core/src/test/java/com/github/dreamhead/moco/MocoWebsocketTest.java @@ -262,7 +262,7 @@ public class MocoWebsocketTest extends AbstractMocoHttpTest { private byte[] getMessage() { try { - return message.get(2, TimeUnit.SECONDS); + return message.get(3, TimeUnit.SECONDS); } catch (InterruptedException | ExecutionException e) { return new byte[0]; } catch (TimeoutException e) { diff --git a/moco-runner/src/test/java/com/github/dreamhead/moco/MocoWebsocketStandaloneTest.java b/moco-runner/src/test/java/com/github/dreamhead/moco/MocoWebsocketStandaloneTest.java index <HASH>..<HASH> 100644 --- a/moco-runner/src/test/java/com/github/dreamhead/moco/MocoWebsocketStandaloneTest.java +++ b/moco-runner/src/test/java/com/github/dreamhead/moco/MocoWebsocketStandaloneTest.java @@ -141,7 +141,7 @@ public class MocoWebsocketStandaloneTest extends AbstractMocoStandaloneTest { private byte[] getMessage() { try { - return message.get(2, TimeUnit.SECONDS); + return message.get(3, TimeUnit.SECONDS); } catch (InterruptedException | ExecutionException e) { return new byte[0]; } catch (TimeoutException e) {
adjusted websocket client timeout for build
dreamhead_moco
train
7588d51146bfce89b827fb661c3591144bc1297d
diff --git a/CHANGELOG.md b/CHANGELOG.md index <HASH>..<HASH> 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,10 @@ these changes are prefixed with "**BREAKING**" * `delphin.codecs.edspenman` now properly reads predicate names * `delphin.codecs.edspenman` and `delphin.codecs.dmrspenman` now wrap `PenmanError` in `PyDelphinException` ([#266][]) +* `delphin.mrs.MRS.quantification_pairs()` detects and ignores when + quantifier(s) are shared by multiple EPs ([#267][]) +* `delphin.dmrs.from_mrs()` detects when an INDEX is specified but is + not the intrinsic argument of any EP ([#267][]) ### Changed @@ -1248,4 +1252,5 @@ information about changes, except for [#263]: https://github.com/delph-in/pydelphin/issues/263 [#264]: https://github.com/delph-in/pydelphin/issues/264 [#266]: https://github.com/delph-in/pydelphin/issues/266 +[#267]: https://github.com/delph-in/pydelphin/issues/267 [#268]: https://github.com/delph-in/pydelphin/issues/268 diff --git a/delphin/dmrs/_operations.py b/delphin/dmrs/_operations.py index <HASH>..<HASH> 100644 --- a/delphin/dmrs/_operations.py +++ b/delphin/dmrs/_operations.py @@ -33,7 +33,11 @@ def from_mrs(m, representative_priority=None): for ep in m.rels if not ep.is_quantifier()} top = _mrs_get_top(m.top, hcmap, reps, id_to_nid) - index = iv_to_nid[m.index] if m.index else None + # some bad MRSs have an INDEX that isn't the ARG0 of any EP, so + # make sure it exists first + index = None + if m.index and m.index in iv_to_nid: + index = iv_to_nid[m.index] nodes = _mrs_to_nodes(m, id_to_nid) links = _mrs_to_links(m, hcmap, reps, iv_to_nid, id_to_nid) diff --git a/delphin/mrs/_mrs.py b/delphin/mrs/_mrs.py index <HASH>..<HASH> 100644 --- a/delphin/mrs/_mrs.py +++ b/delphin/mrs/_mrs.py @@ -290,7 +290,9 @@ class MRS(scope.ScopingSemanticStructure): pairs.append((ep, qmap.get(ep.iv))) # then unpaired quantifiers, if any for _, q in pairs: - if q is not None: + # some bad MRSs have multiple EPs share an ARG0; avoid the + # KeyError by 
checking if they are still in qmap + if q is not None and q.iv in qmap: del qmap[q.iv] for q in qmap.values(): pairs.append((None, q))
Better deal with bad MRSs in conversion There are two separate issues that are fixed here: * When multiple non-quantifier EPs share an ARG0 it caused problems with mapping variables to quantifiers * When the INDEX was not used by any EP it caused a lookup error Fixes #<I>
delph-in_pydelphin
train
406366c45b69b77d43dd493b19c749bc57683d9a
diff --git a/agent/dns.go b/agent/dns.go index <HASH>..<HASH> 100644 --- a/agent/dns.go +++ b/agent/dns.go @@ -337,7 +337,7 @@ func (d *DNSServer) addSOA(msg *dns.Msg) { // nameservers returns the names and ip addresses of up to three random servers // in the current cluster which serve as authoritative name servers for zone. func (d *DNSServer) nameservers(edns bool) (ns []dns.RR, extra []dns.RR) { - out, err := d.lookupServiceNodes(d.agent.config.Datacenter, structs.ConsulServiceName, "") + out, err := d.lookupServiceNodes(d.agent.config.Datacenter, structs.ConsulServiceName, "", false) if err != nil { d.logger.Printf("[WARN] dns: Unable to get list of servers: %s", err) return nil, nil @@ -415,7 +415,7 @@ PARSE: n = n + 1 } - switch labels[n-1] { + switch kind := labels[n-1]; kind { case "service": if n == 1 { goto INVALID @@ -433,7 +433,7 @@ PARSE: } // _name._tag.service.consul - d.serviceLookup(network, datacenter, labels[n-3][1:], tag, req, resp) + d.serviceLookup(network, datacenter, labels[n-3][1:], tag, false, req, resp) // Consul 0.3 and prior format for SRV queries } else { @@ -445,9 +445,17 @@ PARSE: } // tag[.tag].name.service.consul - d.serviceLookup(network, datacenter, labels[n-2], tag, req, resp) + d.serviceLookup(network, datacenter, labels[n-2], tag, false, req, resp) } + case "connect": + if n == 1 { + goto INVALID + } + + // name.connect.consul + d.serviceLookup(network, datacenter, labels[n-2], "", true, req, resp) + case "node": if n == 1 { goto INVALID @@ -898,8 +906,9 @@ func (d *DNSServer) trimDNSResponse(network string, req, resp *dns.Msg) (trimmed } // lookupServiceNodes returns nodes with a given service. 
-func (d *DNSServer) lookupServiceNodes(datacenter, service, tag string) (structs.IndexedCheckServiceNodes, error) { +func (d *DNSServer) lookupServiceNodes(datacenter, service, tag string, connect bool) (structs.IndexedCheckServiceNodes, error) { args := structs.ServiceSpecificRequest{ + Connect: connect, Datacenter: datacenter, ServiceName: service, ServiceTag: tag, @@ -935,8 +944,8 @@ func (d *DNSServer) lookupServiceNodes(datacenter, service, tag string) (structs } // serviceLookup is used to handle a service query -func (d *DNSServer) serviceLookup(network, datacenter, service, tag string, req, resp *dns.Msg) { - out, err := d.lookupServiceNodes(datacenter, service, tag) +func (d *DNSServer) serviceLookup(network, datacenter, service, tag string, connect bool, req, resp *dns.Msg) { + out, err := d.lookupServiceNodes(datacenter, service, tag, connect) if err != nil { d.logger.Printf("[ERR] dns: rpc error: %v", err) resp.SetRcode(req, dns.RcodeServerFailure) diff --git a/agent/dns_test.go b/agent/dns_test.go index <HASH>..<HASH> 100644 --- a/agent/dns_test.go +++ b/agent/dns_test.go @@ -17,6 +17,7 @@ import ( "github.com/hashicorp/serf/coordinate" "github.com/miekg/dns" "github.com/pascaldekloe/goe/verify" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -1041,6 +1042,48 @@ func TestDNS_ServiceLookupWithInternalServiceAddress(t *testing.T) { verify.Values(t, "extra", in.Extra, wantExtra) } +func TestDNS_ConnectServiceLookup(t *testing.T) { + t.Parallel() + + assert := assert.New(t) + a := NewTestAgent(t.Name(), "") + defer a.Shutdown() + + // Register a node with an external service. 
+ { + args := structs.TestRegisterRequestProxy(t) + args.Service.ProxyDestination = "db" + args.Service.Port = 12345 + var out struct{} + assert.Nil(a.RPC("Catalog.Register", args, &out)) + } + + // Look up the service + questions := []string{ + "db.connect.consul.", + } + for _, question := range questions { + m := new(dns.Msg) + m.SetQuestion(question, dns.TypeSRV) + + c := new(dns.Client) + in, _, err := c.Exchange(m, a.DNSAddr()) + assert.Nil(err) + assert.Len(in.Answer, 1) + + srvRec, ok := in.Answer[0].(*dns.SRV) + assert.True(ok) + assert.Equal(12345, srvRec.Port) + assert.Equal("foo.node.dc1.consul.", srvRec.Target) + assert.Equal(0, srvRec.Hdr.Ttl) + + cnameRec, ok := in.Extra[0].(*dns.CNAME) + assert.True(ok) + assert.Equal("foo.node.dc1.consul.", cnameRec.Hdr.Name) + assert.Equal(0, srvRec.Hdr.Ttl) + } +} + func TestDNS_ExternalServiceLookup(t *testing.T) { t.Parallel() a := NewTestAgent(t.Name(), "")
agent: working DNS for Connect queries, I think, but have to implement Health endpoints to be sure
hashicorp_consul
train
f3b5306ecb647f9bd524c980d4456fd87ee51789
diff --git a/src/parser/parse-template.js b/src/parser/parse-template.js index <HASH>..<HASH> 100644 --- a/src/parser/parse-template.js +++ b/src/parser/parse-template.js @@ -46,7 +46,7 @@ function parseTemplate(source, options) { var walker = new Walker(source); var tagReg = /<(\/)?([a-z0-9-]+)\s*/ig; - var attrReg = /([-:0-9a-z\(\)\[\]]+)(=(['"])([^\3]*?)\3)?\s*/ig; + var attrReg = /([-:0-9a-z\(\)\[\]]+)(\s*=\s*(['"])([^\3]*?)\3)?\s*/ig; var tagMatch; var currentNode = rootNode;
fix error when using space around attribute's = operator
baidu_san
train
7507e3cd21aae0724026f9e19016b54ae429b34d
diff --git a/backend/remote-state/gcs/backend_state.go b/backend/remote-state/gcs/backend_state.go index <HASH>..<HASH> 100644 --- a/backend/remote-state/gcs/backend_state.go +++ b/backend/remote-state/gcs/backend_state.go @@ -91,50 +91,53 @@ func (b *gcsBackend) State(name string) (state.State, error) { } st := &remote.State{Client: c} - lockInfo := state.NewLockInfo() - lockInfo.Operation = "init" - lockID, err := st.Lock(lockInfo) - if err != nil { + + // Grab the value + if err := st.RefreshState(); err != nil { return nil, err } - // Local helper function so we can call it multiple places - unlock := func(baseErr error) error { - if err := st.Unlock(lockID); err != nil { - const unlockErrMsg = `%v -Additionally, unlocking the state file on Google Cloud Storage failed: - - Error message: %q - Lock ID (gen): %v - Lock file URL: %v + // If we have no state, we have to create an empty state + if v := st.State(); v == nil { -You may have to force-unlock this state in order to use it again. -The GCloud backend acquires a lock during initialization to ensure -the initial state file is created.` - return fmt.Errorf(unlockErrMsg, baseErr, err.Error(), lockID, c.lockFileURL()) + lockInfo := state.NewLockInfo() + lockInfo.Operation = "init" + lockID, err := st.Lock(lockInfo) + if err != nil { + return nil, err } - return baseErr - } + // Local helper function so we can call it multiple places + unlock := func(baseErr error) error { + if err := st.Unlock(lockID); err != nil { + const unlockErrMsg = `%v + Additionally, unlocking the state file on Google Cloud Storage failed: - // Grab the value - if err := st.RefreshState(); err != nil { - return nil, unlock(err) - } + Error message: %q + Lock ID (gen): %v + Lock file URL: %v + + You may have to force-unlock this state in order to use it again. 
+ The GCloud backend acquires a lock during initialization to ensure + the initial state file is created.` + return fmt.Errorf(unlockErrMsg, baseErr, err.Error(), lockID, c.lockFileURL()) + } + + return baseErr + } - // If we have no state, we have to create an empty state - if v := st.State(); v == nil { if err := st.WriteState(terraform.NewState()); err != nil { return nil, unlock(err) } if err := st.PersistState(); err != nil { return nil, unlock(err) } - } - // Unlock, the state should now be initialized - if err := unlock(nil); err != nil { - return nil, err + // Unlock, the state should now be initialized + if err := unlock(nil); err != nil { + return nil, err + } + } return st, nil
backend/gcs: fix locking issue when used with terraform_remote_state Previously there was a problem with double-locking when using the GCS backend with the terraform_remote_state data source. Here we adjust the locking methodology to avoid that problem.
hashicorp_terraform
train
ad354d5d20da31d74784796cce73522ca14075d2
diff --git a/PutIO/Engines/PutIO/FilesEngine.php b/PutIO/Engines/PutIO/FilesEngine.php index <HASH>..<HASH> 100644 --- a/PutIO/Engines/PutIO/FilesEngine.php +++ b/PutIO/Engines/PutIO/FilesEngine.php @@ -48,13 +48,13 @@ class FilesEngine extends PutIOHelper /** - * Uploads a local file to your account. Returns false if the file does not exist. + * Uploads a local file to your account. * * NOTE 1: The response differs based on the uploaded file. For regular files, the * array key containing the info is 'file', but for torrents it's 'transfer'. * @see https://api.put.io/v2/docs/#files-upload * - * NOTE 2: Files need to be read into the memory when using native functions. Keep + * NOTE 2: Files need to be read into the memory when using NATIVE functions. Keep * that in mind when uploading large files or running multiple instances. * * @param string $file Path to local file. @@ -64,11 +64,6 @@ class FilesEngine extends PutIOHelper **/ public function upload($file, $parentID = 0) { - if (!$file = realpath($file)) - { - return false; - } - return $this->uploadFile('files/upload', array('parent_id', $parentID, 'file' => '@' . $file)); }
Turns out realpath() wan't needed
nicoSWD_put.io-api-v2
train
b00bd7ef27e0e41339f3dc7c4773eacc3b76231d
diff --git a/src/frontend/org/voltdb/SnapshotSaveAPI.java b/src/frontend/org/voltdb/SnapshotSaveAPI.java index <HASH>..<HASH> 100644 --- a/src/frontend/org/voltdb/SnapshotSaveAPI.java +++ b/src/frontend/org/voltdb/SnapshotSaveAPI.java @@ -309,6 +309,7 @@ public class SnapshotSaveAPI Runnable completionTask = SnapshotUtil.writeSnapshotDigest( txnId, + context.getExecutionSite().m_context.catalogCRC, file_path, file_nonce, tables, diff --git a/src/frontend/org/voltdb/sysprocs/SystemInformation.java b/src/frontend/org/voltdb/sysprocs/SystemInformation.java index <HASH>..<HASH> 100644 --- a/src/frontend/org/voltdb/sysprocs/SystemInformation.java +++ b/src/frontend/org/voltdb/sysprocs/SystemInformation.java @@ -365,6 +365,8 @@ public class SystemInformation extends VoltSystemProcedure vt.addRow(hostId, "LASTCATALOGUPDATETXNID", Long.toString(VoltDB.instance().getCatalogContext().m_transactionId)); + vt.addRow(hostId, "CATALOGCRC", + Long.toString(VoltDB.instance().getCatalogContext().catalogCRC)); return vt; } diff --git a/src/frontend/org/voltdb/sysprocs/saverestore/SnapshotUtil.java b/src/frontend/org/voltdb/sysprocs/saverestore/SnapshotUtil.java index <HASH>..<HASH> 100644 --- a/src/frontend/org/voltdb/sysprocs/saverestore/SnapshotUtil.java +++ b/src/frontend/org/voltdb/sysprocs/saverestore/SnapshotUtil.java @@ -76,6 +76,7 @@ public class SnapshotUtil { */ public static Runnable writeSnapshotDigest( long txnId, + long catalogCRC, String path, String nonce, List<Table> tables, @@ -119,7 +120,7 @@ public class SnapshotUtil { stringer.endObject(); } stringer.endArray(); - stringer.key("catalogCRC").value(VoltDB.instance().getCatalogContext().catalogCRC); + stringer.key("catalogCRC").value(catalogCRC); stringer.endObject(); } catch (JSONException e) { throw new IOException(e);
Enhancements for ENG-<I>. The catalog checksum is sent with the snapshot invocation buffers and compared by the WAN agent.
VoltDB_voltdb
train
09d3b8f273c5e60a915eb14f86bee7e2bbd61fc1
diff --git a/lib/entry/index.js b/lib/entry/index.js index <HASH>..<HASH> 100644 --- a/lib/entry/index.js +++ b/lib/entry/index.js @@ -1297,8 +1297,7 @@ function mergeQueue(dst, src, orig) { // {{{2 var d = dst[key]; -// if (typeof s === 'object' && typeof dst[key]) { TODO: Check, test - if (d && typeof d === typeof s === 'object') { + if (d && typeof d === 'object' && typeof s === 'object') { mergeQueue(d, s, orig && orig[key]); continue; }
Bugfix: Invalid use of `=== something ===`
OpenSmartEnvironment_ose
train
0bcb0d0e3ce395d42a5b1dae61b0090791ee018d
diff --git a/discord/app_commands/commands.py b/discord/app_commands/commands.py index <HASH>..<HASH> 100644 --- a/discord/app_commands/commands.py +++ b/discord/app_commands/commands.py @@ -126,7 +126,7 @@ else: CheckInputParameter = Union['Command[Any, ..., Any]', 'ContextMenu', CommandCallback, ContextMenuCallback] VALID_SLASH_COMMAND_NAME = re.compile(r'^[\w-]{1,32}$') -VALID_CONTEXT_MENU_NAME = re.compile(r'^[\w\s-]{1,32}$') +VALID_CONTEXT_MENU_NAME = re.compile(r'^[?!\w\s-]{1,32}$') CAMEL_CASE_REGEX = re.compile(r'(?<!^)(?=[A-Z])')
Allow context menus have ? and ! in their name
Rapptz_discord.py
train
2ec27dc5c4b6178448f933e0dd35ecb8ed0ff480
diff --git a/lib/zold/node/front.rb b/lib/zold/node/front.rb index <HASH>..<HASH> 100644 --- a/lib/zold/node/front.rb +++ b/lib/zold/node/front.rb @@ -129,7 +129,7 @@ module Zold remotes: settings.remotes.all.count, farm: settings.farm.to_json, entrance: settings.entrance.to_json, - date: `date --iso-8601=seconds -u`.strip, + date: Time.now.utc.iso8601, hours_alive: ((Time.now - settings.start) / (60 * 60)).round(2), home: 'https://www.zold.io' ) diff --git a/lib/zold/score.rb b/lib/zold/score.rb index <HASH>..<HASH> 100644 --- a/lib/zold/score.rb +++ b/lib/zold/score.rb @@ -138,6 +138,8 @@ module Zold suffixes: @suffixes, strength: @strength, hash: value.zero? ? nil : hash, + expired: expired?, + valid: valid?, minutes: ((Time.now - @time) / 60).to_i } end @@ -159,6 +161,7 @@ module Zold strength: @strength ) return score if score.valid? + return Score.new(Time.now, @host, @port, @invoice, [], strength: @strength) if score.expired? idx += 1 end end diff --git a/test/test_score.rb b/test/test_score.rb index <HASH>..<HASH> 100644 --- a/test/test_score.rb +++ b/test/test_score.rb @@ -38,6 +38,17 @@ class TestScore < Minitest::Test assert_equal(64, score.hash.length) end + def test_drops_to_zero_when_expired + score = Zold::Score.new( + Time.now - 24 * 60 * 60, + 'some-host', 9999, 'NOPREFIX@ffffffffffffffff', + strength: 50 + ).next + assert(score.valid?) + assert(!score.expired?) + assert_equal(0, score.value) + end + def test_validates_wrong_score score = Zold::Score.new( Time.parse('2017-07-19T21:24:51Z'), @@ -108,14 +119,6 @@ class TestScore < Minitest::Test assert(!score.expired?) end - def test_expires_correctly - score = Zold::Score.new( - Time.now - 100 * 60 * 60, 'localhost', 443, - 'NOPREFIX@ffffffffffffffff', strength: 2 - ).next.next.next - assert(score.expired?) - end - def test_dont_expire_correctly score = Zold::Score.new( Time.now - 10 * 60 * 60, 'localhost', 443,
#<I> drop score to zero when it is expired
zold-io_zold
train
67f137c5f0f69a37a3587e399b5cb86865cae603
diff --git a/python/proton/reactor.py b/python/proton/reactor.py index <HASH>..<HASH> 100644 --- a/python/proton/reactor.py +++ b/python/proton/reactor.py @@ -20,6 +20,7 @@ from __future__ import absolute_import from ._reactor import Container, ApplicationEvent, EventInjector, Handler,\ + LinkOption, ReceiverOption, SenderOption,\ AtLeastOnce, AtMostOnce, DynamicNodeProperties, Filter, Selector, DurableSubscription, Copy, Move,\ Reactor @@ -28,6 +29,9 @@ __all__ = [ 'ApplicationEvent', 'EventInjector', 'Handler', + 'LinkOption', + 'ReceiverOption', + 'SenderOption', 'AtLeastOnce', 'AtMostOnce', 'DynamicNodeProperties',
PROTON-<I>: [Python] Added back some needed symbols removed previously
apache_qpid-proton
train
898d1d1b509b1c443503330558f907ac99bdbdfe
diff --git a/lib/db/cortex.php b/lib/db/cortex.php index <HASH>..<HASH> 100644 --- a/lib/db/cortex.php +++ b/lib/db/cortex.php @@ -865,7 +865,7 @@ class Cortex extends Cursor { foreach ($result as &$record) { // factory new mappers $record = $this->mapper->factory($record); - unset($record, $mapper); + unset($record); } return $result; } elseif (!empty($this->preBinds) && !$count) { @@ -889,6 +889,24 @@ class Cortex extends Cursor { } /** + * use a raw sql query to find results and factory them into models + * @param $sql + * @param null $args + * @param int $ttl + * @return CortexCollection + */ + protected function findByRawSQL($query, $args=NULL, $ttl=0) { + $result = $this->db->exec($query, $args, $ttl); + $cx = new CortexCollection(); + foreach($result as $row) { + $new = $this->factory($row); + $cx->add($new); + unset($new); + } + return $cx; + } + + /** * Retrieve first object that satisfies criteria * @param null $filter * @param array $options
add method to use find with a raw sql query
ikkez_f3-cortex
train
d1e7446f6f2de40095b34d21216acb936f10d68f
diff --git a/psiturk/models.py b/psiturk/models.py index <HASH>..<HASH> 100644 --- a/psiturk/models.py +++ b/psiturk/models.py @@ -34,7 +34,7 @@ class Participant(Base): endhit = Column(DateTime) bonus = Column(Float, default = 0) status = Column(Integer, default = 1) - if 'postgres' in config.get('Database Parameters', 'database_url'): + if 'postgres://' in config.get('Database Parameters', 'database_url').lower(): datastring = Column(Text) else: datastring = Column(Text(4294967295))
ignore case on config option, what about if database table or username contains 'postgres'
NYUCCL_psiTurk
train
dedb577faf9662739b6d820a6a9ccb583ab53c16
diff --git a/pyqode/python/modes/autoindent.py b/pyqode/python/modes/autoindent.py index <HASH>..<HASH> 100644 --- a/pyqode/python/modes/autoindent.py +++ b/pyqode/python/modes/autoindent.py @@ -29,8 +29,11 @@ class PyAutoIndentMode(AutoIndentMode): if self._at_block_start(cursor, line): return pre, post # return pressed in comments + c2 = QTextCursor(cursor) + if c2.atBlockEnd(): + c2.movePosition(c2.Left) if (self._helper.is_comment_or_string( - cursor, formats=['comment', 'docstring']) or + c2, formats=['comment', 'docstring']) or fullline.endswith('"""')): if line.strip().startswith("#") and column != len(fullline): post += '# '
AutoIndent in comment: fix bug if cursor at block end
pyQode_pyqode.python
train
e2f7f0ce399bd3b09899bccf333fa8dc37fe9757
diff --git a/src/classes/diff/chunk.php b/src/classes/diff/chunk.php index <HASH>..<HASH> 100644 --- a/src/classes/diff/chunk.php +++ b/src/classes/diff/chunk.php @@ -49,12 +49,13 @@ class vcsDiffChunk extends vcsBaseStruct * @param array $chunks * @return void */ - public function __construct( $start = null, $startRange = 1, $end = null, $endRange = 1 ) + public function __construct( $start = null, $startRange = 1, $end = null, $endRange = 1, array $lines = array() ) { $this->start = (int) $start; $this->startRange = (int) $startRange; $this->end = (int) $end; $this->endRange = (int) $endRange; + $this->lines = $lines; } } diff --git a/src/classes/wrapper/svn-cli/file.php b/src/classes/wrapper/svn-cli/file.php index <HASH>..<HASH> 100644 --- a/src/classes/wrapper/svn-cli/file.php +++ b/src/classes/wrapper/svn-cli/file.php @@ -121,6 +121,22 @@ class vcsSvnCliFile extends vcsSvnCliResource implements vcsFile, vcsBlameable, */ public function getDiff( $version, $current = null ) { + $current = ( $current === null ) ? $this->getVersionString() : $current; + + if ( ( $diff = vcsCache::get( $this->path, $version, 'diff' ) ) === false ) + { + // Refetch the basic contentrmation, and cache it. + $process = new vcsSvnCliProcess(); + $process->argument( '-r' . $version . ':' . $current ); + + // Execute command + $return = $process->argument( 'diff' )->argument( $this->root . 
$this->path )->execute(); + $parser = new vcsUnifiedDiffParser(); + $diff = $parser->parseString( $process->stdoutOutput ); + vcsCache::cache( $this->path, $version, 'diff', $diff ); + } + + return $diff; } } diff --git a/tests/svn-cli/file.php b/tests/svn-cli/file.php index <HASH>..<HASH> 100644 --- a/tests/svn-cli/file.php +++ b/tests/svn-cli/file.php @@ -207,5 +207,28 @@ class vcsSvnCliFileTests extends vcsTestCase } catch ( vcsNoSuchVersionException $e ) { /* Expected */ } } + + public function testGetFileDiff() + { + $repository = new vcsSvnCliCheckout( $this->tempDir ); + $repository->initialize( 'file://' . realpath( __DIR__ . '/../data/svn' ) ); + $file = new vcsSvnCliFile( $this->tempDir, '/file' ); + + $diff = $file->getDiff( 1 ); + + + $this->assertEquals( + array( + new vcsDiffChunk( + 1, 1, 1, 2, + array( + new vcsDiffLine( 3, 'Some test file' ), + new vcsDiffLine( 1, 'A second line, in a later revision' ), + ) + ), + ), + $diff[0]->chunks + ); + } }
- Implemented: Diff in SVN Cli wrapper. # Wrapper is interface complete now.
Arbitracker_VCSWrapper
train
c44ef344e713bfeea0562aff7a67d5a896397de0
diff --git a/rinoh/structure.py b/rinoh/structure.py index <HASH>..<HASH> 100644 --- a/rinoh/structure.py +++ b/rinoh/structure.py @@ -9,7 +9,7 @@ from itertools import count, repeat from .draw import Line, LineStyle -from .flowable import GroupedFlowables, StaticGroupedFlowables +from .flowable import GroupedFlowables, StaticGroupedFlowables, PageBreak from .flowable import LabeledFlowable, GroupedLabeledFlowables from .flowable import Flowable, FlowableStyle, GroupedFlowablesStyle from .number import NumberStyle, Label, format_number @@ -20,7 +20,6 @@ from .reference import REFERENCE, TITLE, PAGE from .reference import Variable, PAGE_NUMBER, NUMBER_OF_PAGES from .reference import SECTION_NUMBER, SECTION_TITLE from .text import SingleStyledText, MixedStyledText, Tab -from .dimension import PT from .style import PARENT_STYLE @@ -32,7 +31,8 @@ __all__ = ['Section', 'Heading', 'ListStyle', 'List', 'ListItem', 'FieldList', class SectionSytyle(GroupedFlowablesStyle): - attributes = {'show_in_toc': True} + attributes = {'show_in_toc': True, + 'new_page': False} # TODO: EVEN, ODD class Section(Referenceable, StaticGroupedFlowables): @@ -52,6 +52,12 @@ class Section(Referenceable, StaticGroupedFlowables): def section(self): return self + def flowables(self, document): + if self.get_style('new_page', document): + yield PageBreak() + for flowable in super().flowables(document): + yield flowable + def show_in_toc(self, document): show_in_toc = self.get_style('show_in_toc', document) try: diff --git a/rinohlib/stylesheets/ieee.py b/rinohlib/stylesheets/ieee.py index <HASH>..<HASH> 100644 --- a/rinohlib/stylesheets/ieee.py +++ b/rinohlib/stylesheets/ieee.py @@ -117,6 +117,9 @@ styles('affiliation', base='author', space_below=6*PT + 12*PT) +styles('chapter', + new_page=True) + styles('heading level 1', typeface=Var('ieee_family').serif, font_weight=REGULAR, diff --git a/rinohlib/stylesheets/matcher.py b/rinohlib/stylesheets/matcher.py index <HASH>..<HASH> 100644 --- 
a/rinohlib/stylesheets/matcher.py +++ b/rinohlib/stylesheets/matcher.py @@ -53,6 +53,8 @@ matcher('author', Paragraph.like('author')) matcher('affiliation', Paragraph.like('affiliation')) +matcher('chapter', Section.like(level=1)) + for i in range(1, 6): matcher('heading level {}'.format(i), Heading.like(level=i)) matcher('unnumbered heading level {}'.format(i),
Allow starting sections on a new page
brechtm_rinohtype
train
c353794dff580c8aa63b357b2857c1fadff3104b
diff --git a/activerecord/CHANGELOG b/activerecord/CHANGELOG index <HASH>..<HASH> 100644 --- a/activerecord/CHANGELOG +++ b/activerecord/CHANGELOG @@ -1,5 +1,7 @@ *SVN* +* Fixed that pessimistic locking you reference the quoted table name (Josh Susser) [#67] + * Fixed that change_column should be able to use :null => true on a field that formerly had false [Nate Wiger] [#26] * Added that the MySQL adapter should map integer to either smallint, int, or bigint depending on the :limit just like PostgreSQL [DHH] diff --git a/activerecord/lib/active_record/locking/optimistic.rb b/activerecord/lib/active_record/locking/optimistic.rb index <HASH>..<HASH> 100644 --- a/activerecord/lib/active_record/locking/optimistic.rb +++ b/activerecord/lib/active_record/locking/optimistic.rb @@ -78,7 +78,7 @@ module ActiveRecord begin affected_rows = connection.update(<<-end_sql, "#{self.class.name} Update with optimistic locking") - UPDATE #{self.class.table_name} + UPDATE #{self.class.quoted_table_name} SET #{quoted_comma_pair_list(connection, attributes_with_quotes(false, false, attribute_names))} WHERE #{self.class.primary_key} = #{quote_value(id)} AND #{self.class.quoted_locking_column} = #{quote_value(previous_value)} diff --git a/activerecord/test/cases/locking_test.rb b/activerecord/test/cases/locking_test.rb index <HASH>..<HASH> 100644 --- a/activerecord/test/cases/locking_test.rb +++ b/activerecord/test/cases/locking_test.rb @@ -2,6 +2,7 @@ require "cases/helper" require 'models/person' require 'models/reader' require 'models/legacy_thing' +require 'models/reference' class LockWithoutDefault < ActiveRecord::Base; end @@ -15,7 +16,7 @@ class ReadonlyFirstNamePerson < Person end class OptimisticLockingTest < ActiveRecord::TestCase - fixtures :people, :legacy_things + fixtures :people, :legacy_things, :references # need to disable transactional fixtures, because otherwise the sqlite3 # adapter (at least) chokes when we try and change the schema in the middle @@ -138,6 +139,12 @@ 
class OptimisticLockingTest < ActiveRecord::TestCase end end end + + def test_quote_table_name + ref = references(:michael_magician) + ref.favourite = !ref.favourite + assert ref.save + end private diff --git a/activerecord/test/schema/schema.rb b/activerecord/test/schema/schema.rb index <HASH>..<HASH> 100644 --- a/activerecord/test/schema/schema.rb +++ b/activerecord/test/schema/schema.rb @@ -206,6 +206,7 @@ ActiveRecord::Schema.define do t.integer :person_id t.integer :job_id t.boolean :favourite + t.integer :lock_version, :default => 0 end create_table :minimalistics, :force => true do |t|
Fixed that pessimistic locking you reference the quoted table name (Josh Susser) [#<I> state:resolved]
rails_rails
train
ab99872e140576ef5e0fb7c0a9dee70cc229df53
diff --git a/astroid/brain/brain_collections.py b/astroid/brain/brain_collections.py index <HASH>..<HASH> 100644 --- a/astroid/brain/brain_collections.py +++ b/astroid/brain/brain_collections.py @@ -25,21 +25,21 @@ def _deque_mock(): class deque(object): maxlen = 0 def __init__(self, iterable=None, maxlen=None): - self.iterable = iterable + self.iterable = iterable or [] def append(self, x): pass def appendleft(self, x): pass def clear(self): pass def count(self, x): return 0 def extend(self, iterable): pass def extendleft(self, iterable): pass - def pop(self): pass - def popleft(self): pass + def pop(self): return self.iterable[0] + def popleft(self): return self.iterable[0] def remove(self, value): pass - def reverse(self): pass - def rotate(self, n=1): pass + def reverse(self): return reversed(self.iterable) + def rotate(self, n=1): return self def __iter__(self): return self def __reversed__(self): return self.iterable[::-1] - def __getitem__(self, index): pass + def __getitem__(self, index): return self.iterable[index] def __setitem__(self, index, value): pass def __delitem__(self, index): pass def __bool__(self): return bool(self.iterable)
Adapt a couple of deque's brain methods Close PyCQA/pylint#<I>
PyCQA_astroid
train
065c4827706eb13597649a2fa0dba79e41024fc8
diff --git a/resources/lang/fi-FI/forms.php b/resources/lang/fi-FI/forms.php index <HASH>..<HASH> 100644 --- a/resources/lang/fi-FI/forms.php +++ b/resources/lang/fi-FI/forms.php @@ -54,6 +54,7 @@ return [ 'message-help' => 'You may also use Markdown.', 'occurred_at' => 'When did this incident occur?', 'notify_subscribers' => 'Ilmoita tilaajille?', + 'notify_disabled' => 'Due to scheduled maintenance, notifications about this incident or its components will be suppressed.', 'visibility' => 'Tapahtuman näkyvyys', 'stick_status' => 'Stick Incident', 'stickied' => 'Stickied', @@ -147,20 +148,21 @@ return [ 'settings' => [ // Application setup 'app-setup' => [ - 'site-name' => 'Sivuston Nimi', - 'site-url' => 'Sivuston URL-osoite', - 'display-graphs' => 'Näyttää kaaviot tila-sivulla?', - 'about-this-page' => 'Tietoa tästä sivustosta', - 'days-of-incidents' => 'Monenko päivän ajalta tapaukset näytetään?', - 'time_before_refresh' => 'Status page refresh rate (in seconds).', - 'banner' => 'Bannerikuva', - 'banner-help' => 'On suositeltavaa, ettet lataa yli 930px leveitä kuvia.', - 'subscribers' => 'Salli käyttäjien tilata sähköpostitilaukset?', - 'skip_subscriber_verification' => 'Skip verifying of users? 
(Be warned, you could be spammed)', - 'automatic_localization' => 'Lokalisoidaanko statussivu automaattisesti kävijän kielen mukaan?', - 'enable_external_dependencies' => 'Enable Third Party Dependencies (Google Fonts, Trackers, etc...)', - 'show_timezone' => 'Näytä aikavyöhyke tilat sivulla.', - 'only_disrupted_days' => 'Only show days containing incidents in the timeline?', + 'site-name' => 'Sivuston Nimi', + 'site-url' => 'Sivuston URL-osoite', + 'display-graphs' => 'Näyttää kaaviot tila-sivulla?', + 'about-this-page' => 'Tietoa tästä sivustosta', + 'days-of-incidents' => 'Monenko päivän ajalta tapaukset näytetään?', + 'time_before_refresh' => 'Status page refresh rate (in seconds).', + 'banner' => 'Bannerikuva', + 'banner-help' => "On suositeltavaa, ettet lataa yli 930px leveitä kuvia.", + 'subscribers' => 'Salli käyttäjien tilata sähköpostitilaukset?', + 'suppress_notifications_in_maintenance' => 'Suppress notifications when incident occurs during maintenance period?', + 'skip_subscriber_verification' => 'Skip verifying of users? (Be warned, you could be spammed)', + 'automatic_localization' => 'Lokalisoidaanko statussivu automaattisesti kävijän kielen mukaan?', + 'enable_external_dependencies' => 'Enable Third Party Dependencies (Google Fonts, Trackers, etc...)', + 'show_timezone' => 'Näytä aikavyöhyke tilat sivulla.', + 'only_disrupted_days' => 'Only show days containing incidents in the timeline?', ], 'analytics' => [ 'analytics_google' => 'Google Analytics seurantakoodi',
New translations forms.php (Finnish)
CachetHQ_Cachet
train
2bb984185b0ba05fbfe3d9ddaa541f3e7906a3d5
diff --git a/core/src/main/java/com/graphhopper/reader/OSMReader.java b/core/src/main/java/com/graphhopper/reader/OSMReader.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/com/graphhopper/reader/OSMReader.java +++ b/core/src/main/java/com/graphhopper/reader/OSMReader.java @@ -368,7 +368,7 @@ public class OSMReader implements DataReader if (!Double.isNaN(firstLat) && !Double.isNaN(firstLon) && !Double.isNaN(lastLat) && !Double.isNaN(lastLon)) { double estimatedDist = distCalc.calcDist(firstLat, firstLon, lastLat, lastLon); - // Add artificial tag for the estamated distance and center + // Add artificial tag for the estimated distance and center way.setTag("estimated_distance", estimatedDist); way.setTag("estimated_center", new GHPoint((firstLat + lastLat) / 2, (firstLon + lastLon) / 2)); }
spelling mistake in the comments estamated --> estimated
graphhopper_graphhopper
train
92958113d1fcd579b511080fd072aec23b494a0d
diff --git a/pyemma/coordinates/tests/test_featurereader_and_tica.py b/pyemma/coordinates/tests/test_featurereader_and_tica.py index <HASH>..<HASH> 100644 --- a/pyemma/coordinates/tests/test_featurereader_and_tica.py +++ b/pyemma/coordinates/tests/test_featurereader_and_tica.py @@ -109,7 +109,8 @@ class TestFeatureReaderAndTICA(unittest.TestCase): partial.partial_fit(traj) np.testing.assert_allclose(partial.eigenvalues, ref.eigenvalues) - np.testing.assert_allclose(np.abs(partial.eigenvectors), np.abs(ref.eigenvectors), atol=1e-8) + # only compare first two eigenvectors, because we only have two metastable processes + np.testing.assert_allclose(np.abs(partial.eigenvectors[:2]), np.abs(ref.eigenvectors[:2]), atol=1e-8) if __name__ == "__main__": unittest.main()
[tica-test] compare only first (meta-stable) eigenvectors in test.
markovmodel_PyEMMA
train
f8ba39fd9b5fc3ffe51a2c226aefe08da643169f
diff --git a/node-tests/acceptance/build-test.js b/node-tests/acceptance/build-test.js index <HASH>..<HASH> 100644 --- a/node-tests/acceptance/build-test.js +++ b/node-tests/acceptance/build-test.js @@ -338,7 +338,7 @@ describe('Acceptance', function() { let output = yield build(app); // Verify we have the manifest - expect(output.manifest()).to.deep.equal(expectedManifests['eager']); + expect(output.manifest()).to.deep.equal(expectedManifests['eager-in-eager']); output.contains('assets/node-asset-manifest.js'); output.contains( diff --git a/node-tests/fixtures/expected-manifests.json b/node-tests/fixtures/expected-manifests.json index <HASH>..<HASH> 100644 --- a/node-tests/fixtures/expected-manifests.json +++ b/node-tests/fixtures/expected-manifests.json @@ -1,6 +1,10 @@ { "eager": { - "bundles": {} + "bundles": { + "eager": { + "assets": [] + } + } }, "lazy": { @@ -24,8 +28,21 @@ } }, + "eager-in-eager": { + "bundles": { + "eager": { + "assets": [] + }, + "eager-in-eager": { + "assets": [] + } + } + }, "eager-in-lazy": { "bundles": { + "eager-in-lazy": { + "assets": [] + }, "lazy": { "assets": [ { @@ -51,6 +68,9 @@ "lazy-in-eager": { "bundles": { + "eager": { + "assets": [] + }, "lazy-in-eager": { "assets": [ { diff --git a/node-tests/unit/engine-addon-test.js b/node-tests/unit/engine-addon-test.js index <HASH>..<HASH> 100644 --- a/node-tests/unit/engine-addon-test.js +++ b/node-tests/unit/engine-addon-test.js @@ -6,12 +6,15 @@ const expect = require('chai').expect; describe('engine-addon', function() { describe('updateFastBootManifest', function() { it('adds necessary vendorFiles to the manifest when lazyLoading is enabled', function() { + + /*eslint-disable*/ const addon = EngineAddon.extend({ name: 'testing', lazyLoading: { enabled: true, }, }); + /*eslint-enable*/ const manifest = { vendorFiles: ['one.js', 'two.js'] }; addon.updateFastBootManifest(manifest); @@ -28,12 +31,14 @@ describe('engine-addon', function() { }); it('add config/environment file to the 
manifest when lazyLoading is disabled', function() { + /*eslint-disable*/ const addon = EngineAddon.extend({ name: 'testing', lazyLoading: { enabled: false, }, }); + /*eslint-enable*/ const manifest = { vendorFiles: ['one.js', 'two.js'] }; addon.updateFastBootManifest(manifest);
update fixtures and disable eslint objects on root of ember objects
ember-engines_ember-engines
train
f87e68517ded513fe03b9bda82604c17356cb212
diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -10,7 +10,6 @@ }, "dependencies": { "alt": "0.17.1", - "babel": "5.6.23", "classnames": "2.1.3", "express": "4.13.1", "lodash": "3.10.0", @@ -26,6 +25,7 @@ }, "devDependencies": { "autoprefixer-loader": "^2.0.0", + "babel": "5.6.23", "babel-core": "^5.7.4", "babel-eslint": "^3.1.23", "babel-loader": "^5.3.2", diff --git a/src/components/DebugEditor.js b/src/components/DebugEditor.js index <HASH>..<HASH> 100644 --- a/src/components/DebugEditor.js +++ b/src/components/DebugEditor.js @@ -1,5 +1,3 @@ -import 'babel/polyfill' - import React from 'react/addons' import EditorActions from '../flux/EditorActions' diff --git a/src/components/Editor.js b/src/components/Editor.js index <HASH>..<HASH> 100644 --- a/src/components/Editor.js +++ b/src/components/Editor.js @@ -1,5 +1,3 @@ -import 'babel/polyfill' - import React from 'react/addons' import classNames from 'classnames' import Spinner from 'react-spinkit' diff --git a/src/components/SharedCursorMixin.js b/src/components/SharedCursorMixin.js index <HASH>..<HASH> 100644 --- a/src/components/SharedCursorMixin.js +++ b/src/components/SharedCursorMixin.js @@ -1,5 +1,3 @@ -import 'babel/polyfill' - import EditorActions from '../flux/EditorActions' export default { diff --git a/src/components/TextInput.js b/src/components/TextInput.js index <HASH>..<HASH> 100644 --- a/src/components/TextInput.js +++ b/src/components/TextInput.js @@ -1,5 +1,3 @@ -import 'babel/polyfill' - import _ from 'lodash' import React from 'react/addons' import getEventKey from 'react/lib/getEventKey' diff --git a/src/core/EditorCommon.js b/src/core/EditorCommon.js index <HASH>..<HASH> 100644 --- a/src/core/EditorCommon.js +++ b/src/core/EditorCommon.js @@ -1,4 +1,3 @@ -import 'babel/polyfill' import _ from 'lodash' import { BASE_CHAR, EOF } from './RichText' diff --git a/src/core/dom.js b/src/core/dom.js index <HASH>..<HASH> 100644 --- 
a/src/core/dom.js +++ b/src/core/dom.js @@ -1,5 +1,3 @@ -import 'babel/polyfill' - export function getNumericStyleProperty(style, prop) { return parseInt(style.getPropertyValue(prop), 10) } diff --git a/src/core/utils.js b/src/core/utils.js index <HASH>..<HASH> 100644 --- a/src/core/utils.js +++ b/src/core/utils.js @@ -1,4 +1,3 @@ -import 'babel/polyfill' import invariant from 'react/lib/invariant' // http://stackoverflow.com/a/4156156/430128 diff --git a/src/flux/EditorStore.js b/src/flux/EditorStore.js index <HASH>..<HASH> 100644 --- a/src/flux/EditorStore.js +++ b/src/flux/EditorStore.js @@ -1,4 +1,3 @@ -import 'babel/polyfill' import _ from 'lodash' import invariant from 'react/lib/invariant' diff --git a/src/server.js b/src/server.js index <HASH>..<HASH> 100644 --- a/src/server.js +++ b/src/server.js @@ -1,3 +1,5 @@ +import 'babel/polyfill' + import _ from 'lodash' import fs from 'fs' import path from 'path'
Remove babel polyfill from source files ES6 polyfills should always be added only by the consuming project, not by libraries.
ritzyed_ritzy
train
fa480403c75c90880a6bc79bab9e10b012379006
diff --git a/integration/build/build_userns_linux_test.go b/integration/build/build_userns_linux_test.go index <HASH>..<HASH> 100644 --- a/integration/build/build_userns_linux_test.go +++ b/integration/build/build_userns_linux_test.go @@ -12,6 +12,7 @@ import ( "github.com/docker/docker/api/types" "github.com/docker/docker/integration/internal/container" + "github.com/docker/docker/pkg/jsonmessage" "github.com/docker/docker/pkg/stdcopy" "github.com/docker/docker/testutil/daemon" "github.com/docker/docker/testutil/fakecontext" @@ -66,17 +67,10 @@ func TestBuildUserNamespaceValidateCapabilitiesAreV2(t *testing.T) { }) assert.NilError(t, err) defer resp.Body.Close() - buf := make([]byte, 1024) - for { - n, err := resp.Body.Read(buf) - if err != nil && err != io.EOF { - t.Fatalf("Error reading ImageBuild response: %v", err) - break - } - if n == 0 { - break - } - } + + buf := bytes.NewBuffer(nil) + err = jsonmessage.DisplayJSONMessagesStream(resp.Body, buf, 0, false, nil) + assert.NilError(t, err) reader, err := clientUserRemap.ImageSave(ctx, []string{imageTag}) assert.NilError(t, err, "failed to download capabilities image") @@ -106,16 +100,9 @@ func TestBuildUserNamespaceValidateCapabilitiesAreV2(t *testing.T) { loadResp, err := clientNoUserRemap.ImageLoad(ctx, tarReader, false) assert.NilError(t, err, "failed to load image tar file") defer loadResp.Body.Close() - for { - n, err := loadResp.Body.Read(buf) - if err != nil && err != io.EOF { - t.Fatalf("Error reading ImageLoad response: %v", err) - break - } - if n == 0 { - break - } - } + buf = bytes.NewBuffer(nil) + err = jsonmessage.DisplayJSONMessagesStream(loadResp.Body, buf, 0, false, nil) + assert.NilError(t, err) cid := container.Run(ctx, t, clientNoUserRemap, container.WithImage(imageTag),
TestBuildUserNamespaceValidateCapabilitiesAreV2: verify build completed Check if the `docker build` completed successfully before continuing.
moby_moby
train
7cd9ba8f10eca0d6dfa141ad965114e99542a8e4
diff --git a/go/vt/vttablet/endtoend/framework/client.go b/go/vt/vttablet/endtoend/framework/client.go index <HASH>..<HASH> 100644 --- a/go/vt/vttablet/endtoend/framework/client.go +++ b/go/vt/vttablet/endtoend/framework/client.go @@ -162,7 +162,7 @@ func (client *QueryClient) ReadTransaction(dtid string) (*querypb.TransactionMet // SetServingType is for testing transitions. // It currently supports only master->replica and back. func (client *QueryClient) SetServingType(tabletType topodatapb.TabletType) error { - err := client.server.SetServingType(tabletType, time.Time{}, true, "") + err := client.server.SetServingType(tabletType, time.Time{}, true /* serving */, "" /* reason */) return err } diff --git a/go/vt/vttablet/tabletmanager/replmanager_test.go b/go/vt/vttablet/tabletmanager/replmanager_test.go index <HASH>..<HASH> 100644 --- a/go/vt/vttablet/tabletmanager/replmanager_test.go +++ b/go/vt/vttablet/tabletmanager/replmanager_test.go @@ -58,7 +58,7 @@ func TestReplManagerSetTabletType(t *testing.T) { tm.replManager.ticks.Stop() } -func TestReplManagerSetReplicaStopped(t *testing.T) { +func TestReplManagerSetReplicationStopped(t *testing.T) { defer func(saved bool) { *mysqlctl.DisableActiveReparents = saved }(*mysqlctl.DisableActiveReparents) *mysqlctl.DisableActiveReparents = true
vttablet: more review comments
vitessio_vitess
train
05d52342c39d5f7ffef6323019612ff51120ddff
diff --git a/parameter.go b/parameter.go index <HASH>..<HASH> 100644 --- a/parameter.go +++ b/parameter.go @@ -22,6 +22,9 @@ const ( // FormParameterKind = indicator of Request parameter type "form" FormParameterKind + // MultiPartFormParameterKind = indicator of Request parameter type "multipart/form-data" + MultiPartFormParameterKind + // CollectionFormatCSV comma separated values `foo,bar` CollectionFormatCSV = CollectionFormat("csv") @@ -108,6 +111,11 @@ func (p *Parameter) beForm() *Parameter { return p } +func (p *Parameter) beMultiPartForm() *Parameter { + p.data.Kind = MultiPartFormParameterKind + return p +} + // Required sets the required field and returns the receiver func (p *Parameter) Required(required bool) *Parameter { p.data.Required = required diff --git a/web_service.go b/web_service.go index <HASH>..<HASH> 100644 --- a/web_service.go +++ b/web_service.go @@ -165,6 +165,18 @@ func FormParameter(name, description string) *Parameter { return p } +// MultiPartFormParameter creates a new Parameter of kind Form (using multipart/form-data) for documentation purposes. +// It is initialized as required with string as its DataType. +func (w *WebService) MultiPartFormParameter(name, description string) *Parameter { + return MultiPartFormParameter(name, description) +} + +func MultiPartFormParameter(name, description string) *Parameter { + p := &Parameter{&ParameterData{Name: name, Description: description, Required: false, DataType: "string"}} + p.beMultiPartForm() + return p +} + // Route creates a new Route using the RouteBuilder and add to the ordered list of Routes. func (w *WebService) Route(builder *RouteBuilder) *WebService { w.routesLock.Lock()
support multipart/form-data (#<I>)
emicklei_go-restful
train
508baaf243786a2b9663153c9b51b4967cc71354
diff --git a/run_tests.py b/run_tests.py index <HASH>..<HASH> 100755 --- a/run_tests.py +++ b/run_tests.py @@ -14,64 +14,42 @@ import shutil TMP_DIR = tempfile.mkdtemp('analects') -class ConfigExpand(object): - +def expand_config(config): '''Expand configuration into full form. Enables shorthand forms for analects config. - ''' - - def __init__(self, config): - ''' - :param config: the configuration for the session - :type config: dict - ''' + :param config: the configuration for the session + :type config: dict - self.config = config + repo_name: http://myrepo.com/repo.git - def expand(self): - return self.expand_repo_string_to_dict().expand_shell_command_after() - - def expand_shell_command_after(self): - ''' - iterate through session, windows, and panes for - ``shell_command_after``, if it is a string, turn to list. - ''' + to - def _expand(c): - if ('shell_command_after' in c and - isinstance(c['shell_command_after'], basestring)): - c['shell_command_after'] = [c['shell_command_after']] + repo_name: { repo: 'http://myrepo.com/repo.git' } - return c + also assures the repo is a :py:class:`dict`. + ''' - for directory, repos in self.config.iteritems(): - for repo, repo_data in repos.iteritems(): - repo_data = _expand(repo_data) + def _expand(repo_data): + if isinstance(repo_data, basestring): + repo_data = {'repo': repo_data} - return self + return repo_data - def expand_repo_string_to_dict(self): + def _expand_shell_command_after(c): ''' - repo_name: http://myrepo.com/repo.git - - to - - repo_name: { repo: 'http://myrepo.com/repo.git' } - - also assures the repo is a :py:class:`dict`. + iterate through session, windows, and panes for + ``shell_command_after``, if it is a string, turn to list. 
''' + if ('shell_command_after' in c and + isinstance(c['shell_command_after'], basestring)): + c['shell_command_after'] = [c['shell_command_after']] - def _expand(repo_data): - if isinstance(repo_data, basestring): - repo_data = {'repo': repo_data} - - return repo_data - - for directory, repos in self.config.iteritems(): - for repo, repo_data in repos.iteritems(): - self.config[directory][repo] = _expand(repo_data) + for directory, repos in config.iteritems(): + for repo, repo_data in repos.iteritems(): + config[directory][repo] = _expand(repo_data) + repo_data = _expand_shell_command_after(repo_data) - return self + return config class ConfigTestCaseBase(unittest.TestCase): @@ -258,22 +236,21 @@ class ConfigExpandTestCase(ConfigTestCaseBase): self.maxDiff = None - config = ConfigExpand(self.config_dict).expand().config + config = expand_config(self.config_dict) self.assertDictEqual(config, self.config_dict_expanded) class ConfigToObjectTestCase(ConfigTestCaseBase): + '''create an individual dictionary for each repository''' def setUp(self): SAMPLECONFIG_LIST = [ { 'name': None, - 'repo_parent_dir': None, - 'repo_dir': None, - 'repo_vcs_uri': None, - 'rev': None, - 'remotes': [], + 'parent_path': None, + 'remote_location': None, + 'remotes': [] } ]
lol, turn config expansion into definition
vcs-python_vcspull
train
7c3899a63e9a618185560b467d86f62fca9ab2cc
diff --git a/src/base/connectionView.js b/src/base/connectionView.js index <HASH>..<HASH> 100644 --- a/src/base/connectionView.js +++ b/src/base/connectionView.js @@ -394,7 +394,6 @@ class ConnectionView extends BaseView { [Number.MIN_VALUE, secondsPerReleaseToReleasesPerTick(10)], [1, secondsPerReleaseToReleasesPerTick(7)], [10, secondsPerReleaseToReleasesPerTick(5)], - [100, linearRatio * 100], ]; if (0 < maxVolume) { this.rateMap.push([100, 100 * linearRatio]);
Fix bug/imperfection introduced by previous commit where rateMap has two entries for volume <I>
Netflix_vizceral
train
b706f60959834e120c29df2005e6e54617ed789d
diff --git a/dvc/executor.py b/dvc/executor.py index <HASH>..<HASH> 100644 --- a/dvc/executor.py +++ b/dvc/executor.py @@ -10,7 +10,7 @@ class ExecutorError(DvcException): class Executor: @staticmethod - def exec_cmd(cmd, stdout_file=None, stderr_file=None, cwd=None): + def exec_cmd(cmd, stdout_file=None, stderr_file=None, cwd=None, shell=False): stdout, stdout_fd = Executor.output_file(stdout_file) stderr, stderr_fd = Executor.output_file(stderr_file) @@ -18,7 +18,8 @@ class Executor: p = subprocess.Popen(cmd, cwd=cwd, stdout=stdout, - stderr=stderr) + stderr=stderr, + shell=False) p.wait() out, err = map(lambda s: s.decode().strip('\n\r') if s else '', p.communicate()) diff --git a/dvc/runtime.py b/dvc/runtime.py index <HASH>..<HASH> 100644 --- a/dvc/runtime.py +++ b/dvc/runtime.py @@ -40,7 +40,7 @@ class Runtime(object): return False # It is definitely not the best way to check a symlink. - code, output, _ = Executor.exec_cmd(["dir", path]) + code, output, _ = Executor.exec_cmd(["dir", path], shell=True) if code != 0: return False diff --git a/dvc/utils.py b/dvc/utils.py index <HASH>..<HASH> 100644 --- a/dvc/utils.py +++ b/dvc/utils.py @@ -20,9 +20,7 @@ def cached_property(f): def rmtree(dir): '''Cross platform rmtree()''' if os.name == 'nt': - if not os.access(dir, os.W_OK): + if os.path.exists(dir) and not os.access(dir, os.W_OK): os.chmod(dir, stat.S_IWUSR) - else: - raise shutil.rmtree(dir, ignore_errors=True) diff --git a/tests/basic_env.py b/tests/basic_env.py index <HASH>..<HASH> 100644 --- a/tests/basic_env.py +++ b/tests/basic_env.py @@ -71,7 +71,7 @@ class DirHierarchyEnvironment(BasicEnvironment): self.settings = Settings([], self._git, self._config) self.dir1 = os.path.join('dir1') - self.dir11 = os.path.join('dir1/dir11') + self.dir11 = os.path.join('dir1', 'dir11') self.dir2 = os.path.join('dir2') os.mkdir('cache')
Windows symlink: exec cmd as shell option; small os.path.join() test fix
iterative_dvc
train
c5ce38fbce379331213a503cb2ba208d6598fca5
diff --git a/lib/plugins/lqip-blurhash.js b/lib/plugins/lqip-blurhash.js index <HASH>..<HASH> 100644 --- a/lib/plugins/lqip-blurhash.js +++ b/lib/plugins/lqip-blurhash.js @@ -22,7 +22,11 @@ class LqipBlurhashPlugin { // taken from https://github.com/google/eleventy-high-performance-blog/blob/5ed39db7fd3f21ae82ac1a8e833bf283355bd3d0/_11ty/blurry-placeholder.js#L74-L92 let bitmapHeight = targetPixels / aspectRatio; bitmapHeight = Math.sqrt(bitmapHeight); - const bitmapWidth = targetPixels / bitmapHeight; + let bitmapWidth = targetPixels / bitmapHeight; + + // Blurhash has a limit of 9 "components" + bitmapHeight = Math.min(9, Math.round(bitmapHeight)); + bitmapWidth = Math.min(9, Math.round(bitmapWidth)); return { width: Math.round(bitmapWidth), height: Math.round(bitmapHeight) }; }
Limit Blurhash size to allowed values Has a limit of so called "components" (basically pixels) to be between 1 and 9.
kaliber5_ember-responsive-image
train
ad28751b4e3fc5b1805bc9515a18f0c692c3da66
diff --git a/test/unit/functional/authorize.spec.js b/test/unit/functional/authorize.spec.js index <HASH>..<HASH> 100644 --- a/test/unit/functional/authorize.spec.js +++ b/test/unit/functional/authorize.spec.js @@ -9,6 +9,32 @@ var Promise = require('promise'); describe('isAuthorized', function() { + it('should run authorize function before call function', function(done) { + var order = [] + + var routes = new R([{ + route: 'list.push', + call: function(callPath, args) { + order.push("call") + return [ + { path: ['list', 0], value: args[0] }, + { path: ['list', 'length'], value: 1 } + ]; + }, + authorize: function() { + order.push("auth") + return true; + } + }]); + + routes.call(['list','push'], ["hello"]). + doAction(function(res) { + expect(order).to.deep.equals(["auth", "call"]) + }, noOp, noOp). + subscribe(noOp, done, done); + }); + + it('should return an error for the pathSet if unauthorized, sync', function(done) { var routes = [{ route: 'lists[{keys:ids}]',
added test for authorize() function being called before call() function
Netflix_falcor-router
train
60414255598428787071f5e8cb2abf76ae6884fa
diff --git a/Services/AzineTwigSwiftMailer.php b/Services/AzineTwigSwiftMailer.php index <HASH>..<HASH> 100644 --- a/Services/AzineTwigSwiftMailer.php +++ b/Services/AzineTwigSwiftMailer.php @@ -2,6 +2,8 @@ namespace Azine\EmailBundle\Services; +use Symfony\Component\Routing\RequestContext; + use Symfony\Component\HttpFoundation\File\Exception\FileException; use Doctrine\ORM\EntityManager; @@ -45,6 +47,12 @@ class AzineTwigSwiftMailer extends TwigSwiftMailer implements TemplateTwigSwiftM protected $entityManager; /** + * + * @var RequestContext + */ + protected $routerContext; + + /** * @var email to use for "no-reply" */ protected $noReplyEmail; @@ -85,7 +93,8 @@ class AzineTwigSwiftMailer extends TwigSwiftMailer implements TemplateTwigSwiftM $this->entityManager = $entityManager; $this->noReplyEmail = $parameters[AzineEmailExtension::NO_REPLY][AzineEmailExtension::NO_REPLY_EMAIL_ADDRESS]; $this->noReplyName = $parameters[AzineEmailExtension::NO_REPLY][AzineEmailExtension::NO_REPLY_EMAIL_NAME]; - $this->currentHost = $router->getContext()->getHost(); + $this->routerContext = $router->getContext(); + $this->currentHost = $this->routerContext->getHost(); $this->encodedItemIdPattern = "/^cid:.*@/"; } @@ -136,7 +145,12 @@ class AzineTwigSwiftMailer extends TwigSwiftMailer implements TemplateTwigSwiftM // change the locale for the email-recipients if($emailLocale != null){ - $currentUserLocale = $this->translator->getLocale(); + $currentUserLocale = $this->translator->getLocale(); + + // change the router-context locale + $this->routerContext->setParameter("_locale", $emailLocale); + + // change the translator locale $this->translator->setLocale($emailLocale); } else { $emailLocale = $this->translator->getLocale(); @@ -168,6 +182,7 @@ class AzineTwigSwiftMailer extends TwigSwiftMailer implements TemplateTwigSwiftM // change the locale back to the users locale if(isset($currentUserLocale) && $currentUserLocale != null){ + $this->routerContext->setParameter("_locale", 
$currentUserLocale); $this->translator->setLocale($currentUserLocale); }
fix the locale for the generation of urls in templates
azine_email-bundle
train
d337dfdc715441011422b578e6cd86419ffd1074
diff --git a/library/client.js b/library/client.js index <HASH>..<HASH> 100644 --- a/library/client.js +++ b/library/client.js @@ -66,10 +66,10 @@ var client = function client(options) { this.gracefulReconnection = false; - this.logger.dev('created a new client instance on pid ' + process.pid); - this.logger.dev('memory rss: ' + process.memoryUsage().rss); - this.logger.dev('memory heap total: ' + process.memoryUsage().heapTotal); - this.logger.dev('memory heap used : ' + process.memoryUsage().heapUsed); + this.logger.dev('Created a new client instance on pid ' + process.pid); + this.logger.dev('Memory rss: ' + process.memoryUsage().rss); + this.logger.dev('Memory heap total: ' + process.memoryUsage().heapTotal); + this.logger.dev('Memory heap used : ' + process.memoryUsage().heapUsed); DBPath = (this.options.options && (typeof this.options.options.database != 'undefined')) ? this.options.options.database : './database'; @@ -222,6 +222,7 @@ client.prototype._handleMessage = function _handleMessage(message) { if (self.options.channels.length <= 0) { Joined = true; } + this.logger.dev('Joined ' + message.params[0]); break; case 'PART': @@ -240,6 +241,7 @@ client.prototype._handleMessage = function _handleMessage(message) { var index = Channels.indexOf(Utils.remHash(message.params[0]).toLowerCase()); if (index !== -1) { Channels.splice(index, 1); } Channels.reduce(function(a,b){if(a.indexOf(b)<0)a.push(b);return a;},[]); + this.logger.dev('Left ' + message.params[0]); break; case 'NOTICE': @@ -253,6 +255,7 @@ client.prototype._handleMessage = function _handleMessage(message) { if (message.params[1] === 'Login unsuccessful') { self.logger.event('disconnected'); self.emit('disconnected', message.params[1]); + this.logger.dev('Disconnect from server: Login unsuccessful.'); } } break; @@ -262,15 +265,18 @@ client.prototype._handleMessage = function _handleMessage(message) { if (message.params[1] === '+o') { 
self.moderators[message.params[0]].push(message.params[2].toLowerCase()); self.moderators[message.params[0]].reduce(function(a,b){if(a.indexOf(b)<0)a.push(b);return a;},[]); + this.logger.dev('Mod ' + message.params[0] + ' ' + message.params[2]); } else { var index = self.moderators[message.params[0]].indexOf(message.params[2].toLowerCase()); if (index >= 0) { self.moderators[message.params[0]].splice(index, 1); } self.moderators[message.params[0]].reduce(function(a,b){if(a.indexOf(b)<0)a.push(b);return a;},[]); + this.logger.dev('Unmod ' + message.params[0] + ' ' + message.params[2]); } } break; case 'RECONNECT': + this.logger.dev('Received RECONNECT from Twitch.'); self.fastReconnect(); break; @@ -406,6 +412,7 @@ client.prototype._handleMessage = function _handleMessage(message) { message.params[1] === 'You need to tell me who you want to grant mod status to.') || message.params[1] === 'Failed to start commercial.': + this.logger.dev('ERROR: ' + message.params[1]); CommandError = message.params[1]; setTimeout(function() { CommandError = ''; }, 300); break; @@ -713,7 +720,7 @@ client.prototype.connect = function connect() { client.prototype.fastReconnect = function fastReconnect() { var self = this; - self.logger.info('Reconnect request received from Twitch.'); + self.logger.info('Received RECONNECT request from Twitch.'); self.gracefulReconnection = true; diff --git a/library/socket.js b/library/socket.js index <HASH>..<HASH> 100644 --- a/library/socket.js +++ b/library/socket.js @@ -43,6 +43,7 @@ var createSocket = function createSocket(client, options, logger, port, host, ca var socket = Net.connect(port, host, function() { logger.event('connecting'); client.emit('connecting', host, port); + logger.dev('Connecting to ' + host + ' on port ' + port); callback(); }); @@ -70,6 +71,7 @@ var createSocket = function createSocket(client, options, logger, port, host, ca logger.error(Errors.get(err.code)); logger.event('disconnected'); client.emit('disconnected', 
Errors.get(err.code)); + logger.dev('Got disconnected from server: ' + Errors.get(err.code)); var connection = options.connection || {}; var reconnect = connection.reconnect || true; @@ -81,6 +83,7 @@ var createSocket = function createSocket(client, options, logger, port, host, ca if (interval >= 90000) { interval = 90000; } logger.info('Reconnecting in ' + (interval/1000) + ' seconds..'); + this.logger.dev('Reconnecting in ' + (interval/1000) + ' seconds..'); setTimeout(function(){ logger.event('reconnect');
Added more informations for dev.
twitch-irc_twitch-irc
train
e97ec0ef1c047d58e57b77e6bac1d799d5432763
diff --git a/src/main/java/net/openhft/chronicle/network/TcpEventHandler.java b/src/main/java/net/openhft/chronicle/network/TcpEventHandler.java index <HASH>..<HASH> 100755 --- a/src/main/java/net/openhft/chronicle/network/TcpEventHandler.java +++ b/src/main/java/net/openhft/chronicle/network/TcpEventHandler.java @@ -78,7 +78,10 @@ public class TcpEventHandler implements EventHandler { @Override public boolean action() throws InvalidEventHandlerException { - if (!sc.isOpen()) throw new InvalidEventHandlerException(); + if (!sc.isOpen()) { + handler.onEndOfConnection(); + throw new InvalidEventHandlerException(); + } try { int read = inBB.remaining() > 0 ? sc.read(inBB) : 1; diff --git a/src/main/java/net/openhft/chronicle/network/api/TcpHandler.java b/src/main/java/net/openhft/chronicle/network/api/TcpHandler.java index <HASH>..<HASH> 100755 --- a/src/main/java/net/openhft/chronicle/network/api/TcpHandler.java +++ b/src/main/java/net/openhft/chronicle/network/api/TcpHandler.java @@ -25,4 +25,7 @@ import net.openhft.chronicle.network.api.session.SessionDetailsProvider; @FunctionalInterface public interface TcpHandler { void process(Bytes in, Bytes out, SessionDetailsProvider sessionDetails); + + default void onEndOfConnection() { + } }
Add an onEndOfConnection callback.
OpenHFT_Chronicle-Network
train
43c70b1dde3144f5a59011275b8849954c662cc7
diff --git a/src/upload/adapters/AwsS3Adapter.php b/src/upload/adapters/AwsS3Adapter.php index <HASH>..<HASH> 100644 --- a/src/upload/adapters/AwsS3Adapter.php +++ b/src/upload/adapters/AwsS3Adapter.php @@ -142,6 +142,10 @@ class AwsS3Adapter extends UploadAdapter { return false; } + if ($this->isImage() && function_exists('getimagesize')) { + $this->imageSizeData = @getimagesize($this->fileTemp); + } + try { $stream = fopen($this->fileTemp, 'r+'); @@ -155,6 +159,28 @@ class AwsS3Adapter extends UploadAdapter { } /** + * Set Image Properties + * + * Uses GD to determine the width/height/type of image + * + * @param string $path + * @return array + */ + protected function getImageProperties($path = ''): array + { + $image = []; + if (false !== $this->imageSizeData) { + $types = [1 => 'gif', 2 => 'jpeg', 3 => 'png']; + + $image['width'] = $this->imageSizeData[0]; + $image['height'] = $this->imageSizeData[1]; + $image['type'] = isset($types[$this->imageSizeData[2]]) ? $types[$this->imageSizeData[2]] : 'unknown'; + } + + return $image; + } + + /** * * @param string $fileName * @return string @@ -201,4 +227,4 @@ class AwsS3Adapter extends UploadAdapter { return $newFileName; } -} \ No newline at end of file +}
Update AwsS3Adapter.php moved image property data
dmetri333_wiggum-services
train
d1e624da24aef3daa40811c20ab2c2df1902e874
diff --git a/MulticoreTSNE/__init__.py b/MulticoreTSNE/__init__.py index <HASH>..<HASH> 100644 --- a/MulticoreTSNE/__init__.py +++ b/MulticoreTSNE/__init__.py @@ -62,14 +62,12 @@ class MulticoreTSNE: path = os.path.dirname(os.path.realpath(__file__)) self.C = self.ffi.dlopen(path + "/libtsne_multicore.so") - def fit_transform(self, X): + def fit_transform(self, X, _y=None): assert X.ndim == 2, 'X should be 2D array.' - assert X.dtype == np.float64, 'Only double arrays are supported for now. Use .astype(np.float64) to convert.' - - if (X.flags['C_CONTIGUOUS'] is False): - print('Converting input to contiguous array...') - X = np.ascontiguousarray(X) + + # X may be modified, make a copy + X = np.array(X, dtype=float, order='C', copy=True) N, D = X.shape Y = np.zeros((N, self.n_components)) diff --git a/MulticoreTSNE/tests/test_base.py b/MulticoreTSNE/tests/test_base.py index <HASH>..<HASH> 100644 --- a/MulticoreTSNE/tests/test_base.py +++ b/MulticoreTSNE/tests/test_base.py @@ -45,3 +45,10 @@ class TestMulticoreTSNE(unittest.TestCase): X, y = self.Xy tsne = MulticoreTSNE(perplexity=X.shape[0], n_iter=100) tsne.fit_transform(X) + + def test_dont_change_x(self): + X = np.random.random((20, 4)) + X_orig = X.copy() + MulticoreTSNE(n_iter=400).fit_transform(X) + np.testing.assert_array_equal(X, X_orig) +
Python: Make a copy of X so the orig is not changed by the algo
DmitryUlyanov_Multicore-TSNE
train
29c364479a737dcb243fd23707f486a5602360e7
diff --git a/lib/Core/Site/Values/Location.php b/lib/Core/Site/Values/Location.php index <HASH>..<HASH> 100644 --- a/lib/Core/Site/Values/Location.php +++ b/lib/Core/Site/Values/Location.php @@ -3,6 +3,10 @@ namespace Netgen\EzPlatformSiteApi\Core\Site\Values; use eZ\Publish\API\Repository\Values\Content\LocationQuery; +use eZ\Publish\API\Repository\Values\Content\Query\Criterion\ContentTypeIdentifier; +use eZ\Publish\API\Repository\Values\Content\Query\Criterion\LocationId; +use eZ\Publish\API\Repository\Values\Content\Query\Criterion\LogicalAnd; +use eZ\Publish\API\Repository\Values\Content\Query\Criterion\ParentLocationId; use Netgen\EzPlatformSiteApi\API\Values\Location as APILocation; final class Location extends APILocation @@ -107,12 +111,24 @@ final class Location extends APILocation public function getChildren(array $contentTypeIdentifiers = [], $limit = 10) { $cacheId = $this->getChildrenCacheId($contentTypeIdentifiers, $limit); + $criteria = []; + $criteria[] = new ParentLocationId($this->innerLocation->id); + + if (!empty($contentTypeIdentifiers)) { + $criteria[] = new ContentTypeIdentifier($contentTypeIdentifiers); + } + + if (count($criteria) > 1) { + $criteria = new LogicalAnd($criteria); + } if (!array_key_exists($cacheId, $this->childrenCache)) { $this->childrenCache[$cacheId] = $this->site->getFindService()->findLocations( new LocationQuery( [ - // + 'filter' => $criteria, + 'sortClauses' => $this->innerLocation->getSortClauses(), + 'limit' => $limit, ] ) ); @@ -142,7 +158,7 @@ final class Location extends APILocation $this->internalParent = $this->site->getFindService()->findLocations( new LocationQuery( [ - // + 'filter' => new LocationId($this->innerLocation->parentLocationId), ] ) ); diff --git a/lib/Core/Site/Values/Node.php b/lib/Core/Site/Values/Node.php index <HASH>..<HASH> 100644 --- a/lib/Core/Site/Values/Node.php +++ b/lib/Core/Site/Values/Node.php @@ -3,6 +3,10 @@ namespace Netgen\EzPlatformSiteApi\Core\Site\Values; use 
eZ\Publish\API\Repository\Values\Content\LocationQuery; +use eZ\Publish\API\Repository\Values\Content\Query\Criterion\ContentTypeIdentifier; +use eZ\Publish\API\Repository\Values\Content\Query\Criterion\LocationId; +use eZ\Publish\API\Repository\Values\Content\Query\Criterion\LogicalAnd; +use eZ\Publish\API\Repository\Values\Content\Query\Criterion\ParentLocationId; use Netgen\EzPlatformSiteApi\API\Values\Node as APINode; final class Node extends APINode @@ -102,12 +106,24 @@ final class Node extends APINode public function getChildren(array $contentTypeIdentifiers = [], $limit = 10) { $cacheId = $this->getChildrenCacheId($contentTypeIdentifiers, $limit); + $criteria = []; + $criteria[] = new ParentLocationId($this->innerLocation->id); + + if (!empty($contentTypeIdentifiers)) { + $criteria[] = new ContentTypeIdentifier($contentTypeIdentifiers); + } + + if (count($criteria) > 1) { + $criteria = new LogicalAnd($criteria); + } if (!array_key_exists($cacheId, $this->childrenCache)) { $this->childrenCache[$cacheId] = $this->site->getFindService()->findLocations( new LocationQuery( [ - // + 'filter' => $criteria, + 'sortClauses' => $this->innerLocation->getSortClauses(), + 'limit' => $limit, ] ) ); @@ -137,7 +153,7 @@ final class Node extends APINode $this->internalParent = $this->site->getFindService()->findLocations( new LocationQuery( [ - // + 'filter' => new LocationId($this->innerLocation->parentLocationId), ] ) );
Add first (untested) impl. for parent and childred queries
netgen_ezplatform-site-api
train
1e354c4e64bdbef6adeec26deffc3eb8dec9f710
diff --git a/evaluators/segment_eval.py b/evaluators/segment_eval.py index <HASH>..<HASH> 100755 --- a/evaluators/segment_eval.py +++ b/evaluators/segment_eval.py @@ -37,37 +37,41 @@ def evaluate(ref_file=None, est_file=None, trim=False): # Now compute all the metrics M = OrderedDict() # Boundary detection - M['P@0.5'], M['R@0.5'], M['F@0.5'] = \ + M['Precision@0.5'], M['Recall@0.5'], M['F-measure@0.5'] = \ mir_eval.boundary.detection(ref_intervals, est_intervals, window=0.5, trim=trim) - M['P@3.0'], M['R@3.0'], M['F@3.0'] = \ + M['Precision@3.0'], M['Recall@3.0'], M['F-measure@3.0'] = \ mir_eval.boundary.detection(ref_intervals, est_intervals, window=3.0, trim=trim) # Boundary deviation - M['True-to-Pred'], M['Pred-to-True'] = \ + M['Ref-to-est deviation'], M['Est-to-ref deviation'] = \ mir_eval.boundary.deviation(ref_intervals, est_intervals, trim=trim) # Pairwise clustering - M['Pair-P'], M['Pair-R'], M['Pair-F'] = \ + M['Pairwise Precision'], M['Pairwise Recall'], M['Pairwise F-measure'] = \ mir_eval.structure.pairwise(ref_intervals, ref_labels, est_intervals, est_labels) # Rand index - M['RI'] = mir_eval.structure.rand_index(ref_intervals, ref_labels, + M['Rand Index'] = mir_eval.structure.rand_index(ref_intervals, ref_labels, est_intervals, est_labels) # Adjusted rand index - M['ARI'] = mir_eval.structure.ari(ref_intervals, ref_labels, est_intervals, - est_labels) + M['Adjusted Rand Index'] = mir_eval.structure.ari(ref_intervals, + ref_labels, + est_intervals, + est_labels) # Mutual information metrics - M['MI'], M['AMI'], M['NMI'] = \ + (M['Mutual Information'], + M['Adjusted Mutual Information'], + M['Normalized Mutual Information']) = \ mir_eval.structure.mutual_information(ref_intervals, ref_labels, est_intervals, est_labels) # Conditional entropy metrics - M['S_Over'], M['S_Under'], M['S_F'] = \ + M['NCE Over'], M['NCE Under'], M['NCE F-measure'] = \ mir_eval.structure.nce(ref_intervals, ref_labels, est_intervals, est_labels) @@ -84,7 +88,7 @@ def 
print_evaluation(est_file, M): # And print them print os.path.basename(est_file) for key, value in M.iteritems(): - print '\t%12s:\t%0.3f' % (key, value) + print '\t%30s:\t%0.3f' % (key, value) pass
Nicer metric names because of #<I> for #<I>
craffel_mir_eval
train
3e4dd0fe8655ae61b03b5a42d5c3e8d769559205
diff --git a/library/Helper/Navigation.php b/library/Helper/Navigation.php index <HASH>..<HASH> 100644 --- a/library/Helper/Navigation.php +++ b/library/Helper/Navigation.php @@ -265,14 +265,14 @@ class Navigation //Check if if valid post type string if($postType != 'all' && !is_array($postType) && !post_type_exists($postType)) { - return new \WP_Error("Could not get navigation menu for " . $postType . " since it dosen't exist."); + return []; } //Check if if valid post type array if(is_array($postType)) { foreach($postType as $item) { if(!post_type_exists($item)) { - return new \WP_Error("Could not get navigation menu for " . $item . " since it dosen't exist."); + return []; } } }
Remove errors returned These should be handled well by implementation.
helsingborg-stad_Municipio
train
21031c8372078664149c40bd3a89a82049e0b16a
diff --git a/lenses/const.py b/lenses/const.py index <HASH>..<HASH> 100644 --- a/lenses/const.py +++ b/lenses/const.py @@ -10,7 +10,7 @@ class Const(object): self.item = item def __repr__(self): - return '{}({!r})'.format(self.__class__, self.item) + return '{}({!r})'.format(self.__class__.__name__, self.item) def __eq__(self, other): if not isinstance(other, Const): diff --git a/lenses/identity.py b/lenses/identity.py index <HASH>..<HASH> 100644 --- a/lenses/identity.py +++ b/lenses/identity.py @@ -13,7 +13,7 @@ class Identity(object): self.item = item def __repr__(self): - return '{}({!r})'.format(self.__class__, self.item) + return '{}({!r})'.format(self.__class__.__name__, self.item) def __eq__(self, other): if not isinstance(other, Identity):
readded readable names to the const and identity functor reprs this was removed by the port to python2 because it does not support __qualname__, but we can just use __name__ instead.
ingolemo_python-lenses
train
3ea40c7263b651fdd5c4caaffa095fc77c5f890f
diff --git a/lib/device.js b/lib/device.js index <HASH>..<HASH> 100644 --- a/lib/device.js +++ b/lib/device.js @@ -68,6 +68,9 @@ function DeviceParser(req, options) { } else if (ua.match(/Linux/i) && ua.match(/X11/i) && !ua.match(/Charlotte/i)) { // if user agent is a Linux Desktop return 'desktop'; + } else if (ua.match(/CrOS/i)) { + // if user agent is a Chrome Book + return 'desktop'; } else if (ua.match(/Solaris|SunOS|BSD/i)) { // if user agent is a Solaris, SunOS, BSD Desktop return 'desktop';
fix detection of Chrome Book OS - was being incorrectly detected as phone
rguerreiro_express-device
train
a7e34dc1456f5f6135d715c016a179d9e7b5ddb2
diff --git a/moa/src/main/java/moa/gui/visualization/GraphMultiCurve.java b/moa/src/main/java/moa/gui/visualization/GraphMultiCurve.java index <HASH>..<HASH> 100644 --- a/moa/src/main/java/moa/gui/visualization/GraphMultiCurve.java +++ b/moa/src/main/java/moa/gui/visualization/GraphMultiCurve.java @@ -106,7 +106,7 @@ public class GraphMultiCurve extends AbstractGraphPlot { x[i] = (int) (i * x_resolution); y[i] = (int)(height-(m.getValue(mSelect, i)/this.upper_y_value)*height); - if (this.isStandardDeviationPainted) { + if (this.isStandardDeviationPainted && mSelect <= 6) { // access the corresponding std value double std = m.getValue(mSelect + 7, i); int len = (int) ((std/this.upper_y_value)*height);
Fix bug if std measure is selected
Waikato_moa
train
f8467cf3753d09e6c24ac303cd1bbe6aa1138afc
diff --git a/dvc/repo/experiments/init.py b/dvc/repo/experiments/init.py index <HASH>..<HASH> 100644 --- a/dvc/repo/experiments/init.py +++ b/dvc/repo/experiments/init.py @@ -8,7 +8,6 @@ from typing import ( Callable, Dict, Iterable, - List, Optional, TextIO, Tuple, @@ -16,7 +15,7 @@ from typing import ( cast, ) -from funcy import compact, lremove +from funcy import compact, lremove, lsplit from rich.rule import Rule from rich.syntax import Syntax @@ -24,13 +23,13 @@ from dvc.exceptions import DvcException from dvc.stage import PipelineStage from dvc.stage.serialize import to_pipeline_file from dvc.types import OptStr +from dvc.utils import humanize from dvc.utils.serialize import dumps_yaml if TYPE_CHECKING: from dvc.repo import Repo from dvc.dvcfile import DVCFile from rich.tree import Tree - from dvc.dependency import Dependency from dvc.ui import ui @@ -155,7 +154,7 @@ def init_interactive( styled=True, ) - tree_label = "DVC assumes the following workspace structure:" + tree_label = "Experiment project structure:" display_workspace_tree(workspace, tree_label, stderr=True) ret.update( compact( @@ -216,27 +215,30 @@ def is_file(path: str) -> bool: return bool(ext) -def init_deps(deps: Iterable["Dependency"]) -> List[str]: +def init_deps(stage: PipelineStage, log: bool = False) -> None: + from funcy import rpartial + from dvc.dependency import ParamsDependency from dvc.fs.local import localfs - new_deps = [ - dep - for dep in deps - if not isinstance(dep, ParamsDependency) and not dep.exists - ] - for dep in new_deps: - fs_path = dep.fs_path - if not is_file(fs_path): - localfs.makedirs(fs_path) - continue - - localfs.makedirs(localfs.path.parent(fs_path), exist_ok=True) - with localfs.open(fs_path, "w", encoding="utf-8"): + new_deps = [dep for dep in stage.deps if not dep.exists] + params, deps = lsplit(rpartial(isinstance, ParamsDependency), new_deps) + + if log: + paths = map("[green]{0}[/]".format, new_deps) + ui.write(f"Creating {humanize.join(paths)}", 
styled=True) + + # always create a file for params, detect file/folder based on extension + # for other dependencies + dirs = [dep.fs_path for dep in deps if not is_file(dep.fs_path)] + files = [dep.fs_path for dep in deps + params if is_file(dep.fs_path)] + for path in dirs: + localfs.makedirs(path) + for path in files: + localfs.makedirs(localfs.path.parent(path), exist_ok=True) + with localfs.open(path, "w", encoding="utf-8"): pass - return [dep.def_path for dep in new_deps] - def init( repo: "Repo", @@ -327,9 +329,9 @@ def init( stage.dump(update_lock=False) stage.ignore_outs() if not interactive: - label = "Creating experiment project structure:" + label = "Experiment project structure:" display_workspace_tree(context, label) - init_deps(stage.deps) + init_deps(stage, log=not interactive) if params: repo.scm_context.track_file(params) else: diff --git a/tests/func/experiments/test_init.py b/tests/func/experiments/test_init.py index <HASH>..<HASH> 100644 --- a/tests/func/experiments/test_init.py +++ b/tests/func/experiments/test_init.py @@ -298,7 +298,8 @@ def test_init_default(tmp_dir, scm, dvc, interactive, overrides, inp, capsys): assert "'data' does not exist, the directory will be created." in err assert not out return - assert "Creating experiment project structure: " in out + assert "Experiment project structure: " in out + assert "Creating script.py and data" in out @pytest.mark.timeout(5, func_only=True) @@ -384,7 +385,8 @@ def test_init_interactive_live( assert "'data' does not exist, the directory will be created." in err assert not out return - assert "Creating experiment project structure: " in out + assert "Experiment project structure: " in out + assert "Creating script.py and data" in out @pytest.mark.parametrize(
exp init: log created dependencies in non-interactive mode
iterative_dvc
train
177f6457de498c6f86f9caea75883d113379e461
diff --git a/bigtable-client-core-parent/bigtable-hbase-integration-tests-common/src/test/java/com/google/cloud/bigtable/hbase/AbstractTestFilters.java b/bigtable-client-core-parent/bigtable-hbase-integration-tests-common/src/test/java/com/google/cloud/bigtable/hbase/AbstractTestFilters.java index <HASH>..<HASH> 100644 --- a/bigtable-client-core-parent/bigtable-hbase-integration-tests-common/src/test/java/com/google/cloud/bigtable/hbase/AbstractTestFilters.java +++ b/bigtable-client-core-parent/bigtable-hbase-integration-tests-common/src/test/java/com/google/cloud/bigtable/hbase/AbstractTestFilters.java @@ -2062,7 +2062,9 @@ public abstract class AbstractTestFilters extends AbstractTest { // all 8 keys should be matched try (ResultScanner scanner = table.getScanner(scan)) { for(Result result : scanner) { - actualKeys.add(toFuzzyKeyString(CellUtil.cloneRow(result.rawCells()[0]))); + if (!result.isEmpty()) { + actualKeys.add(toFuzzyKeyString(CellUtil.cloneRow(result.rawCells()[0]))); + } } } assertEquals(expectedKeys, actualKeys);
Fixing minor issue in FuzzyFilter test for HBase (#<I>)
googleapis_cloud-bigtable-client
train
988c0b3cd8f10712356ca2084e632472b8d24b79
diff --git a/README.rdoc b/README.rdoc index <HASH>..<HASH> 100644 --- a/README.rdoc +++ b/README.rdoc @@ -61,6 +61,18 @@ You can also access each subproject's API individually, if you would like to use options: { tenant: 'hawkular' } ) +=== HTTP and HTTPS options + +Will all client classes, the +:options+ hash can contain extra parameters passed through to +RestClient+ gem. It can include a +:headers+ sub-hash to add custom headers: + + require 'hawkular/hawkular_client' + client = Hawkular::Client.new( + entrypoint: 'http://localhost:8080', + credentials: { username: 'jdoe', password: 'password' }, + options: { tenant: 'hawkular', proxy: 'proxy.example.com', ssl_ca_file: 'ca.pem', + headers: {'Max-Forwards': 5} } + ) + === Examples Suppose you will monitor the availability of two networks to later determine which one is the best. @@ -147,4 +159,3 @@ variables: Client documentation can be generated using http://yardoc.org yardoc - diff --git a/lib/hawkular/base_client.rb b/lib/hawkular/base_client.rb index <HASH>..<HASH> 100644 --- a/lib/hawkular/base_client.rb +++ b/lib/hawkular/base_client.rb @@ -24,15 +24,11 @@ module Hawkular token: nil }.merge(credentials) @options = { - tenant: nil, - admin_token: nil, - ssl_ca_file: nil, verify_ssl: OpenSSL::SSL::VERIFY_PEER, - ssl_client_cert: nil, - ssl_client_key: nil, - http_proxy_uri: nil, headers: {} }.merge(options) + @tenant = @options.delete(:tenant) + @admin_token = @options.delete(:admin_token) fail 'You need to provide an entrypoint' if entrypoint.nil? 
end @@ -73,17 +69,14 @@ module Hawkular # @!visibility private def rest_client(suburl) - options[:timeout] = ENV['HAWKULARCLIENT_REST_TIMEOUT'] if ENV['HAWKULARCLIENT_REST_TIMEOUT'] - options[:ssl_ca_file] = @options[:ssl_ca_file] - options[:verify_ssl] = @options[:verify_ssl] - options[:ssl_client_cert] = @options[:ssl_client_cert] - options[:ssl_client_key] = @options[:ssl_client_key] - options[:proxy] = @options[:http_proxy_uri] - options[:user] = @credentials[:username] - options[:password] = @credentials[:password] + opts = @options.dup + opts[:timeout] ||= ENV['HAWKULARCLIENT_REST_TIMEOUT'] if ENV['HAWKULARCLIENT_REST_TIMEOUT'] + opts[:proxy] ||= opts.delete(:http_proxy_uri) + opts[:user] = @credentials[:username] + opts[:password] = @credentials[:password] # strip @endpoint in case suburl is absolute suburl = suburl[@entrypoint.length, suburl.length] if suburl.match(/^http/) - RestClient::Resource.new(@entrypoint, options)[suburl] + RestClient::Resource.new(@entrypoint, opts)[suburl] end # @!visibility private @@ -177,7 +170,7 @@ module Hawkular def admin_header headers = {} - headers[:'Hawkular-Admin-Token'] = @options[:admin_token] unless @options[:admin_token].nil? + headers[:'Hawkular-Admin-Token'] = @admin_token unless @admin_token.nil? headers end @@ -189,7 +182,7 @@ module Hawkular def tenant_header headers = {} - headers[:'Hawkular-Tenant'] = @options[:tenant] unless @options[:tenant].nil? + headers[:'Hawkular-Tenant'] = @tenant unless @tenant.nil? 
headers end diff --git a/spec/unit/base_spec.rb b/spec/unit/base_spec.rb index <HASH>..<HASH> 100644 --- a/spec/unit/base_spec.rb +++ b/spec/unit/base_spec.rb @@ -77,13 +77,35 @@ describe 'Base Spec' do expect(ret).to eq('a1%252%203%7c45%2f') end - it 'should pass http_proxy_uri to rest_client' do + it 'should pass through options to rest_client' do + proxy_uri = 'http://myproxy.com' + c = Hawkular::BaseClient.new('not-needed-for-this-test', {}, + { proxy: proxy_uri, timeout: 10 }) + rc = c.rest_client('myurl') + + expect(rc.options).to include(proxy: proxy_uri, timeout: 10) + end + + # backward compatibility + it 'should pass :http_proxy_uri to rest_client :proxy' do proxy_uri = 'http://myproxy.com' c = Hawkular::BaseClient.new('not-needed-for-this-test', {}, { http_proxy_uri: proxy_uri }) rc = c.rest_client('myurl') expect(rc.options[:proxy]).to eq(proxy_uri) + expect(rc.options).to include(proxy: proxy_uri) + expect(rc.options).not_to have_key(:http_proxy_uri) + end + + it 'should merge constructor, hawkular, and call headers' do + c = Hawkular::BaseClient.new('not-needed-for-this-test', {}, + { tenant: 'Me', headers: { 'X-Foo' => 'bar' } }) + headers = c.http_headers('Referer' => 'them.example.com') + + expect(headers).to include('Hawkular-Tenant': 'Me', + 'Referer' => 'them.example.com', + 'X-Foo' => 'bar') end it 'Should normalize different types of url and suffix combinations with or without slash' do
Pass through options to RestClient
hawkular_hawkular-client-ruby
train
79dfecdc14fa198011b28dbed81ca6025da3096b
diff --git a/nifstd/nifstd_tools/sheets.py b/nifstd/nifstd_tools/sheets.py index <HASH>..<HASH> 100644 --- a/nifstd/nifstd_tools/sheets.py +++ b/nifstd/nifstd_tools/sheets.py @@ -41,7 +41,7 @@ class Recuration(PPP): sheet_name = 'Recuration' -class Modalities(PPP): +class Approaches(PPP): # see also map-identifiers.py and methods/ for the rest of this sheet_name = 'Modalities Merged' index_columns = 'id', @@ -99,7 +99,7 @@ class Modalities(PPP): @property def pathTtl(self): olr = aug.RepoPath(auth.get_path('ontology-local-repo')) - path = olr / 'ttl' / 'modality.ttl' + path = olr / 'ttl' / 'approach.ttl' return path def populateHeader(self, graph): @@ -107,7 +107,7 @@ class Modalities(PPP): s = rdflib.URIRef(path.remote_uri_machine()) # TODO prov pairs = ((rdf.type, owl.Ontology), - (rdfs.label, rdflib.Literal('Experimental modalities.')),) + (rdfs.label, rdflib.Literal('Experimental approaches.')),) for p, o in pairs: graph.add((s, p, o)) @@ -126,7 +126,7 @@ def main(): missing = unique - all_u extra = all_u - unique - ma = Modalities() + ma = Approaches() ma.writeTtl()
nifstd_tools sheets modalities -> approaches naming
tgbugs_pyontutils
train
bf023979de39f5ff63ca5cc9f7b54e7af6f344d6
diff --git a/app/index.js b/app/index.js index <HASH>..<HASH> 100644 --- a/app/index.js +++ b/app/index.js @@ -23,7 +23,7 @@ AppengineGenerator.prototype.askFor = function askFor() { prompts.push({ name: 'appId', message: 'What is the application ID?', - default: 'new-application' + default: path.basename(process.cwd()) }) } else { this.appId = this.args[0];
Set default application name to folder name Fixes #1
generators_appengine
train
72b2275369842a6db4634e7959aa6bcf7c96e162
diff --git a/master/buildbot/config.py b/master/buildbot/config.py index <HASH>..<HASH> 100644 --- a/master/buildbot/config.py +++ b/master/buildbot/config.py @@ -140,10 +140,11 @@ class MasterConfig(util.ComparableMixin, WorkerAPICompatMixin): "logHorizon", "logMaxSize", "logMaxTailSize", "manhole", "collapseRequests", "metrics", "mq", "multiMaster", "prioritizeBuilders", "projectName", "projectURL", "properties", "protocols", "revlink", - "schedulers", "services", "slavePortnum", "status", "title", "titleURL", - "user_managers", "validation", 'www', + "schedulers", "services", "status", "title", "titleURL", + "user_managers", "validation", "www", "workers", + + "slavePortnum", # deprecated, c['protocols']['pb']['port'] should be used "slaves", # deprecated, "worker" should be used - "workers", ]) compare_attrs = list(_known_config_keys)
add comment for "slavePortnum" that it is deprecated
buildbot_buildbot
train
79f44a4c71618ae4cdb036e170901d0691deb1af
diff --git a/benchbuild/project.py b/benchbuild/project.py index <HASH>..<HASH> 100644 --- a/benchbuild/project.py +++ b/benchbuild/project.py @@ -352,6 +352,8 @@ def populate(projects_to_filter=None, group=None): group (list(str)): In addition to the project filter, we provide a way to filter whole groups. + Returns: + a dictionary of (project name, project class) pairs. """ if projects_to_filter is None: projects_to_filter = []
project: comment return of populate projects
PolyJIT_benchbuild
train
897a360ce707f2a60bb4160ee1389c6716672b39
diff --git a/app/models/travel_advice_edition.rb b/app/models/travel_advice_edition.rb index <HASH>..<HASH> 100644 --- a/app/models/travel_advice_edition.rb +++ b/app/models/travel_advice_edition.rb @@ -63,12 +63,12 @@ class TravelAdviceEdition new_edition end - def create_action_as(user, action_type, comment = nil) - actions.create(:requester => user, :request_type => action_type, :comment => comment) + def build_action_as(user, action_type, comment = nil) + actions.build(:requester => user, :request_type => action_type, :comment => comment) end def publish_as(user) - publish && create_action_as(user, Action::PUBLISH) + build_action_as(user, Action::PUBLISH) && publish end private diff --git a/test/models/travel_advice_edition_test.rb b/test/models/travel_advice_edition_test.rb index <HASH>..<HASH> 100644 --- a/test/models/travel_advice_edition_test.rb +++ b/test/models/travel_advice_edition_test.rb @@ -223,14 +223,14 @@ class TravelAdviceEditionTest < ActiveSupport::TestCase end should "add a 'create' action" do - @edition.create_action_as(@user, Action::CREATE) + @edition.build_action_as(@user, Action::CREATE) assert_equal 1, @edition.actions.size assert_equal Action::CREATE, @edition.actions.first.request_type assert_equal @user, @edition.actions.first.requester end should "add a 'new' action with a comment" do - @edition.create_action_as(@user, Action::NEW_VERSION, "a comment for the new version") + @edition.build_action_as(@user, Action::NEW_VERSION, "a comment for the new version") assert_equal 1, @edition.actions.size assert_equal "a comment for the new version", @edition.actions.first.comment end
Use 'build' instead of 'create' to make saves implicit Rather than cause a database save on each action addition, we should instead let the applications define the save logic implicitly.
alphagov_govuk_content_models
train
65e50a4e06df25b16d8a4a7e3fa43632e39edea9
diff --git a/salt/states/cmd.py b/salt/states/cmd.py index <HASH>..<HASH> 100644 --- a/salt/states/cmd.py +++ b/salt/states/cmd.py @@ -32,6 +32,11 @@ Only run if the file specified by ``creates`` does not exist, in this case touch cmd.run: - creates: /tmp/foo +.. note:: + + The ``creates`` option is only supported in releases greater than or + equal to 2014.1.0. + Note that when executing a command or script, the state (i.e., changed or not) of the command is unknown to Salt's state system. Therefore, by default, the ``cmd`` state assumes that any command execution results in a changed state.
Added a note regarding creates and what versions it is supported in.
saltstack_salt
train
89d66cbc622f55f499ea456a32784e8b4981b9de
diff --git a/lib/peddler/client.rb b/lib/peddler/client.rb index <HASH>..<HASH> 100644 --- a/lib/peddler/client.rb +++ b/lib/peddler/client.rb @@ -66,6 +66,8 @@ module Peddler private def inherited(base) + base.parser = parser + base.path(path) base.params(params) base.on_error(&@error_handler) end diff --git a/test/unit/peddler/test_client.rb b/test/unit/peddler/test_client.rb index <HASH>..<HASH> 100644 --- a/test/unit/peddler/test_client.rb +++ b/test/unit/peddler/test_client.rb @@ -52,6 +52,14 @@ class TestPeddlerClient < MiniTest::Test assert_equal Peddler::Client.params, @klass.params end + def test_inherits_parents_path + assert_equal @klass.path, Class.new(@klass).path + end + + def test_inherits_parents_parser + assert_equal @klass.parser, Class.new(@klass).parser + end + def test_params_include_seller_id assert @klass.params.key?("SellerId") end
Client should inherit parent's path and parser
hakanensari_peddler
train
4d823ad69e3b15291c58cde519d9ef2793333c4b
diff --git a/lib/helper/MockRequest.js b/lib/helper/MockRequest.js index <HASH>..<HASH> 100644 --- a/lib/helper/MockRequest.js +++ b/lib/helper/MockRequest.js @@ -101,13 +101,15 @@ class MockRequest extends Helper { } await page.setRequestInterception(true); - this.polly = new PollyJS(title, { + const defaultConfig = { mode: 'passthrough', adapters: ['puppeteer'], adapterOptions: { puppeteer: { page }, }, - }); + }; + + this.polly = new PollyJS(title, { ...defaultConfig, ...this.options }); } /**
Allows overriding default Polly config in MockRequest (#<I>)
Codeception_CodeceptJS
train
35fd64d36c2f8b49f7e06bde597ed4b0c7a6fe3a
diff --git a/Slim/App.php b/Slim/App.php index <HASH>..<HASH> 100644 --- a/Slim/App.php +++ b/Slim/App.php @@ -286,12 +286,17 @@ class App */ public function run($silent = false) { - // Finalize routes here for middleware stack - $this->container->get('router')->finalize(); - $request = $this->container->get('request'); $response = $this->container->get('response'); + // Finalize routes here for middleware stack & ensure basePath is set + $router = $this->container->get('router'); + $router->finalize(); + if (is_callable([$request->getUri(), 'getBasePath'])) { + $router->setBasePath($request->getUri()->getBasePath()); + } + + // Dispatch the Router first if the setting for this is on if ($this->container->get('settings')['determineRouteBeforeAppMiddleware'] === true) { // Dispatch router (note: you won't be able to alter routes after this) diff --git a/Slim/Container.php b/Slim/Container.php index <HASH>..<HASH> 100644 --- a/Slim/Container.php +++ b/Slim/Container.php @@ -150,15 +150,7 @@ final class Container extends PimpleContainer implements ContainerInterface */ if (!isset($this['router'])) { $this['router'] = function ($c) { - $router = new Router(); - - $uri = $c['request']->getUri(); - - if (is_callable([$uri, 'getBasePath'])) { - $router->setBasePath($uri->getBasePath()); - } - - return $router; + return new Router(); }; }
Set router's basePath when App::run() is called Move setting of the router's basePath to App::run() so that adding routes doesn't freeze the environment and request container items.
slimphp_Slim
train
332545ed6910872160fa413ecd13f7d8bf44575b
diff --git a/lib/ecm/references/version.rb b/lib/ecm/references/version.rb index <HASH>..<HASH> 100644 --- a/lib/ecm/references/version.rb +++ b/lib/ecm/references/version.rb @@ -1,5 +1,5 @@ module Ecm module References - VERSION = '2.0.0' + VERSION = '2.0.1' end end
Bumped version to <I>
robotex82_ecm_references2
train
9b92aedce29a5899eeaeec4935a16e26a10bcc79
diff --git a/quasar/dev/components/components/tabs.vue b/quasar/dev/components/components/tabs.vue index <HASH>..<HASH> 100644 --- a/quasar/dev/components/components/tabs.vue +++ b/quasar/dev/components/components/tabs.vue @@ -194,6 +194,26 @@ <q-route-tab name="tabs/c" to="/components/tabs/c" exact label="/tabs/c" /> </q-tabs> + <div class="row q-gutter-xs justify-stretch"> + <div class="col-12 col-sm-6 col-md"> + <q-btn class="fit" size="sm" color="secondary" :to="{ name: 'ta', params: { id: 1 }}" replace label="t/1/a" /> + </div> + <div class="col-12 col-sm-6 col-md"> + <q-btn class="fit" size="sm" color="secondary" :to="{ name: 'tb', params: { id: 1 }}" replace label="t/1/b" /> + </div> + <div class="col-12 col-sm-6 col-md"> + <q-btn class="fit" size="sm" color="secondary" :to="{ name: 'ta', params: { id: 2 }}" replace label="t/2/a" /> + </div> + <div class="col-12 col-sm-6 col-md"> + <q-btn class="fit" size="sm" color="secondary" :to="{ name: 'tb', params: { id: 2 }}" replace label="t/2/b" /> + </div> + </div> + <q-tabs :dense="dense" class="test q-mt-sm"> + <q-route-tab to="/components/tabs/t" exact label="t" /> + <q-route-tab v-if="$route.params.id" :to="{ name: 'ta', params: $route.params }" exact :label="`t/${ $route.params.id }/a`" /> + <q-route-tab v-if="$route.params.id" :to="{ name: 'tb', params: $route.params }" exact :label="`t/${ $route.params.id }/b`" /> + </q-tabs> + <h4>Tabs content (animated, swipeable)</h4> <q-option-group type="radio" diff --git a/quasar/dev/router.js b/quasar/dev/router.js index <HASH>..<HASH> 100644 --- a/quasar/dev/router.js +++ b/quasar/dev/router.js @@ -44,7 +44,14 @@ let routes = [ { path: 'a/b' }, { path: 'b' }, { path: 'b/a' }, - { path: 'c' } + { path: 'c' }, + { + path: 't', + children: [ + { path: ':id/a', name: 'ta' }, + { path: ':id/b', name: 'tb' } + ] + } ] }, { diff --git a/quasar/src/components/tabs/QTabs.js b/quasar/src/components/tabs/QTabs.js index <HASH>..<HASH> 100644 --- 
a/quasar/src/components/tabs/QTabs.js +++ b/quasar/src/components/tabs/QTabs.js @@ -127,6 +127,12 @@ export default Vue.extend({ }, __activateRoute (params) { + if (this.bufferRoute !== this.$route && this.buffer.length > 0) { + clearTimeout(this.bufferTimer) + this.buffer.length = 0 + } + this.bufferRoute = this.$route + const { name, selectable, exact, selected, priority } = params, first = !this.buffer.length,
fix(quasar): QTabs - clean previously registered buffer when current route has changed (#<I>)
quasarframework_quasar
train
09d0d82d974458f5362de773d0e64c5d72080009
diff --git a/lib/paleta/palette.rb b/lib/paleta/palette.rb index <HASH>..<HASH> 100644 --- a/lib/paleta/palette.rb +++ b/lib/paleta/palette.rb @@ -112,19 +112,19 @@ module Paleta type = opts[:type] || :shades size = opts[:size] || 5 case type - when :analogous; self.generate_analogous_palette_from_color(color, size) - when :complementary; self.generate_complementary_palette_from_color(color, size) - when :triad; self.generate_triad_palette_from_color(color, size) - when :monochromatic; self.generate_monochromatic_palette_from_color(color, size) - when :shades; self.generate_shades_palette_from_color(color, size) - when :random; self.generate_random_palette_from_color(color, size) + when :analogous; self.generate_analogous_from_color(color, size) + when :complementary; self.generate_complementary_from_color(color, size) + when :triad; self.generate_triad_from_color(color, size) + when :monochromatic; self.generate_monochromatic_from_color(color, size) + when :shades; self.generate_shades_from_color(color, size) + when :random; self.generate_random_from_color(color, size) else raise(ArgumentError, "Palette type is not defined. Try :analogous, :monochromatic, :shades, or :random") end end private - def self.generate_analogous_palette_from_color(color, n) + def self.generate_analogous_from_color(color, n) raise(ArgumentError, "Passed argument is not a Color") unless color.is_a?(Color) palette = self.new(color) step = 20 @@ -143,7 +143,7 @@ module Paleta palette.sort! { |a, b| a.hue <=> b.hue } end - def self.generate_complementary_palette_from_color(color, n) + def self.generate_complementary_from_color(color, n) raise(ArgumentError, "Passed argument is not a Color") unless color.is_a?(Color) complement = color.complement palette = self.new(color, complement) @@ -152,7 +152,7 @@ module Paleta palette.sort! 
{ |a, b| a.saturation <=> b.saturation } end - def self.generate_triad_palette_from_color(color, n) + def self.generate_triad_from_color(color, n) raise(ArgumentError, "Passed argument is not a Color") unless color.is_a?(Color) color2 = Paleta::Color.new(:hsl, (color.hue + 120) % 360, color.saturation, color.lightness) color3 = Paleta::Color.new(:hsl, (color2.hue + 120) % 360, color2.saturation, color2.lightness) @@ -162,7 +162,7 @@ module Paleta palette.sort! { |a, b| a.saturation <=> b.saturation } end - def self.generate_monochromatic_palette_from_color(color, n) + def self.generate_monochromatic_from_color(color, n) raise(ArgumentError, "Passed argument is not a Color") unless color.is_a?(Color) palette = self.new(color) step = (100 / n) @@ -180,7 +180,7 @@ module Paleta palette.sort! { |a, b| a.saturation <=> b.saturation } end - def self.generate_shades_palette_from_color(color, n) + def self.generate_shades_from_color(color, n) raise(ArgumentError, "Passed argument is not a Color") unless color.is_a?(Color) palette = self.new(color) step = (100 / n) @@ -198,7 +198,7 @@ module Paleta palette.sort! { |a, b| a.lightness <=> b.lightness } end - def self.generate_random_palette_from_color(color = nil, n) + def self.generate_random_from_color(color = nil, n) palette = color.is_a?(Color) ? self.new(color) : self.new r = Random.new(Time.now.sec) until palette.size == n
removed the word "palette" from private Palette generator methods
jordanstephens_paleta
train
c09cbdc28449c39cf411745e985d90b994762baa
diff --git a/zpay32/invoice_test.go b/zpay32/invoice_test.go index <HASH>..<HASH> 100644 --- a/zpay32/invoice_test.go +++ b/zpay32/invoice_test.go @@ -234,6 +234,26 @@ func TestDecodeEncode(t *testing.T) { skipEncoding: true, // Skip encoding since we don't have the unknown fields to encode. }, { + // Invoice with no amount. + encodedInvoice: "lnbc1pvjluezpp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypqdq5xysxxatsyp3k7enxv4jshwlglv23cytkzvq8ld39drs8sq656yh2zn0aevrwu6uqctaklelhtpjnmgjdzmvwsh0kuxuwqf69fjeap9m5mev2qzpp27xfswhs5vgqmn9xzq", + valid: true, + decodedInvoice: func() *Invoice { + return &Invoice{ + Net: &chaincfg.MainNetParams, + Timestamp: time.Unix(1496314658, 0), + PaymentHash: &testPaymentHash, + Description: &testCupOfCoffee, + Destination: testPubKey, + } + }, + beforeEncoding: func(i *Invoice) { + // Since this destination pubkey was recovered + // from the signature, we must set it nil before + // encoding to get back the same invoice string. + i.Destination = nil + }, + }, + { // Please make a donation of any amount using rhash 0001020304050607080900010203040506070809000102030405060708090102 to me @03e7156ae33b0a208d0744199163177e909e80176e55d97a2f221ede0f934dd9ad encodedInvoice: "lnbc1pvjluezpp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypqdpl2pkx2ctnv5sxxmmwwd5kgetjypeh2ursdae8g6twvus8g6rfwvs8qun0dfjkxaq8rkx3yf5tcsyz3d73gafnh3cax9rn449d9p5uxz9ezhhypd0elx87sjle52x86fux2ypatgddc6k63n7erqz25le42c4u4ecky03ylcqca784w", valid: true, @@ -532,6 +552,19 @@ func TestNewInvoice(t *testing.T) { valid: false, // Both Description and DescriptionHash set. }, { + // Invoice with no amount. 
+ newInvoice: func() (*Invoice, error) { + return NewInvoice( + &chaincfg.MainNetParams, + testPaymentHash, + time.Unix(1496314658, 0), + Description(testCupOfCoffee), + ) + }, + valid: true, + encodedInvoice: "lnbc1pvjluezpp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypqdq5xysxxatsyp3k7enxv4jshwlglv23cytkzvq8ld39drs8sq656yh2zn0aevrwu6uqctaklelhtpjnmgjdzmvwsh0kuxuwqf69fjeap9m5mev2qzpp27xfswhs5vgqmn9xzq", + }, + { // 'n' field set. newInvoice: func() (*Invoice, error) { return NewInvoice(&chaincfg.MainNetParams,
zpay<I>: add encode/decode tests for invoices with zero amount
lightningnetwork_lnd
train
aea184d8476b1a597e50ff3bb5e209130b5a62ce
diff --git a/analytics/src/main/java/com/segment/analytics/AnalyticsContext.java b/analytics/src/main/java/com/segment/analytics/AnalyticsContext.java index <HASH>..<HASH> 100644 --- a/analytics/src/main/java/com/segment/analytics/AnalyticsContext.java +++ b/analytics/src/main/java/com/segment/analytics/AnalyticsContext.java @@ -116,7 +116,7 @@ public class AnalyticsContext extends ValueMap { * instances is thread safe. */ static synchronized AnalyticsContext create(Context context, Traits traits, - boolean collectDeviceId) { + boolean collectDeviceId) { AnalyticsContext analyticsContext = new AnalyticsContext(new NullableConcurrentHashMap<String, Object>()); analyticsContext.putApp(context); @@ -410,7 +410,9 @@ public class AnalyticsContext extends ValueMap { /** Set the advertising information for this device. */ void putAdvertisingInfo(String advertisingId, boolean adTrackingEnabled) { - put(DEVICE_ADVERTISING_ID_KEY, advertisingId); + if (adTrackingEnabled && !isNullOrEmpty(advertisingId)) { + put(DEVICE_ADVERTISING_ID_KEY, advertisingId); + } put(DEVICE_AD_TRACKING_ENABLED_KEY, adTrackingEnabled); } diff --git a/analytics/src/main/java/com/segment/analytics/GetAdvertisingIdTask.java b/analytics/src/main/java/com/segment/analytics/GetAdvertisingIdTask.java index <HASH>..<HASH> 100644 --- a/analytics/src/main/java/com/segment/analytics/GetAdvertisingIdTask.java +++ b/analytics/src/main/java/com/segment/analytics/GetAdvertisingIdTask.java @@ -26,8 +26,14 @@ class GetAdvertisingIdTask extends AsyncTask<Context, Void, Pair<String, Boolean Boolean isLimitAdTrackingEnabled = (Boolean) advertisingInfo.getClass() .getMethod("isLimitAdTrackingEnabled") .invoke(advertisingInfo); - String id = (String) advertisingInfo.getClass().getMethod("getId").invoke(advertisingInfo); - return Pair.create(id, isLimitAdTrackingEnabled); + + if (isLimitAdTrackingEnabled) { + return Pair.create(null, false); + } + + String advertisingId = + (String) 
advertisingInfo.getClass().getMethod("getId").invoke(advertisingInfo); + return Pair.create(advertisingId, true); } catch (Exception ignored) { return null; } @@ -35,11 +41,13 @@ class GetAdvertisingIdTask extends AsyncTask<Context, Void, Pair<String, Boolean @Override protected void onPostExecute(Pair<String, Boolean> info) { super.onPostExecute(info); - if (info != null) { - AnalyticsContext.Device device = analyticsContext.device(); - if (device != null) { - device.putAdvertisingInfo(info.first, info.second); - } + if (info == null) { + return; + } + + AnalyticsContext.Device device = analyticsContext.device(); + if (device != null) { + device.putAdvertisingInfo(info.first, info.second); } } }
Fix logic for collecting isLimitAdTracking Previously we were simply copying over isLimitAdTracking to adTrackingEnabled. These fields are the inverse of each other, so this commit fixes the logic and makes it so that adTrackingEnabled is recorded as !isLimitAdTracking. Also completely stop recording advertisingId if adTrackingEnabled is false.
segmentio_analytics-android
train
78a70bc22d663f56d3d39b89a3b94ebd59e54726
diff --git a/lib/instana/backend/host_agent_activation_observer.rb b/lib/instana/backend/host_agent_activation_observer.rb index <HASH>..<HASH> 100644 --- a/lib/instana/backend/host_agent_activation_observer.rb +++ b/lib/instana/backend/host_agent_activation_observer.rb @@ -12,7 +12,7 @@ module Instana # @param [RequestClient] client used to make requests to the backend # @param [Concurrent::Atom] discovery object used to store discovery response in - def initialize(client, discovery, wait_time: 1, logger: ::Instana.logger, max_wait_tries: 60, proc_table: Sys::ProcTable, socket_proc: default_socket_proc) # rubocop:disable Metrics/ParameterLists + def initialize(client, discovery, wait_time: 30, logger: ::Instana.logger, max_wait_tries: 60, proc_table: Sys::ProcTable, socket_proc: default_socket_proc) # rubocop:disable Metrics/ParameterLists @client = client @discovery = discovery @wait_time = wait_time
Wait <I> seconds between announce attempts
instana_ruby-sensor
train
9e46d56700b8da056abafa6cfaeca7d36bd4d50f
diff --git a/composer.json b/composer.json index <HASH>..<HASH> 100644 --- a/composer.json +++ b/composer.json @@ -25,7 +25,7 @@ "phpunit/phpunit": "~7.0|~8.0", "mockery/mockery": "~1.0", "orchestra/testbench": "^3.8", - "friendsofphp/php-cs-fixer": "^2.14", + "friendsofphp/php-cs-fixer": "^2.15", "laravel/framework": "5.8.*", "spatie/phpunit-snapshot-assertions": "^2.1.0", "phpstan/phpstan": "^0.9.2" diff --git a/tests/Activators/__snapshots__/FileActivatorTest__it_creates_valid_json_file_after_disabling__1.php b/tests/Activators/__snapshots__/FileActivatorTest__it_creates_valid_json_file_after_disabling__1.php index <HASH>..<HASH> 100644 --- a/tests/Activators/__snapshots__/FileActivatorTest__it_creates_valid_json_file_after_disabling__1.php +++ b/tests/Activators/__snapshots__/FileActivatorTest__it_creates_valid_json_file_after_disabling__1.php @@ -1,3 +1,5 @@ -<?php return '{ +<?php + +return '{ "Recipe": false }'; diff --git a/tests/Activators/__snapshots__/FileActivatorTest__it_creates_valid_json_file_after_disabling__2.php b/tests/Activators/__snapshots__/FileActivatorTest__it_creates_valid_json_file_after_disabling__2.php index <HASH>..<HASH> 100644 --- a/tests/Activators/__snapshots__/FileActivatorTest__it_creates_valid_json_file_after_disabling__2.php +++ b/tests/Activators/__snapshots__/FileActivatorTest__it_creates_valid_json_file_after_disabling__2.php @@ -1,3 +1,5 @@ -<?php return '{ +<?php + +return '{ "Recipe": false }'; diff --git a/tests/Activators/__snapshots__/FileActivatorTest__it_creates_valid_json_file_after_enabling__1.php b/tests/Activators/__snapshots__/FileActivatorTest__it_creates_valid_json_file_after_enabling__1.php index <HASH>..<HASH> 100644 --- a/tests/Activators/__snapshots__/FileActivatorTest__it_creates_valid_json_file_after_enabling__1.php +++ b/tests/Activators/__snapshots__/FileActivatorTest__it_creates_valid_json_file_after_enabling__1.php @@ -1,3 +1,5 @@ -<?php return '{ +<?php + +return '{ "Recipe": true }'; diff --git 
a/tests/Activators/__snapshots__/FileActivatorTest__it_creates_valid_json_file_after_enabling__2.php b/tests/Activators/__snapshots__/FileActivatorTest__it_creates_valid_json_file_after_enabling__2.php index <HASH>..<HASH> 100644 --- a/tests/Activators/__snapshots__/FileActivatorTest__it_creates_valid_json_file_after_enabling__2.php +++ b/tests/Activators/__snapshots__/FileActivatorTest__it_creates_valid_json_file_after_enabling__2.php @@ -1,3 +1,5 @@ -<?php return '{ +<?php + +return '{ "Recipe": true }';
Updating php-cs-fixer and running it
nWidart_laravel-modules
train
455f81711f9e7fb058e1a545609f03ed99feb02c
diff --git a/src/transformers/training_args.py b/src/transformers/training_args.py index <HASH>..<HASH> 100644 --- a/src/transformers/training_args.py +++ b/src/transformers/training_args.py @@ -281,7 +281,7 @@ class TrainingArguments: Whether or not to use the :class:`~transformers.Adafactor` optimizer instead of :class:`~transformers.AdamW`. group_by_length (:obj:`bool`, `optional`, defaults to :obj:`False`): - Whether or not to group together samples of roughly the same legnth in the training dataset (to minimize + Whether or not to group together samples of roughly the same length in the training dataset (to minimize padding applied and be more efficient). Only useful if applying dynamic padding. length_column_name (:obj:`str`, `optional`, defaults to :obj:`"length"`): Column name for precomputed lengths. If the column exists, grouping by length will use these values rather
Update training_args.py (#<I>) In the group by length documentation length is misspelled as legnth
huggingface_pytorch-pretrained-BERT
train
fbfe71450d77193768b2aad4dd4d820efa10277a
diff --git a/reporter_test.go b/reporter_test.go index <HASH>..<HASH> 100644 --- a/reporter_test.go +++ b/reporter_test.go @@ -3,36 +3,34 @@ package datadog import "testing" func TestGetPercentileNamesOutOfRange(t *testing.T) { - _, err := getPercentileNames([]float64{0.23, 0}) - if err == nil { + reporter := &Reporter{} + if UsePercentiles([]float64{0.23, 0})(reporter) == nil { t.Fatal("Expected error") } - _, err = getPercentileNames([]float64{0.23, 1}) - if err == nil { + if UsePercentiles([]float64{0.23, 1})(reporter) == nil { t.Fatal("Expected error") } - _, err = getPercentileNames([]float64{0.23, -0.1}) - if err == nil { + if UsePercentiles([]float64{0.23, -0.1})(reporter) == nil { t.Fatal("Expected error") } - _, err = getPercentileNames([]float64{0.23, 2}) - if err == nil { + if UsePercentiles([]float64{0.23, 2})(reporter) == nil { t.Fatal("Expected error") } } func TestGetPercentileNames(t *testing.T) { + reporter := &Reporter{} percentiles := []float64{0.23, 0.4, 0.99999, 0.45346356} - names, err := getPercentileNames(percentiles) + err := UsePercentiles(percentiles)(reporter) if err != nil { t.Fatal(err) } expectedNames := []string{".p23", ".p4", ".p99999", ".p45346356"} - if len(expectedNames) != len(names) { - t.Fatalf("Expected names: %v, got: %v", expectedNames, names) + if len(expectedNames) != len(reporter.p) { + t.Fatalf("Expected names: %v, got: %v", expectedNames, reporter.p) } for i, expectedName := range expectedNames { - if names[i] != expectedName { - t.Fatalf("Expected names: %v, got: %v", expectedNames, names) + if reporter.p[i] != expectedName { + t.Fatalf("Expected names: %v, got: %v", expectedNames, reporter.p) } } }
update unit test to cover fn option
syntaqx_go-metrics-datadog
train
5236ae9dc98513d77b4d7553af7c93730edab4de
diff --git a/kopeme-core/src/main/java/de/dagere/kopeme/datacollection/tempfile/ResultTempWriter.java b/kopeme-core/src/main/java/de/dagere/kopeme/datacollection/tempfile/ResultTempWriter.java index <HASH>..<HASH> 100644 --- a/kopeme-core/src/main/java/de/dagere/kopeme/datacollection/tempfile/ResultTempWriter.java +++ b/kopeme-core/src/main/java/de/dagere/kopeme/datacollection/tempfile/ResultTempWriter.java @@ -16,7 +16,7 @@ public class ResultTempWriter { private final File tempFile; private final BufferedWriter tempFileWriter; - public ResultTempWriter(boolean warmup) throws IOException { + public ResultTempWriter(final boolean warmup) throws IOException { tempFile = Files.createTempFile(warmup ? "kopeme-warmup-" : "kopeme-", ".tmp").toFile(); tempFileWriter = new BufferedWriter(new FileWriter(tempFile)); } @@ -65,6 +65,7 @@ public class ResultTempWriter { try { tempFileWriter.flush(); tempFileWriter.close(); + System.out.println("Flusing to " + tempFile.getAbsolutePath() + " finished"); } catch (IOException e) { e.printStackTrace(); } diff --git a/kopeme-core/src/main/java/de/dagere/kopeme/datacollection/tempfile/WrittenResultReader.java b/kopeme-core/src/main/java/de/dagere/kopeme/datacollection/tempfile/WrittenResultReader.java index <HASH>..<HASH> 100644 --- a/kopeme-core/src/main/java/de/dagere/kopeme/datacollection/tempfile/WrittenResultReader.java +++ b/kopeme-core/src/main/java/de/dagere/kopeme/datacollection/tempfile/WrittenResultReader.java @@ -34,24 +34,24 @@ public class WrittenResultReader { protected Map<String, SummaryStatistics> collectorSummaries = null; private Map<Integer, String> collectorsIndexed; - public WrittenResultReader(File file) { + public WrittenResultReader(final File file) { this.file = file; } - public void read(Throwable exception, Set<String> keys) { + public void read(final Throwable exception, final Set<String> keys) { initSummaries(keys); readValues(); checkValues(exception); } - private void checkValues(Throwable exception) 
{ + private void checkValues(final Throwable exception) { LOG.debug("Count of executions: {} Values: {}", executionStartTimes.size(), realValues.size()); if (executionStartTimes.size() != realValues.size()) { throw new RuntimeException("Count of executions is wrong, expected: " + executionStartTimes.size() + " but got " + realValues.size(), exception); } } - public void readStreaming(Throwable thrownException, Set<String> keys) { + public void readStreaming(final Throwable thrownException, final Set<String> keys) { finalValues = new HashMap<>(); collectorsIndexed = new HashMap<>(); initSummaries(keys); @@ -84,7 +84,7 @@ public class WrittenResultReader { } } - private void initSummaries(Set<String> keys) { + private void initSummaries(final Set<String> keys) { collectorSummaries = new HashMap<>(); for (String key : keys) { collectorSummaries.put(key, new SummaryStatistics()); @@ -130,7 +130,7 @@ public class WrittenResultReader { } } - public Fulldata createFulldata(int warmup, String currentDatacollector) { + public Fulldata createFulldata(final int warmup, final String currentDatacollector) { Fulldata result = new Fulldata(); for (int i = warmup; i < realValues.size(); i++) { final Long executionStartTime = executionStartTimes.get(i); @@ -167,7 +167,7 @@ public class WrittenResultReader { return finalValues; } - public void clear(String key) { + public void clear(final String key) { if (realValues != null) { for (int i = 0; i < realValues.size(); i++) { realValues.get(i).remove(key); @@ -177,7 +177,7 @@ public class WrittenResultReader { public void deleteTempFile() { if (!file.delete()) { - System.out.println("Warning: File " + file.getAbsolutePath() + " could not be deleted!"); + System.out.println("Warning: File " + file.getAbsolutePath() + " could not be deleted, existing: " + file.exists() + "!"); } } } diff --git a/kopeme-junit/src/test/java/de/dagere/kopeme/junit/TestBeforeExecution.java 
b/kopeme-junit/src/test/java/de/dagere/kopeme/junit/TestBeforeExecution.java index <HASH>..<HASH> 100644 --- a/kopeme-junit/src/test/java/de/dagere/kopeme/junit/TestBeforeExecution.java +++ b/kopeme-junit/src/test/java/de/dagere/kopeme/junit/TestBeforeExecution.java @@ -51,8 +51,6 @@ public class TestBeforeExecution { }); } - // , - // {ExampleBeforeTestRunner.class, "testMethod"} @Parameter(0) public Class<?> junitTestClass;
Debug output to identify windows problem with undeleted temporary warmup files
DaGeRe_KoPeMe
train
3cbe80fbfb7759e68c11b97ad108b6afb57ec486
diff --git a/registration/forms.py b/registration/forms.py index <HASH>..<HASH> 100644 --- a/registration/forms.py +++ b/registration/forms.py @@ -14,6 +14,17 @@ from django import forms from django.utils.translation import ugettext_lazy as _ +BAD_USERNAME = _("This value may contain only letters, " + "numbers and @/./+/-/_ characters.") +DUPLICATE_EMAIL = _("This email address is already in use. " + "Please supply a different email address.") +FREE_EMAIL = _("Registration using free email addresses is prohibited. " + "Please supply a different email address.") +DUPLICATE_USER = _("A user with that username already exists.") +PASSWORD_MISMATCH = _("The two password fields didn't match.") +TOS_REQUIRED = _("You must agree to the terms to register") + + class RegistrationForm(forms.Form): """ Form for registering a new user account. @@ -34,8 +45,8 @@ class RegistrationForm(forms.Form): max_length=30, label=_("Username"), error_messages={ - 'invalid': _("This value may contain only letters, " - "numbers and @/./+/-/_ characters.")}) + 'invalid': BAD_USERNAME, + }) email = forms.EmailField(label=_("E-mail")) password1 = forms.CharField(widget=forms.PasswordInput, label=_("Password")) @@ -52,9 +63,7 @@ class RegistrationForm(forms.Form): username__iexact=self.cleaned_data['username'] ) if existing.exists(): - raise forms.ValidationError( - _("A user with that username already exists.") - ) + raise forms.ValidationError(DUPLICATE_USER) else: return self.cleaned_data['username'] @@ -71,7 +80,7 @@ class RegistrationForm(forms.Form): if self.cleaned_data['password1'] != \ self.cleaned_data['password2']: raise forms.ValidationError( - _("The two password fields didn't match.") + PASSWORD_MISMATCH ) return self.cleaned_data @@ -86,7 +95,7 @@ class RegistrationFormTermsOfService(RegistrationForm): widget=forms.CheckboxInput, label=_(u'I have read and agree to the Terms of Service'), error_messages={ - 'required': _("You must agree to the terms to register") + 'required': 
TOS_REQUIRED, } ) @@ -104,10 +113,7 @@ class RegistrationFormUniqueEmail(RegistrationForm): """ if User.objects.filter(email__iexact=self.cleaned_data['email']): - raise forms.ValidationError( - _("This email address is already in use. " - "Please supply a different email address.") - ) + raise forms.ValidationError(DUPLICATE_EMAIL) return self.cleaned_data['email'] @@ -134,8 +140,5 @@ class RegistrationFormNoFreeEmail(RegistrationForm): """ email_domain = self.cleaned_data['email'].split('@')[1] if email_domain in self.bad_domains: - raise forms.ValidationError( - _("Registration using free email addresses is prohibited. " - "Please supply a different email address.") - ) + raise forms.ValidationError(FREE_EMAIL) return self.cleaned_data['email']
Split form error messages into top-level constants.
ubernostrum_django-registration
train
e2e025898a6f18675c0b021dfe33ee88cb4b0aa3
diff --git a/androguard/core/bytecodes/dvm.py b/androguard/core/bytecodes/dvm.py index <HASH>..<HASH> 100644 --- a/androguard/core/bytecodes/dvm.py +++ b/androguard/core/bytecodes/dvm.py @@ -1860,14 +1860,10 @@ class StringDataItem: def get(self): s = mutf8.decode(self.data) assert len(s) == self.utf16_size, "UTF16 Length does not match!" - # We need to escape surrogates and other stuff that might not be - # printable... - nstr = s.encode("UTF-16", "surrogatepass") - try: - return nstr.decode("UTF-16") - except UnicodeDecodeError: - log.warning("Error decoding UTF16 string with IDX {} and hexdata '{}'. Fallback to surrogateescape string.".format(self.offset, binascii.hexlify(self.data))) - return nstr.decode("UTF-16", "surrogateescape") + log.debug("Decoding UTF16 string with IDX {}, utf16 length {} and hexdata '{}'.".format(self.offset, self.utf16_size, binascii.hexlify(self.data))) + # unicode_escape produces a string which is printable + # Then we decode that one as UTF-16 + return s.encode("unicode_escape").decode("utf-16") def show(self): bytecode._PrintSubBanner("String Data Item")
change the decoding of string yet again
androguard_androguard
train
060ee61d46dee343873326a7c5dd493dfdc05fb5
diff --git a/abydos/distance/_token_distance.py b/abydos/distance/_token_distance.py index <HASH>..<HASH> 100644 --- a/abydos/distance/_token_distance.py +++ b/abydos/distance/_token_distance.py @@ -623,6 +623,14 @@ class _TokenDistance(_Distance): sum(abs(val) for val in self.intersection().values()), 1 ) + def _intersection(self): + """Return the intersection. + + This function may be overridden by setting the intersection_type during + initialization. + """ + return self._crisp_intersection() + def _get_confusion_table(self): """Return the token counts as a ConfusionTable object.""" return ConfusionTable(
added default value to make docs/code completion work as expected
chrislit_abydos
train
40d6d2b42d77b54dc7e3eb074c477022d3fd4c56
diff --git a/tests/test_mlblocks_primitives.py b/tests/test_mlblocks_primitives.py index <HASH>..<HASH> 100644 --- a/tests/test_mlblocks_primitives.py +++ b/tests/test_mlblocks_primitives.py @@ -35,7 +35,7 @@ DATASETS = { } -@patch('mlblocks.PRIMITIVES_PATHS', new=[PRIMITIVES_PATH]) +@patch('mlblocks.primitives._PRIMITIVES_PATHS', new=[PRIMITIVES_PATH]) def test_jsons(): """Validate MLBlocks primitive jsons"""
Fix tests execution after mlblocks <I> changes
HDI-Project_MLPrimitives
train
223587e092d980fea1b1308f2e8f699ecc2f4e4b
diff --git a/dwave/system/samplers/dwave_sampler.py b/dwave/system/samplers/dwave_sampler.py index <HASH>..<HASH> 100644 --- a/dwave/system/samplers/dwave_sampler.py +++ b/dwave/system/samplers/dwave_sampler.py @@ -21,8 +21,9 @@ for explanations of technical terms in descriptions of Ocean tools. """ from __future__ import division -import functools import time +import functools +import collections.abc as abc from warnings import warn @@ -189,13 +190,20 @@ class DWaveSampler(dimod.Sampler, dimod.Structured): warn("'order_by' has been moved under 'solver' dict.", DeprecationWarning) - # we want a QPU solver by default, but allow override + # strongly prefer QPU solvers; requires kwarg-level override config.setdefault('client', 'qpu') + # weakly prefer QPU solver with the highest qubit count, + # easily overridden on any config level above defaults (file/env/kwarg) + defaults = config.setdefault('defaults', {}) + if not isinstance(defaults, abc.Mapping): + raise TypeError("mapping expected for 'defaults'") + defaults.update(solver=dict(order_by='-num_active_qubits')) + self.client = Client.from_config(**config) + # NOTE: split behavior until we remove `order_by` kwarg if order_by is None: - # use the default from the cloud-client (or from solver) self.solver = self.client.get_solver() else: self.solver = self.client.get_solver(order_by=order_by) @@ -204,7 +212,7 @@ class DWaveSampler(dimod.Sampler, dimod.Structured): self.retry_interval = retry_interval warnings_default = WarningAction.IGNORE - """Defines the default behabior for :meth:`.sample_ising`'s and + """Defines the default behavior for :meth:`.sample_ising`'s and :meth:`sample_qubo`'s `warnings` kwarg. 
""" diff --git a/requirements.txt b/requirements.txt index <HASH>..<HASH> 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ --extra-index-url https://pypi.dwavesys.com/simple dimod==0.9.6 -dwave-cloud-client==0.7.7 +dwave-cloud-client==0.8.0 dwave-networkx==0.8.4 dwave-drivers==0.4.4 dwave-tabu==0.2.2 diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -26,7 +26,7 @@ exec(open(os.path.join(".", "dwave", "system", "package_info.py")).read()) install_requires = ['dimod>=0.9.6,<0.10.0', - 'dwave-cloud-client>=0.7.7,<0.8.0', + 'dwave-cloud-client>=0.8.0,<0.9.0', 'dwave-networkx>=0.8.4', 'networkx>=2.0,<3.0', 'homebase>=1.0.0,<2.0.0', diff --git a/tests/test_dwavesampler.py b/tests/test_dwavesampler.py index <HASH>..<HASH> 100644 --- a/tests/test_dwavesampler.py +++ b/tests/test_dwavesampler.py @@ -100,11 +100,37 @@ class TestDwaveSampler(unittest.TestCase): self.sampler.solver = MockSolver() @mock.patch('dwave.system.samplers.dwave_sampler.Client') - def test_solver_init(self, MockClient): + def test_init_default(self, MockClient): + """QPU with the highest number of qubits chosen by default.""" + + sampler = DWaveSampler() + + MockClient.from_config.assert_called_once_with( + client='qpu', + defaults={'solver': {'order_by': '-num_active_qubits'}}) + + @mock.patch('dwave.system.samplers.dwave_sampler.Client') + def test_init_generic_behavior(self, MockClient): + """Generic solver behavior (default prior to 0.10.0) can be forced.""" + + sampler = DWaveSampler(client='base') + + MockClient.from_config.assert_called_once_with( + client='base', + defaults={'solver': {'order_by': '-num_active_qubits'}}) + + @mock.patch('dwave.system.samplers.dwave_sampler.Client') + def test_init_solver(self, MockClient): + """QPU can be explicitly selected (old default usage example)""" solver = {'qpu': True, 'num_qubits__gt': 1000} + sampler = DWaveSampler(solver=solver) - 
MockClient.from_config.assert_called_once_with(client='qpu', solver=solver) + + MockClient.from_config.assert_called_once_with( + client='qpu', + solver=solver, + defaults={'solver': {'order_by': '-num_active_qubits'}}) def test_sample_ising_variables(self):
Make DWaveSampler prefer QPUs with highest number of qubits
dwavesystems_dwave-system
train
831d0e2a57e5ac9ae6d803005fa239e41cd38b01
diff --git a/templates/layout/default.php b/templates/layout/default.php index <HASH>..<HASH> 100644 --- a/templates/layout/default.php +++ b/templates/layout/default.php @@ -45,7 +45,7 @@ if (!$this->fetch('tb_footer')) { */ $this->prepend( 'tb_body_attrs', - ' class="' . implode(' ', [$this->request->getParam('controller'), $this->request->getParam('action')]) . '" ' + ' class="' . implode(' ', [h($this->request->getParam('controller')), h($this->request->getParam('action'))]) . '" ' ); if (!$this->fetch('tb_body_start')) { $this->start('tb_body_start'); diff --git a/templates/layout/examples/cover.php b/templates/layout/examples/cover.php index <HASH>..<HASH> 100644 --- a/templates/layout/examples/cover.php +++ b/templates/layout/examples/cover.php @@ -13,7 +13,7 @@ $this->Html->css('BootstrapUI.cover', ['block' => true]); $this->prepend( 'tb_body_attrs', 'class="d-flex h-100 text-center text-white bg-dark ' . - implode(' ', [$this->request->getParam('controller'), $this->request->getParam('action')]) . + implode(' ', [h($this->request->getParam('controller')), h($this->request->getParam('action'))]) . '" ' ); diff --git a/templates/layout/examples/dashboard.php b/templates/layout/examples/dashboard.php index <HASH>..<HASH> 100644 --- a/templates/layout/examples/dashboard.php +++ b/templates/layout/examples/dashboard.php @@ -8,7 +8,7 @@ $this->Html->css('BootstrapUI.dashboard', ['block' => true]); $this->prepend( 'tb_body_attrs', ' class="' . - implode(' ', [$this->request->getParam('controller'), $this->request->getParam('action')]) . + implode(' ', [h($this->request->getParam('controller')), h($this->request->getParam('action'))]) . 
'" ' ); $this->start('tb_body_start'); @@ -46,7 +46,7 @@ $this->start('tb_body_start'); <main role="main" class="col-md-9 ms-sm-auto col-lg-10 px-md-4"> <div class="d-flex justify-content-between flex-wrap flex-md-nowrap align-items-center pt-3 pb-2 mb-3 border-bottom"> - <h1 class="h2 page-header"><?= $this->request->getParam('controller'); ?></h1> + <h1 class="h2 page-header"><?= h($this->request->getParam('controller')) ?></h1> </div> <?php /** diff --git a/templates/layout/examples/signin.php b/templates/layout/examples/signin.php index <HASH>..<HASH> 100644 --- a/templates/layout/examples/signin.php +++ b/templates/layout/examples/signin.php @@ -6,7 +6,7 @@ $this->Html->css('BootstrapUI.signin', ['block' => true]); $this->prepend( 'tb_body_attrs', ' class="text-center ' . - implode(' ', [$this->request->getParam('controller'), $this->request->getParam('action')]) . + implode(' ', [h($this->request->getParam('controller')), h($this->request->getParam('action'))]) . '" ' ); $this->start('tb_body_start');
Escape request params. Mitigate potential XSS issue. refs 6fe6ddba5c<I>cffcd2e0bbb6ae<I>cf<I>cc
FriendsOfCake_bootstrap-ui
train
dde0bf779592e043c78f26682d084c67d5441855
diff --git a/spyderlib/config.py b/spyderlib/config.py index <HASH>..<HASH> 100644 --- a/spyderlib/config.py +++ b/spyderlib/config.py @@ -217,6 +217,7 @@ DEFAULTS = [ 'gui_completion': True, 'pager': True, 'ask_before_closing': True, + 'buffer_size': 10000, 'pylab': True, 'pylab/backend': 0 }), diff --git a/spyderlib/plugins/ipythonconsole.py b/spyderlib/plugins/ipythonconsole.py index <HASH>..<HASH> 100644 --- a/spyderlib/plugins/ipythonconsole.py +++ b/spyderlib/plugins/ipythonconsole.py @@ -49,7 +49,7 @@ class IPythonConsoleConfigPage(PluginConfigPage): font_group = self.create_fontgroup(option=None, text=None, fontfilters=QFontComboBox.MonospacedFonts) - # --- Interface Group --- + # Interface Group interface_group = QGroupBox(_("Interface")) banner_box = newcb(_("Display initial banner"), 'banner', tip=_("This option lets you hide the message shown at\n" @@ -72,6 +72,18 @@ class IPythonConsoleConfigPage(PluginConfigPage): interface_layout.addWidget(pager_box) interface_layout.addWidget(ask_box) interface_group.setLayout(interface_layout) + + # Source Code Group + source_code_group = QGroupBox(_("Source code")) + buffer_spin = self.create_spinbox( + _("Buffer: "), _(" lines"), + 'buffer_size', min_=-1, max_=1000000, step=100, + tip=_("Set the maximum number of lines of text shown in the\n" + "console before truncation. 
Specifying -1 disables it\n" + "(not recommended!)")) + source_code_layout = QVBoxLayout() + source_code_layout.addWidget(buffer_spin) + source_code_group.setLayout(source_code_layout) # --- Graphics --- # Pylab Group @@ -126,7 +138,8 @@ class IPythonConsoleConfigPage(PluginConfigPage): backend_group.setEnabled) tabs = QTabWidget() - tabs.addTab(self.create_tab(font_group, interface_group), _("Display")) + tabs.addTab(self.create_tab(font_group, interface_group, + source_code_group), _("Display")) tabs.addTab(self.create_tab(pylab_group, backend_group), _("Graphics")) vlayout = QVBoxLayout() diff --git a/spyderlib/widgets/ipython.py b/spyderlib/widgets/ipython.py index <HASH>..<HASH> 100644 --- a/spyderlib/widgets/ipython.py +++ b/spyderlib/widgets/ipython.py @@ -26,7 +26,7 @@ from spyderlib.widgets.sourcecode import mixins def config_widget(c): """Set options for SpyderIPythonWidget obtained through our config system""" - + # Gui completion widget gui_comp_o = CONF.get('ipython_console', 'gui_completion', True) c.IPythonWidget.gui_completion = gui_comp_o @@ -38,6 +38,10 @@ def config_widget(c): else: c.IPythonWidget.paging = 'none' + # Buffer size + buffer_size_o = CONF.get('ipython_console', 'buffer_size', 10000) + c.IPythonWidget.buffer_size = buffer_size_o + class IPythonShellWidget(QTextEdit, mixins.BaseEditMixin, mixins.TracebackLinksMixin): """QTextEdit widgets with features from Spyder's mixins.BaseEditMixin"""
IPython console: Add an option to set the buffer size
spyder-ide_spyder
train
fbaeb9dea46652adb2b17a328ecba6345197dd7f
diff --git a/test/Reference.test.js b/test/Reference.test.js index <HASH>..<HASH> 100644 --- a/test/Reference.test.js +++ b/test/Reference.test.js @@ -1,10 +1,16 @@ import { test } from 'substance-test' import setupTestApp from './shared/setupTestApp' import { JATS_BIBR_TYPES_TO_INTERNAL, INTERNAL_BIBR_TYPES } from '../index' -import { openMetadataEditor, setSelection, insertText, openMenuAndFindTool } from './shared/integrationTestHelpers' +import { + openMetadataEditor, openManuscriptEditor, setSelection, + insertText, openContextMenuAndFindTool, openMenuAndFindTool +} from './shared/integrationTestHelpers' import { doesNotThrowInNodejs } from './shared/testHelpers' import CSLJSON from './fixture/csl-json/csl-json-example' +const emptyLabel = '???' +const removeReferenceToolSelector = '.sm-remove-reference' + // addding reference is done in a workflow, where the user can choose to import, or select a specific type // TODO: we should also test the other ways to create reference (actually we should cover all cases) // For now, I have added only the following tests for adding manually @@ -86,6 +92,24 @@ test(`Reference: adding and editing authors`, t => { t.end() }) +test(`Reference: removing`, t => { + let { app } = setupTestApp(t, { archiveId: 'kitchen-sink' }) + let metadataEditor = openMetadataEditor(app) + let card = metadataEditor.find('.sc-card.sm-article-ref') + card.el.click() + + t.comment('removing reference') + t.ok(_canRemoveReference(metadataEditor), 'remove tool should not be disabled') + t.ok(_removeReference(metadataEditor), 'remove should not throw') + + t.comment('check what happened with xrefs') + let manuscriptEditor = openManuscriptEditor(app) + let xref = manuscriptEditor.find('.sc-xref') + t.equal(xref.text(), emptyLabel, 'xref label should not contain reference') + + t.end() +}) + test(`Reference: upload CSL-JSON set`, t => { let { app } = setupTestApp(t, { archiveId: 'blank' }) let metadataEditor = openMetadataEditor(app) @@ -137,3 +161,13 
@@ function _insertReference (editor, bibrType) { // ... this opens a modal where we click on the button for creating the particular bibr type editor.find(`.sc-modal-dialog .sc-add-reference .se-type.sm-${bibrType}`).click() } + +function _canRemoveReference (editor) { + let tool = openMenuAndFindTool(editor, 'context-tools', removeReferenceToolSelector) + return tool && !tool.attr('disabled') +} + +function _removeReference (editor) { + let tool = openContextMenuAndFindTool(editor, removeReferenceToolSelector) + return tool.el.click() +}
Reveal reference deletion problems with test.
substance_texture
train
1d8279dd6396180612b862302b30e8e623de3304
diff --git a/ui/src/kapacitor/containers/KapacitorPage.js b/ui/src/kapacitor/containers/KapacitorPage.js index <HASH>..<HASH> 100644 --- a/ui/src/kapacitor/containers/KapacitorPage.js +++ b/ui/src/kapacitor/containers/KapacitorPage.js @@ -91,7 +91,10 @@ class KapacitorPage extends Component { this.setState({kapacitor: data}) this.checkKapacitorConnection(data) router.push(`/sources/${source.id}/kapacitors/${data.id}/edit`) - addFlashMessage({type: 'success', text: 'Kapacitor Created!'}) + addFlashMessage({ + type: 'success', + text: 'Kapacitor Created! Configuring endpoints is optional', + }) }) .catch(() => { addFlashMessage({
Improve clarity of kapacitor node success message Goal is to let users know that they are ready to go unless they want to do more configuration
influxdata_influxdb
train
de8a897bd227165933fc5bcf760070a05ba758d0
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -48,8 +48,12 @@ git_hash = "%s" raise RuntimeError("%s already exists, will not overwrite" % filename) with open(filename, "w") as out: out.write(content) - yield - os.remove(filename) + try: + yield + except: + raise + finally: + os.remove(filename) def get_requirements(filename):
Correctly remove the git version file in case of exceptions. (#<I>)
awslabs_sockeye
train
72803cca3e1c4e3293422fabca3bddebb9b41f1f
diff --git a/lib/FindQueryBuilder.js b/lib/FindQueryBuilder.js index <HASH>..<HASH> 100644 --- a/lib/FindQueryBuilder.js +++ b/lib/FindQueryBuilder.js @@ -401,8 +401,9 @@ FindQueryBuilder.prototype._buildJoins = function (params, builder) { _.each(params, function (param) { _.each(param.propertyRefs, function (ref) { - if (ref.relation && utils.isOneToOneRelation(ref.relation)) { - relationsToJoin.push(ref.relation); + var rel = ref.relation; + if (rel && rel.isOneToOne()) { + relationsToJoin.push(rel); } }); }); @@ -467,11 +468,12 @@ FindQueryBuilder.prototype._buildOrderBy = function (params, builder) { dir = 'desc'; } - if (propertyRef.relation) { - if (!utils.isOneToOneRelation(propertyRef.relation)) { + var rel = propertyRef.relation; + if (rel) { + if (!rel.isOneToOne()) { utils.throwError("Can only order by model's own properties and by BelongsToOneRelation relations' properties"); } - var columnNameAlias = propertyRef.relation.name + _.capitalize(propertyRef.propertyName); + var columnNameAlias = rel.name + _.capitalize(propertyRef.propertyName); builder.select(propertyRef.fullColumnName() + ' as ' + columnNameAlias); builder.orderBy(columnNameAlias, dir); } else { diff --git a/lib/PropertyRef.js b/lib/PropertyRef.js index <HASH>..<HASH> 100644 --- a/lib/PropertyRef.js +++ b/lib/PropertyRef.js @@ -108,7 +108,7 @@ PropertyRef.prototype._parse = function (str, builder) { * @returns {string} */ PropertyRef.prototype.fullColumnName = function () { - if (this.relation && utils.isOneToOneRelation(this.relation)) { + if (this.relation && this.relation.isOneToOne()) { var builder = this.modelClass.query(); // one-to-one relations are joined and the joined table is given an alias. // We must refer to the column through that alias. 
@@ -133,13 +133,10 @@ PropertyRef.prototype.buildFilter = function (param, builder, boolOp) { whereMethod = boolOp + _.upperFirst(whereMethod); } - if (this.relation && !utils.isOneToOneRelation(this.relation)) { - var rel = this.relation; - var subQuery = rel.relatedModelClass.QueryBuilder.forClass(rel.relatedModelClass); - var ownerRefs = rel.ownerCol.map((ownCol => `${rel.ownerModelClass.tableName}.${ownCol}`)); - rel.findQuery(subQuery, { - ownerIds: ownerRefs, - isColumnRef: true + var rel = this.relation; + if (rel && !rel.isOneToOne()) { + const subQuery = rel.findQuery(rel.relatedModelClass.query(), { + ownerIds: rel.ownerProp.refs(builder) }); subQuery[whereMethod].apply(subQuery, filter.args); diff --git a/lib/utils.js b/lib/utils.js index <HASH>..<HASH> 100644 --- a/lib/utils.js +++ b/lib/utils.js @@ -1,8 +1,4 @@ module.exports = { - isOneToOneRelation: function (relation) { - return relation instanceof relation.ownerModelClass.BelongsToOneRelation; - }, - throwError: function (message) { var error = new Error(message); error.statusCode = 400; diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -46,11 +46,11 @@ "knex": "^0.13", "mocha": "^2", "mysql": "^2", - "objection": "^0.8.0", + "objection": "^0.9.4", "pg": "^6", "sqlite3": "^3" }, "peerDependencies": { - "objection": "^0.8.0" + "objection": "^0.9.0" } }
Fix compatibility with Objection <I>
Vincit_objection-find
train
f70d1c9c5dece5622e2c7bf625bfcf036735c290
diff --git a/lib/collection/property-list.js b/lib/collection/property-list.js index <HASH>..<HASH> 100644 --- a/lib/collection/property-list.js +++ b/lib/collection/property-list.js @@ -478,16 +478,16 @@ _.assign(PropertyList.prototype, /** @lends PropertyList.prototype */ { * property. * @param {?Boolean} [multiValue=false] - When set to true, only the first value of a multi valued property is * returned. - * @param {Boolean} [sanitize=false] - When set to true, properties with falsy keys are removed. + * @param {Boolean} [sanitizeKeys=false] - When set to true, properties with falsy keys are removed. * @todo Change the function signature to an object of options instead of the current structure. * @return {Object} */ - toObject: function (excludeDisabled, caseSensitive, multiValue, sanitize) { + toObject: function (excludeDisabled, caseSensitive, multiValue, sanitizeKeys) { var obj = {}, // create transformation data accumulator // gather all the switches of the list key = this._postman_listIndexKey, - sanitise = this._postman_sanitize || sanitize, + sanitiseKeys = this._postman_sanitizeKeys || sanitizeKeys, sensitive = !this._postman_listIndexCaseInsensitive || caseSensitive, multivalue = this._postman_listAllowsMultipleValues || multiValue; @@ -499,7 +499,7 @@ _.assign(PropertyList.prototype, /** @lends PropertyList.prototype */ { // 3. The member is disabled and disabled properties have to be ignored. // 4. The member has a falsy key, and sanitize is true. 
if (!member || !member.hasOwnProperty(key) || (excludeDisabled && member.disabled) || - (sanitise && !member[key])) { + (sanitiseKeys && !member[key])) { return; } diff --git a/test/unit/property-list.test.js b/test/unit/property-list.test.js index <HASH>..<HASH> 100644 --- a/test/unit/property-list.test.js +++ b/test/unit/property-list.test.js @@ -511,7 +511,7 @@ describe('PropertyList', function () { this.disabled = options.disabled; }; - FakeType._postman_sanitize = false; + FakeType._postman_sanitizeKeys = false; FakeType._postman_propertyIndexKey = 'keyAttr'; FakeType._postman_propertyIndexCaseInsensitive = true; FakeType._postman_propertyAllowsMultipleValues = false;
Renamed `sanitize` to `sanitizeKeys`
postmanlabs_postman-collection
train
2261acf72ca6cf206ffc9c264b2e9f436fe8a1cf
diff --git a/lib/base-server.js b/lib/base-server.js index <HASH>..<HASH> 100644 --- a/lib/base-server.js +++ b/lib/base-server.js @@ -150,6 +150,8 @@ exports.Server = INHERIT({ relPath = QS.unescape(reqPath).replace(/^\/|\/$/g, ''), fullPath = PATH.join(root, relPath); + if (PATH.dirSep === '\\') relPath = PATH.unixToOs(relPath); + LOGGER.fverbose('*** trying to access %s', fullPath); // try to find node in arch diff --git a/lib/nodes/lib.js b/lib/nodes/lib.js index <HASH>..<HASH> 100644 --- a/lib/nodes/lib.js +++ b/lib/nodes/lib.js @@ -453,6 +453,6 @@ registry.decl(SvnLibraryNodeName, ScmLibraryNodeName, /** @lends SvnLibraryNode. function joinUrlPath(url, part) { var p = URL.parse(url); - p.pathname = PATH.join(p.pathname, part); + p.pathname = PATH.joinPosix(p.pathname, part); return URL.format(p); } diff --git a/lib/path.js b/lib/path.js index <HASH>..<HASH> 100644 --- a/lib/path.js +++ b/lib/path.js @@ -27,6 +27,58 @@ exports.unixToOs = function(path) { return path.replace(/\//g, dirSep); }; +exports.joinPosix = function() { + var paths = Array.prototype.slice.call(arguments, 0); + return exports.normalizePosix(paths.filter(function(p, index) { + return p && typeof p === 'string'; + }).join('/')); +}; + +exports.normalizePosix = function(path) { + var isAbsolute = path.charAt(0) === '/', + trailingSlash = path.slice(-1) === '/'; + + // Normalize the path + path = normalizeArray(path.split('/').filter(function(p) { + return !!p; + }), !isAbsolute).join('/'); + + if (!path && !isAbsolute) { + path = '.'; + } + if (path && trailingSlash) { + path += '/'; + } + + return (isAbsolute ? 
'/' : '') + path; +}; + // Support compatability with node 0.6.x and remove warnings on node 0.8.x exports.exists = FS.exists || PATH.exists; exports.existsSync = FS.existsSync || PATH.existsSync; + +function normalizeArray(parts, allowAboveRoot) { + // if the path tries to go above the root, `up` ends up > 0 + var up = 0; + for (var i = parts.length - 1; i >= 0; i--) { + var last = parts[i]; + if (last == '.') { + parts.splice(i, 1); + } else if (last === '..') { + parts.splice(i, 1); + up++; + } else if (up) { + parts.splice(i, 1); + up--; + } + } + + // if the path is allowed to go above the root, restore leading ..s + if (allowAboveRoot) { + for (; up--; up) { + parts.unshift('..'); + } + } + + return parts; +}
Fix for bem server on windows (reference #<I>)
bem-archive_bem-tools
train
52e39fe99c3591ca466e9a29367da9b7a63c23ab
diff --git a/spyderlib/spyder.py b/spyderlib/spyder.py index <HASH>..<HASH> 100644 --- a/spyderlib/spyder.py +++ b/spyderlib/spyder.py @@ -958,8 +958,8 @@ class MainWindow(QMainWindow): _("Spyder support..."), triggered=self.google_group) self.check_updates_action = create_action(self, - _("Check for updates"), - triggered=self.check_updates) + _("Check for updates..."), + triggered=self.check_updates) # Spyder documentation doc_path = get_module_data_path('spyderlib', relpath="doc", @@ -1013,7 +1013,7 @@ class MainWindow(QMainWindow): self.help_menu_actions = [doc_action, tut_action, self.tours_menu, None, report_action, dep_action, - support_action, self.check_updates_action, + self.check_updates_action, support_action, None] # Python documentation if get_python_doc_path() is not None:
Main Window/Help: Change position of "Check for updates" action
spyder-ide_spyder
train
6ab68753d8b2d93a8b196cbff10ef38b139d2061
diff --git a/indexeddb/upgrade.js b/indexeddb/upgrade.js index <HASH>..<HASH> 100644 --- a/indexeddb/upgrade.js +++ b/indexeddb/upgrade.js @@ -6,24 +6,35 @@ if(entity.indexes) for (var j = 0, l2 = entity.indexes.length; j < l2; j++) - createIndex(store, entity.indexes[j]); + createIndex(store, entity, entity.indexes[j]); } function retriveStore(entity) { - if (!database.objectStoreNames.contains(entity.name)) - return database.createObjectStore(entity.name, { + if (!database.objectStoreNames.contains(entity.name)) { + var store = database.createObjectStore(entity.name, { autoIncrement: entity.autoIncrement || !entity.keyPath, keyPath: entity.keyPath }); + // create an index against our keyPath so it can be queried consistently + if (entity.keyPath) + store.createIndex(entity.keyPath, entity.keyPath); + return store; + } + return transaction.objectStore(entity.name); } - function createIndex(store, index) { + function createIndex(store, entity, index) { var name = indexName(index); - if (!store.indexNames.contains(name)) + if (!store.indexNames.contains(name)) { store.createIndex(name, index, { unique: false }); + if(entity.keyPath) { + var indexWithKeyPath = Array.prototype.concat.call([entity.keyPath], index); + store.createIndex(indexName(indexWithKeyPath), indexWithKeyPath, { unique: false }); + } + } } }; } diff --git a/tests/integration.js b/tests/integration.js index <HASH>..<HASH> 100644 --- a/tests/integration.js +++ b/tests/integration.js @@ -14,7 +14,6 @@ }) .then(function (rows) { expect(rows.length).to.equal(1); - delete rows[0].id; expect(rows[0]).to.deep.equal({ p1: 1, p2: 'test' }); }); }); @@ -60,8 +59,53 @@ }); }); + test("keyPath can be queried when autoIncrement is set", function () { + return open([], + [ + { p2: 'test' }, + { p2: 'test2' } + ], 'p1', true) + .then(function (container) { + return container.retrieve({ p: 'p1', v: 1 }); + }) + .then(function (rows) { + expect(rows.length).to.equal(1); + expect(rows[0]).to.deep.equal({ p1: 1, p2: 
'test' }); + }); + }); + + test("keyPath can be queried when autoIncrement is not set", function () { + return open([], + [ + { p1: 3, p2: 'test' }, + { p1: 4, p2: 'test2' } + ], 'p1', false) + .then(function (container) { + return container.retrieve({ p: 'p1', v: 3 }); + }) + .then(function (rows) { + expect(rows.length).to.equal(1); + expect(rows[0]).to.deep.equal({ p1: 3, p2: 'test' }); + }); + }); + + test("keyPath can be queried with indexes", function () { + return open(['p2'], + [ + { p1: 1, p2: 'test' }, + { p1: 2, p2: 'test2' } + ], 'p1') + .then(function (container) { + return container.retrieve([{ p: 'p1', v: 1 }, { p: 'p2', v: 'test' }]); + }) + .then(function (rows) { + expect(rows.length).to.equal(1); + expect(rows[0]).to.deep.equal({ p1: 1, p2: 'test' }); + }); + }); + test("add operation returns entity with autoIncrement keyPath property set", function () { - return open([], [], 'id') + return open([], [], 'id', true) .then(function (container) { return container.store({}); }) @@ -71,7 +115,7 @@ }); test("multiple add operation returns entities with autoIncrement keyPath property set", function () { - return open([], [], 'id') + return open([], [], 'id', true) .then(function (container) { return container.store([{}, {}]); }) @@ -82,7 +126,7 @@ test("stored entity has autoIncrement keyPath property set", function () { var container; - return open([], [], 'id') + return open([], [], 'id', true) .then(function (db) { container = db; return container.store({}); @@ -92,14 +136,14 @@ }) .then(function (entities) { expect(entities.length).to.equal(1); - expect(entities[0]).to.deep.equal([{ id: 1 }, { id: 2 }]); + expect(entities[0]).to.deep.equal({ id: 1 }); }); }); - function open(indexes, entities, keyPath) { + function open(indexes, entities, keyPath, autoIncrement) { var entity; - return storage.open([{ name: 'test', indexes: indexes, keyPath: keyPath, autoIncrement: true }], options) + return storage.open([{ name: 'test', indexes: indexes, keyPath: 
keyPath, autoIncrement: autoIncrement }], options) .then(function (provider) { db = provider; entity = provider.entity('test');
Indexes are added to query with keyPath
danderson00_tribe.storage
train
b684d40f0ffbc21ae75fe15cd7c66b7bafcbe380
diff --git a/src/Symfony/Component/Mime/Crypto/DkimSigner.php b/src/Symfony/Component/Mime/Crypto/DkimSigner.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Component/Mime/Crypto/DkimSigner.php +++ b/src/Symfony/Component/Mime/Crypto/DkimSigner.php @@ -68,6 +68,7 @@ final class DkimSigner throw new InvalidArgumentException('Invalid DKIM signing algorithm "%s".', $options['algorithm']); } $headersToIgnore['return-path'] = true; + $headersToIgnore['x-transport'] = true; foreach ($options['headers_to_ignore'] as $name) { $headersToIgnore[strtolower($name)] = true; }
Ignoring X-Transport header while signing email with DKIM
symfony_symfony
train
cf98b9328d71d08622f3389b2752229429f3add1
diff --git a/docs/source/guide.rst b/docs/source/guide.rst index <HASH>..<HASH> 100644 --- a/docs/source/guide.rst +++ b/docs/source/guide.rst @@ -369,16 +369,9 @@ execution:: from dramatiq.brokers.rabbitmq import RabbitmqBroker - rabbitmq_broker = RabbitmqBroker( - host="rabbitmq", - heartbeat_interval=0, - ) + rabbitmq_broker = RabbitmqBroker(host="rabbitmq") dramatiq.set_broker(rabbitmq_broker) -Make sure to disable heartbeats when defining your own connection -parameters by passing them ``heartbeat_interval=0`` since pika's -``BlockingConnection`` does not handle heartbeats. - Redis Broker ^^^^^^^^^^^^ diff --git a/dramatiq/broker.py b/dramatiq/broker.py index <HASH>..<HASH> 100644 --- a/dramatiq/broker.py +++ b/dramatiq/broker.py @@ -23,8 +23,9 @@ def get_broker(): set_broker(RabbitmqBroker( host="127.0.0.1", port=5672, - heartbeat_interval=0, + heartbeat_interval=5, connection_attempts=5, + blocked_connection_timeout=30, )) return global_broker diff --git a/dramatiq/brokers/rabbitmq.py b/dramatiq/brokers/rabbitmq.py index <HASH>..<HASH> 100644 --- a/dramatiq/brokers/rabbitmq.py +++ b/dramatiq/brokers/rabbitmq.py @@ -12,6 +12,10 @@ from ..message import Message #: The maximum amount of time a message can be in the dead queue. DEAD_MESSAGE_TTL = 86400 * 7 * 1000 +#: The max number of times to attempt an enqueue operation in case of +#: a connection error. +MAX_ENQUEUE_ATTEMPTS = 2 + #: The max amount of time messages can be delayed by in ms. 
MAX_MESSAGE_DELAY = 86400000 * 7 @@ -186,24 +190,36 @@ class RabbitmqBroker(Broker): }, ) - try: - self.logger.debug("Enqueueing message %r on queue %r.", message.message_id, queue_name) - self.emit_before("enqueue", message, delay) - self.channel.publish( - exchange="", - routing_key=queue_name, - body=message.encode(), - properties=properties, - ) - self.emit_after("enqueue", message, delay) - return message - except (pika.exceptions.ChannelClosed, - pika.exceptions.ConnectionClosed) as e: - # Delete the channel and the connection so that the next - # caller may initiate new ones of each. - del self.channel - del self.connection - raise ConnectionClosed(e) from None + attempts = 1 + while True: + try: + self.logger.debug("Enqueueing message %r on queue %r.", message.message_id, queue_name) + self.emit_before("enqueue", message, delay) + self.channel.publish( + exchange="", + routing_key=queue_name, + body=message.encode(), + properties=properties, + ) + self.emit_after("enqueue", message, delay) + return message + + except (pika.exceptions.ChannelClosed, + pika.exceptions.ConnectionClosed) as e: + + # Delete the channel and the connection so that the + # next caller/attempt may initiate new ones of each. + del self.channel + del self.connection + + attempts += 1 + if attempts > MAX_ENQUEUE_ATTEMPTS: + raise ConnectionClosed(e) from None + + self.logger.debug( + "Retrying enqueue due to closed connection. [%d/%d]", + attempts, MAX_ENQUEUE_ATTEMPTS, + ) def get_declared_queues(self): """Get all declared queues. 
diff --git a/dramatiq/worker.py b/dramatiq/worker.py index <HASH>..<HASH> 100644 --- a/dramatiq/worker.py +++ b/dramatiq/worker.py @@ -311,7 +311,7 @@ class _ConsumerThread(Thread): self.requeue_messages(m for _, m in iter_queue(self.delay_queue)) self.consumer.close() except ConnectionError: - self.logger.warning("Could not close Consumer.", exc_info=True) + pass class _WorkerThread(Thread): diff --git a/tests/test_rabbitmq.py b/tests/test_rabbitmq.py index <HASH>..<HASH> 100644 --- a/tests/test_rabbitmq.py +++ b/tests/test_rabbitmq.py @@ -168,17 +168,15 @@ def test_rabbitmq_broker_reconnects_after_enqueue_failure(rabbitmq_broker, rabbi def do_nothing(): pass - # If I close my channel + # If I close my connection rabbitmq_broker.connection.close() # Then send my actor a message - # I expect a ConnectionError to be raised - with pytest.raises(ConnectionClosed): - do_nothing.send() + # I expect the message to be enqueued + assert do_nothing.send() - # If I then send another message - # I expect the message to be sent - do_nothing.send() + # And the connection be reopened + assert rabbitmq_broker.connection.is_open @pytest.mark.skipif(os.getenv("TRAVIS") == "1", reason="test skipped on Travis")
fix: make enqueue resilient to disconnects under RMQ
Bogdanp_dramatiq
train
02222a5833d442dec8a8aa0ce94a66481c631b9a
diff --git a/src/LocalizedStrings.js b/src/LocalizedStrings.js index <HASH>..<HASH> 100644 --- a/src/LocalizedStrings.js +++ b/src/LocalizedStrings.js @@ -31,14 +31,14 @@ export default class LocalizedStrings{ constructor(props) { - var interfaceLanguage = (typeof navigator !== 'undefined' && navigator.languages && typeof navigator.languages !== 'undefined' && navigator.languages[0] && typeof navigator.languages[0] !== 'undefined') ? navigator.languages[0] : + this.interfaceLanguage = (typeof navigator !== 'undefined' && navigator.languages && typeof navigator.languages !== 'undefined' && navigator.languages[0] && typeof navigator.languages[0] !== 'undefined') ? navigator.languages[0] : ((typeof navigator !== 'undefined' && navigator.language && typeof navigator.language !== 'undefined') ? navigator.language : ((typeof navigator !== 'undefined' && navigator.userLanguage && typeof navigator.userLanguage !== 'undefined') ? navigator.userLanguage : 'en-US')); //Store locally the passed strings this.props = props; //Set language to its default value (the interface) - this.setLanguage(interfaceLanguage); + this.setLanguage(this.interfaceLanguage); } //Can be used from ouside the class to force a particular language @@ -70,7 +70,7 @@ export default class LocalizedStrings{ //The current interface language (could differ from the language displayed) getInterfaceLanguage(){ - return interfaceLanguage; + return this.interfaceLanguage; } //Return an array containing the available languages passed as props in the constructor
Fixed the interfaceLanguage undifined problem
stefalda_react-localization
train
2075feed3ec1fae5b6368650b62eb344ef6c19b8
diff --git a/src/util/scroll.js b/src/util/scroll.js index <HASH>..<HASH> 100644 --- a/src/util/scroll.js +++ b/src/util/scroll.js @@ -1,3 +1,4 @@ +import EVENTS from '../events.js'; import scrollToIndex from './scrollToIndex.js'; import { getToolState } from '../stateManagement/toolState.js'; import clip from './clip.js'; @@ -19,7 +20,7 @@ function scrollWithoutSkipping (stackData, pendingEvent, element) { if (index === pendingEvent.index) { stackData.pending.splice(stackData.pending.indexOf(pendingEvent), 1); - element.removeEventListener('cornerstonenewimage', newImageHandler); + element.removeEventListener(EVENTS.NEW_IMAGE, newImageHandler); if (stackData.pending.length > 0) { scrollWithoutSkipping(stackData, stackData.pending[0], element); @@ -27,7 +28,7 @@ function scrollWithoutSkipping (stackData, pendingEvent, element) { } }; - element.addEventListener('cornerstonenewimage', newImageHandler); + element.addEventListener(EVENTS.NEW_IMAGE, newImageHandler); scrollToIndex(element, pendingEvent.index); }
Use exported EVENTS constants for missing event names in string (#<I>) (#<I>)
cornerstonejs_cornerstoneTools
train
b7f815976c4a7b9945d548dc9285526d621ccb99
diff --git a/src/main/java/net/openhft/chronicle/map/impl/stage/input/ReplicatedInput.java b/src/main/java/net/openhft/chronicle/map/impl/stage/input/ReplicatedInput.java index <HASH>..<HASH> 100644 --- a/src/main/java/net/openhft/chronicle/map/impl/stage/input/ReplicatedInput.java +++ b/src/main/java/net/openhft/chronicle/map/impl/stage/input/ReplicatedInput.java @@ -91,10 +91,6 @@ public abstract class ReplicatedInput<K, V, R> } public void processReplicatedEvent() { - if (riId == mh.m().identifier()) { - // this may occur when working with UDP, as we may receive our own data - return; - } mh.m().setLastModificationTime(riId, bootstrapTimestamp);
HCOLL-<I> When the following setting is set on chronicle map "bootstrapOnlyLocalEntries==false", it is not sending all the entries. - this is the same issue but it was affecting chronicle engine replication
OpenHFT_Chronicle-Map
train
1c7b3ca36236165ca2a1da344171d65b260ed74a
diff --git a/sunspot_rails/lib/sunspot/rails/searchable.rb b/sunspot_rails/lib/sunspot/rails/searchable.rb index <HASH>..<HASH> 100644 --- a/sunspot_rails/lib/sunspot/rails/searchable.rb +++ b/sunspot_rails/lib/sunspot/rails/searchable.rb @@ -233,7 +233,7 @@ module Sunspot #:nodoc: last_id = options[:first_id] while(offset < record_count) solr_benchmark options[:batch_size], counter do - records = all(:include => options[:include], :conditions => ["#{table_name}.#{primary_key} > ?", last_id], :limit => options[:batch_size], :order => primary_key) + records = all(:include => options[:include], :conditions => ["#{table_name}.#{primary_key} > ?", last_id], :limit => options[:batch_size], :order => "#{table_name}.#{primary_key}") Sunspot.index(records) last_id = records.last.id end
fixes SQL's ambiguous ID issue when tables are joined
sunspot_sunspot
train
ce45f3ad900bde03a20c8530537cf32c95d6360f
diff --git a/lib/carrierwave_direct.rb b/lib/carrierwave_direct.rb index <HASH>..<HASH> 100644 --- a/lib/carrierwave_direct.rb +++ b/lib/carrierwave_direct.rb @@ -6,11 +6,14 @@ require "fog" require "uuid" module CarrierWaveDirect + autoload :Uploader, "carrierwave_direct/uploader" autoload :Mount, "carrierwave_direct/mount" module Uploader autoload :Configuration, 'carrierwave_direct/uploader/configuration' + + CarrierWave::Uploader::Base.send(:include, Configuration) end module Test diff --git a/lib/carrierwave_direct/uploader.rb b/lib/carrierwave_direct/uploader.rb index <HASH>..<HASH> 100644 --- a/lib/carrierwave_direct/uploader.rb +++ b/lib/carrierwave_direct/uploader.rb @@ -12,8 +12,6 @@ module CarrierWaveDirect attr_accessor :success_action_redirect - include CarrierWaveDirect::Uploader::Configuration - fog_credentials.keys.each do |key| define_method(key) do fog_credentials[key]
Add configuration to CarrierWave's configuration so that we can use it in an initializer
dwilkie_carrierwave_direct
train
bbee223e62ebb55854c1bb263387712c2338ad8d
diff --git a/collections/traits/Collection.php b/collections/traits/Collection.php index <HASH>..<HASH> 100644 --- a/collections/traits/Collection.php +++ b/collections/traits/Collection.php @@ -13,7 +13,7 @@ use nyx\core; * A Collection is an object that contains other items which can be set, get and removed from the Collection. * * Usage of this trait allows you to implement \IteratorAggregate and the interfaces\Collection interface, including - * all of its inherited interfaces. + * all of its in * * Important notes: * 1) null is *not* an acceptable value for an item within a Collection. Null is used internally by many methods @@ -146,7 +146,7 @@ trait Collection /** * @see \nyx\core\collections\interfaces\Collection::each() */ - public function each(callable $callback) : self + public function each(callable $callback) : interfaces\Collection { array_walk($this->items, $callback); @@ -218,7 +218,7 @@ trait Collection /** * @see \nyx\core\collections\interfaces\Collection::diff() */ - public function diff(...$against) + public function diff(...$against) : interfaces\Collection { $result = $this->items; diff --git a/collections/traits/Map.php b/collections/traits/Map.php index <HASH>..<HASH> 100644 --- a/collections/traits/Map.php +++ b/collections/traits/Map.php @@ -1,5 +1,8 @@ <?php namespace nyx\core\collections\traits; +// Internal dependencies +use nyx\core\collections\interfaces; + /** * Map * @@ -29,7 +32,7 @@ trait Map /** * @see \nyx\core\collections\interfaces\Map::set() */ - public function set($key, $item) : self + public function set($key, $item) : interfaces\Map { if (null === $item) { throw new \InvalidArgumentException('Items in a Map cannot have a value of null.'); @@ -59,7 +62,7 @@ trait Map /** * @see \nyx\core\collections\interfaces\Map::remove() */ - public function remove($key) : self + public function remove($key) : interfaces\Map { unset($this->items[$key]); @@ -69,7 +72,7 @@ trait Map /** * @see 
\nyx\core\collections\interfaces\Collection::replace() */ - public function replace($items) : self + public function replace($items) : interfaces\Collection { $this->items = []; diff --git a/collections/traits/NamedObjectSet.php b/collections/traits/NamedObjectSet.php index <HASH>..<HASH> 100644 --- a/collections/traits/NamedObjectSet.php +++ b/collections/traits/NamedObjectSet.php @@ -4,6 +4,7 @@ use nyx\diagnostics; // Internal dependencies +use nyx\core\collections\interfaces; use nyx\core; /** @@ -48,7 +49,7 @@ trait NamedObjectSet /** * @see \nyx\core\collections\interfaces\NamedObjectSet::add() */ - public function add(core\interfaces\Named $object) : self + public function add(core\interfaces\Named $object) : interfaces\NamedObjectSet { $name = $object->getName(); @@ -97,7 +98,7 @@ trait NamedObjectSet /** * @see \nyx\core\collections\interfaces\NamedObjectSet::remove() */ - public function remove(string $name) : self + public function remove(string $name) : interfaces\NamedObjectSet { unset($this->items[$name]); @@ -107,7 +108,7 @@ trait NamedObjectSet /** * @see \nyx\core\collections\interfaces\Collection::replace() */ - public function replace($items) : self + public function replace($items) : interfaces\Collection { $this->items = []; diff --git a/collections/traits/Sequence.php b/collections/traits/Sequence.php index <HASH>..<HASH> 100644 --- a/collections/traits/Sequence.php +++ b/collections/traits/Sequence.php @@ -32,7 +32,7 @@ trait Sequence /** * @see \nyx\core\collections\interfaces\Sequence::push() */ - public function push($item) : self + public function push($item) : interfaces\Sequence { if (null === $item) { throw new \InvalidArgumentException('Items in a Sequence cannot have a value of null.'); @@ -46,7 +46,7 @@ trait Sequence /** * @see \nyx\core\collections\interfaces\Sequence::prepend() */ - public function prepend($item) : self + public function prepend($item) : interfaces\Sequence { if (null === $item) { throw new 
\InvalidArgumentException('Items in a Sequence cannot have a value of null.'); @@ -60,7 +60,7 @@ trait Sequence /** * @see \nyx\core\collections\interfaces\Sequence::update() */ - public function update(int $index, $item) : self + public function update(int $index, $item) : interfaces\Sequence { if (null === $item) { throw new \InvalidArgumentException('Items in a Sequence cannot have a value of null.'); @@ -94,7 +94,7 @@ trait Sequence /** * @see \nyx\core\collections\interfaces\Sequence::remove() */ - public function remove(int $index) : self + public function remove(int $index) : interfaces\Sequence { // Note: We need to maintain order so we do actually need to check whether we remove // an item or whether it's already gone. @@ -164,7 +164,7 @@ trait Sequence /** * @see \nyx\core\collections\interfaces\Collection::replace() */ - public function replace($items) : self + public function replace($items) : interfaces\Collection { $this->items = [];
[Core/Collections] [Fixed] Trait return type hints were pointing to self instead of the appropriate interfaces.
unyx_core
train
f9b5e54715e97437e0c614095bc96c83ade8e75d
diff --git a/netmiko/hp/hp_procurve.py b/netmiko/hp/hp_procurve.py index <HASH>..<HASH> 100644 --- a/netmiko/hp/hp_procurve.py +++ b/netmiko/hp/hp_procurve.py @@ -40,19 +40,39 @@ class HPProcurveBase(CiscoSSHConnection): default_username="manager", ): """Enter enable mode""" + delay_factor = self.select_delay_factor(delay_factor=0) if self.check_enable_mode(): return "" - output = self.send_command_timing(cmd) - if ( - "username" in output.lower() - or "login name" in output.lower() - or "user name" in output.lower() - ): - output += self.send_command_timing(default_username) - if "password" in output.lower(): - output += self.send_command_timing(self.secret) + + output = "" + i = 1 + max_attempts = 5 + while i <= max_attempts: + self.write_channel(cmd + self.RETURN) + time.sleep(0.3 * delay_factor) + new_output = self.read_channel() + username_pattern = r"(username|login|user name)" + if re.search(username_pattern, new_output, flags=re_flags): + output += new_output + new_output = self.send_command_timing(default_username) + if re.search(pattern, new_output, flags=re_flags): + output += new_output + self.write_channel(self.normalize_cmd(self.secret)) + new_output = self._read_channel_timing() + if self.check_enable_mode(): + output += new_output + return output + output += new_output + i += 1 + log.debug(f"{output}") self.clear_buffer() + msg = ( + "Failed to enter enable mode. Please ensure you pass " + "the 'secret' argument to ConnectHandler." + ) + if not self.check_enable_mode(): + raise ValueError(msg) return output def cleanup(self):
Adding while loop to HP ProCurve enable method (#<I>)
ktbyers_netmiko
train
ad23cb367f295d216a7a4c0117085fc0f14a9e51
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -8,6 +8,11 @@ ## - Update the version information in this file ## - python setup.py sdist upload -r pypitest # for the test version ## - python setup.py sdist upload -r pypi # for the real version +## With twine: +## - python setup.py sdist +## - twine upload dist/* + + ## ## (see http://peterdowns.com/posts/first-time-with-pypi.html) @@ -18,11 +23,10 @@ from os import path import io import os, subprocess -## in development set version to none and ... +## in development set version to none and ... PYPI_VERSION = None - # Return the git revision as a string (from numpy) def git_version(): def _minimal_ext_cmd(cmd): @@ -69,7 +73,7 @@ ext4 = Extension(name = 'stripy._ssrfpack', if __name__ == "__main__": setup(name = 'stripy', author = "Louis Moresi", - author_email = "louis.moresi@unimelb.edu.au", + author_email = "louis.moresi@anu.edu.au", url = "https://github.com/underworldcode/stripy", version = PYPI_VERSION, description = "Python interface to TRIPACK and STRIPACK fortran code for triangulation/interpolation in Cartesian coordinates and on a sphere", @@ -78,8 +82,6 @@ if __name__ == "__main__": ext_modules = [ext1, ext2, ext3, ext4], install_requires = ['numpy', 'scipy>=0.15.0'], python_requires = '>=2.7, >=3.5', - setup_requires = ["pytest-runner"], - tests_require = ["pytest"], packages = ['stripy'], package_data = {'stripy': ['Notebooks/*ipynb', # Worked Examples is not currently used 'Notebooks/CartesianTriangulations/*ipynb',
Removing tests from setup.py as this is deprecated and causes all sorts of trouble !
underworldcode_stripy
train
a6d752415d5442db5f52ace2bb5113eb475c6217
diff --git a/certipy/certipy.py b/certipy/certipy.py index <HASH>..<HASH> 100644 --- a/certipy/certipy.py +++ b/certipy/certipy.py @@ -410,8 +410,8 @@ class CertStore(): del self.store[common_name] class Certipy(): - def __init__(self, store_dir='out'): - self.store = CertStore(store_dir=store_dir) + def __init__(self, store_dir='out', store_file='certipy.json'): + self.store = CertStore(containing_dir=store_dir, store_file=store_file) def create_key_pair(self, cert_type, bits): """
Pass the store file from Certipy to CertStore
LLNL_certipy
train