column    type           lengths / classes
hash      stringlengths  40–40
diff      stringlengths  131–114k
message   stringlengths  7–980
project   stringlengths  5–67
split     stringclasses  1 value
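Each record below lists the fields in this order: hash, diff (with commit hashes and some literals masked as <HASH> and <I>), message, project, and split. The following is a minimal sketch of how a table with this schema could be loaded with the Python datasets library, assuming it were published on the Hugging Face Hub; the repository id in the sketch is hypothetical.

```python
# Minimal loading sketch. "example-org/commit-diffs" is a hypothetical
# repository id; substitute the actual dataset identifier.
from datasets import load_dataset

ds = load_dataset("example-org/commit-diffs", split="train")

# The features mirror the header above: hash, diff, message, project, split.
print(ds.features)

# Inspect one record, e.g. its project and commit message.
row = ds[0]
print(row["project"], "-", row["message"])
```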
c3813ab41af4f0048b184785351f88b15160f481
diff --git a/bundles/org.eclipse.orion.client.javascript/web/javascript/plugins/javascriptPlugin.js b/bundles/org.eclipse.orion.client.javascript/web/javascript/plugins/javascriptPlugin.js index <HASH>..<HASH> 100644 --- a/bundles/org.eclipse.orion.client.javascript/web/javascript/plugins/javascriptPlugin.js +++ b/bundles/org.eclipse.orion.client.javascript/web/javascript/plugins/javascriptPlugin.js @@ -257,7 +257,7 @@ define([ if(files && files.length > 0) { return _normalRead(response, files[0].location, fileClient); } - _failedRead(response, files[0].location, "File not found in workspace"); + _failedRead(response, _l, "File not found in workspace"); }, function(err) { _failedRead(response, _l, err);
Bug <I> - Exception during failed read
eclipse_orion.client
train
27382bbcd7fae0e2762eb5d7ba687d115567600c
diff --git a/modules/ngMeteor-template.js b/modules/ngMeteor-template.js index <HASH>..<HASH> 100644 --- a/modules/ngMeteor-template.js +++ b/modules/ngMeteor-template.js @@ -3,7 +3,7 @@ var ngMeteorTemplate = angular.module('ngMeteor.template', []); ngMeteorTemplate.run(['$templateCache', function ($templateCache) { angular.forEach(Template, function (template, name) { - if (name.charAt(0) != "_") { // Ignores templates with names starting with "_" + if (name.charAt(0) != "_" && name != "prototype" && name != "loginButtons") { // Ignores templates with names starting with "_" $templateCache.put(name, '<ng-template name="' + name + '"></span>'); } }); @@ -16,28 +16,34 @@ ngMeteorTemplate.directive('ngTemplate', ['$templateCache', restrict: 'E', scope: true, template: function (element, attributes) { - var name = attributes.name, - template = Template[name], - templateRender = template.render(), - templateString = null; - // Check for nested templates in the render object and replace them with the equivalent ngTemplate directive. - angular.forEach(templateRender, function (v, k) { - if (angular.isObject(v)) { - if (v._super) { - var transcludeTemplateName = v._super.kind.replace('Template_', ''); - templateRender[k] = new HTML.Raw($templateCache.get(transcludeTemplateName)); + // Check if version prior 0.8.3 + if (Template[attributes.name].render){ + var name = attributes.name, + template = Template[name], + templateRender = Blaze.toHTML(template), + templateString = null; + + // Check for nested templates in the render object and replace them with the equivalent ngTemplate directive. + angular.forEach(templateRender, function (v, k) { + if (angular.isObject(v)) { + if (v._super) { + var transcludeTemplateName = v._super.kind.replace('Template_', ''); + templateRender[k] = new HTML.Raw($templateCache.get(transcludeTemplateName)); + } } + }); + + if (angular.isDefined(template)) { + templateString = UI.toHTML(templateRender); + } else { + throw new ReferenceError("There is no Meteor template with the name '" + name + "'."); } - }); - if (angular.isDefined(template)) { - templateString = UI.toHTML(templateRender); + return templateString; } else { - throw new ReferenceError("There is no Meteor template with the name '" + name + "'."); + return Blaze.toHTML(Template[attributes.name]); } - - return templateString; }, link: function (scope, element, attributes) { var name = attributes.name, diff --git a/ngMeteor.js b/ngMeteor.js index <HASH>..<HASH> 100644 --- a/ngMeteor.js +++ b/ngMeteor.js @@ -33,8 +33,8 @@ angular.element(document).ready(function () { } // Recompiles whenever the DOM elements are updated. - var notifyParented = UI.Component.notifyParented; - UI.Component.notifyParented = function () { + var notifyParented = Blaze.View.notifyParented; + Blaze.View.notifyParented = function () { notifyParented.apply(this, arguments); if (this.region) { Deps.afterFlush(function() {
feat(Meteor<I>): Support Meteor <I> and prior for ngMeteor <I>
Urigo_angular-meteor
train
589bb23d5721aa0a843050b4e0f110559e1aa142
diff --git a/pycoin/symbols/grs.py b/pycoin/symbols/grs.py index <HASH>..<HASH> 100644 --- a/pycoin/symbols/grs.py +++ b/pycoin/symbols/grs.py @@ -18,3 +18,12 @@ network = create_bitcoinish_network( "jswallet.groestlcoin.org", "groestlsight.groestlcoin.org" ] ) + +# Cause parsing to fail and tests to skip. +try: + import groestlcoin_hash +except ImportError: + network.Key = None + none_parser = lambda *args, **kwargs: None + for attr in "hierarchical_key private_key public_key address".split(): + setattr(network.parse, attr, none_parser)
GRS: Disable parsing attempts and tests when groestlcoin_hash isn't installed
richardkiss_pycoin
train
1e84b22407958fb1294236154af0997f6617f5e2
diff --git a/influxql/call_iterator_test.go b/influxql/call_iterator_test.go index <HASH>..<HASH> 100644 --- a/influxql/call_iterator_test.go +++ b/influxql/call_iterator_test.go @@ -740,11 +740,11 @@ type FloatPointGenerator struct { func (g *FloatPointGenerator) Close() error { return nil } func (g *FloatPointGenerator) Stats() influxql.IteratorStats { return influxql.IteratorStats{} } -func (g *FloatPointGenerator) Next() *influxql.FloatPoint { +func (g *FloatPointGenerator) Next() (*influxql.FloatPoint, error) { if g.i == g.N { - return nil + return nil, nil } p := g.Fn(g.i) g.i++ - return p + return p, nil } diff --git a/influxql/iterator.go b/influxql/iterator.go index <HASH>..<HASH> 100644 --- a/influxql/iterator.go +++ b/influxql/iterator.go @@ -1259,7 +1259,11 @@ func (itr *floatFastDedupeIterator) Next() (*FloatPoint, error) { } // If the point has already been output then move to the next point. - key := fastDedupeKey{p.Name, p.Aux[0]} + key := fastDedupeKey{name: p.Name} + key.values[0] = p.Aux[0] + if len(p.Aux) > 1 { + key.values[1] = p.Aux[1] + } if _, ok := itr.m[key]; ok { continue } @@ -1271,8 +1275,8 @@ func (itr *floatFastDedupeIterator) Next() (*FloatPoint, error) { } type fastDedupeKey struct { - name string - value interface{} + name string + values [2]interface{} } type reverseStringSlice []string diff --git a/influxql/select.go b/influxql/select.go index <HASH>..<HASH> 100644 --- a/influxql/select.go +++ b/influxql/select.go @@ -104,9 +104,13 @@ func buildAuxIterators(fields Fields, ic IteratorCreator, opt IteratorOptions) ( // Filter out duplicate rows, if required. if opt.Dedupe { - // If there is no group by and it's a single field then fast dedupe. - if itr, ok := input.(FloatIterator); ok && len(fields) == 1 && len(opt.Dimensions) == 0 { - input = newFloatFastDedupeIterator(itr) + // If there is no group by and it is a float iterator, see if we can use a fast dedupe. + if itr, ok := input.(FloatIterator); ok && len(opt.Dimensions) == 0 { + if sz := len(fields); sz > 0 && sz < 3 { + input = newFloatFastDedupeIterator(itr) + } else { + input = NewDedupeIterator(itr) + } } else { input = NewDedupeIterator(input) } diff --git a/influxql/select_test.go b/influxql/select_test.go index <HASH>..<HASH> 100644 --- a/influxql/select_test.go +++ b/influxql/select_test.go @@ -1,6 +1,7 @@ package influxql_test import ( + "fmt" "reflect" "testing" "time" @@ -2401,3 +2402,30 @@ func NewRawBenchmarkIteratorCreator(pointN int) *IteratorCreator { } return &ic } + +func benchmarkSelectDedupe(b *testing.B, seriesN, pointsPerSeries int) { + stmt := MustParseSelectStatement(`SELECT sval::string FROM cpu`) + stmt.Dedupe = true + + var ic IteratorCreator + ic.CreateIteratorFn = func(opt influxql.IteratorOptions) (influxql.Iterator, error) { + if opt.Expr != nil { + panic("unexpected expression") + } + + p := influxql.FloatPoint{ + Name: "tags", + Aux: []interface{}{nil}, + } + + return &FloatPointGenerator{N: seriesN * pointsPerSeries, Fn: func(i int) *influxql.FloatPoint { + p.Aux[0] = fmt.Sprintf("server%d", i%seriesN) + return &p + }}, nil + } + + b.ResetTimer() + benchmarkSelect(b, stmt, &ic) +} + +func BenchmarkSelect_Dedupe_1K(b *testing.B) { benchmarkSelectDedupe(b, 1000, 100) }
Update SHOW TAG VALUES to use a fast dedupe iterator. Include a benchmark test for the fast dedupe iterator.
influxdata_influxdb
train
27b3f2903298669e5f25c9bb138533d5c887cf5f
diff --git a/DelimiterStream.js b/DelimiterStream.js index <HASH>..<HASH> 100644 --- a/DelimiterStream.js +++ b/DelimiterStream.js @@ -160,6 +160,9 @@ DelimiterStream.prototype.addListener = function(type, listener) { return this; } events.EventEmitter.prototype.addListener.call(this, type, listener); + if (this.readableStream == null) { + return this; + } if (this._reFireListeners[type] == null && type && type !== 'data' && type !== 'close') { this._reFireListeners[type] = this.emit.bind(this, type); this.readableStream.on(type, this._reFireListeners[type]); @@ -170,6 +173,9 @@ DelimiterStream.prototype.on = DelimiterStream.prototype.addListener; DelimiterStream.prototype.removeListener = function(type, listener) { events.EventEmitter.prototype.removeListener.call(this, type, listener); + if (this.readableStream == null) { + return this; + } if (type && this._events[type] == null && this._reFireListeners[type] != null) { this.readableStream.removeListener(type, this._reFireListeners[type]); delete this._reFireListeners[type]; @@ -179,7 +185,7 @@ DelimiterStream.prototype.removeListener = function(type, listener) { DelimiterStream.prototype.removeAllListeners = function(type) { events.EventEmitter.prototype.removeAllListeners.call(this, type); - if (this.readableStream) { + if (this.readableStream != null) { this.removeAllStreamListeners(); } return this; @@ -187,11 +193,15 @@ DelimiterStream.prototype.removeAllListeners = function(type) { DelimiterStream.prototype.removeAllStreamListeners = function(type) { if (type && this._reFireListeners[type] != null) { - this.readableStream.removeListener(type, this._reFireListeners[type]); + if (this.readableStream != null) { + this.readableStream.removeListener(type, this._reFireListeners[type]); + } delete this._reFireListeners[type]; } else if (type == null) { - for (var t in this._reFireListeners) { - this.readableStream.removeListener(t, this._reFireListeners[t]); + if (this.readableStream != null) { + for (var t in this._reFireListeners) { + this.readableStream.removeListener(t, this._reFireListeners[t]); + } } this._reFireListeners = {}; } @@ -239,6 +249,10 @@ var passthruEvents = ['write', 'connect', 'end', 'ref', 'unref', 'setTimeout', ' do { (function(e) { DelimiterStream.prototype[e] = function() { + if (this.readableStream == null) { + this.emit('error', new Error(e + ' called after stream closed')); + return; + } this.readableStream[e].apply(this.readableStream, arguments); }; }(passthruEvents.pop())); diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "delimiterstream", - "version": "0.2.3", + "version": "0.2.4", "description": "Get delimiter-separated chunks of data from a Readable Stream.", "main": "DelimiterStream.js", "directories": { diff --git a/tests/tests.js b/tests/tests.js index <HASH>..<HASH> 100644 --- a/tests/tests.js +++ b/tests/tests.js @@ -534,3 +534,13 @@ exports.destroyTwice = function (test) { s.destroy(); test.done(); }; + +exports.writeAfterDestroy = function (test) { + f = new FakeReader(); + s = new DelimiterStream(f, "\n"); + s.destroy(); + s.on('error', function() { + test.done(); + }); + s.write(); +};
Check in more places that readableStream isn't null. Version <I>
fastest963_DelimiterStream
train
af2e2756d54a3e47d3a5540a8f723b0a83691865
diff --git a/tests/TaskList.js b/tests/TaskList.js index <HASH>..<HASH> 100644 --- a/tests/TaskList.js +++ b/tests/TaskList.js @@ -2,7 +2,7 @@ var Ezlog = require('ezlog'), log = new Ezlog({pref:{t:'[TaskList]',c:'green'}}); -var TaskList = function TaskList (o){ +function TaskList (o){ o = o || {}; //log('TaskList initialized'); @@ -10,13 +10,13 @@ var TaskList = function TaskList (o){ return this; }; - +var proto = Object.create(null); /** * @method start Check if requirements are met * @req url {string} Url to grab source code from */ -TaskList.prototype.start = function (){ +proto.start = function (){ //log("startin' task"); this.d.hello = 'world'; @@ -28,7 +28,7 @@ TaskList.prototype.start = function (){ * @method getSource Gets a website source code * @data d.source {string} Website it's source code */ -TaskList.prototype.getSource = function (){ +proto.getSource = function (){ //log("gettin' source"); var self = this; @@ -44,7 +44,7 @@ TaskList.prototype.getSource = function (){ * @method writeSource Writes a source code on HD * @req d.source {string} Website it's source page */ -TaskList.prototype.writeSource = function (){ +proto.writeSource = function (){ //log("writin' souce"); var self = this; @@ -55,7 +55,7 @@ TaskList.prototype.writeSource = function (){ /** * @method notify Log what happened in the previous methods */ -TaskList.prototype.notify = function (){ +proto.notify = function (){ //log('HELLO notify'); var self = this; @@ -64,19 +64,19 @@ TaskList.prototype.notify = function (){ }; -TaskList.prototype.onRetryAll = function (){ +proto.onRetryAll = function (){ }; -TaskList.prototype.onNext = function (nextData){ +proto.onNext = function (nextData){ //log('!onNext!' + JSON.stringify(nextData)); }; -TaskList.prototype.onFinish = function (){ +proto.onFinish = function (){ log('!onFinish!'); }; -TaskList.prototype.onAbort = function (){ +proto.onAbort = function (){ //log('!onAbort!'); }; @@ -85,7 +85,9 @@ TaskList.prototype.onAbort = function (){ // Adding data to Class prototype object, will be same in every instance. // No need to add in our shared data object namespace, which could be reset. // Also using the prototype object, we only assign it once. -TaskList.prototype.writeDir = './sourceCodes'; +proto.writeDir = './sourceCodes'; + +TaskList.prototype = Object.create(proto); module.exports = TaskList; \ No newline at end of file
Better prototype object creation/inheritance in TaskList.js
opensoars_f_
train
9060cd616188634ce44fc5530058fbb698bcbb88
diff --git a/modules/es/bases/es.DocumentBranchNode.js b/modules/es/bases/es.DocumentBranchNode.js index <HASH>..<HASH> 100644 --- a/modules/es/bases/es.DocumentBranchNode.js +++ b/modules/es/bases/es.DocumentBranchNode.js @@ -81,7 +81,7 @@ es.DocumentBranchNode.prototype.traverseLeafNodes = function( callback, from, re throw "from parameter passed to traverseLeafNodes() must be a descendant"; } // Find the index of n in p - i = p.getChildren().indexOf( n ); + i = p.indexOf( n ); if ( i === -1 ) { // This isn't supposed to be possible throw "Tree corruption detected: node isn't in its parent's children array";
Fix stupid mistake that broke traverseLeafNodes() in IE
wikimedia_parsoid
train
0a030bc3ae964a3e79b74fbeaae81f817a6a5674
diff --git a/vyper/parser/memory_allocator.py b/vyper/parser/memory_allocator.py index <HASH>..<HASH> 100644 --- a/vyper/parser/memory_allocator.py +++ b/vyper/parser/memory_allocator.py @@ -127,9 +127,8 @@ class MemoryAllocator: # releasing from the end of the allocated memory - reduce the free memory pointer if pos + size == self.next_mem: self.next_mem = pos - return - if not self.deallocated_mem or self.deallocated_mem[-1].position < pos: + elif not self.deallocated_mem or self.deallocated_mem[-1].position < pos: # no previously deallocated memory, or this is the highest position deallocated self.deallocated_mem.append(FreeMemory(position=pos, size=size)) else: @@ -139,6 +138,9 @@ class MemoryAllocator: ) self.deallocated_mem.insert(idx, FreeMemory(position=pos, size=size)) + if not self.deallocated_mem: + return + # iterate over deallocated memory and merge slots where possible i = 1 active = self.deallocated_mem[0] @@ -150,3 +152,8 @@ class MemoryAllocator: else: active = next_slot i += 1 + + last = self.deallocated_mem[-1] + if last.position + last.size == self.next_mem: + self.next_mem = last.position + del self.deallocated_mem[-1]
refactor: improve de-allocation logic
ethereum_vyper
train
729b8b0d84857e78237304db8ba82a2d1247ee8f
diff --git a/aeron-cluster/src/main/java/io/aeron/cluster/Election.java b/aeron-cluster/src/main/java/io/aeron/cluster/Election.java index <HASH>..<HASH> 100644 --- a/aeron-cluster/src/main/java/io/aeron/cluster/Election.java +++ b/aeron-cluster/src/main/java/io/aeron/cluster/Election.java @@ -110,7 +110,7 @@ class Election implements AutoCloseable } } - private final boolean isStartup; + private boolean isStartup; private final long statusIntervalMs; private final long leaderHeartbeatIntervalMs; private final long logLeadershipTermId; @@ -481,6 +481,7 @@ class Election implements AutoCloseable } else { + isStartup = false; state(State.CANVASS, nowMs); } diff --git a/aeron-cluster/src/test/java/io/aeron/cluster/ElectionTest.java b/aeron-cluster/src/test/java/io/aeron/cluster/ElectionTest.java index <HASH>..<HASH> 100644 --- a/aeron-cluster/src/test/java/io/aeron/cluster/ElectionTest.java +++ b/aeron-cluster/src/test/java/io/aeron/cluster/ElectionTest.java @@ -494,7 +494,7 @@ public class ElectionTest final long t6 = t5 + 1; election.doWork(t6); - final long t7 = t6 + TimeUnit.NANOSECONDS.toMillis(ctx.startupStatusTimeoutNs()); + final long t7 = t6 + TimeUnit.NANOSECONDS.toMillis(ctx.electionTimeoutNs()); election.doWork(t7); assertThat(election.state(), is(Election.State.NOMINATE));
[Java] Only use the startup timeout in the first canvass period; after the first, use the normal election timeout.
real-logic_aeron
train
cf947ce5268a77edf01ef73e2e43cd24599aeaec
diff --git a/footer.php b/footer.php index <HASH>..<HASH> 100644 --- a/footer.php +++ b/footer.php @@ -44,7 +44,7 @@ endif; ?> </div><!-- .footer-col --> <div class="footer-col"> <h5 class="footer-title"> - <a class="no-style" href="#">Devenir membre</a> + Devenir membre </h5> <?php wp_nav_menu( @@ -56,7 +56,7 @@ endif; ?> ?> </div><!-- .footer-col --> <div class="footer-col footer-col-newsletter"> - <h5 class="footer-title"><a class="no-style" href="#">Newsletter</a></h5> + <h5 class="footer-title">Newsletter</h5> <?php echo '<form action="https://quai10.us10.list-manage.com/subscribe/post?'. 'u=699bad1c5b054cbdff43d84a8&amp;id=384dd5ed71" method="post">';
Footer titles shouldn't be links
quai10_quai10-template
train
2d0baf42845681fa4fecd22221f5a8183d4afeb8
diff --git a/lib/mixpanel-node.js b/lib/mixpanel-node.js index <HASH>..<HASH> 100644 --- a/lib/mixpanel-node.js +++ b/lib/mixpanel-node.js @@ -372,33 +372,27 @@ var create_client = function(token, config) { }, // used internally by set and set_once - _set: function(distinct_id, $set, callback, modifiers) { - modifiers = modifiers || {}; - var set_key = "$set"; - - if (modifiers && modifiers.set_once) { - set_key = "$set_once"; - delete modifiers.set_once; - } + _set: function(distinct_id, $set, callback, options) { + options = options || {}; + var set_key = (options && options.set_once) ? "$set_once" : "$set"; var data = { '$token': metrics.token, '$distinct_id': distinct_id }; data[set_key] = $set; - - // keep for backwards compatability + if ('ip' in $set) { - modifiers.$ip = $set.ip; + data.$ip = $set.ip; delete $set.ip; } - // keep for backwards compatability + if ($set.$ignore_time) { - modifiers.$ignore_time = $set.$ignore_time; + data.$ignore_time = $set.$ignore_time; delete $set.$ignore_time; } - data = merge_modifiers(data, modifiers); + data = merge_modifiers(data, options); if(metrics.config.debug) { console.log("Sending the following data to Mixpanel (Engage):");
Changed _set parameter name back to 'options' and don't delete $set_once key
mixpanel_mixpanel-node
train
1016d2d16adaa44b8131abe9b7f280155e722e05
diff --git a/plugin/pkg/admission/priority/admission.go b/plugin/pkg/admission/priority/admission.go index <HASH>..<HASH> 100644 --- a/plugin/pkg/admission/priority/admission.go +++ b/plugin/pkg/admission/priority/admission.go @@ -19,6 +19,7 @@ package admission import ( "fmt" "io" + "strings" "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" @@ -41,6 +42,9 @@ const ( HighestUserDefinablePriority = 1000000000 // SystemCriticalPriority is the beginning of the range of priority values for critical system components. SystemCriticalPriority = 2 * HighestUserDefinablePriority + // SystemPriorityClassPrefix is the prefix reserved for system priority class names. Other priority + // classes are not allowed to start with this prefix. + SystemPriorityClassPrefix = "system-" ) // SystemPriorityClasses defines special priority classes which are used by system critical pods that should not be preempted by workload pods. @@ -203,6 +207,9 @@ func (p *PriorityPlugin) validatePriorityClass(a admission.Attributes) error { if pc.Value > HighestUserDefinablePriority { return admission.NewForbidden(a, fmt.Errorf("maximum allowed value of a user defined priority is %v", HighestUserDefinablePriority)) } + if strings.HasPrefix(pc.Name, SystemPriorityClassPrefix) { + return admission.NewForbidden(a, fmt.Errorf("priority class names with '%v' prefix are reserved for system use only: %v", SystemPriorityClassPrefix, pc.Name)) + } if _, ok := SystemPriorityClasses[pc.Name]; ok { return admission.NewForbidden(a, fmt.Errorf("the name of the priority class is a reserved name for system use only: %v", pc.Name)) } diff --git a/plugin/pkg/admission/priority/admission_test.go b/plugin/pkg/admission/priority/admission_test.go index <HASH>..<HASH> 100644 --- a/plugin/pkg/admission/priority/admission_test.go +++ b/plugin/pkg/admission/priority/admission_test.go @@ -127,6 +127,21 @@ func TestPriorityClassAdmission(t *testing.T) { systemClass, true, }, + { + "forbidden system name prefix", + []*scheduling.PriorityClass{}, + &scheduling.PriorityClass{ + TypeMeta: metav1.TypeMeta{ + Kind: "PriorityClass", + }, + ObjectMeta: metav1.ObjectMeta{ + Name: "system-something", + }, + Value: 5, + Description: "Name with 'system-' prefix is reserved for system use", + }, + true, + }, } for _, test := range tests {
Disallow PriorityClass names with 'system-' prefix for user defined priority classes
kubernetes_kubernetes
train
da8922cf1d75126abe2b6c3d1b8b6e44995c902a
diff --git a/psamm/gapfill.py b/psamm/gapfill.py index <HASH>..<HASH> 100644 --- a/psamm/gapfill.py +++ b/psamm/gapfill.py @@ -178,7 +178,7 @@ def gapfill(model, core, blocked, solver, epsilon=0.001, v_max=1000): # ym variables. This is done by introducing another helper # variable, yn. prob.define(('yn', reaction_id, compound), - types=lp.VariableType.Binary) + types=lp.VariableType.Binary) yn = prob.var(('yn', reaction_id, compound)) prob.add_linear_constraints( 2 * yn <= w + prob.var(('ym', reaction_id)))
gapfill: Fix minor flake style issue
zhanglab_psamm
train
42e68bb58a3e1a1e1faa9fbeb9ba6688e2962bcd
diff --git a/src/Illuminate/Support/Stringable.php b/src/Illuminate/Support/Stringable.php index <HASH>..<HASH> 100644 --- a/src/Illuminate/Support/Stringable.php +++ b/src/Illuminate/Support/Stringable.php @@ -148,7 +148,7 @@ class Stringable * @param string|array $needles * @return bool */ - public function contains($haystack, $needles) + public function contains($needles) { return Str::contains($this->value, $needles); }
Method contains() should only have needles (#<I>)
laravel_framework
train
5d5f71aac1c7e8f96139da0c281bacb10a1acbd8
diff --git a/pymatgen/core/structure.py b/pymatgen/core/structure.py index <HASH>..<HASH> 100644 --- a/pymatgen/core/structure.py +++ b/pymatgen/core/structure.py @@ -630,34 +630,6 @@ class IStructure(SiteCollection, MSONable): def __mul__(self, scaling_matrix): """ - Makes a supercell. - - Args: - scaling_matrix: A scaling matrix for transforming the lattice - vectors. Has to be all integers. Several options are possible: - - a. A full 3x3 scaling matrix defining the linear combination - the old lattice vectors. E.g., [[2,1,0],[0,3,0],[0,0, - 1]] generates a new structure with lattice vectors a' = - 2a + b, b' = 3b, c' = c where a, b, and c are the lattice - vectors of the original structure. - b. An sequence of three scaling factors. E.g., [2, 1, 1] - specifies that the supercell should have dimensions 2a x b x - c. - c. A number, which simply scales all lattice vectors by the - same factor. - - Returns: - Supercell structure. Note that a Structure is always returned, - even if the input structure is a subclass of Structure. This is - to avoid different arguments signatures from causing problems. If - you prefer a subclass to return its own type, you need to override - this method in the subclass. - """ - return self.mymul(scaling_matrix=scaling_matrix, to_unit_cell=True) - - def mymul(self, scaling_matrix, to_unit_cell=True): - """ Makes a supercell. Allowing to have sites outside the unit cell Args: @@ -674,7 +646,6 @@ class IStructure(SiteCollection, MSONable): c. c. A number, which simply scales all lattice vectors by the same factor. - to_unit_cell: Whether or not to fall back sites into the unit cell Returns: Supercell structure. Note that a Structure is always returned, @@ -696,7 +667,7 @@ class IStructure(SiteCollection, MSONable): for v in c_lat: s = PeriodicSite(site.species_and_occu, site.coords + v, new_lattice, properties=site.properties, - coords_are_cartesian=True, to_unit_cell=to_unit_cell) + coords_are_cartesian=True, to_unit_cell=False) new_sites.append(s) return Structure.from_sites(new_sites) @@ -2587,7 +2558,10 @@ class Structure(IStructure, collections.MutableSequence): same factor. to_unit_cell: Whether or not to fall back sites into the unit cell """ - s = self.mymul(scaling_matrix=scaling_matrix, to_unit_cell=to_unit_cell) + s = self*scaling_matrix + if to_unit_cell: + for isite, site in enumerate(s): + s[isite] = site.to_unit_cell self._sites = s.sites self._lattice = s.lattice
Multiplication method of a structure has changed slightly: sites are no longer automatically mapped back to the unit cell; use make_supercell with to_unit_cell=True for that.
materialsproject_pymatgen
train
1ca13f135adf500f17337757d13ec6c7ac55f3c1
diff --git a/saltcloud/clouds/ec2.py b/saltcloud/clouds/ec2.py index <HASH>..<HASH> 100644 --- a/saltcloud/clouds/ec2.py +++ b/saltcloud/clouds/ec2.py @@ -848,7 +848,7 @@ def create(vm_=None, call=None): 'host': ip_address, 'username': username, 'key_filename': key_filename, - 'deploy_command': 'sh /tmp/deploy.sh', + 'deploy_command': '/tmp/deploy.sh', 'tty': True, 'script': deploy_script, 'name': vm_['name'], diff --git a/saltcloud/clouds/libcloud_aws.py b/saltcloud/clouds/libcloud_aws.py index <HASH>..<HASH> 100644 --- a/saltcloud/clouds/libcloud_aws.py +++ b/saltcloud/clouds/libcloud_aws.py @@ -406,7 +406,7 @@ def create(vm_): 'host': ip_address, 'username': username, 'key_filename': key_filename, - 'deploy_command': 'sh /tmp/deploy.sh', + 'deploy_command': '/tmp/deploy.sh', 'tty': True, 'script': deploy_script.script, 'name': vm_['name'],
Remove erroneous sh from deploy script command
saltstack_salt
train
90a2dfb7761faaf28801ce3cc6e8e0dafc2c2e62
diff --git a/src/system/modules/metamodels/TableMetaModelFilterSetting.php b/src/system/modules/metamodels/TableMetaModelFilterSetting.php index <HASH>..<HASH> 100644 --- a/src/system/modules/metamodels/TableMetaModelFilterSetting.php +++ b/src/system/modules/metamodels/TableMetaModelFilterSetting.php @@ -339,6 +339,43 @@ class TableMetaModelFilterSetting extends TableMetaModelHelper } /** + * Set the parent condition for the current fid. + * + * @param string $strTable The tablename - must be tl_metamodel_filtersetting. + * + * @param DC_General $objDC The DataContainer calling us. + * + * @return string The value "tl_metamodel_filtersetting". + */ + public function loadTableCallback($strTable, $objDC) + { + if ($strTable == 'tl_metamodel_filtersetting') + { + $GLOBALS['TL_DCA']['tl_metamodel_filtersetting']['dca_config']['childCondition'][0]['filter'][] = array + ( + 'local' => 'fid', + 'remote_value' => $this->Input->get('id'), + 'operation' => '=', + ); + + $GLOBALS['TL_DCA']['tl_metamodel_filtersetting']['dca_config']['rootEntries']['self']['setOn'][] = array + ( + 'property' => 'fid', + 'value' => $this->Input->get('id'), + ); + + $GLOBALS['TL_DCA']['tl_metamodel_filtersetting']['dca_config']['rootEntries']['self']['filter'][] = array + ( + 'property' => 'fid', + 'operation' => '=', + 'value' => $this->Input->get('id'), + ); + } + + return $strTable; + } + + /** * when creating a new item, we need to populate the fid column. */ public function create_callback($strTable, $insertID, $arrRow, $objDC) diff --git a/src/system/modules/metamodels/dca/tl_metamodel_filtersetting.php b/src/system/modules/metamodels/dca/tl_metamodel_filtersetting.php index <HASH>..<HASH> 100644 --- a/src/system/modules/metamodels/dca/tl_metamodel_filtersetting.php +++ b/src/system/modules/metamodels/dca/tl_metamodel_filtersetting.php @@ -26,7 +26,8 @@ $GLOBALS['TL_DCA']['tl_metamodel_filtersetting'] = array 'switchToEdit' => false, 'enableVersioning' => false, 'oncreate_callback' => array(array('TableMetaModelFilterSetting', 'create_callback')), - 'palettes_callback' => array(array('TableMetaModelFilterSetting', 'preparePalettes')) + 'palettes_callback' => array(array('TableMetaModelFilterSetting', 'preparePalettes')), + 'tablename_callback' => array(array('TableMetaModelFilterSetting', 'loadTableCallback')), ), 'dca_config' => array (
Fix issue #<I> - fid is ignored in the filter setting list, causing all filter settings to appear as children of any filter.
MetaModels_core
train
65cbb70b30b4dfc3d79e4cba7cbf3147898c0b40
diff --git a/src/__/collections/has.php b/src/__/collections/has.php index <HASH>..<HASH> 100644 --- a/src/__/collections/has.php +++ b/src/__/collections/has.php @@ -26,9 +26,12 @@ function has($collection, $path) $key = $portions[0]; if (\count($portions) === 1) { - $has = \__::isObject($collection) ? 'property_exists' : 'array_key_exists'; - $args = \__::isObject($collection) ? [$collection, $key] : [$key, $collection]; - return call_user_func_array($has, $args); +// $has = \__::isObject($collection) ? 'property_exists' : 'array_key_exists'; +// $args = \__::isObject($collection) ? [$collection, $key] : [$key, $collection]; +// return call_user_func_array($has, $args); + // We use a cast to array to handle the numeric keys for objects (workaround). + // See: https://wiki.php.net/rfc/convert_numeric_keys_in_object_array_casts + return array_key_exists($key, (array) $collection); } return has(\__::get($collection, $key), $portions[1]); } diff --git a/tests/collections.php b/tests/collections.php index <HASH>..<HASH> 100644 --- a/tests/collections.php +++ b/tests/collections.php @@ -312,6 +312,8 @@ class CollectionsTest extends \PHPUnit\Framework\TestCase $a = ['foo' => 'bar']; $b = (object) ['foo' => 'bar']; $c = ['foo' => ['bar' => 'foie']]; + $d = [5]; + $e = (object) [5]; // Act. $x = __::has($a, 'foo'); @@ -319,6 +321,8 @@ class CollectionsTest extends \PHPUnit\Framework\TestCase $z = __::has($b, 'foo'); $xa = __::has($b, 'foz'); $xb = __::has($c, 'foo.bar'); + $xc = __::has($d, 0); + $xd = __::has($e, 0); // Assert. $this->assertTrue($x); @@ -326,6 +330,8 @@ class CollectionsTest extends \PHPUnit\Framework\TestCase $this->assertTrue($z); $this->assertFalse($xa); $this->assertTrue($xb); + $this->assertTrue($xc); + $this->assertTrue($xd); } public function testHasKeys()
found a bug with numeric keys in objects in __::has()
maciejczyzewski_bottomline
train
66da4875d0f27cc63c58bd93f74fbc9b476362cb
diff --git a/dedupe/convenience.py b/dedupe/convenience.py index <HASH>..<HASH> 100644 --- a/dedupe/convenience.py +++ b/dedupe/convenience.py @@ -14,9 +14,20 @@ def dataSample(data, sample_size): '''Randomly sample pairs of records from a data dictionary''' data_list = data.values() - random_pairs = dedupe.core.randomPairs(len(data_list), sample_size) + data_list_A = [] + data_list_B = [] - return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs) + for data in data_list: + if data['dataset'] == 0: + data_list_A.append(data) + else: + data_list_B.append(data) + + n_records = len(data_list_A) if len(data_list_A) < len(data_list_B) else len(data_list_B) + + random_pairs = dedupe.core.randomPairs(n_records, sample_size) + + return tuple((data_list_A[int(k1)], data_list_B[int(k2)]) for k1, k2 in random_pairs) def blockData(data_d, blocker):
Generate random pairs for the data sample such that each record comes from a different dataset
dedupeio_dedupe
train
f5feb2d7934cf361a2167c6085452c095073fd0a
diff --git a/Tpg/ExtjsBundle/Annotation/Model.php b/Tpg/ExtjsBundle/Annotation/Model.php index <HASH>..<HASH> 100644 --- a/Tpg/ExtjsBundle/Annotation/Model.php +++ b/Tpg/ExtjsBundle/Annotation/Model.php @@ -10,4 +10,5 @@ use Doctrine\ORM\Mapping\Annotation; final class Model implements Annotation { public $name; public $extend = "Ext.data.Model"; + public $generateAsBase = false; } \ No newline at end of file diff --git a/Tpg/ExtjsBundle/Service/GeneratorService.php b/Tpg/ExtjsBundle/Service/GeneratorService.php index <HASH>..<HASH> 100644 --- a/Tpg/ExtjsBundle/Service/GeneratorService.php +++ b/Tpg/ExtjsBundle/Service/GeneratorService.php @@ -33,8 +33,12 @@ class GeneratorService { /** @var $classModelAnnotation Model */ $classModelAnnotation = $this->annoReader->getClassAnnotation($classRef, 'Tpg\ExtjsBundle\Annotation\Model'); if ($classModelAnnotation !== null) { + $modelName = $classModelAnnotation->name; + if ($classModelAnnotation->generateAsBase === true) { + $modelName = substr($modelName, 0, strrpos($modelName, '.')+1) . 'Base' . substr($modelName, strrpos($modelName, '.')+1); + } $structure = array( - 'name' => $classModelAnnotation->name, + 'name' => $modelName, 'extend' => $classModelAnnotation->extend, 'fields' => array(), 'associations' => array(), diff --git a/Tpg/ExtjsBundle/Tests/Fixtures/Test/Model/Book.php b/Tpg/ExtjsBundle/Tests/Fixtures/Test/Model/Book.php index <HASH>..<HASH> 100644 --- a/Tpg/ExtjsBundle/Tests/Fixtures/Test/Model/Book.php +++ b/Tpg/ExtjsBundle/Tests/Fixtures/Test/Model/Book.php @@ -5,7 +5,10 @@ use Doctrine\ORM\Mapping as ORM; use Tpg\ExtjsBundle\Annotation as Extjs; /** - * @Extjs\Model(name="Test.model.Book") + * @Extjs\Model( + * name="Test.model.Book", + * generateAsBase=true + * ) */ class Book { /** diff --git a/Tpg/ExtjsBundle/Tests/SpecRunner.html b/Tpg/ExtjsBundle/Tests/SpecRunner.html index <HASH>..<HASH> 100644 --- a/Tpg/ExtjsBundle/Tests/SpecRunner.html +++ b/Tpg/ExtjsBundle/Tests/SpecRunner.html @@ -10,7 +10,7 @@ src="http://extjs-public.googlecode.com/svn/tags/extjs-4.1.1a/release/ext-all.js"></script> <!-- include source files here... --> - <script type="text/javascript" src="http://127.0.0.1:8888/generateModel.js?model[]=Test.Model.Person"></script> + <script type="text/javascript" src="http://127.0.0.1:8888/generateModel.js?model[]=Test.Model.Person&model[]=Test.Model.Book"></script> <!-- include spec files here... --> <script type="text/javascript" src="spec/extjs.spec.js"></script> diff --git a/Tpg/ExtjsBundle/Tests/spec/generatemodel.spec.js b/Tpg/ExtjsBundle/Tests/spec/generatemodel.spec.js index <HASH>..<HASH> 100644 --- a/Tpg/ExtjsBundle/Tests/spec/generatemodel.spec.js +++ b/Tpg/ExtjsBundle/Tests/spec/generatemodel.spec.js @@ -2,6 +2,9 @@ describe('Model Generator', function () { it('Person Model exist', function () { expect(Test.model.Person).toBeDefined(); }); + it('Base Book Model to exist', function() { + expect(Test.model.BaseBook).toBeDefined(); + }); describe('Model Fields', function () { var getField = (function () { var fields = Test.model.Person.getFields();
Added the ability to generate a base ExtJS model so that custom models can extend it.
AmsTaFFix_extjs-bundle
train
51d52ed9722ea26265ad1ca839e062a75e353c47
diff --git a/src/Tasks.php b/src/Tasks.php index <HASH>..<HASH> 100644 --- a/src/Tasks.php +++ b/src/Tasks.php @@ -476,7 +476,8 @@ class Tasks extends \Robo\Tasks $this->_exec("terminus connection:set $terminus_site_env git"); // Deployment - $this->deploy($terminus_env); + $pantheon_branch = $terminus_env == 'dev' ? 'master' : $terminus_env; + $this->deploy($pantheon_branch); // Trigger remote install. if ($opts['install']) {
Fix deployment to Pantheon when using the dev environment.
thinkshout_robo-drupal
train
e9e9eca09bb152dff752d811f6a4e50d98272e16
diff --git a/wepay/api.py b/wepay/api.py index <HASH>..<HASH> 100644 --- a/wepay/api.py +++ b/wepay/api.py @@ -30,8 +30,8 @@ class WePay(object): suppress all warnings, or `False` to raise all warnings. :keyword bool use_requests: set to `False` in order to explicitly turn off - `requests<http://docs.python-requests.org/en/latest/>`_ library usage and - fallback to `urllib<https://docs.python.org/3/library/urllib.html#module-urllib>`_ + `requests <http://docs.python-requests.org/en/latest/>`_ library usage and + fallback to `urllib <https://docs.python.org/3/library/urllib.html#module-urllib>`_ Instance of this class contains attributes, which correspond to WePay objects and should be used to perform API calls. If a WePay object has a diff --git a/wepay/exceptions.py b/wepay/exceptions.py index <HASH>..<HASH> 100644 --- a/wepay/exceptions.py +++ b/wepay/exceptions.py @@ -44,20 +44,23 @@ class WePayError(Exception): class WePayConnectionError(Exception): + """Raised in case there is a problem connecting to WePay servers""" + def __init__(self, error, message): self._error = error self._message = message @property def error(self): - """Original exception that caused an error.""" + """Original exception that caused an error. Either `requests` or `urllib` + exception. See their corresponding documentation if necessary. + + """ return self._error @property def message(self): - """Message explaining the nature of the error. - - """ + """Message explaining the nature of the error.""" return self._message def __str__(self):
forgot WePayConnectionError documentation
lehins_python-wepay
train
a2fe95ec26ba5f8f18fba7f5f974ff8cdb652cdd
diff --git a/provider/vendor/plugins/oauth2_provider/init.rb b/provider/vendor/plugins/oauth2_provider/init.rb index <HASH>..<HASH> 100644 --- a/provider/vendor/plugins/oauth2_provider/init.rb +++ b/provider/vendor/plugins/oauth2_provider/init.rb @@ -0,0 +1,3 @@ +if RAILS_ENV == 'development' + ActiveSupport::Dependencies.load_once_paths.reject!{|x| x =~ /^#{Regexp.escape(File.dirname(__FILE__))}/} +end
fix plugin reload issue in dev env
ThoughtWorksStudios_oauth2_provider
train
0923fe675f1bfeab6117a15e5cc0e85c02a5cb09
diff --git a/mockserver-client-javascript/src/main/javascript/mockServerClient.js b/mockserver-client-javascript/src/main/javascript/mockServerClient.js index <HASH>..<HASH> 100644 --- a/mockserver-client-javascript/src/main/javascript/mockServerClient.js +++ b/mockserver-client-javascript/src/main/javascript/mockServerClient.js @@ -1,5 +1,5 @@ /** - * Start the client communicating to a MockServer at the specified host and port + * Start the client communicating to the MockServer at the specified host and port * for example: * * var client = mockServerClient("localhost", 1080); @@ -124,7 +124,7 @@ var mockServerClient = function (host, port) { /** * Verify a request has been sent for example: * - * client.verify({ + * mockServerClient("localhost", 1080).verify({ * 'method': 'POST', * 'path': '/somePath' * }); @@ -155,7 +155,7 @@ var mockServerClient = function (host, port) { /** * Verify a sequence of requests has been sent for example: * - * client.verifySequence( + * mockServerClient("localhost", 1080).verifySequence( * { * 'method': 'POST', * 'path': '/first_request' diff --git a/mockserver-client-javascript/src/main/javascript/proxyClient.js b/mockserver-client-javascript/src/main/javascript/proxyClient.js index <HASH>..<HASH> 100644 --- a/mockserver-client-javascript/src/main/javascript/proxyClient.js +++ b/mockserver-client-javascript/src/main/javascript/proxyClient.js @@ -1,3 +1,12 @@ +/** + * Start the client communicating to the proxy at the specified host and port + * for example: + * + * var client = proxyClient("localhost", 1080); + * + * @param host the host for the proxy to communicate with + * @param port the port for the proxy to communicate with + */ var proxyClient = function (host, port) { "use strict"; @@ -37,7 +46,7 @@ var proxyClient = function (host, port) { /** * Verify a request has been sent for example: * - * client.verify({ + * proxyClient("localhost", 1080).verify({ * 'method': 'POST', * 'path': '/somePath' * }); @@ -68,7 +77,7 @@ var proxyClient = function (host, port) { /** * Verify a sequence of requests has been sent for example: * - * client.verifySequence( + * proxyClient("localhost", 1080).verifySequence( * { * 'method': 'POST', * 'path': '/first_request'
improved the comments to make them clearer and more consistent
jamesdbloom_mockserver
train
9199f61224f0dd9c4a255d4bbb16d8ed798f8621
diff --git a/src/main/java/org/imsglobal/aspect/LtiLaunchVerifier.java b/src/main/java/org/imsglobal/aspect/LtiLaunchVerifier.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/imsglobal/aspect/LtiLaunchVerifier.java +++ b/src/main/java/org/imsglobal/aspect/LtiLaunchVerifier.java @@ -12,6 +12,9 @@ import org.aspectj.lang.annotation.Aspect; import org.imsglobal.basiclti.BasicLTIUtil; import org.imsglobal.basiclti.LtiVerificationResult; +import java.util.ArrayList; +import java.util.List; + /** * * @author pgray @@ -25,13 +28,43 @@ public class LtiLaunchVerifier { this.keyService = ltiKeySecretService; } - @Around("@annotation(launch) && execution(* *(javax.servlet.http.HttpServletRequest+, org.imsglobal.basiclti.LtiVerificationResult)) && args(request,result)") - public Object verifyLtiLaunch(ProceedingJoinPoint pjp, Lti launch, HttpServletRequest request, LtiVerificationResult result) throws Throwable { + //@Around("@annotation(launch) && execution(* *(javax.servlet.http.HttpServletRequest+, org.imsglobal.basiclti.LtiVerificationResult, ..)) && args(request, result)") + @Around("@annotation(launch)") + public Object verifyLtiLaunch(ProceedingJoinPoint pjp, Lti launch) throws Throwable { + HttpServletRequest request = null; + for (Object arg : pjp.getArgs()) { + if (HttpServletRequest.class.isInstance(arg)) { + request = (HttpServletRequest) arg; + } + } + if(request == null){ + throw new IllegalStateException(getErrorMessageForArgumentClass("HttpServletRequest", pjp.getSignature().toLongString())); + } + System.out.println("checking lti params..."); String oauthSecret = keyService.getSecretForKey(request.getParameter("oauth_consumer_key")); - result = BasicLTIUtil.validateMessage(request, request.getRequestURL().toString(), oauthSecret); + LtiVerificationResult ltiResult = BasicLTIUtil.validateMessage(request, request.getRequestURL().toString(), oauthSecret); + + Boolean ltiVerificationResultExists = false; + //This array will hold the arguments to the join point, so we can pass them along to the advised function. + List<Object> args = new ArrayList<>(pjp.getArgs().length); + for (Object arg : pjp.getArgs()) { + if (arg.getClass().equals(LtiVerificationResult.class)) { + args.add(ltiResult); + ltiVerificationResultExists = true; + } else { + args.add(arg); + } + } + if(!ltiVerificationResultExists){ + throw new IllegalStateException(getErrorMessageForArgumentClass("LtiVerificationResult", pjp.getSignature().toLongString())); + } + + return pjp.proceed(args.toArray()); + } - return pjp.proceed(new Object[] {request, result}); + public String getErrorMessageForArgumentClass(String argumentClass, String signature){ + return "The LtiLaunchVerifier instance cannot find the " + argumentClass + " argument on method: " + signature + ", are you sure it was declared?"; } } diff --git a/src/main/java/org/imsglobal/basiclti/LtiUser.java b/src/main/java/org/imsglobal/basiclti/LtiUser.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/imsglobal/basiclti/LtiUser.java +++ b/src/main/java/org/imsglobal/basiclti/LtiUser.java @@ -15,8 +15,10 @@ public class LtiUser { public LtiUser(HttpServletRequest request) { this.id = request.getParameter("user_id"); this.roles = new LinkedList<>(); - for(String role : request.getParameter("roles").split(",")){ - this.roles.add(role.trim()); + if(request.getParameter("roles") != null) { + for (String role : request.getParameter("roles").split(",")) { + this.roles.add(role.trim()); + } } }
Making the LtiLaunchVerifier aspect pointcut broader
IMSGlobal_basiclti-util-java
train
4f886b822309ad75c4b48d689133190d80fb0006
diff --git a/src/main/java/com/fatboyindustrial/gsonjodatime/Converters.java b/src/main/java/com/fatboyindustrial/gsonjodatime/Converters.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/fatboyindustrial/gsonjodatime/Converters.java +++ b/src/main/java/com/fatboyindustrial/gsonjodatime/Converters.java @@ -40,6 +40,21 @@ import java.lang.reflect.Type; */ public class Converters { + /** The specific genericized type for {@code DateMidnight}. */ + public static final Type DATE_MIDNIGHT_TYPE = new TypeToken<DateMidnight>(){}.getType(); + + /** The specific genericized type for {@code DateTime}. */ + public static final Type DATE_TIME_TYPE = new TypeToken<DateTime>(){}.getType(); + + /** The specific genericized type for {@code LocalDate}. */ + public static final Type LOCAL_DATE_TYPE = new TypeToken<LocalDate>(){}.getType(); + + /** The specific genericized type for {@code LocalDateTime}. */ + public static final Type LOCAL_DATE_TIME_TYPE = new TypeToken<LocalDateTime>(){}.getType(); + + /** The specific genericized type for {@code LocalTime}. */ + public static final Type LOCAL_TIME_TYPE = new TypeToken<LocalTime>(){}.getType(); + /** * Registers all the Joda Time converters. * @param builder The GSON builder to register the converters with. @@ -67,8 +82,7 @@ public class Converters { if (builder == null) { throw new NullPointerException("builder cannot be null"); } - final Type type = new TypeToken<DateMidnight>(){}.getType(); - builder.registerTypeAdapter(type, new DateMidnightConverter()); + builder.registerTypeAdapter(DATE_MIDNIGHT_TYPE, new DateMidnightConverter()); return builder; } @@ -82,8 +96,7 @@ public class Converters { if (builder == null) { throw new NullPointerException("builder cannot be null"); } - final Type type = new TypeToken<DateTime>(){}.getType(); - builder.registerTypeAdapter(type, new DateTimeConverter()); + builder.registerTypeAdapter(DATE_TIME_TYPE, new DateTimeConverter()); return builder; } @@ -97,8 +110,7 @@ public class Converters { if (builder == null) { throw new NullPointerException("builder cannot be null"); } - final Type type = new TypeToken<LocalDate>(){}.getType(); - builder.registerTypeAdapter(type, new LocalDateConverter()); + builder.registerTypeAdapter(LOCAL_DATE_TYPE, new LocalDateConverter()); return builder; } @@ -112,8 +124,7 @@ public class Converters { if (builder == null) { throw new NullPointerException("builder cannot be null"); } - final Type type = new TypeToken<LocalDateTime>(){}.getType(); - builder.registerTypeAdapter(type, new LocalDateTimeConverter()); + builder.registerTypeAdapter(LOCAL_DATE_TIME_TYPE, new LocalDateTimeConverter()); return builder; } @@ -127,8 +138,7 @@ public class Converters { if (builder == null) { throw new NullPointerException("builder cannot be null"); } - final Type type = new TypeToken<LocalTime>(){}.getType(); - builder.registerTypeAdapter(type, new LocalTimeConverter()); + builder.registerTypeAdapter(LOCAL_TIME_TYPE, new LocalTimeConverter()); return builder; }
Add constants for type tokens.
gkopff_gson-jodatime-serialisers
train
4a3459399726c91662e18c6cd70dcf4db4f121d9
diff --git a/src/Go/Aop/Support/SignaturePropertyPointcut.php b/src/Go/Aop/Support/SignaturePropertyPointcut.php index <HASH>..<HASH> 100644 --- a/src/Go/Aop/Support/SignaturePropertyPointcut.php +++ b/src/Go/Aop/Support/SignaturePropertyPointcut.php @@ -48,6 +48,18 @@ class SignaturePropertyPointcut implements Pointcut, PropertyMatcher protected $modifier; /** + * Bit mask: + * + * const IS_STATIC = 1; + * const IS_PUBLIC = 256; + * const IS_PROTECTED = 512; + * const IS_PRIVATE = 1024; + * + * @var integer|null + */ + protected static $bitMask = 0x0701; + + /** * Signature property matcher constructor * * @param string $propertyName Name of the property to match or glob pattern @@ -110,7 +122,8 @@ class SignaturePropertyPointcut implements Pointcut, PropertyMatcher return false; } - if (!($property->getModifiers() & $this->modifier)) { + $modifiers = $property->getModifiers(); + if (!($modifiers & $this->modifier) || ((self::$bitMask - $this->modifier) & $modifiers)) { return false; }
#7 Fix an issue in the property matcher when it matches a static property
goaop_framework
train
cadc9bf54923f4e911a11850dc8db32cc642964b
diff --git a/go/chat/server.go b/go/chat/server.go index <HASH>..<HASH> 100644 --- a/go/chat/server.go +++ b/go/chat/server.go @@ -552,7 +552,7 @@ func (h *Server) NewConversationLocal(ctx context.Context, arg chat1.NewConversa // Note that from this point on, TopicID is entirely the wrong value. convID = cerr.ConvID case libkb.ChatCollisionError: - // The triple did not exist, but a collision occurred on convID. Retry with a different topic ID. + // The triple did not exist, but a collision occurred on convID. Retry with a different topic ID.N h.Debug(ctx, "NewConversationLocal: collision: %v", reserr) continue default: @@ -592,8 +592,20 @@ func (h *Server) NewConversationLocal(ctx context.Context, arg chat1.NewConversa return chat1.NewConversationLocalRes{}, errors.New(res.Conv.Error.Message) } + // Send a message to the channel after joining. + joinMessageBody := chat1.NewMessageBodyWithJoin(chat1.MessageJoin{}) + rl, err = h.postJoinLeave(ctx, convID, joinMessageBody) + if err != nil { + h.Debug(ctx, "posting join-conv message failed: %v", err) + // ignore the error + } + if err == nil && rl != nil { + res.RateLimits = append(res.RateLimits, *rl) + } + res.RateLimits = utils.AggRateLimits(res.RateLimits) res.IdentifyFailures = identBreaks + return res, nil } @@ -2206,8 +2218,6 @@ func (h *Server) doJoinConversation(ctx context.Context, convID chat1.Conversati res.RateLimits = append(res.RateLimits, *joinRes.RateLimit) } - res.RateLimits = utils.AggRateLimits(res.RateLimits) - if !alreadyIn { // Send a message to the channel after joining. joinMessageBody := chat1.NewMessageBodyWithJoin(chat1.MessageJoin{}) @@ -2217,10 +2227,11 @@ func (h *Server) doJoinConversation(ctx context.Context, convID chat1.Conversati // ignore the error } if err == nil && rl != nil { - res.RateLimits = utils.AggRateLimits(append(res.RateLimits, *rl)) + res.RateLimits = append(res.RateLimits, *rl) } } + res.RateLimits = utils.AggRateLimits(res.RateLimits) res.Offline = h.G().Syncer.IsConnected(ctx) return res, nil @@ -2316,11 +2327,14 @@ func (h *Server) LeaveConversationLocal(ctx context.Context, convID chat1.Conver // Send a message to the channel before leaving if alreadyIn { leaveMessageBody := chat1.NewMessageBodyWithLeave(chat1.MessageLeave{}) - _, err = h.postJoinLeave(ctx, convID, leaveMessageBody) + rl, err := h.postJoinLeave(ctx, convID, leaveMessageBody) if err != nil { h.Debug(ctx, "posting leave-conv message failed: %v", err) // ignore the error } + if err == nil && rl != nil { + res.RateLimits = append(res.RateLimits, *rl) + } } leaveRes, err := h.remoteClient().LeaveConversation(ctx, convID)
send JOIN on NewConversation, better ratelimit handling
keybase_client
train
c7a02e563f84b646e28fa4e811faaef76f7dd129
diff --git a/src/config.js b/src/config.js index <HASH>..<HASH> 100644 --- a/src/config.js +++ b/src/config.js @@ -49,7 +49,7 @@ module.exports.read = function get_config(project_root) { JSHINT(data); var err = JSHINT.errors[0]; if (err) { - throw 'Parsing "'+config_json+'" at line '+err.line+" col "+err.character+"; "+err.reason; + throw new Error('Parsing "'+config_json+'" at line '+err.line+" col "+err.character+"; "+err.reason); } throw e; }
CB-<I> use throw Error to include stack information for -d
apache_cordova-lib
train
a41e3c102e509db93f62be34eb782f73e43f95a7
diff --git a/server/render.js b/server/render.js index <HASH>..<HASH> 100644 --- a/server/render.js +++ b/server/render.js @@ -72,8 +72,6 @@ async function doRender (req, res, pathname, query, { require(appPath) ]) - await Loadable.preloadAll() // Make sure all dynamic imports are loaded - Component = Component.default || Component if (typeof Component !== 'function') { @@ -143,6 +141,8 @@ async function doRender (req, res, pathname, query, { return { html, head, errorHtml, buildManifest } } + await Loadable.preloadAll() // Make sure all dynamic imports are loaded + const docProps = await loadGetInitialProps(Document, { ...ctx, renderPage }) const dynamicImports = getDynamicImportBundles(reactLoadableManifest, reactLoadableModules)
Await preloading right before rendering the page (#<I>)
zeit_next.js
train
79a50f52820a971a1508ee66a882b928fa8c13d7
diff --git a/lib/kafka/consumer.rb b/lib/kafka/consumer.rb index <HASH>..<HASH> 100644 --- a/lib/kafka/consumer.rb +++ b/lib/kafka/consumer.rb @@ -74,6 +74,7 @@ module Kafka @offset_manager = OffsetManager.new( group: @group, logger: @logger, + commit_interval: 10, ) end @@ -126,13 +127,20 @@ module Kafka yield message end + @offset_manager.commit_offsets_if_necessary + send_heartbeat_if_necessary mark_message_as_processed(message) end rescue ConnectionError => e - @logger.error "Connection error while fetching messages: #{e}" - else - @offset_manager.commit_offsets unless batch.nil? || batch.empty? + @logger.error "Connection error while sending heartbeat; rejoining" + join_group + rescue UnknownMemberId + @logger.error "Kicked out of group; rejoining" + join_group + rescue RebalanceInProgress + @logger.error "Group is rebalancing; rejoining" + join_group end end end @@ -152,15 +160,18 @@ module Kafka def shutdown @offset_manager.commit_offsets @group.leave + rescue ConnectionError end private + def join_group + @offset_manager.clear_offsets + @group.join + end + def fetch_batch - unless @group.member? - @group.join - @offset_manager.clear_offsets - end + join_group unless @group.member? @logger.debug "Fetching a batch of messages" @@ -189,9 +200,13 @@ module Kafka messages = operation.execute - @logger.debug "Fetched #{messages.count} messages" + @logger.info "Fetched #{messages.count} messages" messages + rescue ConnectionError => e + @logger.error "Connection error while fetching messages: #{e}" + + return [] end # Sends a heartbeat if it would be necessary in order to avoid getting diff --git a/lib/kafka/consumer_group.rb b/lib/kafka/consumer_group.rb index <HASH>..<HASH> 100644 --- a/lib/kafka/consumer_group.rb +++ b/lib/kafka/consumer_group.rb @@ -44,6 +44,7 @@ module Kafka def leave @logger.info "[#{@member_id}] Leaving group `#{@group_id}`" coordinator.leave_group(group_id: @group_id, member_id: @member_id) + rescue ConnectionError end def fetch_offsets @@ -66,14 +67,6 @@ module Kafka Protocol.handle_error(error_code) end end - rescue UnknownMemberId - @logger.error "Kicked out of group; rejoining" - join - retry - rescue IllegalGeneration - @logger.error "Illegal generation #{@generation_id}; rejoining group" - join - retry end def heartbeat @@ -86,15 +79,6 @@ module Kafka ) Protocol.handle_error(response.error_code) - rescue ConnectionError => e - @logger.error "Connection error while sending heartbeat; rejoining" - join - rescue UnknownMemberId - @logger.error "Kicked out of group; rejoining" - join - rescue RebalanceInProgress - @logger.error "Group is rebalancing; rejoining" - join end private diff --git a/lib/kafka/offset_manager.rb b/lib/kafka/offset_manager.rb index <HASH>..<HASH> 100644 --- a/lib/kafka/offset_manager.rb +++ b/lib/kafka/offset_manager.rb @@ -1,12 +1,14 @@ module Kafka class OffsetManager - def initialize(group:, logger:) + def initialize(group:, logger:, commit_interval: 10) @group = group @logger = logger + @commit_interval = commit_interval @processed_offsets = {} @default_offsets = {} @committed_offsets = nil + @last_commit = Time.at(0) end def set_default_offset(topic, default_offset) @@ -29,8 +31,15 @@ module Kafka end def commit_offsets - @logger.debug "Committing offsets" + @logger.info "Committing offsets" @group.commit_offsets(@processed_offsets) + @last_commit = Time.now + end + + def commit_offsets_if_necessary + if Time.now - @last_commit >= @commit_interval + commit_offsets + end end def clear_offsets diff --git a/spec/fuzz/consumer_group_spec.rb b/spec/fuzz/consumer_group_spec.rb index <HASH>..<HASH> 100644 --- a/spec/fuzz/consumer_group_spec.rb +++ b/spec/fuzz/consumer_group_spec.rb @@ -1,6 +1,6 @@ describe "Consumer groups", fuzz: true do let(:logger) { Logger.new(LOG) } - let(:num_messages) { 100_000 } + let(:num_messages) { 10_000 } let(:num_partitions) { 30 } let(:num_consumers) { 10 } let(:topic) { "fuzz-consumer-group" } @@ -56,6 +56,7 @@ describe "Consumer groups", fuzz: true do size = num_messages - missing_messages.size puts "===> Received #{size} messages" if size % 100 == 0 else + puts "===> Duplicate message #{message} received" duplicate_messages.add(message) end end @@ -76,7 +77,7 @@ describe "Consumer groups", fuzz: true do connect_timeout: 20, ) - consumer = kafka.consumer(group_id: "fuzz", session_timeout: 10) + consumer = kafka.consumer(group_id: "fuzz", session_timeout: 30) consumer.subscribe(topic) consumer.each_message do |message|
Automatically commit offsets at fixed intervals
zendesk_ruby-kafka
train
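The heart of the ruby-kafka change above is time-based commit throttling: record when offsets were last committed and only commit again once the configured interval has elapsed. A minimal Python sketch of the same mechanism (class and method names mirror the diff but are illustrative, not ruby-kafka's API):

    import time

    class OffsetManager:
        def __init__(self, group, commit_interval=10):
            self.group = group
            self.commit_interval = commit_interval
            self.processed_offsets = {}
            self.last_commit = 0.0  # far in the past, so the first check commits immediately

        def commit_offsets(self):
            self.group.commit_offsets(self.processed_offsets)
            self.last_commit = time.monotonic()

        def commit_offsets_if_necessary(self):
            # Mirrors the Ruby version: commit only when the interval has elapsed.
            if time.monotonic() - self.last_commit >= self.commit_interval:
                self.commit_offsets()

Calling commit_offsets_if_necessary after every processed message, as the consumer loop in the diff does, yields at most one offset commit per interval regardless of message throughput.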
d75b015a2f9c8da4482d94292d66d2200153cd39
diff --git a/src/Space.js b/src/Space.js index <HASH>..<HASH> 100644 --- a/src/Space.js +++ b/src/Space.js @@ -12,6 +12,7 @@ var SpaceTaa = require('./SpaceTaa'); var Space = function () { Emitter(this); SpacePlane(this); + // Has constant identity transformation _T this.content = {}; // Dict over list because key search time complexity this.transformedHandlers = {}; // To be able to remove event handlers diff --git a/src/SpacePlane.js b/src/SpacePlane.js index <HASH>..<HASH> 100644 --- a/src/SpacePlane.js +++ b/src/SpacePlane.js @@ -1,5 +1,6 @@ -// API v0.3.0 +// API v0.6.0 +var nudged = require('nudged'); var SpacePoint = require('./SpacePoint'); var at = function (xy) { @@ -11,7 +12,8 @@ var at = function (xy) { var SpacePlane = function (emitter) { // Coordinate system. The transformation from the space to the plane. - emitter._T = null; + // For Space, it is obviously the identity transform. + emitter._T = new nudged.Transform(1, 0, 0, 0); // identity transformation emitter.at = at; }; diff --git a/src/Transformer.js b/src/Transformer.js index <HASH>..<HASH> 100644 --- a/src/Transformer.js +++ b/src/Transformer.js @@ -21,9 +21,6 @@ var normalize = function (points, plane) { }; var Transformer = function (plane) { - // As Space is only null SpacePlane, - // Transformers cannot have null transformations. - plane._T = new nudged.Transform(1, 0, 0, 0); // identity transformation plane.scale = function (pivot, multiplierOrDomain, range) { // Parameter
SpacePlane has identity transform by default
taataa_tapspace
train
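For context, nudged encodes a 2D similarity transform as four numbers (s, r, tx, ty), and (1, 0, 0, 0) is the identity. A short Python sketch (assuming that parameterization) of a plane that defaults to the identity transform, so projecting a point is a no-op until a real transform is assigned:

    class SpacePlane:
        IDENTITY = (1.0, 0.0, 0.0, 0.0)  # (s, r, tx, ty), i.e. the matrix [[s, -r, tx], [r, s, ty]]

        def __init__(self, T=None):
            self._T = T if T is not None else SpacePlane.IDENTITY

        def at(self, xy):
            s, r, tx, ty = self._T
            x, y = xy
            return (s * x - r * y + tx, r * x + s * y + ty)

    assert SpacePlane().at((3.0, 4.0)) == (3.0, 4.0)  # identity leaves points unchanged

Defaulting to the identity rather than null, as the commit does, removes the need for null checks in every consumer of _T.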
7ba2ea54166a4a0556bdef92b51a7452a194ca8c
diff --git a/autofit/optimize/grid_search.py b/autofit/optimize/grid_search.py index <HASH>..<HASH> 100644 --- a/autofit/optimize/grid_search.py +++ b/autofit/optimize/grid_search.py @@ -123,10 +123,7 @@ class GridSearch(object): if phase_tag is None: self.phase_tag = '' else: - self.phase_tag = phase_tag - if len(self.phase_tag) > 1: - if self.phase_tag[0] is '_': - self.phase_tag = self.phase_tag[1:] + self.phase_tag = 'settings_' + phase_tag self.number_of_steps = number_of_steps self.optimizer_class = optimizer_class diff --git a/autofit/optimize/non_linear.py b/autofit/optimize/non_linear.py index <HASH>..<HASH> 100644 --- a/autofit/optimize/non_linear.py +++ b/autofit/optimize/non_linear.py @@ -173,10 +173,7 @@ class NonLinearOptimizer(object): if phase_tag is None: self.phase_tag = '' else: - self.phase_tag = phase_tag - if len(self.phase_tag) > 1: - if self.phase_tag[0] is '_': - self.phase_tag = self.phase_tag[1:] + self.phase_tag = 'settings_' + phase_tag try: os.makedirs("/".join(self.sym_path.split("/")[:-1])) diff --git a/autofit/tools/phase.py b/autofit/tools/phase.py index <HASH>..<HASH> 100644 --- a/autofit/tools/phase.py +++ b/autofit/tools/phase.py @@ -30,10 +30,7 @@ class AbstractPhase(object): if phase_tag is None and tag_phases: self.phase_tag = '' else: - self.phase_tag = phase_tag - if len(self.phase_tag) > 1: - if self.phase_tag[0] is '_': - self.phase_tag = self.phase_tag[1:] + self.phase_tag = 'settings_' + phase_tag self.phase_name = phase_name self.optimizer = optimizer_class(phase_name=self.phase_name, phase_tag=phase_tag, diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -42,6 +42,7 @@ setup( 'numpy', 'pymultinest', 'matplotlib' + 'typing_inspect==0.4.0' ], setup_requires=["pytest-runner"], tests_require=["pytest"],
Change tag folder names to lead with settings_
rhayes777_PyAutoFit
train
dc618936cf548b43093a999767a76d0ac534c1f4
diff --git a/test/node/ios/build/index.js b/test/node/ios/build/index.js index <HASH>..<HASH> 100644 --- a/test/node/ios/build/index.js +++ b/test/node/ios/build/index.js @@ -18,7 +18,7 @@ var opts = } var options = JSON.stringify(opts) -var timeout = 10000 +var timeout = 60000 describe('ios build', function () { it('ios should succeed in under ' + timeout + ' milliseconds!'
give more time to ios build
vigour-io_wrapper
train
b6b63d6ec0f23f2ff0b9d70e16a80ea00ab77aca
diff --git a/packages/razzle/config/createConfigAsync.js b/packages/razzle/config/createConfigAsync.js index <HASH>..<HASH> 100644 --- a/packages/razzle/config/createConfigAsync.js +++ b/packages/razzle/config/createConfigAsync.js @@ -78,6 +78,7 @@ module.exports = ( const IS_SERVERLESS = /serverless/.test(razzleOptions.buildType); const IS_PROD = env === 'prod'; const IS_DEV = env === 'dev'; + const IS_DEV_ENV = process.env.NODE_ENV === 'development'; // Contains various versions of the Webpack SplitChunksPlugin used in different build types const splitChunksConfigs = { @@ -465,13 +466,13 @@ module.exports = ( // This is our base webpack config. let config = { // Set webpack mode: - mode: IS_DEV ? 'development' : 'production', + mode: IS_DEV || IS_DEV_ENV ? 'development' : 'production', // Set webpack context to the current apps directory context: paths.appPath, // Specify target (either 'node' or 'web') target: target, // Controversially, decide on sourcemaps. - devtool: IS_DEV ? 'cheap-module-source-map' : razzleOptions.enableSourceMaps ? 'source-map' : false, + devtool: IS_DEV || IS_DEV_ENV ? 'cheap-module-source-map' : razzleOptions.enableSourceMaps ? 'source-map' : false, // We need to tell webpack how to resolve both Razzle's node_modules and // the users', so we use resolve and resolveLoader. resolve: { @@ -674,17 +675,18 @@ module.exports = ( if (IS_PROD) { // Prevent creating multiple chunks for the server // in dev mode emitting one huge server file on every save is very slow - - config.plugins.push( - new webpack.optimize.LimitChunkCountPlugin({ - maxChunks: 1, - }) - ); - config.optimization = { - minimize: true, - minimizer: [ - new TerserPlugin(webpackOptions.terserPluginOptions) - ], + if (!IS_DEV_ENV) { + config.plugins.push( + new webpack.optimize.LimitChunkCountPlugin({ + maxChunks: 1, + }) + ); + config.optimization = { + minimize: true, + minimizer: [ + new TerserPlugin(webpackOptions.terserPluginOptions) + ], + } } if (webpackMajor === 5) { config.optimization.emitOnErrors = razzleOptions.emitOnErrors; @@ -913,8 +915,8 @@ module.exports = ( // Define production environment vars new webpack.DefinePlugin(webpackOptions.definePluginOptions), miniCssExtractPlugin, - webpackMajor === 5 ? null : new webpack.HashedModuleIdsPlugin(), - new webpack.optimize.AggressiveMergingPlugin(), + IS_DEV_ENV || webpackMajor === 5 ? null : new webpack.HashedModuleIdsPlugin(), + IS_DEV_ENV ? null : new webpack.optimize.AggressiveMergingPlugin(), hasPublicDir && new CopyPlugin({ patterns: [ { @@ -929,37 +931,39 @@ module.exports = ( }), ].filter(x => x); - config.optimization = { - splitChunks: webpackOptions.splitChunksConfig, - moduleIds: webpackMajor === 5 ? 'deterministic' : 'hashed', - minimize: true, - minimizer: [ - new TerserPlugin(webpackOptions.terserPluginOptions), - new CssMinimizerPlugin({ - sourceMap: razzleOptions.enableSourceMaps, - minimizerOptions: { - sourceMap: razzleOptions.enableSourceMaps - }, - minify: async (data, inputMap, minimizerOptions) => { - // eslint-disable-next-line global-require - const CleanCSS = require('clean-css'); - - const [[filename, input]] = Object.entries(data); - const minifiedCss = await new CleanCSS({ sourceMap: minimizerOptions.sourceMap }).minify({ - [filename]: { - styles: input, - sourceMap: inputMap, - }, - }); - - return { - css: minifiedCss.styles, - map: minifiedCss.sourceMap ? 
minifiedCss.sourceMap.toJSON() : '', - warnings: minifiedCss.warnings, - }; - }, - }) - ], + if (!IS_DEV_ENV) { + config.optimization = { + splitChunks: webpackOptions.splitChunksConfig, + moduleIds: webpackMajor === 5 ? 'deterministic' : 'hashed', + minimize: true, + minimizer: [ + new TerserPlugin(webpackOptions.terserPluginOptions), + new CssMinimizerPlugin({ + sourceMap: razzleOptions.enableSourceMaps, + minimizerOptions: { + sourceMap: razzleOptions.enableSourceMaps + }, + minify: async (data, inputMap, minimizerOptions) => { + // eslint-disable-next-line global-require + const CleanCSS = require('clean-css'); + + const [[filename, input]] = Object.entries(data); + const minifiedCss = await new CleanCSS({ sourceMap: minimizerOptions.sourceMap }).minify({ + [filename]: { + styles: input, + sourceMap: inputMap, + }, + }); + + return { + css: minifiedCss.styles, + map: minifiedCss.sourceMap ? minifiedCss.sourceMap.toJSON() : '', + warnings: minifiedCss.warnings, + }; + }, + }) + ], + } } if (webpackMajor === 5) { config.optimization.emitOnErrors = razzleOptions.emitOnErrors;
feat: add support for dev build
jaredpalmer_razzle
train
208c713d59f61f6743daa5e7d3d3f687e1320427
diff --git a/src/gopherjs/translator/expressions.go b/src/gopherjs/translator/expressions.go index <HASH>..<HASH> 100644 --- a/src/gopherjs/translator/expressions.go +++ b/src/gopherjs/translator/expressions.go @@ -401,9 +401,9 @@ func (c *PkgContext) translateExpr(expr ast.Expr) string { index = fmt.Sprintf("(%s || Go$nil).Go$key()", index) } if _, isTuple := exprType.(*types.Tuple); isTuple { - return fmt.Sprintf(`(Go$obj = (%s || false)["$" + %s], Go$obj !== undefined ? [Go$obj.v, true] : [%s, false])`, x, index, c.zeroValue(t.Elem())) + return fmt.Sprintf(`(Go$obj = (%s || false)[%s], Go$obj !== undefined ? [Go$obj.v, true] : [%s, false])`, x, index, c.zeroValue(t.Elem())) } - return fmt.Sprintf(`(Go$obj = (%s || false)["$" + %s], Go$obj !== undefined ? Go$obj.v : %s)`, x, index, c.zeroValue(t.Elem())) + return fmt.Sprintf(`(Go$obj = (%s || false)[%s], Go$obj !== undefined ? Go$obj.v : %s)`, x, index, c.zeroValue(t.Elem())) case *types.Basic: return fmt.Sprintf("%s.charCodeAt(%s)", x, c.translateExprToType(e.Index, types.Typ[types.Int])) default: @@ -530,7 +530,7 @@ func (c *PkgContext) translateExpr(expr ast.Expr) string { if hasId(c.info.Types[e.Args[0]].Underlying().(*types.Map).Key()) { index = fmt.Sprintf("(%s || Go$nil).Go$key()", index) } - return fmt.Sprintf(`delete %s["$" + %s]`, c.translateExpr(e.Args[0]), index) + return fmt.Sprintf(`delete %s[%s]`, c.translateExpr(e.Args[0]), index) case "copy": return fmt.Sprintf("Go$copy(%s, %s)", c.translateExprToType(e.Args[0], types.NewSlice(nil)), c.translateExprToType(e.Args[1], types.NewSlice(nil))) case "print", "println": diff --git a/src/gopherjs/translator/natives.go b/src/gopherjs/translator/natives.go index <HASH>..<HASH> 100644 --- a/src/gopherjs/translator/natives.go +++ b/src/gopherjs/translator/natives.go @@ -245,9 +245,13 @@ var Go$clear = function(array) { for (var i = 0; i < array.length; i++) { array[ var Go$Map = function(data, capacity) { data = data || []; for (var i = 0; i < data.length; i += 2) { - this["$" + data[i]] = { k: data[i], v: data[i + 1] }; + this[data[i]] = { k: data[i], v: data[i + 1] }; } }; +var Go$objectProperyNames = Object.getOwnPropertyNames(Object.prototype); +for (var i = 0; i < Go$objectProperyNames.length; i++) { + Go$Map.prototype[Go$objectProperyNames[i]] = undefined; +} var Go$Interface = function(value) { return value; diff --git a/src/gopherjs/translator/statements.go b/src/gopherjs/translator/statements.go index <HASH>..<HASH> 100644 --- a/src/gopherjs/translator/statements.go +++ b/src/gopherjs/translator/statements.go @@ -373,7 +373,7 @@ func (c *PkgContext) translateStmt(stmt ast.Stmt, label string) { if hasId(t.Key()) { key = fmt.Sprintf("(%s || Go$nil).Go$key()", key) } - c.Printf(`%s["$" + %s] = { k: %s, v: %s };`, c.translateExpr(l.X), key, keyVar, rhs) + c.Printf(`%s[%s] = { k: %s, v: %s };`, c.translateExpr(l.X), key, keyVar, rhs) continue } }
Removed map key prefix by using a really empty object for maps.
gopherjs_gopherjs
train
58c78dae56a0ff093ee87d23f01fda22633b9ed7
diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryNullStrategyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryNullStrategyIT.java index <HASH>..<HASH> 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryNullStrategyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryNullStrategyIT.java @@ -20,6 +20,7 @@ import static com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrateg import static org.assertj.core.api.Assertions.assertThat; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.mapper.annotations.Dao; @@ -32,6 +33,7 @@ import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import java.util.function.BiConsumer; +import org.junit.AssumptionViolatedException; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; @@ -63,6 +65,11 @@ public class QueryNullStrategyIT { @Test public void should_respect_strategy_inheritance_rules() { + if (sessionRule.session().getContext().getProtocolVersion() == DefaultProtocolVersion.V3) { + throw new AssumptionViolatedException( + "Test not valid for Protocol V3 as does not support UNSET"); + } + DaoWithNoStrategy daoWithNoStrategy = mapper.daoWithNoStrategy(); DaoWithDoNotSet daoWithDoNotSet = mapper.daoWithDoNotSet(); DaoWithSetToNull daoWithSetToNull = mapper.daoWithSetToNull();
Skip test if Protocol V3 is used
datastax_java-driver
train
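The JUnit pattern above (abort a test as inapplicable rather than failed when the environment lacks the feature) has a direct pytest analog. A self-contained sketch; the session stand-in is illustrative, not the driver's API:

    import pytest

    @pytest.fixture
    def session():
        class Session:           # stand-in for a connected driver session
            protocol_version = 3
        return Session()

    def test_null_saving_strategy(session):
        if session.protocol_version == 3:
            pytest.skip("Protocol V3 does not support UNSET")
        # ... exercise the UNSET-dependent behaviour here ...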
60f4575365eeb877af17378320fa6dc906b81088
diff --git a/lib/ole/types/base.rb b/lib/ole/types/base.rb index <HASH>..<HASH> 100644 --- a/lib/ole/types/base.rb +++ b/lib/ole/types/base.rb @@ -1,7 +1,5 @@ # encoding: ASCII-8BIT -ICONV_DEPRECATED = ''.respond_to? :encode -require 'iconv' unless ICONV_DEPRECATED require 'date' require 'ole/base' @@ -14,17 +12,6 @@ module Ole # :nodoc: # It also defines all the variant type constants, and symbolic names. # module Types - - class Iconv - def initialize(to, from) - @to, @from = to, from - end - - def iconv(str) - str.encode(@to, @from) - end - end if ICONV_DEPRECATED - # for anything that we don't have serialization code for class Data < String def self.load str @@ -49,26 +36,53 @@ module Ole # :nodoc: end end - # for VT_LPWSTR - class Lpwstr < String - FROM_UTF16 = Iconv.new 'utf-8', 'utf-16le' - TO_UTF16 = Iconv.new 'utf-16le', 'utf-8' + if ''.respond_to? :encode + # NOTE: only here in the interim to preserve behaviour of + # FROM/TO_UTF16 constants for ruby-msg. + class Iconv # :nodoc: + def initialize(to, from) + @to, @from = to, from + end - def self.load str - ICONV_DEPRECATED ? - new(str.encode(Encoding::UTF_8, Encoding::UTF_16LE).chomp(0.chr)) : - new(FROM_UTF16.iconv(str).chomp(0.chr)) + def iconv(str) + str.encode(@to, @from) + end end - def self.dump str - # need to append nulls? - data = ICONV_DEPRECATED ? - str.encode(Encoding::UTF_16LE) : - TO_UTF16.iconv(str) - # not sure if this is the recommended way to do it, but I want to treat - # the resulting utf16 data as regular bytes, not characters. - data.force_encoding Encoding::ASCII_8BIT if data.respond_to? :encoding - data + # for VT_LPWSTR + class Lpwstr < String + FROM_UTF16 = Iconv.new 'utf-8', 'utf-16le' + TO_UTF16 = Iconv.new 'utf-16le', 'utf-8' + + def self.load str + new str.encode(Encoding::UTF_8, Encoding::UTF_16LE).chomp(0.chr) + end + + def self.dump str + # need to append nulls? + data = str.encode(Encoding::UTF_16LE) + # not sure if this is the recommended way to do it, but I want to treat + # the resulting utf16 data as regular bytes, not characters. + data.force_encoding Encoding::ASCII_8BIT + data + end + end + else + require 'iconv' + + # for VT_LPWSTR + class Lpwstr < String + FROM_UTF16 = Iconv.new 'utf-8', 'utf-16le' + TO_UTF16 = Iconv.new 'utf-16le', 'utf-8' + + def self.load str + new FROM_UTF16.iconv(str).chomp(0.chr) + end + + def self.dump str + # need to append nulls? + TO_UTF16.iconv str + end end end
Minor refactor of encoding/iconv code. Use single branch and avoid top-level constant.
aquasync_ruby-ole
train
2739975de3084e9084bb3aa42899ec5508f154fb
diff --git a/test/unit/factory/provider.js b/test/unit/factory/provider.js index <HASH>..<HASH> 100644 --- a/test/unit/factory/provider.js +++ b/test/unit/factory/provider.js @@ -106,17 +106,23 @@ define(function () { /* Create .animation + For some reason, addClass and removeClass no longer works in the unit test + in AngularJS 13.0.x (tested under 1.3.0-beta.17). As the intent of this + test is to ensure that animation is loaded, changed to before* version. + + Created following plunker to ensure that addClass/removeClass works inder 1.3.x + http://plnkr.co/edit/VsWfqqbAnqrr7cE3toYn */ result.utestAnimation = ".animation-" + suffix.toLowerCase(); module.animation(result.utestAnimation, function ($log, $interval) { return { - addClass : function(element, className, done) { + beforeAddClass : function(element, className, done) { if ( className === "custom-hide") { element.css('opacity',0); done(); } }, - removeClass : function(element, className, done) { + beforeRemoveClass : function(element, className, done) { if ( className === "custom-hide") { element.css('opacity',1); done();
Changed animation methods to their before* versions to make them work with Angular <I>.x

marcoslin_angularAMD
train
e6eeca6ae2fe3c698ed57a80b35b8ab16feb074c
diff --git a/brozzler/__init__.py b/brozzler/__init__.py index <HASH>..<HASH> 100644 --- a/brozzler/__init__.py +++ b/brozzler/__init__.py @@ -17,7 +17,7 @@ class ShutdownRequested(Exception): pass class ReachedLimit(Exception): - def __init__(self, http_error=None, warcprox_meta=None): + def __init__(self, http_error=None, warcprox_meta=None, http_payload=None): if http_error: if "warcprox-meta" in http_error.headers: self.warcprox_meta = _json.loads(http_error.headers["warcprox-meta"]) @@ -26,7 +26,7 @@ class ReachedLimit(Exception): self.http_payload = http_error.read() elif warcprox_meta: self.warcprox_meta = warcprox_meta - self.http_payload = None + self.http_payload = http_payload def __repr__(self): return "ReachedLimit(warcprox_meta={},http_payload={})".format(repr(self.warcprox_meta), repr(self.http_payload)) diff --git a/brozzler/browser.py b/brozzler/browser.py index <HASH>..<HASH> 100644 --- a/brozzler/browser.py +++ b/brozzler/browser.py @@ -238,7 +238,7 @@ class Browser: if (not self._reached_limit and message["params"]["response"]["status"] == 420 and "Warcprox-Meta" in CaseInsensitiveDict(message["params"]["response"]["headers"])): - warcprox_meta = json.loads(message["params"]["response"]["headers"]["Warcprox-Meta"]) + warcprox_meta = json.loads(CaseInsensitiveDict(message["params"]["response"]["headers"])["Warcprox-Meta"]) self._reached_limit = brozzler.ReachedLimit(warcprox_meta=warcprox_meta) self.logger.info("reached limit %s", self._reached_limit) diff --git a/brozzler/hq.py b/brozzler/hq.py index <HASH>..<HASH> 100644 --- a/brozzler/hq.py +++ b/brozzler/hq.py @@ -155,8 +155,16 @@ class BrozzlerHQ: self._robots_caches = {} # {site_id:reppy.cache.RobotsCache} def _robots_cache(self, site): + class SessionRaiseOn420(requests.Session): + def get(self, url, **kwargs): + res = super().get(url, **kwargs) + if res.status_code == 420 and 'warcprox-meta' in res.headers: + raise brozzler.ReachedLimit(warcprox_meta=json.loads(res.headers['warcprox-meta']), http_payload=res.text) + else: + return response + if not site.id in self._robots_caches: - req_sesh = requests.Session() + req_sesh = SessionRaiseOn420() req_sesh.verify = False # ignore cert errors if site.proxy: proxie = "http://{}".format(site.proxy) @@ -171,10 +179,16 @@ class BrozzlerHQ: if site.ignore_robots: return True try: - return self._robots_cache(site).allowed(url, "brozzler") + self.logger.info("checking robots for %s", url) + result = self._robots_cache(site).allowed(url, "brozzler") + self.logger.info("robots allowed=%s for %s", result, url) + return result except BaseException as e: - self.logger.error("problem with robots.txt for {}: {}".format(url, e)) - return False + if isinstance(e, reppy.exceptions.ServerError) and isinstance(e.args[0], brozzler.ReachedLimit): + raise e.args[0] + else: + self.logger.error("problem with robots.txt for {}: {}".format(url, e)) + return False def run(self): try: @@ -209,19 +223,23 @@ class BrozzlerHQ: def _new_site(self): try: msg = self._new_sites_q.get(block=False) - new_site = brozzler.Site(**msg.payload) + site = brozzler.Site(**msg.payload) msg.ack() - self.logger.info("new site {}".format(new_site)) - site_id = self._db.new_site(new_site) - new_site.id = site_id + self.logger.info("new site {}".format(site)) + site_id = self._db.new_site(site) + site.id = site_id - if self.is_permitted_by_robots(new_site, new_site.seed): - page = brozzler.Page(new_site.seed, site_id=new_site.id, hops_from_seed=0) - self._db.schedule_page(page, priority=1000) - 
self._unclaimed_sites_q.put(new_site.to_dict()) - else: - self.logger.warn("seed url {} is blocked by robots.txt".format(new_site.seed)) + try: + if self.is_permitted_by_robots(site, site.seed): + page = brozzler.Page(site.seed, site_id=site.id, hops_from_seed=0) + self._db.schedule_page(page, priority=1000) + self._unclaimed_sites_q.put(site.to_dict()) + else: + self.logger.warn("seed url {} is blocked by robots.txt".format(site.seed)) + except brozzler.ReachedLimit as e: + site.note_limit_reached(e) + self._db.update_site(site) except kombu.simple.Empty: pass
handle <I> Reached limit when fetching robots in brozzler-hq
internetarchive_brozzler
train
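This entry is already Python, so the interesting part of the diff can be shown almost verbatim: a requests.Session subclass that converts a proxy's 420 response carrying a warcprox-meta header into a ReachedLimit exception. One caveat: the diff's success path reads `return response`, an undefined name at that point (the local is `res`); the sketch below uses the corrected name and reduces ReachedLimit to a bare stand-in:

    import json
    import requests

    class ReachedLimit(Exception):
        def __init__(self, warcprox_meta=None, http_payload=None):
            self.warcprox_meta = warcprox_meta
            self.http_payload = http_payload

    class SessionRaiseOn420(requests.Session):
        def get(self, url, **kwargs):
            res = super().get(url, **kwargs)
            if res.status_code == 420 and 'warcprox-meta' in res.headers:
                raise ReachedLimit(
                    warcprox_meta=json.loads(res.headers['warcprox-meta']),
                    http_payload=res.text)
            return res  # note: the diff returns the undefined name `response` here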
0854828f064ef3b13009b57f5a412d30c6213aab
diff --git a/node_modules/generator-xtc/app/index.js b/node_modules/generator-xtc/app/index.js index <HASH>..<HASH> 100644 --- a/node_modules/generator-xtc/app/index.js +++ b/node_modules/generator-xtc/app/index.js @@ -8,6 +8,8 @@ var chalk = require('chalk'); var _projectPath = ''; var log = console.log; var dir = console.dir; +var rcVersion = 0; +var xtcfileName = 'xtcfile.json'; var XtcGenerator = module.exports = function XtcGenerator(args, options, config) { @@ -19,26 +21,30 @@ var XtcGenerator = module.exports = function XtcGenerator(args, options, config) this.xtcVersion = xtcPkg.version; _projectPath = this.projectPath; - // .xtcrc file stores setup config for setting defaults when running setup again or updating + // .xtcfile stores setup config for setting defaults when running setup again or updating var getRc = function(projectPath) { - var fs = require('fs') - ,rcData = { - rcVersion : 0 - ,xtcVersion : xtcPkg.version - } + var rcData = {} ; try { - rcData = fs.readFileSync(path.join(projectPath, '/.xtcrc')); + rcData = fs.readFileSync(path.join(projectPath, xtcfileName)); rcData = JSON.parse(rcData); } catch (e) { if (e.code !== 'ENOENT') { console.error(e.message); } } + + // store current values from rc file. + this.currentRcVersion = rcData.rcVersion || null + this.currentXtcVersion = rcData.xtcVersion || null; + + rcData.rcVersion = rcVersion; + rcData.xtcVersion = xtcPkg.version; + return rcData; }; - this.rc = getRc(_projectPath); + this.rc = getRc.call(this, _projectPath); this.on('end', onEnd); @@ -263,6 +269,8 @@ XtcGenerator.prototype.sitename = function sitename() { XtcGenerator.prototype.storeRc = function storeRc() { + this.rc.rcVersion = this.rcVersion // already set, but here for completeness + this.rc.xtcVersion = this.xtcVersion // already set, but here for completeness this.rc.name = this.name; this.rc.repositoryUri = this.repository.uri || null; this.rc.repositoryBranch = this.repository.branch || null; @@ -274,11 +282,11 @@ XtcGenerator.prototype.storeRc = function storeRc() { this.rc.siteName = this.siteName || null; try { - require('fs').writeFileSync('.xtcrc', JSON.stringify(this.rc, null, 2)); - console.log('\nCreated .xtcrc file\n'); + console.log('\nwriting %s\n', xtcfileName); + fs.writeFileSync(xtcfileName, JSON.stringify(this.rc, null, 2)); } catch (e) { - console.error('\nUnable to write .xtcrc file.\nReason: %s\n', e.message); + console.error('\nUnable to write %s.\nReason: %s\n', xtcfileName, e.message); } };
Change file `.xtcrc` to `xtcfile.json`. - Visible because it will be used for pre-filling and automated installs. - `xtc install` may create the file if not present.
MarcDiethelm_xtc
train
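The getRc logic above reduces to: start from defaults, overlay whatever the rc file contains, and tolerate only a missing file (the JS checks err.code !== 'ENOENT'). The same shape in Python; file name and defaults are illustrative:

    import json

    def read_rc(path, defaults):
        data = dict(defaults)
        try:
            with open(path) as f:
                data.update(json.load(f))
        except FileNotFoundError:
            pass  # no rc file yet: keep the defaults; unlike the JS, other errors propagate here
        return data

    rc = read_rc('xtcfile.json', {'rcVersion': 0, 'xtcVersion': '0.0.0'})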
cae82c0023e085d2b2775b1b8283b4c98980b9e6
diff --git a/runtime/classes/propel/util/Criteria.php b/runtime/classes/propel/util/Criteria.php index <HASH>..<HASH> 100644 --- a/runtime/classes/propel/util/Criteria.php +++ b/runtime/classes/propel/util/Criteria.php @@ -1077,7 +1077,8 @@ class Criteria implements IteratorAggregate { if ($p3 !== null) { // addOr(column, value, comparison) $nc = new Criterion($this, $p1, $p2, $p3); - if ( $this->getCriterion($p1) === null) { + $oc = $this->getCriterion($p1); + if ($oc === null) { $this->map[$p1] = $nc; } else { $oc->addOr($nc);
fixing the andOr call on a non-object
propelorm_Propel
train
a220bc4e8f00923d3579dce7d124891122c0db51
diff --git a/zipline/finance/slippage.py b/zipline/finance/slippage.py index <HASH>..<HASH> 100644 --- a/zipline/finance/slippage.py +++ b/zipline/finance/slippage.py @@ -80,36 +80,28 @@ class VolumeShareSlippage(object): direction = 1.0 for order in orders: - if(order.dt < event.dt): + open_amount = order.amount - order.filled - # orders are only good on the day they are issued + if(open_amount != 0): + direction = open_amount / math.fabs(open_amount) + else: + direction = 1 - if (order.dt.year, order.dt.day) < \ - (event.dt.year, event.dt.day): - continue + desired_order = total_order + open_amount - open_amount = order.amount - order.filled + volume_share = direction * (desired_order) / event.volume + if volume_share > self.volume_limit: + volume_share = self.volume_limit + simulated_amount = int(volume_share * event.volume * direction) + simulated_impact = (volume_share) ** 2 \ + * self.price_impact * direction * event.price - if(open_amount != 0): - direction = open_amount / math.fabs(open_amount) - else: - direction = 1 + order.filled += (simulated_amount - total_order) + total_order = simulated_amount - desired_order = total_order + open_amount - - volume_share = direction * (desired_order) / event.volume - if volume_share > self.volume_limit: - volume_share = self.volume_limit - simulated_amount = int(volume_share * event.volume * direction) - simulated_impact = (volume_share) ** 2 \ - * self.price_impact * direction * event.price - - order.filled += (simulated_amount - total_order) - total_order = simulated_amount - - # we cap the volume share at configured % of a trade - if volume_share == self.volume_limit: - break + # we cap the volume share at configured % of a trade + if volume_share == self.volume_limit: + break orders = [x for x in orders if abs(x.amount - x.filled) > 0
Removes expiration from orders. Expiration is something that we may want to have in the future, but the current implementation is dropping orders that aren't meant to be expired. Removing expiration ensures that all expected orders are executed.
quantopian_zipline
train
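Stripped of order bookkeeping, the per-order fill in the slippage loop above is: take the open amount's sign, cap the order's share of the bar's volume at the configured limit, and fill that many shares. A compact sketch (not zipline's API):

    import math

    def simulated_fill(open_amount, event_volume, volume_limit):
        # Shares of an open order that fill against a single trade event.
        if open_amount == 0:
            return 0
        direction = math.copysign(1.0, open_amount)
        volume_share = min(abs(open_amount) / event_volume, volume_limit)
        return int(volume_share * event_volume * direction)

    simulated_fill(1000, 2000, 0.25)   # 500: capped at 25% of the bar's volume
    simulated_fill(-100, 2000, 0.25)   # -100: small sells fill completely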
3f86ef09bd8f46fe5d730bf160dcd6b3ada73bf4
diff --git a/lib/HiPay/Fullservice/Helper/Signature.php b/lib/HiPay/Fullservice/Helper/Signature.php index <HASH>..<HASH> 100644 --- a/lib/HiPay/Fullservice/Helper/Signature.php +++ b/lib/HiPay/Fullservice/Helper/Signature.php @@ -42,18 +42,17 @@ class Signature * @param string $secretPassphrase * @return bool */ - static public function isValidHttpSignature($secretPassphrase, $hashAlgorithm = 'sha256') + static public function isValidHttpSignature($secretPassphrase, $hashAlgorithm = 'sha1') { - switch (strtolower($hashAlgorithm)) { - case 'sha1': - $computedSignature = sha1(static::getStringToCompute($secretPassphrase)); + case 'sha256': + $computedSignature = hash('sha256', static::getStringToCompute($secretPassphrase)); break; case 'sha512' : $computedSignature = hash('sha512', static::getStringToCompute($secretPassphrase)); break; default: - $computedSignature = hash('sha256', static::getStringToCompute($secretPassphrase)); + $computedSignature = sha1(static::getStringToCompute($secretPassphrase)); break; }
Update Signature.php: SHA-1 is the default for the signature
hipay_hipay-fullservice-sdk-php
train
901153c61e39fc01961dfef3613c4e529c595476
diff --git a/src/transformers/models/distilbert/modeling_distilbert.py b/src/transformers/models/distilbert/modeling_distilbert.py index <HASH>..<HASH> 100755 --- a/src/transformers/models/distilbert/modeling_distilbert.py +++ b/src/transformers/models/distilbert/modeling_distilbert.py @@ -588,7 +588,7 @@ class DistilBertForSequenceClassification(DistilBertPreTrainedModel): self.init_weights() - @add_start_docstrings_to_model_forward(DISTILBERT_INPUTS_DOCSTRING.format("batch_size, num_choices")) + @add_start_docstrings_to_model_forward(DISTILBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length")) @add_code_sample_docstrings( tokenizer_class=_TOKENIZER_FOR_DOC, checkpoint=_CHECKPOINT_FOR_DOC,
Fix docstring description of input_ids (#<I>)
huggingface_pytorch-pretrained-BERT
train
04394ab749e4420665878376b731c3f124fd3f76
diff --git a/engine/reformulation/core/src/main/java/it/unibz/inf/ontop/answering/reformulation/generation/NativeQueryGenerator.java b/engine/reformulation/core/src/main/java/it/unibz/inf/ontop/answering/reformulation/generation/NativeQueryGenerator.java index <HASH>..<HASH> 100644 --- a/engine/reformulation/core/src/main/java/it/unibz/inf/ontop/answering/reformulation/generation/NativeQueryGenerator.java +++ b/engine/reformulation/core/src/main/java/it/unibz/inf/ontop/answering/reformulation/generation/NativeQueryGenerator.java @@ -13,4 +13,6 @@ public interface NativeQueryGenerator { IQ generateSourceQuery(IQ query); IQ generateSourceQuery(IQ query, boolean avoidPostProcessing); + + IQ generateSourceQuery(IQ query, boolean avoidPostProcessing, boolean tolerateUnknownTypes); } diff --git a/engine/reformulation/sql/src/main/java/it/unibz/inf/ontop/answering/reformulation/generation/impl/SQLGeneratorImpl.java b/engine/reformulation/sql/src/main/java/it/unibz/inf/ontop/answering/reformulation/generation/impl/SQLGeneratorImpl.java index <HASH>..<HASH> 100644 --- a/engine/reformulation/sql/src/main/java/it/unibz/inf/ontop/answering/reformulation/generation/impl/SQLGeneratorImpl.java +++ b/engine/reformulation/sql/src/main/java/it/unibz/inf/ontop/answering/reformulation/generation/impl/SQLGeneratorImpl.java @@ -79,6 +79,11 @@ public class SQLGeneratorImpl implements NativeQueryGenerator { @Override public IQ generateSourceQuery(IQ query, boolean avoidPostProcessing) { + return generateSourceQuery(query, avoidPostProcessing, false); + } + + @Override + public IQ generateSourceQuery(IQ query, boolean avoidPostProcessing, boolean tolerateUnknownTypes) { if (query.getTree().isDeclaredAsEmpty()) return query; @@ -96,7 +101,7 @@ public class SQLGeneratorImpl implements NativeQueryGenerator { return iqFactory.createIQ(query.getProjectionAtom(), iqFactory.createEmptyNode(query.getProjectionAtom().getVariables())); - NativeNode nativeNode = generateNativeNode(normalizedSubTree); + NativeNode nativeNode = generateNativeNode(normalizedSubTree, tolerateUnknownTypes); UnaryIQTree newTree = iqFactory.createUnaryIQTree(split.getPostProcessingConstructionNode(), nativeNode); @@ -194,7 +199,7 @@ public class SQLGeneratorImpl implements NativeQueryGenerator { } } - private NativeNode generateNativeNode(IQTree normalizedSubTree) { - return defaultIQTree2NativeNodeGenerator.generate(normalizedSubTree, dbParameters, false); + private NativeNode generateNativeNode(IQTree normalizedSubTree, boolean tolerateUnknownTypes) { + return defaultIQTree2NativeNodeGenerator.generate(normalizedSubTree, dbParameters, tolerateUnknownTypes); } }
NativeQueryGenerator: new method for tolerating unknown types.
ontop_ontop
train
fa4f445070b0e90403aec4e25002ae39b74429df
diff --git a/Resources/public/js/grid.js b/Resources/public/js/grid.js index <HASH>..<HASH> 100644 --- a/Resources/public/js/grid.js +++ b/Resources/public/js/grid.js @@ -160,7 +160,7 @@ var collection = $('#' + $(this).attr('data-collection-id')); $.ajax({ - url: Routing.generate('integrated_block_block_index', { '_format': 'json', 'limit': 999 }), // @todo paging (INTEGRATED-423) + url: Routing.generate('integrated_block_block_index', { '_format': 'json', 'limit': 99999 }), // @todo paging (INTEGRATED-423) dataType: 'json', success: function(data) {
[INTEGRATED-<I>] Change block limit to <I>
integratedfordevelopers_integrated-website-bundle
train
e6450294066d75303a2f38db816b0c5826bf2e7d
diff --git a/bootstrap.js b/bootstrap.js index <HASH>..<HASH> 100644 --- a/bootstrap.js +++ b/bootstrap.js @@ -28,6 +28,17 @@ var StaticString = function StaticString(domain, key, params) { }; /** + * Give this item a replace method + * + * @author Jelle De Loecker <jelle@kipdola.be> + * @since 0.0.1 + * @version 0.0.1 + */ +StaticString.prototype.replace = function replace(needle, replacement) { + return this.key.replace(needle, replacement); +}; + +/** * Return an HTML presentation of this StaticString, * which hawkejs helpers can interpret later on. * diff --git a/model/static_string_model.js b/model/static_string_model.js index <HASH>..<HASH> 100644 --- a/model/static_string_model.js +++ b/model/static_string_model.js @@ -41,6 +41,16 @@ Model.extend(function StaticStringModel() { translatable: true } }; + + this.modelEdit = { + general: { + title: __('chimera', 'General'), + fields: [ + {field: 'domain', type: 'String'}, + {field: 'key', type: 'String'}, + ] + } + }; }; /**
Add a 'replace' method to i<I>n objects
skerit_alchemy-i18n
train
74d37a1c8f53faf92fb9fe48a0372137472b05a9
diff --git a/benchexec/benchexec.py b/benchexec/benchexec.py index <HASH>..<HASH> 100644 --- a/benchexec/benchexec.py +++ b/benchexec/benchexec.py @@ -152,12 +152,11 @@ class BenchExec(object): metavar="N", help="Limit each run of the tool to N CPU cores (-1 to disable).") - parser.add_argument("-s", "--allowedCores", dest="coreset", - default=None, - metavar="n", - nargs='+', - type=int, - help="Limit each run of the tool to a subset of particular CPU cores.") + parser.add_argument("--allowedCores", + dest="coreset", default=None, type=util.parse_int_list, + help="Limit the set of cores BenchExec will use for all runs " + "(Applied only if the number of CPU cores is limited).", + metavar="N,M-K",) parser.add_argument("--user", dest="users",
Use a better type and description for the "--allowedCores" option
sosy-lab_benchexec
train
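The new --allowedCores option hands argparse a custom type, so "N,M-K" strings are parsed at option-parsing time. A runnable sketch; parse_int_list below is a hypothetical stand-in for benchexec's util.parse_int_list and assumes non-negative values:

    import argparse

    def parse_int_list(s):
        # '0,2-4' -> [0, 2, 3, 4]; hypothetical stand-in for util.parse_int_list.
        result = []
        for part in s.split(","):
            if "-" in part:
                lo, hi = part.split("-", 1)
                result.extend(range(int(lo), int(hi) + 1))
            else:
                result.append(int(part))
        return result

    parser = argparse.ArgumentParser()
    parser.add_argument("--allowedCores", dest="coreset", default=None,
                        type=parse_int_list, metavar="N,M-K")
    print(parser.parse_args(["--allowedCores", "0,2-4"]).coreset)  # [0, 2, 3, 4]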
51feacf99329b103d9880a5ef90995df7bd2c189
diff --git a/lib/gulp-fasttime-lint.js b/lib/gulp-fasttime-lint.js index <HASH>..<HASH> 100644 --- a/lib/gulp-fasttime-lint.js +++ b/lib/gulp-fasttime-lint.js @@ -67,13 +67,11 @@ function gherkinLint() catch ({ errors }) { const messages = - errors ? errors.map ( ({ location: { column, line }, message }) => ({ column, line, message: message.replace(/^\(.*?\): /, ''), severity: 2 }), - ) : - []; + ); file.eslint = { errorCount: errors.length, diff --git a/readme.md b/readme.md index <HASH>..<HASH> 100644 --- a/readme.md +++ b/readme.md @@ -3,7 +3,7 @@ [Gulp](https://gulpjs.com/) plugin for [ESLint](https://eslint.org/) validation with [fasttime](https://github.com/fasttime?tab=repositories) presets. -Both JavaScript and TypeScript files can be validated. +Validates JavaScript, TypeScript and Gherkin files. ## Usage @@ -18,5 +18,17 @@ exports.lint = }; ``` +You can also specify more than one configuration. + +```js +const stream = lint([{ src: src1, ...config1 }, { src: src2, ...config2 }]); +``` + +The settings `envs`, `fix`, `globals`, `parserOptions` and `rules` are only relevant to JavaScript +and TypeScript files, and are ignored for Gherkin files. + +It is fine to specify different kinds of source files in the same `src` glob pattern(s), as long as +other configuration settings don't interfere. + [npm badge]: https://badge.fury.io/js/gulp-fasttime-lint.svg [npm url]: https://www.npmjs.com/package/gulp-fasttime-lint diff --git a/test.js b/test.js index <HASH>..<HASH> 100644 --- a/test.js +++ b/test.js @@ -20,7 +20,7 @@ describe let options; try { - await endOfStrean(stream); + await endOfStream(stream); options = { message: 'PluginError expected but not thrown' }; } catch (error) @@ -48,7 +48,7 @@ describe const createFilename = (extension = '.js') => `\0${++fileNumber}${extension}`; - function endOfStrean(stream) + function endOfStream(stream) { const executor = (resolve, reject) => @@ -98,7 +98,7 @@ describe const stream = testLint({ src }); const actualFiles = []; stream.on('data', file => actualFiles.push(file)); - await endOfStrean(stream); + await endOfStream(stream); assert.equal(actualFiles.length, 1); const [file] = actualFiles; assert.strictEqual(file.basename, filename); @@ -171,7 +171,7 @@ describe const filename = createFilename(); const src = { [filename]: '\'use strict\';\n\nSymbol();\n' }; const stream = testLint({ src, parserOptions: { ecmaVersion: 6 } }); - await endOfStrean(stream); + await endOfStream(stream); }, ); @@ -183,7 +183,7 @@ describe const filename = createFilename(); const src = { [filename]: '\'use strict\';' }; const stream = testLint({ src, fix: true }); - await endOfStrean(stream); + await endOfStream(stream); }, );
Updated readme.md, fixed minor issues
fasttime_gulp-fasttime-lint
train
9d471f450b174b5645ba723127291174f7c8623f
diff --git a/xwiki-commons-tools/xwiki-commons-tool-test/xwiki-commons-tool-test-component/src/main/java/org/xwiki/test/internal/MockConfigurationSource.java b/xwiki-commons-tools/xwiki-commons-tool-test/xwiki-commons-tool-test-component/src/main/java/org/xwiki/test/internal/MockConfigurationSource.java index <HASH>..<HASH> 100644 --- a/xwiki-commons-tools/xwiki-commons-tool-test/xwiki-commons-tool-test-component/src/main/java/org/xwiki/test/internal/MockConfigurationSource.java +++ b/xwiki-commons-tools/xwiki-commons-tool-test/xwiki-commons-tool-test-component/src/main/java/org/xwiki/test/internal/MockConfigurationSource.java @@ -33,8 +33,7 @@ public class MockConfigurationSource extends MemoryConfigurationSource { public static DefaultComponentDescriptor<ConfigurationSource> getDescriptor(String roleHint) { - DefaultComponentDescriptor<ConfigurationSource> descriptor = - new DefaultComponentDescriptor<ConfigurationSource>(); + DefaultComponentDescriptor<ConfigurationSource> descriptor = new DefaultComponentDescriptor<>(); descriptor.setRoleType(ConfigurationSource.class); if (roleHint != null) { descriptor.setRoleHint(roleHint);
[Misc] Use java7 diamond operator
xwiki_xwiki-commons
train
7259cb8853aaaa6307b3b5ce2fe1bf2c6136fa5d
diff --git a/plugins/statsd_collector.go b/plugins/statsd_collector.go index <HASH>..<HASH> 100644 --- a/plugins/statsd_collector.go +++ b/plugins/statsd_collector.go @@ -26,6 +26,8 @@ type StatsdCollector struct { timeoutsPrefix string fallbackSuccessesPrefix string fallbackFailuresPrefix string + canceledPrefix string + deadlinePrefix string totalDurationPrefix string runDurationPrefix string sampleRate float32 @@ -103,6 +105,8 @@ func (s *StatsdCollectorClient) NewStatsdCollector(name string) metricCollector. timeoutsPrefix: name + ".timeouts", fallbackSuccessesPrefix: name + ".fallbackSuccesses", fallbackFailuresPrefix: name + ".fallbackFailures", + canceledPrefix: name + ".contextCanceled", + deadlinePrefix: name + ".contextDeadlineExceeded", totalDurationPrefix: name + ".totalDuration", runDurationPrefix: name + ".runDuration", sampleRate: s.sampleRate, @@ -146,6 +150,8 @@ func (g *StatsdCollector) Update(r metricCollector.MetricResult) { g.incrementCounterMetric(g.timeoutsPrefix, r.Timeouts) g.incrementCounterMetric(g.fallbackSuccessesPrefix, r.FallbackSuccesses) g.incrementCounterMetric(g.fallbackFailuresPrefix, r.FallbackFailures) + g.incrementCounterMetric(g.canceledPrefix, r.ContextCanceled) + g.incrementCounterMetric(g.deadlinePrefix, r.ContextDeadlineExceeded) g.updateTimerMetric(g.totalDurationPrefix, r.TotalDuration) g.updateTimerMetric(g.runDurationPrefix, r.RunDuration) }
update stats collector with new counters
afex_hystrix-go
train
f08966682bd3d5c5e29b3a553c067ebee8ec8d92
diff --git a/src/Records/Traits/HasTypes/RecordTrait.php b/src/Records/Traits/HasTypes/RecordTrait.php index <HASH>..<HASH> 100644 --- a/src/Records/Traits/HasTypes/RecordTrait.php +++ b/src/Records/Traits/HasTypes/RecordTrait.php @@ -2,7 +2,7 @@ namespace ByTIC\Common\Records\Traits\HasTypes; -use ByTIC\Common\Records\Types\Generic as GenericType; +use ByTIC\Common\Records\Properties\Types\Generic as GenericType; /** * Class RecordTrait @@ -20,15 +20,15 @@ trait RecordTrait /** * @var GenericType */ - protected $typeObject; + protected $typeObject = null; /** * @return GenericType */ public function getTypeObject() { - if (!$this->typeObject) { - $this->typeObject = $this->getNewType($this->getTypeValue()); + if ($this->typeObject === null) { + $this->initTypeObject(); } return $this->typeObject; @@ -50,13 +50,18 @@ trait RecordTrait return false; } + public function initTypeObject() + { + $this->typeObject = $this->getNewType($this->getTypeValue()); + } + /** * @param $type * @return mixed */ public function getNewType($type) { - $object = $this->getManager()->getType($type); + $object = clone $this->getManager()->getType($type); $object->setItem($this); return $object; diff --git a/src/Records/Traits/HasTypes/RecordsTrait.php b/src/Records/Traits/HasTypes/RecordsTrait.php index <HASH>..<HASH> 100644 --- a/src/Records/Traits/HasTypes/RecordsTrait.php +++ b/src/Records/Traits/HasTypes/RecordsTrait.php @@ -2,7 +2,7 @@ namespace ByTIC\Common\Records\Traits\HasTypes; -use ByTIC\Common\Records\Types\Generic as GenericType; +use ByTIC\Common\Records\Properties\Types\Generic as GenericType; /** * Class RecordsTrait @@ -38,11 +38,16 @@ trait RecordsTrait */ public function getTypes() { + $this->checkInitTypes(); + + return $this->types; + } + + public function checkInitTypes() + { if ($this->types === null) { $this->initTypes(); } - - return $this->types; } public function initTypes() @@ -51,8 +56,8 @@ trait RecordsTrait foreach ($files as $name) { $name = str_replace('.php', '', $name); if (!in_array($name, ['Abstract', 'AbstractType', 'Generic'])) { - $object = $this->getType($name); - $this->types[$object->getName()] = $object; + $object = $this->getNewType($name); + $this->addType($object); } } } @@ -61,7 +66,7 @@ trait RecordsTrait * @param string $type * @return GenericType */ - public function getType($type = null) + public function getNewType($type = null) { $className = $this->getTypeClass($type); /** @var GenericType $object */ @@ -99,6 +104,25 @@ trait RecordsTrait } /** + * @param GenericType $object + */ + public function addType($object) + { + $this->types[$object->getName()] = $object; + } + + /** + * @param string $type + * @return GenericType + */ + public function getType($type = null) + { + $this->checkInitTypes(); + + return $this->types[$type]; + } + + /** * @return string */ public function getTypesDirectory()
rename Generic Statuses to Properties Traits
bytic_Common
train
6855dd8c74a194d0b053482e5be186057f85f471
diff --git a/lib/vagrant/action/builtin/mixin_provisioners.rb b/lib/vagrant/action/builtin/mixin_provisioners.rb index <HASH>..<HASH> 100644 --- a/lib/vagrant/action/builtin/mixin_provisioners.rb +++ b/lib/vagrant/action/builtin/mixin_provisioners.rb @@ -35,7 +35,8 @@ module Vagrant # Note: `name` is set to a symbol, since it is converted to one via #Config::VM.provision provisioner_name = provisioner.name if !provisioner_name - if provisioner.config.name + if provisioner.config.respond_to?(:name) && + provisioner.config.name provisioner_name = provisioner.config.name.to_sym end else
Check name config option with respond_to? Prior to this commit, the check used to look for the config option `name` in a provisioner config would accidentally create a "DummyConfig", based on how Vagrant handles missing config options. This commit fixes that by instead using the `respond_to?` method to check for the existence of the config option `name`.
hashicorp_vagrant
train
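The failure mode above is worth modelling: asking Vagrant's config proxy for a missing option auto-creates a truthy DummyConfig, so a plain truthiness check passes even when no name was set. A Python model of the bug and of a respond_to?-style guard (DummyConfig here is an illustrative stand-in, not Vagrant code):

    class DummyConfig:
        # Auto-vivifying placeholder: any missing attribute yields another DummyConfig.
        def __getattr__(self, name):
            return DummyConfig()

    cfg = DummyConfig()
    print(bool(cfg.name))                          # True  -- the broken check passes wrongly
    print("name" in vars(cfg) and bool(cfg.name))  # False -- the guarded check does not

    cfg.name = "web"
    print("name" in vars(cfg) and bool(cfg.name))  # True  -- real options still pass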
923f96fe5d9a0369fac4a26fb1a394017fe004d1
diff --git a/pywbem/cim_obj.py b/pywbem/cim_obj.py index <HASH>..<HASH> 100644 --- a/pywbem/cim_obj.py +++ b/pywbem/cim_obj.py @@ -437,8 +437,10 @@ class NocaseDict(object): if isinstance(arg, (list, tuple)): # Initialize from iterable of: tuple(key,value), or object try: + # This is used for iterables: iterator = arg.items() except AttributeError: + # This is used for dictionaries: iterator = arg for item in iterator: try: @@ -1753,8 +1755,10 @@ class CIMInstanceName(_CIMComparisonMixin): self._keybindings = NocaseDict() if keybindings: try: + # This is used for iterables: iterator = keybindings.items() except AttributeError: + # This is used for dictionaries: iterator = keybindings for item in iterator: if isinstance(item, CIMProperty): @@ -2644,8 +2648,10 @@ class CIMInstance(_CIMComparisonMixin): self._properties = NocaseDict() if properties: try: + # This is used for iterables: iterator = properties.items() except AttributeError: + # This is used for dictionaries: iterator = properties for item in iterator: if isinstance(item, CIMProperty): @@ -2695,8 +2701,10 @@ class CIMInstance(_CIMComparisonMixin): self._qualifiers = NocaseDict() if qualifiers: try: + # This is used for iterables: iterator = qualifiers.items() except AttributeError: + # This is used for dictionaries: iterator = qualifiers for item in iterator: if isinstance(item, CIMQualifier): @@ -3732,8 +3740,10 @@ class CIMClass(_CIMComparisonMixin): self._properties = NocaseDict() if properties: try: + # This is used for iterables: iterator = properties.items() except AttributeError: + # This is used for dictionaries: iterator = properties for item in iterator: if isinstance(item, CIMProperty): @@ -3788,8 +3798,10 @@ class CIMClass(_CIMComparisonMixin): self._methods = NocaseDict() if methods: try: + # This is used for iterables: iterator = methods.items() except AttributeError: + # This is used for dictionaries: iterator = methods for item in iterator: if isinstance(item, CIMMethod): @@ -3845,8 +3857,10 @@ class CIMClass(_CIMComparisonMixin): self._qualifiers = NocaseDict() if qualifiers: try: + # This is used for iterables: iterator = qualifiers.items() except AttributeError: + # This is used for dictionaries: iterator = qualifiers for item in iterator: if isinstance(item, CIMQualifier): @@ -4566,8 +4580,10 @@ class CIMProperty(_CIMComparisonMixin): self._qualifiers = NocaseDict() if qualifiers: try: + # This is used for iterables: iterator = qualifiers.items() except AttributeError: + # This is used for dictionaries: iterator = qualifiers for item in iterator: if isinstance(item, CIMQualifier): @@ -5117,8 +5133,10 @@ class CIMMethod(_CIMComparisonMixin): self._parameters = NocaseDict() if parameters: try: + # This is used for iterables: iterator = parameters.items() except AttributeError: + # This is used for dictionaries: iterator = parameters for item in iterator: if isinstance(item, CIMParameter): @@ -5174,8 +5192,10 @@ class CIMMethod(_CIMComparisonMixin): self._qualifiers = NocaseDict() if qualifiers: try: + # This is used for iterables: iterator = qualifiers.items() except AttributeError: + # This is used for dictionaries: iterator = qualifiers for item in iterator: if isinstance(item, CIMQualifier): @@ -5609,8 +5629,10 @@ class CIMParameter(_CIMComparisonMixin): self._qualifiers = NocaseDict() if qualifiers: try: + # This is used for iterables: iterator = qualifiers.items() except AttributeError: + # This is used for dictionaries: iterator = qualifiers for item in iterator: if isinstance(item, CIMQualifier):
Added clarifying comments to iteration code in cim_obj.py
pywbem_pywbem
train
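The try/except pattern being annotated above accepts either a mapping or a plain iterable of pairs: items() succeeds for mappings, and the AttributeError fallback iterates everything else directly. (Note that the added comments appear swapped relative to that: `arg.items()` is the dictionary path.) A standalone sketch with the roles labelled as the code actually behaves:

    def iter_items(arg):
        try:
            iterator = arg.items()   # mappings (dict, NocaseDict, ...)
        except AttributeError:
            iterator = arg           # iterables of (key, value) tuples or objects
        for item in iterator:
            yield item

    list(iter_items({'a': 1}))    # [('a', 1)]
    list(iter_items([('a', 1)])) # [('a', 1)]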
c7d56a309d8a1e0b9999cc39e24697f3d45eedcf
diff --git a/README b/README index <HASH>..<HASH> 100644 --- a/README +++ b/README @@ -221,5 +221,4 @@ v0.9 FEAROW * ctr forms of ciphers are missing (blowfish-ctr, aes128-ctr, aes256-ctr) * server mode needs better documentation -* add method to block until a channel's "exit-status" is set * the error message from this is confusing as hell: DSSKey() diff --git a/paramiko/channel.py b/paramiko/channel.py index <HASH>..<HASH> 100644 --- a/paramiko/channel.py +++ b/paramiko/channel.py @@ -80,11 +80,13 @@ class Channel (object): self.in_buffer_cv = threading.Condition(self.lock) self.in_stderr_buffer_cv = threading.Condition(self.lock) self.out_buffer_cv = threading.Condition(self.lock) + self.status_event = threading.Event() self.name = str(chanid) self.logger = logging.getLogger('paramiko.chan.' + str(chanid)) self.pipe_rfd = self.pipe_wfd = None self.event = threading.Event() self.combine_stderr = False + self.exit_status = -1 def __repr__(self): """ @@ -249,19 +251,60 @@ class Channel (object): m.add_byte(chr(MSG_CHANNEL_REQUEST)) m.add_int(self.remote_chanid) m.add_string('window-change') - m.add_boolean(0) + m.add_boolean(1) m.add_int(width) m.add_int(height) m.add_int(0).add_int(0) self.event.clear() self.transport._send_user_message(m) - while 1: + while True: self.event.wait(0.1) if self.closed: return False if self.event.isSet(): return True + def recv_exit_status(self): + """ + Return the exit status from the process on the server. This is + mostly useful for retrieving the reults of an L{exec_command}. + If the command hasn't finished yet, this method will wait until + it does, or until the channel is closed. If no exit status is + provided by the server, -1 is returned. + + @return: the exit code of the process on the server. + @rtype: int + + @since: 1.2 + """ + while True: + if self.closed or self.status_event.isSet(): + return self.exit_status + self.status_event.wait(0.1) + + def send_exit_status(self, status): + """ + Send the exit status of an executed command to the client. (This + really only makes sense in server mode.) Many clients expect to + get some sort of status code back from an executed command after + it completes. + + @param status: the exit code of the process + @type status: int + + @since: 1.2 + """ + # in many cases, the channel will not still be open here. + # that's fine. + m = Message() + m.add_byte(chr(MSG_CHANNEL_REQUEST)) + m.add_int(self.remote_chanid) + m.add_string('exit-status') + m.add_boolean(0) + m.add_int(status) + self.transport._send_user_message(m) + self._log(DEBUG, 'EXIT-STATUS') + def get_transport(self): """ Return the L{Transport} associated with this channel. @@ -743,7 +786,7 @@ class Channel (object): disallowed. This closes the stream in one or both directions. @param how: 0 (stop receiving), 1 (stop sending), or 2 (stop - receiving and sending). + receiving and sending). @type how: int """ if (how == 0) or (how == 2): @@ -751,6 +794,30 @@ class Channel (object): self.eof_received = 1 if (how == 1) or (how == 2): self._send_eof() + + def shutdown_read(self): + """ + Shutdown the receiving side of this socket, closing the stream in + the incoming direction. After this call, future reads on this + channel will fail instantly. This is a convenience method, equivalent + to C{shutdown(0)}, for people who don't make it a habit to + memorize unix constants from the 1970s. + + @since: 1.2 + """ + self.shutdown(0) + + def shutdown_write(self): + """ + Shutdown the sending side of this socket, closing the stream in + the outgoing direction. 
After this call, future writes on this + channel will fail instantly. This is a convenience method, equivalent + to C{shutdown(1)}, for people who don't make it a habit to + memorize unix constants from the 1970s. + + @since: 1.2 + """ + self.shutdown(1) ### calls from Transport @@ -820,8 +887,8 @@ class Channel (object): def _window_adjust(self, m): nbytes = m.get_int() + self.lock.acquire() try: - self.lock.acquire() if self.ultra_debug: self._log(DEBUG, 'window up %d' % nbytes) self.out_window_size += nbytes @@ -836,6 +903,7 @@ class Channel (object): ok = False if key == 'exit-status': self.exit_status = m.get_int() + self.status_event.set() ok = True elif key == 'xon-xoff': # ignore @@ -1130,5 +1198,4 @@ class ChannelStderrFile (ChannelFile): return len(data) - # vim: set shiftwidth=4 expandtab : diff --git a/paramiko/transport.py b/paramiko/transport.py index <HASH>..<HASH> 100644 --- a/paramiko/transport.py +++ b/paramiko/transport.py @@ -951,7 +951,7 @@ class BaseTransport (threading.Thread): send a message, but block if we're in key negotiation. this is used for user-initiated requests. """ - while 1: + while True: self.clear_to_send.wait(0.1) if not self.active: self._log(DEBUG, 'Dropping user packet because connection is dead.')
[project @ Arch-1:<EMAIL><I>-public%secsh--dev--<I>--patch-<I>] add methods for sending/receiving a channel's exit status. track a channel's exit status and provide a method (recv_exit_status) to block waiting for it to arrive. also provide a convenience method for servers to send it (send_exit_status). add shutdown_read and shutdown_write. fix a bug in sending window change requests.
paramiko_paramiko
train
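The blocking recv_exit_status added above is a poll-on-Event loop: wake every 100 ms and return as soon as the status arrives or the channel closes. The mechanism, trimmed to a Python sketch:

    import threading

    class Channel:
        def __init__(self):
            self.status_event = threading.Event()
            self.exit_status = -1
            self.closed = False

        def recv_exit_status(self):
            # Block until the server reports an exit status or the channel closes.
            while True:
                if self.closed or self.status_event.is_set():
                    return self.exit_status
                self.status_event.wait(0.1)

        def _handle_exit_status(self, status):  # called from the transport thread
            self.exit_status = status
            self.status_event.set()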
c652db153dfa8ed0fbdf108392606dc6eb2f9da2
diff --git a/app/controllers/api/packages_controller.rb b/app/controllers/api/packages_controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/api/packages_controller.rb +++ b/app/controllers/api/packages_controller.rb @@ -20,7 +20,7 @@ class Api::PackagesController < Api::ApiController skip_before_filter :authorize def index - packages = Pulp::Repository.packages(params[:repository_id]) + packages = Repository.find(params[:repository_id]).packages render :json => packages end diff --git a/app/controllers/api/repositories_controller.rb b/app/controllers/api/repositories_controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/api/repositories_controller.rb +++ b/app/controllers/api/repositories_controller.rb @@ -66,7 +66,7 @@ class Api::RepositoriesController < Api::ApiController end def find_repository - @repository = Repository.find_by_pulp_id params[:id] + @repository = Repository.find(params[:id]) raise HttpErrors::NotFound, _("Couldn't find repository '#{params[:id]}'") if @repository.nil? @repository end diff --git a/app/controllers/api/sync_controller.rb b/app/controllers/api/sync_controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/api/sync_controller.rb +++ b/app/controllers/api/sync_controller.rb @@ -59,7 +59,7 @@ class Api::SyncController < Api::ApiController end def find_repository - @repository = Repository.find_by_pulp_id(params[:repository_id]) + @repository = Repository.find(params[:repository_id]) raise HttpErrors::NotFound, _("Couldn't find repository '#{params[:repository_id]}'") if @repository.nil? raise HttpErrors::NotFound, _("You can only synchronize repositories in locker environment'") if not @repository.environment.locker? @repository diff --git a/app/models/glue/pulp/errata.rb b/app/models/glue/pulp/errata.rb index <HASH>..<HASH> 100644 --- a/app/models/glue/pulp/errata.rb +++ b/app/models/glue/pulp/errata.rb @@ -42,7 +42,7 @@ class Glue::Pulp::Errata def self.repos_for_filter(filter) if repoid = filter[:repoid] - return [Repository.find_by_pulp_id(repoid)] + return [Repository.find(repoid)] elsif environment_id = filter[:environment_id] env = KTEnvironment.find(environment_id) if product_id = filter[:product_id] diff --git a/app/models/glue/pulp/repo.rb b/app/models/glue/pulp/repo.rb index <HASH>..<HASH> 100644 --- a/app/models/glue/pulp/repo.rb +++ b/app/models/glue/pulp/repo.rb @@ -68,7 +68,7 @@ module Glue::Pulp::Repo end def to_hash - @params.merge(:sync_state => self.sync_state) + pulp_repo_facts.merge(as_json).merge(:sync_state=> sync_state) end TYPE_YUM = "yum"
Some fixes involving issues with cli-system-test
Katello_katello
train
74da5f68a0ef816760a4e6ee410048b8b8cf5ced
diff --git a/gwt-material/src/main/java/gwt/material/design/client/base/MaterialWidget.java b/gwt-material/src/main/java/gwt/material/design/client/base/MaterialWidget.java index <HASH>..<HASH> 100644 --- a/gwt-material/src/main/java/gwt/material/design/client/base/MaterialWidget.java +++ b/gwt-material/src/main/java/gwt/material/design/client/base/MaterialWidget.java @@ -1478,4 +1478,11 @@ public class MaterialWidget extends ComplexPanel implements HasId, HasEnabled, H } return orientationMixin; } + + public FilterStyleMixin<MaterialWidget> getFilterStyleMixin() { + if (filterMixin == null) { + filterMixin = new FilterStyleMixin<>(this); + } + return filterMixin; + } }
Merged the current rework to <I>-snapshot
GwtMaterialDesign_gwt-material
train
c992424558fff76606aacf2342266b6923a3fda7
diff --git a/selenic/config.py b/selenic/config.py index <HASH>..<HASH> 100644 --- a/selenic/config.py +++ b/selenic/config.py @@ -24,6 +24,13 @@ class Config(object): self.local_conf = {} execfile(self.config_path, self.local_conf) + def __getattr__(self, name): + if name in self.local_conf: + return self.local_conf[name] + + raise AttributeError("{!r} object has no attribute {!r}" + .format(self.__class__, name)) + def get_driver(self): """ Creates a Selenium driver on the basis of the configuration file
Added support for reading arbitrary variables from the configuration file through the Config class.
mangalam-research_selenic
train
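The selenic commit relies on the fact that Python only calls __getattr__ when normal attribute lookup fails, which turns every key loaded from the config file into a virtual attribute. A self-contained sketch of the same mechanism (the dict is inlined here instead of being populated by execfile):

class Config:
    def __init__(self, local_conf):
        self.local_conf = local_conf  # selenic fills this via execfile

    def __getattr__(self, name):
        # Invoked only when regular attribute lookup fails.
        if name in self.local_conf:
            return self.local_conf[name]
        raise AttributeError("{!r} object has no attribute {!r}"
                             .format(self.__class__, name))

config = Config({"browser": "firefox"})
print(config.browser)      # "firefox", served by __getattr__
print(config.local_conf)   # normal attribute, __getattr__ not consulted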
be2dbf2c3f055ec3082d9a0e07fe3b86db544d4a
diff --git a/lib/ditty/controllers/component.rb b/lib/ditty/controllers/component.rb index <HASH>..<HASH> 100644 --- a/lib/ditty/controllers/component.rb +++ b/lib/ditty/controllers/component.rb @@ -63,11 +63,11 @@ module Ditty end end format.json do - headers 'Content-Type' => 'application/json' + content_type :json if success redirect "#{base_path}/#{entity.id}", 201 else - 400 + [400, { errors: entity.errors }.to_json] end end end @@ -131,7 +131,8 @@ module Ditty haml :"#{view_location}/edit", locals: { entity: entity, title: heading(:edit) } end format.json do - 400 + content_type :json + [400, { errors: entity.errors }.to_json] end end end @@ -151,7 +152,7 @@ module Ditty redirect base_path.to_s end format.json do - content_type 'application/json' + content_type :json headers 'Location' => '/users' status 204 end
fix: Feed back validation errors for JSON PUT and POST
EagerELK_ditty
train
7cc7fa27c92e2591084999d5095cf382f30672b7
diff --git a/log/logging.go b/log/logging.go index <HASH>..<HASH> 100644 --- a/log/logging.go +++ b/log/logging.go @@ -57,7 +57,7 @@ const ( FlagsPrecisionTime = log.Lmicroseconds FlagsLongFile = log.Llongfile FlagsShortFile = log.Lshortfile - FlagsDefault = FlagsNone + FlagsDefault = log.LstdFlags ) var ( @@ -88,7 +88,11 @@ func init() { Level = LevelTrace } - Flags, _ = strconv.Atoi(os.Getenv("LOG_FORMAT")) + var err error + Flags, err = strconv.Atoi(os.Getenv("LOG_FORMAT")) + if err != nil { + Flags = FlagsDefault + } DefaultLogger = New() } diff --git a/log/logging_test.go b/log/logging_test.go index <HASH>..<HASH> 100644 --- a/log/logging_test.go +++ b/log/logging_test.go @@ -16,6 +16,7 @@ var _ = Describe("Logging functions", func() { BeforeEach(func() { output = new(bytes.Buffer) log.Level = log.LevelTrace + log.SetTimestampFlags(log.FlagsNone) log.SetOutput(output) })
Use the same default flags as stdlib’s log. I just think this is what most Go developers will expect; I know I did.
timehop_golog
train
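The golog fix guards the env-var parse: when LOG_FORMAT is missing or malformed, fall back to the stdlib default instead of silently using 0 (no flags). The same guard expressed in Python (constant value illustrative; Go's log.LstdFlags happens to be 3):

import os

FLAGS_DEFAULT = 3  # stand-in for log.LstdFlags (Ldate | Ltime)

def log_flags():
    # Treat an unset or non-integer LOG_FORMAT as "use the default",
    # rather than letting the parse failure degrade to 0.
    try:
        return int(os.environ["LOG_FORMAT"])
    except (KeyError, ValueError):
        return FLAGS_DEFAULT

print(log_flags())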
a57b52ec7e16d9b9985d6303e918aa6fdfa0a141
diff --git a/core/src/main/java/hudson/model/Executor.java b/core/src/main/java/hudson/model/Executor.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/hudson/model/Executor.java +++ b/core/src/main/java/hudson/model/Executor.java @@ -389,6 +389,9 @@ public class Executor extends Thread implements ModelObject { } if (executable instanceof Actionable) { + if (LOGGER.isLoggable(Level.FINER)) { + LOGGER.log(FINER, "when running {0} from {1} we are copying {2} actions whereas the item currently has {3}", new Object[] {executable, workUnit.context.item, workUnit.context.actions, workUnit.context.item.getAllActions()}); + } for (Action action: workUnit.context.actions) { ((Actionable) executable).addAction(action); } diff --git a/core/src/main/java/hudson/model/Queue.java b/core/src/main/java/hudson/model/Queue.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/hudson/model/Queue.java +++ b/core/src/main/java/hudson/model/Queue.java @@ -612,6 +612,9 @@ public class Queue extends ResourceController implements Saveable { for (Item item : duplicatesInQueue) { for (FoldableAction a : Util.filter(actions, FoldableAction.class)) { a.foldIntoExisting(item, p, actions); + if (LOGGER.isLoggable(Level.FINE)) { + LOGGER.log(Level.FINE, "after folding {0}, {1} includes {2}", new Object[] {a, item, item.getAllActions()}); + } } } @@ -1049,7 +1052,13 @@ public class Queue extends ResourceController implements Saveable { List<Item> result = new ArrayList<Item>(); result.addAll(blockedProjects.getAll(t)); result.addAll(buildables.getAll(t)); - result.addAll(pendings.getAll(t)); + // Do not include pendings—we have already finalized WorkUnitContext.actions. + if (LOGGER.isLoggable(Level.FINE)) { + List<BuildableItem> thePendings = pendings.getAll(t); + if (!thePendings.isEmpty()) { + LOGGER.log(Level.FINE, "ignoring {0} during scheduleInternal", thePendings); + } + } for (Item item : waitingList) { if (item.task.equals(t)) { result.add(item); @@ -1414,7 +1423,7 @@ public class Queue extends ResourceController implements Saveable { p.task.getFullDisplayName()); p.isPending = false; pendings.remove(p); - makeBuildable(p); + makeBuildable(p); // TODO whatever this is for, the return value is being ignored, so this does nothing at all } }
[FIXED JENKINS-<I>] Do not consider pendings when deciding whether a schedule result should be new or existing, as we have already taken a snapshot of actions. (#<I>)
jenkinsci_jenkins
train
4ab42367cfb6550a30eddc217e4c1991959d8382
diff --git a/stream/__init__.py b/stream/__init__.py index <HASH>..<HASH> 100644 --- a/stream/__init__.py +++ b/stream/__init__.py @@ -2,7 +2,7 @@ """ stream - ~~~~~~~~ + ~~~~~~ Python implementation of stream library. See README file for more information. diff --git a/test/test_stream.py b/test/test_stream.py index <HASH>..<HASH> 100644 --- a/test/test_stream.py +++ b/test/test_stream.py @@ -92,16 +92,13 @@ def write_objs2(fpath, *objs_list): def test_low(): - """Run all test cases.""" + """Run low-level methods tests.""" # Files testdir = os.path.dirname(os.path.realpath(__file__)) gamfile = os.path.join(testdir, 'sample_reads.gam') gamfile_nof_alns = 12 rw1_gamfile = os.path.join(testdir, 'rw1_sample_reads.gam') rw2_gamfile = os.path.join(testdir, 'rw2_sample_reads.gam') - # GUM file == Unzipped GAM file - rw1_gumfile = os.path.join(testdir, 'rw1_sample_reads.gum') - rw2_gumfile = os.path.join(testdir, 'rw2_sample_reads.gum') # Read a sample file. alns = [a for a in read_alns1(gamfile)] @@ -114,33 +111,21 @@ def test_low(): assert len(alns) == len(re_alns) # Rewrite again the read data. write_objs2(rw2_gamfile, *re_alns) - # Unzip two generated files. - with gzip.open(rw1_gamfile, 'rb') as gfp, \ - open(rw1_gumfile, 'wb') as ufp: - ufp.write(gfp.read()) - with gzip.open(rw2_gamfile, 'rb') as gfp, \ - open(rw2_gumfile, 'wb') as ufp: - ufp.write(gfp.read()) # Check whether the two generated files have the same the content. - assert filecmp.cmp(rw1_gumfile, rw2_gumfile) - # Delete the generated files. + assert compare(rw1_gamfile, rw2_gamfile) + # Remove the generated files. os.remove(rw1_gamfile) - os.remove(rw1_gumfile) os.remove(rw2_gamfile) - os.remove(rw2_gumfile) def test_high(): - """Run all test cases.""" + """Run high-level methods tests.""" # Files testdir = os.path.dirname(os.path.realpath(__file__)) gamfile = os.path.join(testdir, 'sample_reads.gam') gamfile_nof_alns = 12 rw1_gamfile = os.path.join(testdir, 'rw1_sample_reads.gam') rw2_gamfile = os.path.join(testdir, 'rw2_sample_reads.gam') - # GUM file == Unzipped GAM file - rw1_gumfile = os.path.join(testdir, 'rw1_sample_reads.gum') - rw2_gumfile = os.path.join(testdir, 'rw2_sample_reads.gum') # Read a sample file. alns = [a for a in stream.parse(gamfile, vg_pb2.Alignment)] @@ -153,20 +138,40 @@ def test_high(): assert len(alns) == len(re_alns) # Rewrite again the read data. write_objs2(rw2_gamfile, *re_alns) - # Unzip two generated files. - with gzip.open(rw1_gamfile, 'rb') as gfp, \ - open(rw1_gumfile, 'wb') as ufp: - ufp.write(gfp.read()) - with gzip.open(rw2_gamfile, 'rb') as gfp, \ - open(rw2_gumfile, 'wb') as ufp: - ufp.write(gfp.read()) # Check whether the two generated files have the same the content. - assert filecmp.cmp(rw1_gumfile, rw2_gumfile) - # Delete the generated files. + assert compare(rw1_gamfile, rw2_gamfile) + # Remove the generated files. os.remove(rw1_gamfile) - os.remove(rw1_gumfile) os.remove(rw2_gamfile) - os.remove(rw2_gumfile) + + +def compare(first, second): + """Compare two stream files. + + Since the stream files are gzipped and the file name is included in the + compressed file, they need to be decompressed first before comparing their + contents. + + Args: + first (string): path to the first stream file. + second (string): path to the second stream file. + """ + ungz_first = '.ungz'.join(os.path.splitext(first)) + ungz_second = '.ungz'.join(os.path.splitext(second)) + + # Unzip first file. 
+ with gzip.open(first, 'rb') as gfp, open(ungz_first, 'wb') as ufp: + ufp.write(gfp.read()) + # Unzip second file. + with gzip.open(second, 'rb') as gfp, open(ungz_second, 'wb') as ufp: + ufp.write(gfp.read()) + # Compare two unzipped files. + result = filecmp.cmp(ungz_first, ungz_second) + # Remove decompressed files. + os.remove(ungz_first) + os.remove(ungz_second) + + return result if __name__ == '__main__':
Fix minor issues. Add `compare` method.
cartoonist_pystream-protobuf
train
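The compare() helper above decompresses before diffing because a gzip stream embeds metadata (original filename, mtime), so two archives of identical payloads need not match byte-for-byte. A quick demonstration with the stdlib (the mtime keyword requires Python 3.8+):

import gzip

data = b"identical payload"
a = gzip.compress(data, mtime=1)  # same bytes in...
b = gzip.compress(data, mtime=2)  # ...different header timestamp

print(a == b)                                     # False: headers differ
print(gzip.decompress(a) == gzip.decompress(b))   # True: payloads match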
67e76430361c4b1ff7170cfa77443b2152c0e426
diff --git a/trunk/JLanguageTool/src/java/de/danielnaber/languagetool/rules/patterns/FalseFriendRuleLoader.java b/trunk/JLanguageTool/src/java/de/danielnaber/languagetool/rules/patterns/FalseFriendRuleLoader.java index <HASH>..<HASH> 100644 --- a/trunk/JLanguageTool/src/java/de/danielnaber/languagetool/rules/patterns/FalseFriendRuleLoader.java +++ b/trunk/JLanguageTool/src/java/de/danielnaber/languagetool/rules/patterns/FalseFriendRuleLoader.java @@ -293,7 +293,7 @@ class FalseFriendRuleHandler extends XMLRuleHandler { if (currentTranslationLanguage == motherTongue) { translations.add(translation); } - if (currentTranslationLanguage == textLanguage) { + if (currentTranslationLanguage == textLanguage && language == motherTongue) { suggestions.add(translation.toString()); } translation = new StringBuilder(); diff --git a/trunk/JLanguageTool/src/test/de/danielnaber/languagetool/rules/patterns/FalseFriendRuleTest.java b/trunk/JLanguageTool/src/test/de/danielnaber/languagetool/rules/patterns/FalseFriendRuleTest.java index <HASH>..<HASH> 100644 --- a/trunk/JLanguageTool/src/test/de/danielnaber/languagetool/rules/patterns/FalseFriendRuleTest.java +++ b/trunk/JLanguageTool/src/test/de/danielnaber/languagetool/rules/patterns/FalseFriendRuleTest.java @@ -39,9 +39,12 @@ public class FalseFriendRuleTest extends TestCase { public void testHintsForGermanSpeakers() throws IOException, ParserConfigurationException, SAXException { JLanguageTool langTool = new JLanguageTool(Language.ENGLISH, Language.GERMAN); langTool.activateDefaultFalseFriendRules(); - assertErrors(1, "We will berate you.", langTool); + final List<RuleMatch> matches = assertErrors(1, "We will berate you.", langTool); + assertEquals(matches.get(0).getSuggestedReplacements().toString(), "[to provide advice, to give advice]"); assertErrors(0, "We will give you advice.", langTool); assertErrors(1, "I go to high school in Foocity.", langTool); + final List<RuleMatch> matches2 = assertErrors(1, "The chef", langTool); + assertEquals("[boss, chief]", matches2.get(0).getSuggestedReplacements().toString()); } public void testHintsForEnglishSpeakers() throws IOException, ParserConfigurationException, SAXException { @@ -62,10 +65,11 @@ public class FalseFriendRuleTest extends TestCase { assertSuggestions(3, "My brother is politic.", langTool); } - private void assertErrors(int errorCount, String s, JLanguageTool langTool) throws IOException { + private List<RuleMatch> assertErrors(int errorCount, String s, JLanguageTool langTool) throws IOException { List<RuleMatch> matches = langTool.check(s); //System.err.println(matches); assertEquals(errorCount, matches.size()); + return matches; } private void assertSuggestions(final int suggestionCount, final String s, final JLanguageTool langTool) throws IOException {
bugfix: suggestions for false friends were duplicated (or repeated even more times)
languagetool-org_languagetool
train
f959995d670cdb4f319653c4de74baaa77c74c71
diff --git a/chat/indico_chat/plugin.py b/chat/indico_chat/plugin.py index <HASH>..<HASH> 100644 --- a/chat/indico_chat/plugin.py +++ b/chat/indico_chat/plugin.py @@ -146,7 +146,7 @@ class ChatPlugin(IndicoPlugin): visible=self._has_visible_chatrooms) def extend_event_management_menu(self, event, **kwargs): - if event.canUserModify(session.user) or is_chat_admin(session.user): + if event.canModify(session.user) or is_chat_admin(session.user): return 'chat-management', SideMenuItem('Chat Rooms', url_for_plugin('chat.manage_rooms', event)) def extend_event_management_clone(self, event, **kwargs): diff --git a/piwik/indico_piwik/plugin.py b/piwik/indico_piwik/plugin.py index <HASH>..<HASH> 100644 --- a/piwik/indico_piwik/plugin.py +++ b/piwik/indico_piwik/plugin.py @@ -72,7 +72,7 @@ class PiwikPlugin(IndicoPlugin): **event_tracking_params) def add_sidemenu_item(self, event, **kwargs): - if event.canUserModify(session.user): + if event.canModify(session.user): menu_item = SideMenuItem(_("Statistics"), url_for_plugin('piwik.view', event)) return 'statistics', menu_item diff --git a/vc_vidyo/indico_vc_vidyo/templates/buttons.html b/vc_vidyo/indico_vc_vidyo/templates/buttons.html index <HASH>..<HASH> 100644 --- a/vc_vidyo/indico_vc_vidyo/templates/buttons.html +++ b/vc_vidyo/indico_vc_vidyo/templates/buttons.html @@ -1,5 +1,5 @@ {% macro render_make_me_moderator(event, vc_room, event_vc_room) %} - {% if event.canUserModify(session.user) and session.user.id != vc_room.data['owner'][1] %} + {% if event.canModify(session.user) and session.user.id != vc_room.data['owner'][1] %} <a class="i-button i-button-small highlight arrow" data-toggle="dropdown"></a> <ul class="dropdown" data-level="level1"> <li>
Chat, Piwik, VC/Vidyo: Fix modification key handling. canUserAccess doesn't check modification keys.
indico_indico-plugins
train
44745a9fac8b93a085741631292fc6b565746104
diff --git a/dpark/broadcast.py b/dpark/broadcast.py index <HASH>..<HASH> 100644 --- a/dpark/broadcast.py +++ b/dpark/broadcast.py @@ -80,11 +80,13 @@ class Broadcast: self.sendBroadcast() def __getstate__(self): - return self.uuid + return (self.uuid, self.bytes, self.bytes < self.BlockSize/2 and self.value or None) - def __setstate__(self, uuid): + def __setstate__(self, v): + self.uuid, self.bytes, value = v + if value is not None: + self.value = value Broadcast.ever_used = True - self.uuid = uuid def __getattr__(self, name): if name != 'value': @@ -179,6 +181,7 @@ class FileBroadcast(Broadcast): marshal.dump(self.value, f) else: cPickle.dump(self.value, f, -1) + self.bytes = f.tell() f.close() logger.debug("dump to %s", self.path) @@ -234,10 +237,11 @@ class TreeBroadcast(FileBroadcast): self.total_blocks = variableInfo.total_blocks logger.info("broadcast %s: %d bytes in %d blocks", self.uuid, self.total_bytes, self.total_blocks) + self.bytes = self.total_bytes self.startGuide() self.startServer() - + def startGuide(self): def run(): sock = env.ctx.socket(zmq.REP) @@ -280,6 +284,7 @@ class TreeBroadcast(FileBroadcast): for source_info in self.listOfSources[1:]: req = env.ctx.socket(zmq.REQ) + req.setsockopt(zmq.LINGER, 0) req.send_pyobj(SourceInfo.StopBroadcast) #req.recv_pyobj() req.close() @@ -315,11 +320,13 @@ class TreeBroadcast(FileBroadcast): def check_activity(self, source_info): sock = env.ctx.socket(zmq.REQ) + sock.setsockopt(zmq.LINGER, 0) sock.connect(source_info.addr) poller = zmq.Poller() poller.register(sock, zmq.POLLIN) sock.send_pyobj(-1) avail = dict(poller.poll(1 * 1000)) + poller.unregister(sock) if not avail or avail.get(sock) != zmq.POLLIN: sock.close() return False @@ -330,6 +337,7 @@ class TreeBroadcast(FileBroadcast): def startServer(self): def run(): sock = env.ctx.socket(zmq.REP) + sock.setsockopt(zmq.LINGER, 0) port = sock.bind_to_random_port("tcp://0.0.0.0") self.serverAddr = 'tcp://%s:%d' % (self.host,port) logger.debug("server started at %s", self.serverAddr) @@ -408,6 +416,7 @@ class TreeBroadcast(FileBroadcast): logger.debug("total_blocks: %s has %s", self.total_blocks, len(self.blocks)) sock = env.ctx.socket(zmq.REQ) + sock.setsockopt(zmq.LINGER, 0) sock.connect(source_info.addr) poller = zmq.Poller() poller.register(sock, zmq.POLLIN) @@ -416,21 +425,25 @@ class TreeBroadcast(FileBroadcast): avail = dict(poller.poll(10 * 1000)) if not avail or avail.get(sock) != zmq.POLLIN: logger.debug("%s recv broadcast %d from %s timeout", self.serverAddr, i, source_info.addr) + poller.unregister(sock) sock.close() return False block = sock.recv_pyobj() if not isinstance(block, BroadcastBlock) or i != block.id: logger.error("%s recv bad block %d %s", self.serverAddr, i, block) + poller.unregister(sock) sock.close() return False logger.debug("Received block: %s from %s", block.id, source_info.addr) self.blocks.append(block) + poller.unregister(sock) sock.close() return len(self.blocks) == source_info.total_blocks def getMasterAddr(self, uuid): sock = env.ctx.socket(zmq.REQ) + sock.setsockopt(zmq.LINGER, 0) sock.connect(self.master_addr) sock.send_pyobj(uuid) guide_addr = sock.recv_pyobj() @@ -444,6 +457,7 @@ class TreeBroadcast(FileBroadcast): def run(): sock = env.ctx.socket(zmq.REP) + sock.setsockopt(zmq.LINGER, 0) port = sock.bind_to_random_port("tcp://0.0.0.0") cls.master_addr = 'tcp://%s:%d' % (cls.host, port) logger.debug("TreeBroadcast tracker started at %s", diff --git a/dpark/schedule.py b/dpark/schedule.py index <HASH>..<HASH> 100644 --- a/dpark/schedule.py 
+++ b/dpark/schedule.py @@ -625,7 +625,7 @@ class MesosScheduler(DAGScheduler): task.slave_id.value = o.slave_id.value task.data = cPickle.dumps((t, t.tried), -1) task.executor.MergeFrom(self.executor) - if len(task.data) > 100*1024: + if len(task.data) > 1000*1024: logger.warning("task too large: %s %d", t, len(task.data))
Bypass broadcast for objects smaller than <I>K by sending them directly; enlarge the task data limit to 1M.
douban_dpark
train
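The dpark broadcast change piggybacks small values on the pickle itself so receivers skip a network fetch. The __getstate__/__setstate__ mechanics, reduced to a runnable sketch (threshold and field names illustrative; the fetch path for large values is omitted):

import pickle

BLOCK_SIZE = 1024  # illustrative cut-off

class Broadcast:
    def __init__(self, uuid, value, nbytes):
        self.uuid, self.value, self.nbytes = uuid, value, nbytes

    def __getstate__(self):
        # Ship the value inline only if it is small enough.
        small = self.value if self.nbytes < BLOCK_SIZE // 2 else None
        return (self.uuid, self.nbytes, small)

    def __setstate__(self, state):
        self.uuid, self.nbytes, value = state
        if value is not None:
            self.value = value  # arrived inline, no fetch needed

b = pickle.loads(pickle.dumps(Broadcast("id-1", [1, 2, 3], 64)))
print(b.value)  # [1, 2, 3], carried inside the pickle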
1b2112c3b2c6f86b9ec578a104118f1d9a3f9183
diff --git a/core/array.rb b/core/array.rb index <HASH>..<HASH> 100644 --- a/core/array.rb +++ b/core/array.rb @@ -744,7 +744,7 @@ class Array < `Array` def replace(other) %x{ - #{self}.splice(0); + #{self}.splice(0, #{self}.length); #{self}.push.apply(#{self}, other); return #{self}; }
Fix bug in Array#replace not removing elements first in IE
opal_opal
train
8fee46a711c69ce30cfa6a65b30594d643e29a1e
diff --git a/conf/scripts/widgets/applist/applist.html b/conf/scripts/widgets/applist/applist.html index <HASH>..<HASH> 100644 --- a/conf/scripts/widgets/applist/applist.html +++ b/conf/scripts/widgets/applist/applist.html @@ -1,7 +1,7 @@ <div class="applist"> <ul> - <li ng-repeat="app in list.links | orderBy:'title'"> - <a target="_blank" ng-href="{{app.href}}">{{app.title}}</a> + <li ng-repeat="app in list.appMasters | orderBy:'appId'"> + <a target="_blank" ng-href="{{app.workerPath}}">{{app.appId}}</a> </li> </ul> </div> diff --git a/conf/scripts/widgets/applist/applist.js b/conf/scripts/widgets/applist/applist.js index <HASH>..<HASH> 100644 --- a/conf/scripts/widgets/applist/applist.js +++ b/conf/scripts/widgets/applist/applist.js @@ -22,7 +22,7 @@ 'use strict'; angular.module('app.widgets.applist', ['adf.provider']) -.value('appMastersUrl', location.href+'/appMasters') +.value('appMastersUrl', location.origin+'/appmasters') .config(function(dashboardProvider){ dashboardProvider.widget('applist', { title: 'Applications', @@ -37,34 +37,21 @@ angular.module('app.widgets.applist', ['adf.provider']) } }); }) -.service('applistService', function($q, $http, appMastersUrl){ - return { - get: function(path){ - var deferred = $q.defer(); - var url = appMasterUrl + path; - $http.jsonp(url) - .success(function(data){ - if (data && data.meta){ - var status = data.meta.status; - if ( status < 300 ){ - deferred.resolve(data.data); - } else { - deferred.reject(data.data.message); - } - } - }) - .error(function(){ - deferred.reject(); - }); - return deferred.promise; - } - }; -}) -.controller('applistCtrl', function($scope, config){ +.controller('applistCtrl', function($scope, $http, config, appMastersUrl){ + function getAppList(response){ if (!config.appMasters){ config.appMasters = []; } + var data = (response.data && response.data.appMasters) || []; + data.forEach(function(appMaster) { + config.appMasters.push(appMaster); + }); this.appMasters = config.appMasters; + } + var _getAppList = getAppList.bind(this); + $http.get(appMastersUrl).then(_getAppList, function(err){ + throw err; + }); }).controller('applistEditCtrl', function($scope){ function getLinks(){ if (!$scope.config.appMasters){ diff --git a/conf/scripts/widgets/dag/dag.js b/conf/scripts/widgets/dag/dag.js index <HASH>..<HASH> 100644 --- a/conf/scripts/widgets/dag/dag.js +++ b/conf/scripts/widgets/dag/dag.js @@ -95,7 +95,7 @@ angular.module('app.widgets.dag', ['adf.provider', 'nvd3']) var force = d3.layout.force() .gravity(.05) .linkDistance(200) - .charge(-600) + .charge(-400) .size([width, height]) .nodes(scope.data.nodes) .links(scope.data.links) @@ -104,8 +104,8 @@ angular.module('app.widgets.dag', ['adf.provider', 'nvd3']) function pathattr(d) { var dx = d.target.x - d.source.x, - dy = d.target.y - d.source.y; - var path = "M0,0" + "L" + dx + "," + dy; + dy = d.target.y - d.source.y, + path = "M0,0" + "L" + dx + "," + dy; return path; }
Fixes #<I>. Conflicts: conf/scripts/widgets/applist/applist.html, conf/scripts/widgets/applist/applist.js
gearpump_gearpump
train
3de7149d23d6c40965ef87c21884a7d8060b067a
diff --git a/lib/tickly.rb b/lib/tickly.rb index <HASH>..<HASH> 100644 --- a/lib/tickly.rb +++ b/lib/tickly.rb @@ -4,5 +4,5 @@ require File.dirname(__FILE__) + "/tickly/curve" require File.dirname(__FILE__) + "/tickly/node_processor" module Tickly - VERSION = '2.1.2' + VERSION = '2.1.3' end diff --git a/lib/tickly/curve.rb b/lib/tickly/curve.rb index <HASH>..<HASH> 100644 --- a/lib/tickly/curve.rb +++ b/lib/tickly/curve.rb @@ -17,9 +17,14 @@ module Tickly # [:c, "curve", "x1", "123", "456", ...] def initialize(curve_expression) raise InvalidCurveError, "A curve expression should have :c as it's first symbol" unless curve_expression[0] == :c - raise InvalidCurveError, "A curve expression should start with a `curve' command" unless curve_expression[1] == "curve" raise InvalidCurveError, "Curve expression contained no values" unless curve_expression[2] + # Nuke7 sometimes produces curves where the command is a string literal + # within quotes, and it contains a trailing space + cmd = curve_expression[1].to_s.strip + raise InvalidCurveError, "Curve expression should start with a 'curve' command" unless cmd == 'curve' + + expand_curve(curve_expression) end diff --git a/test/test_curve.rb b/test/test_curve.rb index <HASH>..<HASH> 100644 --- a/test/test_curve.rb +++ b/test/test_curve.rb @@ -28,4 +28,14 @@ x754 912.0731812 x755 913.7190552 916.0959473 918.1025391 920.0751953 922.189880 Tickly::Curve.new([:c, "curve"]) end end + + def test_curve_with_trailing_space_at_command_end + atoms = [:c, "curve ", "x374", "1008.35", "899.289", "809.798", + "742.572", "825.061", "1013.43", "1238.31", "1490.91", + "1698.4", "1848.96", "1889.24", "1961.12", "2024.13", + "2090.3", "2114.74", "2164.57", "2227.17", "2309.3"] + + c = Tickly::Curve.new(atoms) + assert_kind_of Tickly::Curve, c + end end \ No newline at end of file
Add the fix for padded "curve " commands
julik_tickly
train
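The tickly fix normalizes the command token before comparing, since Nuke 7 can emit it as a quoted literal with a trailing space. The same pattern in Python (function and argument names hypothetical):

def check_curve(expr):
    # Strip whitespace so '"curve "' from Nuke 7 still matches.
    cmd = str(expr[1]).strip()
    if cmd != "curve":
        raise ValueError("Curve expression should start with a 'curve' command")

check_curve(["c", "curve ", "x374", "1008.35"])  # passes after stripping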
5406657db8d7242bf71a0511c222005722c99e50
diff --git a/src/main/java/com/jayway/maven/plugins/android/configuration/Test.java b/src/main/java/com/jayway/maven/plugins/android/configuration/Test.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/jayway/maven/plugins/android/configuration/Test.java +++ b/src/main/java/com/jayway/maven/plugins/android/configuration/Test.java @@ -15,39 +15,39 @@ public class Test { */ private String skip; /** - * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testSkip} + * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testInstrumentationPackage} */ private String instrumentationPackage; /** - * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testSkip} + * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testInstrumentationRunner} */ private String instrumentationRunner; /** - * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testSkip} + * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testDebug} */ private Boolean debug; /** - * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testSkip} + * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testCoverage} */ private Boolean coverage; /** - * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testSkip} + * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testLogOnly} */ private Boolean logOnly; /** - * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testSkip} + * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testSize} */ private String testSize; /** - * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testSkip} + * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testCreateReport} */ private Boolean createReport; /** - * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testSkip} + * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testPackages} */ protected List<String> packages; /** - * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testSkip} + * Mirror of {@link com.jayway.maven.plugins.android.AbstractInstrumentationMojo#testClasses} */ protected List<String> classes;
Fix names of referenced configuration member variables in javadoc
simpligility_android-maven-plugin
train
bcff554bd8406fefa81c77a54eb18d769e1fbbbd
diff --git a/Makefile b/Makefile index <HASH>..<HASH> 100644 --- a/Makefile +++ b/Makefile @@ -1,8 +1,7 @@ all: test test: - go test -v client/*.go - go test -v metadata/*.go + go test -v ./... clean: rm *.test diff --git a/client/client.go b/client/client.go index <HASH>..<HASH> 100644 --- a/client/client.go +++ b/client/client.go @@ -6,7 +6,6 @@ package client import ( "errors" - "io" "net/http" "net/http/cookiejar" "strings" @@ -59,7 +58,7 @@ type Session struct { Client http.Client } -func NewSession(user, pw, userAgent, userAgentPw, retsVersion string, logger io.WriteCloser) (RetsSession, error) { +func NewSession(user, pw, userAgent, userAgentPw, retsVersion string, transport http.RoundTripper) (RetsSession, error) { var session Session session.Username = user session.Password = pw @@ -71,19 +70,14 @@ func NewSession(user, pw, userAgent, userAgentPw, retsVersion string, logger io. session.Accept = "*/*" session.Cookies = make([]*http.Cookie, 0) - transport := http.DefaultTransport - if logger != nil { - dial := WireLog(logger) - transport = &http.Transport{ - Proxy: http.ProxyFromEnvironment, - DisableCompression: true, // if you're logging it, you might want to read it - Dial: dial, - } + if transport == nil { + transport = http.DefaultTransport } + retsTransport := RetsTransport{ transport: transport, session: session, - digest: nil, + digest: nil, } jar, err := cookiejar.New(nil) if err != nil { diff --git a/client/wirelog.go b/client/wirelog.go index <HASH>..<HASH> 100644 --- a/client/wirelog.go +++ b/client/wirelog.go @@ -12,9 +12,9 @@ import ( type Dialer func(network, addr string) (net.Conn, error) /** create a net.Dial function based on this log */ -func WireLog(log io.WriteCloser) Dialer { +func WireLog(log io.WriteCloser, dial Dialer) Dialer { return func(network, addr string) (net.Conn, error) { - conn, err := net.Dial(network, addr) + conn, err := dial(network, addr) wire := WireLogConn{ log: log, Conn: conn, diff --git a/cmds/client.go b/cmds/client.go index <HASH>..<HASH> 100644 --- a/cmds/client.go +++ b/cmds/client.go @@ -10,10 +10,11 @@ package main import ( "flag" "fmt" - "io" + "net" + "net/http" "os" - gorets "github.com/jpfielding/gorets_client" + gorets "github.com/jpfielding/gorets/client" ) func main() { @@ -27,18 +28,23 @@ func main() { flag.Parse() - var logger io.WriteCloser = nil + d := net.Dial + if *logFile != "" { file, err := os.Create(*logFile) if err != nil { panic(err) } defer file.Close() - logger = file fmt.Println("wire logging enabled: ", file.Name()) + d = gorets.WireLog(file, d) } + // should we throw an err here too? - session, err := gorets.NewSession(*username, *password, *userAgent, *userAgentPw, *retsVersion, logger) + session, err := gorets.NewSession(*username, *password, *userAgent, *userAgentPw, *retsVersion, &http.Transport{ + DisableCompression: true, + Dial: d, + }) if err != nil { panic(err) }
Make transport options more flexible; general cleanup.
jpfielding_gorets
train
407143c07846326c02daf30b9ea246ce322f6c13
diff --git a/tests.py b/tests.py index <HASH>..<HASH> 100644 --- a/tests.py +++ b/tests.py @@ -1118,8 +1118,36 @@ class TestResource(TestCase): @patch('wac.Client._op') def test_get(self, _op): - Resource1.get('gooid') - _op.assert_called_once_with(wac.requests.get, 'gooid') + Resource1.get('/v2/1s/gooid') + _op.assert_called_once_with(wac.requests.get, '/v2/1s/gooid') + + @patch('wac.Client._op') + def test_get_collection(self, _op): + with self.assertRaises(ValueError) as ex_ctx: + Resource1.get('/v2/1s') + self.assertIn('', ex_ctx.exception) + + @patch('wac.Client._op') + def test_get_collection(self, _op): + with self.assertRaises(ValueError) as ex_ctx: + Resource1.get('/v2/1s') + self.assertIn( + "'/v2/1s' resolves to a Resource1 collection", + str(ex_ctx.exception)) + + @patch('wac.Client._op') + def test_get_member_mismatch(self, _op): + with self.assertRaises(ValueError) as ex_ctx: + Resource1.get('/v2/2s/id') + self.assertIn( + "'/v2/2s/id' resolves to a Resource2 member which is not a " + "subclass of Resource1", + ex_ctx.exception) + + @patch('wac.Client._op') + def test_get_member_base(self, _op): + Resource.get('/v2/1s/gooid') + _op.assert_called_once_with(wac.requests.get, '/v2/1s/gooid') @patch('wac.Client._op') def test_create_from_save(self, _op): diff --git a/wac.py b/wac.py index <HASH>..<HASH> 100644 --- a/wac.py +++ b/wac.py @@ -1221,6 +1221,14 @@ class Resource(object): @classmethod def get(cls, uri): + resource, flags = cls.registry.match(uri) + if flags.get('collection', False): + raise ValueError("'{0}' resolves to a {1} collection".format( + uri, resource.__name__)) + if not issubclass(resource, cls): + raise ValueError( + "'{0}' resolves to a {1} member which is not a subclass of {2}" + .format(uri, resource.__name__, cls.__name__)) resp = cls.client.get(uri) return cls(**resp.data)
Add some validation checks to Resource.get: the URI must map to a member whose class is cls or a subclass of cls.
balanced_wac
train
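The wac commit adds two guards to Resource.get: the URI must not resolve to a collection, and the member's resource class must be cls or a subclass. A condensed, runnable sketch with the registry faked as a dict (the real code then issues an HTTP GET):

class Resource:
    registry = {}  # hypothetical: uri -> (resource_class, is_collection)

    @classmethod
    def get(cls, uri):
        resource, is_collection = cls.registry[uri]
        if is_collection:
            raise ValueError("'{0}' resolves to a {1} collection"
                             .format(uri, resource.__name__))
        if not issubclass(resource, cls):
            raise ValueError(
                "'{0}' resolves to a {1} member which is not a subclass "
                "of {2}".format(uri, resource.__name__, cls.__name__))
        return resource()

class Resource1(Resource): pass
class Resource2(Resource): pass

Resource.registry = {"/v2/1s": (Resource1, True),
                     "/v2/1s/gooid": (Resource1, False)}

Resource1.get("/v2/1s/gooid")    # ok: member of the right class
# Resource1.get("/v2/1s")        # ValueError: collection, not a member
# Resource2.get("/v2/1s/gooid")  # ValueError: not a subclass of Resource2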
0beb4d12ae8b3134a5f009c93b3180602f969a60
diff --git a/taar/recommenders/similarity_recommender.py b/taar/recommenders/similarity_recommender.py index <HASH>..<HASH> 100644 --- a/taar/recommenders/similarity_recommender.py +++ b/taar/recommenders/similarity_recommender.py @@ -187,8 +187,9 @@ class SimilarityRecommender(BaseRecommender): index_lrs_iter = zip(indices[donor_log_lrs > 0.0], donor_log_lrs) recommendations = [] for (index, lrs) in index_lrs_iter: - candidate = (self.donors_pool[index]['active_addons'], lrs) - recommendations.append(candidate) + for term in self.donors_pool[index]['active_addons']: + candidate = (term, lrs) + recommendations.append(candidate) if len(recommendations) > limit: break return recommendations[:limit] diff --git a/tests/test_similarityrecommender.py b/tests/test_similarityrecommender.py index <HASH>..<HASH> 100644 --- a/tests/test_similarityrecommender.py +++ b/tests/test_similarityrecommender.py @@ -127,18 +127,10 @@ def test_recommendations(mock_s3_continuous_data): assert isinstance(recommendation_list, list) assert len(recommendation_list) == 1 - recommendations, weight = recommendation_list[0] - - # Make sure the structure of the recommendations is correct and that we recommended the the right addons. - assert isinstance(recommendations, list) + recommendation, weight = recommendation_list[0] # Make sure that the reported addons are the expected ones from the most similar donor. - assert "{test-guid-1}" in recommendations - assert "{test-guid-2}" in recommendations - assert "{test-guid-3}" in recommendations - assert "{test-guid-4}" in recommendations - assert len(recommendations) == 4 - + assert "{test-guid-1}" == recommendation assert type(weight) == np.float64 @@ -265,8 +257,7 @@ def test_weights_continuous(mock_s3_continuous_data): assert len(recommendation_list) == 2 for recommendation, weight in recommendation_list: - assert isinstance(recommendation, list) - assert isinstance(recommendation[0], str) + assert isinstance(recommendation, str) assert isinstance(weight, float) # Test that sorting is appropriate. @@ -298,8 +289,7 @@ def test_weights_categorical(mock_s3_categorical_data): assert len(recommendation_list) == 2 # Make sure the structure of the recommendations is correct and that we recommended the the right addons. for recommendation, weight in recommendation_list: - assert isinstance(recommendation, list) - assert isinstance(recommendation[0], str) + assert isinstance(recommendation, str) assert isinstance(weight, float) # Test that sorting is appropriate.
Flattened the output of SimilarityRecommender: the SimilarityRecommender::recommend() method now emits a list of 2-tuples, each of the form (recommendation, weight).
mozilla_taar
train
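The taar change turns each (addon_list, score) donor into one (addon, score) tuple per add-on. The transformation in isolation, mirroring the loop-and-limit logic of the reworked recommend() body:

def flatten(scored_donors, limit):
    recommendations = []
    for addons, score in scored_donors:
        for addon in addons:
            recommendations.append((addon, score))  # one tuple per add-on
        if len(recommendations) > limit:
            break
    return recommendations[:limit]

donors = [(["{guid-1}", "{guid-2}"], 0.9), (["{guid-3}"], 0.4)]
print(flatten(donors, limit=2))  # [('{guid-1}', 0.9), ('{guid-2}', 0.9)]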
4336e0df836fe426a735a14f9c97da6d4ad62c6b
diff --git a/lib/celluloid_benchmark/visitor.rb b/lib/celluloid_benchmark/visitor.rb index <HASH>..<HASH> 100644 --- a/lib/celluloid_benchmark/visitor.rb +++ b/lib/celluloid_benchmark/visitor.rb @@ -61,8 +61,8 @@ module CelluloidBenchmark end end - def get_json(uri) - get uri, [], nil, { "Accept" => "application/json, text/javascript, */*; q=0.01" } + def get_json(uri, headers = {}) + get uri, [], nil, headers.merge("Accept" => "application/json, text/javascript, */*; q=0.01") end def post_json(uri, query)
Allow additional headers for JSON requests
scottwillson_celluloid-benchmark
train
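The celluloid-benchmark tweak lets callers add headers while the mandatory Accept header still wins any conflict, because it is merged in last. The same merge-with-override in Python (a real client would issue the request instead of returning the dict):

ACCEPT_JSON = "application/json, text/javascript, */*; q=0.01"

def get_json(uri, headers=None):
    # Caller headers first, required Accept last, so Accept wins on a
    # clash, just like Ruby's headers.merge("Accept" => ...).
    merged = {**(headers or {}), "Accept": ACCEPT_JSON}
    return uri, merged

print(get_json("/listings", {"X-Trace": "1"}))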
b4dbfd6c385b19b02da4b609cc40e2b79452c3e6
diff --git a/Makefile b/Makefile index <HASH>..<HASH> 100644 --- a/Makefile +++ b/Makefile @@ -33,7 +33,7 @@ install: .installed format: .installed .formatted -PYFILES = $(shell find * -type f -name "*.py") +PYFILES = $(shell find * -type f -name "*.py" | grep -v '__init__.py') .formatted: .installed $(PYFILES) isort $? > /dev/null diff --git a/examples/__init__.py b/examples/__init__.py index <HASH>..<HASH> 100755 --- a/examples/__init__.py +++ b/examples/__init__.py @@ -1,8 +1,11 @@ -import os import inspect +import os import sys +import peony # noqa +from peony import utils # noqa + try: try: from . import api # noqa @@ -21,8 +24,6 @@ testdir = os.path.dirname(file_) sys.path.insert(0, os.path.dirname(testdir)) -import peony # noqa -from peony import utils # noqa msg = "peony v" + peony.__version__ msg += "\n" + "-" * len(msg) print(msg) diff --git a/peony/__init__.py b/peony/__init__.py index <HASH>..<HASH> 100644 --- a/peony/__init__.py +++ b/peony/__init__.py @@ -8,8 +8,6 @@ """ -import logging - __author__ = "Florian Badie" __author_email__ = "florianbadie@gmail.com" __url__ = "https://github.com/odrling/peony-twitter" @@ -20,8 +18,11 @@ __license__ = "MIT License" __keywords__ = "twitter, asyncio, asynchronous" +import logging + logger = logging.getLogger(__name__) from .client import BasePeonyClient, PeonyClient # noqa from .commands import EventStream, event_handler, events, task # noqa from .utils import ErrorHandler, set_debug # noqa + diff --git a/peony/commands/__init__.py b/peony/commands/__init__.py index <HASH>..<HASH> 100644 --- a/peony/commands/__init__.py +++ b/peony/commands/__init__.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -from .event_handlers import EventStream, EventStreams, EventHandler # noqa +from .event_handlers import EventHandler, EventStream, EventStreams # noqa from .event_types import events # noqa from .tasks import task # noqa diff --git a/tests/__init__.py b/tests/__init__.py index <HASH>..<HASH> 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -6,9 +6,10 @@ import inspect import json import os.path import pathlib -import aiofiles import sys +import aiofiles + file_ = pathlib.Path(inspect.getfile(inspect.currentframe())) test_dir = file_.absolute().parent diff --git a/tests/tests_client/__init__.py b/tests/tests_client/__init__.py index <HASH>..<HASH> 100644 --- a/tests/tests_client/__init__.py +++ b/tests/tests_client/__init__.py @@ -1,9 +1,10 @@ +import asyncio from unittest import mock import aiohttp -import asyncio from peony import BasePeonyClient, PeonyClient, utils + from .. import MockResponse
don't run formatters on __init__.py (it could break everything)
odrling_peony-twitter
train
9c3c2f09bc61049781986bd21d64f5ce5ef57583
diff --git a/crunchyroll/api.py b/crunchyroll/api.py index <HASH>..<HASH> 100644 --- a/crunchyroll/api.py +++ b/crunchyroll/api.py @@ -4,7 +4,7 @@ import functools import requests -from .constants import ANDROID +from .constants import ANDROID, AJAX from .errors import * class ApiInterface(object): @@ -403,7 +403,25 @@ class AndroidApi(ApiInterface): class AjaxApi(ApiInterface): """AJAX call API """ - pass + + METHOD_POST = 'POST' + METHOD_GET = 'GET' + + def __init__(self): + self._connector = requests.Session() + + def start_session(self): + pass + + def user_login(self): + pass + + def videoPlayer_getStandardConfig(self): + pass + + def videoPlayer_getChromelessConfig(self): + pass + class ScraperApi(ApiInterface): """HTML scraping interface diff --git a/crunchyroll/constants.py b/crunchyroll/constants.py index <HASH>..<HASH> 100644 --- a/crunchyroll/constants.py +++ b/crunchyroll/constants.py @@ -78,9 +78,12 @@ class ANDROID(API): IMAGE_FULL_URL = 'image.full_url' class AJAX(API): - # not used yet - PREMIUM_TYPE_ANIME = '2' - PREMIUM_TYPE_DRAMA = '4' + API_DOMAIN = 'www.' + API.BASE_DOMAIN + API_URL = '{protocol}://' + API_DOMAIN + '/xml/' + + HEADER_REFERRER = 'http://static.ak.crunchyroll.com/flash/20130201144858.bd8118f7c58d1da788d88782497e30a4/StandardVideoPlayer.swf' + HEADER_ORIGIN = 'http://static.ak.crunchyroll.com' + HEADER_USER_AGENT = ANDROID.USER_AGENT class WEB(API): pass
Some preliminary work on the AJAX API.
aheadley_python-crunchyroll
train
27a909cc75fcaf93d2041a861e7a8b0835ea25cc
diff --git a/src/Illuminate/Foundation/Application.php b/src/Illuminate/Foundation/Application.php index <HASH>..<HASH> 100755 --- a/src/Illuminate/Foundation/Application.php +++ b/src/Illuminate/Foundation/Application.php @@ -771,6 +771,16 @@ class Application extends Container implements HttpKernelInterface, ResponsePrep } /** + * Gets the current application locale. + * + * @return string + */ + public function getLocale() + { + return $this['config']->get('app.locale'); + } + + /** * Get the service providers that have been loaded. * * @return array
Added getLocale() method to Application, which returns the current locale from the config.
laravel_framework
train
44fd3e80fdc67a5dfdf834478a19b2a1523aefbd
diff --git a/lib/rentjuicer/listing.rb b/lib/rentjuicer/listing.rb index <HASH>..<HASH> 100644 --- a/lib/rentjuicer/listing.rb +++ b/lib/rentjuicer/listing.rb @@ -51,7 +51,13 @@ module Rentjuicer end def neighborhood_name - self.neighborhoods.first unless neighborhoods.blank? + unless neighborhoods.blank? + if self.neighborhoods.first.is_a?(String) + self.neighborhoods.first + elsif self.neighborhoods.first.is_a?(Array) + self.neighborhoods.first[1] + end + end end def mls_listing?
Sometimes the API returns a hash and sometimes an array; not sure why that is happening. Handle both cases in neighborhood_name.
tcocca_rentjuicer
train
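The rentjuicer workaround copes with an API field whose first element is sometimes a string and sometimes an array. A defensive accessor in the same spirit (the shape of the array case is assumed from the Ruby branches):

def neighborhood_name(neighborhoods):
    if not neighborhoods:
        return None
    first = neighborhoods[0]
    if isinstance(first, str):
        return first                 # plain string case
    if isinstance(first, (list, tuple)):
        return first[1]              # (key, value)-style case
    return None

print(neighborhood_name(["Back Bay"]))          # Back Bay
print(neighborhood_name([("id", "Back Bay")]))  # Back Bay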
4f63d13d82baefffaeb1fbf1cb1c3eeefad458e7
diff --git a/beam_fit.py b/beam_fit.py index <HASH>..<HASH> 100644 --- a/beam_fit.py +++ b/beam_fit.py @@ -12,7 +12,7 @@ def beam_fit(psf,psfheader): """ psf_max_location = np.unravel_index(np.argmax(psf), psf.shape) - threshold_psf = np.where(psf>0.01,psf,0) + threshold_psf = np.where(psf>0.1,psf,0) labelled_psf, labels = ndimage.label(threshold_psf) diff --git a/pymoresane.py b/pymoresane.py index <HASH>..<HASH> 100644 --- a/pymoresane.py +++ b/pymoresane.py @@ -241,12 +241,12 @@ class FitsImage: scale_adjust = 0 for i in range(max_index-1,-1,-1): - if max_index > 1: - if (normalised_scale_maxima[i,0,0] > normalised_scale_maxima[i+1,0,0]): - scale_adjust = i + 1 - logger.info("Scale {} contains a local maxima. Ignoring scales <= {}" - .format(scale_adjust, scale_adjust)) - break + # if max_index > 1: + # if (normalised_scale_maxima[i,0,0] > normalised_scale_maxima[i+1,0,0]): + # scale_adjust = i + 1 + # logger.info("Scale {} contains a local maxima. Ignoring scales <= {}" + # .format(scale_adjust, scale_adjust)) + # break if (normalised_scale_maxima[i,0,0] == 0): scale_adjust = i + 1 logger.info("Scale {} is empty. Ignoring scales <= {}".format(scale_adjust, scale_adjust))
A few quick fixes. Note the removal of the local-maxima search.
ratt-ru_PyMORESANE
train
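The beam_fit tweak only raises the threshold from 0.01 to 0.1; the surrounding machinery is unchanged. The threshold-then-label step, runnable on a toy PSF:

import numpy as np
from scipy import ndimage

psf = np.array([[0.05, 0.2, 0.0],
                [0.15, 0.9, 0.12],
                [0.0, 0.3, 0.02]])

threshold_psf = np.where(psf > 0.1, psf, 0)          # zero out weak pixels
labelled_psf, labels = ndimage.label(threshold_psf)  # as in beam_fit
print(labels)  # count of connected regions left above the threshold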
1bf8c69edbd1d3dc07f6f6daa56654aec8b1fe87
diff --git a/src/Diff/Base.php b/src/Diff/Base.php index <HASH>..<HASH> 100644 --- a/src/Diff/Base.php +++ b/src/Diff/Base.php @@ -56,6 +56,15 @@ abstract class Base { */ abstract public function getChangeLog(); + /** + * @return string The source version for this diff. + */ + abstract public function getFrom(); + + /** + * @return string The target version for this diff. + */ + abstract public function getTo(); public function getBasePath() { return $this->basePath; diff --git a/src/Diff/Git.php b/src/Diff/Git.php index <HASH>..<HASH> 100644 --- a/src/Diff/Git.php +++ b/src/Diff/Git.php @@ -88,11 +88,6 @@ class Git extends Base { $this->initChanges(); } - public function run($source, $target) - { - - } - protected function initChanges() { $lines = $this->gitArray('diff', ["--name-status", $this->sourceHash, $this->targetHash]); @@ -168,4 +163,12 @@ class Git extends Base { return $result; } + public function getFrom() { + return $this->from; + } + + public function getTo() { + return $this->target; + } + } \ No newline at end of file diff --git a/src/Executor/PreUpdate.php b/src/Executor/PreUpdate.php index <HASH>..<HASH> 100644 --- a/src/Executor/PreUpdate.php +++ b/src/Executor/PreUpdate.php @@ -153,4 +153,12 @@ class PreUpdate extends Base { public function getRemovedFiles() { return $this->removedFiles; } + + public function getFromVersion() { + return $this->data['from']; + } + + public function getToVersion() { + return $this->data['to']; + } } diff --git a/src/Generator/PreUpdate.php b/src/Generator/PreUpdate.php index <HASH>..<HASH> 100644 --- a/src/Generator/PreUpdate.php +++ b/src/Generator/PreUpdate.php @@ -19,6 +19,8 @@ class PreUpdate extends Base { 'sourceHashes' => $this->diff->getSourceHashes(), 'targetHashes' => $this->diff->getTargetHashes(), 'changeLog' => $this->diff->getChangeLog(), + 'from' => $this->diff->getFrom(), + 'from' => $this->diff->getTo(), 'precheck' => base64_encode($this->precheck) ]; return json_encode($result);
Include source and target version names in packages.
SAM-IT_php-auto-update
train
296e3c650759402bd83aa4c80fd2539585b7cbd5
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -14,14 +14,14 @@ function createCriticalFilename(filename, suffix) { return result; } -function Split(opts) { - var opts = opts || {}, +function CriticalSplit(options) { + var options = options || {}, filenameSuffix = '-critical', filename = ''; - pattern = opts.pattern || /FOLD/; + pattern = options.pattern || /CRITICAL/; - if (typeof opts.suffix !== 'undefined') { + if (typeof options.suffix !== 'undefined') { filenameSuffix = opts.suffix; } @@ -75,4 +75,4 @@ function processRule(criticalCss, rule) { } } -module.exports = postcss.plugin('postcss-split', Split); +module.exports = postcss.plugin('postcss-critical-split', CriticalSplit);
Cleanup: name updates and removal of old code approaches.
mrnocreativity_postcss-critical-split
train
6f320a0feaacb68373d94d5b267bb6fcaf89f3dc
diff --git a/wallet/transaction.go b/wallet/transaction.go index <HASH>..<HASH> 100644 --- a/wallet/transaction.go +++ b/wallet/transaction.go @@ -13,7 +13,6 @@ import ( "github.com/FactomProject/btcutil/base58" "github.com/FactomProject/factom" "github.com/FactomProject/factomd/common/factoid" - "github.com/FactomProject/factomd/common/interfaces" "github.com/FactomProject/factomd/common/primitives" "github.com/FactomProject/goleveldb/leveldb" ) @@ -44,7 +43,7 @@ func (w *Wallet) NewTransaction(name string) error { } t := new(factoid.Transaction) - t.SetTimestamp(*interfaces.NewTimestampNow()) + t.SetTimestamp(primitives.NewTimestampNow()) w.transactions[name] = t return nil }
update for timestamp refactor
FactomProject_factom
train
7f340869df6d38771782738daad8859fe260f959
diff --git a/src/BaseModule.php b/src/BaseModule.php index <HASH>..<HASH> 100644 --- a/src/BaseModule.php +++ b/src/BaseModule.php @@ -5,8 +5,8 @@ namespace Annotate\Modules; use Annotate\Framework\Application\Components\Container; use Annotate\Framework\Utils\Strings; use Annotate\Modules\Application\ModularPresenter; +use Nette\Application\UI\ComponentReflection; use Nette\Application\UI\Presenter; -use Nette\Application\UI\PresenterComponentReflection; use Nette\ComponentModel\IComponent; use Nette\Object; use Nette\Reflection\ClassType; @@ -150,18 +150,16 @@ abstract class BaseModule extends Object implements IModule /** * @return Container */ - public - function getContainer() + public function getContainer() { return $this->presenter->getComponent("container"); } - function tryCall($method, $params) + public function tryCall($method, $params) { - /** @var PresenterComponentReflection $rc */ - $rc = PresenterComponentReflection::from($this); + $rc = new ComponentReflection($this); if ($rc->hasMethod($method)) { $rm = $rc->getMethod($method); if ($rm->isPublic() && !$rm->isAbstract() && !$rm->isStatic()) {
[Modules] Fix compatibility with latest nette/application Create instance of ComponentReflection via constructor instead of using undefined "from" method.
AnnotateFramework_modules
train
d749a1f71824322dd842eb8e31c8db969285836f
diff --git a/cobe/tokenizers.py b/cobe/tokenizers.py index <HASH>..<HASH> 100644 --- a/cobe/tokenizers.py +++ b/cobe/tokenizers.py @@ -115,9 +115,10 @@ class CobeTokenizer: continue if char == "-" or char == "'": - # dash and single quote are part of whatever token - # they're within - continue + # Dash and single quote are part of whatever non-whitespace + # token they're within + if tokentype != "Z": + continue char_tokentype = self._tokentype(char) diff --git a/tests/test_tokenizers.py b/tests/test_tokenizers.py index <HASH>..<HASH> 100644 --- a/tests/test_tokenizers.py +++ b/tests/test_tokenizers.py @@ -138,6 +138,10 @@ class testCobeTokenizer(unittest.TestCase): words = self.tokenizer.split(u"hi, cobe") self.assertEquals(words, ["hi", ",", " ", "cobe"]) + def testSplitDash(self): + words = self.tokenizer.split(u"hi - cobe") + self.assertEquals(words, ["hi", " ", "-", " ", "cobe"]) + def testSplitUrl(self): words = self.tokenizer.split(u"http://www.google.com/") self.assertEquals(words, ["http://www.google.com/"])
Split bare dashes and quotes properly
pteichman_cobe
train
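The cobe fix lets '-' and the apostrophe extend a token only when the token being built is not whitespace, so "hi - cobe" keeps its bare dash as a separate token. A toy splitter showing just that rule (much simpler than cobe's real tokenizer):

def split(text):
    tokens, cur, cur_is_space = [], "", None
    for ch in text:
        is_space = ch.isspace()
        if ch in "-'" and cur and not cur_is_space:
            cur += ch                # inside a word: keep dash/apostrophe
            continue
        if cur and is_space == cur_is_space:
            cur += ch                # same run (word or whitespace)
        else:
            if cur:
                tokens.append(cur)
            cur, cur_is_space = ch, is_space
    if cur:
        tokens.append(cur)
    return tokens

print(split("hi - cobe"))  # ['hi', ' ', '-', ' ', 'cobe']
print(split("it's a-b"))   # ["it's", ' ', 'a-b']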
0a05a9b179c0dd44b25258e26122883c6404f17d
diff --git a/stack-client/src/main/java/com/digitalpetri/opcua/stack/client/handlers/UaTcpClientMessageHandler.java b/stack-client/src/main/java/com/digitalpetri/opcua/stack/client/handlers/UaTcpClientMessageHandler.java index <HASH>..<HASH> 100644 --- a/stack-client/src/main/java/com/digitalpetri/opcua/stack/client/handlers/UaTcpClientMessageHandler.java +++ b/stack-client/src/main/java/com/digitalpetri/opcua/stack/client/handlers/UaTcpClientMessageHandler.java @@ -36,7 +36,6 @@ import com.digitalpetri.opcua.stack.core.channel.MessageAbortedException; import com.digitalpetri.opcua.stack.core.channel.SerializationQueue; import com.digitalpetri.opcua.stack.core.channel.headers.AsymmetricSecurityHeader; import com.digitalpetri.opcua.stack.core.channel.headers.HeaderDecoder; -import com.digitalpetri.opcua.stack.core.channel.headers.SymmetricSecurityHeader; import com.digitalpetri.opcua.stack.core.channel.messages.ErrorMessage; import com.digitalpetri.opcua.stack.core.channel.messages.MessageType; import com.digitalpetri.opcua.stack.core.channel.messages.TcpMessageDecoder; @@ -516,8 +515,6 @@ public class UaTcpClientMessageHandler extends ByteToMessageCodec<UaRequestFutur final List<ByteBuf> buffersToDecode = ImmutableList.copyOf(chunkBuffers); chunkBuffers = new LinkedList<>(); - validateChunkHeaders(buffersToDecode); - serializationQueue.decode((binaryDecoder, chunkDecoder) -> { ByteBuf decodedBuffer = null; @@ -559,33 +556,6 @@ public class UaTcpClientMessageHandler extends ByteToMessageCodec<UaRequestFutur } } - private void validateChunkHeaders(List<ByteBuf> chunkBuffers) throws UaException { - ChannelSecurity channelSecurity = secureChannel.getChannelSecurity(); - long currentTokenId = channelSecurity.getCurrentToken().getTokenId().longValue(); - long previousTokenId = channelSecurity.getPreviousToken() - .map(t -> t.getTokenId().longValue()) - .orElse(-1L); - - for (ByteBuf chunkBuffer : chunkBuffers) { - chunkBuffer.skipBytes(3 + 1 + 4 + 4); // skip messageType, chunkType, messageSize, secureChannelId - - SymmetricSecurityHeader securityHeader = SymmetricSecurityHeader.decode(chunkBuffer); - - if (securityHeader.getTokenId() != currentTokenId) { - if (securityHeader.getTokenId() != previousTokenId) { - String message = String.format( - "received unknown secure channel token. " + - "tokenId=%s, currentTokenId=%s, previousTokenId=%s", - securityHeader.getTokenId(), currentTokenId, previousTokenId); - - throw new UaException(StatusCodes.Bad_SecureChannelTokenUnknown, message); - } - } - - chunkBuffer.readerIndex(0); - } - } - private void onError(ChannelHandlerContext ctx, ByteBuf buffer) { try { ErrorMessage errorMessage = TcpMessageDecoder.decodeError(buffer);
Remove redundant chunk secure header validation Validating chunk headers before decoding was redundant; the headers are validated as each chunk is decoded.
kevinherron_opc-ua-stack
train
9ebaddec83088f72725f01f5c60b7225b5f09cf8
diff --git a/tt/gspreadsheet.py b/tt/gspreadsheet.py index <HASH>..<HASH> 100644 --- a/tt/gspreadsheet.py +++ b/tt/gspreadsheet.py @@ -61,20 +61,21 @@ def PrintFeed(feed): # TODO use collections.MutableMapping as the docs recommend class GDataRow(DictMixin): """A dict-like object that represents a row of a worksheet""" - def __init__(self, entry): + def __init__(self, entry, deferred_save=False): self._entry = entry self._data = dict([(key, entry.custom[key].text) for key in entry.custom]) + self._defer_save = deferred_save + self._changed = False def __getitem__(self, *args): return self._data.__getitem__(*args) def __setitem__(self, key, value): - global gd_client - if gd_client is None: - # TODO raise a better exception - raise Exception("Not Logged In") - self._data[key] = value - return gd_client.UpdateRow(self._entry, self._data) + if self._data.get(key) != value: + self._data[key] = value + self._changed = True + if not self._defer_save: + return self.save() def __delitem__(self, *args): raise NameError("Deleting Values Not Allowed") @@ -86,6 +87,19 @@ class GDataRow(DictMixin): """Get an ordinary dict of the row""" return self._data.copy() + def save(self): + """Save the row back to the spreadsheet""" + # FIXME can only do save once + if not self._changed: + # nothing to save + return + global gd_client + assert gd_client is not None + output = gd_client.UpdateRow(self._entry, self._data) + # reset `_changed` flag + self._changed = False + return output + class GSpreadsheet(object): email = None
implement .save() and allow deferred saves
texastribune_gspreadsheet
train
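The gspreadsheet commit adds a dirty flag plus an optional deferred mode: writes are tracked, writes of an unchanged value are ignored, and save() only hits the backend when something actually changed. A self-contained sketch of the same bookkeeping (the print stands in for the gdata UpdateRow call):

class Row(dict):
    def __init__(self, data, deferred_save=False):
        super().__init__(data)
        self._defer_save = deferred_save
        self._changed = False

    def __setitem__(self, key, value):
        if self.get(key) != value:       # only real changes mark dirty
            super().__setitem__(key, value)
            self._changed = True
            if not self._defer_save:
                self.save()              # immediate mode saves per write

    def save(self):
        if not self._changed:
            return                       # nothing to push upstream
        print("saving", dict(self))      # stand-in for UpdateRow(...)
        self._changed = False

row = Row({"name": "a"}, deferred_save=True)
row["name"] = "b"   # dirty, but no save yet
row["name"] = "b"   # no-op: value unchanged
row.save()          # one save covers the batched edits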
2eb87dfecc3fa8d46e19c658aba0d245185655a9
diff --git a/src/Railt/Reflection/Base/Behavior/BaseTypeIndicator.php b/src/Railt/Reflection/Base/Behavior/BaseTypeIndicator.php index <HASH>..<HASH> 100644 --- a/src/Railt/Reflection/Base/Behavior/BaseTypeIndicator.php +++ b/src/Railt/Reflection/Base/Behavior/BaseTypeIndicator.php @@ -69,4 +69,12 @@ trait BaseTypeIndicator { return $this->resolve()->isList && $this->isNonNullList; } + + /** + * @return bool + */ + private function isNullable(): bool + { + return !($this->isNonNull() || $this->isNonNullList()); + } }
Add private helper method (isNullable)
railt_reflection
train
99b733d44c89d08c32a188db2ce456f49623aa6a
diff --git a/cmd/xl-storage-format-v2.go b/cmd/xl-storage-format-v2.go index <HASH>..<HASH> 100644 --- a/cmd/xl-storage-format-v2.go +++ b/cmd/xl-storage-format-v2.go @@ -708,40 +708,40 @@ func (z xlMetaV2) ToFileInfo(volume, path, versionID string) (fi FileInfo, err e return FileInfo{}, errFileNotFound } - var i = -1 - var version xlMetaV2Version + var foundIndex = -1 -findVersion: - for i, version = range orderedVersions { - switch version.Type { + for i := range orderedVersions { + switch orderedVersions[i].Type { case ObjectType: - if bytes.Equal(version.ObjectV2.VersionID[:], uv[:]) { - fi, err = version.ObjectV2.ToFileInfo(volume, path) - break findVersion + if bytes.Equal(orderedVersions[i].ObjectV2.VersionID[:], uv[:]) { + fi, err = orderedVersions[i].ObjectV2.ToFileInfo(volume, path) + foundIndex = i + break } case LegacyType: - if version.ObjectV1.VersionID == versionID { - fi, err = version.ObjectV1.ToFileInfo(volume, path) - break findVersion + if orderedVersions[i].ObjectV1.VersionID == versionID { + fi, err = orderedVersions[i].ObjectV1.ToFileInfo(volume, path) + foundIndex = i + break } case DeleteType: - if bytes.Equal(version.DeleteMarker.VersionID[:], uv[:]) { - fi, err = version.DeleteMarker.ToFileInfo(volume, path) - break findVersion + if bytes.Equal(orderedVersions[i].DeleteMarker.VersionID[:], uv[:]) { + fi, err = orderedVersions[i].DeleteMarker.ToFileInfo(volume, path) + foundIndex = i + break } } } - if err != nil { return fi, err } - if i >= 0 { + if foundIndex >= 0 { // A version is found, fill dynamic fields - fi.IsLatest = i == 0 + fi.IsLatest = foundIndex == 0 fi.NumVersions = len(z.Versions) - if i > 0 { - fi.SuccessorModTime = getModTimeFromVersion(orderedVersions[i-1]) + if foundIndex > 0 { + fi.SuccessorModTime = getModTimeFromVersion(orderedVersions[foundIndex-1]) } return fi, nil }
fix: deletion of delete marker regression (#<I>) fixes #<I> fixes #<I> fixes #<I>
minio_minio
train
3f350dfeb50657dce1397a225bbac8c7399405d1
diff --git a/lib/paper_trail/has_paper_trail.rb b/lib/paper_trail/has_paper_trail.rb index <HASH>..<HASH> 100644 --- a/lib/paper_trail/has_paper_trail.rb +++ b/lib/paper_trail/has_paper_trail.rb @@ -385,7 +385,7 @@ module PaperTrail } if assoc.options[:polymorphic] - associated_record = send(assoc.name) + associated_record = send(assoc.name) if send(assoc.foreign_type) if associated_record && associated_record.class.paper_trail_enabled_for_model? assoc_version_args.merge!(:foreign_key_id => associated_record.id) end
Fixed error when attempting to version an empty polymorphic relationship. Fixes #<I>
paper-trail-gem_paper_trail
train
419b76045e4040a18ce8d4a26542b2dd117870b6
diff --git a/config/logger.js b/config/logger.js index <HASH>..<HASH> 100644 --- a/config/logger.js +++ b/config/logger.js @@ -1,18 +1,7 @@ 'use strict' -const fs = require('fs') const cluster = require('cluster') -function createDirectory (path) { - try { - fs.mkdirSync(path) - } catch (e) { - if (e.code !== 'EEXIST') { - throw (new Error('Cannot create directory @ ' + path)) - } - } -} - exports['default'] = { logger: (api) => { let logger = {transports: []} @@ -29,21 +18,15 @@ exports['default'] = { } // file logger - const hasLogDirectoryConfigured = (api.config.general.paths.log.length === 1) + logger.transports.push(function (api, winston) { + const logDirectory = api.config.general.paths.log[0] - if (hasLogDirectoryConfigured) { - logger.transports.push(function (api, winston) { - const logDirectory = api.config.general.paths.log[0] - - createDirectory(logDirectory) - - return new (winston.transports.File)({ - filename: api.config.general.paths.log[0] + '/' + api.pids.title + '.log', - level: 'info', - timestamp: function () { return api.id + ' @ ' + new Date().toISOString() } - }) + return new (winston.transports.File)({ + filename: logDirectory + '/' + api.pids.title + '.log', + level: 'info', + timestamp: function () { return api.id + ' @ ' + new Date().toISOString() } }) - } + }) // the maximum length of param to log (we will truncate) logger.maxLogStringLength = 100 @@ -63,23 +46,17 @@ exports.test = { let logger = { transports: [] } // file logger - const hasLogDirectoryConfigured = (api.config.general.paths.log.length === 1) - - if (hasLogDirectoryConfigured) { - logger.transports.push(function (api, winston) { - const logDirectory = api.config.general.paths.log[0] - - createDirectory(logDirectory) - - return new (winston.transports.File)({ - filename: api.config.general.paths.log[0] + '/' + api.pids.title + '.log', - maxsize: 20480, - maxFiles: 1, - level: 'debug', - timestamp: function () { return api.id + ' @ ' + new Date().toISOString() } - }) + logger.transports.push(function (api, winston) { + const logDirectory = api.config.general.paths.log[0] + + return new (winston.transports.File)({ + filename: logDirectory + '/' + api.pids.title + '.log', + maxsize: 20480, + maxFiles: 1, + level: 'debug', + timestamp: function () { return api.id + ' @ ' + new Date().toISOString() } }) - } + }) return logger }
Update to log config based on PR review
actionhero_actionhero
train
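The actionhero change above deletes a hand-rolled `createDirectory` helper whose whole job was "mkdir unless it already exists". For comparison, a Python sketch of both the removed helper and the one-line idiomatic replacement that makes it unnecessary (paths are invented):

```python
import os

# Hand-rolled equivalent of the deleted createDirectory() helper:
def create_directory(path):
    try:
        os.mkdir(path)
    except FileExistsError:
        pass  # mirrors the EEXIST check in the removed JS code

# Idiomatic one-liner that makes the helper unnecessary:
os.makedirs("/tmp/example-logs", exist_ok=True)
create_directory("/tmp/example-logs")  # second call is also a no-op
```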
0b62db7effcfcda0234a4462eff17519639c468a
diff --git a/modules/http/http.go b/modules/http/http.go index <HASH>..<HASH> 100644 --- a/modules/http/http.go +++ b/modules/http/http.go @@ -197,7 +197,7 @@ func (m *Module) Stop() error { // Thread-safe access to the listener object func (m *Module) accessListener() net.Listener { m.listenMu.RLock() - defer m.listenMu.RLock() + defer m.listenMu.RUnlock() return m.listener }
Fix RUnlock in accessListener (#<I>)
uber-go_fx
train
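The fx fix above is a one-character deadlock: deferring `RLock` instead of `RUnlock` leaves the read lock held forever. Python sidesteps manual acquire/release pairing with a context manager; a sketch using a plain `threading.Lock` as a stand-in, since the stdlib has no reader/writer lock:

```python
import threading

_lock = threading.Lock()        # stand-in for Go's sync.RWMutex
_listener = "tcp-listener"      # stand-in for the guarded field

def access_listener():
    # `with` releases on every exit path, which is the job of Go's
    # `defer mu.RUnlock()` once it names the right method.
    with _lock:
        return _listener

print(access_listener())
print(access_listener())  # no deadlock: the lock was actually released
```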
7bdf7a95a53dc58446598b110f054b39517230e0
diff --git a/psiturk/experiment_server_controller.py b/psiturk/experiment_server_controller.py
index <HASH>..<HASH> 100644
--- a/psiturk/experiment_server_controller.py
+++ b/psiturk/experiment_server_controller.py
@@ -11,10 +11,19 @@ import webbrowser
 import subprocess
 import sys
 import os
+import logging
 from builtins import object
 from future import standard_library
 standard_library.install_aliases()
 
+stream_handler = logging.StreamHandler(sys.stdout)
+stream_handler.setLevel(logging.DEBUG) # TODO: let this be configurable
+stream_formatter = logging.Formatter('%(message)s')
+stream_handler.setFormatter(stream_formatter)
+logger = logging.getLogger(__name__)
+logger.addHandler(stream_handler)
+logger.setLevel(logging.INFO)
+logger.debug('Logging set up.')
 
 #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 # Supporting functions
@@ -28,7 +37,8 @@ def is_port_available(ip, port):
         s.shutdown(2)
         return False
     except socket.timeout:
-        print("*** Failed to test port availability. Check that host\nis set properly in config.txt")
+        logger.error("*** Failed to test port availability. "
+                     "Check that host is set properly in config.txt")
         return True
     except socket.error as e:
         return True
@@ -117,7 +127,7 @@ class ExperimentServerController(object):
         self.startup()
 
     def on_terminate(self, proc: psutil.Process):
-        print("process {} terminated with exit code {}".format(
+        logger.debug("process {} terminated with exit code {}".format(
             proc, proc.returncode))
 
     def kill_process_tree(self, proc: psutil.Process):
@@ -139,15 +149,15 @@ class ExperimentServerController(object):
             ['pid', 'cmdline', 'exe', 'name'])
             if proc_hash in str(p.info) and 'master' in str(p.info)]
         if len(psiturk_master_procs) < 1:
-            print('No active server process found.')
+            logger.warning('No active server process found.')
            self.server_running = False
             return
         for p in psiturk_master_procs:
-            print('Shutting down experiment server at pid %s ... ' % p.info['pid'])
+            logger.info('Shutting down experiment server at pid %s ... ' % p.info['pid'])
             try:
                 self.kill_process_tree(p)
             except psutil.NoSuchProcess as e:
-                print('Attempt to shut down PID {} failed with exception {}'.format(
+                logger.error('Attempt to shut down PID {} failed with exception {}'.format(
                     p.as_dict['pid'], e
                 ))
                 # NoSuchProcess exceptions imply server is not running, so seems safe.
@@ -192,15 +202,14 @@ class ExperimentServerController(object):
             )
         server_status = self.is_server_running()
         if server_status == 'no':
-            #print "Running experiment server with command:", server_command
             subprocess.Popen(server_command, shell=True, close_fds=True)
-            print("Experiment server launching...")
+            logger.info("Experiment server launching...")
             self.server_running = True
         elif server_status == 'maybe':
-            print("Error: Not sure what to tell you...")
+            logger.error("Error: Not sure what to tell you...")
         elif server_status == 'yes':
-            print("Experiment server may be already running...")
+            logger.warning("Experiment server may be already running...")
         elif server_status == 'blocked':
-            print(
+            logger.warning(
                 "Another process is running on the desired port. Try using a different port number.")
             time.sleep(1.2)  # Allow CLI to catch up.
Replace calls to print with calls to a module-level logger
NYUCCL_psiTurk
train
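For reference, a condensed, runnable version of the module-level logger wiring the psiturk commit above introduces (handler, then formatter, then logger), with the handler object explicitly passed to `addHandler`:

```python
import logging
import sys

stream_handler = logging.StreamHandler(sys.stdout)
stream_handler.setLevel(logging.DEBUG)
stream_handler.setFormatter(logging.Formatter('%(message)s'))

logger = logging.getLogger(__name__)
logger.addHandler(stream_handler)  # the handler object must be passed in
logger.setLevel(logging.INFO)

logger.info("Experiment server launching...")   # printed
logger.debug("Logging set up.")                 # dropped: logger level is INFO
```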
b221ed52125215be8e05f203ffc081facf4fa9bf
diff --git a/db/schema.rb b/db/schema.rb index <HASH>..<HASH> 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -11,7 +11,7 @@ # # It's strongly recommended that you check this file into your version control system. -ActiveRecord::Schema.define(version: 20160809222541) do +ActiveRecord::Schema.define(version: 20160810160258) do # These are extensions that must be enabled in order to support this database enable_extension "plpgsql" @@ -89,21 +89,21 @@ ActiveRecord::Schema.define(version: 20160809222541) do t.datetime "expired_at" t.integer "creator_id" t.integer "content_type_id" - t.datetime "created_at", null: false - t.datetime "updated_at", null: false + t.datetime "created_at", null: false + t.datetime "updated_at", null: false t.datetime "deleted_at" - t.boolean "is_published", default: false + t.integer "last_updated_by_id" t.integer "updated_by_id" end add_index "content_items", ["deleted_at"], name: "index_content_items_on_deleted_at", using: :btree create_table "content_types", force: :cascade do |t| - t.string "name", null: false + t.string "name", null: false t.text "description" - t.integer "creator_id", null: false - t.datetime "created_at", null: false - t.datetime "updated_at", null: false + t.integer "creator_id", null: false + t.datetime "created_at", null: false + t.datetime "updated_at", null: false t.datetime "deleted_at" t.integer "contract_id" end @@ -341,13 +341,12 @@ ActiveRecord::Schema.define(version: 20160809222541) do create_table "roles", force: :cascade do |t| t.string "name" - t.integer "resource_id" + t.datetime "created_at" + t.datetime "updated_at" t.string "resource_type" - t.datetime "created_at", null: false - t.datetime "updated_at", null: false + t.string "resource_id" end - add_index "roles", ["name", "resource_type", "resource_id"], name: "index_roles_on_name_and_resource_type_and_resource_id", using: :btree add_index "roles", ["name"], name: "index_roles_on_name", using: :btree create_table "snippets", force: :cascade do |t| @@ -431,10 +430,8 @@ ActiveRecord::Schema.define(version: 20160809222541) do add_index "users", ["tenant_id"], name: "index_users_on_tenant_id", using: :btree create_table "users_roles", id: false, force: :cascade do |t| - t.integer "user_id" - t.integer "role_id" - t.datetime "created_at", null: false - t.datetime "updated_at", null: false + t.integer "user_id" + t.integer "role_id" end add_index "users_roles", ["user_id", "role_id"], name: "index_users_roles_on_user_id_and_role_id", using: :btree @@ -458,6 +455,7 @@ ActiveRecord::Schema.define(version: 20160809222541) do t.boolean "noodp", default: false t.boolean "noarchive", default: false t.boolean "noimageindex", default: false + t.text "seo_keywords" end add_index "webpages", ["user_id"], name: "index_webpages_on_user_id", using: :btree
Run Migrations - Up to Date
cortex-cms_cortex
train
76afb89d65aa4a6412a0ab955618e08f5de7e672
diff --git a/guard-kitchen.gemspec b/guard-kitchen.gemspec index <HASH>..<HASH> 100644 --- a/guard-kitchen.gemspec +++ b/guard-kitchen.gemspec @@ -18,7 +18,7 @@ Gem::Specification.new do |spec| spec.test_files = spec.files.grep(%r{^(test|spec|features)/}) spec.require_paths = ["lib"] - spec.add_dependency "guard" + spec.add_dependency "guard", "> 2.0.0" spec.add_dependency "mixlib-shellout" spec.add_development_dependency "bundler", "~> 1.3" spec.add_development_dependency "rake" diff --git a/lib/guard/kitchen.rb b/lib/guard/kitchen.rb index <HASH>..<HASH> 100644 --- a/lib/guard/kitchen.rb +++ b/lib/guard/kitchen.rb @@ -15,11 +15,17 @@ # require "guard" -require "guard/guard" +require "guard/plugin" require "mixlib/shellout" module Guard - class Kitchen < Guard + class Kitchen < Plugin + def initialize(options = {}) + super + # you can still access the watchers with options[:watchers] + # rest of the implementation... + end + def start ::Guard::UI.info("Guard::Kitchen is starting") cmd = Mixlib::ShellOut.new("kitchen create", :timeout => 10800) diff --git a/lib/guard/kitchen/version.rb b/lib/guard/kitchen/version.rb index <HASH>..<HASH> 100644 --- a/lib/guard/kitchen/version.rb +++ b/lib/guard/kitchen/version.rb @@ -1,8 +1,8 @@ require 'guard' -require 'guard/guard' +require 'guard/plugin' module Guard - class Kitchen < Guard - VERSION = "0.0.3" + class Kitchen < Plugin + VERSION = "0.0.4" end end diff --git a/spec/unit/guard/kitchen_spec.rb b/spec/unit/guard/kitchen_spec.rb index <HASH>..<HASH> 100644 --- a/spec/unit/guard/kitchen_spec.rb +++ b/spec/unit/guard/kitchen_spec.rb @@ -1,6 +1,21 @@ require 'spec_helper' require 'guard/kitchen' +# Breaking change in guard 2.x for RSPEC caused by the fact +# we didn't initialize guard's internals before Guard::Kitchen.new() +# See https://github.com/guard/guard/issues/693 +# +# The recommended way fix it is to install guard-compat and use that +# instead of real guard for testing. That adds a development dependency, +# so for now just monkey patch to stub out Guard::Plugin#initialize +module Guard + class Plugin + def initialize(options={}) + end + end +end +# End of Monkey Patch + describe "Guard::Kitchen" do let(:kitchen) do Guard::Kitchen.new
Compatibility with guard v2.x - Extend Guard::Plugin instead of Guard::Guard - Fix RSpecs to avoid problem with uninitialised guard internals Guard <I>.x deprecated the use of Guard::Guard over a year ago. Use of guard-kitchen generates deprecation warnings. Guard <I>.x removed guard/guard.rb, breaking anything that extends Guard::Guard (such as guard-kitchen).
test-kitchen_guard-kitchen
train
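The guard-kitchen spec above works around the Guard 2.x breakage by monkey-patching the base class initializer in tests. The closest Python idiom is `unittest.mock.patch.object`, which keeps the patch scoped and reversible; the classes below are toy stand-ins, not real guard APIs:

```python
from unittest import mock

class Plugin:                      # toy stand-in for Guard::Plugin
    def __init__(self, options=None):
        raise RuntimeError("needs framework internals initialised")

class Kitchen(Plugin):             # toy stand-in for Guard::Kitchen
    pass

# Stub out the heavy base initializer for the duration of the test,
# as the spec's monkey patch does.
with mock.patch.object(Plugin, "__init__", lambda self, options=None: None):
    kitchen = Kitchen()
    print(type(kitchen).__name__)  # -> Kitchen

# Outside the `with` block the original initializer is back in force.
```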
385733b4ab4028b1c83cd1f8c1d609a9278b0df0
diff --git a/pyvlx/api/frame_creation.py b/pyvlx/api/frame_creation.py index <HASH>..<HASH> 100644 --- a/pyvlx/api/frame_creation.py +++ b/pyvlx/api/frame_creation.py @@ -177,7 +177,7 @@ def create_frame(command): return FrameNodeStatePositionChangedNotification() if command == Command.GW_LEAVE_LEARN_STATE_CFM: return FrameLeaveLearnStateConfirmation() - if command == command.GW_LEAVE_LEARN_STATE_REQ: + if command == Command.GW_LEAVE_LEARN_STATE_REQ: return FrameLeaveLearnStateRequest() return None
Access enum members from the Enum class instead of from an instance, which is deprecated since Python <I>.
Julius2342_pyvlx
train
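The pyvlx fix above swaps instance access for class access on an `Enum`. A runnable Python sketch with invented command values (the real GW_* codes may differ):

```python
import enum

class Command(enum.Enum):          # invented values, not the real GW_* codes
    GW_LEAVE_LEARN_STATE_CFM = 15
    GW_LEAVE_LEARN_STATE_REQ = 14

command = Command.GW_LEAVE_LEARN_STATE_CFM

# Correct: look members up on the class.
print(command == Command.GW_LEAVE_LEARN_STATE_REQ)  # -> False

# The old code reached one member through another
# (command.GW_LEAVE_LEARN_STATE_REQ), a pattern newer CPython releases
# deprecate -- hence the one-character fix above.
```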
cfb10e39473036801e3ee2c6167b5afdc83c566d
diff --git a/hazelcast-jet-core/src/main/java/com/hazelcast/jet/Jet.java b/hazelcast-jet-core/src/main/java/com/hazelcast/jet/Jet.java index <HASH>..<HASH> 100644 --- a/hazelcast-jet-core/src/main/java/com/hazelcast/jet/Jet.java +++ b/hazelcast-jet-core/src/main/java/com/hazelcast/jet/Jet.java @@ -20,9 +20,7 @@ import com.hazelcast.client.HazelcastClient; import com.hazelcast.client.config.ClientConfig; import com.hazelcast.client.impl.clientside.HazelcastClientProxy; import com.hazelcast.config.Config; -import com.hazelcast.config.HotRestartConfig; import com.hazelcast.config.MapConfig; -import com.hazelcast.config.MergePolicyConfig; import com.hazelcast.config.ServiceConfig; import com.hazelcast.config.matcher.MatchingPointConfigPatternMatcher; import com.hazelcast.core.Hazelcast; @@ -144,17 +142,17 @@ public final class Jet { .setClassName(JetMetricsService.class.getName()) .setConfigObject(jetConfig.getMetricsConfig())); + boolean hotRestartEnabled = hzConfig.getHotRestartPersistenceConfig().isEnabled(); MapConfig metadataMapConfig = new MapConfig(INTERNAL_JET_OBJECTS_PREFIX + '*') .setBackupCount(jetConfig.getInstanceConfig().getBackupCount()) - .setStatisticsEnabled(false) - .setMergePolicyConfig( - new MergePolicyConfig().setPolicy(IgnoreMergingEntryMapMergePolicy.class.getName())) - .setHotRestartConfig(new HotRestartConfig().setEnabled(true)); + .setStatisticsEnabled(false); + metadataMapConfig.getMergePolicyConfig().setPolicy(IgnoreMergingEntryMapMergePolicy.class.getName()); + metadataMapConfig.getHotRestartConfig().setEnabled(hotRestartEnabled); MapConfig resultsMapConfig = new MapConfig(metadataMapConfig) .setName(JOB_RESULTS_MAP_NAME) .setTimeToLiveSeconds(properties.getSeconds(JOB_RESULTS_TTL_SECONDS)); - resultsMapConfig.getHotRestartConfig().setEnabled(true); + resultsMapConfig.getHotRestartConfig().setEnabled(hotRestartEnabled); hzConfig.addMapConfig(metadataMapConfig) .addMapConfig(resultsMapConfig);
Align Hot Restart enablement with global setting (#<I>) It is an error to enable Hot Restart on a data structure when the feature of Hot Restart Persistence is disabled.
hazelcast_hazelcast
train
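The hazelcast fix above derives the per-map Hot Restart flag from the global persistence setting instead of hardcoding `true`. A sketch of the same propagate-the-global-toggle pattern, with invented config classes rather than Hazelcast's API:

```python
class GlobalConfig:                # invented, not Hazelcast's API
    def __init__(self, hot_restart_enabled):
        self.hot_restart_enabled = hot_restart_enabled

def build_map_config(global_cfg, name):
    return {
        "name": name,
        # Before the fix this was hardcoded True, an error whenever the
        # feature is globally disabled.
        "hot_restart": global_cfg.hot_restart_enabled,
    }

cfg = GlobalConfig(hot_restart_enabled=False)
print(build_map_config(cfg, "job-results"))   # hot_restart follows the flag
print(build_map_config(cfg, "jet-metadata"))
```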
68febe8361bd542e6494d376f25927168e45ef6b
diff --git a/course/report/stats/report.php b/course/report/stats/report.php index <HASH>..<HASH> 100644 --- a/course/report/stats/report.php +++ b/course/report/stats/report.php @@ -11,14 +11,17 @@ } } - - echo '<form action="index.php" method="post">'."\n" - .'<input type="hidden" name="mode" value="'.$mode.'" />'."\n"; + // Ugly hack. This file may be included from admin or course reports. + // For admin reports, $adminroot is set. We basically use it to decide + // what kind of footer we need to print. + if (!isset($adminroot)) { + $adminroot = false; + } $reportoptions = stats_get_report_options($course->id, $mode); $timeoptions = report_stats_timeoptions($mode); if (empty($timeoptions)) { - error(get_string('nostatstodisplay'), $CFG->wwwroot.'/course/view.php?id='.$course->id); + error(get_string('nostatstodisplay'), $CFG->wwwroot.'/course/view.php?id='.$course->id, $adminroot); } $table->width = '*'; @@ -36,7 +39,7 @@ .' ORDER BY r.sortorder'; if (!$us = get_records_sql($sql)) { - error('Cannot enter detailed view: No users found for this course.'); + error('Cannot enter detailed view: No users found for this course.', $adminroot); } foreach ($us as $u) { @@ -62,13 +65,18 @@ '<input type="submit" value="'.get_string('view').'" />') ; } + echo '<form action="index.php" method="post">'."\n" + .'<fieldset class="invisiblefieldset">'."\n" + .'<input type="hidden" name="mode" value="'.$mode.'" />'."\n"; print_table($table); + + echo '</fieldset>'; echo '</form>'; if (!empty($report) && !empty($time)) { if ($report == STATS_REPORT_LOGINS && $course->id != SITEID) { - error("This type of report is only available for the site course"); + error('This type of report is only available for the site course', $adminroot); } $timesql = $param = stats_get_parameters($time,$report,$course->id,$mode); @@ -189,4 +197,4 @@ } } -?> +?> \ No newline at end of file
Fixes for MDL-<I>.
moodle_moodle
train
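The moodle patch above defaults `$adminroot` before use because the report file is included from two contexts and only one defines it. A loose Python analogue of that default-then-pass pattern, with illustrative names:

```python
def render_error(message, adminroot=False):
    # Pick the right footer for the context, as moodle's error() does
    # once it is told whether this is an admin report.
    footer = "admin footer" if adminroot else "course footer"
    return f"{message} [{footer}]"

caller_vars = {}                                  # pretend: set by the includer
adminroot = caller_vars.get("adminroot", False)   # the isset() guard
print(render_error("No stats to display", adminroot))
```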
cbbf3068e42bf5acbc9b0546e4a8e682cb02c407
diff --git a/lib/Backend/EzPlatformBackend.php b/lib/Backend/EzPlatformBackend.php index <HASH>..<HASH> 100644 --- a/lib/Backend/EzPlatformBackend.php +++ b/lib/Backend/EzPlatformBackend.php @@ -282,7 +282,7 @@ final class EzPlatformBackend implements BackendInterface $searchLocation = $searchQuery->getLocation(); if ($searchLocation instanceof LocationInterface) { - $location = $this->locationService->loadLocation($searchLocation->getLocationId()); + $location = $this->locationService->loadLocation((int) $searchLocation->getLocationId()); $criteria[] = new Criterion\Subtree($location->pathString); $criteria[] = new Criterion\LogicalNot(new Criterion\LocationId($location->id)); @@ -316,7 +316,7 @@ final class EzPlatformBackend implements BackendInterface $searchLocation = $searchQuery->getLocation(); if ($searchLocation instanceof LocationInterface) { - $location = $this->locationService->loadLocation($searchLocation->getLocationId()); + $location = $this->locationService->loadLocation((int) $searchLocation->getLocationId()); $criteria[] = new Criterion\Subtree($location->pathString); $criteria[] = new Criterion\LogicalNot(new Criterion\LocationId($location->id));
Cast location ID to int before using it
netgen-layouts_content-browser-ezplatform
train
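The content-browser fix above casts the ID once, right at the boundary of the strictly typed `loadLocation` call. The same normalise-before-the-call pattern in Python, with a stand-in loader rather than the eZ Platform service:

```python
def load_location(location_id: int):   # stand-in for the eZ Platform service
    if not isinstance(location_id, int):
        raise TypeError("location_id must be an int")
    return {"id": location_id, "path_string": f"/1/2/{location_id}/"}

raw_id = "42"                          # IDs often arrive as strings
print(load_location(int(raw_id)))      # cast once, right before the call
```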
3094da2ac205041a6f97ef528b19baf3a9562eb2
diff --git a/src/SchematicEntity.php b/src/SchematicEntity.php index <HASH>..<HASH> 100644 --- a/src/SchematicEntity.php +++ b/src/SchematicEntity.php @@ -35,14 +35,14 @@ class SchematicEntity extends AbstractEntity } if (!empty($this->schema[ModelSchema::FILLABLE])) { - return in_array($field, $this->schema[ModelSchema::FILLABLE]); + return in_array($field, $this->schema[ModelSchema::FILLABLE], true); } if (!empty($this->schema[ModelSchema::SECURED]) && $this->schema[ModelSchema::SECURED] === '*') { return false; } - return !in_array($field, $this->schema[ModelSchema::SECURED]); + return !in_array($field, $this->schema[ModelSchema::SECURED], true); } /**
Fixes some CS issues from PR #<I>
spiral_models
train
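The spiral fix above passes `in_array`'s strict flag so string/integer juggling can't produce false positives. Python's `in` compares with `==`, which is stricter than PHP's default but still has cross-type surprises; a sketch of a strict membership helper:

```python
fields = ["name", "email", 1]

print("name" in fields)  # -> True
print(True in fields)    # -> True! Python's `in` uses ==, and True == 1

def in_strict(needle, haystack):
    # Match PHP's in_array($needle, $haystack, true): same type AND value.
    return any(type(needle) is type(item) and needle == item
               for item in haystack)

print(in_strict(True, fields))  # -> False
print(in_strict(1, fields))     # -> True
```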
77937f6a794fdfe1f264a11d7d08e11705fbe062
diff --git a/master/buildbot/test/integration/test_trigger.py b/master/buildbot/test/integration/test_trigger.py index <HASH>..<HASH> 100644 --- a/master/buildbot/test/integration/test_trigger.py +++ b/master/buildbot/test/integration/test_trigger.py @@ -32,7 +32,7 @@ r"""\*\*\* BUILD 1 \*\*\* ==> finished \(success\) log:stdio \({loglines}\) \*\*\* STEP trigger \*\*\* ==> triggered trigsched \(success\) url:trigsched #2 \(http://localhost:8080/#buildrequests/2\) - url:success: build #1 \(http://localhost:8080/#builders/2/builds/1\) + url:success: build #1 \(http://localhost:8080/#builders/(1|2)/builds/1\) \*\*\* STEP shell_1 \*\*\* ==> 'echo world' \(success\) log:stdio \({loglines}\) \*\*\* BUILD 2 \*\*\* ==> finished \(success\)
Sometimes the build completes on builder 1, and sometimes on builder 2.
buildbot_buildbot
train
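The buildbot fix above loosens the expected URL so either builder ID passes. A runnable check of the same alternation with Python's `re` (host and paths copied from the test, input lines synthesised):

```python
import re

pattern = (r"url:success: build #1 "
           r"\(http://localhost:8080/#builders/(1|2)/builds/1\)")

for builder in (1, 2):
    line = f"url:success: build #1 (http://localhost:8080/#builders/{builder}/builds/1)"
    assert re.search(pattern, line)

print("either builder id matches")
```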