Columns:
hash: string, lengths 40 to 40
diff: string, lengths 131 to 114k
message: string, lengths 7 to 980
project: string, lengths 5 to 67
split: string, 1 class
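Each record below follows the column layout above: a 40-character commit hash, the commit's diff flattened onto a single line, the commit message, the source project, and the split label (always "train" here). As a minimal sketch of how such records could be loaded and inspected, assuming the dump is published as a Hugging Face dataset; the dataset identifier used below is a hypothetical placeholder, not the real name:

```python
# Minimal sketch: load the commit/diff/message records and inspect one entry.
# Assumes the dump is available as a Hugging Face dataset; the identifier
# "example/commit-diff-messages" is a hypothetical placeholder.
from datasets import load_dataset

ds = load_dataset("example/commit-diff-messages", split="train")

row = ds[0]
print(row["hash"])         # 40-character commit SHA
print(row["project"])      # e.g. "selwin_python-user-agents"
print(row["message"])      # target commit message
print(row["diff"][:200])   # flattened unified diff, truncated for display

# Narrow the records to a single project before further processing.
petl_rows = ds.filter(lambda r: r["project"] == "petl-developers_petl")
print(len(petl_rows))
```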
fc88b29aa8b57fbdc921bf7d092a1aac39de1f6f
diff --git a/README.rst b/README.rst index <HASH>..<HASH> 100644 --- a/README.rst +++ b/README.rst @@ -142,7 +142,7 @@ Changelog Version 0.3.0 ------------- -* Added __str__ method for convenience of pretty string +* Added __str__/__unicode__ methods for convenience of pretty string Version 0.2.0 ------------- diff --git a/user_agents/parsers.py b/user_agents/parsers.py index <HASH>..<HASH> 100644 --- a/user_agents/parsers.py +++ b/user_agents/parsers.py @@ -111,7 +111,10 @@ class UserAgent(object): os = ("%s %s" % (self.os.family, self.os.version_string)).strip() browser = ("%s %s" % (self.browser.family, self.browser.version_string)).strip() return " / ".join([device, os, browser]) - + + def __unicode__(self): + return unicode(str(self)) + def _is_android_tablet(self): # Newer Android tablets don't have "Mobile" in their user agent string, # older ones like Galaxy Tab still have "Mobile" though they're not diff --git a/user_agents/tests.py b/user_agents/tests.py index <HASH>..<HASH> 100644 --- a/user_agents/tests.py +++ b/user_agents/tests.py @@ -200,3 +200,8 @@ class UserAgentsTest(unittest.TestCase): self.assertEqual(str(google_bot_ua), "Spider / Other / Googlebot 2.1") self.assertEqual(str(nokia_n97_ua), "Nokia N97 / Symbian OS 9.4 / Nokia Browser 7.1.12344") self.assertEqual(str(android_firefox_aurora_ua), "Other / Android / Firefox Mobile 27") + + def test_unicode_strings(self): + unicode_ua_str = unicode(iphone_ua) + self.assertEqual(unicode_ua_str, u"iPhone / iOS 5.1 / Mobile Safari 5.1") + self.assertTrue(isinstance(unicode_ua_str, unicode))
Added unicode support, version bump
selwin_python-user-agents
train
7a1b783a10066849bcb14ae60a56d61377a16cc7
diff --git a/petl/io/avro.py b/petl/io/avro.py index <HASH>..<HASH> 100644 --- a/petl/io/avro.py +++ b/petl/io/avro.py @@ -7,7 +7,7 @@ from collections import OrderedDict from datetime import datetime, date, time from decimal import Decimal -from petl.compat import izip, izip_longest, text_type, string_types, PY3 +from petl.compat import izip_longest, text_type, string_types, PY3 from petl.io.sources import read_source_from_arg, write_source_from_arg from petl.transform.headers import skip, setheader from petl.util.base import Table, dicts, fieldnames, iterpeek, wrap @@ -15,7 +15,7 @@ from petl.util.base import Table, dicts, fieldnames, iterpeek, wrap # region API -def fromavro(source, limit=None, skip=0, **avro_args): +def fromavro(source, limit=None, skips=0, **avro_args): """Extract a table from the records of a avro file. The `source` argument (string or file-like or fastavro.reader) can either @@ -92,7 +92,7 @@ def fromavro(source, limit=None, skip=0, **avro_args): source2 = read_source_from_arg(source) return AvroView(source=source2, limit=limit, - skip=skip, + skips=skips, **avro_args) @@ -242,10 +242,10 @@ def appendavro(table, target, schema=None, sample=9, **avro_args): class AvroView(Table): '''Read rows from avro file with their types and logical types''' - def __init__(self, source, limit, skip, **avro_args): + def __init__(self, source, limit, skips, **avro_args): self.source = source self.limit = limit - self.skip = skip + self.skip = skips self.avro_args = avro_args self.avro_schema = None @@ -457,7 +457,6 @@ def _get_definition_from_record(prop, val, fprev, dprev, fill_missing): fprev = OrderedDict() if dprev is None: dprev = OrderedDict() - props = list(val.keys()) row = list(val.values()) @@ -475,9 +474,9 @@ def _get_definition_from_record(prop, val, fprev, dprev, fill_missing): def _get_precision_from_decimal(curr, val, prev): if val is None: - prec = scale = bytes_req = num = 0 + prec = scale = 0 else: - prec, scale, bytes_req, num = precision_and_scale(val) + prec, scale, _, _ = precision_and_scale(val) if prev is not None: # get the greatests precision and scale of the sample prec0, scale0 = prev.get('precision'), prev.get('scale') @@ -508,7 +507,7 @@ def precision_and_scale(numeric_value): def _fix_missing_headers(table, schema): '''add missing columns headers from schema''' - if schema is None or not 'fields' in schema: + if schema is None or 'fields' not in schema: return table # table2: try not advance iterators sample, table2 = iterpeek(table, 2) @@ -541,6 +540,7 @@ def _get_schema_header_names(schema): header = [field.get('name') for field in fields] return header + def _raise_error(details): if PY3: raise ValueError(details).with_traceback(sys.exc_info()[2]) diff --git a/petl/test/io/test_avro.py b/petl/test/io/test_avro.py index <HASH>..<HASH> 100644 --- a/petl/test/io/test_avro.py +++ b/petl/test/io/test_avro.py @@ -4,7 +4,7 @@ from __future__ import absolute_import, print_function, division import sys import math -from datetime import datetime, date, time +from datetime import datetime, date from decimal import Decimal from tempfile import NamedTemporaryFile @@ -12,7 +12,7 @@ from nose.tools import eq_ from petl.compat import izip_longest, PY3 from petl.transform.basics import cat -from petl.util.base import dicts, records +from petl.util.base import dicts from petl.util.vis import look from petl.io.avro import fromavro, toavro, appendavro @@ -105,7 +105,7 @@ else: wrong_schema = dict(schema0) schema_fields = wrong_schema['fields'] for field in 
schema_fields: - field['type'] = ['null', 'string'] + field['type'] = ['null', 'string'] try: _write_temp_avro_file(table1, wrong_schema) except ValueError: @@ -196,7 +196,6 @@ else: def _decs(float_value, rounding=12): return Decimal(str(round(float_value, rounding))) - def _utc(year, month, day, hour=0, minute=0, second=0, microsecond=0): u = datetime(year, month, day, hour, minute, second, microsecond) if PY3:
avro: fix some lints
petl-developers_petl
train
2d24de5aa8b70da6b0dd84b7f01f7acce2d51174
diff --git a/lib/ReactViews/ObserveModelMixin.js b/lib/ReactViews/ObserveModelMixin.js index <HASH>..<HASH> 100644 --- a/lib/ReactViews/ObserveModelMixin.js +++ b/lib/ReactViews/ObserveModelMixin.js @@ -6,7 +6,7 @@ import knockout from 'terriajs-cesium/Source/ThirdParty/knockout'; const ObserveModelMixin = { componentWillMount() { - this.__observeModelChangeSubscription = undefined; + this.__observeModelChangeSubscriptions = undefined; const originalRender = this.render; @@ -35,7 +35,12 @@ const ObserveModelMixin = { } }); - that.__observeModelChangeSubscription = []; + that.__observeModelChangeSubscriptions = []; + + function disposeAndForceUpdate() { + disposeSubscription(that); + that.forceUpdate(); + } // The computed observable should also depend on anything in this component's props // with a __knockoutSubscribable property. This property is added to arrays @@ -43,18 +48,13 @@ const ObserveModelMixin = { for (const prop in that.props) { if (that.props.hasOwnProperty(prop)) { if (defined(that.props[prop]) && defined(that.props[prop].__knockoutSubscribable)) { - that.__observeModelChangeSubscription.push(that.props[prop].__knockoutSubscribable.subscribe(function() { - disposeSubscription(that); - that.forceUpdate(); - })); + + that.__observeModelChangeSubscriptions.push(that.props[prop].__knockoutSubscribable.subscribe(disposeAndForceUpdate)); } } } - that.__observeModelChangeSubscription.push(computed.subscribe(function() { - disposeSubscription(that); - that.forceUpdate(); - })); + that.__observeModelChangeSubscriptions.push(computed.subscribe(disposeAndForceUpdate)); return computed(); }); @@ -71,11 +71,11 @@ const ObserveModelMixin = { }; function disposeSubscription(component) { - if (defined(component.__observeModelChangeSubscription)) { - for (let i = 0; i < component.__observeModelChangeSubscription.length; ++i) { - component.__observeModelChangeSubscription[i].dispose(); + if (defined(component.__observeModelChangeSubscriptions)) { + for (let i = 0; i < component.__observeModelChangeSubscriptions.length; ++i) { + component.__observeModelChangeSubscriptions[i].dispose(); } - component.__observeModelChangeSubscription = undefined; + component.__observeModelChangeSubscriptions = undefined; } }
Cleaner observable array tracking.
TerriaJS_terriajs
train
8d5aac92cc9ee1df8f598db4cf13bd0f2ffd1d30
diff --git a/CHANGELOG.rst b/CHANGELOG.rst index <HASH>..<HASH> 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,11 @@ Changelog ========= +0.6.1 (unreleased) +****************** + +* Fix compatibility with marshmallow-sqlalchemy>=0.4.0 (:issue:`25`). Thanks :user:`svenstaro` for reporting. + 0.6.0 (2015-05-02) ****************** diff --git a/dev-requirements.txt b/dev-requirements.txt index <HASH>..<HASH> 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -9,4 +9,4 @@ flake8==2.4.0 # Soft requirements flask-sqlalchemy -marshmallow-sqlalchemy +marshmallow-sqlalchemy>=0.4.0 diff --git a/flask_marshmallow/__init__.py b/flask_marshmallow/__init__.py index <HASH>..<HASH> 100755 --- a/flask_marshmallow/__init__.py +++ b/flask_marshmallow/__init__.py @@ -35,7 +35,7 @@ else: else: has_sqla = True -__version__ = '0.6.0' +__version__ = '0.6.1dev' __author__ = 'Steven Loria' __license__ = 'MIT' diff --git a/flask_marshmallow/sqla.py b/flask_marshmallow/sqla.py index <HASH>..<HASH> 100644 --- a/flask_marshmallow/sqla.py +++ b/flask_marshmallow/sqla.py @@ -18,7 +18,7 @@ class DummySession(object): """Placeholder session object.""" pass -class SchemaOpts(msqla.SchemaOpts): +class SchemaOpts(msqla.ModelSchemaOpts): """Schema options for `ModelSchema <flask_marshmallow.sqla.ModelSchema>`. Same as `marshmallow_sqlalchemy.SchemaOpts`, except that we add a placeholder `DummySession` if ``sqla_session`` is not defined on diff --git a/test_flask_marshmallow.py b/test_flask_marshmallow.py index <HASH>..<HASH> 100755 --- a/test_flask_marshmallow.py +++ b/test_flask_marshmallow.py @@ -380,14 +380,12 @@ class TestSQLAlchemy: book_result = book_schema.dump(book) book_result.data['author'] = book.url deserialized = book_schema.load(book_result.data) - assert deserialized.data.author is None assert 'expected "author"' in deserialized.errors['author'][0] # Deserialization fails on bad URL key book_result = book_schema.dump(book) book_schema.fields['author'].url_key = 'pk' deserialized = book_schema.load(book_result.data) - assert deserialized.data.author is None assert 'URL pattern "pk" not found' in deserialized.errors['author'][0] def test_hyperlink_related_field_external(self, extma, models, db, extapp):
Fix compatibility with marshmallow-sqlalchemy>=<I> closes #<I>
marshmallow-code_flask-marshmallow
train
f92e6312779886c697da135fedf38e1fac7f1227
diff --git a/keras_text/models/sentence_model.py b/keras_text/models/sentence_model.py index <HASH>..<HASH> 100644 --- a/keras_text/models/sentence_model.py +++ b/keras_text/models/sentence_model.py @@ -77,7 +77,7 @@ class SentenceModelFactory(object): embedding_layer = Embedding(len(self.token_index), self.embedding_dims, input_length=self.max_tokens, - mask_zero=True, + mask_zero=token_encoder_model.allows_dynamic_length(), trainable=trainable_embeddings) else: embedding_layer = Embedding(len(self.token_index), @@ -85,7 +85,7 @@ class SentenceModelFactory(object): weights=[build_embedding_weights( self.token_index, self.embeddings_index)], input_length=self.max_tokens, - mask_zero=True, + mask_zero=token_encoder_model.allows_dynamic_length(), trainable=trainable_embeddings) word_input = Input(shape=(self.max_tokens,), dtype='int32')
only mask for 0 if possible (for sent model)
jfilter_text-classification-keras
train
568d46e23f859cadab340677c5614d6cb2360b18
diff --git a/stanza/utils/run_mwt.py b/stanza/utils/run_mwt.py index <HASH>..<HASH> 100644 --- a/stanza/utils/run_mwt.py +++ b/stanza/utils/run_mwt.py @@ -1,3 +1,20 @@ +""" +This script allows for training or testing on dev / test of the UD mwt tools. + +If run with a single treebank name, it will train or test that treebank. +If run with ud_all or all_ud, it will iterate over all UD treebanks it can find. + +Args are given as follows: + +python run_mwt.py [mode] <treebank> [mwt args...] + +The first argument, mode, is optional. It can either be --train (or +nothing) to train a model and report the dev score, --score_dev to +just report the dev score, or --score_test to report the test score. + +After specifying the treebank, any further arguments will be passed to mwt_extractor. +""" + import logging import math @@ -13,6 +30,9 @@ from stanza.utils.max_mwt_length import max_mwt_length logger = logging.getLogger('stanza') def check_mwt(filename): + """ + Checks whether or not there are MWTs in the given conll file + """ doc = Document(CoNLL.conll2dict(filename)) data = doc.get_mwt_expansions(False) return len(data) > 0
Add some comments to run_mwt
stanfordnlp_stanza
train
09122ad4275bb3df8cafbb9d4227880d89b36c60
diff --git a/js/language/lua.js b/js/language/lua.js index <HASH>..<HASH> 100644 --- a/js/language/lua.js +++ b/js/language/lua.js @@ -2,7 +2,7 @@ * Lua patterns * * @author Javier Aguirre - * @version 0.0.1 + * @version 1.0 */ Rainbow.extend('lua', [ { diff --git a/util/builder.py b/util/builder.py index <HASH>..<HASH> 100644 --- a/util/builder.py +++ b/util/builder.py @@ -18,6 +18,7 @@ class RainbowBuilder(object): 'generic': '1.0.8', 'html': '1.0.6', 'javascript': '1.0.6', + 'lua': '1.0', 'php': '1.0.5', 'python': '1.0.6', 'ruby': '1.0.5',
Set lua version to <I>
ccampbell_rainbow
train
641bc1d6bdf2cf75a06a8393dd7a1746b73e147f
diff --git a/src/PhpPact/Standalone/Installer/Service/InstallerPosixPreinstalled.php b/src/PhpPact/Standalone/Installer/Service/InstallerPosixPreinstalled.php index <HASH>..<HASH> 100644 --- a/src/PhpPact/Standalone/Installer/Service/InstallerPosixPreinstalled.php +++ b/src/PhpPact/Standalone/Installer/Service/InstallerPosixPreinstalled.php @@ -32,6 +32,6 @@ class InstallerPosixPreinstalled implements InstallerInterface private function getBinaryPath(string $binary): string { - return trim(shell_exec('command -v ' . escapeshellarg($binary))); + return trim((string) shell_exec('command -v ' . escapeshellarg($binary))); } }
fix: deprecation of null being passed to "trim()"
pact-foundation_pact-php
train
b9561074b127b4f7a335dcb1c5f87f7a88555218
diff --git a/lhc/io/fasta_/inorder_access_set.py b/lhc/io/fasta_/inorder_access_set.py index <HASH>..<HASH> 100644 --- a/lhc/io/fasta_/inorder_access_set.py +++ b/lhc/io/fasta_/inorder_access_set.py @@ -5,22 +5,25 @@ from lhc.interval import Interval class FastaInOrderAccessSet(object): - def __init__(self, fileobj): + def __init__(self, iterator): self.starts = [] self.stops = [] self.buffer = [] - self.fileobj = fileobj - self.chr = fileobj.next().split()[0][1:] + self.iterator = iterator + self.chr = iterator.next().split()[0][1:] self.start = 0 + def __getitem__(self, item): + return FastaInOrderAccessEntry(self, item) + def fetch(self, chr, start, stop): start = (chr, start) stop = (chr, stop) current_chr = self.chr current_start = self.start - for line in self.fileobj: + for line in self.iterator: if line.startswith('>'): current_chr = line.split()[0][1:] current_start = 0 @@ -29,7 +32,7 @@ class FastaInOrderAccessSet(object): line = line.strip() key = Interval((current_chr, current_start), (current_chr, current_start + len(line))) if key.start >= stop: - self.fileobj = chain([line], self.fileobj) + self.iterator = chain([line], self.iterator) break self.starts.append(key.start) self.stops.append(key.stop) @@ -45,3 +48,14 @@ class FastaInOrderAccessSet(object): index = bisect.bisect_left(self.starts, stop) return ''.join(self.buffer[:index])[start[1] - self.starts[0][1]:stop[1] - self.starts[0][1]] + + +class FastaInOrderAccessEntry(object): + def __init__(self, set, chr): + self.set = set + self.chr = chr + + def __getitem__(self, item): + if isinstance(item, slice): + return self.set.fetch(self.chr, item.start, item.stop) + return self.set.fetch(self.chr, item, item + 1)
added __getitem__ function to in fasta order access
childsish_lhc-python
train
65c99a3b6809187de6a06a5a77e71024e77ec685
diff --git a/driver-compat/src/test/unit/com/mongodb/DBCollectionTest.java b/driver-compat/src/test/unit/com/mongodb/DBCollectionTest.java index <HASH>..<HASH> 100644 --- a/driver-compat/src/test/unit/com/mongodb/DBCollectionTest.java +++ b/driver-compat/src/test/unit/com/mongodb/DBCollectionTest.java @@ -21,8 +21,14 @@ import org.bson.BSONBinaryWriter; import org.bson.BSONObject; import org.bson.BSONWriter; import org.bson.io.OutputBuffer; +import org.bson.types.BSONTimestamp; import org.bson.types.Binary; +import org.bson.types.Code; +import org.bson.types.MaxKey; +import org.bson.types.MinKey; +import org.bson.types.ObjectId; import org.junit.Test; +import org.mongodb.Document; import java.util.ArrayList; import java.util.Arrays; @@ -31,6 +37,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; +import java.util.regex.Pattern; import static com.mongodb.DBObjectMatchers.hasSubdocument; import static org.hamcrest.CoreMatchers.hasItem; @@ -42,6 +49,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public class DBCollectionTest extends DatabaseTestCase { @@ -361,6 +369,62 @@ public class DBCollectionTest extends DatabaseTestCase { assertThat(tweet.getDate(), is(new Date(12))); } + @Test + public void shouldAcceptDocumentsWithAllValidValueTypes() { + BasicDBObject doc = new BasicDBObject(); + doc.append("_id", new ObjectId()); + doc.append("bool", true); + doc.append("int", 3); + doc.append("short", (short) 4); + doc.append("long", 5L); + doc.append("str", "Hello MongoDB"); + doc.append("float", 6.0f); + doc.append("double", 1.1); + doc.append("date", new Date()); + doc.append("ts", new BSONTimestamp(5, 1)); + doc.append("pattern", Pattern.compile(".*")); + doc.append("minKey", new MinKey()); + doc.append("maxKey", new MaxKey()); + doc.append("js", new Code("code")); +// doc.append("jsWithScope", new CodeWithScope("code", new Document())); + doc.append("null", null); + doc.append("uuid", UUID.randomUUID()); + doc.append("db ref", new com.mongodb.DBRef(collection.getDB(), "test", new ObjectId())); + doc.append("binary", new Binary((byte) 42, new byte[] {10, 11, 12})); + doc.append("byte array", new byte[] {1, 2, 3}); + doc.append("int array", new int[] {4, 5, 6}); + doc.append("list", Arrays.asList(7, 8, 9)); + doc.append("doc list", Arrays.asList(new Document("x", 1), new Document("x", 2))); + + collection.insert(doc); + DBObject found = collection.findOne(); + assertNotNull(found); + assertEquals(ObjectId.class, found.get("_id").getClass()); + assertEquals(Boolean.class, found.get("bool").getClass()); + assertEquals(Integer.class, found.get("int").getClass()); + assertEquals(Integer.class, found.get("short").getClass()); + assertEquals(Long.class, found.get("long").getClass()); + assertEquals(String.class, found.get("str").getClass()); + assertEquals(Double.class, found.get("float").getClass()); + assertEquals(Double.class, found.get("double").getClass()); + assertEquals(Date.class, found.get("date").getClass()); + assertEquals(BSONTimestamp.class, found.get("ts").getClass()); + assertEquals(Pattern.class, found.get("pattern").getClass()); + assertEquals(MinKey.class, found.get("minKey").getClass()); + assertEquals(MaxKey.class, found.get("maxKey").getClass()); + assertEquals(Code.class, found.get("js").getClass()); +// 
assertEquals(CodeWithScope.class, found.get("jsWithScope").getClass()); + assertNull(found.get("null")); + assertEquals(UUID.class, found.get("uuid").getClass()); + assertEquals(DBRef.class, found.get("db ref").getClass()); + assertEquals(Binary.class, found.get("binary").getClass()); + assertEquals(byte[].class, found.get("byte array").getClass()); + assertTrue(found.get("int array") instanceof List); + assertTrue(found.get("list") instanceof List); + assertTrue(found.get("doc list") instanceof List); + } + + public static class MyDBObject extends BasicDBObject { private static final long serialVersionUID = 3352369936048544621L; }
Added acceptance test that show that valid value types can be round-tripped to MongoDB in driver-compat. Note that CodeWithScope is not yet supported.
mongodb_mongo-java-driver
train
f21083f71a956ef5271f7cd25f00774b198e73f1
diff --git a/lib/authority/abilities.rb b/lib/authority/abilities.rb index <HASH>..<HASH> 100644 --- a/lib/authority/abilities.rb +++ b/lib/authority/abilities.rb @@ -23,9 +23,6 @@ module Authority end end - def authorizer=(authorizer_class) - @authorizer = authorizer_class - end def authorizer self.class.authorizer.new(self) # instantiate on every check, in case model has changed @@ -44,6 +41,11 @@ module Authority module ClassMethods include Definitions + def authorizer=(authorizer_class) + @authorizer = authorizer_class + self.authorizer_name = @authorizer.name + end + # @return [Class] of the designated authorizer def authorizer @authorizer ||= authorizer_name.constantize # Get an actual reference to the authorizer class diff --git a/spec/authority/abilities_spec.rb b/spec/authority/abilities_spec.rb index <HASH>..<HASH> 100644 --- a/spec/authority/abilities_spec.rb +++ b/spec/authority/abilities_spec.rb @@ -46,6 +46,26 @@ describe Authority::Abilities do end + describe "authorizer=" do + + let(:test_class) { Class.new {include Authority::Abilities} } + + it "has a class attribute=" do + expect(test_class).to respond_to(:authorizer=) + end + + it "sets authorizer" do + test_class.authorizer = ExampleResourceAuthorizer + expect(test_class.authorizer).to eq(ExampleResourceAuthorizer) + end + + it "sets authorizer_name" do + test_class.authorizer = ExampleResourceAuthorizer + expect(test_class.authorizer_name).to eq("ExampleResourceAuthorizer") + end + + end + describe "authorizer" do it "constantizes the authorizer name as the authorizer" do
correctly set authorizer= as class method, update authorizer_name, assert accordingly in specs
nathanl_authority
train
6ad0d5f99a931c7629f9acb805d4db5c03a01fa3
diff --git a/CHANGELOG.txt b/CHANGELOG.txt index <HASH>..<HASH> 100644 --- a/CHANGELOG.txt +++ b/CHANGELOG.txt @@ -4,6 +4,7 @@ Changelog 1.1 (xx.xx.20xx) - IN DEVELOPMENT ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * Added exclude() method to FakeQuerySet +* Removed dependency on the 'six' package, in favour of Django's built-in version 1.0 (09.10.2015) ~~~~~~~~~~~~~~~~ diff --git a/modelcluster/forms.py b/modelcluster/forms.py index <HASH>..<HASH> 100644 --- a/modelcluster/forms.py +++ b/modelcluster/forms.py @@ -1,6 +1,6 @@ from __future__ import unicode_literals -from six import with_metaclass +from django.utils.six import with_metaclass import django from django.forms.models import ( diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -16,7 +16,6 @@ setup( license='BSD', long_description=open('README.rst').read(), install_requires=[ - "six>=1.6.1", "pytz>=2015.2", ], classifiers=[ diff --git a/tests/tests/test_cluster_form.py b/tests/tests/test_cluster_form.py index <HASH>..<HASH> 100644 --- a/tests/tests/test_cluster_form.py +++ b/tests/tests/test_cluster_form.py @@ -1,6 +1,6 @@ from __future__ import unicode_literals -from six import text_type +from django.utils.six import text_type from django.test import TestCase from tests.models import Band, BandMember, Album, Restaurant diff --git a/tox.ini b/tox.ini index <HASH>..<HASH> 100644 --- a/tox.ini +++ b/tox.ini @@ -14,7 +14,6 @@ basepython = deps = django-taggit>=0.13.0 pytz>=2014.7 - six>=1.6.1 dj17: Django>=1.7,<1.8 dj18: Django>=1.8,<1.9 postgres: psycopg2>=2.6
Use Django's builtin version of six Removes direct dependency on six
wagtail_django-modelcluster
train
18c1849773eb5ef5c9148941bcf969d93eda9084
diff --git a/spec/integration/cli_deployment_process_spec.rb b/spec/integration/cli_deployment_process_spec.rb index <HASH>..<HASH> 100644 --- a/spec/integration/cli_deployment_process_spec.rb +++ b/spec/integration/cli_deployment_process_spec.rb @@ -131,7 +131,8 @@ lines"} expect(output).to_not include('stemcell') expect(output).to_not include('releases') - expect(output).to match(/ resource_pools: + expect(output).to match(/ + resource_pools: - name: a cloud_properties: \+ name: new_property @@ -139,6 +140,7 @@ lines"} - env: - bosh: - password: "?<redacted>"? + jobs: - name: job1 properties:
Fixes expectation to include new lines separating top level sections in diff [#<I>]
cloudfoundry_bosh
train
5344114ad39b5234e72a6111c07b7a0dacfffd00
diff --git a/pom.xml b/pom.xml index <HASH>..<HASH> 100644 --- a/pom.xml +++ b/pom.xml @@ -197,8 +197,8 @@ </dependency> <dependency> <groupId>org.bouncycastle</groupId> - <artifactId>bcpg-jdk12</artifactId> - <version>130</version> + <artifactId>bcpg-jdk15on</artifactId> + <version>1.47</version> </dependency> <dependency> <groupId>junit</groupId> diff --git a/src/main/java/org/vafer/jdeb/signing/SigningUtils.java b/src/main/java/org/vafer/jdeb/signing/SigningUtils.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/vafer/jdeb/signing/SigningUtils.java +++ b/src/main/java/org/vafer/jdeb/signing/SigningUtils.java @@ -20,15 +20,12 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; -import java.security.NoSuchAlgorithmException; -import java.security.NoSuchProviderException; -import java.security.Security; +import java.security.GeneralSecurityException; import java.security.SignatureException; import java.util.Iterator; import org.bouncycastle.bcpg.ArmoredOutputStream; import org.bouncycastle.bcpg.BCPGOutputStream; -import org.bouncycastle.jce.provider.BouncyCastleProvider; import org.bouncycastle.openpgp.PGPException; import org.bouncycastle.openpgp.PGPPrivateKey; import org.bouncycastle.openpgp.PGPSecretKey; @@ -37,13 +34,15 @@ import org.bouncycastle.openpgp.PGPSecretKeyRingCollection; import org.bouncycastle.openpgp.PGPSignature; import org.bouncycastle.openpgp.PGPSignatureGenerator; import org.bouncycastle.openpgp.PGPUtil; +import org.bouncycastle.openpgp.operator.bc.BcPBESecretKeyDecryptorBuilder; +import org.bouncycastle.openpgp.operator.bc.BcPGPContentSignerBuilder; +import org.bouncycastle.openpgp.operator.bc.BcPGPDigestCalculatorProvider; /** * Utils to do the signing with OpenPGP * * @author Torsten Curdt */ - public final class SigningUtils { private static PGPSecretKey getSecretKey( final InputStream pInput, final String pKey ) throws IOException, PGPException { @@ -78,21 +77,18 @@ public final class SigningUtils { * @param pOutput * @throws IOException * @throws PGPException - * @throws NoSuchProviderException - * @throws NoSuchAlgorithmException - * @throws SignatureException + * @throws GeneralSecurityException */ - public static void clearSign( final InputStream pInput, final InputStream pKeyring, final String pKey, final String pPassphrase, final OutputStream pOutput ) throws IOException, PGPException, NoSuchProviderException, NoSuchAlgorithmException, SignatureException { - - Security.addProvider(new BouncyCastleProvider()); + public static void clearSign( final InputStream pInput, final InputStream pKeyring, final String pKey, final String pPassphrase, final OutputStream pOutput ) throws IOException, PGPException, GeneralSecurityException { final PGPSecretKey secretKey = getSecretKey(pKeyring, pKey); - final PGPPrivateKey privateKey = secretKey.extractPrivateKey(pPassphrase.toCharArray(), "BC"); - + + final PGPPrivateKey privateKey = secretKey.extractPrivateKey(new BcPBESecretKeyDecryptorBuilder(new BcPGPDigestCalculatorProvider()).build(pPassphrase.toCharArray())); + final int digest = PGPUtil.SHA1; - - final PGPSignatureGenerator signatureGenerator = new PGPSignatureGenerator(secretKey.getPublicKey().getAlgorithm(), digest, "BC"); - signatureGenerator.initSign(PGPSignature.CANONICAL_TEXT_DOCUMENT, privateKey); + + PGPSignatureGenerator signatureGenerator = new PGPSignatureGenerator(new BcPGPContentSignerBuilder(secretKey.getPublicKey().getAlgorithm(), digest)); + 
signatureGenerator.init(PGPSignature.CANONICAL_TEXT_DOCUMENT, privateKey); // final PGPSignatureSubpacketGenerator subpackageGenerator = new PGPSignatureSubpacketGenerator(); // @@ -133,7 +129,6 @@ public final class SigningUtils { signatureGenerator.generate().encode(pgpOutput); armoredOutput.close(); - } diff --git a/src/test/java/org/vafer/jdeb/signing/SigningTestCase.java b/src/test/java/org/vafer/jdeb/signing/SigningTestCase.java index <HASH>..<HASH> 100644 --- a/src/test/java/org/vafer/jdeb/signing/SigningTestCase.java +++ b/src/test/java/org/vafer/jdeb/signing/SigningTestCase.java @@ -41,7 +41,7 @@ public final class SigningTestCase extends TestCase { "TEST2\n" + "TEST3\n" + "-----BEGIN PGP SIGNATURE-----\n" + - "Version: BCPG v1.29\n" + + "Version: BCPG v1.47\n" + "\n" + "iEYEARECABAFAkax1rgJEHM9pIAuB02PAABIJgCghFmoCJCZ0CGiqgVLGGPd/Yh5\n" + "FQQAnRVqvI2ij45JQSHYJBblZ0Vv2meN\n" +
Updated the dependency on BouncyCastle (<I>)
tcurdt_jdeb
train
8d74dee28d9b772b05c3ea509b6b09d93a8b21ee
diff --git a/examples/producer.rb b/examples/producer.rb index <HASH>..<HASH> 100644 --- a/examples/producer.rb +++ b/examples/producer.rb @@ -1,8 +1,9 @@ require 'hutch' + Hutch.connect - loop do - Hutch.publish('hutch.test', {subject: 'test message'}, true) - sleep 0.5 - end +loop do + Hutch.publish('hutch.test', {subject: 'test message'}) + sleep 0.5 +end diff --git a/lib/hutch.rb b/lib/hutch.rb index <HASH>..<HASH> 100644 --- a/lib/hutch.rb +++ b/lib/hutch.rb @@ -33,8 +33,8 @@ module Hutch @connected end - def self.publish(routing_key, message, confirm) - @broker.publish(routing_key, message, confirm) + def self.publish(routing_key, message) + @broker.publish(routing_key, message) end end diff --git a/lib/hutch/broker.rb b/lib/hutch/broker.rb index <HASH>..<HASH> 100644 --- a/lib/hutch/broker.rb +++ b/lib/hutch/broker.rb @@ -139,26 +139,15 @@ module Hutch @channel.ack(delivery_tag, false) end - def publish(routing_key, message, confirm) - logger.info "publishing message '#{message.inspect}' to #{routing_key}" - - if confirm - @channel.confirm_select do |delivery_tag, multiple, nack| - logger.info "confirm cb #{delivery_tag} #{multiple} #{nack}" - end - end - + def publish(routing_key, message) payload = JSON.dump(message) - - @exchange.publish(payload, routing_key: routing_key, persistent: true, - timestamp: Time.now.to_i, message_id: generate_id) - - if confirm - success = @channel.wait_for_confirms - unless success - logger.info "confirmation never received for message '#{message.inspect}" - return false - end + + if @connection.open? + logger.info "publishing message '#{message.inspect}' to #{routing_key}" + @exchange.publish(payload, routing_key: routing_key, persistent: true, + timestamp: Time.now.to_i, message_id: generate_id) + else + logger.error "Unable to publish : routing key: #{routing_key}, message: #{message}" end end
Remove pub confirm and check for connection before publishing
gocardless_hutch
train
8b1c232ecf9d3ba83b5309795022f7346e2caa7d
diff --git a/src/core.js b/src/core.js index <HASH>..<HASH> 100755 --- a/src/core.js +++ b/src/core.js @@ -1649,7 +1649,7 @@ var me = me || {}; // this should be replace by a list of the 4 adjacent cell around the object requesting collision for ( var i = gameObjects.length, obj; i--, obj = gameObjects[i];)//for (var i = objlist.length; i-- ;) { - if (obj.inViewport && obj.visible && obj.collidable && (obj!=objA)) + if ((obj.inViewport || obj.alwaysUpdate) && obj.collidable && (obj!=objA)) { res = obj.collisionBox.collideVsAABB.call(obj.collisionBox, objA.collisionBox); if (res.x != 0 || res.y != 0) { @@ -1691,7 +1691,7 @@ var me = me || {}; // this should be replace by a list of the 4 adjacent cell around the object requesting collision for ( var i = gameObjects.length, obj; i--, obj = gameObjects[i];)//for (var i = objlist.length; i-- ;) { - if (obj.inViewport && obj.visible && obj.collidable && (obj.type === type) && (obj!=objA)) + if ((obj.inViewport || obj.alwaysUpdate) && obj.collidable && (obj.type === type) && (obj!=objA)) { res = obj.collisionBox.collideVsAABB.call(obj.collisionBox, objA.collisionBox); if (res.x != 0 || res.y != 0) { diff --git a/src/renderable/sprite.js b/src/renderable/sprite.js index <HASH>..<HASH> 100644 --- a/src/renderable/sprite.js +++ b/src/renderable/sprite.js @@ -552,7 +552,7 @@ */ update : function() { // update animation if necessary - if (this.inViewport && !this.animationpause && (this.fpscount++ > this.current.animationspeed)) { + if (!this.animationpause && (this.fpscount++ > this.current.animationspeed)) { this.setAnimationFrame(++this.current.idx); this.fpscount = 0;
[#<I>] Followup: Remove explicit inViewport check from SpriteObject, and check alwaysUpdate in `me.game.collide`
melonjs_melonJS
train
7f23758b99f4218d3225e7b1d4ffc8690fe575e3
diff --git a/server/src/main/java/org/cloudfoundry/identity/uaa/client/ClientAdminEndpointsValidator.java b/server/src/main/java/org/cloudfoundry/identity/uaa/client/ClientAdminEndpointsValidator.java index <HASH>..<HASH> 100644 --- a/server/src/main/java/org/cloudfoundry/identity/uaa/client/ClientAdminEndpointsValidator.java +++ b/server/src/main/java/org/cloudfoundry/identity/uaa/client/ClientAdminEndpointsValidator.java @@ -262,7 +262,7 @@ public class ClientAdminEndpointsValidator implements InitializingBean, ClientDe } for (String uri : uris) { - if (!UaaUrlUtils.isValidRegisteredRedirectUrl(uri)) { + if (!UaaUrlUtils.isValidRegisteredRedirectUrl(uri) || uri.contains(",")) { throw new InvalidClientDetailsException( String.format("One of the redirect_uri is invalid: %s", uri)); } diff --git a/server/src/test/java/org/cloudfoundry/identity/uaa/client/ClientAdminEndpointsValidatorTests.java b/server/src/test/java/org/cloudfoundry/identity/uaa/client/ClientAdminEndpointsValidatorTests.java index <HASH>..<HASH> 100644 --- a/server/src/test/java/org/cloudfoundry/identity/uaa/client/ClientAdminEndpointsValidatorTests.java +++ b/server/src/test/java/org/cloudfoundry/identity/uaa/client/ClientAdminEndpointsValidatorTests.java @@ -212,6 +212,16 @@ public class ClientAdminEndpointsValidatorTests { validator.validateClientRedirectUri(client); } + @Test(expected = InvalidClientDetailsException.class) + public void testAnotherOptionOneInvalidURL() { + Set<String> urls = new HashSet<>(); + urls.add("http://valid.com"); + urls.add("http://invalid.com/with/path,subpath"); + client.setAuthorizedGrantTypes(Collections.singleton(GRANT_TYPE_AUTHORIZATION_CODE)); + client.setRegisteredRedirectUri(urls); + validator.validateClientRedirectUri(client); + } + @Test public void testValidateValidURLs() { Set<String> urls = new HashSet<>();
fix for issue <I> (#<I>) Another option to solve it, simply reject comma Url in REST validation
cloudfoundry_uaa
train
2988030b4c7ceed5d59939f09d1ac4437ede2c67
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,5 +1,5 @@ import alabaster -from sprockets.mixins.media_type import __version__ +from sprockets.mixins.mediatype import __version__ needs_sphinx = '1.0' extensions = ['sphinx.ext.autodoc',
Correct module name in sphinx conf.
sprockets_sprockets.mixins.mediatype
train
0d13277c0f2f03b51395366053c861ece494b91a
diff --git a/hererocks.py b/hererocks.py index <HASH>..<HASH> 100755 --- a/hererocks.py +++ b/hererocks.py @@ -531,7 +531,7 @@ class LuaJIT(Lua): run_command("msvcbuild.bat") os.chdir("..") else: - run_command("make", "PREFIX=" + quote(opts.location)) + run_command("make") def make_install(self): luajit_file = exe("luajit")
Do not pass PREFIX when building LuaJIT Since default paths are patched, there is no point.
mpeterv_hererocks
train
f6f6beab66c9cf48d1c4d2bd54669430de01bc65
diff --git a/icekit/publishing/models.py b/icekit/publishing/models.py index <HASH>..<HASH> 100644 --- a/icekit/publishing/models.py +++ b/icekit/publishing/models.py @@ -283,7 +283,7 @@ class PublishingModel(models.Model): publish_obj.publishing_published_at = self.publishing_published_at # Perform per-model preparation before saving published copy - publish_obj.publishing_prepare_published_copy() + publish_obj.publishing_prepare_published_copy(self) # Save the new published object as a separate instance to self. publish_obj.save() @@ -358,7 +358,7 @@ class PublishingModel(models.Model): """ self.save() - def publishing_prepare_published_copy(self): + def publishing_prepare_published_copy(self, draft_obj): """ Prepare published copy of draft prior to saving it """ pass
Fix mistaken API change of overrideable method, re #5 #<I>
ic-labs_django-icekit
train
d89351ab97711672df4afeefccd6b45969d08a29
diff --git a/core/src/main/java/org/togglz/core/user/FeatureUser.java b/core/src/main/java/org/togglz/core/user/FeatureUser.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/org/togglz/core/user/FeatureUser.java +++ b/core/src/main/java/org/togglz/core/user/FeatureUser.java @@ -23,4 +23,12 @@ public interface FeatureUser { */ boolean isFeatureAdmin(); + /** + * This method allows to retrieve attributes associated with a user. + * + * @param name The name of the attribute + * @return the value of the attribute or <code>null</code> if there is no such attribute. + */ + Object getAttribute(String name); + } diff --git a/core/src/main/java/org/togglz/core/user/SimpleFeatureUser.java b/core/src/main/java/org/togglz/core/user/SimpleFeatureUser.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/org/togglz/core/user/SimpleFeatureUser.java +++ b/core/src/main/java/org/togglz/core/user/SimpleFeatureUser.java @@ -1,5 +1,8 @@ package org.togglz.core.user; +import java.util.HashMap; +import java.util.Map; + import org.togglz.core.manager.DefaultFeatureManager; /** @@ -13,6 +16,7 @@ public class SimpleFeatureUser implements FeatureUser { private final String name; private final boolean featureAdmin; + private final Map<String, Object> attributes = new HashMap<String, Object>(); /** * Constructor of {@link DefaultFeatureManager}. @@ -33,4 +37,25 @@ public class SimpleFeatureUser implements FeatureUser { return featureAdmin; } + @Override + public Object getAttribute(String name) { + return attributes.get(name); + } + + /** + * This method can be used to set attributes of the user. + * + * @param name The name of the attribute + * @param value The value of the attribute + * @return <code>this</code> for fluent object creation + */ + public SimpleFeatureUser setAttribute(String name, Object value) { + if (value != null) { + attributes.put(name, value); + } else { + attributes.remove(name); + } + return this; + } + }
Added API for storing attributes to the FeatureUser interface
togglz_togglz
train
62fd1d0f0bd4770c581f7b9c3a7b9c200f781f4b
diff --git a/lib/jsdom/living/parent-node.js b/lib/jsdom/living/parent-node.js index <HASH>..<HASH> 100644 --- a/lib/jsdom/living/parent-node.js +++ b/lib/jsdom/living/parent-node.js @@ -14,7 +14,7 @@ module.exports = function (core) { if (!this._childrenList) { const self = this; - this._childrenList = new core.NodeList(this, function () { + this._childrenList = new core.HTMLCollection(this, function () { return self._childNodes.filter(function (node) { return node.tagName; }); diff --git a/test/living-dom/parent-node.js b/test/living-dom/parent-node.js index <HASH>..<HASH> 100644 --- a/test/living-dom/parent-node.js +++ b/test/living-dom/parent-node.js @@ -15,6 +15,9 @@ exports["Document should implement ParentNode:children"] = function (t) { t.strictEqual(nodeName(parent.children.item(0)), "HTML"); t.strictEqual(nodeName(parent.children[1]), undefined); t.strictEqual(nodeName(parent.children.item(1)), null); + t.strictEqual(nodeName(parent.children.namedItem("html_id")), "HTML"); + t.strictEqual(nodeName(parent.children.namedItem("foo")), null); + t.ok(parent.children instanceof parent.defaultView.HTMLCollection, "children should be a HTMLCollection"); t.done(); }; @@ -29,6 +32,9 @@ exports["Element should implement ParentNode:children"] = function (t) { t.strictEqual(nodeName(parent.children.item(1)), "DIV"); t.strictEqual(nodeName(parent.children[2]), undefined); t.strictEqual(nodeName(parent.children.item(2)), null); + t.strictEqual(nodeName(parent.children.namedItem("a_name")), "A"); + t.strictEqual(nodeName(parent.children.namedItem("foo")), null); + t.ok(parent.children instanceof doc.defaultView.HTMLCollection, "children should be a HTMLCollection"); t.done(); }; @@ -47,5 +53,8 @@ exports["DocumentFragment should implement ParentNode:children"] = function (t) t.strictEqual(nodeName(parent.children.item(1)), "DIV"); t.strictEqual(nodeName(parent.children[2]), undefined); t.strictEqual(nodeName(parent.children.item(2)), null); + t.strictEqual(nodeName(parent.children.namedItem("a_name")), "A"); + t.strictEqual(nodeName(parent.children.namedItem("foo")), null); + t.ok(parent.children instanceof doc.defaultView.HTMLCollection, "children should be a HTMLCollection"); t.done(); };
NodeParent:children should be a HTMLCollection, not a NodeList
jsdom_jsdom
train
c5960e25d583fec87739bac0693dacb89358cb4d
diff --git a/wfdb/processing/qrs.py b/wfdb/processing/qrs.py index <HASH>..<HASH> 100644 --- a/wfdb/processing/qrs.py +++ b/wfdb/processing/qrs.py @@ -103,13 +103,13 @@ class XQRS(object): ref_period : int, float, optional The QRS refractory period. t_inspect_period : int, float, optional - The period below which a potential QRS complex is - inspected to see if it is a T-wave. + The period below which a potential QRS complex is inspected to + see if it is a T-wave. Leave as 0 for no T-wave inspection. """ def __init__(self, hr_init=75, hr_max=200, hr_min=25, qrs_width=0.1, qrs_thr_init=0.13, qrs_thr_min=0, ref_period=0.2, - t_inspect_period=0.36): + t_inspect_period=0): if hr_min < 0: raise ValueError("'hr_min' must be >= 0")
set default of `t_inspect_period` to `0`
MIT-LCP_wfdb-python
train
9626ae193149c0c43d186dcb63ccee8d5c3bbb17
diff --git a/lib/guard/resque.rb b/lib/guard/resque.rb index <HASH>..<HASH> 100644 --- a/lib/guard/resque.rb +++ b/lib/guard/resque.rb @@ -11,11 +11,11 @@ module Guard # :count e.g. 3 # :verbose e.g. true # :trace e.g. true - # :stop_signal e.g. KILL + # :stop_signal e.g. QUIT def initialize(watchers = [], options = {}) @options = options @pid = nil - @stop_signal = options[:stop_signal] || "KILL" + @stop_signal = options[:stop_signal] || "QUIT" super end
Default to QUIT signal instead of KILL, per the Resque docs.
jacquescrocker_guard-resque
train
73115614fd1545bf90b0b144a22be523ef3e2355
diff --git a/asv/util.py b/asv/util.py index <HASH>..<HASH> 100644 --- a/asv/util.py +++ b/asv/util.py @@ -341,26 +341,23 @@ def check_output(args, valid_return_codes=(0,), timeout=600, dots=True, log.debug("Running '{0}'".format(' '.join(args))) - posix = getattr(os, 'setpgid', None) - if posix: - # Run the subprocess in a separate process group, so that we - # can kill it and all child processes it spawns e.g. on - # timeouts. Note that subprocess.Popen will wait until exec() - # before returning in parent process, so there is no race - # condition in setting the process group vs. calls to os.killpg - preexec_fn = lambda: os.setpgid(0, 0) + kwargs = dict(shell=shell, env=env, cwd=cwd, + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + if WIN: + kwargs['close_fds'] = False + kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP else: - preexec_fn = None - - proc = subprocess.Popen( - args, - close_fds=(not WIN), - env=env, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - shell=shell, - preexec_fn=preexec_fn, - cwd=cwd) + kwargs['close_fds'] = True + posix = getattr(os, 'setpgid', None) + if posix: + # Run the subprocess in a separate process group, so that we + # can kill it and all child processes it spawns e.g. on + # timeouts. Note that subprocess.Popen will wait until exec() + # before returning in parent process, so there is no race + # condition in setting the process group vs. calls to os.killpg + kwargs['preexec_fn'] = lambda: os.setpgid(0, 0) + + proc = subprocess.Popen(args, **kwargs) last_dot_time = time.time() stdout_chunks = [] @@ -392,7 +389,7 @@ def check_output(args, valid_return_codes=(0,), timeout=600, dots=True, time.sleep(0.1) if time.time() - start_time[0] > timeout: was_timeout[0] = True - proc.terminate() + proc.send_signal(signal.CTRL_BREAK_EVENT) watcher = threading.Thread(target=watcher_run) watcher.start() diff --git a/test/test_subprocess.py b/test/test_subprocess.py index <HASH>..<HASH> 100644 --- a/test/test_subprocess.py +++ b/test/test_subprocess.py @@ -4,7 +4,6 @@ from __future__ import (absolute_import, division, print_function, unicode_literals) -import os import sys import time @@ -27,11 +26,8 @@ sys.stderr.write("Stderr after waiting\n") """) # Another example, where timeout is due to a hanging sub-subprocess - if getattr(os, 'setpgid', None): - # only on posix - timeout_codes.append(r""" + timeout_codes.append(r""" import sys -import time import subprocess sys.stdout.write("Stdout before waiting\n") @@ -42,7 +38,7 @@ subprocess.call([sys.executable, "-c", "import sys, subprocess; subprocess.call([sys.executable, '-c', 'import time; time.sleep(60)'])"]) sys.stdout.write("Stdout after waiting\n") sys.stderr.write("Stderr after waiting\n") - """) + """) for timeout_code in timeout_codes: t = time.time()
ENH: Terminate process group on timeout under Windows.
airspeed-velocity_asv
train
19d6588be8e2e7d37a3b9dec9cd415e2f95cb41d
diff --git a/sparse/sfold.go b/sparse/sfold.go index <HASH>..<HASH> 100644 --- a/sparse/sfold.go +++ b/sparse/sfold.go @@ -19,24 +19,23 @@ type FoldFileOperations interface { // FoldFile folds child snapshot data into its parent func FoldFile(childFileName, parentFileName string, ops FoldFileOperations) error { - childFInfo, err := os.Stat(childFileName) if err != nil { - panic("os.Stat(childFileName) failed, error: " + err.Error()) + return fmt.Errorf("os.Stat(childFileName) failed, error: %v", err) } parentFInfo, err := os.Stat(parentFileName) if err != nil { - panic("os.Stat(parentFileName) failed, error: " + err.Error()) + return fmt.Errorf("os.Stat(parentFileName) failed, error: %v", err) } // ensure no directory if childFInfo.IsDir() || parentFInfo.IsDir() { - panic("at least one file is directory, not a normal file") + return fmt.Errorf("at least one file is directory, not a normal file") } // ensure file sizes are equal if childFInfo.Size() != parentFInfo.Size() { - panic("file sizes are not equal") + return fmt.Errorf("file sizes are not equal") } go coalesce(parentFileName, childFileName, childFInfo.Size(), ops) @@ -59,19 +58,19 @@ func coalesce(parentFileName, childFileName string, fileSize int64, ops FoldFile // open child and parent files childFileIo, err := NewDirectFileIoProcessor(childFileName, os.O_RDONLY, 0) if err != nil { - panic("Failed to open childFile, error: " + err.Error()) + return fmt.Errorf("failed to open childFile, error: %v", err) } defer childFileIo.Close() parentFileIo, err := NewDirectFileIoProcessor(parentFileName, os.O_WRONLY, 0) if err != nil { - panic("Failed to open parentFile, error: " + err.Error()) + return fmt.Errorf("failed to open parentFile, error: %v", err) } defer parentFileIo.Close() blockSize, err := getFileSystemBlockSize(childFileIo) if err != nil { - panic("can't get FS block size, error: " + err.Error()) + return fmt.Errorf("can't get FS block size, error: %v", err) } exts, err := GetFiemapExtents(childFileIo) if err != nil {
Convert panics in sfold to returned errors
rancher_sparse-tools
train
53136e2913582bc1b33fad42b283f961dca56410
diff --git a/src/functions.php b/src/functions.php index <HASH>..<HASH> 100644 --- a/src/functions.php +++ b/src/functions.php @@ -440,7 +440,8 @@ function readline(StreamInterface $stream, $maxLength = null) function parse_request($message) { $data = _parse_message($message); - if (!preg_match('/^[a-zA-Z]+\s+\/.*/', $data['start-line'])) { + $matches = []; + if (!preg_match('/^[a-zA-Z]+\s+([a-zA-Z]+:\/\/|\/).*/', $data['start-line'], $matches)) { throw new \InvalidArgumentException('Invalid request string'); } $parts = explode(' ', $data['start-line'], 3); @@ -448,7 +449,7 @@ function parse_request($message) return new Request( $parts[0], - _parse_request_uri($parts[1], $data['headers']), + $matches[1] === '/' ? _parse_request_uri($parts[1], $data['headers']) : $parts[1], $data['headers'], $data['body'], $version diff --git a/tests/FunctionsTest.php b/tests/FunctionsTest.php index <HASH>..<HASH> 100644 --- a/tests/FunctionsTest.php +++ b/tests/FunctionsTest.php @@ -270,6 +270,18 @@ class FunctionsTest extends \PHPUnit_Framework_TestCase $this->assertEquals('http://foo.com/', (string) $request->getUri()); } + public function testParsesRequestMessagesWithFullUri() + { + $req = "GET https://www.google.com:443/search?q=foobar HTTP/1.1\r\nHost: www.google.com\r\n\r\n"; + $request = Psr7\parse_request($req); + $this->assertEquals('GET', $request->getMethod()); + $this->assertEquals('/search?q=foobar', $request->getRequestTarget()); + $this->assertEquals('1.1', $request->getProtocolVersion()); + $this->assertEquals('www.google.com', $request->getHeaderLine('Host')); + $this->assertEquals('', (string) $request->getBody()); + $this->assertEquals('https://www.google.com/search?q=foobar', (string) $request->getUri()); + } + /** * @expectedException \InvalidArgumentException */
Fixes for issue-<I> (support full URL's in requests, such as proxied requests)
guzzle_psr7
train
48b4dea4ed5f2b651c01379b9e1c8d1a2ea01fc5
diff --git a/extension/protobuf/src/main/java/org/openbase/jul/extension/protobuf/MessageObservable.java b/extension/protobuf/src/main/java/org/openbase/jul/extension/protobuf/MessageObservable.java index <HASH>..<HASH> 100644 --- a/extension/protobuf/src/main/java/org/openbase/jul/extension/protobuf/MessageObservable.java +++ b/extension/protobuf/src/main/java/org/openbase/jul/extension/protobuf/MessageObservable.java @@ -39,37 +39,12 @@ import org.openbase.jul.pattern.ObservableImpl; public class MessageObservable<S, M extends Message> extends ObservableImpl<S, M> { public static final String TIMESTAMP_MESSAGE_NAME = "Timestamp"; - public static final String RESOURCE_ALLOCATION_FIELD = "resource_allocation"; - -// private final DataProvider<M> dataProvider; public MessageObservable(final S source) { super(source); - -// this.dataProvider = source; this.setHashGenerator((M value) -> removeTimestamps(value.toBuilder()).build().hashCode()); } -// @Override -// public void waitForValue(long timeout, TimeUnit timeUnit) throws CouldNotPerformException, InterruptedException { -// dataProvider.waitForData(); -// } -// -// @Override -// public M getValue() throws NotAvailableException { -// return dataProvider.getData(); -// } -// -// @Override -// public boolean isValueAvailable() { -// return dataProvider.isDataAvailable(); -// } -// -// @Override -// public Future<M> getValueFuture() { -// return dataProvider.getDataFuture(); -// } - /** * Recursively clear timestamp messages from a builder. For efficiency repeated fields are ignored. * @@ -79,20 +54,13 @@ public class MessageObservable<S, M extends Message> extends ObservableImpl<S, M public Builder removeTimestamps(final Builder builder) { final Descriptors.Descriptor descriptorForType = builder.getDescriptorForType(); for (final Descriptors.FieldDescriptor field : descriptorForType.getFields()) { + // if the field is not repeated, a message and a timestamp it is cleared if (!field.isRepeated() && field.getType() == Descriptors.FieldDescriptor.Type.MESSAGE) { - //=============================================================== - //TODO: This is just a hack since states in units now contain action descriptions, - // This line prevents resource allocations to be checked because they contain required fields - // and thus if they are checked calling build on the builder afterwards fails. - // can be removed after switching to protobuf 3 or replacing the resource allocation type - if (field.getName().equals(RESOURCE_ALLOCATION_FIELD)) { - continue; - } - //=============================================================== if (field.getMessageType().getName().equals(TIMESTAMP_MESSAGE_NAME)) { builder.clearField(field); } else { + // skip checking recursively if the field is not even initialized if (builder.hasField(field)) { removeTimestamps(builder.getFieldBuilder(field));
cleanup code and remove RESOURCE_ALLOCATION workaround which related issue has already been fixed.
openbase_jul
train
28fe3159de5739332153a5f6e8aa642e101dab56
diff --git a/js/MediaStream.js b/js/MediaStream.js index <HASH>..<HASH> 100644 --- a/js/MediaStream.js +++ b/js/MediaStream.js @@ -314,8 +314,13 @@ MediaStream.prototype.removeTrack = function (track) { MediaStream.prototype.clone = function () { - debug('clone()'); - return new MediaStream(this); + + var newStream = MediaStream(); + this.getTracks().forEach(function (track) { + newStream.addTrack(track.clone()); + }); + + return newStream; }; // Backwards compatible API. diff --git a/js/MediaStreamTrack.js b/js/MediaStreamTrack.js index <HASH>..<HASH> 100644 --- a/js/MediaStreamTrack.js +++ b/js/MediaStreamTrack.js @@ -23,6 +23,10 @@ var // Save original MediaStreamTrack var originalMediaStreamTrack = window.MediaStreamTrack || function dummyMediaStreamTrack() {}; +function newMediaStreamTrackId() { + return window.crypto.getRandomValues(new Uint32Array(4)).join('-'); +} + function MediaStreamTrack(dataFromEvent) { if (!dataFromEvent) { throw new Error('Illegal constructor'); @@ -81,9 +85,18 @@ MediaStreamTrack.prototype.applyConstraints = function () { }; MediaStreamTrack.prototype.clone = function () { - //throw new Error('Not implemented.'); - // SHAM - return this; + + var newTrackId = newMediaStreamTrackId(); + + exec(null, null, 'iosrtcPlugin', 'MediaStreamTrack_clone', [this.id, newTrackId]); + + return new MediaStreamTrack({ + id: newTrackId, + kind: this.kind, + label: this.label, + readyState: this.readyState, + enabled: this.enabled + }); }; MediaStreamTrack.prototype.getCapabilities = function () {
Implement MediaStream and MediaStreamTrack clone SHIM that duplicate track and stream for real
BasqueVoIPMafia_cordova-plugin-iosrtc
train
a2e109ef8b6b7d9593705257f86650ac663607e9
diff --git a/src/wcmf/lib/model/ObjectQuery.php b/src/wcmf/lib/model/ObjectQuery.php index <HASH>..<HASH> 100644 --- a/src/wcmf/lib/model/ObjectQuery.php +++ b/src/wcmf/lib/model/ObjectQuery.php @@ -444,13 +444,15 @@ class ObjectQuery extends AbstractQuery { $thisFkAttr = $nmMapper->getAttribute($thisRelationDescription->getFkName()); $thisIdAttr = $mapper->getAttribute($thisRelationDescription->getIdName()); - $joinCondition1 = $nmMapper->getRealTableName().'.'.$thisFkAttr->getColumn().' = '. + $nmAlias = $nmMapper->getRealTableName().ucfirst($childTableName['alias']); + + $joinCondition1 = $nmAlias.'.'.$thisFkAttr->getColumn().' = '. $tpl->getProperty(self::PROPERTY_TABLE_NAME).'.'. $thisIdAttr->getColumn(); $joinCondition2 = $curChild->getProperty(self::PROPERTY_TABLE_NAME).'.'.$otherIdAttr->getColumn().' = '. - $nmMapper->getRealTableName().'.'.$otherFkAttr->getColumn(); + $nmAlias.'.'.$otherFkAttr->getColumn(); - $selectStmt->join($nmMapper->getRealTableName(), $joinCondition1, []); + $selectStmt->join([$nmAlias => $nmMapper->getRealTableName()], $joinCondition1, []); $selectStmt->join([$childTableName['alias'] => $childTableName['name']], $joinCondition2, []); // register the nm type
Fix ObjectQuery for multiple conditions on same nm relation
iherwig_wcmf
train
40bdf63a1c74b71c0eaa22da651d6c6f3dfaf1d6
diff --git a/includes/object-cache.php b/includes/object-cache.php index <HASH>..<HASH> 100644 --- a/includes/object-cache.php +++ b/includes/object-cache.php @@ -466,7 +466,18 @@ class WP_Object_Cache if (defined('WP_REDIS_SHARDS')) { $this->redis = new RedisArray(array_values(WP_REDIS_SHARDS)); } elseif (defined('WP_REDIS_CLUSTER')) { - $this->redis = new RedisCluster(null, array_values(WP_REDIS_CLUSTER)); + $connection_args = [ + null, + array_values(WP_REDIS_CLUSTER), + $parameters['timeout'], + $parameters['read_timeout'], + ]; + + if (isset($parameters['password']) && version_compare($phpredis_version, '4.3.0', '>=')) { + $connection_args[] = $parameters['password']; + } + + $this->redis = new RedisCluster($connection_args); } else { $this->redis = new Redis();
support cluster timeout, read timeout and password
tillkruss_redis-cache
train
f1e2eb9a204b89a3c7b32c3350d1e036dec76cbf
diff --git a/lib/rest.rb b/lib/rest.rb index <HASH>..<HASH> 100644 --- a/lib/rest.rb +++ b/lib/rest.rb @@ -3,6 +3,9 @@ require 'uri' # REST is basically a convenience wrapper around Net::HTTP. It defines a simple and consistant API for doing REST-style # HTTP calls. module REST + # Raised when the remote server disconnects when reading the response + class DisconnectedError < StandardError; end + # Performs a HEAD on a resource. See REST::Request.new for a complete discussion of options. # # response = REST.get('http://example.com/pigeons/12', diff --git a/lib/rest/request.rb b/lib/rest/request.rb index <HASH>..<HASH> 100644 --- a/lib/rest/request.rb +++ b/lib/rest/request.rb @@ -130,7 +130,11 @@ module REST end end - response = http_request.start { |http| http.request(request) } + begin + response = http_request.start { |http| http.request(request) } + rescue EOFError => error + raise REST::DisconnectedError, error.message + end REST::Response.new(response.code, response.instance_variable_get('@header'), response.body) end diff --git a/test/functional_test.rb b/test/functional_test.rb index <HASH>..<HASH> 100644 --- a/test/functional_test.rb +++ b/test/functional_test.rb @@ -28,5 +28,15 @@ else response = REST.get(BASE_URL + '/') response.body.should == "OK!\n" end + + it "does stuff when the server disconnects" do + begin + REST.get(BASE_URL + '/disconnect') + rescue REST::DisconnectedError => e + e.message.should == 'end of file reached' + else + fail + end + end end end \ No newline at end of file diff --git a/test/lib/http_server.rb b/test/lib/http_server.rb index <HASH>..<HASH> 100644 --- a/test/lib/http_server.rb +++ b/test/lib/http_server.rb @@ -40,6 +40,8 @@ class Webserver case path when '/' response(client, :ok, 'OK!') + when '/disconnect' + client.close else response(client, :not_found, "Unknown path: #{path}") end diff --git a/test/rest_request_test.rb b/test/rest_request_test.rb index <HASH>..<HASH> 100644 --- a/test/rest_request_test.rb +++ b/test/rest_request_test.rb @@ -160,4 +160,13 @@ describe "A REST Request" do request.perform }.should.raise(ArgumentError) end + + it "should raise a disconnect errro when the reading the response fails" do + http_request = Net::HTTP.new('example.com') + Net::HTTP.expects(:new).returns(http_request) + http_request.expects(:start).raises(EOFError.new('failed')) + lambda { + REST.get('/something') + }.should.raise(REST::DisconnectedError) + end end \ No newline at end of file
Raise a REST::DisconnectedError when the client is disconnected from the server.
Fingertips_nap
train
f0b843403126c7ab927c78ae5ff99ae1d464c9df
diff --git a/hbc-core/src/main/java/com/twitter/hbc/SitestreamController.java b/hbc-core/src/main/java/com/twitter/hbc/SitestreamController.java index <HASH>..<HASH> 100644 --- a/hbc-core/src/main/java/com/twitter/hbc/SitestreamController.java +++ b/hbc-core/src/main/java/com/twitter/hbc/SitestreamController.java @@ -35,6 +35,7 @@ import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.util.Collection; import static com.twitter.hbc.core.Constants.DEFAULT_CHARSET; @@ -59,7 +60,8 @@ public class SitestreamController { addUsers(streamId, Lists.newArrayList(userId)); } - public void addUsers(String streamId, Iterable<Long> userIds) throws IOException, ControlStreamException { + public void addUsers(String streamId, Collection<Long> userIds) throws IOException, ControlStreamException { + Preconditions.checkArgument(userIds.size() <= 100, "The userId parameter can be supplied with up to 100 user IDs."); Endpoint endpoint = SitestreamEndpoint.addUserEndpoint(streamId); endpoint.addPostParameter(Constants.USER_ID_PARAM, Joiner.on(',').join(userIds)); @@ -71,7 +73,8 @@ public class SitestreamController { removeUsers(streamId, Lists.newArrayList(userId)); } - public void removeUsers(String streamId, Iterable<Long> userIds) throws IOException, ControlStreamException { + public void removeUsers(String streamId, Collection<Long> userIds) throws IOException, ControlStreamException { + Preconditions.checkArgument(userIds.size() <= 100, "The userId parameter can be supplied with up to 100 user IDs."); Endpoint endpoint = SitestreamEndpoint.removeUserEndpoint(streamId); endpoint.addPostParameter(Constants.USER_ID_PARAM, Joiner.on(',').join(userIds));
Added preconditions test to enforce the maximum number of user_id values (<I>) on addUser and removeUser, as per docs at <URL>
twitter_hbc
train
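A rough Python analogue of the guard added in the twitter_hbc commit above — validating the batch size up front before any request is built. The function name and the limit constant are illustrative assumptions, not part of the original Java code:

def add_users(stream_id, user_ids):
    # Illustrative guard: reject oversized batches before doing any work,
    # mirroring Preconditions.checkArgument in the Java diff above.
    MAX_USER_IDS = 100  # assumed limit, standing in for the value in the commit
    if len(user_ids) > MAX_USER_IDS:
        raise ValueError(
            "The user_id parameter can be supplied with up to "
            f"{MAX_USER_IDS} user IDs.")
    # ... build and send the control-stream request here ...
    return ",".join(str(uid) for uid in user_ids)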
b04dd042869f3f15380989af4cea258c5215517c
diff --git a/pythonforandroid/bootstraps/common/build/build.py b/pythonforandroid/bootstraps/common/build/build.py index <HASH>..<HASH> 100644 --- a/pythonforandroid/bootstraps/common/build/build.py +++ b/pythonforandroid/bootstraps/common/build/build.py @@ -309,7 +309,8 @@ main.py that loads it.''') if exists(python_bundle_dir): tar_dirs.append(python_bundle_dir) if get_bootstrap_name() == "webview": - tar_dirs.append('webview_includes') + for asset in listdir('webview_includes'): + shutil.copy(join('webview_includes', asset), join(assets_dir, asset)) for asset in args.assets: asset_src, asset_dest = asset.split(":") diff --git a/pythonforandroid/bootstraps/webview/build/src/main/java/org/kivy/android/PythonActivity.java b/pythonforandroid/bootstraps/webview/build/src/main/java/org/kivy/android/PythonActivity.java index <HASH>..<HASH> 100644 --- a/pythonforandroid/bootstraps/webview/build/src/main/java/org/kivy/android/PythonActivity.java +++ b/pythonforandroid/bootstraps/webview/build/src/main/java/org/kivy/android/PythonActivity.java @@ -159,7 +159,7 @@ public class PythonActivity extends Activity { mWebView = new WebView(PythonActivity.mActivity); mWebView.getSettings().setJavaScriptEnabled(true); mWebView.getSettings().setDomStorageEnabled(true); - mWebView.loadUrl("file:///" + app_root_dir + "/_load.html"); + mWebView.loadUrl("file:///android_asset/_load.html"); mWebView.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT)); mWebView.setWebViewClient(new WebViewClient() {
webview: put webview_includes in assets dir
kivy_python-for-android
train
30853c370050eae11376eb50d9bc3b00629526b5
diff --git a/library/src/main/java/com/akexorcist/roundcornerprogressbar/IconRoundCornerProgressBar.java b/library/src/main/java/com/akexorcist/roundcornerprogressbar/IconRoundCornerProgressBar.java index <HASH>..<HASH> 100644 --- a/library/src/main/java/com/akexorcist/roundcornerprogressbar/IconRoundCornerProgressBar.java +++ b/library/src/main/java/com/akexorcist/roundcornerprogressbar/IconRoundCornerProgressBar.java @@ -82,7 +82,7 @@ public class IconRoundCornerProgressBar extends BaseRoundCornerProgressBar { } @Override - protected void initStyleable(Context context, AttributeSet attrs) { + protected void initStyleable(@NonNull Context context, AttributeSet attrs) { TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.IconRoundCornerProgressBar); iconResource = typedArray.getResourceId(R.styleable.IconRoundCornerProgressBar_rcIconSrc, -1); diff --git a/library/src/main/java/com/akexorcist/roundcornerprogressbar/common/BaseRoundCornerProgressBar.java b/library/src/main/java/com/akexorcist/roundcornerprogressbar/common/BaseRoundCornerProgressBar.java index <HASH>..<HASH> 100644 --- a/library/src/main/java/com/akexorcist/roundcornerprogressbar/common/BaseRoundCornerProgressBar.java +++ b/library/src/main/java/com/akexorcist/roundcornerprogressbar/common/BaseRoundCornerProgressBar.java @@ -22,7 +22,6 @@ import android.annotation.SuppressLint; import android.annotation.TargetApi; import android.content.Context; import android.content.res.TypedArray; -import android.graphics.Color; import android.graphics.drawable.GradientDrawable; import android.os.Build; import android.os.Parcel; @@ -30,12 +29,9 @@ import android.os.Parcelable; import android.util.AttributeSet; import android.util.DisplayMetrics; import android.util.TypedValue; -import android.view.Gravity; import android.view.LayoutInflater; -import android.view.ViewGroup; import android.widget.LinearLayout; import android.widget.RelativeLayout; -import android.widget.TextView; import androidx.annotation.ColorInt; import androidx.annotation.Keep;
Add missing annotation and remove unused imports
akexorcist_Android-RoundCornerProgressBar
train
9c349a57f221c166c4b50d8fe1f169de06ecb5e4
diff --git a/addon/hint/show-hint.js b/addon/hint/show-hint.js index <HASH>..<HASH> 100644 --- a/addon/hint/show-hint.js +++ b/addon/hint/show-hint.js @@ -50,6 +50,7 @@ CodeMirror.showHint = function(cm, getHints, options) { hints.style.left = left + "px"; hints.style.top = top + "px"; document.body.appendChild(hints); + CodeMirror.signal(cm, "hintShowed"); // If we're at the edge of the screen, then we want the menu to appear on the left of the cursor. var winW = window.innerWidth || Math.max(document.body.offsetWidth, document.documentElement.offsetWidth); @@ -154,6 +155,7 @@ CodeMirror.showHint = function(cm, getHints, options) { cm.off("focus", onFocus); cm.off("scroll", onScroll); if (willContinue !== true && data.onClose) data.onClose(); + CodeMirror.signal(cm, "hintClosed"); } function pick() { pickCompletion(cm, data, completions[selectedHint]);
show-hint addon to emit "hintShowed" and "hintClosed" events
codemirror_CodeMirror
train
2c54193cdda7b152135a908c9481dca83f32045d
diff --git a/lib/compass-rails/version.rb b/lib/compass-rails/version.rb index <HASH>..<HASH> 100644 --- a/lib/compass-rails/version.rb +++ b/lib/compass-rails/version.rb @@ -1,5 +1,5 @@ module CompassRails unless defined?(::CompassRails::VERSION) - VERSION = "2.0.alpha.0" + VERSION = "1.1.0.pre" end end
Do version <I>.x for Rails 4 support. Drops support for Rails <I>. Version <I> of compass-rails will be compatible with compass <I>+ when it comes out.
Compass_compass-rails
train
edea346f9a020ff6ca321787bc37e5f98b36e30f
diff --git a/playback/nova_compute.py b/playback/nova_compute.py index <HASH>..<HASH> 100644 --- a/playback/nova_compute.py +++ b/playback/nova_compute.py @@ -25,9 +25,9 @@ class NovaCompute(Task): env.parallel = self.parallel def _install(self, my_ip, rabbit_hosts, rabbit_user, rabbit_pass, auth_uri, auth_url, nova_pass, novncproxy_base_url, glance_api_servers, neutron_endpoint, neutron_pass, rbd_secret_uuid, memcached_servers): - print red(env.host_string + ' | Install nova-compute') + print red(env.host_string + ' | Install nova-compute sysfsutils') sudo('apt-get update') - sudo('apt-get -y install nova-compute') + sudo('apt-get -y install nova-compute sysfsutils') print red(env.host_string + ' | Update /etc/nova/nova.conf') with open('tmp_nova_conf_' + env.host_string, 'w') as f:
Add sysfsutils to resolve failed to attach volume
jiasir_playback
train
b989b7601b9f53c3ad630072d971a92865cb3831
diff --git a/telebot/__init__.py b/telebot/__init__.py index <HASH>..<HASH> 100644 --- a/telebot/__init__.py +++ b/telebot/__init__.py @@ -30,6 +30,25 @@ Module : telebot """ +class Handler: + def __init__(self, callback: {dict, function}, *args, **kwargs): + if type(callback) == dict: + self.callback = getattr(sys.modules[callback["module"]], callback["name"]) + else: + self.callback = callback + + self.args = args + self.kwargs = kwargs + + def __getitem__(self, item): + return getattr(self, item) + + def copy_to_dump(self): + module_ = self.callback.__module__ + name = self.callback.__name__ + return Handler({"module": module_, "name": name}, *self.args, **self.kwargs) + + class Saver: def __init__(self, handlers, filename, delay): self.handlers = handlers @@ -61,15 +80,10 @@ class Saver: with open(filename + ".tmp", file_mode) as file: for id_, handlers_ in handlers.items(): for handler in handlers_: - name = handler['callback'].__name__ - module = handler['callback'].__module__ - - tmp = {"callback": {"module": module, "name": name}, "args": handler["args"], - "kwargs": handler["kwargs"]} if id_ in to_dump.keys(): - to_dump[id_].append(tmp) + to_dump[id_].append(handler.copy_to_dump()) else: - to_dump[id_] = [tmp] + to_dump[id_] = [handler.copy_to_dump()] json.dump(to_dump, file) @@ -87,11 +101,8 @@ class Saver: result = {} for id_, handlers_ in handlers.items(): for handler in handlers_: - name = handler['callback']["name"] - module = handler['callback']["module"] - callback = getattr(sys.modules[module], name) - tmp = {"callback": callback, "args": handler["args"], "kwargs": handler["kwargs"]} + tmp = Handler(handler['callback'], handler["args"], handler["kwargs"]) if int(id_) in result.keys(): result[int(id_)].append(tmp) @@ -1172,9 +1183,9 @@ class TeleBot: parameter, which will contain the replied message. """ if message_id in self.reply_handlers.keys(): - self.reply_handlers[message_id].append({"callback": callback, "args": args, "kwargs": kwargs}) + self.reply_handlers[message_id].append(Handler(callback, *args, **kwargs)) else: - self.reply_handlers[message_id] = [{"callback": callback, "args": args, "kwargs": kwargs}] + self.reply_handlers[message_id] = [Handler(callback, *args, **kwargs)] if self.reply_saver is not None: self.reply_saver.start_save_timer() @@ -1212,9 +1223,9 @@ class TeleBot: :param kwargs: Args to pass in callback func """ if chat_id in self.next_step_handlers.keys(): - self.next_step_handlers[chat_id].append({"callback": callback, "args": args, "kwargs": kwargs}) + self.next_step_handlers[chat_id].append(Handler(callback, *args, **kwargs)) else: - self.next_step_handlers[chat_id] = [{"callback": callback, "args": args, "kwargs": kwargs}] + self.next_step_handlers[chat_id] = [Handler(callback, *args, **kwargs)] if self.next_step_saver is not None: self.next_step_saver.start_save_timer()
Add new class: Handler. Change type of (next step|reply) handlers from dict to Handler. [WIP] update: telebot/__init__.py
eternnoir_pyTelegramBotAPI
train
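A minimal, self-contained Python sketch of the pattern introduced by the pyTelegramBotAPI commit above — wrapping a callback plus its arguments in a small Handler object that can be dumped as a module/name pair and resolved again with getattr. The names and the to_dump helper here are illustrative, not the library's exact API:

import sys

class Handler:
    """Wrap a callback plus the args/kwargs it should later be called with."""
    def __init__(self, callback, *args, **kwargs):
        if isinstance(callback, dict):
            # Resolve a serialized {'module': ..., 'name': ...} reference.
            callback = getattr(sys.modules[callback["module"]], callback["name"])
        self.callback = callback
        self.args = args
        self.kwargs = kwargs

    def to_dump(self):
        # Functions cannot be dumped to JSON, so keep only their importable location.
        ref = {"module": self.callback.__module__, "name": self.callback.__name__}
        return {"callback": ref, "args": self.args, "kwargs": self.kwargs}

def greet(name):
    return "hello " + name

dumped = Handler(greet, "world").to_dump()
restored = Handler(dumped["callback"], *dumped["args"], **dumped["kwargs"])
assert restored.callback(*restored.args, **restored.kwargs) == "hello world"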
bcbe9bd2e598b38296fe23b4827328e15989d939
diff --git a/lib/rdl/types/structural.rb b/lib/rdl/types/structural.rb index <HASH>..<HASH> 100644 --- a/lib/rdl/types/structural.rb +++ b/lib/rdl/types/structural.rb @@ -2,7 +2,7 @@ require_relative 'type' module RDL::Type class StructuralType < Type - attr_reader :map + attr_reader :methods @@cache = {} @@ -10,32 +10,40 @@ module RDL::Type alias :__new__ :new end - def self.new(map) - t = @@cache[map] + def self.new(methods) + t = @@cache[methods] return t if t - t = StructuralType.__new__(map) - return (@@cache[map] = t) # assignment evaluates to t + t = StructuralType.__new__(methods) + return (@@cache[methods] = t) # assignment evaluates to t end # Create a new StructuralType. # - # [+map+] Map from method names as symbols to their types. - def initialize(map) - raise "map can't be empty" if map.empty? - map.each { |m, t| + # [+methods+] Map from method names as symbols to their types. + def initialize(methods) + raise "methods can't be empty" if methods.empty? + methods.each { |m, t| raise RuntimeError, "Method names in StructuralType must be symbols" unless m.instance_of? Symbol raise RuntimeError, "Got #{t.class} where MethodType expected" unless t.instance_of? MethodType } - @map = map + @methods = methods super() end def to_s # :nodoc: - "[ " + @map.each_pair.map { |m, t| "#{m.to_s}: #{t.to_s}" }.sort.join(", ") + " ]" + "[ " + @methods.each_pair.map { |m, t| "#{m.to_s}: #{t.to_s}" }.sort.join(", ") + " ]" end + def <=(other) + # allow width subtyping + other.methods.each_pair.map { |m, t| + return false unless @methods.has_key?(m) && @methods[m] <= t + } + return true + end + def instantiate(inst) - StructuralType.new(Hash[*@map.each_pair.map{ |m, t| [m, t.instantiate(inst)] }.flatten]) + StructuralType.new(Hash[*@methods.each_pair.map { |m, t| [m, t.instantiate(inst)] }.flatten]) end def eql?(other) @@ -43,11 +51,11 @@ module RDL::Type end def ==(other) # :nodoc: - return (other.instance_of? StructuralType) && (other.map == @map) + return (other.instance_of? StructuralType) && (other.methods == @methods) end def hash # :nodoc: - @map.hash + @methods.hash end end end diff --git a/test/test_le.rb b/test/test_le.rb index <HASH>..<HASH> 100644 --- a/test/test_le.rb +++ b/test/test_le.rb @@ -127,6 +127,20 @@ class TestLe < Minitest::Test assert (tbso <= tbso) assert (tbso <= tbos) end + + def test_structural + tso = MethodType.new([@tstring], nil, @tobject) + tos = MethodType.new([@tobject], nil, @tstring) + ts1 = StructuralType.new(m1: tso) + ts2 = StructuralType.new(m1: tos) + assert (ts1 <= ts1) + assert (ts2 <= ts2) + assert (ts2 <= ts1) + assert (not (ts1 <= ts2)) + ts3 = StructuralType.new(m1: tso, m2: tso) # width subtyping + assert (ts3 <= ts1) + assert (not (ts1 <= ts3)) + end # def test_intersection # skip "<= not defined on intersection" diff --git a/test/test_types.rb b/test/test_types.rb index <HASH>..<HASH> 100644 --- a/test/test_types.rb +++ b/test/test_types.rb @@ -141,8 +141,8 @@ class TestTypes < Minitest::Test tm1 = MethodType.new [ta, tb, tc], nil, tnil tm2 = MethodType.new [ta], tm1, tb t1 = StructuralType.new(m1: tm1, m2: tm2) - assert_equal tm1, t1.map[:m1] - assert_equal tm2, t1.map[:m2] + assert_equal tm1, t1.methods[:m1] + assert_equal tm2, t1.methods[:m2] t2 = StructuralType.new(m1: tm1, m2: tm2) assert_equal t1, t2 end
Rename map in StructuralType to methods to avoid confusion. Add <= for StructuralType.
plum-umd_rdl
train
58d4b30ce77fa2448e019782905383585b07d584
diff --git a/tests/ProjectCodeTest.php b/tests/ProjectCodeTest.php index <HASH>..<HASH> 100644 --- a/tests/ProjectCodeTest.php +++ b/tests/ProjectCodeTest.php @@ -32,15 +32,15 @@ class ProjectCodeTest extends BaseTestCase $this->addToAssertionCount(1); return; } - - $this->assertContains( - '@method static void nullOr' . ucfirst($method), - self::$assertDocComment, - sprintf( + $correct = strpos( (string)self::$assertDocComment,'@method static void nullOr' . ucfirst($method)); + if ($correct === false) { + $this->fail(sprintf( 'All methods have a corresponding "nullOr" method, please add the "nullOr%s" method to the class level doc comment.', ucfirst($method) - ) - ); + )); + } + + $this->addToAssertionCount(1); } /** @@ -50,14 +50,16 @@ class ProjectCodeTest extends BaseTestCase */ public function testHasAll($method) { - $this->assertContains( - '@method static void all' . ucfirst($method), - self::$assertDocComment, - sprintf( + $correct = strpos((string) self::$assertDocComment,'@method static void all' . ucfirst($method)); + + if ($correct === false) { + $this->fail(sprintf( 'All methods have a corresponding "all" method, please add the "all%s" method to the class level doc comment.', ucfirst($method) - ) - ); + )); + } + + $this->addToAssertionCount(1); } /** @@ -67,14 +69,15 @@ class ProjectCodeTest extends BaseTestCase */ public function testIsInReadme($method) { - $this->assertContains( - $method, - self::$readmeContent, - sprintf( + $correct = strpos((string) self::$readmeContent,$method); + + if($correct === false) { + $this->fail(sprintf( 'All methods must be documented in the README.md, please add the "%s" method.', - ucfirst($method) - ) - ); + $method + )); + } + $this->addToAssertionCount(1); } /**
Improve the error messages on auto review tests (#<I>). If a new method was added, you would get a few PHPUnit errors with far too much text to quickly see what you have to do. So instead of the PHPUnit assertions, we use `fail` to show only the relevant information.
webmozart_assert
train
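The reasoning in the webmozart/assert commit above (replace a heavyweight assertion with a manual check plus fail() so the failure message stays short) maps directly onto Python's unittest; this is only an illustrative analogue, with an invented fixture, not code from that project:

import unittest

class DocCommentTest(unittest.TestCase):
    doc_comment = "@method static void allString(...)"  # stand-in fixture

    def test_has_all(self):
        method = "string"
        needle = "@method static void all" + method.capitalize()
        # A plain substring check plus fail() keeps the failure output to one
        # short sentence instead of dumping the whole doc comment.
        if needle not in self.doc_comment:
            self.fail(f"Please add the 'all{method.capitalize()}' method "
                      "to the class level doc comment.")

if __name__ == "__main__":
    unittest.main()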
660d2187a9593b996ff401f71f2fa550f27d394c
diff --git a/src/test/java/one/util/streamex/LimiterTest.java b/src/test/java/one/util/streamex/LimiterTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/one/util/streamex/LimiterTest.java +++ b/src/test/java/one/util/streamex/LimiterTest.java @@ -24,6 +24,7 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; import org.junit.Test; + import static org.junit.Assert.*; /** @@ -33,22 +34,26 @@ public class LimiterTest { @Test public void testLimiter() { Comparator<String> cmp = Comparator.nullsFirst(Comparator.comparingInt(String::length)); - exerciseLimiter("str", Arrays.asList("abc", "abgdc", "abd", "a", "fgssdfg", "sfsvsx", null, - "wrffvs", "xcvbxvcb", "sffg", "abe", "adf", "abh"), cmp); - for(int i : new int[] {10, 100, 1000, 10000, 100000}) { - exerciseLimiter("asc, nat, "+i, IntStream.range(0, i).boxed().collect(Collectors.toList()), Comparator.naturalOrder()); - exerciseLimiter("asc, dec, "+i, IntStream.range(0, i).boxed().collect(Collectors.toList()), Comparator.comparingInt(x -> x/10)); - exerciseLimiter("desc, nat, "+i, IntStream.range(0, i).mapToObj(x -> ~x).collect(Collectors.toList()), Comparator.naturalOrder()); - exerciseLimiter("desc, dec, "+i, IntStream.range(0, i).mapToObj(x -> ~x).collect(Collectors.toList()), Comparator.comparingInt(x -> x/10)); - exerciseLimiter("rnd, nat, "+i, new Random(1).ints(i).boxed().collect(Collectors.toList()), Comparator.naturalOrder()); - exerciseLimiter("rnd, dec, "+i, new Random(1).ints(i).boxed().collect(Collectors.toList()), Comparator.comparingInt(x -> x/10)); - exerciseLimiter("rnd2, nat, "+i, new Random(1).ints(i, -1000, 1000).boxed().collect(Collectors.toList()), Comparator.naturalOrder()); - exerciseLimiter("rnd2, dec, "+i, new Random(1).ints(i, -1000, 1000).boxed().collect(Collectors.toList()), Comparator.comparingInt(x -> x/10)); + exerciseLimiter("str", Arrays.asList("abc", "abgdc", "abd", "a", "fgssdfg", "sfsvsx", null, "wrffvs", + "xcvbxvcb", "sffg", "abe", "adf", "abh"), cmp); + for (int i : new int[] { 10, 100, 1000, 10000, 100000 }) { + List<Integer> ascending = IntStream.range(0, i).boxed().collect(Collectors.toList()); + exerciseLimiter("asc, nat, " + i, ascending, Comparator.naturalOrder()); + exerciseLimiter("asc, dec, " + i, ascending, Comparator.comparingInt(x -> x / 10)); + List<Integer> descending = IntStream.range(0, i).mapToObj(x -> ~x).collect(Collectors.toList()); + exerciseLimiter("desc, nat, " + i, descending, Comparator.naturalOrder()); + exerciseLimiter("desc, dec, " + i, descending, Comparator.comparingInt(x -> x / 10)); + List<Integer> random = new Random(1).ints(i).boxed().collect(Collectors.toList()); + exerciseLimiter("rnd, nat, " + i, random, Comparator.naturalOrder()); + exerciseLimiter("rnd, dec, " + i, random, Comparator.comparingInt(x -> x / 10)); + List<Integer> randomRange = new Random(1).ints(i, -1000, 1000).boxed().collect(Collectors.toList()); + exerciseLimiter("rnd2, nat, " + i, randomRange, Comparator.naturalOrder()); + exerciseLimiter("rnd2, dec, " + i, randomRange, Comparator.comparingInt(x -> x / 10)); } } - + public static <T> void exerciseLimiter(String msg, Collection<T> input, Comparator<T> comp) { - for(int limit : new int[] {0, 1, 2, 5, 10, 20, 100, 1000}) { + for (int limit : new int[] { 0, 1, 2, 5, 10, 20, 100, 1000 }) { exerciseLimiter(msg, input, limit, comp); } } @@ -56,8 +61,8 @@ public class LimiterTest { public static <T> void exerciseLimiter(String msg, Collection<T> input, int limit, Comparator<T> comp) { List<T> expected = 
input.stream().sorted(comp).limit(limit).collect(Collectors.toList()); List<T> actual = input.stream().collect(MoreCollectors.least(comp, limit)); - assertEquals("Mismatch (sequential), "+msg+", limit="+limit, expected, actual); + assertEquals("Mismatch (sequential), " + msg + ", limit=" + limit, expected, actual); actual = input.parallelStream().collect(MoreCollectors.least(comp, limit)); - assertEquals("Mismatch (parallel), "+msg+", limit="+limit, expected, actual); + assertEquals("Mismatch (parallel), " + msg + ", limit=" + limit, expected, actual); } }
[#<I>] LimiterTest: extract vars for javac 8<u<I>
amaembo_streamex
train
9ee6f8d6a3e887b5d8440d1d3fb75b35ed6f3d35
diff --git a/src/gfx/shaders.js b/src/gfx/shaders.js index <HASH>..<HASH> 100644 --- a/src/gfx/shaders.js +++ b/src/gfx/shaders.js @@ -271,6 +271,7 @@ varying float vertAlpha;\n\ varying float vertSelect;\n\ uniform vec2 relativePixelSize;\n\ uniform float outlineWidth;\n\ +uniform float outlineOffset;\n\ \n\ void main(void) {\n\ gl_Position = projectionMat * modelviewMat * vec4(attrPos, 1.0);\n\ @@ -281,6 +282,7 @@ void main(void) {\n\ (outlineWidth + 2.0 * step(0.5, attrSelect));\n\ vec2 offset = normal.xy * expansion;\n\ gl_Position.xy += gl_Position.w * offset;\n\ + gl_Position.z += gl_Position.w * outlineOffset;\n\ }', TEXT_VS : '\n\ diff --git a/src/viewer.js b/src/viewer.js index <HASH>..<HASH> 100644 --- a/src/viewer.js +++ b/src/viewer.js @@ -435,6 +435,18 @@ Viewer.prototype = { this._boundDraw = utils.bind(this, this._draw); this._touchHandler = new TouchHandler(this._canvas.domElement(), this, this._cam); + var gl = c.gl(); + var outlineOffset = 0.0; + // in case we have fewer than 24 depth bits, we need to add offset + // the drawn outline a tiny bit, as otherwise the outline appears on + // top of the actual geometry. + if (gl.getParameter(gl.DEPTH_BITS) >= 24) { + outlineOffset = 0.00001; + } + var outlineProg = this._shaderCatalog.outline; + gl.useProgram(outlineProg); + gl.uniform1f(gl.getUniformLocation(outlineProg, 'outlineOffset'), + outlineOffset); var viewer = this; // call init on all registered extensions this._extensions.forEach(function(ext) {
fix precision issue for GPUs that offer less than <I> depth bits
biasmv_pv
train
b1e2e6b8b3f7dd6fdbc424373be21967b38260b6
diff --git a/cohort/lib.php b/cohort/lib.php index <HASH>..<HASH> 100644 --- a/cohort/lib.php +++ b/cohort/lib.php @@ -149,6 +149,7 @@ class cohort_candidate_selector extends user_selector_base { protected function get_options() { $options = parent::get_options(); $options['cohortid'] = $this->cohortid; + $options['file'] = 'cohort/lib.php'; return $options; } } @@ -211,6 +212,7 @@ class cohort_existing_selector extends user_selector_base { protected function get_options() { $options = parent::get_options(); $options['cohortid'] = $this->cohortid; + $options['file'] = 'cohort/lib.php'; return $options; } }
MDL-<I> fixed incorrect options in user selectors
moodle_moodle
train
70ddb2ae33c5f601dac6a7acf0fcd5f8eb24e870
diff --git a/lib/metasploit_data_models/version.rb b/lib/metasploit_data_models/version.rb index <HASH>..<HASH> 100755 --- a/lib/metasploit_data_models/version.rb +++ b/lib/metasploit_data_models/version.rb @@ -4,5 +4,5 @@ module MetasploitDataModels # metasploit-framework/data/sql/migrate to db/migrate in this project, not all models have specs that verify the # migrations (with have_db_column and have_db_index) and certain models may not be shared between metasploit-framework # and pro, so models may be removed in the future. Because of the unstable API the version should remain below 1.0.0 - VERSION = '0.17.1' + VERSION = '0.17.2' end
Bump the version to make the PR management easier
rapid7_metasploit_data_models
train
5642088a23e18d72c3c46ea3a0e2ed793c1fa941
diff --git a/upoints/gpx.py b/upoints/gpx.py index <HASH>..<HASH> 100644 --- a/upoints/gpx.py +++ b/upoints/gpx.py @@ -646,7 +646,7 @@ class Waypoint(_GpxElem): _elem_name = "wpt" -class Waypoints(point.Points): +class Waypoints(point.TimedPoints): """Class for representing a group of `Waypoint` objects :since: 0.8.0 @@ -884,7 +884,7 @@ class Trackpoints(_SegWrap): time_elem = gpx_elem("time") for segment in data.findall(segment_elem): - points = point.Points() + points = point.TimedPoints() for trackpoint in segment.findall(trackpoint_elem): latitude = trackpoint.get("lat") longitude = trackpoint.get("lon") @@ -1042,7 +1042,7 @@ class Routepoints(_SegWrap): time_elem = gpx_elem("time") for route in data.findall(route_elem): - points = point.Points() + points = point.TimedPoints() for routepoint in route.findall(routepoint_elem): latitude = routepoint.get("lat") longitude = routepoint.get("lon")
Use TimedPoints for GPX waypoints.
JNRowe_upoints
train
adb66a5722ce17c04622a196cab98ed2a106f206
diff --git a/holviapi/checkout.py b/holviapi/checkout.py index <HASH>..<HASH> 100644 --- a/holviapi/checkout.py +++ b/holviapi/checkout.py @@ -199,11 +199,30 @@ class CheckoutAPI(object): self.products_api = ProductsAPI(self.connection) self.base_url = str(connection.base_url_fmt + self.base_url_fmt) - def list_orders(self): - """Lists all orders in the system""" + def list_orders(self, **kwargs): + """Lists all orders in the system, returns OrderList you can iterate over. + + Add Holvi supported GET filters via kwargs, API documentation (last update time unknown) says following keys are supported: + + - filter_paid_time_from (datetime): Returns orders that are paid on or after the given datetime. + - filter_paid_time_to (datetime): Returns orders that are paid on or before the given datetime. + - filter_update_time_from (datetime): Returns orders that are updated on or after the given datetime. + - filter_update_time_to (datetime): Returns orders that are updated on or before the given datetime. + - firstname (string): Returns orders where buyer's first name matches the given string (parial, case insensitive match) + - lastname (string): Returns orders where buyer's last name matches the given string (parial, case insensitive match) + - street (string): Returns orders where buyer's street address matches the given string (parial, case insensitive match) + - city (string): Returns orders where buyer's city matches the given string (parial, case insensitive match) + - postcode (string): Returns orders where buyer's postcode matches the given string (parial, case insensitive match) + - country (string): Returns orders where buyer's country matches the given string (parial, case insensitive match) + - email (string): Returns orders where buyer's email address matches the given string (parial, case insensitive match) + - company (string): Returns orders where buyer's company name matches the given string (parial, case insensitive match) + + All times are ISO datetimes, try for example '2016-01-20T00:00:00.0Z'. + + For other kinds of filtering use Pythons filter() function as usual. + """ url = self.base_url + "pool/{pool}/order/".format(pool=self.connection.pool) - # TODO add filtering support - orders = self.connection.make_get(url) + orders = self.connection.make_get(url, params=kwargs) return OrderList(orders, self) def get_order(self, order_code): diff --git a/holviapi/invoicing.py b/holviapi/invoicing.py index <HASH>..<HASH> 100644 --- a/holviapi/invoicing.py +++ b/holviapi/invoicing.py @@ -187,10 +187,27 @@ class InvoiceAPI(object): self.categories_api = CategoriesAPI(self.connection) self.base_url = six.u(connection.base_url_fmt + self.base_url_fmt).format(pool=connection.pool) - def list_invoices(self): - """Lists all invoices in the system""" - # TODO add filtering support (if/when holvi adds it) - invoices = self.connection.make_get(self.base_url) + def list_invoices(self, **kwargs): + """Lists all invoices in the system, returns InvoiceList you can iterate over. 
+ + Add Holvi supported GET filters via kwargs, on 2016.01.13 I was informed following keys are supported: + + - reference (lookup_type='icontains') + - serial (Slightly complicated: try 'prefix-number-year', 'number-year', 'number', 'prefix') + - category (exact) + - receiver (field='receiver.name', lookup_type='icontains') + - subject (lookup_type='icontains') + - status (exact) + - create_time_from (field='create_time', lookup_type='gte') + - create_time_to (field='create_time', lookup_type='lte') + - update_time_from (field='update_time', lookup_type='gte') + - update_time_to (field='update_time', lookup_type='lte') + + All times are ISO datetimes, try for example '2016-01-20T00:00:00.0Z'. + + For other kinds of filtering use Pythons filter() function as usual. + """ + invoices = self.connection.make_get(self.base_url, params=kwargs) return InvoiceList(invoices, self) def get_invoice(self, invoice_code):
Implement filtering support for orders and invoices
rambo_python-holviapi
train
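A hedged usage sketch for the filtering support added in the python-holviapi commit above. The keyword names and ISO datetime format come from the docstrings in the diff; the invoice_api object and the 'overdue' status value are assumptions for illustration:

def overdue_january_invoices(invoice_api):
    # invoice_api is assumed to be an already-constructed holviapi InvoiceAPI.
    return invoice_api.list_invoices(
        status="overdue",                           # exact match (assumed value)
        create_time_from="2016-01-01T00:00:00.0Z",  # ISO datetime, gte
        create_time_to="2016-01-31T23:59:59.0Z",    # ISO datetime, lte
    )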
c323dda4b6c13df429962910d09d08343d63dd77
diff --git a/src/qpdf/object.cpp b/src/qpdf/object.cpp index <HASH>..<HASH> 100644 --- a/src/qpdf/object.cpp +++ b/src/qpdf/object.cpp @@ -280,6 +280,20 @@ QPDFObjectHandle object_get_key(QPDFObjectHandle& h, std::string const& key) return dict.getKey(key); } +void object_set_key(QPDFObjectHandle& h, std::string const& key, QPDFObjectHandle& value) +{ + if (!h.isDictionary() && !h.isStream()) + throw py::value_error("object is not a dictionary or a stream"); + if (value.isNull()) + throw py::value_error("PDF Dictionary keys may not be set to None - use 'del' to remove"); + + // For streams, the actual dictionary is attached to stream object + QPDFObjectHandle dict = h.isStream() ? h.getDict() : h; + + // A stream dictionary has no owner, so use the stream object in this comparison + dict.replaceKey(key, value); +} + std::pair<int, int> object_get_objgen(QPDFObjectHandle &h) { @@ -416,39 +430,15 @@ void init_object(py::module& m) ) .def("__setitem__", [](QPDFObjectHandle &h, std::string const& key, QPDFObjectHandle &value) { - if (!h.isDictionary() && !h.isStream()) - throw py::value_error("object is not a dictionary or a stream"); - - // For streams, the actual dictionary is attached to stream object - QPDFObjectHandle dict = h.isStream() ? h.getDict() : h; - - // if (value.isScalar() || value.isStream()) { - // dict.replaceKey(key, value); - // return; - // } - - // try { - // auto copy = value.shallowCopy(); - // copy.makeDirect(); - // } catch (std::exception &e) { - // throw py::value_error(e.what()); - // } - dict.replaceKey(key, value); + object_set_key(h, key, value); }, "assign dictionary key to new object", py::keep_alive<1, 3>() ) .def("__setitem__", - [](QPDFObjectHandle &h, std::string const& key, py::object &pyvalue) { - if (!h.isDictionary() && !h.isStream()) - throw py::value_error("object is not a dictionary or a stream"); - - // For streams, the actual dictionary is attached to stream object - QPDFObjectHandle dict = h.isStream() ? h.getDict() : h; - + [](QPDFObjectHandle &h, std::string const& key, py::object pyvalue) { auto value = objecthandle_encode(pyvalue); - // A stream dictionary has no owner, so use the stream object in this comparison - dict.replaceKey(key, value); + object_set_key(h, key, value); } ) .def("__delitem__", @@ -484,13 +474,10 @@ void init_object(py::module& m) py::return_value_policy::reference_internal ) .def("__setattr__", - [](QPDFObjectHandle &h, std::string const& name, py::object &pyvalue) { - if (!h.isDictionary() && !h.isStream()) - throw py::attr_error("object is not a dictionary or a stream"); - QPDFObjectHandle dict = h.isStream() ? 
h.getDict() : h; + [](QPDFObjectHandle &h, std::string const& name, py::object pyvalue) { std::string key = "/" + name; auto value = objecthandle_encode(pyvalue); - dict.replaceKey(key, value); + object_set_key(h, key, value); }, "attribute access" ) @@ -577,7 +564,7 @@ void init_object(py::module& m) } ) .def("__setitem__", - [](QPDFObjectHandle &h, int index, py::object &pyvalue) { + [](QPDFObjectHandle &h, int index, py::object pyvalue) { size_t u_index = list_range_check(h, index); auto value = objecthandle_encode(pyvalue); h.setArrayItem(u_index, value); diff --git a/tests/test_image_access.py b/tests/test_image_access.py index <HASH>..<HASH> 100644 --- a/tests/test_image_access.py +++ b/tests/test_image_access.py @@ -8,7 +8,7 @@ import zlib from pikepdf import ( - Pdf, PdfImage, PdfError, Name, Null, + Pdf, PdfImage, PdfError, Name, parse_content_stream, PdfInlineImage, Stream, StreamDataMode ) diff --git a/tests/test_object.py b/tests/test_object.py index <HASH>..<HASH> 100644 --- a/tests/test_object.py +++ b/tests/test_object.py @@ -18,6 +18,10 @@ encode = qpdf._encode roundtrip = qpdf._roundtrip +def test_none(): + assert encode(None) is None + + @given(characters(min_codepoint=0x20, max_codepoint=0x7f)) @example('') def test_ascii_involution(ascii_): @@ -98,6 +102,12 @@ def test_nested_list2(array): assert a == array +def test_list_nones(): + a = pikepdf.Array([1, 2, 3]) + a[1] = None + assert a[1] is None + + def test_stack_depth(): a = [42] for _ in range(100): @@ -219,3 +229,9 @@ class TestRepr: def test_utf16_error(): with pytest.raises(UnicodeEncodeError): str(encode('\ud801')) + + +def test_dictionary_none(): + d = pikepdf.Dictionary({'/One': 1, '/Two': 2}) + with pytest.raises(ValueError): + d['/Two'] = None
Refactor dictionary['key']=value. Remove the ability to set a key to None; invite the user to use del instead. The reason for this is that in PDFs and QPDF setting a key to null is equivalent to deletion. Seems best to remove a potential source of confusion.
pikepdf_pikepdf
train
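Usage sketch for the behaviour described in the pikepdf commit above, based on the test added in the diff: assigning None to a dictionary key now raises ValueError, and removal is spelled with del.

import pikepdf

d = pikepdf.Dictionary({'/One': 1, '/Two': 2})
try:
    d['/Two'] = None    # now raises ValueError: null assignment equals deletion in PDF
except ValueError:
    del d['/Two']       # the supported way to remove a key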
85dfa745b6c2895d968409780c650e783328899f
diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index <HASH>..<HASH> 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -17,6 +17,8 @@ # satpy. If not, see <http://www.gnu.org/licenses/>. """Modifier classes for corrections based on sun and other angles.""" +from __future__ import annotations + import logging import time from datetime import datetime @@ -135,7 +137,6 @@ class SunZenithCorrector(SunZenithCorrectorBase): def _apply_correction(self, proj, coszen): logger.debug("Apply the standard sun-zenith correction [1/cos(sunz)]") - print("Applying sunzen: ", proj.chunks == coszen.chunks) res = proj.copy() res.data = sunzen_corr_cos(proj.data, coszen.data, limit=self.correction_limit, max_sza=self.max_sza) return res
Refactor SZA and cos(SZA) generation to reduce duplicate computations
pytroll_satpy
train
90ef4cca06c8ba05ce114ef42652ea6037d443ea
diff --git a/src/main/java/org/minimalj/backend/sql/TableCreator.java b/src/main/java/org/minimalj/backend/sql/TableCreator.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/minimalj/backend/sql/TableCreator.java +++ b/src/main/java/org/minimalj/backend/sql/TableCreator.java @@ -10,9 +10,9 @@ public class TableCreator { Application.initApplication(args); Application application = Application.getInstance(); - String database = System.getProperty("MjBackendDatabase"); - String user= System.getProperty("MjBackendDataBaseUser", "APP"); - String password = System.getProperty("MjBackendDataBasePassword", "APP"); + String database = System.getProperty("MjSqlDatabase"); + String user = System.getProperty("MjSqlDatabaseUser", "APP"); + String password = System.getProperty("MjSqlDatabasePassword", "APP"); DataSource dataSource = SqlPersistence.mariaDbDataSource(database, user, password); new SqlPersistence(dataSource, SqlPersistence.CREATE_TABLES, application.getEntityClasses());
TableCreator: use same system properties as persistence
BrunoEberhard_minimal-j
train
49cc85120dafe6ab4d5e837000cec14c0803851d
diff --git a/src/store.js b/src/store.js index <HASH>..<HASH> 100644 --- a/src/store.js +++ b/src/store.js @@ -80,6 +80,9 @@ export default class Store { */ clearSelected() { this.getSelectedIds().forEach(id => { + if(this.get(id).created === false) { + this.get(id).onStopDrawing({}); + } this.delete(id); }); }
Trash can should stop drawing. Fixes #<I>
mapbox_mapbox-gl-draw
train
0ef7d4c78afa24d7b90f22836541e062e54cc841
diff --git a/buckets/fields.py b/buckets/fields.py index <HASH>..<HASH> 100644 --- a/buckets/fields.py +++ b/buckets/fields.py @@ -43,7 +43,7 @@ class S3File(object): if not self.committed: name = os.path.join(self.field.upload_to, os.path.basename(self._file.name)) - self.url = self.storage.save(name, self._file) + self.url = self.storage.save(name, self._file.read()) self.committed = True return self.url diff --git a/buckets/test/storage.py b/buckets/test/storage.py index <HASH>..<HASH> 100644 --- a/buckets/test/storage.py +++ b/buckets/test/storage.py @@ -22,7 +22,7 @@ class FakeS3Storage(object): url = '/media/s3/uploads/' + name with open(os.path.join(self.dir, 'uploads', name), 'wb') as dest: - dest.write(content.read()) + dest.write(content) return url diff --git a/buckets/test/views.py b/buckets/test/views.py index <HASH>..<HASH> 100644 --- a/buckets/test/views.py +++ b/buckets/test/views.py @@ -19,6 +19,6 @@ def fake_s3_upload(request): os.makedirs(path) file = request.FILES.get('file') - default_storage.save(key, file) + default_storage.save(key, file.read()) return HttpResponse('', status=204) diff --git a/tests/models.py b/tests/models.py index <HASH>..<HASH> 100644 --- a/tests/models.py +++ b/tests/models.py @@ -4,6 +4,3 @@ from buckets.fields import S3FileField class FileModel(models.Model): s3_file = S3FileField() - - # class Meta: - # app_label = 'example.exampleapp' diff --git a/tests/test_test.py b/tests/test_test.py index <HASH>..<HASH> 100644 --- a/tests/test_test.py +++ b/tests/test_test.py @@ -32,9 +32,7 @@ def test_open(make_dirs): # noqa def test_save(make_dirs): # noqa file = create_file() store = FakeS3Storage() - url = store.save( - 'text.txt', - SimpleUploadedFile('text.txt', open(file.name, 'rb').read())) + url = store.save('text.txt', open(file.name, 'rb').read()) assert url == '/media/s3/uploads/text.txt' assert os.path.isfile( @@ -61,6 +59,14 @@ def test_get_signed_url(): assert 'file.txt' == signed['fields']['key'] +def test_content_via_save(make_dirs): # noqa + store = FakeS3Storage() + txt = 'blah' + content = str.encode(txt) + url = store.save('blah.txt', content) + assert url == '/media/s3/uploads/blah.txt' + + ############################################################################# # URLs
Ensure a bytes-like object is passed to Storage.save()
Cadasta_django-buckets
train
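A small illustration of the calling convention established by the django-buckets commit above — the storage's save() now receives the raw content rather than the uploaded file wrapper. The upload helper and its names are assumptions; only the save(name, content) shape comes from the diff:

def upload(storage, name, fileobj):
    # Pass the *content* (a bytes object), not the file wrapper itself,
    # matching the change from store.save(name, file) to store.save(name, file.read()).
    content = fileobj.read()
    return storage.save(name, content)

# e.g. with any storage object exposing save(name, content):
# url = upload(store, 'text.txt', open('text.txt', 'rb'))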
8d3e5218c20501c810e79f512ef69ad0836e065c
diff --git a/spyder/plugins/workingdirectory/container.py b/spyder/plugins/workingdirectory/container.py index <HASH>..<HASH> 100644 --- a/spyder/plugins/workingdirectory/container.py +++ b/spyder/plugins/workingdirectory/container.py @@ -43,6 +43,9 @@ class WorkingDirectoryToolbarSections: Main = "main_section" +class WorkingDirectoryToolbarItems: + PathComboBox = 'path_combo' + # --- Widgets # ---------------------------------------------------------------------------- class WorkingDirectoryToolbar(ApplicationToolbar): @@ -79,6 +82,7 @@ class WorkingDirectoryContainer(PluginMainContainer): self.pathedit = PathComboBox( self, adjust_to_contents=self.get_conf('working_dir_adjusttocontents'), + id_=WorkingDirectoryToolbarItems.PathComboBox ) # Widget Setup diff --git a/spyder/widgets/comboboxes.py b/spyder/widgets/comboboxes.py index <HASH>..<HASH> 100644 --- a/spyder/widgets/comboboxes.py +++ b/spyder/widgets/comboboxes.py @@ -206,7 +206,7 @@ class PathComboBox(EditableComboBox): """ open_dir = Signal(str) - def __init__(self, parent, adjust_to_contents=False): + def __init__(self, parent, adjust_to_contents=False, id_=None): EditableComboBox.__init__(self, parent) # Replace the default lineedit by a custom one with icon display @@ -227,6 +227,9 @@ class PathComboBox(EditableComboBox): self.sig_tab_pressed.connect(self.tab_complete) self.valid.connect(lineedit.update_status) + if id_ is not None: + self.ID = id_ + # --- Qt overrides def focusInEvent(self, event): """Handle focus in event restoring to display the status icon."""
Update workingdirectory toolbar item ids
spyder-ide_spyder
train
a8f6269755c95392f898e5e1f53596eafe81634f
diff --git a/concrete/single_pages/dashboard/blocks/types/view.php b/concrete/single_pages/dashboard/blocks/types/view.php index <HASH>..<HASH> 100644 --- a/concrete/single_pages/dashboard/blocks/types/view.php +++ b/concrete/single_pages/dashboard/blocks/types/view.php @@ -91,7 +91,7 @@ if ($controller->getAction() == 'inspect') { foreach ($availableBlockTypes as $bt) { $btIcon = $ci->getBlockTypeIconURL($bt); ?> - <li><span><img src="<?= $btIcon ?>" /> <?= t($bt->getBlockTypeName()) ?> + <li><span class="clearfix"><img src="<?= $btIcon ?>" /> <?= t($bt->getBlockTypeName()) ?> <a href="<?= $urlResolver->resolve(['/dashboard/blocks/types', 'install', $bt->getBlockTypeHandle()]) ?>" class="btn pull-right btn-sm btn-default"><?= t('Install') ?></a> </span></li> <?php
Fix ugly hover on 'awaiting installation' items
concrete5_concrete5
train
11960ed1509633c60f4d97ec67c422358969eea3
diff --git a/app/models/concerns/tree.rb b/app/models/concerns/tree.rb index <HASH>..<HASH> 100644 --- a/app/models/concerns/tree.rb +++ b/app/models/concerns/tree.rb @@ -9,4 +9,21 @@ module Tree ids end + + def self.recursively_gather_names(tree_array, ids, names=[]) + tree_array.each do |hash_val| + if ids.empty? + return names + elsif ids.include?(hash_val[:id]) + names << hash_val[:name] + ids.delete(hash_val[:id]) + end + + if hash_val[:children].any? + Tree.recursively_gather_names(hash_val[:children], ids, names) + end + end + + names + end end
Build Recursive Name Gathering method for Trees
cortex-cms_cortex
train
e58dd10469ec073328a9568a2527583d21bba394
diff --git a/testSetup/init.js b/testSetup/init.js index <HASH>..<HASH> 100644 --- a/testSetup/init.js +++ b/testSetup/init.js @@ -1,4 +1,4 @@ -require('@testing-library/jest-dom'); +require('@testing-library/jest-dom/extend-expect'); const { DEFAULT_DESKTOP_MIN_WIDTH, DEFAULT_DESKTOP_LARGE_MIN_WIDTH,
chore(tests): added /extend-expect to @testing-library/jest-dom
mlaursen_react-md
train
6c109c91f7f1998991dd0dfe55758d26856abd67
diff --git a/executor/prepared_test.go b/executor/prepared_test.go index <HASH>..<HASH> 100644 --- a/executor/prepared_test.go +++ b/executor/prepared_test.go @@ -548,6 +548,31 @@ func (s *testPrepareSuite) TestPlanCacheXXX(c *C) { } } +func (s *testSerialSuite) TestIssue28782(c *C) { + store, dom, err := newStoreWithBootstrap() + c.Assert(err, IsNil) + tk := testkit.NewTestKit(c, store) + defer func() { + dom.Close() + store.Close() + }() + orgEnable := plannercore.PreparedPlanCacheEnabled() + defer func() { + plannercore.SetPreparedPlanCache(orgEnable) + }() + plannercore.SetPreparedPlanCache(true) + tk.MustExec("use test") + tk.MustExec("set @@tidb_enable_collect_execution_info=0;") + tk.MustExec("prepare stmt from 'SELECT IF(?, 1, 0);';") + tk.MustExec("set @a=1, @b=null, @c=0") + + tk.MustQuery("execute stmt using @a;").Check(testkit.Rows("1")) + tk.MustQuery("execute stmt using @b;").Check(testkit.Rows("0")) + tk.MustQuery("select @@last_plan_from_cache;").Check(testkit.Rows("1")) + tk.MustQuery("execute stmt using @c;").Check(testkit.Rows("0")) + tk.MustQuery("select @@last_plan_from_cache;").Check(testkit.Rows("1")) +} + func (s *testSerialSuite) TestIssue28087And28162(c *C) { store, dom, err := newStoreWithBootstrap() c.Assert(err, IsNil) diff --git a/expression/constant_fold.go b/expression/constant_fold.go index <HASH>..<HASH> 100644 --- a/expression/constant_fold.go +++ b/expression/constant_fold.go @@ -154,7 +154,7 @@ func foldConstant(expr Expression) (Expression, bool) { if _, ok := unFoldableFunctions[x.FuncName.L]; ok { return expr, false } - if function := specialFoldHandler[x.FuncName.L]; function != nil { + if function := specialFoldHandler[x.FuncName.L]; function != nil && !MaybeOverOptimized4PlanCache(x.GetCtx(), []Expression{expr}) { return function(x) } diff --git a/planner/core/cache.go b/planner/core/cache.go index <HASH>..<HASH> 100644 --- a/planner/core/cache.go +++ b/planner/core/cache.go @@ -160,7 +160,9 @@ func (s FieldSlice) Equal(tps []*types.FieldType) bool { // string types will show up here, and (2) we don't need flen and decimal to be matched exactly to use plan cache tpEqual := (s[i].Tp == tps[i].Tp) || (s[i].Tp == mysql.TypeVarchar && tps[i].Tp == mysql.TypeVarString) || - (s[i].Tp == mysql.TypeVarString && tps[i].Tp == mysql.TypeVarchar) + (s[i].Tp == mysql.TypeVarString && tps[i].Tp == mysql.TypeVarchar) || + // TypeNull should be considered the same as other types. + (s[i].Tp == mysql.TypeNull || tps[i].Tp == mysql.TypeNull) if !tpEqual || s[i].Charset != tps[i].Charset || s[i].Collate != tps[i].Collate { return false }
planner: forbid constant folding when plan cache is enabled (#<I>)
pingcap_tidb
train
5344b31d2809d6d81526330ea38a430f31f3f9bd
diff --git a/vlcp/service/connection/zookeeperdb.py b/vlcp/service/connection/zookeeperdb.py index <HASH>..<HASH> 100644 --- a/vlcp/service/connection/zookeeperdb.py +++ b/vlcp/service/connection/zookeeperdb.py @@ -259,7 +259,7 @@ class ZooKeeperDB(TcpServerBase): if losts or retries: # Should not happend but in case... raise ZooKeeperSessionUnavailable(ZooKeeperSessionStateChanged.DISCONNECTED) - if completes[0].mzxid <= zxid_limit: + if completes[0].stat.mzxid <= zxid_limit: if completes[0].data: self.apiroutine.retvalue = completes[0].data else: @@ -410,7 +410,7 @@ class ZooKeeperDB(TcpServerBase): if losts or retries: raise ZooKeeperSessionUnavailable(ZooKeeperSessionStateChanged.DISCONNECTED) self._check_completes(completes) - server_time = completes[0].mtime * 1000 + server_time = completes[0].stat.mtime * 1000 # Retrieve values for m in client.requests([zk.getchildren(b'/vlcp/kvdb/' + k) for k in escaped_keys], @@ -525,7 +525,7 @@ class ZooKeeperDB(TcpServerBase): if not losts and not retries: break self._check_completes(completes) - server_time = completes[1].mtime * 1000 + server_time = completes[1].stat.mtime * 1000 session_lock = client.session_id values = [] try: @@ -633,7 +633,7 @@ class ZooKeeperDB(TcpServerBase): raise ZooKeeperSessionUnavailable(ZooKeeperSessionStateChanged.DISCONNECTED) self._check_completes(completes) # time limit is 2 minutes ago - time_limit = completes[1].mtime - 120000 + time_limit = completes[1].stat.mtime - 120000 # Get the children list for m in client.requests([zk.getchildren2(recycle_key)], self.apiroutine, 60): @@ -644,10 +644,10 @@ class ZooKeeperDB(TcpServerBase): self._check_completes(completes, (zk.ZOO_ERR_NONODE,)) if completes[0].err == zk.ZOO_ERR_NONODE: continue - can_recycle_parent = (completes[0].mtime < time_limit) - recycle_parent_version = completes[0].version + can_recycle_parent = (completes[0].stat.mtime < time_limit) + recycle_parent_version = completes[0].stat.version children = [(name.rpartition(b'-')[2], name) for name in completes[0].children if name.startswith(b'data')] - other_children = completes[0].numChildren - len(children) + other_children = completes[0].stat.numChildren - len(children) children.sort() # Use a binary search to find the boundary for deletion # We recycle a version if: @@ -670,8 +670,8 @@ class ZooKeeperDB(TcpServerBase): # Might already be recycled recycle_key = None break - if completes[0].ctime < time_limit: - is_empty = (completes[0].dataLength <= 0) + if completes[0].stat.ctime < time_limit: + is_empty = (completes[0].stat.dataLength <= 0) begin = middle + 1 else: end = middle
bug fix: should use .stat.mtime etc.
hubo1016_vlcp
train
baf70f6edc54143313411af6a2622c82e5684c4c
diff --git a/pliers/extractors/api/google.py b/pliers/extractors/api/google.py index <HASH>..<HASH> 100644 --- a/pliers/extractors/api/google.py +++ b/pliers/extractors/api/google.py @@ -205,7 +205,11 @@ class GoogleVideoIntelligenceAPIExtractor(GoogleAPITransformer, VideoExtractor): return request_obj.execute(num_retries=self.num_retries) def _query_operations(self, name): - request_obj = self.service.operations().get(name=name) + if hasattr(self.service.operations(), 'get'): + request_obj = self.service.operations().get(name=name) + else: + request_obj = self.service.projects().locations().\ + operations().get(name=name) return request_obj.execute(num_retries=self.num_retries) def _build_request(self, stim):
Patch operations request for Google Video Intelligence
tyarkoni_pliers
train
cd0314e16e9edc6fd3f332a5259c1d6c31509799
diff --git a/metaknowledge/diffusion.py b/metaknowledge/diffusion.py index <HASH>..<HASH> 100644 --- a/metaknowledge/diffusion.py +++ b/metaknowledge/diffusion.py @@ -33,11 +33,15 @@ def diffusionGraph(source, target, sourceType = "raw", targetType = "raw"): _sourceType_ : `str` - > default `'raw'`, if `'raw'` the returned graph will contain `Records` as source nodes. If it is a WOS tag then the nodes will be of that type. + > Default `'raw'`, if `'raw'` the returned graph will contain `Records` as source nodes. + + > If Records are not wanted then it can be set to a WOS tag, such as `'SO'` (for journals ), to make the nodes into the type of object returned by that tag from Records. _targetType_ : `str` - > default `'raw'`, if `'raw'` the returned graph will contain `Records` as target nodes. If it is a WOS tag of the long name of one then the nodes will be of that type. + > Default `'raw'`, if `'raw'` the returned graph will contain `Records` as target nodes. + + > If Records are not wanted then it can be set to a WOS tag, such as `'SO'` (for journals ), to make the nodes into the type of object returned by that tag from Records. # Returns diff --git a/metaknowledge/record.py b/metaknowledge/record.py index <HASH>..<HASH> 100644 --- a/metaknowledge/record.py +++ b/metaknowledge/record.py @@ -220,7 +220,8 @@ class Record(object): if either is bad False is returned """ if not isinstance(other, Record): - raise RuntimeError("Equality checking between Records and non-Records is not implemented") + + return NotImplemented if self.bad or other.bad: return False else:
Fixed bug with equality checking and made a doc string better. Former-commit-id: 3d3d<I>b0c6d<I>dccadad8ac3d<I>b4ca<I>b6b
networks-lab_metaknowledge
train
3b1abcc2225426d0b3245bbf8b9777b44716f9f7
diff --git a/activesupport/test/core_ext/object/deep_dup_test.rb b/activesupport/test/core_ext/object/deep_dup_test.rb index <HASH>..<HASH> 100644 --- a/activesupport/test/core_ext/object/deep_dup_test.rb +++ b/activesupport/test/core_ext/object/deep_dup_test.rb @@ -53,7 +53,7 @@ class DeepDupTest < ActiveSupport::TestCase def test_deep_dup_with_hash_class_key hash = { Fixnum => 1 } dup = hash.deep_dup - assert_equal dup.keys.length, 1 + assert_equal 1, dup.keys.length end end
Put the assertion arguments in the right order
rails_rails
train
e4c10c96880dccac6009660d142478b6d2560bdb
diff --git a/core/client/app/assets/lib/uploader.js b/core/client/app/assets/lib/uploader.js index <HASH>..<HASH> 100644 --- a/core/client/app/assets/lib/uploader.js +++ b/core/client/app/assets/lib/uploader.js @@ -16,8 +16,8 @@ let UploadUi = function ($dropzone, settings) { })); $.extend(this, { - complete(result) { - function showImage(width, height) { + complete: (result) => { + let showImage = (width, height) => { $dropzone.find('img.js-upload-target').attr({width, height}).css({display: 'block'}); $dropzone.find('.fileupload-loading').remove(); $dropzone.css({height: 'auto'}); @@ -25,9 +25,9 @@ let UploadUi = function ($dropzone, settings) { $('.js-button-accept').prop('disabled', false); this.init(); }); - } + }; - function animateDropzone($img) { + let animateDropzone = ($img) => { $dropzone.animate({opacity: 0}, 250, () => { $dropzone.removeClass('image-uploader').addClass('pre-image-uploader'); $dropzone.css({minHeight: 0}); @@ -36,9 +36,9 @@ let UploadUi = function ($dropzone, settings) { showImage($img.width(), $img.height()); }); }); - } + }; - function preLoadImage() { + let preLoadImage = () => { let $img = $dropzone.find('img.js-upload-target') .attr({src: '', width: 'auto', height: 'auto'}); @@ -49,7 +49,7 @@ let UploadUi = function ($dropzone, settings) { $dropzone.trigger('uploadsuccess', [result]); animateDropzone($img); }).attr('src', result); - } + }; preLoadImage(); }, @@ -82,7 +82,7 @@ let UploadUi = function ($dropzone, settings) { $progress.find('.js-upload-progress-bar').css('width', `${progress}%`); } }, - fail(e, data) { + fail: (e, data) => { /*jshint unused:false*/ $('.js-button-accept').prop('disabled', false); $dropzone.trigger('uploadfailure', [data.result]); @@ -102,7 +102,7 @@ let UploadUi = function ($dropzone, settings) { this.init(); }); }, - done(e, data) { + done: (e, data) => { /*jshint unused:false*/ this.complete(data.result); }
Properly binds "this" in uploader.js. Closes #<I>
TryGhost_Ghost
train
1a287a67e0f50d87dd7781e681693670589c6068
diff --git a/src/walker/semantic_walker.js b/src/walker/semantic_walker.js index <HASH>..<HASH> 100644 --- a/src/walker/semantic_walker.js +++ b/src/walker/semantic_walker.js @@ -214,9 +214,7 @@ sre.SemanticWalker.prototype.right = function() { sre.SemanticWalker.prototype.findFocusOnLevel = function(id) { var focus = this.levels.find( function(x) { - var primary = /** @type {!Node} */(x.getDomPrimary()); - var pid = sre.WalkerUtil.getAttribute( - primary, sre.EnrichMathml.Attribute.ID); - return pid === id.toString();}); + var pid = x.getSemanticPrimary().id; + return pid === id;}); return focus; };
Uses semantic node information for retrieving focus level.
zorkow_speech-rule-engine
train
a6241df281ccdee327c57a2c3dd48477efcd18c6
diff --git a/Serializer/JsonLdNormalizer.php b/Serializer/JsonLdNormalizer.php index <HASH>..<HASH> 100644 --- a/Serializer/JsonLdNormalizer.php +++ b/Serializer/JsonLdNormalizer.php @@ -155,7 +155,7 @@ class JsonLdNormalizer extends AbstractNormalizer ); foreach ($attributes as $attribute => $details) { - if ('id' !== $attribute) { + if ($details['readable'] && 'id' !== $attribute) { $attributeValue = $this->propertyAccessor->getValue($object, $attribute); if ($details['type']) {
Check if the property is readable in the normalizer
api-platform_core
train
cad7093a3175868944acf1d2f62bad523e4f8a41
diff --git a/tests/unit/utils/test_thin.py b/tests/unit/utils/test_thin.py index <HASH>..<HASH> 100644 --- a/tests/unit/utils/test_thin.py +++ b/tests/unit/utils/test_thin.py @@ -15,7 +15,7 @@ from tests.support.mock import ( from salt.ext.six.moves import zip from salt.ext import six -import salt.utils.ssdp as ssdp +from salt.utils import thin import salt.utils.stringutils try: @@ -24,15 +24,34 @@ except ImportError: pytest = None +class SaltSyetemExitException(Exception): + ''' + System + ''' + def __init__(self): + Exception.__init__(self, 'The Dilithium Crystals need to be rotated.') + + @skipIf(NO_MOCK, NO_MOCK_REASON) @skipIf(pytest is None, 'PyTest is missing') class SSHThinTestCase(TestCase): ''' TestCase for SaltSSH-related parts. ''' - def test_get_tops(self): + @patch('salt.exceptions.SaltSystemExit', MagicMock(side_effect=SaltSyetemExitException)) + @patch('salt.utils.thin.log', MagicMock()) + def test_get_ext_tops_cfg_missing_dependencies(self): ''' Test thin.get_tops :return: ''' + cfg = [ + {'namespace': {'path': '/foo', 'dependencies': []}}, + ] + with pytest.raises(Exception) as err: + thin.get_ext_tops(cfg) + assert 'Dilithium Crystals' in str(err) + assert thin.log.error.called + assert 'Missing dependencies' in thin.log.error.call_args[0][0] + assert 'jinja2, yaml, tornado, msgpack' in thin.log.error.call_args[0][0]
Add unit test for missing dependencies on get_ext_tops
saltstack_salt
train
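A stripped-down, generic version of the testing pattern used in the salt commit above — an error path that both logs and raises, exercised with a MagicMock logger, pytest.raises, and assertions on call_args. The validate_config function and its messages are invented for the example; only the pattern mirrors the commit:

from unittest.mock import MagicMock
import pytest

def validate_config(cfg, log):
    # Invented stand-in for the function under test: log, then bail out.
    if not cfg.get("dependencies"):
        log.error("Missing dependencies: %s", "jinja2, yaml, tornado, msgpack")
        raise SystemExit("The Dilithium Crystals need to be rotated.")

def test_missing_dependencies():
    log = MagicMock()
    with pytest.raises(SystemExit) as err:
        validate_config({"dependencies": []}, log)
    assert "Dilithium Crystals" in str(err.value)
    assert log.error.called
    assert "Missing dependencies" in log.error.call_args[0][0]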
db76b42ef50703d218aeccee15b0ab10f5621724
diff --git a/lib/vagrant/environment.rb b/lib/vagrant/environment.rb index <HASH>..<HASH> 100644 --- a/lib/vagrant/environment.rb +++ b/lib/vagrant/environment.rb @@ -115,7 +115,7 @@ module Vagrant # @return [Pathname] def dotfile_path return nil if !root_path - root_path.join(config.global.vagrant.dotfile_name) + root_path.join(File.expand_path(config.global.vagrant.dotfile_name)) end # Returns the collection of boxes for the environment.
Allow the .vagrant dotfile to be moved into a completely different directory tree. I wanted to define my dotfile as: `config.vagrant.dotfile_name = "~/.vagrant-projectname"` and noticed that the full path wasn't expanded as expected. This patch allows the vagrant file to be placed anywhere on the filesystem.
hashicorp_vagrant
train
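The vagrant commit above simply wraps the configured dotfile name in File.expand_path so "~/..." values resolve outside the project root. A rough Python analogue of that idea (the function name and the join-with-root fallback are assumptions, and the exact semantics differ slightly from Ruby's expand_path):

import os

def dotfile_path(root_path, dotfile_name):
    # Expand '~' so a dotfile configured as '~/.vagrant-projectname' lands in
    # the home directory instead of being treated as relative to the project.
    expanded = os.path.expanduser(dotfile_name)
    return expanded if os.path.isabs(expanded) else os.path.join(root_path, expanded)

print(dotfile_path("/project", "~/.vagrant-myproject"))
print(dotfile_path("/project", ".vagrant"))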
772f27304585c3052371460d0d1f8b73b76a51d6
diff --git a/__tests__/yaml-test-suite.js b/__tests__/yaml-test-suite.js index <HASH>..<HASH> 100644 --- a/__tests__/yaml-test-suite.js +++ b/__tests__/yaml-test-suite.js @@ -18,10 +18,6 @@ const matchJson = (stream, json) => { } } -const skipStringify = [ - '6FWR', // Block Scalar Keep -] - testDirs.forEach(dir => { const root = path.resolve(__dirname, 'yaml-test-suite', dir) const name = fs.readFileSync(path.resolve(root, '==='), 'utf8') @@ -40,8 +36,7 @@ testDirs.forEach(dir => { .map(doc => doc.errors.filter(err => !(err instanceof YAMLWarning))) .filter(docErrors => docErrors.length > 0) expect(errors).toHaveLength(0) - if (skipStringify.includes(dir)) return - const src2 = stream.map(doc => String(doc)).join('\n---\n') + const src2 = stream.map(doc => String(doc).replace(/\n$/, '')).join('\n---\n') + '\n' const stream2 = resolve(src2) trace: name, '\nIN\n' + yaml,
Fix test suite stream joiner (Test case 6FWR)
eemeli_yaml
train
4b124f3f8ee34c800af43c8821b43eaaa8e79420
diff --git a/Classes/Command/JobCommandController.php b/Classes/Command/JobCommandController.php index <HASH>..<HASH> 100644 --- a/Classes/Command/JobCommandController.php +++ b/Classes/Command/JobCommandController.php @@ -16,7 +16,7 @@ use TYPO3\FLOW3\Annotations as FLOW3; /** * Job command controller */ -class JobCommandController extends \TYPO3\FLOW3\MVC\Controller\CommandController { +class JobCommandController extends \TYPO3\FLOW3\Cli\CommandController { /** * @FLOW3\Inject diff --git a/Classes/Job/Aspect/DeferMethodCallAspect.php b/Classes/Job/Aspect/DeferMethodCallAspect.php index <HASH>..<HASH> 100644 --- a/Classes/Job/Aspect/DeferMethodCallAspect.php +++ b/Classes/Job/Aspect/DeferMethodCallAspect.php @@ -39,11 +39,11 @@ class DeferMethodCallAspect { protected $processingJob = FALSE; /** - * @param \TYPO3\FLOW3\AOP\JoinPointInterface $joinPoint The current join point + * @param \TYPO3\FLOW3\Aop\JoinPointInterface $joinPoint The current join point * @return void * @FLOW3\Around("methodAnnotatedWith(TYPO3\Jobqueue\Common\Annotations\Defer)") */ - public function queueMerthodCallAsJob(\TYPO3\FLOW3\AOP\JoinPointInterface $joinPoint) { + public function queueMerthodCallAsJob(\TYPO3\FLOW3\Aop\JoinPointInterface $joinPoint) { if ($this->processingJob) { return $joinPoint->getAdviceChain()->proceed($joinPoint); } else {
[TASK] Apply migration TYPO3.FLOW3-<I>. This commit contains the result of applying migration TYPO3.FLOW3-<I> to this package. Migration: TYPO3.FLOW3-<I>
Flowpack_jobqueue-common
train
2df48c715098c1e5710aa38f5ca250b0d455c4fa
diff --git a/web/concrete/src/Asset/CssAsset.php b/web/concrete/src/Asset/CssAsset.php index <HASH>..<HASH> 100644 --- a/web/concrete/src/Asset/CssAsset.php +++ b/web/concrete/src/Asset/CssAsset.php @@ -111,7 +111,7 @@ class CssAsset extends Asset ? $m[1] : substr($m[1], 1, strlen($m[1]) - 2); - if ('/' !== $url[0] && strpos($url, '//') === false) { + if ('/' !== $url[0] && strpos($url, '//') === false && strpos($url, 'data:') !== 0) { $url = $change_prefix.$url; $url = str_replace('/./', '/', $url); do {
Don't change paths of urls that start with data: Former-commit-id: <I>b5e<I>c6be<I>b<I>d9d7b<I>fde<I> Former-commit-id: c<I>a<I>d7e1fc<I>dcc<I>c<I>f9cc<I>
concrete5_concrete5
train
8677b86c0120f5a12516beac7837017d13862a4e
diff --git a/pyemma/_ext/sklearn/base.py b/pyemma/_ext/sklearn/base.py index <HASH>..<HASH> 100644 --- a/pyemma/_ext/sklearn/base.py +++ b/pyemma/_ext/sklearn/base.py @@ -26,6 +26,18 @@ import six from pyemma.util.reflection import getargspec_no_self ############################################################################### +def _first_and_last_element(arr): + """Returns first and last element of numpy array or sparse matrix.""" + if isinstance(arr, np.ndarray) or hasattr(arr, 'data'): + # numpy array or sparse matrix with .data attribute + data = arr.data if sparse.issparse(arr) else arr + return data.flat[0], data.flat[-1] + else: + # Sparse matrices without .data attribute. Only dok_matrix at + # the time of writing, in this case indexing is fast + return arr[0, 0], arr[-1, -1] + + def clone(estimator, safe=True): """Constructs a new estimator with the same parameters. Clone does a deep copy of the model in an estimator @@ -33,9 +45,9 @@ def clone(estimator, safe=True): with the same parameters that has not been fit on any data. Parameters ---------- - estimator: estimator object, or list, tuple or set of objects + estimator : estimator object, or list, tuple or set of objects The estimator or group of estimators to be cloned - safe: boolean, optional + safe : boolean, optional If safe is false, clone will fall back to a deepcopy on objects that are not estimators. """ @@ -62,6 +74,9 @@ def clone(estimator, safe=True): for name in new_object_params: param1 = new_object_params[name] param2 = params_set[name] + if param1 is param2: + # this should always happen + continue if isinstance(param1, np.ndarray): # For most ndarrays, we do not test for complete equality if not isinstance(param2, type(param1)): @@ -74,9 +89,8 @@ def clone(estimator, safe=True): equality_test = ( param1.shape == param2.shape and param1.dtype == param2.dtype - # We have to use '.flat' for 2D arrays - and param1.flat[0] == param2.flat[0] - and param1.flat[-1] == param2.flat[-1] + and (_first_and_last_element(param1) == + _first_and_last_element(param2)) ) else: equality_test = np.all(param1 == param2) @@ -93,19 +107,20 @@ def clone(estimator, safe=True): else: equality_test = ( param1.__class__ == param2.__class__ - and param1.data[0] == param2.data[0] - and param1.data[-1] == param2.data[-1] + and (_first_and_last_element(param1) == + _first_and_last_element(param2)) and param1.nnz == param2.nnz and param1.shape == param2.shape ) else: - new_obj_val = new_object_params[name] - params_set_val = params_set[name] - # The following construct is required to check equality on special - # singletons such as np.nan that are not equal to them-selves: - equality_test = (new_obj_val == params_set_val or - new_obj_val is params_set_val) - if not equality_test: + # fall back on standard equality + equality_test = param1 == param2 + if equality_test: + warnings.warn("Estimator %s modifies parameters in __init__." + " This behavior is deprecated as of 0.18 and " + "support for this behavior will be removed in 0.20." + % type(estimator).__name__, DeprecationWarning) + else: raise RuntimeError('Cannot clone object %s, as the constructor ' 'does not seem to set parameter %s' % (estimator, name))
[ext/sklearn] updated code from sklearn repo
markovmodel_PyEMMA
train
f0840f4ee7e1946dddddd75712dd1a841280acdc
diff --git a/.gitignore b/.gitignore index <HASH>..<HASH> 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,9 @@ npm-debug.log* *.sublime-project *.sublime-workspace +# vscode project +.vscode + # Runtime data pids *.pid diff --git a/src/MUIDataTable.js b/src/MUIDataTable.js index <HASH>..<HASH> 100644 --- a/src/MUIDataTable.js +++ b/src/MUIDataTable.js @@ -540,9 +540,17 @@ class MUIDataTable extends React.Component { }; changeRowsPerPage = rows => { + /** + * After changing rows per page recalculate totalPages and checks its if current page not higher. + * Otherwise sets current page the value of nextTotalPages + */ + const rowCount = this.options.count || this.state.displayData.length; + const nextTotalPages = Math.floor(rowCount / rows); + this.setState( () => ({ rowsPerPage: rows, + page: this.state.page > nextTotalPages ? nextTotalPages : this.state.page, }), () => { this.setTableAction("changeRowsPerPage"); diff --git a/test/MUIDataTable.test.js b/test/MUIDataTable.test.js index <HASH>..<HASH> 100644 --- a/test/MUIDataTable.test.js +++ b/test/MUIDataTable.test.js @@ -381,6 +381,18 @@ describe("<MUIDataTable />", function() { assert.deepEqual(state.rowsPerPage, 10); }); + it("should recalculate page when calling changeRowsPerPage method", () => { + const data = new Array(29).fill("").map(() => ["Joe James", "Test Corp", "Yonkers", "NY"]); + const mountWrapper = mount(shallow(<MUIDataTable columns={columns} data={data} />).get(0)); + const instance = mountWrapper.instance(); + + instance.changePage(2); + instance.changeRowsPerPage(15); + + const state = mountWrapper.state(); + assert.equal(state.page, 1); + }); + it("should update page position when calling changePage method", () => { const shallowWrapper = shallow(<MUIDataTable columns={columns} data={data} />).dive(); const instance = shallowWrapper.instance();
Fix for changing rows per page issue (#<I>) * fix(MUIDataTable): recalculate page after calling changeRowsPerPage fixes #<I> * chore: add folder .vscode to .gitignore
gregnb_mui-datatables
train
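The fix above clamps the current page whenever the rows-per-page value changes, so the page index can never point past the last available page. A minimal Python sketch of the same arithmetic (names are illustrative, not the component's code):

```python
import math

def clamp_page(current_page, row_count, rows_per_page):
    """Keep the zero-based page index valid after rows_per_page changes."""
    # Mirrors the commit's Math.floor(rowCount / rows) upper bound.
    next_total_pages = math.floor(row_count / rows_per_page)
    return min(current_page, next_total_pages)

# 29 rows, currently on page 2 with 10 rows per page; switching to 15 rows
# per page leaves only pages 0 and 1, so the index is clamped to 1.
print(clamp_page(2, 29, 15))  # 1
```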
fa14005aa9c6197cc2e2f2273c98ce2c400b6ebe
diff --git a/test/test_any_instance_of.rb b/test/test_any_instance_of.rb index <HASH>..<HASH> 100644 --- a/test/test_any_instance_of.rb +++ b/test/test_any_instance_of.rb @@ -113,6 +113,7 @@ describe Muack::AnyInstanceOf do obj.f.should.eq 0 end + # Brought from rspec-mocks and it's currently failing on rspec-mocks would 'stub any_instance_of on module extending it self' do mod = Module.new { extend self diff --git a/test/test_prepend.rb b/test/test_prepend.rb index <HASH>..<HASH> 100644 --- a/test/test_prepend.rb +++ b/test/test_prepend.rb @@ -93,4 +93,29 @@ describe 'mock with prepend' do paste :test end + + # Brought from rspec-mocks and it's currently failing on rspec-mocks + # See https://github.com/rspec/rspec-mocks/pull/1218 + would "handle stubbing prepending methods that were only defined on the prepended module" do + to_be_prepended = Module.new do + def value + "#{super}_prepended".to_sym + end + + def value_without_super + :prepended + end + end + + object = Object.new + object.singleton_class.send(:prepend, to_be_prepended) + expect(object.value_without_super).eq :prepended + + stub(object).value_without_super{ :stubbed } + + expect(object.value_without_super).eq :stubbed + + expect(Muack.verify) + expect(object.value_without_super).eq :prepended + end end
Add another test case which fails on rspec-mocks
godfat_muack
train
652763521db03b01503c6a3fd1d2cb5422dd9935
diff --git a/vxlan/driver.go b/vxlan/driver.go index <HASH>..<HASH> 100644 --- a/vxlan/driver.go +++ b/vxlan/driver.go @@ -448,43 +448,56 @@ func (d *Driver) DeleteEndpoint(r *network.DeleteEndpointRequest) error { } log.Debugf("Deleted subinterface: %s", linkName) - netID := r.NetworkID - // If no other subinterfaces of the vxlan exist, delete it too + // Asynchronously check and remove the vxlan interface if nothing else is using it. + go d.cleanup(r.NetworkID) + return nil +} + +func (d *Driver) cleanup(netID string) { links, err := d.getLinks(netID) if err != nil { - return err + log.Errorf("Error getting links: %v", err) + return } VxlanIndex := links.Vxlan.LinkAttrs.Index allLinks, err := netlink.LinkList() if err != nil { - return err + log.Errorf("Error getting all links: %v", err) + return } + // Do nothing if other interfaces are slaves of the vxlan interface for i := range allLinks { if allLinks[i].Attrs().MasterIndex == VxlanIndex { log.Debugf("Interface still attached to vxlan: %v", allLinks[i]) - return nil + return } } - // FIXME: Check for macvlan interfaces with vxlan as parent in every - // docker namespace - docker := d.docker - containers, err := docker.ContainerList(context.Background(), dockertypes.ContainerListOptions{}) + // Do nothing if there are other containers in this network + containers, err := d.docker.ContainerList(context.Background(), dockertypes.ContainerListOptions{}) if err != nil { - return err + log.Errorf("Error getting containers: %v", err) + return } + log.Debugf("%v containers running.", len(containers)) for i := range containers { + log.Debugf("Checking container %v", i) + log.Debugf("container: %v", containers[i]) if _, ok := containers[i].NetworkSettings.Networks[netID]; ok { log.Debugf("Other containers are still connected to this network") - return nil + return } } log.Debugf("No interfaces attached to vxlan: deleting vxlan interface.") - return d.deleteNics(netID) + err = d.deleteNics(netID) + if err != nil { + log.Errorf("Error deleting nics: %v", err) + } + return } func (d *Driver) EndpointInfo(r *network.InfoRequest) (*network.InfoResponse, error) {
remove nics asynchronously, because listing containers blocks while a container is being deleted
TrilliumIT_docker-vxlan-plugin
train
44ffdfe397bd571243bfd66955632d4b0e729679
diff --git a/raiden/network/transport/matrix/transport.py b/raiden/network/transport/matrix/transport.py index <HASH>..<HASH> 100644 --- a/raiden/network/transport/matrix/transport.py +++ b/raiden/network/transport/matrix/transport.py @@ -590,8 +590,7 @@ class MatrixTransport(Runnable): def force_check_address_reachability(self, address: Address) -> AddressReachability: """Force checks an address's reachability bypassing the whitelisting""" user_ids = self.get_user_ids_for_address(address) - self._address_mgr.track_address_presence(address, user_ids) - return self._address_mgr.get_address_reachability(address) + return self._address_mgr.get_reachability_from_matrix(user_ids) def async_start_health_check(self, node_address: Address) -> None: """ diff --git a/raiden/network/transport/matrix/utils.py b/raiden/network/transport/matrix/utils.py index <HASH>..<HASH> 100644 --- a/raiden/network/transport/matrix/utils.py +++ b/raiden/network/transport/matrix/utils.py @@ -322,6 +322,19 @@ class UserAddressManager: self._maybe_address_reachability_changed(address) + def get_reachability_from_matrix(self, user_ids: Iterable[str]) -> AddressReachability: + """ Get the current reachability without any side effects + + Since his does not even do any caching, don't use it for the normal + communication between participants in a channel. + """ + for uid in user_ids: + presence = self._fetch_user_presence(uid) + if USER_PRESENCE_TO_ADDRESS_REACHABILITY[presence] == AddressReachability.REACHABLE: + return AddressReachability.REACHABLE + + return AddressReachability.UNREACHABLE + def _maybe_address_reachability_changed(self, address: Address) -> None: # A Raiden node may have multiple Matrix users, this happens when # Raiden roams from a Matrix server to another. This loop goes over all diff --git a/raiden/tests/integration/long_running/test_token_networks.py b/raiden/tests/integration/long_running/test_token_networks.py index <HASH>..<HASH> 100644 --- a/raiden/tests/integration/long_running/test_token_networks.py +++ b/raiden/tests/integration/long_running/test_token_networks.py @@ -339,7 +339,6 @@ def test_connect_does_not_open_channels_with_offline_nodes(raiden_network, token ) -@pytest.mark.xfail(reason="https://github.com/raiden-network/raiden/issues/5918") @raise_on_failure @pytest.mark.parametrize("number_of_nodes", [3]) @pytest.mark.parametrize("channels_per_node", [0])
Avoid health checking side effects in conn-manager The healthcheck problems in the bf5 scenarios are caused by `force_check_address_reachability`, which is only used in the connection manager. When that is called, the address and its presence become known, which prevents the room from being created in `immediate_health_check_for`. This PR removes all side-effects from `force_check_address_reachability`, thereby resolving this issue. Closes <URL>
raiden-network_raiden
train
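The change above makes the forced reachability check free of side effects: it looks up presence for the relevant user ids and derives reachability without touching any cached address state. A rough Python sketch of that shape — the presence values and lookup callable below are placeholders, not Raiden's real API:

```python
from enum import Enum

class Reachability(Enum):
    REACHABLE = "reachable"
    UNREACHABLE = "unreachable"

def reachability_from_presence(user_ids, fetch_presence):
    """Return REACHABLE if any user id reports an online presence; no caching, no side effects."""
    online_states = {"online", "unavailable"}  # placeholder presence-to-reachable mapping
    for uid in user_ids:
        if fetch_presence(uid) in online_states:
            return Reachability.REACHABLE
    return Reachability.UNREACHABLE

# Example with a stubbed presence lookup:
presences = {"@a:server": "offline", "@b:server": "online"}
print(reachability_from_presence(presences, presences.get))  # Reachability.REACHABLE
```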
e1250cc9a39a9d08068dab7dd85c76eaf4421138
diff --git a/lib/dpl/provider/s3.rb b/lib/dpl/provider/s3.rb index <HASH>..<HASH> 100644 --- a/lib/dpl/provider/s3.rb +++ b/lib/dpl/provider/s3.rb @@ -17,7 +17,7 @@ module DPL def max_threads return @max_threads if @max_threads - if (@max_threads = threads_wanted = options.fetch(:max_threads, DEFAULT_MAX_THREADS)) > MAX_THREADS + if (@max_threads = threads_wanted = options.fetch(:max_threads, DEFAULT_MAX_THREADS).to_i) > MAX_THREADS log "Desired thread count #{threads_wanted} is too large. Using #{MAX_THREADS}." @max_threads = MAX_THREADS end
Fixed configurable "max_thread" issue: an attempt to compare a string with an integer fails.
travis-ci_dpl
train
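The one-line fix above casts the user-supplied option to an integer before comparing it against a numeric limit; a string taken straight from the configuration cannot be compared with an integer. The same failure mode, shown in Python purely as a neutral illustration (the constants are invented, and the original fix is Ruby's `.to_i`):

```python
DEFAULT_MAX_THREADS = 5
MAX_THREADS = 15

def resolve_max_threads(options):
    """Coerce a possibly-string option to int before comparing with the hard limit."""
    wanted = int(options.get("max_threads", DEFAULT_MAX_THREADS))  # the cast mirrors Ruby's .to_i
    return min(wanted, MAX_THREADS)

print(resolve_max_threads({"max_threads": "20"}))  # 15
# Without the cast, '"20" > 15' raises TypeError in Python 3, just as comparing
# a String with an Integer raises ArgumentError in Ruby.
```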
9957b43e7e55882403e268e0f03e589279d0aed0
diff --git a/src/Authenticator/JwtAuthenticator.php b/src/Authenticator/JwtAuthenticator.php index <HASH>..<HASH> 100644 --- a/src/Authenticator/JwtAuthenticator.php +++ b/src/Authenticator/JwtAuthenticator.php @@ -39,6 +39,13 @@ class JwtAuthenticator extends TokenAuthenticator { ]; /** + * Payload data. + * + * @var object|null + */ + protected $_payload; + + /** * @inheritdoc */ public function __construct(IdentifierCollection $identifiers, array $config = []) @@ -65,8 +72,7 @@ class JwtAuthenticator extends TokenAuthenticator { */ public function authenticate(ServerRequestInterface $request, ResponseInterface $response) { - $token = $this->_getToken($request); - $result = $this->_decode($token); + $result = $this->getPayload($request); if (!$result instanceof stdClass) { return new Result(null, Result::FAILURE_CREDENTIAL_INVALID); @@ -95,22 +101,38 @@ class JwtAuthenticator extends TokenAuthenticator { } /** + * Get payload data. + * + * @param \Psr\Http\Message\ServerRequestInterface|null $request Request to get authentication information from. + * @return object|null Payload object on success, null on failure + */ + public function getPayload(ServerRequestInterface $request = null) + { + if (!$request) { + return $this->_payload; + } + + $payload = null; + $token = $this->_getToken($request); + + if ($token) { + $payload = $this->_decodeToken($token); + } + + return $this->_payload = $payload; + } + + /** * Decode JWT token. * * @param string $token JWT token to decode. * @return object|null The JWT's payload as a PHP object, null on failure. */ - protected function _decode($token) + protected function _decodeToken($token) { $config = $this->getConfig(); - $token = str_ireplace($config['tokenPrefix'] . ' ', '', $token); - try { - $payload = JWT::decode($token, $config['key'] ?: $config['salt'], $config['allowedAlgs']); - return $payload; - } catch (Exception $e) { - throw $e; - } + return JWT::decode($token, $config['key'] ?: $config['salt'], $config['allowedAlgs']); } }
Refactoring the JWT authenticator code a little
cakephp_authentication
train
603038a78b4ab85a8733677e9f69a4d4c7c3b9aa
diff --git a/lib/puppetdb_query/mongodb.rb b/lib/puppetdb_query/mongodb.rb index <HASH>..<HASH> 100644 --- a/lib/puppetdb_query/mongodb.rb +++ b/lib/puppetdb_query/mongodb.rb @@ -153,7 +153,7 @@ module PuppetDBQuery # @param node [String] node name # @param facts [Array<String>] get these facts in the result, eg ['fqdn'], empty for all def node_update(node, facts) - connection[nodes_collection].find(_id: node).replace_one(facts, upsert: true) + connection[nodes_collection].find(_id: node).replace_one(facts, upsert: true, bypass_document_validation: false, check_keys: false) rescue ::Mongo::Error::OperationFailure => e # mongodb doesn't support keys with a dot # see https://docs.mongodb.com/manual/reference/limits/#Restrictions-on-Field-Names @@ -161,7 +161,7 @@ module PuppetDBQuery # The dotted field .. in .. is not valid for storage. (57) raise e unless e.message =~ /The dotted field / connection[nodes_collection].find(_id: node).delete_one - connection[nodes_collection].insert_one(facts.merge(_id: node)) + connection[nodes_collection].insert_one(facts.merge(_id: node), check_keys: false) end # delete node data for given node name diff --git a/lib/puppetdb_query/sync.rb b/lib/puppetdb_query/sync.rb index <HASH>..<HASH> 100644 --- a/lib/puppetdb_query/sync.rb +++ b/lib/puppetdb_query/sync.rb @@ -32,7 +32,7 @@ module PuppetDBQuery updater.update3(timestamp - seconds_back) timestamp = ts rescue Timeout::Error - logger.info "syncing puppetdb nodes: now our time is up, we finsh" + logger.info "syncing puppetdb nodes: now our time is up, we finish" return rescue logger.error $!
enable dot in key string, first try
m-31_puppetdb_query
train
eccc05d8b1f008ba8fd54c4ecbd6983722bc3b67
diff --git a/lib/Cake/Utility/Folder.php b/lib/Cake/Utility/Folder.php index <HASH>..<HASH> 100644 --- a/lib/Cake/Utility/Folder.php +++ b/lib/Cake/Utility/Folder.php @@ -413,9 +413,6 @@ class Folder { if ($skipHidden && $name[0] === '.' || isset($exceptions[$name])) { continue; } - if ($name === '.' || $name === '..') { - continue; - } if ($item->isFile()) { $files[] = $item->getPathName(); } else if ($item->isDir()) {
Revert previous commit. Trying to fix jenkins build.
cakephp_cakephp
train
7b285538858a2eb64644d6e463da322cee9daa31
diff --git a/submit/models.py b/submit/models.py index <HASH>..<HASH> 100644 --- a/submit/models.py +++ b/submit/models.py @@ -92,6 +92,8 @@ class Submission(models.Model): def authors_list(self): return [u.get_full_name() for u in self.authors.all()] def can_withdraw(self): + if self.state == self.WITHDRAWN: + return False if self.assignment.hard_deadline < timezone.now(): # Assignment is over return False
Don't show actions for withdrawn submissions
troeger_opensubmit
train
e9238e82d652761c03070783fa6f01c13ddfa081
diff --git a/biplist/__init__.py b/biplist/__init__.py index <HASH>..<HASH> 100644 --- a/biplist/__init__.py +++ b/biplist/__init__.py @@ -391,6 +391,19 @@ class BoolWrapper(object): def __repr__(self): return "<BoolWrapper: %s>" % self.value +class FloatWrapper(object): + _instances = {} + def __new__(klass, value): + # Ensure FloatWrapper(x) for a given float x is always the same object + wrapper = klass._instances.get(value) + if wrapper is None: + wrapper = object.__new__(klass) + wrapper.value = value + klass._instances[value] = wrapper + return wrapper + def __repr__(self): + return "<FloatWrapper: %s>" % self.value + class PlistWriter(object): header = six.b('bplist00bybiplist1.0') file = None @@ -467,6 +480,8 @@ class PlistWriter(object): return self.wrappedTrue else: return self.wrappedFalse + elif isinstance(root, float): + return FloatWrapper(root) elif isinstance(root, set): n = set() for value in root: @@ -522,7 +537,7 @@ class PlistWriter(object): elif isinstance(obj, six.integer_types): size = self.intSize(obj) self.incrementByteCount('intBytes', incr=1+size) - elif isinstance(obj, (float)): + elif isinstance(obj, FloatWrapper): size = self.realSize(obj) self.incrementByteCount('realBytes', incr=1+size) elif isinstance(obj, datetime.datetime): @@ -609,7 +624,7 @@ class PlistWriter(object): root = math.log(bytes, 2) output += pack('!B', (0b0001 << 4) | int(root)) output += self.binaryInt(obj) - elif isinstance(obj, float): + elif isinstance(obj, FloatWrapper): # just use doubles output += pack('!B', (0b0010 << 4) | 3) output += self.binaryReal(obj) @@ -686,7 +701,7 @@ class PlistWriter(object): def binaryReal(self, obj): # just use doubles - result = pack('>d', obj) + result = pack('>d', obj.value) return result def binaryInt(self, obj, bytes=None): diff --git a/tests/test_write.py b/tests/test_write.py index <HASH>..<HASH> 100644 --- a/tests/test_write.py +++ b/tests/test_write.py @@ -62,7 +62,7 @@ class TestWritePlist(unittest.TestCase): def testDictRoot(self): self.roundTrip({'a':1, 'B':'d'}) - def boolsAndIntegersHelper(self, cases): + def mixedNumericTypesHelper(self, cases): result = readPlistFromString(writePlistToString(cases)) for i in range(0, len(cases)): self.assertTrue(cases[i] == result[i]) @@ -73,11 +73,17 @@ class TestWritePlist(unittest.TestCase): self.assertEquals(repr(case), repr(result)) def testBoolsAndIntegersMixed(self): - self.boolsAndIntegersHelper([0, 1, True, False, None]) - self.boolsAndIntegersHelper([False, True, 0, 1, None]) + self.mixedNumericTypesHelper([0, 1, True, False, None]) + self.mixedNumericTypesHelper([False, True, 0, 1, None]) self.reprChecker({'1':[True, False, 1, 0], '0':[1, 2, 0, {'2':[1, 0, False]}]}) self.reprChecker([1, 1, 1, 1, 1, True, True, True, True]) + def testFloatsAndIntegersMixed(self): + self.mixedNumericTypesHelper([0, 1, 1.0, 0.0, None]) + self.mixedNumericTypesHelper([0.0, 1.0, 0, 1, None]) + self.reprChecker({'1':[1.0, 0.0, 1, 0], '0':[1, 2, 0, {'2':[1, 0, 0.0]}]}) + self.reprChecker([1, 1, 1, 1, 1, 1.0, 1.0, 1.0, 1.0]) + def testSetRoot(self): self.roundTrip(set((1, 2, 3)))
issue #2: wrap floats to avoid int-float confusion
wooster_biplist
train
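The wrapper introduced above exists because Python treats numerically equal ints and floats as the same dictionary key, so `1` and `1.0` would collapse into a single entry in the writer's uniquing table. A small stand-alone illustration of that behavior (not biplist's actual writer code):

```python
# Numerically equal int and float compare and hash the same,
# so they collide when used as keys in a uniquing table.
print(1 == 1.0, hash(1) == hash(1.0))  # True True

unique = {}
unique[1] = "int entry"
unique[1.0] = "float entry"
print(len(unique), unique[1])          # 1 float entry

class FloatKey:
    """Identity-style wrapper so floats get their own slot in a uniquing table."""
    def __init__(self, value):
        self.value = value

unique = {}
unique[1] = "int entry"
unique[FloatKey(1.0)] = "float entry"
print(len(unique))                     # 2 -- the int and the wrapped float stay distinct
```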
0a02e93588077c6500306c4b19688a0b5cb91430
diff --git a/edisgo/grid/network.py b/edisgo/grid/network.py index <HASH>..<HASH> 100644 --- a/edisgo/grid/network.py +++ b/edisgo/grid/network.py @@ -1,5 +1,7 @@ from edisgo.data.import_data import import_from_dingo from ..utils import interfaces +from pypsa import Network as PyPSANetwork +from pypsa.io import import_series_from_dataframe class Network: @@ -120,6 +122,7 @@ class Network: components = interfaces.combine_mv_and_lv(mv_components, lv_components) elif mode is 'mv': + # get topology and time series data mv_components = interfaces.mv_to_pypsa(self) mv_components = interfaces.attach_aggregated_lv_components( self, @@ -130,6 +133,38 @@ class Network: timeseries_gen_p, timeseries_gen_q = interfaces.pypsa_generator_timeseries(self, mode='mv') + # create power flow problem and solve it + network = PyPSANetwork() + # TODO: replace input for `set_snapshots` by DatetimeIndex constructed based on user input + network.set_snapshots(timeseries_gen_p.index) + + + for k, components in mv_components.items(): + network.import_components_from_dataframe(components, k) + + # for attr in ['p_set', 'q_set']: + import_series_from_dataframe(network, + timeseries_gen_p, + 'Generator', + 'p_set') + import_series_from_dataframe(network, + timeseries_gen_q, + 'Generator', + 'q_set') + import_series_from_dataframe(network, + timeseries_load_p, + 'Load', + 'p_set') + import_series_from_dataframe(network, + timeseries_load_q, + 'Load', + 'q_set') + + network.export_to_csv_folder('edisgo2pypsa_export') + # TODO: add lines to network + # TODO: maybe 'v_mag_pu_set' is required for buses + # TODO: if missing, add slack generator + network.pf(network.snapshots) elif mode is 'lv': interfaces.lv_to_pypsa(self) else:
Add exemplary PyPSA calls for MV grids
openego_eDisGo
train
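The commit above assembles a PyPSA power-flow problem from the exported topology, attaches generator and load time series, and runs `network.pf()`. The sketch below condenses that flow using the same calls that appear in the diff; the two-bus topology and the numbers are invented placeholders, and depending on the installed PyPSA version the series import may instead be the equivalent `Network.import_series_from_dataframe` method:

```python
import pandas as pd
import pypsa
from pypsa.io import import_series_from_dataframe

snapshots = pd.date_range("2017-01-01", periods=3, freq="H")
network = pypsa.Network()
network.set_snapshots(snapshots)

# Minimal placeholder topology: a slack bus, one line, and one load bus.
network.add("Bus", "bus0", v_nom=20.0)
network.add("Bus", "bus1", v_nom=20.0)
network.add("Line", "line01", bus0="bus0", bus1="bus1", x=0.1, r=0.01)
network.add("Generator", "gen0", bus="bus0", control="Slack")
network.add("Load", "load1", bus="bus1")

# Attach an active-power time series, as the commit does for p_set/q_set.
load_p = pd.DataFrame({"load1": [1.0, 1.2, 0.8]}, index=snapshots)
import_series_from_dataframe(network, load_p, "Load", "p_set")

network.pf(network.snapshots)
print(network.buses_t.v_mag_pu)
```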
5d7f9f22d504642e2965ecc0bda93560d803e9d8
diff --git a/src/Symfony/Component/Yaml/Parser.php b/src/Symfony/Component/Yaml/Parser.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Component/Yaml/Parser.php +++ b/src/Symfony/Component/Yaml/Parser.php @@ -201,7 +201,7 @@ class Parser array_pop($this->refsBeingParsed); } } elseif ( - self::preg_match('#^(?P<key>(?:![^\s]++\s++)?(?:'.Inline::REGEX_QUOTED_STRING.'|(?:!?!php/const:)?[^ \'"\[\{!].*?)) *\:(( |\t)++(?P<value>.+))?$#u', rtrim($this->currentLine), $values) + self::preg_match('#^(?P<key>(?:![^\s]++\s++)?(?:'.Inline::REGEX_QUOTED_STRING.'|[^ \'"\[\{!].*?)) *\:(( |\t)++(?P<value>.+))?$#u', rtrim($this->currentLine), $values) && (!str_contains($values['key'], ' #') || \in_array($values['key'][0], ['"', "'"])) ) { if ($context && 'sequence' == $context) {
[Yaml] Remove legacy parsing rule
symfony_symfony
train
49a59cc6a5e950c56938493da5b23fce77c1cf3f
diff --git a/actionmailer/CHANGELOG.md b/actionmailer/CHANGELOG.md index <HASH>..<HASH> 100644 --- a/actionmailer/CHANGELOG.md +++ b/actionmailer/CHANGELOG.md @@ -1,3 +1,8 @@ +* `assert_emails` in block form use the given number as expected value. + This makes the error message much easier to understand. + + *Yuji Yaginuma* + * Add support for inline images in mailer previews by using an interceptor class to convert cid: urls in image src attributes to data urls. diff --git a/actionmailer/lib/action_mailer/test_helper.rb b/actionmailer/lib/action_mailer/test_helper.rb index <HASH>..<HASH> 100644 --- a/actionmailer/lib/action_mailer/test_helper.rb +++ b/actionmailer/lib/action_mailer/test_helper.rb @@ -34,7 +34,7 @@ module ActionMailer original_count = ActionMailer::Base.deliveries.size yield new_count = ActionMailer::Base.deliveries.size - assert_equal original_count + number, new_count, "#{number} emails expected, but #{new_count - original_count} were sent" + assert_equal number, new_count - original_count, "#{number} emails expected, but #{new_count - original_count} were sent" else assert_equal number, ActionMailer::Base.deliveries.size end diff --git a/actionmailer/test/test_helper_test.rb b/actionmailer/test/test_helper_test.rb index <HASH>..<HASH> 100644 --- a/actionmailer/test/test_helper_test.rb +++ b/actionmailer/test/test_helper_test.rb @@ -112,6 +112,17 @@ class TestHelperMailerTest < ActionMailer::TestCase assert_match(/1 .* but 2/, error.message) end + def test_assert_emails_message + TestHelperMailer.test.deliver_now + error = assert_raise ActiveSupport::TestCase::Assertion do + assert_emails 2 do + TestHelperMailer.test.deliver_now + end + end + assert_match "Expected: 2", error.message + assert_match "Actual: 1", error.message + end + def test_assert_no_emails_failure error = assert_raise ActiveSupport::TestCase::Assertion do assert_no_emails do
`assert_emails` in block form uses the given number as expected value
rails_rails
train
44aee49b9edf03587bb59d0ee8041eae6e5d2304
diff --git a/src/AllowedInclude.php b/src/AllowedInclude.php index <HASH>..<HASH> 100644 --- a/src/AllowedInclude.php +++ b/src/AllowedInclude.php @@ -31,13 +31,24 @@ class AllowedInclude $internalName = Str::camel($internalName ?? $name); return IncludedRelationship::getIndividualRelationshipPathsFromInclude($internalName) - ->flatMap(function (string $relationship) use ($name, $internalName): Collection { - return collect([ - new self($relationship, new IncludedRelationship(), $relationship === $internalName ? $internalName : null), - ]) - ->when(! Str::contains($relationship, '.'), function (Collection $includes) use ($internalName, $relationship) { - return $includes->merge(self::count("{$relationship}Count", $relationship === $internalName ? "{$internalName}Count" : null)); - }); + ->zip(IncludedRelationship::getIndividualRelationshipPathsFromInclude($name)) + ->flatMap(function ($args): Collection { + [$relationship, $alias] = $args; + + $includes = collect([ + new self($alias, new IncludedRelationship, $relationship), + ]); + + if (! Str::contains($relationship, '.')) { + $suffix = config('query-builder.count_suffix'); + + $includes = $includes->merge(self::count( + $alias.$suffix, + $relationship.$suffix + )); + } + + return $includes; }); } diff --git a/src/Concerns/AddsIncludesToQuery.php b/src/Concerns/AddsIncludesToQuery.php index <HASH>..<HASH> 100644 --- a/src/Concerns/AddsIncludesToQuery.php +++ b/src/Concerns/AddsIncludesToQuery.php @@ -22,6 +22,10 @@ trait AddsIncludesToQuery return empty($include); }) ->flatMap(function ($include): Collection { + if ($include instanceof Collection) { + return $include; + } + if ($include instanceof IncludeInterface) { return collect([$include]); } diff --git a/src/Includes/IncludedRelationship.php b/src/Includes/IncludedRelationship.php index <HASH>..<HASH> 100644 --- a/src/Includes/IncludedRelationship.php +++ b/src/Includes/IncludedRelationship.php @@ -33,7 +33,7 @@ class IncludedRelationship implements IncludeInterface public static function getIndividualRelationshipPathsFromInclude(string $include): Collection { return collect(explode('.', $include)) - ->reduce(function ($includes, $relationship) { + ->reduce(function (Collection $includes, string $relationship) { if ($includes->isEmpty()) { return $includes->push($relationship); } diff --git a/tests/IncludeTest.php b/tests/IncludeTest.php index <HASH>..<HASH> 100644 --- a/tests/IncludeTest.php +++ b/tests/IncludeTest.php @@ -294,6 +294,26 @@ class IncludeTest extends TestCase $this->assertEquals(['allowed include'], $exception->allowedIncludes->all()); } + /** @test */ + public function it_can_alias_multiple_allowed_includes() + { + $request = new Request([ + 'include' => 'relatedModelsCount,relationShipAlias', + ]); + + $models = QueryBuilder::for(TestModel::class, $request) + ->allowedIncludes([ + AllowedInclude::count('relatedModelsCount'), + AllowedInclude::relationship('relationShipAlias', 'otherRelatedModels'), + ]) + ->get(); + + $this->assertRelationLoaded($models, 'otherRelatedModels'); + $models->each(function ($model) { + $this->assertNotNull($model->related_models_count); + }); + } + protected function createQueryFromIncludeRequest(string $includes): QueryBuilder { $request = new Request([
Fix alias for allowed includes (#<I>)
spatie_laravel-query-builder
train
35b596a497b207deffdde77d7624a432699f29bf
diff --git a/helios-testing/src/main/java/com/spotify/helios/testing/TemporaryJob.java b/helios-testing/src/main/java/com/spotify/helios/testing/TemporaryJob.java index <HASH>..<HASH> 100644 --- a/helios-testing/src/main/java/com/spotify/helios/testing/TemporaryJob.java +++ b/helios-testing/src/main/java/com/spotify/helios/testing/TemporaryJob.java @@ -150,7 +150,7 @@ public class TemporaryJob { void deploy() { try { // Create job - log.debug("Creating job {}", job.getId()); + log.info("Creating job {}", job.getId().toShortString()); final CreateJobResponse createResponse = get(client.createJob(job)); if (createResponse.getStatus() != CreateJobResponse.Status.OK) { fail(format("Failed to create job %s - %s", job.getId(),
Log for creating job is now at info. It also uses jobId.toShortString() so it's less verbose than printing the full hash.
spotify_helios
train
a3358e185697dc70c3f7b69d2f265dc897629c4f
diff --git a/src/Api/Transaction/GetService.php b/src/Api/Transaction/GetService.php index <HASH>..<HASH> 100644 --- a/src/Api/Transaction/GetService.php +++ b/src/Api/Transaction/GetService.php @@ -68,16 +68,17 @@ class GetService extends Transaction } try { $result = parent::doRequest('transaction/getService'); + + if(isset($result['service']) && empty($result['service']['basePath'])) { + $result['service']['basePath'] = 'https://admin.pay.nl/images'; + } + self::$cache[$cacheKey] = $result; } catch (\Exception $e) { self::$cache[$cacheKey] = $e; throw $e; } - if(isset($result['service']) && empty($result['service']['basePath'])) { - $result['service']['basePath'] = 'https://admin.pay.nl/images'; - } - return $result; } }
Added fix for <I> GetService
paynl_sdk
train
785b429737ca3147d6122374a4e411ffc049a4ab
diff --git a/internal/rest/client.go b/internal/rest/client.go index <HASH>..<HASH> 100644 --- a/internal/rest/client.go +++ b/internal/rest/client.go @@ -242,8 +242,10 @@ func (c *Client) MarkOffline() bool { } if c.HealthCheckFn() { if atomic.CompareAndSwapInt32(&c.connected, offline, online) { - logger.Info("Client %s online", c.url.String()) - atomic.StoreInt64(&c.lastConn, time.Now().UnixNano()) + now := time.Now() + disconnected := now.Sub(c.LastConn()) + logger.Info("Client '%s' re-connected in %s", c.url.String(), disconnected) + atomic.StoreInt64(&c.lastConn, now.UnixNano()) } return }
add reconnect duration, which allows for verifying disconnect intervals (#<I>)
minio_minio
train
4629496fb811f352fcd93b1a7fea39acfb2e7767
diff --git a/dashboard_app/tests.py b/dashboard_app/tests.py index <HASH>..<HASH> 100644 --- a/dashboard_app/tests.py +++ b/dashboard_app/tests.py @@ -1040,7 +1040,7 @@ class BundleStreamDetailViewAnonymousTest(TestCase): "403.html") -class BundleStreamDetailViewAuthenticatedTest(TestCase): +class BundleStreamDetailViewAuthorizedTest(BundleStreamDetailViewAnonymousTest): def setUp(self): super(BundleStreamDetailViewAuthorizedTest, self).setUp()
Fix test class inheritance, thanks Paul!
zyga_json-schema-validator
train
59d36bc1e3ffe2e177a6cc05aa4f30d5d5fdba3f
diff --git a/nunaliit2-js/src/main/js/nunaliit2/n2.mapAndControls.js b/nunaliit2-js/src/main/js/nunaliit2/n2.mapAndControls.js index <HASH>..<HASH> 100644 --- a/nunaliit2-js/src/main/js/nunaliit2/n2.mapAndControls.js +++ b/nunaliit2-js/src/main/js/nunaliit2/n2.mapAndControls.js @@ -5829,23 +5829,26 @@ var MapAndControls = $n2.Class({ var mapLayer = layerInfo.olLayer; var mustReproject = false; - var remoteProjection = mapLayer.projection; - var localProjection = layerInfo.olLayer.map.getProjectionObject(); - if( localProjection - && false == localProjection.equals(remoteProjection) ) { - mustReproject = true; - }; + var remoteProjection = mapLayer.projection; + var localProjection = layerInfo.olLayer.map.getProjectionObject(); + if( localProjection + && false == localProjection.equals(remoteProjection) ) { + mustReproject = true; + }; // Remove features. Remove features that are to be updated - var featureIdsToRemoveMap = {}; - state.removed.forEach(function(f){ - featureIdsToRemoveMap[f.fid] = true; - }); - state.updated.forEach(function(f){ - featureIdsToRemoveMap[f.fid] = true; - }); - var featuresToRemove = []; - var featuresToAdd = []; + var featureIdsToRemoveMap = {}; + state.removed.forEach(function(f){ + featureIdsToRemoveMap[f.fid] = true; + }); + state.updated.forEach(function(f){ + featureIdsToRemoveMap[f.fid] = true; + }); + state.added.forEach(function(f){ + featureIdsToRemoveMap[f.fid] = true; + }); + var featuresToRemove = []; + var featuresToAdd = []; if( mapLayer && mapLayer.features ) { var loop; var features = mapLayer.features; diff --git a/nunaliit2-js/src/main/js/nunaliit2/n2.modelFilter.js b/nunaliit2-js/src/main/js/nunaliit2/n2.modelFilter.js index <HASH>..<HASH> 100644 --- a/nunaliit2-js/src/main/js/nunaliit2/n2.modelFilter.js +++ b/nunaliit2-js/src/main/js/nunaliit2/n2.modelFilter.js @@ -1104,8 +1104,20 @@ var SelectableDocumentFilter = $n2.Class('SelectableDocumentFilter', { var visible = this._computeVisibility(doc); if( visible ){ - docInfo.visible = visible; - added.push(doc); + if( docInfo.visible ){ + // Is visible and used to be visible: update + updated.push(doc); + } else { + // Is visible and did not used to be visible: added + added.push(doc); + }; + } else { + if( docInfo.visible ){ + // Is not visible and used to be visible: remove + removed.push(doc); + } else { + // Is not visible and did not used to be visible: nothing + }; }; }; };
nunaliit2-js: Fix issue where maps driven from model sometimes have duplicate geometries.
GCRC_nunaliit
train
568207870b03df00b0efb5c28570a94e70b78b70
diff --git a/lib/fauxhai/mocker.rb b/lib/fauxhai/mocker.rb index <HASH>..<HASH> 100644 --- a/lib/fauxhai/mocker.rb +++ b/lib/fauxhai/mocker.rb @@ -65,7 +65,11 @@ module Fauxhai path = Pathname.new(filepath) FileUtils.mkdir_p(path.dirname) - File.open(filepath, 'w') { |f| f.write(response_body) } + begin + File.open(filepath, 'w') { |f| f.write(response_body) } + rescue Errno::EACCES # a pretty common problem in CI systems + raise Fauxhai::Exception::InvalidPlatform.new("Fetched '#{platform}/#{version}' from Github, but could could not write the to the local path: #{filepath}") + end return parse_and_validate(response_body) else raise Fauxhai::Exception::InvalidPlatform.new("Could not find platform '#{platform}/#{version}' on the local disk and an Github fetching returned http error code #{response.status.first.to_i}! #{PLATFORM_LIST_MESSAGE}")
Provide a better error message if we can't write to the local disk
chefspec_fauxhai
train
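The change above rescues the permission error that CI systems often hit when caching a fetched platform file and re-raises it as a domain error naming the offending path. A comparable pattern in Python, shown only as a generic illustration (the function and message are invented, not the gem's API):

```python
def cache_platform_data(filepath, response_body):
    """Write fetched platform data to a local cache, failing with a descriptive error."""
    try:
        with open(filepath, "w") as handle:
            handle.write(response_body)
    except PermissionError as exc:  # Python's analogue of Ruby's Errno::EACCES
        raise RuntimeError(
            f"Fetched platform data, but could not write it to the local path: {filepath}"
        ) from exc
    return response_body

# On a read-only location the caller now sees a RuntimeError that names the path,
# instead of a bare permission error with no context.
```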
fd265d1d286dfdd90141509e32cdc58a603935ee
diff --git a/blocks/diff/diff.js b/blocks/diff/diff.js index <HASH>..<HASH> 100644 --- a/blocks/diff/diff.js +++ b/blocks/diff/diff.js @@ -212,7 +212,7 @@ define([ DiffTool.prototype.setContent = function(original, modified, diff, opt_refresh) { var overriddenMode = DiffTool.getModeByContent_(original, modified); - if (overriddenMode) { + if (overriddenMode && !this.hasMode(overriddenMode)) { this.deactivateMode(); this.setMode(overriddenMode); }
Improve setContent method by not reloading editor if mode was not changed. Former-commit-id: <I>da<I>b<I>acc2e<I>fe<I>e1e4ba<I>be
JetBrains_ring-ui
train
acb8b69108d84edfc7e5dd3a8eda0abaea2b8dce
diff --git a/src/CallbackWrapper.php b/src/CallbackWrapper.php index <HASH>..<HASH> 100644 --- a/src/CallbackWrapper.php +++ b/src/CallbackWrapper.php @@ -20,7 +20,7 @@ namespace Icewind\Streams; * ] * ] * - * All callbacks are called before the operation is executed on the source stream + * All callbacks are called after the operation is executed on the source stream */ class CallBackWrapper extends Wrapper { /** @@ -54,23 +54,26 @@ class CallBackWrapper extends Wrapper { } public function stream_read($count) { + $result = parent::stream_read($count); if ($this->readCallback) { call_user_func($this->readCallback, $count); } - return parent::stream_read($count); + return $result; } public function stream_write($data) { + $result = parent::stream_write($data); if ($this->writeCallback) { call_user_func($this->writeCallback, $data); } - return parent::stream_write($data); + return $result; } public function stream_close() { + $result = parent::stream_close(); if ($this->closeCallback) { call_user_func($this->closeCallback); } - return parent::stream_close(); + return $result; } }
call callbacks after running the stream operation
icewind1991_Streams
train
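The commit above moves each callback so it fires after the underlying stream operation while the operation's return value is preserved. The same ordering can be sketched in Python with a thin wrapper object; the class and callback names are illustrative, not the PHP library's API:

```python
import io

class CallbackReader:
    """Wrap a file-like object and invoke a callback *after* each read."""

    def __init__(self, source, on_read=None):
        self.source = source
        self.on_read = on_read

    def read(self, count=-1):
        data = self.source.read(count)  # run the real operation first...
        if self.on_read:
            self.on_read(len(data))     # ...then notify; the callback cannot disturb the result
        return data

reader = CallbackReader(io.StringIO("hello"), on_read=lambda n: print(f"read {n} chars"))
print(reader.read(3))  # prints 'read 3 chars' and then 'hel'
```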
15275eddaceb577a50068e07c43be130a508f66f
diff --git a/cake/libs/route/cake_route.php b/cake/libs/route/cake_route.php index <HASH>..<HASH> 100644 --- a/cake/libs/route/cake_route.php +++ b/cake/libs/route/cake_route.php @@ -274,7 +274,7 @@ class CakeRoute { if ((!isset($options['named']) || !empty($this->options['named'])) && $separatorIsPresent) { list($key, $val) = explode($namedConfig['separator'], $param, 2); $hasRule = isset($rules[$key]); - $passIt = (!$hasRule && !$greedy) || ($hasRule && !Router::matchNamed($key, $val, $rules[$key], $context)); + $passIt = (!$hasRule && !$greedy) || ($hasRule && !$this->_matchNamed($key, $val, $rules[$key], $context)); if ($passIt) { $pass[] = $param; } else { @@ -304,6 +304,38 @@ class CakeRoute { } /** + * Return true if a given named $param's $val matches a given $rule depending on $context. Currently implemented + * rule types are controller, action and match that can be combined with each other. + * + * @param string $param The name of the named parameter + * @param string $val The value of the named parameter + * @param array $rule The rule(s) to apply, can also be a match string + * @param string $context An array with additional context information (controller / action) + * @return boolean + */ + protected function _matchNamed($param, $val, $rule, $context) { + if ($rule === true || $rule === false) { + return $rule; + } + if (is_string($rule)) { + $rule = array('match' => $rule); + } + if (!is_array($rule)) { + return false; + } + + $controllerMatches = !isset($rule['controller'], $context['controller']) || in_array($context['controller'], (array)$rule['controller']); + if (!$controllerMatches) { + return false; + } + $actionMatches = !isset($rule['action'], $context['action']) || in_array($context['action'], (array)$rule['action']); + if (!$actionMatches) { + return false; + } + return (!isset($rule['match']) || preg_match('/' . $rule['match'] . '/', $val)); + } + +/** * Apply persistent parameters to a url array. Persistant parameters are a special * key used during route creation to force route parameters to persist when omitted from * a url array. diff --git a/cake/tests/cases/libs/route/cake_route.test.php b/cake/tests/cases/libs/route/cake_route.test.php index <HASH>..<HASH> 100644 --- a/cake/tests/cases/libs/route/cake_route.test.php +++ b/cake/tests/cases/libs/route/cake_route.test.php @@ -514,7 +514,7 @@ class CakeRouteTestCase extends CakeTestCase { 'named' => array( 'wibble', 'fish' => array('action' => 'index'), - 'fizz' => array('controller' => 'comments'), + 'fizz' => array('controller' => array('comments', 'other')), 'pattern' => 'val-[\d]+' ) ));
Moving matchNamed() into CakeRoute. This will allow its removal from Router.
cakephp_cakephp
train
af2a7ea84098cb3692824b94240329eb534edec8
diff --git a/src/components/Drawer.js b/src/components/Drawer.js index <HASH>..<HASH> 100644 --- a/src/components/Drawer.js +++ b/src/components/Drawer.js @@ -25,7 +25,7 @@ const Drawer = React.createClass({ }, componentDidMount () { - this._animateComponent({ right: 0 }); + this._animateComponent({ left: '20%' }); this._animateBackArrow(); if (this.props.navConfig) { this._animateNav(); @@ -48,7 +48,7 @@ const Drawer = React.createClass({ }, _handleCloseClick () { - this._animateComponent({ right: -800 }) + this._animateComponent({ left: '100%' }) .then(() => { this.props.onClose(); }); @@ -78,7 +78,7 @@ const Drawer = React.createClass({ _animateNav () { const el = this._nav; - const transition = { top: 12 }; + const transition = { top: '50%' }; const options = { delay: this.props.duration, duration: this.props.duration, @@ -111,9 +111,9 @@ const Drawer = React.createClass({ component: { top: 0, bottom: 0, - right: -800, + left: '100%', position: 'absolute', - width: 800, + width: '80%', overflow: 'hidden', backgroundColor: StyleConstants.Colors.PORCELAIN }, @@ -122,21 +122,24 @@ const Drawer = React.createClass({ }, backArrow: { position: 'absolute', - left: -25, - top: 12 + right: '100%', + top: '50%', + transform: 'translateY(-50%)' }, header: { backgroundColor: StyleConstants.Colors.PORCELAIN, borderBottom: 'solid 1px ' + StyleConstants.Colors.FOG, height: 15, - padding: '15px 25px' + padding: '15px 25px', + position: 'relative' }, nav: { fontFamily: StyleConstants.Fonts.THIN, color: StyleConstants.Colors.ASH, position: 'absolute', right: 25, - top: -25 + top: '-100%', + transform: 'translateY(-50%)' } }; }
Ditching magic numbers in favor of percentages for width and animations
mxenabled_mx-react-components
train
9ec8349572015d4af3f4079cc72eef73af98d688
diff --git a/moco-core/src/main/java/com/github/dreamhead/moco/Moco.java b/moco-core/src/main/java/com/github/dreamhead/moco/Moco.java index <HASH>..<HASH> 100644 --- a/moco-core/src/main/java/com/github/dreamhead/moco/Moco.java +++ b/moco-core/src/main/java/com/github/dreamhead/moco/Moco.java @@ -543,7 +543,7 @@ public final class Moco { } public static RequestExtractor<Object> var(final Object text) { - return new PlainExtractor<Object>(checkNotNull(text, "Template variable should not be null or empty")); + return new PlainExtractor<>(checkNotNull(text, "Template variable should not be null or empty")); } public static Failover failover(final String file) {
replaced generic with diamond operator in Moco
dreamhead_moco
train
8553c8563c92ac5f94d02d70d00fa9df3e91bf3f
diff --git a/igor_test.go b/igor_test.go index <HASH>..<HASH> 100644 --- a/igor_test.go +++ b/igor_test.go @@ -327,13 +327,13 @@ func TestJoinsTableSelectDeleteWhere(t *testing.T) { func TestJSON(t *testing.T) { user := createUser() - var emptyJson igor.JSON = make(igor.JSON) + var emptyJSON = make(igor.JSON) - if !reflect.DeepEqual(user.NotifyStory, emptyJson) { - t.Errorf("JSON notifyStory should be empty but got: %s instead of %s\n", user.NotifyStory, emptyJson) + if !reflect.DeepEqual(user.NotifyStory, emptyJSON) { + t.Errorf("JSON notifyStory should be empty but got: %s instead of %s\n", user.NotifyStory, emptyJSON) } - var ns igor.JSON = make(igor.JSON) + var ns = make(igor.JSON) ns["0"] = struct { From uint64 `json:"from"`
igor_test.go: follow go conventions
galeone_igor
train
ed6736d06d0c9a096dc980f1fbc9909877e01dc4
diff --git a/lib/puppet/version.rb b/lib/puppet/version.rb index <HASH>..<HASH> 100644 --- a/lib/puppet/version.rb +++ b/lib/puppet/version.rb @@ -7,7 +7,7 @@ module Puppet - PUPPETVERSION = '3.8.3' + PUPPETVERSION = '3.8.4' ## # version is a public API method intended to always provide a fast and
(packaging) Update PUPPETVERSION to <I>
puppetlabs_puppet
train
8a23e182f26ac05f67dde02db508246dcf326e91
diff --git a/lib/mongo/mongo_client.rb b/lib/mongo/mongo_client.rb index <HASH>..<HASH> 100644 --- a/lib/mongo/mongo_client.rb +++ b/lib/mongo/mongo_client.rb @@ -39,6 +39,7 @@ module Mongo POOL_OPTS = [:pool_size, :pool_timeout] WRITE_CONCERN_OPTS = [:w, :j, :fsync, :wtimeout] CLIENT_ONLY_OPTS = [:slave_ok] + READ_PREFERENCE_OPTS = [:read, :tag_sets, :secondary_acceptable_latency_ms] mongo_thread_local_accessor :connections @@ -55,7 +56,8 @@ module Mongo :socket_class, :op_timeout, :tag_sets, - :acceptable_latency + :acceptable_latency, + :read # Create a connection to single MongoDB instance. # @@ -137,11 +139,11 @@ module Mongo @primary_pool = nil # Not set for direct connection - @tag_sets = {} + @tag_sets = [] @acceptable_latency = 15 check_opts(opts) - setup(opts) + setup(opts.dup) end # DEPRECATED @@ -552,6 +554,7 @@ module Mongo GENERIC_OPTS + CLIENT_ONLY_OPTS + POOL_OPTS + + READ_PREFERENCE_OPTS + WRITE_CONCERN_OPTS + TIMEOUT_OPTS end @@ -566,7 +569,6 @@ module Mongo # Parse option hash def setup(opts) - # slave_ok can be true only if one node is specified @slave_ok = opts.delete(:slave_ok) @ssl = opts.delete(:ssl) @@ -595,33 +597,33 @@ module Mongo @op_timeout = opts.delete(:op_timeout) || nil # Timeout on socket connect. - @connect_timeout = opts.delete(:connect_timeout) || nil + @connect_timeout = opts.delete(:connect_timeout) || 30 - @logger = opts.fetch(:logger, nil) - - # Connection level write concern options. - @write_concern = get_write_concern(opts) + @logger = opts.delete(:logger) || nil if @logger write_logging_startup_message end - if opts.fetch(:connect, true) - connect + # Determine read preference + if defined?(@slave_ok) && (@slave_ok) || defined?(@read_secondary) && @read_secondary + @read = :secondary_preferred + else + @read = opts.delete(:read) || :primary end - end + Mongo::ReadPreference::validate(@read) - private + @tag_sets = opts.delete(:tag_sets) || [] + @acceptable_latency = opts.delete(:secondary_acceptable_latency_ms) || 15 - ## Methods for establishing a connection: + # Connection level write concern options. + @write_concern = get_write_concern(opts) - # If a ConnectionFailure is raised, this method will be called - # to close the connection and reset connection values. - # TODO: evaluate whether this method is actually necessary - def reset_connection - close + connect if opts.fetch(:connect, true) end + private + def check_is_master(node) begin host, port = *node diff --git a/lib/mongo/mongo_replica_set_client.rb b/lib/mongo/mongo_replica_set_client.rb index <HASH>..<HASH> 100644 --- a/lib/mongo/mongo_replica_set_client.rb +++ b/lib/mongo/mongo_replica_set_client.rb @@ -22,14 +22,11 @@ module Mongo class MongoReplicaSetClient < MongoClient REPL_SET_OPTS = [ - :read, :refresh_mode, :refresh_interval, :read_secondary, :rs_name, - :name, - :tag_sets, - :secondary_acceptable_latency_ms + :name ] attr_reader :replica_set_name, @@ -37,9 +34,7 @@ module Mongo :refresh_interval, :refresh_mode, :refresh_version, - :manager, - :tag_sets, - :acceptable_latency + :manager # Create a connection to a MongoDB replica set. # @@ -170,7 +165,7 @@ module Mongo @refresh_mutex = Mutex.new check_opts(opts) - setup(opts) + setup(opts.dup) end def valid_opts @@ -472,20 +467,12 @@ module Mongo "Refresh mode must be either :sync or false." end - # Determine read preference if opts[:read_secondary] warn ":read_secondary options has now been deprecated and will " + "be removed in driver v2.0. Use the :read option instead." 
@read_secondary = opts.delete(:read_secondary) || false - @read = :secondary_preferred - else - @read = opts.delete(:read) || :primary - Mongo::ReadPreference::validate(@read) end - @tag_sets = opts.delete(:tag_sets) || [] - @acceptable_latency = opts.delete(:secondary_acceptable_latency_ms) || 15 - # Replica set name if opts[:rs_name] warn ":rs_name option has been deprecated and will be removed in v2.0. " + @@ -495,8 +482,6 @@ module Mongo @replica_set_name = opts.delete(:name) end - opts[:connect_timeout] = opts.delete(:connect_timeout) || 30 - super opts end
RUBY-<I> move new shared functionality into client
mongodb_mongo-ruby-driver
train
27a43c3b3b6049936065c7ce4e6944adbb29803d
diff --git a/lib/sauce/capybara/cucumber.rb b/lib/sauce/capybara/cucumber.rb index <HASH>..<HASH> 100644 --- a/lib/sauce/capybara/cucumber.rb +++ b/lib/sauce/capybara/cucumber.rb @@ -21,6 +21,12 @@ module Sauce module_function :use_sauce_driver def name_from_scenario(scenario) + # Special behavior to handle Scenario Outlines + if scenario.instance_of? ::Cucumber::Ast::OutlineTable::ExampleRow + table = scenario.instance_variable_get(:@table) + outline = table.instance_variable_get(:@scenario_outline) + return "#{outline.feature.file} - #{outline.title} - #{table.headers} -> #{scenario.name}" + end scenario, feature = _scenario_and_feature_name(scenario) return "#{feature} - #{scenario}" end diff --git a/spec/sauce/capybara/cucumber_spec.rb b/spec/sauce/capybara/cucumber_spec.rb index <HASH>..<HASH> 100644 --- a/spec/sauce/capybara/cucumber_spec.rb +++ b/spec/sauce/capybara/cucumber_spec.rb @@ -66,6 +66,47 @@ module Sauce::Capybara Capybara.stub_chain(:current_session, :driver).and_return(driver) end + context 'with a scenario outline' do + before :each do + $ran_scenario = 0 + end + + let(:feature) do + """ + Feature: A dummy feature with a table + @selenium + Scenario Outline: Mic check + Given a <Number> + When I raise no exceptions + Examples: Numbers + | Number | + | 1 | + | 2 | + """ + end + + it 'should have executed the scenario outline twice' do + define_steps do + Given /^a (\d+)$/ do |number| + $ran_scenario = $ran_scenario + 1 + end + When /^I raise no exceptions$/ do + end + # Set up and invoke our defined Around hook + Around('@selenium') do |scenario, block| + # We need to fully reference the module function here due to a + # change in scoping that will happen to this block courtesy of + # define_steps + Sauce::Capybara::Cucumber.around_hook(scenario, block) + end + end + + run_defined_feature feature + $ran_scenario.should == 2 + end + + end + context 'with a correct scenario' do let(:feature) do """
Handle running the Around hook with Scenario Outlines. Fixes #<I>
saucelabs_sauce_ruby
train
4e2ce7b3fe6ce96edae22520b8b7a6c59cb18452
diff --git a/flink-state-backends/flink-statebackend-changelog/src/main/java/org/apache/flink/state/changelog/ChangelogStateBackend.java b/flink-state-backends/flink-statebackend-changelog/src/main/java/org/apache/flink/state/changelog/ChangelogStateBackend.java index <HASH>..<HASH> 100644 --- a/flink-state-backends/flink-statebackend-changelog/src/main/java/org/apache/flink/state/changelog/ChangelogStateBackend.java +++ b/flink-state-backends/flink-statebackend-changelog/src/main/java/org/apache/flink/state/changelog/ChangelogStateBackend.java @@ -36,6 +36,7 @@ import org.apache.flink.runtime.state.OperatorStateBackend; import org.apache.flink.runtime.state.OperatorStateHandle; import org.apache.flink.runtime.state.StateBackend; import org.apache.flink.runtime.state.changelog.ChangelogStateBackendHandle; +import org.apache.flink.runtime.state.changelog.ChangelogStateBackendHandle.ChangelogStateBackendHandleImpl; import org.apache.flink.runtime.state.changelog.StateChangelogStorage; import org.apache.flink.runtime.state.delegate.DelegatingStateBackend; import org.apache.flink.runtime.state.ttl.TtlTimeProvider; @@ -49,8 +50,12 @@ import org.slf4j.LoggerFactory; import javax.annotation.Nonnull; import java.util.Collection; +import java.util.Objects; import java.util.stream.Collectors; +import static java.util.Collections.emptyList; +import static java.util.Collections.singletonList; + /** * This state backend holds the working state in the underlying delegatedStateBackend, and forwards * state changes to State Changelog. @@ -221,8 +226,21 @@ public class ChangelogStateBackend implements DelegatingStateBackend, Configurab private Collection<ChangelogStateBackendHandle> castHandles( Collection<KeyedStateHandle> stateHandles) { + if (stateHandles.stream().anyMatch(h -> !(h instanceof ChangelogStateBackendHandle))) { + LOG.warn( + "Some state handles do not contain changelog: {} (ok if recovery from a savepoint)", + stateHandles); + } return stateHandles.stream() - .map(keyedStateHandle -> (ChangelogStateBackendHandle) keyedStateHandle) + .filter(Objects::nonNull) + .map( + keyedStateHandle -> + keyedStateHandle instanceof ChangelogStateBackendHandle + ? (ChangelogStateBackendHandle) keyedStateHandle + : new ChangelogStateBackendHandleImpl( + singletonList(keyedStateHandle), + emptyList(), + keyedStateHandle.getKeyGroupRange())) .collect(Collectors.toList()); } }
[FLINK-<I>][state/changelog] Allow recovery without non-materialized state. Motivation: 1. ChangelogBackend.savepoint() currently simply delegates to the underlying backend, so the snapshot doesn't contain non-materialized state. 2. Enable migration to ChangelogBackend from savepoints. 3. Some tests create savepoints directly using underlying backends and then try to recover a regular job from them. This fails with ChangelogBackend enabled.
apache_flink
train