hash
stringlengths
40
40
diff
stringlengths
131
114k
message
stringlengths
7
980
project
stringlengths
5
67
split
stringclasses
1 value
7c02deefae336a44a45cddc3be6167040c8326bd
diff --git a/shinken/daemons/brokerdaemon.py b/shinken/daemons/brokerdaemon.py index <HASH>..<HASH> 100644 --- a/shinken/daemons/brokerdaemon.py +++ b/shinken/daemons/brokerdaemon.py @@ -361,7 +361,18 @@ class Broker(BaseSatellite): # self.schedulers.clear() for sched_id in conf['schedulers']: # Must look if we already have it to do nto overdie our broks - already_got = sched_id in self.schedulers + already_got = False + + # We can already got this conf id, but with another address + if sched_id in self.schedulers: + new_addr = conf['schedulers'][sched_id]['address'] + old_addr = self.schedulers[sched_id]['address'] + new_port = conf['schedulers'][sched_id]['port'] + old_port = self.schedulers[sched_id]['port'] + # Should got all the same to be ok :) + if new_addr == old_addr and new_port == old_port: + already_got = True + if already_got: broks = self.schedulers[sched_id]['broks'] running_id = self.schedulers[sched_id]['running_id'] diff --git a/shinken/dispatcher.py b/shinken/dispatcher.py index <HASH>..<HASH> 100644 --- a/shinken/dispatcher.py +++ b/shinken/dispatcher.py @@ -403,7 +403,6 @@ class Dispatcher: for kind in ( 'reactionner', 'poller', 'broker' ): if r.to_satellites_need_dispatch[kind][cfg_id]: cfg_for_satellite_part = r.to_satellites[kind][cfg_id] - #print "*"*10, "DBG: cfg_for_satellite_part", cfg_for_satellite_part, r.get_name(), cfg_id # make copies of potential_react list for sort satellites = [] @@ -444,16 +443,9 @@ class Dispatcher: # Now we dispatch cfg to every one ask for it nb_cfg_sent = 0 - for satellite in satellites: - # We get the already managed conf of this satellite - # for not resending it the same conf again and again - sat_cfg_ids = satellite.managed_confs - + for satellite in satellites: # Send only if we need, and if we can if nb_cfg_sent < r.get_nb_of_must_have_satellites(kind) and satellite.alive: - #If the satellite already got the conf, we pass the sending - already_got = (cfg_id in sat_cfg_ids) - 
satellite.cfg['schedulers'][cfg_id] = cfg_for_satellite_part if satellite.manage_arbiters: satellite.cfg['arbiters'] = arbiters_cfg @@ -462,11 +454,8 @@ class Dispatcher: if kind == "broker": r.fill_broker_with_poller_reactionner_links(satellite) - if not already_got: - logger.log('[%s] Trying to send configuration to %s %s' %(r.get_name(), kind, satellite.get_name())) - is_sent = satellite.put_conf(satellite.cfg) - else: #already got? so yes, it's sent :) - is_sent = True + logger.log('[%s] Trying to send configuration to %s %s' %(r.get_name(), kind, satellite.get_name())) + is_sent = satellite.put_conf(satellite.cfg) if is_sent: satellite.active = True diff --git a/shinken/satellite.py b/shinken/satellite.py index <HASH>..<HASH> 100644 --- a/shinken/satellite.py +++ b/shinken/satellite.py @@ -778,6 +778,7 @@ we must register our interfaces for 3 possible callers: arbiter, schedulers or b self.new_conf = None self.cur_conf = conf g_conf = conf['global'] + # Got our name from the globals if 'poller_name' in g_conf: name = g_conf['poller_name'] @@ -788,23 +789,33 @@ we must register our interfaces for 3 possible callers: arbiter, schedulers or b self.name = name self.passive = g_conf['passive'] - print "Is passive?", self.passive if self.passive: logger.log("[%s] Passive mode enabled." 
% self.name) # If we've got something in the schedulers, we do not want it anymore for sched_id in conf['schedulers'] : + already_got = False + + # We can already got this conf id, but with another address if sched_id in self.schedulers: + new_addr = conf['schedulers'][sched_id]['address'] + old_addr = self.schedulers[sched_id]['address'] + new_port = conf['schedulers'][sched_id]['port'] + old_port = self.schedulers[sched_id]['port'] + # Should got all the same to be ok :) + if new_addr == old_addr and new_port == old_port: + already_got = True + + if already_got: logger.log("[%s] We already got the conf %d (%s)" % (self.name, sched_id, conf['schedulers'][sched_id]['name'])) - already_got = True wait_homerun = self.schedulers[sched_id]['wait_homerun'] actions = self.schedulers[sched_id]['actions'] + s = conf['schedulers'][sched_id] self.schedulers[sched_id] = s uri = pyro.create_uri(s['address'], s['port'], 'Checks', self.use_ssl) - print "DBG: scheduler UIR:", uri self.schedulers[sched_id]['uri'] = uri if already_got:
Fix : the last enhancement was a bit too optimist. Mush find a better way for conf identification than just ids.
Alignak-monitoring_alignak
train
2942e0fc753aca6a3e62fa738eddc43e41aa5f6b
diff --git a/src/saml2/extension/pefim.py b/src/saml2/extension/pefim.py index <HASH>..<HASH> 100644 --- a/src/saml2/extension/pefim.py +++ b/src/saml2/extension/pefim.py @@ -3,6 +3,7 @@ import saml2 from saml2 import SamlBase from xmldsig import X509Data +from xmldsig import KeyInfo NAMESPACE = 'urn:net:eustix:names:tc:PEFIM:0.0:assertion' @@ -16,11 +17,16 @@ class SPCertEncType_(SamlBase): c_attributes = SamlBase.c_attributes.copy() c_child_order = SamlBase.c_child_order[:] c_cardinality = SamlBase.c_cardinality.copy() - c_children['{http://www.w3.org/2000/09/xmldsig#}X509Data'] = ('x509_data', - [X509Data]) + c_children['{http://www.w3.org/2000/09/xmldsig#}KeyInfo'] = ('key_info', + [KeyInfo]) + c_cardinality['key_info'] = {"min": 1} + c_attributes['VerifyDepth'] = ('verify_depth', 'unsignedByte', False) + c_child_order.extend(['key_info']) def __init__(self, + key_info=None, x509_data=None, + verify_depth='1', text=None, extension_elements=None, extension_attributes=None): @@ -28,7 +34,14 @@ class SPCertEncType_(SamlBase): text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) - self.x509_data = x509_data + if key_info: + self.key_info = key_info + elif x509_data: + self.key_info = KeyInfo(x509_data=x509_data) + else: + self.key_info = [] + self.verify_depth = verify_depth + #self.x509_data = x509_data def spcertenc_type__from_string(xml_string): diff --git a/src/saml2/sigver.py b/src/saml2/sigver.py index <HASH>..<HASH> 100644 --- a/src/saml2/sigver.py +++ b/src/saml2/sigver.py @@ -21,6 +21,7 @@ from Crypto.Util.asn1 import DerSequence from Crypto.PublicKey import RSA from saml2.cert import OpenSSLWrapper from saml2.extension import pefim +from saml2.extension.pefim import SPCertEnc from saml2.saml import EncryptedAssertion import xmldsig as ds @@ -1063,19 +1064,24 @@ def encrypt_cert_from_item(item): try: _elem = extension_elements_to_elements(item.extension_elements[0].children, [pefim, ds]) - if len(_elem) == 1: - 
_encrypt_cert = _elem[0].x509_data[0].x509_certificate.text - else: - certs = cert_from_instance(item) - if len(certs) > 0: - _encrypt_cert = certs[0] - except Exception: + for _tmp_elem in _elem: + if isinstance(_tmp_elem, SPCertEnc): + for _tmp_key_info in _tmp_elem.key_info: + if _tmp_key_info.x509_data is not None and len(_tmp_key_info.x509_data) > 0: + _encrypt_cert = _tmp_key_info.x509_data[0].x509_certificate.text + break + #_encrypt_cert = _elem[0].x509_data[0].x509_certificate.text +# else: +# certs = cert_from_instance(item) +# if len(certs) > 0: +# _encrypt_cert = certs[0] + except Exception as _exception: pass - if _encrypt_cert is None: - certs = cert_from_instance(item) - if len(certs) > 0: - _encrypt_cert = certs[0] +# if _encrypt_cert is None: +# certs = cert_from_instance(item) +# if len(certs) > 0: +# _encrypt_cert = certs[0] if _encrypt_cert is not None: if _encrypt_cert.find("-----BEGIN CERTIFICATE-----\n") == -1: diff --git a/tests/test_82_pefim.py b/tests/test_82_pefim.py index <HASH>..<HASH> 100644 --- a/tests/test_82_pefim.py +++ b/tests/test_82_pefim.py @@ -48,5 +48,5 @@ _elem = extension_elements_to_elements(parsed.extensions.extension_elements, assert len(_elem) == 1 _spcertenc = _elem[0] -_cert = _spcertenc.x509_data[0].x509_certificate.text +_cert = _spcertenc.key_info[0].x509_data[0].x509_certificate.text assert cert == _cert
Correction for PEFIM. <AuthnRequest> elements must contain the encryption certificate used to encrypt the assertion with the attribute statement. The encryption key is represented within a <ds:KeyInfo> element. Its XPath is: /samlp:AuthnRequest/samlp:extension/pefim:SPCertEnc/ds:KeyInfo/ ds:X<I>Data/ds:X<I>Certificate.
IdentityPython_pysaml2
train
f4e32db81abb0b242e0ccadf2f2d87a011eeb5e6
diff --git a/ddg.py b/ddg.py index <HASH>..<HASH> 100644 --- a/ddg.py +++ b/ddg.py @@ -12,7 +12,8 @@ l.setLevel(logging.DEBUG) MAX_BBL_ANALYZE_TIMES = 4 class DDG(object): - def __init__(self, cfg, entry_point): + def __init__(self, project, cfg, entry_point): + self._project = project self._cfg = cfg self._entry_point = entry_point @@ -224,7 +225,9 @@ class DDG(object): continue else: scanned_runs[run] += 1 - new_run = run.reanalyze(new_state=current_run_wrapper.new_state) + # new_run = run.reanalyze(new_state=current_run_wrapper.new_state) + # FIXME: Now we are always generating new SimRun to avoid issues in ARM mode + new_run = self._project.sim_run(self._project.exit_to(run.addr, state=current_run_wrapper.new_state)) l.debug("Scanning %s", new_run) reanalyze_successors_flag = current_run_wrapper.reanalyze_successors
Avoided issues in reusing IRSB in ARM mode.
angr_angr
train
50bf73062e1363c33b8fc25e4ce147b100c96a5d
diff --git a/libfs/file.go b/libfs/file.go index <HASH>..<HASH> 100644 --- a/libfs/file.go +++ b/libfs/file.go @@ -78,7 +78,7 @@ func (f *File) ReadAt(p []byte, off int64) (n int, err error) { if err != nil { return 0, err } - if int(readBytes) < n { + if int(readBytes) < len(p) { // ReadAt is more strict than Read. return 0, errors.Errorf("Could only read %d bytes", readBytes) } @@ -110,7 +110,7 @@ func (f *File) Seek(offset int64, whence int) (n int64, err error) { return 0, errors.Errorf("Cannot seek to offset %d", newOffset) } - _ = atomic.SwapInt64(&f.offset, offset) + _ = atomic.SwapInt64(&f.offset, newOffset) return newOffset, nil } diff --git a/libfs/fs.go b/libfs/fs.go index <HASH>..<HASH> 100644 --- a/libfs/fs.go +++ b/libfs/fs.go @@ -136,6 +136,8 @@ func (fs *FS) OpenFile(filename string, flag int, perm os.FileMode) ( parts := strings.Split(filename, "/") n := fs.root + // Iterate through each of the parent directories of the file, but + // not the file itself. for i := 0; i < len(parts)-1; i++ { p := parts[i] var ei libkbfs.EntryInfo @@ -164,7 +166,9 @@ func (fs *FS) OpenFile(filename string, flag int, perm os.FileMode) ( offset := int64(0) if flag&os.O_APPEND != 0 { - // TODO: worry about overflow. + if ei.Size >= uint64(1<<63) { + return nil, errors.New("offset too large") + } offset = int64(ei.Size) } @@ -204,7 +208,7 @@ func (fs *FS) Remove(filename string) error { // Join implements the billy.Filesystem interface for FS. func (fs *FS) Join(elem ...string) string { - return path.Join(elem...) + return path.Clean(path.Join(elem...)) } // TempFile implements the billy.Filesystem interface for FS.
libfs: address review feedback Suggested by jzila. Issue: #<I>
keybase_client
train
9a5fb8f16e11f54e2cba235952db1917967f3a08
diff --git a/modin/backends/pandas/query_compiler.py b/modin/backends/pandas/query_compiler.py index <HASH>..<HASH> 100644 --- a/modin/backends/pandas/query_compiler.py +++ b/modin/backends/pandas/query_compiler.py @@ -1722,6 +1722,8 @@ class PandasQueryCompiler(BaseQueryCompiler): DataManager containing the data sorted by columns or indices. """ axis = kwargs.pop("axis", 0) + if self._is_transposed: + return self.transpose().sort_index(axis=axis ^ 1, **kwargs).transpose() index = self.columns if axis else self.index # sort_index can have ascending be None and behaves as if it is False. diff --git a/modin/pandas/series.py b/modin/pandas/series.py index <HASH>..<HASH> 100644 --- a/modin/pandas/series.py +++ b/modin/pandas/series.py @@ -89,7 +89,9 @@ class Series(BasePandasDataset): isinstance(new_query_compiler, type(self._query_compiler)) or type(new_query_compiler) in self._query_compiler.__class__.__bases__ ), "Invalid Query Compiler object: {}".format(type(new_query_compiler)) - if not inplace and len(new_query_compiler.columns) == 1: + if not inplace and ( + len(new_query_compiler.columns) == 1 or len(new_query_compiler.index) == 1 + ): return Series(query_compiler=new_query_compiler) elif not inplace: # This can happen with things like `reset_index` where we can add columns.
Fix issue with `sort_values` after transpose (#<I>) * Fix issue with `sort_values` after transpose * Resolves #<I> * Also add logic to handle update_inplace with transposed QueryCompiler * Resolves #<I> * Lint * Revert change introducing bug
modin-project_modin
train
d603dd7f33650a0977c820f9e4722c302eadc598
diff --git a/bitset.go b/bitset.go index <HASH>..<HASH> 100644 --- a/bitset.go +++ b/bitset.go @@ -475,12 +475,12 @@ func (b *BitSet) Count() uint { return 0 } -// Equal tests the equvalence of two BitSets. +// Equal tests the equivalence of two BitSets. // False if they are of different sizes, otherwise true // only if all the same bits are set func (b *BitSet) Equal(c *BitSet) bool { - if c == nil { - return false + if c == nil || b == nil { + return c == b } if b.length != c.length { return false diff --git a/bitset_test.go b/bitset_test.go index <HASH>..<HASH> 100644 --- a/bitset_test.go +++ b/bitset_test.go @@ -842,6 +842,15 @@ func TestEqual(t *testing.T) { if !a.Equal(b) { t.Error("Two empty set should be equal") } + var x *BitSet + var y *BitSet + z := New(0) + if !x.Equal(y) { + t.Error("Two nil bitsets should be equal") + } + if x.Equal(z) { + t.Error("Nil receiver bitset should not be equal to non-nil bitset") + } } func TestUnion(t *testing.T) {
Fix Equal() on a nil bitset receiver - Two nil bitsets are considered equal - Protect against a nil bitset receiver and not nil bitset argument to Equal()
willf_bitset
train
7773c428829512b44e0fa973c1ed13a74d791c22
diff --git a/aeron-samples/src/main/java/io/aeron/samples/cluster/ClusterConfig.java b/aeron-samples/src/main/java/io/aeron/samples/cluster/ClusterConfig.java index <HASH>..<HASH> 100644 --- a/aeron-samples/src/main/java/io/aeron/samples/cluster/ClusterConfig.java +++ b/aeron-samples/src/main/java/io/aeron/samples/cluster/ClusterConfig.java @@ -186,7 +186,7 @@ public final class ClusterConfig * Gets the configuration's media driver context. * * @return configured {@link MediaDriver.Context}. - * @see MediaDriver.Context + * @see io.aeron.driver.MediaDriver.Context */ public MediaDriver.Context mediaDriverContext() { @@ -197,7 +197,7 @@ public final class ClusterConfig * Gets the configuration's archive context. * * @return configured {@link Archive.Context}. - * @see Archive.Context + * @see io.aeron.archive.Archive.Context */ public Archive.Context archiveContext() { @@ -208,7 +208,7 @@ public final class ClusterConfig * Gets the configuration's aeron archive context. * * @return configured {@link Archive.Context}. - * @see AeronArchive.Context + * @see io.aeron.archive.client.AeronArchive.Context */ public AeronArchive.Context aeronArchiveContext() { @@ -219,7 +219,7 @@ public final class ClusterConfig * Gets the configuration's consensus module context. * * @return configured {@link ConsensusModule.Context}. - * @see ConsensusModule.Context + * @see io.aeron.cluster.ConsensusModule.Context */ public ConsensusModule.Context consensusModuleContext() { @@ -230,7 +230,7 @@ public final class ClusterConfig * Gets the configuration's clustered service container context. * * @return configured {@link ClusteredServiceContainer.Context}. - * @see ClusteredServiceContainer.Context + * @see io.aeron.cluster.service.ClusteredServiceContainer.Context */ public ClusteredServiceContainer.Context clusteredServiceContext() {
[Java] Fully qualify javadoc references to fix warnings on some JDKs.
real-logic_aeron
train
20131e5cddd9b070b58e853d0649d40ebe9ac947
diff --git a/lib/db/upgrade.php b/lib/db/upgrade.php index <HASH>..<HASH> 100644 --- a/lib/db/upgrade.php +++ b/lib/db/upgrade.php @@ -1472,6 +1472,20 @@ function xmldb_main_upgrade($oldversion) { // Main savepoint reached. upgrade_main_savepoint(true, 2012111200.01); } + if ($oldversion < 2012111601.01) { + + // Define field eventtype to be added to event_subscriptions. + $table = new xmldb_table('event_subscriptions'); + $field = new xmldb_field('eventtype', XMLDB_TYPE_CHAR, '20', null, XMLDB_NOTNULL, null, null, 'userid'); + + // Conditionally launch add field eventtype. + if (!$dbman->field_exists($table, $field)) { + $dbman->add_field($table, $field); + } + + // Main savepoint reached. + upgrade_main_savepoint(true, 2012111601.01); + } return true; diff --git a/version.php b/version.php index <HASH>..<HASH> 100644 --- a/version.php +++ b/version.php @@ -30,7 +30,7 @@ defined('MOODLE_INTERNAL') || die(); -$version = 2012111601.00; // YYYYMMDD = weekly release date of this DEV branch +$version = 2012111601.01; // YYYYMMDD = weekly release date of this DEV branch // RR = release increments - 00 in DEV branches // .XX = incremental changes
MDL-<I> calendar: upgrade code for adding eventtype field to all existing installs
moodle_moodle
train
7ad19f67c144c298fadac5f702378fafc402b1ef
diff --git a/plugins/org.eclipse.xtext/src/org/eclipse/xtext/linking/impl/Linker.java b/plugins/org.eclipse.xtext/src/org/eclipse/xtext/linking/impl/Linker.java index <HASH>..<HASH> 100755 --- a/plugins/org.eclipse.xtext/src/org/eclipse/xtext/linking/impl/Linker.java +++ b/plugins/org.eclipse.xtext/src/org/eclipse/xtext/linking/impl/Linker.java @@ -60,9 +60,7 @@ public class Linker extends AbstractCleaningLinker { private void ensureLinked(EObject obj, IDiagnosticProducer producer, ICompositeNode node, Set<EReference> handledReferences) { - Iterator<INode> iterator = node.getChildren().iterator(); - while(iterator.hasNext()) { - INode abstractNode = iterator.next(); + for(INode abstractNode = node.getFirstChild(); abstractNode != null; abstractNode = abstractNode.getNextSibling()) { if (abstractNode.getGrammarElement() instanceof CrossReference) { CrossReference ref = (CrossReference) abstractNode.getGrammarElement(); producer.setNode(abstractNode); diff --git a/plugins/org.eclipse.xtext/src/org/eclipse/xtext/linking/lazy/LazyLinker.java b/plugins/org.eclipse.xtext/src/org/eclipse/xtext/linking/lazy/LazyLinker.java index <HASH>..<HASH> 100644 --- a/plugins/org.eclipse.xtext/src/org/eclipse/xtext/linking/lazy/LazyLinker.java +++ b/plugins/org.eclipse.xtext/src/org/eclipse/xtext/linking/lazy/LazyLinker.java @@ -115,8 +115,7 @@ public class LazyLinker extends AbstractCleaningLinker { if (eClass.getEAllReferences().size() - eClass.getEAllContainments().size() == 0) return; - for (Iterator<INode> iterator = parentNode.getChildren().iterator(); iterator.hasNext(); ) { - INode node = iterator.next(); + for (INode node = parentNode.getFirstChild(); node != null; node = node.getNextSibling()) { EObject grammarElement = node.getGrammarElement(); if (grammarElement instanceof CrossReference && !Iterables.isEmpty(node.getLeafNodes())) { producer.setNode(node); diff --git a/plugins/org.eclipse.xtext/src/org/eclipse/xtext/linking/lazy/LazyURIEncoder.java 
b/plugins/org.eclipse.xtext/src/org/eclipse/xtext/linking/lazy/LazyURIEncoder.java index <HASH>..<HASH> 100644 --- a/plugins/org.eclipse.xtext/src/org/eclipse/xtext/linking/lazy/LazyURIEncoder.java +++ b/plugins/org.eclipse.xtext/src/org/eclipse/xtext/linking/lazy/LazyURIEncoder.java @@ -126,9 +126,10 @@ public class LazyURIEncoder { if (parserNode == node) return; if (isAncestor(parserNode, node)) { - getRelativePath(result, parserNode, node.getParent()); + ICompositeNode parent = node.getParent(); + getRelativePath(result, parserNode, parent); int idx = 0; - INode child = node.getParent().getFirstChild(); + INode child = parent.getFirstChild(); while(child != node && child.hasNextSibling()) { idx++; child = child.getNextSibling(); @@ -141,13 +142,10 @@ public class LazyURIEncoder { } protected boolean isAncestor(INode parent, INode child) { - if (child.equals(parent)) - return true; - INode node = child; - while (node.getParent() != null) { - if (node.getParent().equals(parent)) + for (INode node = child; node != null; node = node.getParent()) { + if (node.equals(parent)) { return true; - node = node.getParent(); + } } return false; }
feedback from ed - various improvements Change-Id: I<I>b<I>c<I>c2fd<I>ad<I>bc9a4b<I>f<I>
eclipse_xtext-core
train
8e0daa7a007fa463e29f831e7db214a4332764eb
diff --git a/packages/records/src/common.js b/packages/records/src/common.js index <HASH>..<HASH> 100644 --- a/packages/records/src/common.js +++ b/packages/records/src/common.js @@ -1,7 +1,5 @@ // @flow -import produce from "immer"; - // Straight from nbformat export type MultilineString = string | Array<string>; @@ -85,7 +83,8 @@ export function demultiline(s: string | Array<string>): string { } /** - * Split string into a list of strings delimited by newlines, useful for on-disk git comparisons + * Split string into a list of strings delimited by newlines, useful for on-disk git comparisons, + * and is the expectation for jupyter notebooks on disk */ export function remultiline(s: string | Array<string>): Array<string> { if (Array.isArray(s)) { @@ -119,18 +118,24 @@ export function createImmutableMimeBundle( // "text/plain": "Hey" // } - return produce( - (mimeBundle: OnDiskMimebundle), - (draftBundle: OnDiskMimebundle): MimeBundle => { - for (const key in draftBundle) { - if ( - !isJSONKey(key) && - (typeof draftBundle[key] === "string" || - Array.isArray(draftBundle[key])) - ) { - draftBundle[key] = demultiline(draftBundle[key]); - } - } + // Since we have to convert from one type to another that has conflicting types, we need to hand convert it in a way that + // flow is able to verify correctly. 
The way we do that is create a new object that we declare with the type we want, + // set the keys and values we need, then seal the object with Object.freeze + const bundle: MimeBundle = {}; + + for (const key in mimeBundle) { + if ( + !isJSONKey(key) && + (typeof mimeBundle[key] === "string" || Array.isArray(mimeBundle[key])) + ) { + // Because it's a string, we can't mutate it anyways (and don't have to Object.freeze it) + bundle[key] = demultiline(mimeBundle[key]); + } else { + // we now know it's an Object of some kind + // TODO: DeepFreeze the mimebundle object for @nteract/records + bundle[key] = Object.freeze(mimeBundle[key]); } - ); + } + + return Object.freeze(bundle); }
write conversion directly for mimebundle records
nteract_nteract
train
866b07d3b9379b4462134119d68209c19b584ebf
diff --git a/src/components/themr.js b/src/components/themr.js index <HASH>..<HASH> 100644 --- a/src/components/themr.js +++ b/src/components/themr.js @@ -60,7 +60,7 @@ export default (componentName, localTheme, options = DEFAULT_OPTIONS) => (Themed function themeable(style = {}, theme) { if (!theme) return style return [ ...Object.keys(theme), ...Object.keys(style) ].reduce((result, key) => ( - theme[key] && style[key] && theme[key].indexOf(style[key]) === -1 + typeof theme[key] === 'string' && style[key] && theme[key].indexOf(style[key]) === -1 ? { ...result, [key]: `${style[key]} ${theme[key]}` } : { ...result, [key]: theme[key] || style[key] } ), {})
Check theme[key] is a string
javivelasco_react-css-themr
train
efea02671c48ba8a1001528db07f33bf475059c4
diff --git a/katversion/__init__.py b/katversion/__init__.py index <HASH>..<HASH> 100644 --- a/katversion/__init__.py +++ b/katversion/__init__.py @@ -14,7 +14,7 @@ # limitations under the License. ################################################################################ -from .version import get_version, build_info +from .version import get_version, build_info # noqa: F401 (used in other packages) # BEGIN VERSION CHECK # Get package version when locally imported from repo or via -e develop install diff --git a/katversion/build.py b/katversion/build.py index <HASH>..<HASH> 100644 --- a/katversion/build.py +++ b/katversion/build.py @@ -26,7 +26,7 @@ if "setuptools" in sys.modules: else: from distutils.command.sdist import log, sdist as OriginalSdist -from .version import get_version +from .version import get_version # noqa: E402 (confused by if-statement above) def patch_init_py(package_dir, version): @@ -113,13 +113,17 @@ def setuptools_entry(dist, keyword, value): s = "Ignoring explicit version='{0}' in setup.py, using '{1}' instead" warnings.warn(s.format(dist.metadata.version, version)) dist.metadata.version = version + # Extend build_py command to bake version string into installed package ExistingCustomBuildPy = dist.cmdclass.get('build_py', object) + class KatVersionBuildPy(AddVersionToInitBuildPy, ExistingCustomBuildPy): """First perform existing build_py and then bake in version string.""" dist.cmdclass['build_py'] = KatVersionBuildPy + # Extend sdist command to bake version string into source package ExistingCustomSdist = dist.cmdclass.get('sdist', object) + class KatVersionSdist(AddVersionToInitSdist, ExistingCustomSdist): """First perform existing sdist and then bake in version string.""" dist.cmdclass['sdist'] = KatVersionSdist diff --git a/setup.cfg b/setup.cfg index <HASH>..<HASH> 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,2 +1,5 @@ [bdist_wheel] universal = 1 + +[flake8] +max-line-length = 90
Flake8 cleanup Ignore some import warnings. The E<I> in build.py could probably also be solved by replacing the if-statement above it by a try-except-ImportError (why not this?). Add spaces before class definitions, but also space out the rest of the lines to separate each main step in `setuptools_entry`. Relax the line length restriction to <I> instead of excessive tweaking.
ska-sa_katversion
train
b1cf901d282c869c670fa4246be5ce40116112c9
diff --git a/lib/active_storage/log_subscriber.rb b/lib/active_storage/log_subscriber.rb index <HASH>..<HASH> 100644 --- a/lib/active_storage/log_subscriber.rb +++ b/lib/active_storage/log_subscriber.rb @@ -1,7 +1,5 @@ require "active_support/log_subscriber" -# Implements the ActiveSupport::LogSubscriber for logging notifications when -# email is delivered or received. class ActiveStorage::LogSubscriber < ActiveSupport::LogSubscriber def service_upload(event) message = color("Uploaded file to key: #{key_in(event)}", GREEN) @@ -25,7 +23,6 @@ class ActiveStorage::LogSubscriber < ActiveSupport::LogSubscriber debug event, color("Generated URL for file at key: #{key_in(event)} (#{event.payload[:url]})", BLUE) end - # Use the logger configured for ActiveStorage::Base.logger def logger ActiveStorage::Service.logger end
Copypasta comments # Conflicts: # lib/active_storage/engine.rb # lib/active_storage/service.rb # lib/active_storage/service/disk_service.rb # lib/active_storage/service/s3_service.rb # test/service/s3_service_test.rb # test/test_helper.rb
rails_rails
train
1c153ac1e4f7e10c641d6524a19f664923041d52
diff --git a/bin/hash_db_password.py b/bin/hash_db_password.py index <HASH>..<HASH> 100644 --- a/bin/hash_db_password.py +++ b/bin/hash_db_password.py @@ -20,10 +20,10 @@ except: try: - print "using connection string: {}".format(app.config['SQLALCHEMY_DATABASE_URI']) + print "using connection string: {0}".format(app.config['SQLALCHEMY_DATABASE_URI']) users = db.session.query(User).all() except Exception as e: - print "Query, connection error {}".format(e) + print "Query, connection error {0}".format(e) print "Config db key {}".format(app.config['SQLALCHEMY_DATABASE_URI']) exit() @@ -34,5 +34,5 @@ for user in users: db.session.merge(user) db.session.commit() except: - print "Error updating password for {}".format(user.full_name) + print "Error updating password for {0}".format(user.full_name)
Update hash_db_password.py
dpgaspar_Flask-AppBuilder
train
fb48a7aca5a145c0add0f9fb9ed06197221f7183
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -15,10 +15,13 @@ function JSHinter (inputNode, options) { if (!(this instanceof JSHinter)) return new JSHinter(inputNode, options); options = options || {}; + if (!options.hasOwnProperty('persist')) { + options.persist = true; + } Filter.call(this, inputNode, { annotation: options.annotation, - persist: true + persist: options.persist }); this.log = true; this.options = options; diff --git a/tests/index.js b/tests/index.js index <HASH>..<HASH> 100644 --- a/tests/index.js +++ b/tests/index.js @@ -47,6 +47,7 @@ describe('broccoli-jshint', function(){ var sourcePath = 'tests/fixtures/some-files-with-too-many-errors'; var node = new JSHinter(sourcePath, { + persist: false, logError: function(message) { loggerOutput.push(message) } }); @@ -73,6 +74,7 @@ describe('broccoli-jshint', function(){ var sourcePath = 'tests/fixtures/some-files-ignoring-missing-semi-colons-non-default-jshintrc-path'; var node = new JSHinter(sourcePath, { + persist: false, jshintrcRoot: 'blah', logError: function(message) { loggerOutput.push(message) } }); @@ -87,6 +89,7 @@ describe('broccoli-jshint', function(){ var sourcePath = 'tests/fixtures/some-files-ignoring-missing-semi-colons'; var node = new JSHinter(sourcePath, { + persist: false, jshintrcRoot: '../jshintrc-outside-project-heirarchy', logError: function(message) { loggerOutput.push(message) } }); @@ -132,6 +135,7 @@ describe('broccoli-jshint', function(){ var sourcePath = 'tests/fixtures/some-files-without-semi-colons'; var consoleLogOutput = []; var node = new JSHinter(sourcePath, { + persist: false, console: { log: function(data) { consoleLogOutput.push(data); @@ -154,6 +158,7 @@ describe('broccoli-jshint', function(){ it('logs errors using custom supplied function', function(){ var sourcePath = 'tests/fixtures/some-files-without-semi-colons'; var node = new JSHinter(sourcePath, { + persist: false, logError: function(message) { 
loggerOutput.push(message) } }); @@ -200,6 +205,7 @@ describe('broccoli-jshint', function(){ var sourcePath = 'tests/fixtures/some-files-without-semi-colons'; var node = new JSHinter(sourcePath, { + persist: false, logError: function(message) { loggerOutput.push(message) }, escapeErrorString: function(string) { escapeErrorStringCalled = true;
Disable persistence for unit tests testing side-effects.
rwjblue_broccoli-jshint
train
c4a75532df3bb2a6cf2b9fb72fdd336974db496f
diff --git a/django_extensions/management/commands/graph_models.py b/django_extensions/management/commands/graph_models.py index <HASH>..<HASH> 100644 --- a/django_extensions/management/commands/graph_models.py +++ b/django_extensions/management/commands/graph_models.py @@ -43,15 +43,18 @@ class Command(BaseCommand): raise CommandError("need pygraphviz python module ( apt-get install python-pygraphviz )") vizdata = ' '.join(dotdata.split("\n")).strip() - if [int(v) for v in pygraphviz.__version__.split('.')]<(0,36): - ##raise CommandError("need version 0.36 or higher of pygraphviz") - # HACK around old/broken AGraph before version 0.36 (ubuntu ships with this old version) - import tempfile - tmpfile = tempfile.NamedTemporaryFile() - tmpfile.write(vizdata) - tmpfile.seek(0) - vizdata = tmpfile.name + version = pygraphviz.__version__.rstrip("-svn") + try: + if [int(v) for v in version.split('.')]<(0,36): + # HACK around old/broken AGraph before version 0.36 (ubuntu ships with this old version) + import tempfile + tmpfile = tempfile.NamedTemporaryFile() + tmpfile.write(vizdata) + tmpfile.seek(0) + vizdata = tmpfile.name + except ValueError: + pass + graph = pygraphviz.AGraph(vizdata) graph.layout(prog=kwargs['layout']) graph.draw(kwargs['outputfile']) -
strip "-svn" from pygraphviz version string, fixes ticket #<I>, thanks jjconti!
django-extensions_django-extensions
train
a3d2b0210ececaf7049a5f37d61aabdc5973c157
diff --git a/tests/fetch_data.py b/tests/fetch_data.py index <HASH>..<HASH> 100644 --- a/tests/fetch_data.py +++ b/tests/fetch_data.py @@ -1,6 +1,7 @@ import os import hashlib import urllib +import gzip def load_from_s3(path, md5hash): @@ -21,6 +22,11 @@ def load_from_s3(path, md5hash): def main(): + if not os.path.exists('../examples/data/haarcascade_frontalface_default.xml.gz'): + fp = open('haarcascade_frontalface_default.xml', 'w') + o = gzip.GzipFile('../examples/data/haarcascade_frontalface_default.xml.gz').read() + fp.write(o) + fp.close() load_from_s3('../examples/data/face_finder-input-voctrainpart.tb', 'dbc50c02103221a499fc7cc77a5b61e9') if __name__ == '__main__': diff --git a/tests/test_with_hadoop.py b/tests/test_with_hadoop.py index <HASH>..<HASH> 100644 --- a/tests/test_with_hadoop.py +++ b/tests/test_with_hadoop.py @@ -21,7 +21,6 @@ import subprocess import time import hadoopy import os -import gzip import numpy as np import hashlib import urllib @@ -137,10 +136,6 @@ class TestUsingHadoop(unittest.TestCase): @unittest.skipIf(not hadoop_installed(), 'Hadoop not installed') @unittest.skipIf(not pil_and_cv_installed(), 'PIL or OpenCV not installed') def test_face(self): - fp = open('haarcascade_frontalface_default.xml', 'w') - o = gzip.GzipFile('../examples/data/haarcascade_frontalface_default.xml.gz').read() - fp.write(o) - fp.close() self._run_face('../examples/data/face_finder-input-voctrainpart.tb') self._run_face('../examples/data/face_finder-input-voctrainpart.tb', pipe=False)
Added face detect cascade to fetch_data.py
bwhite_hadoopy
train
6aa5dd9dfc56a64038c0131ddbbb9b6d15ed2a7e
diff --git a/tasks/install.php b/tasks/install.php index <HASH>..<HASH> 100644 --- a/tasks/install.php +++ b/tasks/install.php @@ -31,6 +31,13 @@ class FluxBB_Install_Task extends Task public function run($arguments = array()) { + $this->structure(); + + $this->seed(); + } + + public function structure() + { foreach (new FilesystemIterator($this->path()) as $file) { $migration = basename($file->getFileName(), '.php');
Seed the database, too, when installing.
fluxbb_core
train
446140b1ae8871c10451d496f9aee6c20ba709a8
diff --git a/jwk/interface.go b/jwk/interface.go index <HASH>..<HASH> 100644 --- a/jwk/interface.go +++ b/jwk/interface.go @@ -21,9 +21,9 @@ type JSONWebKey interface { Kty() string } -// Essential defines the common data that any JSONWebKey may +// EssentialHeader defines the common data that any JSONWebKey may // carry with it. -type Essential struct { +type EssentialHeader struct { Algorithm string `json:"alg,omitempty"` KeyID string `json:"kid,omitempty"` KeyOps []string `json:"key_ops,omitempty"` @@ -37,7 +37,7 @@ type Essential struct { // RsaPublicKey is a type of JWK generated from RSA public keys type RsaPublicKey struct { - *Essential + *EssentialHeader E buffer.Buffer `json:"e"` N buffer.Buffer `json:"n"` } diff --git a/jwk/jwk.go b/jwk/jwk.go index <HASH>..<HASH> 100644 --- a/jwk/jwk.go +++ b/jwk/jwk.go @@ -48,9 +48,9 @@ func constructKey(m map[string]interface{}) (JSONWebKey, error) { } } -func constructEssential(m map[string]interface{}) (*Essential, error) { +func constructEssentialHeader(m map[string]interface{}) (*EssentialHeader, error) { r := emap.Hmap(m) - e := &Essential{} + e := &EssentialHeader{} var err error // https://tools.ietf.org/html/rfc7517#section-4.1 @@ -90,7 +90,7 @@ func constructEssential(m map[string]interface{}) (*Essential, error) { } func constructRsaPublicKey(m map[string]interface{}) (*RsaPublicKey, error) { - e, err := constructEssential(m) + e, err := constructEssentialHeader(m) if err != nil { return nil, err } @@ -105,7 +105,7 @@ func constructRsaPublicKey(m map[string]interface{}) (*RsaPublicKey, error) { } } - k := &RsaPublicKey{Essential: e} + k := &RsaPublicKey{EssentialHeader: e} r := emap.Hmap(m) if v, err := r.GetBuffer("e"); err == nil { @@ -165,10 +165,10 @@ func constructRsaPrivateKey(m map[string]interface{}) (*RsaPrivateKey, error) { return k, nil } -func (e Essential) Kid() string { +func (e EssentialHeader) Kid() string { return e.KeyID } -func (e Essential) Kty() string { +func (e EssentialHeader) Kty() 
string { return e.KeyType } diff --git a/jwk/rsa.go b/jwk/rsa.go index <HASH>..<HASH> 100644 --- a/jwk/rsa.go +++ b/jwk/rsa.go @@ -10,9 +10,9 @@ import ( func NewRsaPublicKey(pk *rsa.PublicKey) (*RsaPublicKey, error) { k := &RsaPublicKey{ - Essential: &Essential{KeyType: "RSA"}, - N: buffer.Buffer(pk.N.Bytes()), - E: buffer.FromUint(uint64(pk.E)), + EssentialHeader: &EssentialHeader{KeyType: "RSA"}, + N: buffer.Buffer(pk.N.Bytes()), + E: buffer.FromUint(uint64(pk.E)), } return k, nil }
Align struct name with other packages
lestrrat-go_jwx
train
8ae8bc542c9952faf4e572d4a120a01e4990b78f
diff --git a/js/gateio.js b/js/gateio.js index <HASH>..<HASH> 100755 --- a/js/gateio.js +++ b/js/gateio.js @@ -2041,7 +2041,23 @@ module.exports = class gateio extends Exchange { const defaultType = this.safeString2 (this.options, 'createOrder', 'defaultType', 'spot'); const marketType = this.safeString (params, 'type', defaultType); const contract = market['contract']; - const request = this.prepareRequest (market); + const request = this.extend (this.prepareRequest (market), { + // user-defined, must follow the rules if not empty + // prefixed with t- + // no longer than 28 bytes without t- prefix + // can only include 0-9, A-Z, a-z, underscores (_), hyphens (-) or dots (.) + // 'text': clientOrderId, // 't-abcdef1234567890', + // 'currency_pair': market['id'], + // 'type': type, + // 'account': 'spot', // 'spot', 'margin', 'cross_margin' + // 'side': side, + // 'amount': this.amountToPrecision (symbol, amount), + // 'price': this.priceToPrecision (symbol, price), + // 'time_in_force': 'gtc', // gtc, ioc, poc PendingOrCancelled == postOnly order + // 'iceberg': 0, // amount to display for the iceberg order, null or 0 for normal orders, set to -1 to hide the order completely + // 'auto_borrow': false, // used in margin or cross margin trading to allow automatic loan of insufficient amount if balance is not enough + // 'auto_repay': false, // automatic repayment for automatic borrow loan generated by cross margin order, diabled by default + }); const reduceOnly = this.safeValue (params, 'reduceOnly'); params = this.omit (params, 'reduceOnly'); if (reduceOnly !== undefined) { @@ -2056,6 +2072,17 @@ module.exports = class gateio extends Exchange { } request['size'] = this.parseNumber (this.amountToPrecision (symbol, amount)); } else { + let clientOrderId = this.safeString2 (params, 'text', 'clientOrderId'); + if (clientOrderId !== undefined) { + if (clientOrderId.length > 28) { + throw new BadRequest (this.id + ' createOrder() clientOrderId or text param must be up 
to 28 characters'); + } + params = this.omit (params, [ 'text', 'clientOrderId' ]); + if (clientOrderId[0] !== 't') { + clientOrderId = 't-' + clientOrderId; + } + request['text'] = clientOrderId; + } request['side'] = side; request['type'] = type; request['amount'] = this.amountToPrecision (symbol, amount);
gateio createOrder clientOrderId unified
ccxt_ccxt
train
13b93172518f5f43829db97d5cd0ce432ba5b0ad
diff --git a/graphsrv/application.py b/graphsrv/application.py index <HASH>..<HASH> 100644 --- a/graphsrv/application.py +++ b/graphsrv/application.py @@ -12,6 +12,7 @@ import vodka.app import vodka.data import vodka.data.renderers import vodka.config +import vodka.config.shared import vodka.plugins import vodka.plugins.zeromq @@ -120,8 +121,10 @@ class GraphServ(vodka.app.WebApplication): help_text="data groups" ) - includes = vodka.config.Attribute( + includes = vodka.config.shared.Container( dict, + nested=1, + share="includes:merge", default={ "js" : { "jquery" : {"path":"graphsrv/js/jquery.js"}, @@ -132,7 +135,7 @@ class GraphServ(vodka.app.WebApplication): "graphsrv" : {"path":"graphsrv/media/graphsrv.css", "order":1} } }, - handler=lambda x,y: vodka.config.shared.Routers(dict, "includes:merge", handler=SharedIncludesConfigHandler), + handler=lambda x,y: vodka.config.shared.Routers(dict, "includes:merge", handler=vodka.app.SharedIncludesConfigHandler), help_text="allows you to specify extra media includes for js,css etc." )
includes attribute to config.shared.Container
20c_graphsrv
train
58ebf302b206ed30568da60d7f3b124b26e9325b
diff --git a/activerecord/CHANGELOG b/activerecord/CHANGELOG index <HASH>..<HASH> 100644 --- a/activerecord/CHANGELOG +++ b/activerecord/CHANGELOG @@ -1,5 +1,7 @@ *SVN* +* The exists? class method should treat a string argument as an id rather than as conditions. #5698 [jeremy@planetargon.com] + * Fixed to_xml with :include misbehaviors when invoked on array of model instances #5690 [alexkwolfe@gmail.com] * Added support for conditions on Base.exists? #5689 [josh@joshpeek.com]. Examples: diff --git a/activerecord/lib/active_record/base.rb b/activerecord/lib/active_record/base.rb index <HASH>..<HASH> 100755 --- a/activerecord/lib/active_record/base.rb +++ b/activerecord/lib/active_record/base.rb @@ -426,10 +426,13 @@ module ActiveRecord #:nodoc: # You can also pass a set of SQL conditions. # Example: # Person.exists?(5) + # Person.exists?('5') # Person.exists?(:name => "David") - def exists?(conditions) - conditions = ["#{primary_key} = ?", conditions] if conditions.is_a?(Fixnum) - !find(:first, :conditions => conditions).nil? rescue false + # Person.exists?(['name LIKE ?', "%#{query}%"]) + def exists?(id_or_conditions) + !find(:first, :conditions => expand_id_conditions(id_or_conditions)).nil? + rescue ActiveRecord::ActiveRecordError + false end # Creates an object, instantly saves it as a record (if the validation permits it), and returns it. If the save @@ -1224,6 +1227,15 @@ module ActiveRecord #:nodoc: end end + # Interpret Array and Hash as conditions and anything else as an id. + def expand_id_conditions(id_or_conditions) + case id_or_conditions + when Array, Hash then id_or_conditions + else construct_conditions_from_arguments([primary_key], [id_or_conditions]) + end + end + + # Defines an "attribute" method (like #inheritance_column or # #table_name). A new (class) method will be created with the # given name. 
If a value is specified, the new method will diff --git a/activerecord/test/finder_test.rb b/activerecord/test/finder_test.rb index <HASH>..<HASH> 100644 --- a/activerecord/test/finder_test.rb +++ b/activerecord/test/finder_test.rb @@ -20,13 +20,14 @@ class FinderTest < Test::Unit::TestCase end def test_exists - assert (Topic.exists?(1)) - assert (Topic.exists?(:author_name => "David")) - assert (Topic.exists?(:author_name => "Mary", :approved => true)) - assert (Topic.exists?(["parent_id = ?", 1])) - assert !(Topic.exists?(45)) - assert !(Topic.exists?("foo")) - assert !(Topic.exists?([1,2])) + assert Topic.exists?(1) + assert Topic.exists?("1") + assert Topic.exists?(:author_name => "David") + assert Topic.exists?(:author_name => "Mary", :approved => true) + assert Topic.exists?(["parent_id = ?", 1]) + assert !Topic.exists?(45) + assert !Topic.exists?("foo") + assert_raise(NoMethodError) { Topic.exists?([1,2]) } end def test_find_by_array_of_one_id
The exists? class method should treat a string argument as an id rather than as conditions. Closes #<I>. git-svn-id: <URL>
rails_rails
train
57e32e61a59a02219e06bcb4a1fb132fd381a9a9
diff --git a/sos/report/__init__.py b/sos/report/__init__.py index <HASH>..<HASH> 100644 --- a/sos/report/__init__.py +++ b/sos/report/__init__.py @@ -660,7 +660,7 @@ class SoSReport(SoSComponent): def _set_all_options(self): if self.opts.alloptions: for plugname, plug in self.loaded_plugins: - for opt in plug.options: + for opt in plug.options.values(): if bool in opt.val_type: opt.value = True
[report] Fix setting of all options via `-a` Fixes an issue where the use of `-a` would fail due to trying to iterate over the key of the plugin's options dict rather than the actual `PluginOpt` value. Closes: #<I>
sosreport_sos
train
57869f18af54697b1f25b6ef9e3372a7c1650cd6
diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index <HASH>..<HASH> 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -1,3 +1,5 @@ +import sys + import numpy as np from pandas.core.common import isnull, notnull @@ -112,10 +114,14 @@ def nanmin(values, axis=None, skipna=True, copy=True): values = values.copy() np.putmask(values, mask, np.inf) # numpy 1.6.1 workaround in Python 3.x - if values.dtype == np.object_: # pragma: no cover + if (values.dtype == np.object_ + and sys.version_info[0] >= 3): # pragma: no cover import __builtin__ - apply_ax = axis if axis is not None else 0 - result = np.apply_along_axis(__builtin__.min, apply_ax, values) + if values.ndim > 1: + apply_ax = axis if axis is not None else 0 + result = np.apply_along_axis(__builtin__.min, apply_ax, values) + else: + result = __builtin__.min(values) else: result = values.min(axis) @@ -128,10 +134,15 @@ def nanmax(values, axis=None, skipna=True, copy=True): values = values.copy() np.putmask(values, mask, -np.inf) # numpy 1.6.1 workaround in Python 3.x - if values.dtype == np.object_: # pragma: no cover + if (values.dtype == np.object_ + and sys.version_info[0] >= 3): # pragma: no cover import __builtin__ - apply_ax = axis if axis is not None else 0 - result = np.apply_along_axis(__builtin__.max, apply_ax, values) + + if values.ndim > 1: + apply_ax = axis if axis is not None else 0 + result = np.apply_along_axis(__builtin__.max, apply_ax, values) + else: + result = __builtin__.max(values) else: result = values.max(axis) return _maybe_null_out(result, axis, mask) diff --git a/pandas/tests/test_series.py b/pandas/tests/test_series.py index <HASH>..<HASH> 100644 --- a/pandas/tests/test_series.py +++ b/pandas/tests/test_series.py @@ -575,10 +575,10 @@ class TestSeries(unittest.TestCase, CheckNameIntegration): self._check_stat_op('prod', np.prod) def test_min(self): - self._check_stat_op('min', np.min) + self._check_stat_op('min', np.min, check_objects=True) def test_max(self): - 
self._check_stat_op('max', np.max) + self._check_stat_op('max', np.max, check_objects=True) def test_std(self): alt = lambda x: np.std(x, ddof=1) @@ -604,7 +604,9 @@ class TestSeries(unittest.TestCase, CheckNameIntegration): def test_cumprod(self): self._check_accum_op('cumprod') - def _check_stat_op(self, name, alternate): + def _check_stat_op(self, name, alternate, check_objects=False): + from pandas import DateRange + f = getattr(Series, name) # add some NaNs @@ -625,6 +627,13 @@ class TestSeries(unittest.TestCase, CheckNameIntegration): s = Series([1, 2, 3, None, 5]) f(s) + # check DateRange + if check_objects: + s = Series(DateRange('1/1/2000', periods=10)) + res = f(s) + exp = alternate(s) + self.assertEqual(res, exp) + def _check_accum_op(self, name): func = getattr(np, name) self.assert_(np.array_equal(func(self.ts), func(np.array(self.ts))))
BUG: handle datetime.datetime in nanmin/nanmax, on Python 2 at least, GH #<I>
pandas-dev_pandas
train
6bd41d13e1dfe9ea3ccfdeae21d93c0705378d4a
diff --git a/lib/agent/cli.js b/lib/agent/cli.js index <HASH>..<HASH> 100644 --- a/lib/agent/cli.js +++ b/lib/agent/cli.js @@ -97,7 +97,7 @@ pid.store(pid_file, function(err, running){ if (process.stdout._type == 'tty') { var run_time_str = run_time.toString().substring(0,5); - logger.write('\nLive instance for ' + run_time_str + ' minutes with PID: ' + running.pid + '.\n'); + logger.write('\nLive instance for ' + run_time_str + ' minutes with PID: ' + running.pid + '.'); } // don't poke instance if running since less than two minutes ago diff --git a/lib/conf/windows/index.js b/lib/conf/windows/index.js index <HASH>..<HASH> 100755 --- a/lib/conf/windows/index.js +++ b/lib/conf/windows/index.js @@ -6,25 +6,14 @@ var async = require('async'), var remove_existing = function(cb) { - var remove_it = function(){ - service.delete(cb); - } - service.exists(function(exists){ if (!exists) return cb(); - service.running(function(err, pid) { - // if (err) return cb(err); - - if (!pid) - return remove_it(); - - service.kill(pid, function(err, success) { - if (err || !success) - return cb(err || new Error("Can't kill process with PID " + pid)); + service.kill(function(err) { + // if (err || !success) + // return cb(err || new Error("Can't kill process with PID " + pid)); - remove_it(); - }); + service.delete(cb); }); }) }; @@ -33,9 +22,9 @@ exports.post_install = function(cb) { remove_existing(function(err){ if (err) return cb(err); - service.create(function(err, success){ - if (err || !success) - return cb(err || new Error('Unable to register service.')); + service.create(function(err){ + if (err) + return cb(new Error('Unable to register service: ' + err.message)); service.start(cb); }); @@ -43,5 +32,5 @@ exports.post_install = function(cb) { }; exports.pre_uninstall = function(callback){ - remove_existing(callback); + remove_existing(callback); }; diff --git a/lib/conf/windows/service.js b/lib/conf/windows/service.js index <HASH>..<HASH> 100644 --- 
a/lib/conf/windows/service.js +++ b/lib/conf/windows/service.js @@ -43,52 +43,35 @@ service.running = function(callback) { /** * Kill the running service process, note this is not the same as service_delete. **/ -service.kill = function(pid, callback) { - exec('taskkill /f /pid ' + pid, function(err, stdout){ - if (err) return callback(err); - - callback(null, stdout.indexOf("SUCCESS") !== -1); - }); +service.kill = function(pid, cb) { + exec('taskkill /f /im ' + service.bin, cb); }; /** * Delete the service from the service registry. **/ -service.delete = function(callback) { - exec('sc delete ' + service.key, callback); +service.delete = function(cb) { + exec('sc delete ' + service.key, cb); }; /** * Create a service, by providing the path to the executable. **/ service.create = function(callback) { - var cmd = 'sc create ' + service.key + ' binPath= '+ service.path; + var cmd = 'sc create ' + service.key + ' binPath= ' + service.path; cmd += ' start= auto'; // options: boot, system, auto, demand, disabled cmd += ' DisplayName= "' + service.name + '"'; - exec(cmd, function(err, stdout) { - if (err) return callback(err); - - callback(null, stdout.toString().indexOf("SUCCESS") !== -1); - }); + exec(cmd, cb); }; /** * Callback the service PID if all is well, else null. **/ -service.start = function(callback) { - exec('sc start ' + service.key, function(err, stdout) { - if (err) return callback(err); - - var match = stdout.match(/PID\s+?:\s([0-9]+)\s/); - - if (!match) - return callback(new Error('Couldnt start service')); - - callback(null, match[1]); - }); +service.start = function(cb) { + exec('sc start ' + service.key, cb); }; module.exports = service;
Simpler checks for Windows service creation/deletion.
prey_prey-node-client
train
162cbfef2d7cd62509241261fb976828dd1423af
diff --git a/pylint/checkers/imports.py b/pylint/checkers/imports.py index <HASH>..<HASH> 100644 --- a/pylint/checkers/imports.py +++ b/pylint/checkers/imports.py @@ -51,7 +51,7 @@ import copy import os import sys from distutils import sysconfig -from typing import Any, Dict, List, Set, Tuple, Union +from typing import Any, Dict, List, Optional, Set, Tuple, Union import astroid from astroid import nodes @@ -424,7 +424,7 @@ class ImportsChecker(DeprecatedMixin, BaseChecker): ) def __init__( - self, linter: PyLinter = None + self, linter: Optional[PyLinter] = None ): # pylint: disable=super-init-not-called # See https://github.com/PyCQA/pylint/issues/4941 BaseChecker.__init__(self, linter) self.stats: CheckerStats = {} diff --git a/pylint/checkers/stdlib.py b/pylint/checkers/stdlib.py index <HASH>..<HASH> 100644 --- a/pylint/checkers/stdlib.py +++ b/pylint/checkers/stdlib.py @@ -39,7 +39,7 @@ import sys from collections.abc import Iterable -from typing import Any, Dict, Set +from typing import Any, Dict, Optional, Set import astroid from astroid import nodes @@ -447,7 +447,7 @@ class StdlibChecker(DeprecatedMixin, BaseChecker): } def __init__( - self, linter: PyLinter = None + self, linter: Optional[PyLinter] = None ): # pylint: disable=super-init-not-called # See https://github.com/PyCQA/pylint/issues/4941 BaseChecker.__init__(self, linter) self._deprecated_methods: Set[Any] = set() diff --git a/pylint/checkers/utils.py b/pylint/checkers/utils.py index <HASH>..<HASH> 100644 --- a/pylint/checkers/utils.py +++ b/pylint/checkers/utils.py @@ -674,7 +674,7 @@ def is_attr_private(attrname: str) -> Optional[Match[str]]: def get_argument_from_call( - call_node: nodes.Call, position: int = None, keyword: str = None + call_node: nodes.Call, position: Optional[int] = None, keyword: Optional[str] = None ) -> nodes.Name: """Returns the specified argument from a function call. 
diff --git a/pylint/graph.py b/pylint/graph.py index <HASH>..<HASH> 100644 --- a/pylint/graph.py +++ b/pylint/graph.py @@ -26,6 +26,7 @@ import shutil import subprocess import sys import tempfile +from typing import Optional def target_info_from_filename(filename): @@ -82,7 +83,9 @@ class DotBackend: source = property(get_source) - def generate(self, outputfile: str = None, mapfile: str = None) -> str: + def generate( + self, outputfile: Optional[str] = None, mapfile: Optional[str] = None + ) -> str: """Generates a graph file. :param str outputfile: filename and path [defaults to graphname.png] diff --git a/pylint/message/message_definition.py b/pylint/message/message_definition.py index <HASH>..<HASH> 100644 --- a/pylint/message/message_definition.py +++ b/pylint/message/message_definition.py @@ -20,7 +20,7 @@ class MessageDefinition: scope: str, minversion: Optional[Tuple[int, int]] = None, maxversion: Optional[Tuple[int, int]] = None, - old_names: List[Tuple[str, str]] = None, + old_names: Optional[List[Tuple[str, str]]] = None, ): self.checker_name = checker.name self.check_msgid(msgid) diff --git a/pylint/message/message_handler_mix_in.py b/pylint/message/message_handler_mix_in.py index <HASH>..<HASH> 100644 --- a/pylint/message/message_handler_mix_in.py +++ b/pylint/message/message_handler_mix_in.py @@ -74,7 +74,7 @@ class MessagesHandlerMixIn: self, msgid: str, scope: str = "package", - line: Union[bool, int] = None, + line: Union[bool, int, None] = None, ignore_unknown: bool = False, ): if not line: diff --git a/setup.cfg b/setup.cfg index <HASH>..<HASH> 100644 --- a/setup.cfg +++ b/setup.cfg @@ -83,6 +83,7 @@ skip_glob = tests/functional/**,tests/input/**,tests/extensions/data/**,tests/re src_paths = pylint [mypy] +no_implicit_optional = True scripts_are_modules = True warn_unused_ignores = True diff --git a/tests/test_self.py b/tests/test_self.py index <HASH>..<HASH> 100644 --- a/tests/test_self.py +++ b/tests/test_self.py @@ -141,7 +141,7 @@ class 
TestRunTC: args: List[str], reporter: Any = None, out: Optional[StringIO] = None, - code: int = None, + code: Optional[int] = None, ) -> None: if out is None: out = StringIO()
Add ``no-implicit-optional`` flag to ``mypy`` (#<I>)
PyCQA_pylint
train
5c9efc809860fcbc11fff45a122b5bfa0f9cc3aa
diff --git a/starlette/convertors.py b/starlette/convertors.py index <HASH>..<HASH> 100644 --- a/starlette/convertors.py +++ b/starlette/convertors.py @@ -20,7 +20,7 @@ class StringConvertor(Convertor): def to_string(self, value: typing.Any) -> str: value = str(value) - assert "/" not in value, "May not contain path seperators" + assert "/" not in value, "May not contain path separators" assert value, "Must not be empty" return value
Friendly spellcheck in convertor assertion (#<I>)
encode_starlette
train
e00ccf18bde11a4d03e8969a88f35f17e0b155a2
diff --git a/ui/src/shared/components/OptIn.js b/ui/src/shared/components/OptIn.js index <HASH>..<HASH> 100644 --- a/ui/src/shared/components/OptIn.js +++ b/ui/src/shared/components/OptIn.js @@ -17,7 +17,7 @@ class OptIn extends Component { useRightValue, leftValue, rightValue: value || '', - leftValueFieldClicked: false, + // leftValueFieldClicked: false, toggleClicked: false, rightValueInputBlurred: false, // rightValueInputClicked: false, // TODO: implement right input clickability @@ -26,7 +26,7 @@ class OptIn extends Component { this.useLeftValue = ::this.useLeftValue this.toggleValue = ::this.toggleValue this.useRightValue = ::this.useRightValue - this.handleClickLeftValueField = ::this.handleClickLeftValueField + // this.handleClickLeftValueField = ::this.handleClickLeftValueField this.handleClickToggle = ::this.handleClickToggle this.handleBlurRightValueInput = ::this.handleBlurRightValueInput this.handleChangeRightValue = ::this.handleChangeRightValue @@ -58,11 +58,11 @@ class OptIn extends Component { }) } - handleClickLeftValueField() { - return () => { - this.setState({leftValueFieldClicked: true}, this.useLeftValue) - } - } + // handleClickLeftValueField() { + // return () => { + // this.setState({leftValueFieldClicked: true}, this.useLeftValue) + // } + // } handleClickToggle() { return () => { @@ -136,7 +136,9 @@ class OptIn extends Component { <div className={classnames('opt-in', {'right-toggled': useRightValue})}> <div className="opt-in--left-label" - onClick={this.handleClickLeftValueField()} + onClick={() => { + // this.handleClickLeftValueField() // TODO: re-enable once clickability of right value input is enabled + }} > {leftLabel} </div>
Comment out click handler for left field until right input is clickable
influxdata_influxdb
train
1f1601732814d0bec9bae157ee46b2aa0e67879b
diff --git a/pyvista/plotting/scalar_bars.py b/pyvista/plotting/scalar_bars.py index <HASH>..<HASH> 100644 --- a/pyvista/plotting/scalar_bars.py +++ b/pyvista/plotting/scalar_bars.py @@ -46,7 +46,8 @@ class ScalarBars(): except AttributeError: return - for name in self._scalar_bar_mappers: + # NOTE: keys to list to prevent iterator changing during loop + for name in list(self._scalar_bar_mappers): try: self._scalar_bar_mappers[name].remove(mapper) except ValueError:
Convert scalar bar keys to list (#<I>)
vtkiorg_vtki
train
2d699e24ff10158c0831cf6b7f5e5b12ac41903a
diff --git a/activerecord/CHANGELOG.md b/activerecord/CHANGELOG.md index <HASH>..<HASH> 100644 --- a/activerecord/CHANGELOG.md +++ b/activerecord/CHANGELOG.md @@ -1 +1,5 @@ +* Quote database name in db:create grant statement (when database_user does not have access to create the database). + + *Rune Philosof* + Please check [5-1-stable](https://github.com/rails/rails/blob/5-1-stable/activerecord/CHANGELOG.md) for previous changes. diff --git a/activerecord/lib/active_record/tasks/mysql_database_tasks.rb b/activerecord/lib/active_record/tasks/mysql_database_tasks.rb index <HASH>..<HASH> 100644 --- a/activerecord/lib/active_record/tasks/mysql_database_tasks.rb +++ b/activerecord/lib/active_record/tasks/mysql_database_tasks.rb @@ -104,7 +104,7 @@ module ActiveRecord def grant_statement <<-SQL -GRANT ALL PRIVILEGES ON #{configuration['database']}.* +GRANT ALL PRIVILEGES ON `#{configuration['database']}`.* TO '#{configuration['username']}'@'localhost' IDENTIFIED BY '#{configuration['password']}' WITH GRANT OPTION; SQL diff --git a/activerecord/test/cases/tasks/mysql_rake_test.rb b/activerecord/test/cases/tasks/mysql_rake_test.rb index <HASH>..<HASH> 100644 --- a/activerecord/test/cases/tasks/mysql_rake_test.rb +++ b/activerecord/test/cases/tasks/mysql_rake_test.rb @@ -167,7 +167,7 @@ if current_adapter?(:Mysql2Adapter) def assert_permissions_granted_for(db_user) db_name = @configuration["database"] db_password = @configuration["password"] - @connection.expects(:execute).with("GRANT ALL PRIVILEGES ON #{db_name}.* TO '#{db_user}'@'localhost' IDENTIFIED BY '#{db_password}' WITH GRANT OPTION;") + @connection.expects(:execute).with("GRANT ALL PRIVILEGES ON `#{db_name}`.* TO '#{db_user}'@'localhost' IDENTIFIED BY '#{db_password}' WITH GRANT OPTION;") end end
Fix quoting in db:create grant all statement. The database name used in the test would have actually shown this if it had tried to execute on a real Mysql instead of being stubbed out (dashes in database names needs quotes).
rails_rails
train
9b9db3a1c982036bdc19302700e78d8db4f2be0d
diff --git a/hprof-heap/src/main/java/org/gridkit/jvmtool/heapdump/HeapWalker.java b/hprof-heap/src/main/java/org/gridkit/jvmtool/heapdump/HeapWalker.java index <HASH>..<HASH> 100644 --- a/hprof-heap/src/main/java/org/gridkit/jvmtool/heapdump/HeapWalker.java +++ b/hprof-heap/src/main/java/org/gridkit/jvmtool/heapdump/HeapWalker.java @@ -1,9 +1,11 @@ package org.gridkit.jvmtool.heapdump; +import java.lang.reflect.Array; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; +import java.util.List; import java.util.Map; import java.util.Set; @@ -44,9 +46,23 @@ public class HeapWalker { return primitiveValue(instance); } }; + InstanceConverter primitiveArrayConverter = new InstanceConverter() { + @Override + public Object convert(Instance instance) { + return primitiveArrayValue(instance); + } + }; for(String ptype: BOX_TYPES) { CONVERTERS.put(ptype, primitiveConverter); } + CONVERTERS.put("boolean[]", primitiveArrayConverter); + CONVERTERS.put("byte[]", primitiveArrayConverter); + CONVERTERS.put("char[]", primitiveArrayConverter); + CONVERTERS.put("short[]", primitiveArrayConverter); + CONVERTERS.put("int[]", primitiveArrayConverter); + CONVERTERS.put("long[]", primitiveArrayConverter); + CONVERTERS.put("float[]", primitiveArrayConverter); + CONVERTERS.put("double[]", primitiveArrayConverter); } /** @@ -195,6 +211,60 @@ public class HeapWalker { } } + @SuppressWarnings("unchecked") + public static <T> T primitiveArrayValue(Instance obj) { + if (obj == null) { + return null; + } + if (obj instanceof PrimitiveArrayInstance) { + PrimitiveArrayInstance pa = (PrimitiveArrayInstance) obj; + String type = pa.getJavaClass().getName(); + Object array; + int len = pa.getLength(); + if ("boolean[]".equals(type)) { + array = new boolean[len]; + } + else if ("byte[]".equals(type)) { + array = new byte[len]; + } + else if ("char[]".equals(type)) { + array = new char[len]; + } + else if ("short[]".equals(type)) { + array = new 
short[len]; + } + else if ("int[]".equals(type)) { + array = new int[len]; + } + else if ("long[]".equals(type)) { + array = new long[len]; + } + else if ("float[]".equals(type)) { + array = new float[len]; + } + else if ("double[]".equals(type)) { + array = new double[len]; + } + else { + throw new IllegalArgumentException("Is not a primitive array: " + obj.getInstanceId() + " (" + obj.getJavaClass().getName() + ")"); + } + + List<Object> values = pa.getValues(); + for(int i = 0; i != values.size(); ++i) { + Object val = values.get(i); + if (val instanceof String) { + val = Character.valueOf(((String)val).charAt(0)); + } + Array.set(array, i, val); + } + + return (T)array; + } + else { + throw new IllegalArgumentException("Is not a primitive array: " + obj.getInstanceId() + " (" + obj.getJavaClass().getName() + ")"); + } + } + public static Iterable<Instance> walk(Instance root, String path) { return HeapPath.collect(root, HeapPath.parsePath(path, true)); }
Primitive array support for valueOf(Instance)
aragozin_jvm-tools
train
1b99559d5b745fa8c6228ee405ea8babcf778005
diff --git a/lib/buildpack/packager.rb b/lib/buildpack/packager.rb index <HASH>..<HASH> 100644 --- a/lib/buildpack/packager.rb +++ b/lib/buildpack/packager.rb @@ -47,6 +47,7 @@ module Buildpack def build_zip_file(zip_file_path, temp_dir) exclude_files = buildpack[:exclude_files].collect { |e| "--exclude=*#{e}*" }.join(" ") + `rm -rf #{zip_file_path}` `cd #{temp_dir} && zip -r #{zip_file_path} ./ #{exclude_files}` end diff --git a/spec/integration/buildpack/packager_spec.rb b/spec/integration/buildpack/packager_spec.rb index <HASH>..<HASH> 100644 --- a/spec/integration/buildpack/packager_spec.rb +++ b/spec/integration/buildpack/packager_spec.rb @@ -111,6 +111,18 @@ module Buildpack expect(zip_contents).to_not include(*files_to_exclude) end + + context 'when appending an exclusion for the zip file' do + specify do + Packager.package(buildpack) + Packager.package(buildpack.merge(exclude_files: files_to_exclude + ['VERSION'])) + + zip_file_path = File.join(buildpack_dir, 'sample_buildpack-v1.2.3.zip') + zip_contents = get_zip_contents(zip_file_path) + + expect(zip_contents).to_not include('VERSION') + end + end end describe 'caching of dependencies' do
cleanup zip file before creating new one [#<I>]
cloudfoundry_buildpack-packager
train
4fbb12491e219e9652c90a25d89d1bf4aab81e8f
diff --git a/lib/Sabre/DAV/Exception/ServiceUnavailable.php b/lib/Sabre/DAV/Exception/ServiceUnavailable.php index <HASH>..<HASH> 100644 --- a/lib/Sabre/DAV/Exception/ServiceUnavailable.php +++ b/lib/Sabre/DAV/Exception/ServiceUnavailable.php @@ -10,12 +10,12 @@ use Sabre\DAV; * This exception is thrown in case the service * is currently not available (e.g. down for maintenance). * - * @author Thomas Müller - * @copyright 2013 Thomas Müller <thomas.mueller@tmit.eu> + * @author Thomas Müller <thomas.mueller@tmit.eu> + * @copyright Copyright (C) 2007-2013 Rooftop Solutions. All rights reserved. * @license http://code.google.com/p/sabredav/wiki/License Modified BSD License */ -class ServiceUnavailable extends Sabre_DAV_Exception { +class ServiceUnavailable extends DAV\Exception { /** * Returns the HTTP statuscode for this exception
fixing copyright use correct exception base class
sabre-io_dav
train
dfbfc71887b85bb1d67a5fabdfc62e189775bb66
diff --git a/Thru/ActiveRecord/ActiveRecord.php b/Thru/ActiveRecord/ActiveRecord.php index <HASH>..<HASH> 100644 --- a/Thru/ActiveRecord/ActiveRecord.php +++ b/Thru/ActiveRecord/ActiveRecord.php @@ -6,7 +6,7 @@ use Thru\ActiveRecord\DatabaseLayer\TableBuilder; class ActiveRecord { - static protected $MYSQL_FORMAT = "Y-m-d H:i:s"; + static public $MYSQL_FORMAT = "Y-m-d H:i:s"; protected $_label_column = 'name'; protected $_columns_to_save_down; diff --git a/Thru/ActiveRecord/DatabaseLayer/Sql/Mysql.php b/Thru/ActiveRecord/DatabaseLayer/Sql/Mysql.php index <HASH>..<HASH> 100644 --- a/Thru/ActiveRecord/DatabaseLayer/Sql/Mysql.php +++ b/Thru/ActiveRecord/DatabaseLayer/Sql/Mysql.php @@ -364,6 +364,14 @@ class Mysql extends Base case "uuid": $type = "VARCHAR(36)"; break; + + case "md5": + $type = "VARCHAR(" . strlen(md5("test")) . ")"; + break; + + case "sha1": + $type = "VARCHAR(" . strlen(sha1("test")) . ")"; + break; } }
Add support for MD5 and SHA1 types
Thruio_ActiveRecord
train
fc572b1622a1f3303cce83e65d74a6aec487e2bd
diff --git a/src/main/java/com/etnetera/qa/seleniumbrowser/page/Page.java b/src/main/java/com/etnetera/qa/seleniumbrowser/page/Page.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/etnetera/qa/seleniumbrowser/page/Page.java +++ b/src/main/java/com/etnetera/qa/seleniumbrowser/page/Page.java @@ -31,6 +31,10 @@ abstract public class Page implements BrowserContext { protected String baseUrl; protected String baseUrlRegex; + + protected String url; + + protected String urlRegex; protected Boolean urlVerification; @@ -55,6 +59,10 @@ abstract public class Page implements BrowserContext { baseUrlRegex = config.baseUrlRegex()[0]; if (config.urlVerification().length > 0) urlVerification = config.urlVerification()[0]; + if (config.url().length > 0) + url = config.url()[0]; + if (config.urlRegex().length > 0) + urlRegex = config.urlRegex()[0]; if (config.waitTimeout().length > 0) waitTimeout = config.waitTimeout()[0]; if (config.waitRetryInterval().length > 0) @@ -100,11 +108,15 @@ abstract public class Page implements BrowserContext { } public String getUrl() { + if (url != null) + return url; String uri = getUri(); return uri == null ? 
null : (getBaseUrl() + uri); } public String getUrlRegex() { + if (urlRegex != null) + return urlRegex; String baseUrlRegex = getBaseUrlRegex(); if (baseUrlRegex == null) return null; diff --git a/src/main/java/com/etnetera/qa/seleniumbrowser/page/PageConfig.java b/src/main/java/com/etnetera/qa/seleniumbrowser/page/PageConfig.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/etnetera/qa/seleniumbrowser/page/PageConfig.java +++ b/src/main/java/com/etnetera/qa/seleniumbrowser/page/PageConfig.java @@ -24,6 +24,10 @@ public @interface PageConfig { public String[] baseUrlRegex() default {}; + public String[] url() default {}; + + public String[] urlRegex() default {}; + public boolean[] urlVerification() default {}; public double[] waitTimeout() default {}; diff --git a/src/test/resources/seleniumbrowser.properties b/src/test/resources/seleniumbrowser.properties index <HASH>..<HASH> 100644 --- a/src/test/resources/seleniumbrowser.properties +++ b/src/test/resources/seleniumbrowser.properties @@ -1 +1,2 @@ -browser.baseUrl=http://www.etnetera2.cz \ No newline at end of file +browser.baseUrl=http://www.etneteragroup.com +browser.baseUrlRegex=http(s)?://www\\.etneteragroup\\.com \ No newline at end of file
add url and urlRegex into PageConfig annotation
etnetera_seb
train
5a4495c694013696d56c8658f7e3327652890fec
diff --git a/inherited_resources.gemspec b/inherited_resources.gemspec index <HASH>..<HASH> 100644 --- a/inherited_resources.gemspec +++ b/inherited_resources.gemspec @@ -5,11 +5,11 @@ Gem::Specification.new do |s| s.name = %q{inherited_resources} - s.version = "1.1.0" + s.version = "1.1.1" s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= s.authors = ["Jos\303\251 Valim"] - s.date = %q{2010-03-26} + s.date = %q{2010-04-03} s.description = %q{Inherited Resources speeds up development by making your controllers inherit all restful actions so you just have to focus on what is important.} s.email = %q{jose.valim@gmail.com} s.extra_rdoc_files = [ diff --git a/lib/inherited_resources/version.rb b/lib/inherited_resources/version.rb index <HASH>..<HASH> 100644 --- a/lib/inherited_resources/version.rb +++ b/lib/inherited_resources/version.rb @@ -1,3 +1,3 @@ module InheritedResources - VERSION = '1.1.0'.freeze + VERSION = '1.1.1'.freeze end diff --git a/test/test_helper.rb b/test/test_helper.rb index <HASH>..<HASH> 100644 --- a/test/test_helper.rb +++ b/test/test_helper.rb @@ -1,5 +1,10 @@ require 'rubygems' -gem 'responders', '0.5.5' + +gem 'responders', '0.6.0' +gem 'activesupport', '3.0.0.beta2' +gem 'activemodel', '3.0.0.beta2' +gem 'actionpack', '3.0.0.beta2' +gem 'railties', '3.0.0.beta2' begin gem "test-unit" @@ -37,16 +42,16 @@ require_dependency 'inherited_resources' ActionController::Base.view_paths = File.join(File.dirname(__FILE__), 'views') -InheritedResources::Router = ActionDispatch::Routing::RouteSet.new -InheritedResources::Router.draw do |map| +InheritedResources::Routes = ActionDispatch::Routing::RouteSet.new +InheritedResources::Routes.draw do |map| map.connect ':controller/:action/:id' map.connect ':controller/:action' end -ActionController::Base.send :include, InheritedResources::Router.url_helpers +ActionController::Base.send :include, InheritedResources::Routes.url_helpers class 
ActiveSupport::TestCase setup do - @router = InheritedResources::Router + @routes = InheritedResources::Routes end end
Updated to be compatible with Rails master.
activeadmin_inherited_resources
train
6a13007fcd19a9547106adfef66cad32c07dea1c
diff --git a/source/Plx.js b/source/Plx.js index <HASH>..<HASH> 100644 --- a/source/Plx.js +++ b/source/Plx.js @@ -159,7 +159,7 @@ const FILTER_PROPERTIES = [ ]; // Props to be removed from passing directly to the component element -const PROPS_TO_OMIT = [ +const PROPS_TO_OMIT = new Set([ 'animateWhenNotInViewport', 'children', 'className', @@ -169,7 +169,7 @@ const PROPS_TO_OMIT = [ 'tagName', 'onPlxStart', 'onPlxEnd', -]; +]); // Get element's top offset function getElementTop(el) { @@ -384,7 +384,7 @@ function parallax(scrollPosition, start, duration, startValue, endValue, easing) value += min; } - return parseFloat(value.toFixed(3)); + return Math.floor(value * 1000) / 1000; } // Calculates current value for color parallax @@ -501,7 +501,7 @@ function omit(object, keysToOmit) { const result = {}; Object.keys(object).forEach(key => { - if (keysToOmit.indexOf(key) === -1) { + if (!keysToOmit.has(key)) { result[key] = object[key]; } }); @@ -520,7 +520,6 @@ function getNewState(scrollPosition, props, state, element) { } = props; const { showElement, - plxStyle, plxStateClasses, } = state; @@ -559,13 +558,8 @@ function getNewState(scrollPosition, props, state, element) { const segments = []; let isInSegment = false; let lastSegmentScrolledBy = null; - const maxScroll = Math.max( - document.body.scrollHeight, - document.body.offsetHeight, - document.documentElement.clientHeight, - document.documentElement.scrollHeight, - document.documentElement.offsetHeight - ) - window.innerHeight; + const bodyHeight = document.documentElement.scrollHeight || document.body.scrollHeight; + const maxScroll = bodyHeight - window.innerHeight; for (let i = 0; i < parallaxData.length; i++) { const { @@ -698,10 +692,7 @@ function getNewState(scrollPosition, props, state, element) { newStyle.OFilter = newStyle.filter; newStyle.msFilter = newStyle.filter; - // "Stupid" check if style should be updated - if (JSON.stringify(plxStyle) !== JSON.stringify(newStyle)) { - newState.plxStyle = 
newStyle; - } + newState.plxStyle = newStyle; // Adding state class const newPlxStateClasses = getClasses(lastSegmentScrolledBy, isInSegment, parallaxData);
[fix] Fix some performance bottlenecks (#<I>) * [fix] Fix some bottlenecks * [refactor] Simplified maxScroll calculation method
Stanko_react-plx
train
b76015539688d9779d0e5b39a93f36092862b462
diff --git a/src/js/features/progress.js b/src/js/features/progress.js index <HASH>..<HASH> 100644 --- a/src/js/features/progress.js +++ b/src/js/features/progress.js @@ -494,7 +494,7 @@ Object.assign(MediaElementPlayer.prototype, { player.setCurrentRail(e); } updateSlider(); - } else if (!broadcast || t.options.forceLive) { + } else if (!broadcast && t.options.forceLive) { const label = document.createElement('span'); label.className = `${t.options.classPrefix}broadcast`; label.innerText = i18n.t('mejs.live-broadcast');
fixes the forceLive always adding "Live Broadcast" memory leak (#<I>) * fixes the forceLive always adding "Live Broadcast" memory leak * fixing the if condition only no need for another if
mediaelement_mediaelement
train
eb2bc6b86b63e07d0f2c15d1e64a32ed90340405
diff --git a/kubespawner/objects.py b/kubespawner/objects.py index <HASH>..<HASH> 100644 --- a/kubespawner/objects.py +++ b/kubespawner/objects.py @@ -97,7 +97,7 @@ def make_pod_spec( { 'name': 'notebook', 'image': image_spec, - 'command': cmd, + 'args': cmd, 'imagePullPolicy': image_pull_policy, 'ports': [{ 'containerPort': port, diff --git a/tests/test_objects.py b/tests/test_objects.py index <HASH>..<HASH> 100644 --- a/tests/test_objects.py +++ b/tests/test_objects.py @@ -39,7 +39,7 @@ def test_make_simplest_pod(): "name": "notebook", "image": "jupyter/singleuser:latest", "imagePullPolicy": "IfNotPresent", - "command": ["jupyterhub-singleuser"], + "args": ["jupyterhub-singleuser"], "ports": [{ "containerPort": 8888 }], @@ -97,7 +97,7 @@ def test_make_labeled_pod(): "name": "notebook", "image": "jupyter/singleuser:latest", "imagePullPolicy": "IfNotPresent", - "command": ["jupyterhub-singleuser"], + "args": ["jupyterhub-singleuser"], "ports": [{ "containerPort": 8888 }], @@ -157,7 +157,7 @@ def test_make_pod_with_image_pull_secrets(): "name": "notebook", "image": "jupyter/singleuser:latest", "imagePullPolicy": "IfNotPresent", - "command": ["jupyterhub-singleuser"], + "args": ["jupyterhub-singleuser"], "ports": [{ "containerPort": 8888 }], @@ -219,7 +219,7 @@ def test_set_pod_uid_fs_gid(): "name": "notebook", "image": "jupyter/singleuser:latest", "imagePullPolicy": "IfNotPresent", - "command": ["jupyterhub-singleuser"], + "args": ["jupyterhub-singleuser"], "ports": [{ "containerPort": 8888 }], @@ -278,7 +278,7 @@ def test_make_pod_resources_all(): "name": "notebook", "image": "jupyter/singleuser:latest", "imagePullPolicy": "IfNotPresent", - "command": ["jupyterhub-singleuser"], + "args": ["jupyterhub-singleuser"], "ports": [{ "containerPort": 8888 }], @@ -339,7 +339,7 @@ def test_make_pod_with_env(): "name": "notebook", "image": "jupyter/singleuser:latest", "imagePullPolicy": "IfNotPresent", - "command": ["jupyterhub-singleuser"], + "args": 
["jupyterhub-singleuser"], "ports": [{ "containerPort": 8888 }],
specifying command should set args, not command - `command` in pod spec sets ENTRYPOINT - `args` in pod spec sets CMD we want to set the equivalent of CMD, not ENTRYPOINT
jupyterhub_kubespawner
train
137f3ea0b809c39cafe6f16638d88c59e8bf4f93
diff --git a/lib/yoti/http/profile_request.rb b/lib/yoti/http/profile_request.rb index <HASH>..<HASH> 100644 --- a/lib/yoti/http/profile_request.rb +++ b/lib/yoti/http/profile_request.rb @@ -18,6 +18,7 @@ module Yoti yoti_request.encrypted_connect_token = @encrypted_connect_token yoti_request.http_method = 'GET' yoti_request.endpoint = 'profile' + yoti_request.set_auth_key = true yoti_request end end diff --git a/lib/yoti/http/request.rb b/lib/yoti/http/request.rb index <HASH>..<HASH> 100644 --- a/lib/yoti/http/request.rb +++ b/lib/yoti/http/request.rb @@ -14,6 +14,9 @@ module Yoti # @return [Hash] the body sent with the request attr_accessor :payload + # @return [Boolean] Whether the X-Auth-Key header should be set to the client's key + attr_accessor :set_auth_key + # Makes a HTTP request after signing the headers # @return [Hash] the body from the HTTP request def body @@ -21,7 +24,7 @@ module Yoti raise RequestError, 'The payload needs to be a hash.' unless @payload.to_s.empty? || @payload.is_a?(Hash) res = Net::HTTP.start(uri.hostname, Yoti.configuration.api_port, use_ssl: https_uri?) 
do |http| - signed_request = SignedRequest.new(unsigned_request, path, @payload).sign + signed_request = SignedRequest.new(unsigned_request, path, @payload).sign(with_auth_key: @set_auth_key) http.request(signed_request) end diff --git a/lib/yoti/http/signed_request.rb b/lib/yoti/http/signed_request.rb index <HASH>..<HASH> 100644 --- a/lib/yoti/http/signed_request.rb +++ b/lib/yoti/http/signed_request.rb @@ -10,13 +10,11 @@ module Yoti @auth_key = Yoti::SSL.auth_key_from_pem end - def sign - @http_req['X-Yoti-Auth-Key'] = @auth_key + def sign(with_auth_key: false) + @http_req['X-Yoti-Auth-Key'] = @auth_key if with_auth_key @http_req['X-Yoti-Auth-Digest'] = message_signature @http_req['X-Yoti-SDK'] = Yoti.configuration.sdk_identifier @http_req['X-Yoti-SDK-Version'] = "#{Yoti.configuration.sdk_identifier}-#{Yoti::VERSION}" - @http_req['Content-Type'] = 'application/json' - @http_req['Accept'] = 'application/json' @http_req end diff --git a/spec/yoti/http/signed_request_spec.rb b/spec/yoti/http/signed_request_spec.rb index <HASH>..<HASH> 100644 --- a/spec/yoti/http/signed_request_spec.rb +++ b/spec/yoti/http/signed_request_spec.rb @@ -16,7 +16,7 @@ describe 'Yoti::SignedRequest' do end describe '#sign' do - let(:signed) { signed_request.send(:sign) } + let(:signed) { signed_request.sign(with_auth_key: true) } it 'return a signed request' do expect(signed).to be_a(Net::HTTP::Get) expect(signed['X-Yoti-Auth-Key']).to eql('MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAs9zAY5K9O92zfmRhxBO0NX8Dg7UyyIaLE5GdbCMimlccew2p8LN6P8EDUoU7hiCbW1EQ/cp4iZVIp7UPA3AO/ecuejs2DjkFQOeMGnSlwD0pk74ZI3ammQtYm2ml47IWGrciMh4dPIPh0SOF+tVD0kHhAB9cMaj96Ij2De60Y7SeqvIXUHCtnoHId7Zk5I71mtewAnb9Gpx+wPnr2gpX/uUqkh+3ZHsF2eNCpw/ICvKj4UkNXopUyBemDp3n/s7u8TFyewp7ipPbFxDmxZKJT9SjZNFFe/jc2V/R2uC9qSFRKpTsxqmXggjiBlH46cpyg2SeYFj1p5bkpKZ10b3iOwIDAQAB') @@ -24,8 +24,6 @@ describe 'Yoti::SignedRequest' do expect(signed['X-Yoti-SDK']).to eql('Ruby') expect(signed['X-Yoti-SDK-Version']).to 
eql("Ruby-#{Yoti::VERSION}") expect(signed['X-Yoti-SDK-Version']).to match(/Ruby-\d+\.\d+\.\d+/) - expect(signed['Content-Type']).to eql('application/json') - expect(signed['Accept']).to eql('application/json') end end
SDK-<I>: Do not set X-Yoti-Auth-Key by default, only on requests that require it
getyoti_yoti-ruby-sdk
train
2fbf834731d37d53d81238f58866ff90edca5cbb
diff --git a/src/editor/components/RefComponent.js b/src/editor/components/RefComponent.js index <HASH>..<HASH> 100644 --- a/src/editor/components/RefComponent.js +++ b/src/editor/components/RefComponent.js @@ -16,7 +16,7 @@ export default class RefComponent extends NodeComponent { return $$('div').addClass('sc-ref-component').append( $$('div').addClass('se-label').append(label), $$('div').addClass('se-text').html(entityHtml) - ) + ).attr('data-id', ref.id) } }
Set data-id on ref component.
substance_texture
train
25dd35a51f2fc4d3eec039c3692ab09431bdfe92
diff --git a/Eloquent/Model.php b/Eloquent/Model.php index <HASH>..<HASH> 100755 --- a/Eloquent/Model.php +++ b/Eloquent/Model.php @@ -98,6 +98,11 @@ abstract class Model implements ArrayAccess, ArrayableInterface, JsonableInterfa protected $visible = array(); /** + * The accessors to append to the model's array form. + */ + protected $appends = array(); + + /** * The attributes that are mass assignable. * * @var array @@ -1609,6 +1614,17 @@ abstract class Model implements ArrayAccess, ArrayableInterface, JsonableInterfa } /** + * Set the accessors to append to model arrays. + * + * @param array $appends + * @return void + */ + public function setAppends(array $appends) + { + $this->appends = $appends; + } + + /** * Get the fillable attributes for the model. * * @return array @@ -1810,7 +1826,17 @@ abstract class Model implements ArrayAccess, ArrayableInterface, JsonableInterfa { if ( ! array_key_exists($key, $attributes)) continue; - $attributes[$key] = $this->mutateAttribute($key, $attributes[$key]); + $attributes[$key] = $this->mutateAttribute( + $key, $attributes[$key] + ); + } + + // Here we will grab all of the appended, calculated attributes to this model + // as these attributes are not really in the attributes array, but are run + // when we need to array or JSON the model for convenience to the coder. + foreach ($this->appends as $key) + { + $attributes[$key] = $this->mutateAttribute($key, null); } return $attributes;
Add ability to append items to model arrays / JSON.
illuminate_database
train
b3eba009283d11fd0f2ae22fd05890cd30ef8beb
diff --git a/src/ClientAbstract.js b/src/ClientAbstract.js index <HASH>..<HASH> 100644 --- a/src/ClientAbstract.js +++ b/src/ClientAbstract.js @@ -115,22 +115,6 @@ class ClientAbstract extends EventEmitter { this.ws.terminate() } - isOpening () { - return this.ws.readyState === this.ws.CONNECTING - } - - isOpen () { - return this.ws.readyState === this.ws.OPEN - } - - isClosing () { - return this.ws.readyState === this.ws.CLOSING - } - - isClosed () { - return this.ws.readyState === this.ws.CLOSED - } - _handleError (err) { this._da(`handling error: ${maybestack(err)}`) // Assure that _handleClose or _handleError emits an event only once. @@ -138,8 +122,9 @@ class ClientAbstract extends EventEmitter { this._da('socketeer is closing, ignoring _handleError') return } + // If we are doing a session resume, + // we do *not* want to emit an error event. if (!this._resumePromiseResolve) { - // This means we are a Client, and we attempted a session resume. this._emit('error', err, true) } this._closeMustHaveError = true @@ -180,7 +165,7 @@ class ClientAbstract extends EventEmitter { this._detachEvents() if (this._resumePromiseResolve) { // This means we are a Client, and we attempted a session resume. - // We _should_ have this function. + // We *should* have this function. 
this._resolveSessionResume(false) } else if (!this._doNotEmitClose) { this._emit('close', code, message, error) @@ -356,11 +341,6 @@ class ClientAbstract extends EventEmitter { this._actions.set(name, handler) } - _generateActionId () { - this._da(`generated action id: ${this._currentActionId}`) - return this._currentActionId++ - } - // TODO: Action timeouts request (name, data) { return new Promise((resolve, reject) => { @@ -377,6 +357,11 @@ class ClientAbstract extends EventEmitter { }) } + _generateActionId () { + this._da(`generated action id: ${this._currentActionId}`) + return this._currentActionId++ + } + _validateSessionResumeToken (token) { // Note: If the session resume token does have a : in it during the handshake, // then it will cause session resuming to silently fail. @@ -390,6 +375,22 @@ class ClientAbstract extends EventEmitter { } return true } + + isOpening () { + return this.ws.readyState === this.ws.CONNECTING + } + + isOpen () { + return this.ws.readyState === this.ws.OPEN + } + + isClosing () { + return this.ws.readyState === this.ws.CLOSING + } + + isClosed () { + return this.ws.readyState === this.ws.CLOSED + } } module.exports = ClientAbstract
ClientAbstract: move functions around
SEAPUNK_socketeer
train
62d30c763174bb3741e1e63f258aff9fa4221b95
diff --git a/samples/flushfs/flush_fs_test.go b/samples/flushfs/flush_fs_test.go index <HASH>..<HASH> 100644 --- a/samples/flushfs/flush_fs_test.go +++ b/samples/flushfs/flush_fs_test.go @@ -122,6 +122,19 @@ func (t *FlushFSTest) setFsyncError(err error) { t.fsyncErr = err } +// Like syscall.Dup2, but correctly annotates the syscall as blocking. See here +// for more info: https://github.com/golang/go/issues/10202 +func dup2(oldfd int, newfd int) (err error) { + _, _, e1 := syscall.Syscall( + syscall.SYS_DUP2, uintptr(oldfd), uintptr(newfd), 0) + + if e1 != 0 { + err = e1 + } + + return +} + //////////////////////////////////////////////////////////////////////// // Tests //////////////////////////////////////////////////////////////////////// @@ -575,7 +588,7 @@ func (t *FlushFSTest) Dup2() { // Duplicate the temporary file descriptor on top of the file from our file // system. We should see a flush. - err = syscall.Dup2(int(f2.Fd()), int(f1.Fd())) + err = dup2(int(f2.Fd()), int(f1.Fd())) ExpectEq(nil, err) ExpectThat(t.getFlushes(), ElementsAre("taco"))
Fixed a deadlock in FlushFSTest.Dup2.
jacobsa_fuse
train
ace7d261cc1f83b0c0695ccdef72d7142172ac77
diff --git a/auth/token.go b/auth/token.go index <HASH>..<HASH> 100644 --- a/auth/token.go +++ b/auth/token.go @@ -134,6 +134,10 @@ func createPasswordToken(u *User) (*PasswordToken, error) { return &t, nil } +func (t *PasswordToken) user() (*User, error) { + return GetUserByEmail(t.UserEmail) +} + func getPasswordToken(token string) (*PasswordToken, error) { return nil, nil } diff --git a/auth/token_test.go b/auth/token_test.go index <HASH>..<HASH> 100644 --- a/auth/token_test.go +++ b/auth/token_test.go @@ -157,3 +157,16 @@ func (s *S) TestCreatePasswordTokenErrors(c *gocheck.C) { c.Check(err.Error(), gocheck.Equals, t.want) } } + +func (s *S) TestPasswordTokenUser(c *gocheck.C) { + u := User{Email: "need@who.com", Password: "123456"} + err := u.Create() + c.Assert(err, gocheck.IsNil) + defer s.conn.Users().Remove(bson.M{"email": u.Email}) + t, err := createPasswordToken(&u) + c.Assert(err, gocheck.IsNil) + u2, err := t.user() + u2.Keys = u.Keys + c.Assert(err, gocheck.IsNil) + c.Assert(*u2, gocheck.DeepEquals, u) +}
auth: added method user to the PasswordToken type
tsuru_tsuru
train
68acc362a31ddcfcb5c31fa2c7b9eefddc40456f
diff --git a/test/test_parser.py b/test/test_parser.py index <HASH>..<HASH> 100644 --- a/test/test_parser.py +++ b/test/test_parser.py @@ -2045,6 +2045,31 @@ print(1) wf = script.workflow() Base_Executor(wf).run() + def testSequentialSubsteps(self): + '''Test sequential execution of substeps''' + script = SoS_Script(r''' +[10: shared='sum'] +sum = 0 +input: for_each=dict(i=range(4)), concurrent=False +sum += i +print(f'sum is {sum} at index {_index}') +''') + wf = script.workflow() + Base_Executor(wf).run() + self.assertEqual(env.sos_dict['sum'], 6) + + + def testLimitedConcurrency(self): + '''Set concurrent=INT''' + + script = SoS_Script(r''' +[10] +input: for_each=dict(i=range(6)), concurrent=2 +print(i) +''') + wf = script.workflow() + Base_Executor(wf).run() + if __name__ == '__main__': #suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestParser) # unittest.TextTestRunner(, suite).run()
Add tests for limited concurrency #<I>
vatlab_SoS
train
e43aaf849c5552082fec83301bbf4d3f5d586b10
diff --git a/moneta-core/src/main/java/org/javamoney/moneta/FastMoney.java b/moneta-core/src/main/java/org/javamoney/moneta/FastMoney.java index <HASH>..<HASH> 100644 --- a/moneta-core/src/main/java/org/javamoney/moneta/FastMoney.java +++ b/moneta-core/src/main/java/org/javamoney/moneta/FastMoney.java @@ -647,7 +647,7 @@ public final class FastMoney implements MonetaryAmount, Comparable<MonetaryAmoun .of(ToStringMonetaryAmountFormatStyle.FAST_MONEY); private BigDecimal getBigDecimal() { - return BigDecimal.valueOf(this.number).movePointLeft(SCALE); + return BigDecimal.valueOf(this.number, SCALE); } @Override diff --git a/moneta-core/src/main/java/org/javamoney/moneta/internal/DefaultCashRounding.java b/moneta-core/src/main/java/org/javamoney/moneta/internal/DefaultCashRounding.java index <HASH>..<HASH> 100644 --- a/moneta-core/src/main/java/org/javamoney/moneta/internal/DefaultCashRounding.java +++ b/moneta-core/src/main/java/org/javamoney/moneta/internal/DefaultCashRounding.java @@ -130,7 +130,7 @@ final class DefaultCashRounding implements MonetaryRounding, Serializable { } } return amount.getFactory().setCurrency(amount.getCurrency()) - .setNumber(BigDecimal.valueOf(minors).movePointLeft(scale)).create(); + .setNumber(BigDecimal.valueOf(minors, scale)).create(); } @Override
Avoid intermediate object when creating BigDecimal Currently in some places a BigDecimal is created from an unscaled value and a scale using an intermediate BigDecimal. This creates unnecessary object churn. Such a BigDecimal can be created directly.
JavaMoney_jsr354-ri
train
b7ec7d8b74f420d06fabffefca45ef2808089bd4
diff --git a/jsr330activator.implementation/src/main/java/no/steria/osgi/jsr330activator/Jsr330Activator.java b/jsr330activator.implementation/src/main/java/no/steria/osgi/jsr330activator/Jsr330Activator.java index <HASH>..<HASH> 100644 --- a/jsr330activator.implementation/src/main/java/no/steria/osgi/jsr330activator/Jsr330Activator.java +++ b/jsr330activator.implementation/src/main/java/no/steria/osgi/jsr330activator/Jsr330Activator.java @@ -65,12 +65,13 @@ public class Jsr330Activator implements BundleActivator { try { BundleWiring bundleWiring = bundle.adapt(BundleWiring.class); if (null != bundleWiring) { - Collection<String> classnames = bundleWiring.listResources("/", "*", BundleWiring.LISTRESOURCES_LOCAL); - if (null != classnames) { + Collection<String> resources = bundleWiring.listResources("/", "*.class", BundleWiring.LISTRESOURCES_RECURSE | BundleWiring.LISTRESOURCES_LOCAL); + if (null != resources) { ClassLoader bundleClassLoader = bundleWiring.getClassLoader(); if (null != bundleClassLoader) { - for (String classname : classnames) { + for (String resource : resources) { try { + String classname = resource.substring(0, resource.length() - ".class".length()).replaceAll("/", "."); Class<?> clazz = bundleClassLoader.loadClass(classname); classes.add(clazz); } catch (ClassNotFoundException e) { } diff --git a/jsr330activator.implementation/src/test/java/no/steria/osgi/jsr330activator/Jsr330ActivatorTest.java b/jsr330activator.implementation/src/test/java/no/steria/osgi/jsr330activator/Jsr330ActivatorTest.java index <HASH>..<HASH> 100644 --- a/jsr330activator.implementation/src/test/java/no/steria/osgi/jsr330activator/Jsr330ActivatorTest.java +++ b/jsr330activator.implementation/src/test/java/no/steria/osgi/jsr330activator/Jsr330ActivatorTest.java @@ -36,8 +36,8 @@ public class Jsr330ActivatorTest { Bundle bundle = mock(Bundle.class); BundleWiring bundleWiring = mock(BundleWiring.class); // Names of 3 classes found in the test project, name of 1 class not 
found (to test the try/catch) - List<String> classnames = Arrays.asList("no.steria.osgi.jsr330activator.testbundle.HelloService", "no.steria.osgi.jsr330activator.testbundle.implementation.HelloServiceImplementation", "no.steria.osgi.jsr330activator.testbundle.implementation.HelloServiceProvider", "no.steria.osgi.jsr330activator.testbundle.implementation.NotFoundClass"); - when(bundleWiring.listResources(anyString(), anyString(), eq(BundleWiring.LISTRESOURCES_LOCAL))).thenReturn(classnames); + List<String> classResources = Arrays.asList("no/steria/osgi/jsr330activator/testbundle/HelloService.class", "no/steria/osgi/jsr330activator/testbundle/implementation/HelloServiceImplementation.class", "no/steria/osgi/jsr330activator/testbundle/implementation/HelloServiceProvider.class", "no/steria/osgi/jsr330activator/testbundle/implementation/NotFoundClass.class"); + when(bundleWiring.listResources(anyString(), anyString(), anyInt())).thenReturn(classResources); when(bundleWiring.getClassLoader()).thenReturn(this.getClass().getClassLoader()); when(bundle.adapt(eq(BundleWiring.class))).thenReturn(bundleWiring); @@ -160,9 +160,9 @@ public class Jsr330ActivatorTest { public void testActivatorStartStop() throws Exception { BundleWiring bundleWiring = mock(BundleWiring.class); // Names of 3 classes found in the test project, name of 1 class not found (to test the try/catch) - List<String> classnames = Arrays.asList("no.steria.osgi.jsr330activator.testbundle.HelloService", "no.steria.osgi.jsr330activator.testbundle.implementation.HelloServiceImplementation", "no.steria.osgi.jsr330activator.testbundle.implementation.HelloServiceProvider", "no.steria.osgi.jsr330activator.testbundle.implementation.NotFoundClass"); - when(bundleWiring.listResources(anyString(), anyString(), eq(BundleWiring.LISTRESOURCES_LOCAL))).thenReturn(classnames); - when(bundleWiring.getClassLoader()).thenReturn(this.getClass().getClassLoader()); + List<String> classResources = 
Arrays.asList("no/steria/osgi/jsr330activator/testbundle/HelloService.class", "no/steria/osgi/jsr330activator/testbundle/implementation/HelloServiceImplementation.class", "no/steria/osgi/jsr330activator/testbundle/implementation/HelloServiceProvider.class", "no/steria/osgi/jsr330activator/testbundle/implementation/NotFoundClass.class"); + when(bundleWiring.listResources(anyString(), anyString(), anyInt())).thenReturn(classResources); + when(bundleWiring.getClassLoader()).thenReturn(this.getClass().getClassLoader()); MockBundle bundle = new MockBundle(bundleWiring); BundleContext bundleContext = new MockBundleContext(bundle);
Made correct use of BundleWiring.listResources. Changes: - Changed listResources arguments to make it scan just the bundle recursively for class files (a bitwise OR of two values) - Modify the returned resources into loadable class names (strip ".class" and change "/" to ".")
sbang_jsr330activator
train
0560fa0368580027190a6b11c52f8deb3a827a92
diff --git a/src/main/java/org/primefaces/component/accordionpanel/AccordionPanelRenderer.java b/src/main/java/org/primefaces/component/accordionpanel/AccordionPanelRenderer.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/primefaces/component/accordionpanel/AccordionPanelRenderer.java +++ b/src/main/java/org/primefaces/component/accordionpanel/AccordionPanelRenderer.java @@ -120,7 +120,8 @@ public class AccordionPanelRenderer extends CoreRenderer { wb.attr("multiple", multiple, false) .callback("onTabChange", "function(panel)", acco.getOnTabChange()) - .callback("onTabShow", "function(panel)", acco.getOnTabShow()); + .callback("onTabShow", "function(panel)", acco.getOnTabShow()) + .callback("onTabClose", "function(panel)", acco.getOnTabClose()); if(acco.getTabController() != null) { wb.attr("controlled", true); diff --git a/src/main/resources-maven-jsf/ui/accordionPanel.xml b/src/main/resources-maven-jsf/ui/accordionPanel.xml index <HASH>..<HASH> 100644 --- a/src/main/resources-maven-jsf/ui/accordionPanel.xml +++ b/src/main/resources-maven-jsf/ui/accordionPanel.xml @@ -62,6 +62,12 @@ <description>Client side callback to execute when a tab is shown.</description> </attribute> <attribute> + <name>onTabClose</name> + <required>false</required> + <type>java.lang.String</type> + <description>Client side callback to execute when a tab is closed.</description> + </attribute> + <attribute> <name>dynamic</name> <required>false</required> <type>java.lang.Boolean</type> diff --git a/src/main/resources/META-INF/resources/primefaces/accordion/accordion.js b/src/main/resources/META-INF/resources/primefaces/accordion/accordion.js index <HASH>..<HASH> 100644 --- a/src/main/resources/META-INF/resources/primefaces/accordion/accordion.js +++ b/src/main/resources/META-INF/resources/primefaces/accordion/accordion.js @@ -169,7 +169,11 @@ PrimeFaces.widget.AccordionPanel = PrimeFaces.widget.BaseWidget.extend({ var oldHeader = this.headers.filter('.ui-state-active'); 
oldHeader.children('.ui-icon').removeClass(this.cfg.expandedIcon).addClass(this.cfg.collapsedIcon); oldHeader.attr('aria-selected', false); - oldHeader.attr('aria-expanded', false).removeClass('ui-state-active ui-corner-top').addClass('ui-corner-all').next().attr('aria-hidden', true).slideUp(); + oldHeader.attr('aria-expanded', false).removeClass('ui-state-active ui-corner-top').addClass('ui-corner-all') + .next().attr('aria-hidden', true).slideUp(function(){ + if(_self.cfg.onTabClose) + _self.cfg.onTabClose.call(_self, panel); + }); } //activate selected @@ -184,7 +188,7 @@ PrimeFaces.widget.AccordionPanel = PrimeFaces.widget.BaseWidget.extend({ }, hide: function(index) { - var $this = this, + var _self = this, panel = this.panels.eq(index), header = panel.prev(); @@ -192,8 +196,8 @@ PrimeFaces.widget.AccordionPanel = PrimeFaces.widget.BaseWidget.extend({ header.attr('aria-expanded', false).children('.ui-icon').removeClass(this.cfg.expandedIcon).addClass(this.cfg.collapsedIcon); header.removeClass('ui-state-active ui-corner-top').addClass('ui-corner-all'); panel.attr('aria-hidden', true).slideUp(function(){ - if($this.cfg.onTabClose) - $this.cfg.onTabClose.call($this, panel); + if(_self.cfg.onTabClose) + _self.cfg.onTabClose.call(_self, panel); }); this.removeFromSelection(index);
Missing part ofs onTabClose merge
primefaces_primefaces
train
d0c79c4333cf4a2231c6ba79e5ab49068889845d
diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py index <HASH>..<HASH> 100644 --- a/tests/integration/__init__.py +++ b/tests/integration/__init__.py @@ -512,10 +512,10 @@ class SaltMaster(SaltDaemonScriptBase): cli_script_name = 'salt-master' def get_check_ports(self): - return set([self.config['runtests_conn_check_port']]) - #return set([self.config['ret_port'], - # self.config['publish_port'], - # self.config['runtests_conn_check_port']]) + #return set([self.config['runtests_conn_check_port']]) + return set([self.config['ret_port'], + self.config['publish_port'], + self.config['runtests_conn_check_port']]) def get_script_args(self): #return ['-l', 'debug']
Check all ports on the salt-master
saltstack_salt
train
c8bc719ff6c688295d59ff35b68405e1ecbd86f6
diff --git a/modules/discopower/config-templates/module_discopower.php b/modules/discopower/config-templates/module_discopower.php index <HASH>..<HASH> 100644 --- a/modules/discopower/config-templates/module_discopower.php +++ b/modules/discopower/config-templates/module_discopower.php @@ -36,6 +36,15 @@ $config = array ( */ 'cdc.domain' => NULL, + /* + * The lifetime of the common domain cookie, in seconds. + * + * If this is NULL (the default), the common domain cookie will be deleted when the browser closes. + * + * Example: 'cdc.lifetime' => 180*24*60*60, // 180 days. + */ + 'cdc.lifetime' => NULL, + ); ?> diff --git a/modules/discopower/lib/PowerIdPDisco.php b/modules/discopower/lib/PowerIdPDisco.php index <HASH>..<HASH> 100644 --- a/modules/discopower/lib/PowerIdPDisco.php +++ b/modules/discopower/lib/PowerIdPDisco.php @@ -26,6 +26,15 @@ class sspmod_discopower_PowerIdPDisco extends SimpleSAML_XHTML_IdPDisco { /** + * The lifetime of the CDC cookie, in seconds. + * If set to NULL, it will only be valid until the browser is closed. + * + * @var int|NULL + */ + private $cdcLifetime; + + + /** * Initializes this discovery service. * * The constructor does the parsing of the request. If this is an invalid request, it will @@ -45,6 +54,8 @@ class sspmod_discopower_PowerIdPDisco extends SimpleSAML_XHTML_IdPDisco { /* Ensure that the CDC domain starts with a dot ('.') as required by the spec. */ $this->cdcDomain = '.' . $this->cdcDomain; } + + $this->cdcLifetime = $this->discoconfig->getInteger('cdc.lifetime', NULL); } @@ -291,7 +302,13 @@ class sspmod_discopower_PowerIdPDisco extends SimpleSAML_XHTML_IdPDisco { $newCookie = $tmp[1]; } - setcookie('_saml_idp', $newCookie, time() + 180*24*60*60, '/', $this->cdcDomain, TRUE); + if ($this->cdcLifetime === NULL) { + $expire = 0; + } else { + $expire = time() + $this->cdcLifetime; + } + + setcookie('_saml_idp', $newCookie, $expire, '/', $this->cdcDomain, TRUE); }
discopower: Allow configuring of the lifetime of the CDC cookie.
simplesamlphp_saml2
train
d7559f67ba24eddde83d4406e027c6cad970b3ff
diff --git a/lib/guard/dsl_describer.rb b/lib/guard/dsl_describer.rb index <HASH>..<HASH> 100644 --- a/lib/guard/dsl_describer.rb +++ b/lib/guard/dsl_describer.rb @@ -35,8 +35,6 @@ module Guard # @see CLI#list # def list - _evaluate_guardfile - names = ::Guard::PluginUtil.plugin_names.sort.uniq final_rows = names.inject([]) do |rows, name| rows << {
list cmd should not eval guardfile
guard_guard
train
ad1642263f9286a0f4d20c6d8c15979e3d4dc18a
diff --git a/pyxel/draw_command.py b/pyxel/draw_command.py index <HASH>..<HASH> 100644 --- a/pyxel/draw_command.py +++ b/pyxel/draw_command.py @@ -218,10 +218,13 @@ class DrawCommand: data[SIZE_W_INDEX] = w data[SIZE_H_INDEX] = h - def bltm(self, x, y, img, tm, tu, tv, tw, th, colkey=None): - data = self._tilemap_list[tm]._data[tv:, tu:] - for i in range(th): - for j in range(tw): + def bltm(self, x, y, tm, u, v, w, h, colkey=None): + tilemap = self._tilemap_list[tm] + data = self._tilemap_list[tm]._data[v:, u:] + img = tilemap.image + + for i in range(h): + for j in range(w): val = data[i, j] sx = (val % 32) * 8 sy = (val // 32) * 8 diff --git a/pyxel/examples/03_draw_api.py b/pyxel/examples/03_draw_api.py index <HASH>..<HASH> 100644 --- a/pyxel/examples/03_draw_api.py +++ b/pyxel/examples/03_draw_api.py @@ -10,7 +10,9 @@ class App: pyxel.image(0).load(0, 0, "assets/cat_16x16.png") pyxel.image(1).load(0, 0, "assets/tileset_24x32.png") - pyxel.tilemap(0).set(0, 0, ["2200020401006061620040", "4203202122030001020360"]) + pyxel.tilemap(0).set( + 0, 0, ["2200020401006061620040", "4203202122030001020360"], 1 + ) self.pal_test_is_enabled = False self.clip_test_is_enabled = False @@ -148,11 +150,11 @@ class App: pyxel.blt(x + 76, y, 0, 0, 0, -16, -16, 5) def test_bltm(self, x, y): - pyxel.text(x, y, "bltm(x,y,img,tm,tu,tv,\n tw,th,[colkey])", 7) + pyxel.text(x, y, "bltm(x,y,tm,u,v,\n w,h,[colkey])", 7) y += 15 - pyxel.bltm(x, y, 1, 0, 0, 0, 11, 2, 2) + pyxel.bltm(x, y, 0, 0, 0, 11, 2, 2) def test_text(self, x, y): pyxel.text(x, y, "text(x,y,s,col)", 7)
Removed the img argument from the bltm command
kitao_pyxel
train
b19d2d856f1ed77cd34351a35f9ae7864d39048e
diff --git a/zipline/core/component.py b/zipline/core/component.py index <HASH>..<HASH> 100644 --- a/zipline/core/component.py +++ b/zipline/core/component.py @@ -19,6 +19,7 @@ import gevent_zeromq # zmq_ctypes #import zmq_ctypes +from zipline.protocol import CONTROL_UNFRAME from zipline.utils.gpoll import _Poller as GeventPoller from zipline.protocol import CONTROL_PROTOCOL, COMPONENT_STATE, \ COMPONENT_FAILURE, CONTROL_FRAME @@ -82,12 +83,15 @@ class Component(object): self.zmq = None self.context = None self.addresses = None + self.waiting = None self.out_socket = None self.killed = False self.controller = None # timeout after a full minute self.heartbeat_timeout = 60 *1000 + # TODO: state_flag is deprecated, remove + # TODO: error_state is deprecated, remove self.state_flag = COMPONENT_STATE.OK self.error_state = COMPONENT_FAILURE.NOFAILURE self.on_done = None @@ -190,6 +194,12 @@ class Component(object): raise Exception("Unknown ZeroMQ Flavor") def _run(self): + """ + The main component loop. This is wrapped inside a + exception reporting context inside of run. + + The core logic of the all components is run here. + """ self.start_tic = time.time() self.done = False # TODO: use state flag @@ -200,9 +210,17 @@ class Component(object): self.setup_poller() self.open() - self.setup_sync() self.setup_control() + self.signal_ready() + self.lock_ready() + + self.wait_ready() + # ----------------------- + # YOU SHALL NOT PASS!!!!! + # ----------------------- + # ... until the controller signals GO + self.loop() self.shutdown() @@ -246,20 +264,8 @@ class Component(object): Loop to do work while we still have work to do. """ while self.working(): - self.confirm() self.do_work() - def confirm(self): - """ - Send a synchronization request to the host. 
- """ - if not self.confirmed: - # TODO: proper framing - self.sync_socket.send(self.get_id + ":RUN") - - self.receive_sync_ack() # blocking - self.confirmed = True - def runtime(self): if self.ready() and self.start_tic and self.stop_tic: return self.stop_tic - self.start_tic @@ -299,6 +305,81 @@ class Component(object): # Internal Maintenance # ---------------------- + def lock_ready(self): + """ + Unlock the component, topology is now ready to run. + """ + self.waiting = True + + def unlock_ready(self): + """ + Unlock the component, topology is still pending. + """ + self.waiting = False + + def wait_ready(self): + # Implicit side-effect of unlocking the component iff + # the GO message is received from the monitor level. + # This then unlocks the barrier and proceeds to the + # do_work state. + + # Poll on a subset of the control protocol while we exist + # in the locked quasimode. Respond to HEARTBEAT and GO + # messages. + + while self.waiting: + socks = dict(self.poll.poll(self.heartbeat_timeout)) + + msg = self.control_in.recv() + event, payload = CONTROL_UNFRAME(msg) + + # ==== + # Go + # ==== + + # A distributed lock from the controller to ensure + # synchronized start. 
+ + if event == CONTROL_PROTOCOL.HEARTBEAT: + heartbeat_frame = CONTROL_FRAME( + CONTROL_PROTOCOL.OK, + payload + ) + self.control_out.send(heartbeat_frame) + log.info('Prestart Heartbeat' + self.get_id) + + elif event == CONTROL_PROTOCOL.GO: + # Side effectful call from the controller to unlock + # and begin doing work only when the entire topology + # of the system beings to come online + log.info('Unlocking ' + self.__class__.__name__) + self.unlock_ready() + + def signal_ready(self): + log.info(self.__class__.__name__ + ' is ready') + + if hasattr(self, 'control_out'): + frame = CONTROL_FRAME( + CONTROL_PROTOCOL.READY, + '' + ) + self.control_out.send(frame) + + def signal_cancel(self): + self.done = True + + # TODO: no hasattr hacks + #if not self.controller: + if hasattr(self, 'control_out'): + frame = CONTROL_FRAME( + CONTROL_PROTOCOL.SHUTDOWN, + None + ) + self.control_out.send(frame) + + # then proceeds to do shutdown(), and teardown_sockets() + # to complete the process + def signal_exception(self, exc=None, scope=None): """ This is *very* important error tracking handler. @@ -470,24 +551,6 @@ class Component(object): self.poll.register(self.control_in, self.zmq.POLLIN) self.sockets.extend([self.control_in, self.control_out]) - def setup_sync(self): - """ - Setup the sync socket and poller. ( Connect ) - - DEPRECATED, left in for compatability for now. - """ - - #LOGGER.debug("Connecting sync client for {id}".format(id=self.get_id)) - - self.sync_socket = self.context.socket(self.zmq.REQ) - self.sync_socket.connect(self.addresses['sync_address']) - #self.sync_socket.setsockopt(self.zmq.LINGER,0) - - self.sync_poller = self.zmq_poller() - self.sync_poller.register(self.sync_socket, self.zmq.POLLIN) - - self.sockets.append(self.sync_socket) - # ----------- # FSM Actions # -----------
Selectively backport portions of new component.py
quantopian_zipline
train
e5dfd73767a82563ec6aee2e2011187437b84a0d
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -68,7 +68,7 @@ Catalog.DEFAULT_IDENTIFIERS = (function () { // Same as what Jed.js uses Catalog.CONTEXT_DELIMITER = String.fromCharCode(4); -Catalog.prototype.messageToKey = function messageToKey (msgid, msgctxt) { +Catalog.messageToKey = Catalog.prototype.messageToKey = function messageToKey (msgid, msgctxt) { return msgctxt ? msgctxt + Catalog.CONTEXT_DELIMITER + msgid : msgid; };
Expose `messageToKey` on exports Not just on prototype
gabegorelick_gettext-catalog
train
fddaa17afb2428314278995182cd2693f2eb5418
diff --git a/tests/Cache/MemcacheTestCase.php b/tests/Cache/MemcacheTestCase.php index <HASH>..<HASH> 100644 --- a/tests/Cache/MemcacheTestCase.php +++ b/tests/Cache/MemcacheTestCase.php @@ -41,7 +41,11 @@ class Doctrine_Cache_Memcache_TestCase extends Doctrine_UnitTestCase class Doctrine_Cache_Memcache_Mock extends Doctrine_Cache_Memcache { } -class Memcache -{ +if (!class_exists('Memcache')) +{ + class Memcache + { + + } }
Adding conditional class definition so that people who use the 'memcache' pecl module can run the tests.
doctrine_orm
train
d675f10621e5d252dab4a6929c4982895b748e94
diff --git a/lib/utils.js b/lib/utils.js index <HASH>..<HASH> 100644 --- a/lib/utils.js +++ b/lib/utils.js @@ -118,6 +118,23 @@ function buildOptions(projectConfig, templateCompilerPath, pluginInfo) { return htmlbarsOptions; } +const hasGlobalThis = (function () { + try { + let context = vm.createContext(); + + // we must create a sandboxed context to test if `globalThis` will be + // present _within_ it because in some contexts a globalThis polyfill has + // been evaluated. In that case globalThis would be available on the + // current global context but **would not** be inherited to the global + // contexts created by `vm.createContext` + let type = vm.runInContext(`typeof globalThis`, context); + + return type !== 'undefined'; + } catch (e) { + return false; + } +})(); + function getTemplateCompiler(templateCompilerPath, EmberENV = {}) { let templateCompilerFullPath = require.resolve(templateCompilerPath); let cacheData = TemplateCompilerCache.get(templateCompilerFullPath); @@ -164,7 +181,7 @@ function getTemplateCompiler(templateCompilerPath, EmberENV = {}) { // we must provide a `global` // // this is due to https://git.io/Jtb7s (Ember 3.27+) - if (typeof globalThis === 'undefined') { + if (!hasGlobalThis) { sandbox.global = sandbox; }
Ensure global is present for Node <I> + globalThis polyfill The prior method (`typeof globalThis` in the currently running context) cannot properly indicate if `globalThis` will be available within the sandboxed context.
ember-cli_ember-cli-htmlbars
train
6b5548f127ddbd4ab7bd341d92d9b146bf7a59bc
diff --git a/src/drawNode.js b/src/drawNode.js index <HASH>..<HASH> 100644 --- a/src/drawNode.js +++ b/src/drawNode.js @@ -11,7 +11,7 @@ import {roundTo4Decimals} from "./utils"; export function drawNode(x, y, nodeId, attributes={}, options={}) { attributes = Object.assign({}, attributes); if (attributes.style && attributes.style.includes('invis')) { - var newNode = null; + var newNode = d3.select(null); } else { var root = this._selection; var svg = root.selectWithoutDataPropagation("svg"); @@ -47,14 +47,14 @@ export function updateDrawnNode(x, y, nodeId, attributes={}, options={}) { this._drawnNode.nodeId = nodeId; this._drawnNode.x = x; this._drawnNode.y = y; - if (!node && !(attributes.style && attributes.style.includes('invis'))) { + if (node.empty() && !(attributes.style && attributes.style.includes('invis'))) { var root = this._selection; var svg = root.selectWithoutDataPropagation("svg"); var graph0 = svg.selectWithoutDataPropagation("g"); var node = graph0.append('g'); this._drawnNode.g = node; } - if (node) { + if (!node.empty()) { _updateNode(node, x, y, nodeId, attributes, options); } @@ -131,7 +131,7 @@ export function moveDrawnNode(x, y, options={}) { this._drawnNode.x = x; this._drawnNode.y = y; - if (node) { + if (!node.empty()) { _moveNode(node, x, y, attributes, options); } @@ -146,7 +146,7 @@ export function removeDrawnNode() { var node = this._drawnNode.g; - if (node) { + if (!node.empty()) { node.remove(); } @@ -165,7 +165,7 @@ export function insertDrawnNode(nodeId) { nodeId = this._drawnNode.nodeId; } var node = this._drawnNode.g; - if (!node) { + if (node.empty()) { return this; } var attributes = this._drawnNode.attributes;
Refactor: An invisible node is an empty selection, not null
magjac_d3-graphviz
train
d83ee0bf3cddfb19957d91947a961900d1ffc174
diff --git a/plenum/server/replicas.py b/plenum/server/replicas.py index <HASH>..<HASH> 100644 --- a/plenum/server/replicas.py +++ b/plenum/server/replicas.py @@ -47,7 +47,17 @@ class Replicas: extra={"tags": ["node-replica"]}) return self.num_replicas - def shrink(self, index: int=None) -> int: + def shrink(self) -> int: + index = max(self._replicas.keys()) + replica = self._replicas.pop(index) + self._messages_to_replicas = self._messages_to_replicas[:-1] + self._monitor.removeInstance() + logger.display("{} removed replica {} from instance {}". + format(self._node.name, replica, replica.instId), + extra={"tags": ["node-replica"]}) + return self.num_replicas + + def remove_replica(self, index: int=None) -> int: if index >= self.num_replicas: return if index is None: diff --git a/plenum/test/primary_selection/test_primary_selector.py b/plenum/test/primary_selection/test_primary_selector.py index <HASH>..<HASH> 100644 --- a/plenum/test/primary_selection/test_primary_selector.py +++ b/plenum/test/primary_selection/test_primary_selector.py @@ -49,11 +49,11 @@ class FakeNode: self.totalNodes = len(self.allNodeNames) self.mode = Mode.starting self.config = config or getConfigOnce() - self.replicas = [ - Replica(node=self, instId=0, isMaster=True, config=self.config), - Replica(node=self, instId=1, isMaster=False, config=self.config), - Replica(node=self, instId=2, isMaster=False, config=self.config), - ] + self.replicas = { + 0: Replica(node=self, instId=0, isMaster=True, config=self.config), + 1: Replica(node=self, instId=1, isMaster=False, config=self.config), + 2: Replica(node=self, instId=2, isMaster=False, config=self.config), + } self._found = False self.ledgerManager = LedgerManager(self, ownedByNode=True) ledger0 = FakeLedger(0, 10) diff --git a/plenum/test/replica/test_replica_removing.py b/plenum/test/replica/test_replica_removing.py index <HASH>..<HASH> 100644 --- a/plenum/test/replica/test_replica_removing.py +++ 
b/plenum/test/replica/test_replica_removing.py @@ -26,7 +26,7 @@ def test_replica_removing(looper, txnPoolNodeSet, sdk_pool_handle, sdk_wallet_cl node = txnPoolNodeSet[0] start_replicas_count = node.replicas.num_replicas index = start_replicas_count - 1 - node.replicas.shrink(index) + node.replicas.remove_replica(index) _check_replica_removed(node, start_replicas_count) # trigger view change on all nodes for node in txnPoolNodeSet: @@ -50,7 +50,7 @@ def test_replica_removing_before_vc_with_primary_disconnected(looper, node = txnPoolNodeSet[0] start_replicas_count = node.replicas.num_replicas index = start_replicas_count - 1 - node.replicas.shrink(index) + node.replicas.remove_replica(index) _check_replica_removed(node, start_replicas_count) # trigger view change on all nodes disconnect_node_and_ensure_disconnected(looper, txnPoolNodeSet, node) @@ -74,7 +74,7 @@ def test_replica_removing_before_ordering(looper, txnPoolNodeSet, sdk_pool_handl node = txnPoolNodeSet[0] start_replicas_count = node.replicas.num_replicas index = start_replicas_count - 1 - node.replicas.shrink(index) + node.replicas.remove_replica(index) sdk_send_random_and_check(looper, txnPoolNodeSet, sdk_pool_handle, sdk_wallet_client, 1) looper.run(eventually(check_checkpoint_finalize, txnPoolNodeSet)) _check_replica_removed(node, start_replicas_count) @@ -101,7 +101,7 @@ def test_replica_removing_in_ordering(looper, txnPoolNodeSet, sdk_pool_handle, s sdk_pool_handle, sdk_wallet_client, 1) - node.replicas.shrink(index) + node.replicas.remove_replica(index) sdk_get_replies(looper, req) looper.run(eventually(check_checkpoint_finalize, txnPoolNodeSet)) _check_replica_removed(node, start_replicas_count) @@ -122,7 +122,7 @@ def test_replica_removing_after_ordering(looper, txnPoolNodeSet, sdk_pool_handle sdk_send_random_and_check(looper, txnPoolNodeSet, sdk_pool_handle, sdk_wallet_client, 1) looper.run(eventually(check_checkpoint_finalize, txnPoolNodeSet)) index = start_replicas_count - 1 - 
node.replicas.shrink(index) + node.replicas.remove_replica(index) _check_replica_removed(node, start_replicas_count) # trigger view change on all nodes for return backup replicas for node in txnPoolNodeSet:
INDY-<I>: divide method shrink to shrink() and remove_replica()
hyperledger_indy-plenum
train
4dd0bb6b32a71878b156847f2f955c78a26f061e
diff --git a/js/chrome/save.js b/js/chrome/save.js index <HASH>..<HASH> 100644 --- a/js/chrome/save.js +++ b/js/chrome/save.js @@ -21,7 +21,9 @@ $document.bind('jsbinReady', function () { $document.bind('codeChange', function (event, data) { // savingLabels[data.panelId].text('Saving'); - savingLabels[data.panelId].css({ 'opacity': 0 }).stop(true, true); + if (savingLabels[data.panelId]) { + savingLabels[data.panelId].css({ 'opacity': 0 }).stop(true, true); + } }); $document.bind('saveComplete', throttle(function (event, data) {
Small JS error when code change triggered
jsbin_jsbin
train
43785e0d07baf85f9155183b383d02d1e828277e
diff --git a/maildir_deduplicate/deduplicate.py b/maildir_deduplicate/deduplicate.py index <HASH>..<HASH> 100644 --- a/maildir_deduplicate/deduplicate.py +++ b/maildir_deduplicate/deduplicate.py @@ -46,13 +46,30 @@ class DuplicateSet(object): Implements all deletion strategies applicable to a set of duplicate mails. """ - def __init__(self, hash_key, regexp=None, dry_run=True): + def __init__( + self, hash_key, mail_path_set, regexp=None, dry_run=True, + time_source=None, use_message_id=False): + """ Load-up the duplicate set from mail's path list and freeze pool. + + Once loaded-up, the pool of parsed mails is considered frozen for the + rest of the duplicate set life. This allow aggressive caching of lazy + instance attributes depending on the pool content. + """ self.hash_key = hash_key + + # Global config. self.regexp = regexp self.dry_run = dry_run + self.time_source = time_source + self.use_message_id = use_message_id # Pool referencing all duplicated mails and their attributes. self.pool = set() + for mail_path in set(mail_path_set): + self.pool.add(Mail( + mail_path, self.time_source, self.use_message_id)) + # Freeze pool. + self.pool = frozenset(self.pool) # Keep set metrics. self.stats = Counter() @@ -69,11 +86,6 @@ class DuplicateSet(object): """ Return the size of the duplicate set. """ return len(self.pool) - def add(self, mail): - """ Add provided message to the pool. """ - # Add mail to the pool. - self.pool.add(mail) - def delete(self, mail): """ Delete a mail from the filesystem. """ self.stats['mail_deleted'] += 1 @@ -446,17 +458,12 @@ class Deduplicate(object): continue duplicates = DuplicateSet( - hash_key, regexp=self.regexp, dry_run=self.dry_run) - - # TODO: initialize the duplicate set at DuplicateSet instanciation - # and then freeze the pool right away. This means unexposing - # add_from_file() method. Freezing the pool will allow caching of - # lazy instance attributes depending on the pool content. 
This is - # necessary to pave the way to more expressive attributes like - # 'largest_message', 'oldest_mail' and so on. - for mail_path in mail_path_set: - duplicates.add(Mail( - mail_path, self.time_source, self.use_message_id)) + hash_key, + mail_path_set, + regexp=self.regexp, + dry_run=self.dry_run, + time_source=self.time_source, + use_message_id=self.use_message_id) logger.debug( "Initialized duplicate set of {} mails sharing the {} hash."
Initialize the duplicate set at instanciation and freeze the pool.
kdeldycke_maildir-deduplicate
train
b9125f23da4ae4b2cdfbf799e10a89a784891d92
diff --git a/examples/genkey.rb b/examples/genkey.rb index <HASH>..<HASH> 100755 --- a/examples/genkey.rb +++ b/examples/genkey.rb @@ -1,29 +1,33 @@ #!/usr/bin/env ruby require 'gpgme' -ctx = GPGME::Ctx.new +# If you do not have gpg-agent installed, comment out the following +# and set it as :passphrase_callback. +# +# def passfunc(hook, uid_hint, passphrase_info, prev_was_bad, fd) +# $stderr.write("Passphrase for #{uid_hint}: ") +# $stderr.flush +# begin +# system('stty -echo') +# io = IO.for_fd(fd, 'w') +# io.puts(gets) +# io.flush +# ensure +# (0 ... $_.length).each do |i| $_[i] = ?0 end if $_ +# system('stty echo') +# end +# puts +# end -passphrase_cb = proc {|hook, uid_hint, passphrase_info, prev_was_bad, fd| - $stderr.write("Passphrase for #{uid_hint}: ") - $stderr.flush - begin - system('stty -echo') - io = IO.for_fd(fd, 'w') - io.puts(gets.chomp) - io.flush - ensure - system('stty echo') - end - puts -} -ctx.set_passphrase_cb(passphrase_cb) - -progress_cb = proc {|hook, what, type, current, total| +def progfunc(hook, what, type, current, total) $stderr.write("#{what}: #{current}/#{total}\r") $stderr.flush -} +end + +ctx = GPGME::Ctx.new({:progress_callback => method(:progfunc), + # :passphrase_callback => method(:passfunc) + }) -#ctx.set_progress_cb(progress_cb) ctx.genkey(<<'EOF', nil, nil) <GnupgKeyParms format="internal"> Key-Type: DSA diff --git a/examples/roundtrip.rb b/examples/roundtrip.rb index <HASH>..<HASH> 100755 --- a/examples/roundtrip.rb +++ b/examples/roundtrip.rb @@ -1,12 +1,34 @@ #!/usr/bin/env ruby require 'gpgme' +# If you do not have gpg-agent installed, comment out the following +# and set it as :passphrase_callback. +# +# def passfunc(hook, uid_hint, passphrase_info, prev_was_bad, fd) +# $stderr.write("Passphrase for #{uid_hint}: ") +# $stderr.flush +# begin +# system('stty -echo') +# io = IO.for_fd(fd, 'w') +# io.puts(gets) +# io.flush +# ensure +# (0 ... 
$_.length).each do |i| $_[i] = ?0 end if $_ +# system('stty echo') +# end +# puts +# end + plain = 'test test test' puts("Plaintext:\n#{plain}") # Perform symmetric encryption on PLAIN. -cipher = GPGME::encrypt(nil, plain, {:armor => true}) +cipher = GPGME::encrypt(nil, plain, {:armor => true + # :passphrase_callback => method(:passfunc) + }) puts("Ciphertext:\n#{cipher}") -plain = GPGME::decrypt(cipher) +plain = GPGME::decrypt(cipher, { + # :passphrase_callback => method(:passfunc) + }) puts("Plaintext:\n#{plain}") diff --git a/examples/sign.rb b/examples/sign.rb index <HASH>..<HASH> 100755 --- a/examples/sign.rb +++ b/examples/sign.rb @@ -1,4 +1,24 @@ #!/usr/bin/env ruby require 'gpgme' -GPGME::sign('test test test', $stdout, {:mode => GPGME::SIG_MODE_CLEAR}) +# If you do not have gpg-agent installed, comment out the following +# and set it as :passphrase_callback. +# +# def passfunc(hook, uid_hint, passphrase_info, prev_was_bad, fd) +# $stderr.write("Passphrase for #{uid_hint}: ") +# $stderr.flush +# begin +# system('stty -echo') +# io = IO.for_fd(fd, 'w') +# io.puts(gets) +# io.flush +# ensure +# (0 ... $_.length).each do |i| $_[i] = ?0 end if $_ +# system('stty echo') +# end +# puts +# end + +GPGME::sign('test test test', $stdout, {:mode => GPGME::SIG_MODE_CLEAR, + # :passphrase_callback => method(:passfunc) + }) diff --git a/lib/gpgme.rb b/lib/gpgme.rb index <HASH>..<HASH> 100644 --- a/lib/gpgme.rb +++ b/lib/gpgme.rb @@ -624,9 +624,10 @@ keylist_mode=#{KEYLIST_MODE_NAMES[keylist_mode]}>" # begin # system('stty -echo') # io = IO.for_fd(fd, 'w') - # io.puts(gets.chomp) + # io.puts(gets) # io.flush # ensure + # (0 ... $_.length).each do |i| $_[i] = ?0 end if $_ # system('stty echo') # end # puts
Add comment on passphrase callback if gpg-agent is not available.
ueno_ruby-gpgme
train
618fb2b83963d7b5d1cfe7ea6d953c56b793230a
diff --git a/common/concurrent/ManagedBlockingQueue.java b/common/concurrent/ManagedBlockingQueue.java index <HASH>..<HASH> 100644 --- a/common/concurrent/ManagedBlockingQueue.java +++ b/common/concurrent/ManagedBlockingQueue.java @@ -22,6 +22,12 @@ import java.util.concurrent.ForkJoinPool; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; +/** + * TODO: implement our LinkedBlockingQueue that has the features of: + * 1) ResizingBlockingQueue + * 2) ManagedBlocker + * 3) Cancellable via {@code Condition} signalling + */ public class ManagedBlockingQueue<E> { private static final int BLOCKING_TIMEOUT_SECONDS = 8; diff --git a/common/iterator/ParallelIterators.java b/common/iterator/ParallelIterators.java index <HASH>..<HASH> 100644 --- a/common/iterator/ParallelIterators.java +++ b/common/iterator/ParallelIterators.java @@ -29,7 +29,6 @@ import java.util.concurrent.ForkJoinTask; public class ParallelIterators<T> implements ComposableIterator<T> { private final ResizingBlockingQueue<T> queue; - private final List<ForkJoinTask<?>> producers; private State state; private T next; @@ -37,15 +36,14 @@ public class ParallelIterators<T> implements ComposableIterator<T> { public ParallelIterators(final List<ComposableIterator<T>> iterators) { queue = new ResizingBlockingQueue<>(); - producers = new ArrayList<>(); state = State.EMPTY; next = null; iterators.forEach(iterator -> { queue.incrementPublisher(); - producers.add(CommonExecutorService.get().submit(() -> { + CommonExecutorService.get().submit(() -> { while (!queue.isCancelled() && iterator.hasNext()) queue.put(iterator.next()); queue.decrementPublisher(); - })); + }); }); } @@ -71,6 +69,5 @@ public class ParallelIterators<T> implements ComposableIterator<T> { @Override protected void finalize() { queue.cancel(); - producers.forEach(p -> p.cancel(true)); } }
ParallelIterators untrack ForkJoinTasks of producers
graknlabs_grakn
train
7507edb7dd83b96f6ee55f8f7e9a0641e8ceb9ce
diff --git a/testgrid/jenkins_verify/jenkins_validate.go b/testgrid/jenkins_verify/jenkins_validate.go index <HASH>..<HASH> 100644 --- a/testgrid/jenkins_verify/jenkins_validate.go +++ b/testgrid/jenkins_verify/jenkins_validate.go @@ -76,7 +76,7 @@ func main() { } // Also check k/k presubmit, prow postsubmit and periodic jobs - for _, job := range prowConfig.AllPresubmits([]string{"kubernetes/kubernetes"}) { + for _, job := range prowConfig.AllPresubmits([]string{}) { jobs[job.Name] = false }
* Try to fix failure Testgrid group mlkube-build-presubmit does not have a matching jenkins or prow job by considering all presubmit jobs; not just presubmit jobs for repo: kubernetes/kubernetes. * The change to only consider kubernetes/kubernetes was added in <URL>
kubernetes_test-infra
train
4881bc88cd20b100589c929b80abed4809d97947
diff --git a/modules/social_features/social_activity/modules/social_activity_filter/src/Form/FilterSettingsForm.php b/modules/social_features/social_activity/modules/social_activity_filter/src/Form/FilterSettingsForm.php index <HASH>..<HASH> 100644 --- a/modules/social_features/social_activity/modules/social_activity_filter/src/Form/FilterSettingsForm.php +++ b/modules/social_features/social_activity/modules/social_activity_filter/src/Form/FilterSettingsForm.php @@ -7,6 +7,7 @@ use Drupal\Core\DependencyInjection\ContainerInjectionInterface; use Drupal\Core\Extension\ModuleHandlerInterface; use Drupal\Core\Form\ConfigFormBase; use Drupal\Core\Form\FormStateInterface; +use Drupal\node\Entity\NodeType; use Drupal\taxonomy\Entity\Vocabulary; use Symfony\Component\DependencyInjection\ContainerInterface; @@ -118,12 +119,10 @@ class FilterSettingsForm extends ConfigFormBase implements ContainerInjectionInt */ public function getReferencedTaxonomyFields(array $vocabulary_list) { - $node_types = node_type_get_types(); - $field_names = []; foreach ($vocabulary_list as $vocabulary) { - foreach ($node_types as $content_type => $type) { + foreach (NodeType::loadMultiple() as $content_type => $type) { $field_definitions = \Drupal::service('entity_field.manager') ->getFieldDefinitions('node', $content_type);
Issue #<I> by SV: Replace node_type_get_types - social_activity_filter module
goalgorilla_open_social
train
d6598723cc0e6ed15b455a402b85c0c92e7117de
diff --git a/test.js b/test.js index <HASH>..<HASH> 100644 --- a/test.js +++ b/test.js @@ -11,8 +11,10 @@ var tests = [ 0x80, -0x80, 0x100, -0x100, 0x200, -0x100, 0x1000, -0x1000, 0x10000, -0x10000, 0x20000, -0x20000, 0x40000,-0x40000, - 10, 100, 1000, 10000, 100000, 1000000, - -10, -100, -1000, -10000, -100000, -1000000, + 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000, + 10000000000, 100000000000, 1000000000000, + -10, -100, -1000, -10000, -100000, -1000000, -10000000, -100000000, + -1000000000, -10000000000, -100000000000, -1000000000000, 1.1, 0.1, -0.02, 'hello', 'world', bops.from("Hello"), bops.from("World"), [1,2,3], [], {name: "Tim", age: 29}, {},
Add more <I>-bit tests
creationix_msgpack-js
train
4b3a99539ef070ca615cea17f04a5a23bd9eccfc
diff --git a/bezier/curve.py b/bezier/curve.py index <HASH>..<HASH> 100644 --- a/bezier/curve.py +++ b/bezier/curve.py @@ -100,3 +100,31 @@ class Curve(object): # Here: Value will be 1x2, we just want the 1D point. return value.flatten() + + def evaluate_multi(self, s_vals): + r"""Evaluate :math:`B(s)` for multiple points along the curve. + + Performs `de Casteljau's algorithm`_ to build up :math:`B(s)`. + + .. _de Casteljau's algorithm: + https://en.wikipedia.org/wiki/De_Casteljau%27s_algorithm + + .. note:: + + This current just uses :meth:`evaluate` and so is less + performant than it could be. + + Args: + s_vals (numpy.ndarray): Parameters along the curve (as a + 1D array). + + Returns: + numpy.ndarray: The points on the curve. As a two dimensional + NumPy array, with the rows corresponding to each ``s`` + value and the columns to the dimension. + """ + num_vals, = s_vals.shape + result = np.zeros((num_vals, self.dimension)) + for i, s_val in enumerate(s_vals): + result[i, :] = self.evaluate(s_val) + return result diff --git a/tests/test_curve.py b/tests/test_curve.py index <HASH>..<HASH> 100644 --- a/tests/test_curve.py +++ b/tests/test_curve.py @@ -80,3 +80,43 @@ class TestCurve(unittest.TestCase): expected = np.array([0.25, 0.265625]) result = curve.evaluate(s) self.assertTrue(np.all(expected == result)) + + def test_evaluate_multi(self): + import numpy as np + + s_vals = np.array([0.0, 0.25, 0.5, 1.0, 1.25]) + nodes = np.array([ + [0.0, 0.0], + [0.375, 0.375], + [1.0, 1.0], + ]) + curve = self._make_one(nodes) + expected = np.array([ + [0.0, 0.0], + [0.203125, 0.203125], + [0.4375, 0.4375], + [1.0, 1.0], + [1.328125, 1.328125], + ]) + result = curve.evaluate_multi(s_vals) + self.assertTrue(np.all(expected == result)) + + def test_evaluate_multi_calls_evaluate(self): + import mock + import numpy as np + + s1 = 3.14159 + s2 = 2.817281728 + s_vals = np.array([s1, s2]) + num_pts = len(s_vals) + curve = self._make_one(np.zeros((2, 1))) + ret_vals = [10.0, -1.0] 
+ curve.evaluate = mock.Mock(side_effect=ret_vals) + + result = curve.evaluate_multi(s_vals) + self.assertEqual(result.shape, (num_pts, 1)) + self.assertTrue(np.all(result == np.array([ret_vals]).T)) + + curve.evaluate.assert_any_call(s1) + curve.evaluate.assert_any_call(s2) + self.assertEqual(curve.evaluate.call_count, 2)
Adding Curve.evaluate_multi(). Punting on making it performant, and adding a unit test to assert the general structure of the impl. (i.e. it calls Curve.evaluate()).
dhermes_bezier
train
47b8d847f1d13cb4bea3b94f6c436447510b2e19
diff --git a/Extractor.php b/Extractor.php index <HASH>..<HASH> 100644 --- a/Extractor.php +++ b/Extractor.php @@ -204,7 +204,7 @@ class Extractor // Strip away the docblock header and footer to ease parsing of one line annotations $docblock = substr($docblock, 3, -2); - if (preg_match_all('/@(?<name>[A-Za-z_-]+)[\s\t]*\((?<args>(?:(?!"\)).)*")\)\r?/s', $docblock, $matches)) { + if (preg_match_all('/@(?<name>[A-Za-z_-]+)[\s\t]*\((?<args>(?:(?!\)).)*)\)\r?/s', $docblock, $matches)) { $numMatches = count($matches[0]); for ($i = 0; $i < $numMatches; ++$i) {
Fix issue #<I> Fix an error when nullable is at the end of the annotation line
calinrada_php-apidoc
train
6f76442e906bdaa431b8ca13a99f2db606061f6b
diff --git a/test/unit/test-adapter.js b/test/unit/test-adapter.js index <HASH>..<HASH> 100644 --- a/test/unit/test-adapter.js +++ b/test/unit/test-adapter.js @@ -6,7 +6,7 @@ var getRandomPort = require('get-random-port'); var systemUnderTest = require('../../dist/adapter'); var SlackMessageAdapter = systemUnderTest.default; -// fixtures and test helpers +// fixtures var workingVerificationToken = 'VERIFICATION_TOKEN'; // helpers @@ -26,7 +26,45 @@ function unregisterAllHandlers(adapter) { adapter.callbacks = []; // eslint-disable-line no-param-reassign } +// shared tests +function shouldRegisterWithCallbackId(methodName) { + describe('when registering with a callback_id', function () { + beforeEach(function () { + this.handler = function () { }; + }); + it('a plain string callback_id registers successfully', function () { + this.adapter[methodName]('my_callback', this.handler); + assertHandlerRegistered(this.adapter, this.handler); + }); + it('a RegExp callback_id registers successfully', function () { + this.adapter[methodName](/\w+_callback/, this.handler); + assertHandlerRegistered(this.adapter, this.handler); + }); + it('invalid callback_id types throw on registration', function () { + var handler = this.handler; + assert.throws(function () { + this.adapter[methodName](5, handler); + }, TypeError); + assert.throws(function () { + this.adapter[methodName](true, handler); + }, TypeError); + assert.throws(function () { + this.adapter[methodName]([], handler); + }, TypeError); + assert.throws(function () { + this.adapter[methodName](null, handler); + }, TypeError); + assert.throws(function () { + this.adapter[methodName](undefined, handler); + }, TypeError); + }); + }); +} + +// tests describe('SlackMessageAdapter', function () { + beforeEach(function () { + }); describe('constructor', function () { it('should build an instance', function () { var adapter = new SlackMessageAdapter(workingVerificationToken); @@ -151,44 +189,20 @@ describe('SlackMessageAdapter', 
function () { describe('#action()', function () { beforeEach(function () { this.adapter = new SlackMessageAdapter(workingVerificationToken); - this.actionHandler = function () { }; }); it('should fail action registration without handler', function () { assert.throws(function () { this.adapter.action('my_callback'); }, TypeError); }); - // TODO: see if this can be reused in the options registration - describe('when registering with a callback_id', function () { - it('a plain string callback_id registers successfully', function () { - this.adapter.action('my_callback', this.actionHandler); - assertHandlerRegistered(this.adapter, this.actionHandler); - }); - it('a RegExp callback_id registers successfully', function () { - this.adapter.action(/\w+_callback/, this.actionHandler); - assertHandlerRegistered(this.adapter, this.actionHandler); - }); - it('invalid callback_id types throw on registration', function () { - var actionHandler = this.actionHandler; - assert.throws(function () { - this.adapter.action(5, actionHandler); - }, TypeError); - assert.throws(function () { - this.adapter.action(true, actionHandler); - }, TypeError); - assert.throws(function () { - this.adapter.action([], actionHandler); - }, TypeError); - assert.throws(function () { - this.adapter.action(null, actionHandler); - }, TypeError); - assert.throws(function () { - this.adapter.action(undefined, actionHandler); - }, TypeError); - }); - }); - // NOTE: the following probably only make sense for actions and not for options + + // shared tests + shouldRegisterWithCallbackId('action'); + describe('when registering with a complex set of constraints', function () { + beforeEach(function () { + this.actionHandler = function () { }; + }); it('should register with valid type constraints successfully', function () { var adapter = this.adapter; var actionHandler = this.actionHandler; @@ -239,5 +253,8 @@ describe('SlackMessageAdapter', function () { this.adapter.options('my_callback'); }, TypeError); }); + + 
// shared tests + shouldRegisterWithCallbackId('options'); }); });
add options callback id registration tests by reusing tests from actions registration
slackapi_node-slack-interactive-messages
train
bf99f1cd24a28fa5a6931b2549034c17a8c8cb6c
diff --git a/lib/core/Stager.js b/lib/core/Stager.js index <HASH>..<HASH> 100644 --- a/lib/core/Stager.js +++ b/lib/core/Stager.js @@ -453,7 +453,7 @@ Stager.prototype.addStep = function(step) { var res, unique; unique = true; - res = this.checkStepValidity(step, unique); + res = checkStepValidity(this, step, unique); if (res !== null) { throw new Error('Stager.addStep: invalid step received: ' + res + '.'); @@ -513,7 +513,7 @@ // Stage. else { - res = this.checkStageValidity(stage); + res = checkStageValidity(this, stage); if (res !== null) { throw new Error('Stager.addStage: invalid stage received: ' + res + '.'); @@ -1143,7 +1143,7 @@ } /** - * ### Stager.checkStepValidity + * checkStepValidity * * Returns whether given step is valid * @@ -1155,13 +1155,15 @@ * @return {string} NULL for valid stages, error description otherwise * * @see Stager.addStep + * + * @api private */ - Stager.prototype.checkStepValidity = function(step, unique) { + function checkStepValidity(that, step, unique) { if (!step) return 'missing step object'; if ('string' !== typeof step.id) return 'missing ID'; if ('function' !== typeof step.cb) return 'missing callback'; - if (unique && this.steps.hasOwnProperty(step.id)) { + if (unique && that.steps.hasOwnProperty(step.id)) { return 'step ID already existing: ' + step.id + '. 
Use extendStep to modify it.'; } @@ -1169,7 +1171,7 @@ }; /** - * ### Stager.checkStepValidity + * checkStageValidity * * Returns whether given stage is valid * @@ -1180,22 +1182,22 @@ * @return {string} NULL for valid stages, error description otherwise * * @see Stager.addStage + * + * @api private */ - Stager.prototype.checkStageValidity = function(stage, unique) { - var i, len; + function checkStageValidity(that, stage, unique) { if (!stage) return 'missing stage object'; if ('string' !== typeof stage.id) return 'missing ID'; if (!stage.steps || !stage.steps.length) return 'missing "steps" array'; - if (unique && this.stages.hasOwnProperty(stage.id)) { + if (unique && that.stages.hasOwnProperty(stage.id)) { return 'stage id already existing: ' + stage.id + '. Use extendStage to modify it.'; } // Check whether the all referenced steps exist. - i = -1, len = stage.steps.length; - for ( ; ++i < len ; ) { - if (!this.steps[stage.steps[i]]) { + for (var i = 0; i < stage.steps.length; ++i) { + if (!that.steps[stage.steps[i]]) { return 'unknown step "' + stage.steps[i] + '"'; } } @@ -1204,7 +1206,7 @@ }; /** - * ### Stager.handleAlias + * handleAlias * * Handles stage id and alias strings * @@ -1264,7 +1266,7 @@ } return id; - }; + } // ## Closure })(
Removed Stager private methods from prototype
nodeGame_nodegame-client
train
bf88405121f91b48b18ca46bc0101a026bbbb140
diff --git a/spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/webservices/WebServicesAutoConfiguration.java b/spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/webservices/WebServicesAutoConfiguration.java index <HASH>..<HASH> 100644 --- a/spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/webservices/WebServicesAutoConfiguration.java +++ b/spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/webservices/WebServicesAutoConfiguration.java @@ -43,6 +43,7 @@ import org.springframework.context.ApplicationContextAware; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Role; import org.springframework.core.io.Resource; import org.springframework.util.StringUtils; import org.springframework.ws.config.annotation.EnableWs; @@ -82,6 +83,7 @@ public class WebServicesAutoConfiguration { } @Bean + @Role(BeanDefinition.ROLE_INFRASTRUCTURE) @Conditional(OnWsdlLocationsCondition.class) public static WsdlDefinitionBeanFactoryPostProcessor wsdlDefinitionBeanFactoryPostProcessor() { return new WsdlDefinitionBeanFactoryPostProcessor(); diff --git a/spring-boot-project/spring-boot-test-autoconfigure/src/main/java/org/springframework/boot/test/autoconfigure/jdbc/TestDatabaseAutoConfiguration.java b/spring-boot-project/spring-boot-test-autoconfigure/src/main/java/org/springframework/boot/test/autoconfigure/jdbc/TestDatabaseAutoConfiguration.java index <HASH>..<HASH> 100644 --- a/spring-boot-project/spring-boot-test-autoconfigure/src/main/java/org/springframework/boot/test/autoconfigure/jdbc/TestDatabaseAutoConfiguration.java +++ 
b/spring-boot-project/spring-boot-test-autoconfigure/src/main/java/org/springframework/boot/test/autoconfigure/jdbc/TestDatabaseAutoConfiguration.java @@ -41,6 +41,7 @@ import org.springframework.boot.jdbc.EmbeddedDatabaseConnection; import org.springframework.context.EnvironmentAware; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Role; import org.springframework.core.Ordered; import org.springframework.core.annotation.Order; import org.springframework.core.env.ConfigurableEnvironment; @@ -70,6 +71,7 @@ public class TestDatabaseAutoConfiguration { } @Bean + @Role(BeanDefinition.ROLE_INFRASTRUCTURE) @ConditionalOnProperty(prefix = "spring.test.database", name = "replace", havingValue = "ANY", matchIfMissing = true) static EmbeddedDataSourceBeanFactoryPostProcessor embeddedDataSourceBeanFactoryPostProcessor() { diff --git a/spring-boot-project/spring-boot-test/src/main/java/org/springframework/boot/test/context/ImportsContextCustomizer.java b/spring-boot-project/spring-boot-test/src/main/java/org/springframework/boot/test/context/ImportsContextCustomizer.java index <HASH>..<HASH> 100644 --- a/spring-boot-project/spring-boot-test/src/main/java/org/springframework/boot/test/context/ImportsContextCustomizer.java +++ b/spring-boot-project/spring-boot-test/src/main/java/org/springframework/boot/test/context/ImportsContextCustomizer.java @@ -1,5 +1,5 @@ /* - * Copyright 2012-2019 the original author or authors. + * Copyright 2012-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -83,6 +83,7 @@ class ImportsContextCustomizer implements ContextCustomizer { private void registerCleanupPostProcessor(BeanDefinitionRegistry registry, AnnotatedBeanDefinitionReader reader) { BeanDefinition definition = registerBean(registry, reader, ImportsCleanupPostProcessor.BEAN_NAME, ImportsCleanupPostProcessor.class); + definition.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); definition.getConstructorArgumentValues().addIndexedArgumentValue(0, this.testClass); }
Flag BeanDefinitionRegistryPostProcessor beans with role infrastructure Closes gh-<I>
spring-projects_spring-boot
train
8bc4accd6c35392d84a4e116b3b996321292ffdf
diff --git a/common-core-open/src/main/java/com/bbn/bue/common/MakeCrossValidationBatches.java b/common-core-open/src/main/java/com/bbn/bue/common/MakeCrossValidationBatches.java index <HASH>..<HASH> 100644 --- a/common-core-open/src/main/java/com/bbn/bue/common/MakeCrossValidationBatches.java +++ b/common-core-open/src/main/java/com/bbn/bue/common/MakeCrossValidationBatches.java @@ -66,8 +66,8 @@ public final class MakeCrossValidationBatches { final int randomSeed = parameters.getInteger("com.bbn.bue.common.crossValidation.randomSeed"); final boolean useDocIdMap = parameters.getBoolean("com.bbn.bue.common.crossValidation.useDocIdMap"); - if (numBatches < 2) { - errorExit("Bad numBatches value: Need two or more batches to divide data into"); + if (numBatches < 1) { + errorExit("Bad numBatches value: Need one or more batches to divide data into"); } if (useDocIdMap) {
Change numbatches constraint to < 1.
BBN-E_bue-common-open
train
49732d24fbd2f10129cbef747a85a7bc09fd7ec7
diff --git a/repo/repo.go b/repo/repo.go index <HASH>..<HASH> 100644 --- a/repo/repo.go +++ b/repo/repo.go @@ -5,7 +5,6 @@ import ( datastore "github.com/jbenet/go-ipfs/Godeps/_workspace/src/github.com/jbenet/go-datastore" config "github.com/jbenet/go-ipfs/repo/config" - util "github.com/jbenet/go-ipfs/util" ) type Repo interface { @@ -19,15 +18,3 @@ type Repo interface { io.Closer } - -// IsInitialized returns true if the path is home to an initialized IPFS -// repository. -func IsInitialized(path string) bool { - if !util.FileExists(path) { - return false - } - // TODO add logging check - // TODO add datastore check - // TODO add config file check - return true -}
Dead code cleanup: repo.IsInitialized The version actually used is in fsrepo, and belongs there as it knows about fsrepo file layout.
ipfs_go-ipfs
train
ebfa742eb9627ab10b3ad72d323857256b8719d1
diff --git a/History.md b/History.md index <HASH>..<HASH> 100644 --- a/History.md +++ b/History.md @@ -12,6 +12,7 @@ Release date: unreleased * Selenium driver with Chrome should support multiple file upload [Thomas Walpole] * Fix visible: :hidden with :text option behavior [Thomas Walpole] * App exceptions with multiple parameter initializers now re-raised correctly - Issue #1785 [Michael Lutsiuk] +* Warn on selenium unknown errors rather than raising when quitting driver [Adam Pohorecki, Thomas Walpole] #2.10.1 Release date: 2016-10-08 diff --git a/lib/capybara/selenium/driver.rb b/lib/capybara/selenium/driver.rb index <HASH>..<HASH> 100644 --- a/lib/capybara/selenium/driver.rb +++ b/lib/capybara/selenium/driver.rb @@ -258,8 +258,10 @@ class Capybara::Selenium::Driver < Capybara::Driver::Base rescue Errno::ECONNREFUSED # Browser must have already gone rescue Selenium::WebDriver::Error::UnknownError => e - raise unless e.message =~ /Error communicating with the remote browser/ - # probably already gone + unless silenced_unknown_error_message?(e.message) # Most likely already gone + # probably already gone but not sure - so warn + warn "Ignoring Selenium UnknownError during driver quit: #{e.message}" + end ensure @browser = nil end @@ -311,4 +313,11 @@ class Capybara::Selenium::Driver < Capybara::Driver::Base end end + def silenced_unknown_error_message?(msg) + silenced_unknown_error_messages.any? 
{ |r| msg =~ r } + end + + def silenced_unknown_error_messages + [ /Error communicating with the remote browser/ ] + end end diff --git a/spec/selenium_spec_marionette.rb b/spec/selenium_spec_marionette.rb index <HASH>..<HASH> 100644 --- a/spec/selenium_spec_marionette.rb +++ b/spec/selenium_spec_marionette.rb @@ -52,6 +52,39 @@ RSpec.describe Capybara::Selenium::Driver do #access instance variable directly so we don't create a new browser instance expect(@driver.instance_variable_get(:@browser)).to be_nil end + + context "with errors" do + before do + @original_browser = @driver.browser + end + after do + # Ensure browser is actually quit so we don't leave hanging processe + RSpec::Mocks.space.proxy_for(@original_browser).reset + @original_browser.quit + end + + it "warns UnknownError returned during quit because the browser is probably already gone" do + expect_any_instance_of(Capybara::Selenium::Driver).to receive(:warn).with(/random message/) + allow(@driver.browser).to( + receive(:quit) + .and_raise(Selenium::WebDriver::Error::UnknownError, "random message") + ) + + expect { @driver.quit }.not_to raise_error + expect(@driver.instance_variable_get(:@browser)).to be_nil + end + + it "ignores silenced UnknownError returned during quit because the browser is almost definitely already gone" do + expect_any_instance_of(Capybara::Selenium::Driver).not_to receive(:warn) + allow(@driver.browser).to( + receive(:quit) + .and_raise(Selenium::WebDriver::Error::UnknownError, "Error communicating with the remote browser") + ) + + expect { @driver.quit }.not_to raise_error + expect(@driver.instance_variable_get(:@browser)).to be_nil + end + end end context "storage" do
ignore Selenium UnknownError during quit
teamcapybara_capybara
train
a978975eea56bad12bbcea1fecff665f038b0b83
diff --git a/xl-erasure-v1-metadata.go b/xl-erasure-v1-metadata.go index <HASH>..<HASH> 100644 --- a/xl-erasure-v1-metadata.go +++ b/xl-erasure-v1-metadata.go @@ -29,6 +29,7 @@ type xlMetaV1 struct { Size int64 `json:"size"` ModTime time.Time `json:"modTime"` Version int64 `json:"version"` + Deleted bool `json:"deleted"` } `json:"stat"` Erasure struct { DataBlocks int `json:"data"` diff --git a/xl-erasure-v1.go b/xl-erasure-v1.go index <HASH>..<HASH> 100644 --- a/xl-erasure-v1.go +++ b/xl-erasure-v1.go @@ -18,6 +18,7 @@ package main import ( "fmt" + "io" "os" slashpath "path" "sort" @@ -547,19 +548,85 @@ func (xl XL) DeleteFile(volume, path string) error { if !isValidPath(path) { return errInvalidArgument } + + // Lock right before reading from disk. + nsMutex.RLock(volume, path) + partsMetadata, errs := xl.getPartsMetadata(volume, path) + nsMutex.RUnlock(volume, path) + + // List all the file versions on existing files. + versions, err := listFileVersions(partsMetadata, errs) + // Get highest file version. + higherVersion := highestInt(versions) + // Increment to have next higher version. + higherVersion++ + + // Take last meta data to use later + var mdata xlMetaV1 + onlineDisksCount := 0 + for index, metadata := range partsMetadata { + if errs[index] == nil { + mdata = metadata + onlineDisksCount++ + } + } + + if onlineDisksCount < xl.writeQuorum { + return errWriteQuorum + } + + xlMetaV1FilePath := slashpath.Join(path, xlMetaV1File) + deleteMetaData := (onlineDisksCount == len(xl.storageDisks)) + + // Set higher version to indicate file operation + mdata.Stat.Version = higherVersion + mdata.Stat.Deleted = true + + nsMutex.Lock(volume, path) + defer nsMutex.Unlock(volume, path) + // Loop through and delete each chunks. 
for index, disk := range xl.storageDisks { - erasureFilePart := slashpath.Join(path, fmt.Sprintf("file.%d", index)) - err := disk.DeleteFile(volume, erasureFilePart) - if err != nil { - log.WithFields(logrus.Fields{ - "volume": volume, - "path": path, - }).Errorf("DeleteFile failed with %s", err) - return err + if errs[index] != nil { + continue } - xlMetaV1FilePath := slashpath.Join(path, xlMetaV1File) - err = disk.DeleteFile(volume, xlMetaV1FilePath) + + // TODO: errors can be allowed up to len(xl.storageDisks) - xl.writeQuorum + // TODO: Fix: meta data will be lost if deleteMetaData is true and anyone DeleteFile failure. + + if deleteMetaData { + err = disk.DeleteFile(volume, xlMetaV1FilePath) + if err != nil { + log.WithFields(logrus.Fields{ + "volume": volume, + "path": path, + }).Errorf("DeleteFile failed with %s", err) + return err + } + } else { + var metadataWriter io.WriteCloser + metadataWriter, err = disk.CreateFile(volume, xlMetaV1FilePath) + if err != nil { + log.WithFields(logrus.Fields{ + "volume": volume, + "path": path, + }).Errorf("CreateFile failed with %s", err) + return err + } + + err = mdata.Write(metadataWriter) + if err != nil { + log.WithFields(logrus.Fields{ + "volume": volume, + "path": path, + "diskIndex": index, + }).Errorf("Writing metadata failed with %s", err) + return err + } + } + + erasureFilePart := slashpath.Join(path, fmt.Sprintf("file.%d", index)) + err = disk.DeleteFile(volume, erasureFilePart) if err != nil { log.WithFields(logrus.Fields{ "volume": volume,
xl: add quorum support for DeleteFile() (#<I>) Fixes #<I>
minio_minio
train
35d3ae3298dc77664c0b2ef8fc636821dafbec28
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb b/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb index <HASH>..<HASH> 100644 --- a/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb +++ b/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb @@ -264,7 +264,7 @@ module ActiveRecord class StatementPool < ConnectionAdapters::StatementPool # :nodoc: def initialize(connection, max) super(max) - @raw_connection = connection + @connection = connection @counter = 0 end @@ -274,14 +274,14 @@ module ActiveRecord private def dealloc(key) - @raw_connection.query "DEALLOCATE #{key}" if connection_active? - rescue PG::Error - end - - def connection_active? - @raw_connection.status == PG::CONNECTION_OK + # This is ugly, but safe: the statement pool is only + # accessed while holding the connection's lock. (And we + # don't need the complication of with_raw_connection because + # a reconnect would invalidate the entire statement pool.) + if conn = @connection.instance_variable_get(:@raw_connection) + conn.query "DEALLOCATE #{key}" if conn.status == PG::CONNECTION_OK + end rescue PG::Error - false end end @@ -976,7 +976,7 @@ module ActiveRecord end def build_statement_pool - StatementPool.new(@raw_connection, self.class.type_cast_config_to_integer(@config[:statement_limit])) + StatementPool.new(self, self.class.type_cast_config_to_integer(@config[:statement_limit])) end def can_perform_case_insensitive_comparison_for?(column)
Switch Postgres statement pool to use the AR connection We're still ultimately accessing the raw connection directly, but this is necessary to keep the statement pool working across full reconnects.
rails_rails
train
558f82bf2bd48a0b522e0937f23b65b4ab7dcdd8
diff --git a/tests/test_io.py b/tests/test_io.py index <HASH>..<HASH> 100644 --- a/tests/test_io.py +++ b/tests/test_io.py @@ -34,11 +34,7 @@ class MonkeyPatchingTestCase(StateClearingTestCase): assert open is _open assert file is _file -class EpollSocketTestCase(StateClearingTestCase): - def setUp(self): - StateClearingTestCase.setUp(self) - greenhouse.poller.set(greenhouse.poller.Epoll()) - +class PollerMixin(object): def test_sockets_basic(self): with self.socketpair() as (client, handler): client.send("howdy") @@ -245,12 +241,19 @@ class EpollSocketTestCase(StateClearingTestCase): assert client.getsockname() == handler.getpeername() assert client.getpeername() == handler.getsockname() -class PollSocketTestCase(EpollSocketTestCase): - def setUp(self): - StateClearingTestCase.setUp(self) - greenhouse.poller.set(greenhouse.poller.Poll()) +if greenhouse.poller.Epoll._POLLER: + class EpollSocketTestCase(PollerMixin, StateClearingTestCase): + def setUp(self): + StateClearingTestCase.setUp(self) + greenhouse.poller.set(greenhouse.poller.Epoll()) + +if greenhouse.poller.Poll._POLLER: + class PollSocketTestCase(PollerMixin, StateClearingTestCase): + def setUp(self): + StateClearingTestCase.setUp(self) + greenhouse.poller.set(greenhouse.poller.Poll()) -class SelectSocketTestCase(EpollSocketTestCase): +class SelectSocketTestCase(PollerMixin, StateClearingTestCase): def setUp(self): StateClearingTestCase.setUp(self) greenhouse.poller.set(greenhouse.poller.Select())
some reorg in the IO tests to omit poller type tests on platforms that don't have them
teepark_greenhouse
train
ced20411502623be42c6f28cac7a8c43dd4e8562
diff --git a/lightfm/evaluation.py b/lightfm/evaluation.py index <HASH>..<HASH> 100644 --- a/lightfm/evaluation.py +++ b/lightfm/evaluation.py @@ -62,8 +62,11 @@ def precision_at_k(model, test_interactions, train_interactions=None, item_features=item_features, num_threads=num_threads) - ranks.data[ranks.data < k] = 1.0 - ranks.data[ranks.data >= k] = 0.0 + k_inds = ranks.data < k + other_inds = ranks.data >= k + + ranks.data[k_inds] = 1.0 + ranks.data[other_inds] = 0.0 precision = np.squeeze(np.array(ranks.sum(axis=1))) / k
Update evaluation.py - include case where k = 1 For the case where k = 1, setting all the rank=0 values to '1' is immediately overwritten. By saving-off the indices first, the ranks data is updated without this issue.
lyst_lightfm
train
b08b1d5e18de8df48072c4fc04c9c714bb6fbc81
diff --git a/client/state/test/initial-state.js b/client/state/test/initial-state.js index <HASH>..<HASH> 100644 --- a/client/state/test/initial-state.js +++ b/client/state/test/initial-state.js @@ -1,17 +1,31 @@ -require( 'lib/react-test-env-setup' )(); /** * External dependencies */ import { expect } from 'chai'; import sinon from 'sinon'; -import localforage from 'localforage'; + /** * Internal dependencies */ import config from 'config'; -import createReduxStoreFromPersistedInitialState, { MAX_AGE } from 'state/initial-state'; +import domEnvSetup from 'lib/react-test-env-setup'; describe( 'initial-state', () => { + let localforage, createReduxStoreFromPersistedInitialState, MAX_AGE; + + before( () => { + domEnvSetup(); + + localforage = require( 'localforage' ); + const initialState = require( '../initial-state' ); + createReduxStoreFromPersistedInitialState = initialState.default; + MAX_AGE = initialState.MAX_AGE; + } ); + + after( () => { + domEnvSetup.cleanup(); + } ); + describe( 'createReduxStoreFromPersistedInitialState', () => { describe( 'persist-redux disabled', () => { describe( 'with recently persisted data and initial server data', () => { @@ -114,18 +128,6 @@ describe( 'initial-state', () => { consoleSpy, localforageGetItemStub, state, - savedState = { - currentUser: { id: 123456789 }, - postTypes: { - items: { - 2916284: { - post: { name: 'post', label: 'Posts' }, - page: { name: 'page', label: 'Pages' } - } - } - }, - _timestamp: Date.now() - MAX_AGE - }, serverState = { postTypes: { items: { @@ -143,7 +145,18 @@ describe( 'initial-state', () => { localforageGetItemStub = sinon.stub( localforage, 'getItem' ) .returns( new Promise( function( resolve ) { - resolve( savedState ); + resolve( { + currentUser: { id: 123456789 }, + postTypes: { + items: { + 2916284: { + post: { name: 'post', label: 'Posts' }, + page: { name: 'page', label: 'Pages' } + } + } + }, + _timestamp: Date.now() - MAX_AGE - 1 + } ); } ) ); const reduxReady = function( reduxStore 
) {
State: Improve tear-down of initial state testing
Automattic_wp-calypso
train
449a9725fa8ba7078c36d7439444f56a352903c2
diff --git a/atomic_reactor/plugins/exit_koji_import.py b/atomic_reactor/plugins/exit_koji_import.py index <HASH>..<HASH> 100644 --- a/atomic_reactor/plugins/exit_koji_import.py +++ b/atomic_reactor/plugins/exit_koji_import.py @@ -117,7 +117,8 @@ class KojiImportPlugin(ExitPlugin): if platform == "x86_64" and has_pulp_pull: exit_results = self.workflow.exit_results image_id, _ = exit_results[PLUGIN_PULP_PULL_KEY] - instance['extra']['docker']['id'] = image_id + if image_id is not None: + instance['extra']['docker']['id'] = image_id # update repositories to point to Crane if crane_registry: diff --git a/tests/plugins/test_koji_import.py b/tests/plugins/test_koji_import.py index <HASH>..<HASH> 100644 --- a/tests/plugins/test_koji_import.py +++ b/tests/plugins/test_koji_import.py @@ -1045,11 +1045,15 @@ class TestKojiImport(object): False), ]) @pytest.mark.parametrize('tag_later', (True, False)) - @pytest.mark.parametrize('pulp_pull', (("abcdef01234567", 'v1'), False)) + @pytest.mark.parametrize('pulp_pull,expect_id', ( + (("abcdef01234567", ['v1']), 'abcdef01234567'), + ((None, ['v1', 'v2']), '123456'), + (False, '123456'), + )) def test_koji_import_success(self, tmpdir, apis, docker_registry, pulp_registries, target, os_env, has_config, is_autorebuild, - tag_later, pulp_pull): + tag_later, pulp_pull, expect_id): session = MockedClientSession('') # When target is provided koji build will always be tagged, # either by koji_import or koji_tag_build. @@ -1105,7 +1109,7 @@ class TestKojiImport(object): if pulp_pull: for output in output_files: if 'extra' in output: - assert output['extra']['docker']['id'] == pulp_pull[0] + assert output['extra']['docker']['id'] == expect_id assert set(build.keys()) == set([ 'name',
koji_import: don't overwrite 'id' with None When pulp_pull sees a v2 digest from Crane it returns None. In this case, don't overwrite the reported image ID.
projectatomic_atomic-reactor
train
ab9dc877054a5822e168d0e772b2a82b90cd4908
diff --git a/frontend/dockerfile/builder/build.go b/frontend/dockerfile/builder/build.go index <HASH>..<HASH> 100644 --- a/frontend/dockerfile/builder/build.go +++ b/frontend/dockerfile/builder/build.go @@ -130,8 +130,15 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) { name := "load build definition from " + filename + filenames := []string{filename, filename + ".dockerignore"} + + // dockerfile is also supported casing moby/moby#10858 + if path.Base(filename) == defaultDockerfileName { + filenames = append(filenames, path.Join(path.Dir(filename), strings.ToLower(defaultDockerfileName))) + } + src := llb.Local(localNameDockerfile, - llb.FollowPaths([]string{filename, filename + ".dockerignore"}), + llb.FollowPaths(filenames), llb.SessionID(c.BuildOpts().SessionID), llb.SharedKeyHint(localNameDockerfile), dockerfile2llb.WithInternalName(name), @@ -258,7 +265,19 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) { Filename: filename, }) if err != nil { - return errors.Wrapf(err, "failed to read dockerfile") + fallback := false + if path.Base(filename) == defaultDockerfileName { + var err1 error + dtDockerfile, err1 = ref.ReadFile(ctx2, client.ReadRequest{ + Filename: path.Join(path.Dir(filename), strings.ToLower(defaultDockerfileName)), + }) + if err1 == nil { + fallback = true + } + } + if !fallback { + return errors.Wrapf(err, "failed to read dockerfile") + } } sourceMap = llb.NewSourceMap(&src, filename, dtDockerfile) diff --git a/frontend/dockerfile/dockerfile_test.go b/frontend/dockerfile/dockerfile_test.go index <HASH>..<HASH> 100644 --- a/frontend/dockerfile/dockerfile_test.go +++ b/frontend/dockerfile/dockerfile_test.go @@ -107,6 +107,7 @@ var allTests = []integration.Test{ testFrontendSubrequests, testDockefileCheckHostname, testDefaultShellAndPath, + testDockerfileLowercase, } var fileOpTests = []integration.Test{ @@ -2732,6 +2733,34 @@ COPY . . 
} } +// moby/moby#10858 +func testDockerfileLowercase(t *testing.T, sb integration.Sandbox) { + f := getFrontend(t, sb) + + dockerfile := []byte(`FROM scratch +`) + + dir, err := tmpdir( + fstest.CreateFile("dockerfile", dockerfile, 0600), + ) + require.NoError(t, err) + defer os.RemoveAll(dir) + + ctx := context.TODO() + + c, err := client.New(ctx, sb.Address()) + require.NoError(t, err) + defer c.Close() + + _, err = f.Solve(ctx, c, client.SolveOpt{ + LocalDirs: map[string]string{ + builder.DefaultLocalNameDockerfile: dir, + builder.DefaultLocalNameContext: dir, + }, + }, nil) + require.NoError(t, err) +} + func testExportedHistory(t *testing.T, sb integration.Sandbox) { skipDockerd(t, sb) f := getFrontend(t, sb)
dockerfile: allow lowercase dockerfile name This was supported by the legacy builder moby/moby#<I>
moby_buildkit
train
531eeae29b94ce1cbdaec8c6ce7bbafba770aae9
diff --git a/pyzcasp/potassco/__init__.py b/pyzcasp/potassco/__init__.py index <HASH>..<HASH> 100644 --- a/pyzcasp/potassco/__init__.py +++ b/pyzcasp/potassco/__init__.py @@ -16,7 +16,19 @@ # along with caspo. If not, see <http://www.gnu.org/licenses/>.import random # -*- coding: utf-8 -*- +from adapters import * from interfaces import * from utilities import * -from pyzcasp import asp \ No newline at end of file +from pyzcasp import asp +from zope import component + +gsm = component.getGlobalSiteManager() + +gsm.registerUtility(asp.EncodingRegistry(), asp.IEncodingRegistry, 'potassco') + +root = __file__.rsplit('/', 1)[0] +reg = component.getUtility(asp.IEncodingRegistry, 'potassco') +reg.register_encoding('meta', root + '/encodings/meta.lp') +reg.register_encoding('metaD', root + '/encodings/metaD.lp') +reg.register_encoding('metaO', root + '/encodings/metaO.lp') diff --git a/pyzcasp/potassco/interfaces.py b/pyzcasp/potassco/interfaces.py index <HASH>..<HASH> 100644 --- a/pyzcasp/potassco/interfaces.py +++ b/pyzcasp/potassco/interfaces.py @@ -27,3 +27,18 @@ class IClaspSolver(asp.ISolver): """ clasp solver """ + +class IClaspHSolver(IClaspSolver): + """ + hclasp solver + """ + +class IClaspDSolver(IClaspSolver): + """ + claspD solver + """ + +class IClaspSubsetMinimalSolver(asp.ISubsetMinimalSolver): + """ + Marker interface for clasp subset minimal solver + """ diff --git a/pyzcasp/potassco/utilities.py b/pyzcasp/potassco/utilities.py index <HASH>..<HASH> 100644 --- a/pyzcasp/potassco/utilities.py +++ b/pyzcasp/potassco/utilities.py @@ -73,11 +73,13 @@ class ClaspSolver(asp.Process): return atoms class ClaspHSolver(ClaspSolver): + interface.implements(IClaspHSolver, IClaspSubsetMinimalSolver) def __filteratoms__(self, atoms): filter(lambda atom: not atom.startswith('_'), atoms) class ClaspDSolver(ClaspSolver): + interface.implements(IClaspDSolver, IClaspSubsetMinimalSolver) def __getstats__(self): return self.json['Models']
- Register metasp encodings - Add marker interfaces to solvers
svidela_pyzcasp
train
68810a5559349b0d2dbc25896bc8e8db02433459
diff --git a/spyderlib/plugins/inspector.py b/spyderlib/plugins/inspector.py index <HASH>..<HASH> 100644 --- a/spyderlib/plugins/inspector.py +++ b/spyderlib/plugins/inspector.py @@ -457,14 +457,24 @@ class ObjectInspector(SpyderPluginWidget): if self._starting_up: self._starting_up = False intro_message = _("Here you can get help of any object by pressing " - "<b>Ctrl+I</b> in front of it, either on the " - "Editor or the Console.<br><br>" + "%s in front of it, either on the Editor or the " + "Console.%s" "Help is also shown automatically after writing " "an opening brace next to an object. If you " "don't like this behavior, you can deactivate " - "it in <i>Preferences</i>.") - self.set_rich_text_html(usage(intro_message), - QUrl.fromLocalFile(CSS_PATH)) + "it in %s.") + prefs = _("Preferences") + if self.is_rich_text_mode(): + intro_message = intro_message % ("<b>Ctrl+I</b>", "<br><br>", + "<i>"+prefs+"</i>") + self.set_rich_text_html(usage(intro_message), + QUrl.fromLocalFile(CSS_PATH)) + else: + install_sphinx = _("\n\nPlease consider installing Sphinx to " + "get documentation rendered in rich text.") + intro_message = intro_message % ("Ctrl+I", "\n\n", prefs) + intro_message += install_sphinx + self.set_plain_text(intro_message, is_code=False) def apply_plugin_settings(self, options): """Apply configuration file's plugin settings"""
Object Inspector: Fix showing intro message when rich text mode is not available
spyder-ide_spyder
train
81870261ee860b9d19b8a512d21302a19c11d1d0
diff --git a/src/FormFactory.php b/src/FormFactory.php index <HASH>..<HASH> 100644 --- a/src/FormFactory.php +++ b/src/FormFactory.php @@ -60,7 +60,7 @@ class FormFactory // Create an id if needed, this speeds up finding the element again if (!$id) { - $id = spl_object_hash($element); + $id = $name; $element->setAttribute('id', $id); } @@ -70,22 +70,6 @@ class FormFactory $type = 'textarea'; } - // Get element value - $value = (isset($data[$name])) ? $data[$name] : $element->getAttribute('value'); - - // Set input value - $reuseSubmittedValue = filter_var( - $element->getAttribute('data-reuse-submitted-value'), - FILTER_VALIDATE_BOOLEAN - ); - - if ($reuseSubmittedValue) { - // Use for textarea - $element->nodeValue = $value; - // For other elements - $element->setAttribute('value', $value); - } - // Add validation if (isset($this->formElements[$type])) { $validator = new $this->formElements[$type]; @@ -103,11 +87,29 @@ class FormFactory } } - /* TODO: Set validated values - foreach ($inputFilter->getValues() as $id => $value) { - $element = $this->document->getElementById($id); + $validatedValues = $inputFilter->getValues(); + + // Set validated values + foreach ($elements as $element) { + $reuseSubmittedValue = filter_var( + $element->getAttribute('data-reuse-submitted-value'), + FILTER_VALIDATE_BOOLEAN + ); + + if (! $reuseSubmittedValue) { + continue; + } + + $id = $element->getAttribute('id'); + + // Get element value + $value = (isset($validatedValues[$id])) ? $validatedValues[$id] : $element->getAttribute('value'); + + // Use for textarea + $element->nodeValue = $value; + // For other elements $element->setAttribute('value', $value); - }*/ + } // Return validation result return new ValidationResult($inputFilter->getRawValues(), $inputFilter->getValues(), $validationErrors);
Reuse filtered value if asked for
xtreamwayz_html-form-validator
train
b7703e5d3568a60301a220369f7e229f16e671cb
diff --git a/lib/amq/protocol/frame.rb b/lib/amq/protocol/frame.rb index <HASH>..<HASH> 100644 --- a/lib/amq/protocol/frame.rb +++ b/lib/amq/protocol/frame.rb @@ -14,7 +14,7 @@ module AMQ # The channel number is 0 for all frames which are global to the connection and 1-65535 for frames that refer to specific channels. def self.encode(type, payload, channel) - raise FrameTypeError.new(TYPES_OPTIONS) unless TYPES_OPTIONS.include?(type) or (type = TYPES[type]) + raise FrameTypeError.new(TYPES_OPTIONS) if type == nil || !(TYPES_OPTIONS.include?(type) || type == TYPES[type]) raise RuntimeError.new("Channel has to be 0 or an integer in range 1..65535 but was #{channel.inspect}") unless CHANNEL_RANGE.include?(channel) raise RuntimeError.new("Payload can't be nil") if payload.nil? [TYPES[type], channel, payload.bytesize].pack(PACK_CACHE[:cnN]) + payload + FINAL_OCTET
Fixed an instance of accidental assignment Had to add an extra check for nil.
ruby-amqp_amq-protocol
train
8e656d798a6898ea2e90932c1e6ea6eb716fc42b
diff --git a/src/Service/QueryModifier/UseQuery/UseQueryFromDotNotation.php b/src/Service/QueryModifier/UseQuery/UseQueryFromDotNotation.php index <HASH>..<HASH> 100644 --- a/src/Service/QueryModifier/UseQuery/UseQueryFromDotNotation.php +++ b/src/Service/QueryModifier/UseQuery/UseQueryFromDotNotation.php @@ -135,7 +135,7 @@ class UseQueryFromDotNotation throw new RelationNotFoundException("Relation \"$relation\" Not Found in \"$path\""); } $alias = $alias ?? 'alias_' . self::$aliasesCnt++; - $this->query = call_user_func([$this->query, $method], "`" . $alias . "_" . $relation . "`", $joinType); + $this->query = call_user_func([$this->query, $method], $alias . "_" . $relation, $joinType); } } $this->inUse = true;
fix(QueryModifier): useless backtick removed
wollanup_php-api-rest
train
21f7c2d92a8a1d1ca15692d22a1ed38ed85bcc9a
diff --git a/cmd/swagger/commands/generate/model.go b/cmd/swagger/commands/generate/model.go index <HASH>..<HASH> 100644 --- a/cmd/swagger/commands/generate/model.go +++ b/cmd/swagger/commands/generate/model.go @@ -25,6 +25,7 @@ type Model struct { Name []string `long:"name" short:"n" description:"the model to generate"` NoStruct bool `long:"skip-struct" description:"when present will not generate the model struct"` DumpData bool `long:"dump-data" description:"when present dumps the json for the template generator instead of generating files"` + SkipFlattening bool `long:"skip-flatten" description:"skips flattening of spec prior to generation"` SkipValidation bool `long:"skip-validation" description:"skips validation of spec prior to generation"` } @@ -47,6 +48,7 @@ func (m *Model) Execute(args []string) error { SkipSupport: true, SkipOperations: true, SkipModels: m.NoStruct, + SkipFlattening: m.SkipFlattening, SkipValidation: m.SkipValidation, } return s.Execute(args)
support --skip-flatten for model generation
go-swagger_go-swagger
train
ed413ceda7faac862823e8f859e24d41e3e4042d
diff --git a/lib/how_is.rb b/lib/how_is.rb index <HASH>..<HASH> 100644 --- a/lib/how_is.rb +++ b/lib/how_is.rb @@ -58,8 +58,7 @@ module HowIs # @return [Boolean] +true+ if HowIs can export to the file, +false+ # if it can't. def self.can_export_to?(file) - # TODO: Check if the file is writable? - supported_formats.include?(file.split(".").last) + supported_formats.include?(File.extname(file)[1..-1]) end def self.silence_warnings(&block) diff --git a/lib/how_is/report.rb b/lib/how_is/report.rb index <HASH>..<HASH> 100644 --- a/lib/how_is/report.rb +++ b/lib/how_is/report.rb @@ -104,7 +104,7 @@ module HowIs end def to_format_for(filename) - format = filename.split(".").last + format = File.extname(filename)[1..-1] send("to_#{format}") end private :to_format_for
Use File.extname() instead of doing it manually.
duckinator_inq
train
81ad53f2d8226021f4402ecb020a4b78b0f1ae15
diff --git a/apiserver/admin.go b/apiserver/admin.go index <HASH>..<HASH> 100644 --- a/apiserver/admin.go +++ b/apiserver/admin.go @@ -227,6 +227,11 @@ func (a *admin) authenticate(req params.LoginRequest) (*authResult, error) { if entity != nil { if machine, ok := entity.(*state.Machine); ok && machine.IsManager() { result.controllerMachineLogin = true + if machine.Tag() != a.srv.tag { + // We don't want to run pingers for other + // controller machines; they run their own. + startPinger = false + } } a.root.entity = entity a.apiObserver.Login(entity.Tag(), a.root.model.ModelTag(), result.controllerMachineLogin, req.UserData)
apiserver: don't run pingers for other controllers
juju_juju
train
d76a9be27b33ad9fca50bc7dcd5bd583659741ce
diff --git a/lib/infoblox/resource/network.rb b/lib/infoblox/resource/network.rb index <HASH>..<HASH> 100644 --- a/lib/infoblox/resource/network.rb +++ b/lib/infoblox/resource/network.rb @@ -7,8 +7,18 @@ module Infoblox wapi_object "network" - def next_available_ip(num=1) - JSON.parse(connection.post(resource_uri + "?_function=next_available_ip&num=#{num}", {}).body)["ips"] + ## + # Invoke the same-named function on the network resource in WAPI, + # returning an array of available IP addresses. + # You may optionally specify how many IPs you want (num) and which ones to + # exclude from consideration (array of IPv4 addrdess strings). + # + def next_available_ip(num=1, exclude=[]) + post_body = { + num: num.to_i, + exclude: exclude + } + JSON.parse(connection.post(resource_uri + "?_function=next_available_ip", post_body).body)["ips"] end end -end \ No newline at end of file +end diff --git a/lib/infoblox/version.rb b/lib/infoblox/version.rb index <HASH>..<HASH> 100644 --- a/lib/infoblox/version.rb +++ b/lib/infoblox/version.rb @@ -1,3 +1,3 @@ module Infoblox - VERSION = "0.2.0" + VERSION = "0.2.1" end
Fully-supported 'next_available_ip' invocation on Network resource
govdelivery_infoblox
train
964fea099d3cf3e2c3f50f000a1759dc6d1bfb29
diff --git a/lib/rest-ftp-daemon/notification.rb b/lib/rest-ftp-daemon/notification.rb index <HASH>..<HASH> 100644 --- a/lib/rest-ftp-daemon/notification.rb +++ b/lib/rest-ftp-daemon/notification.rb @@ -9,11 +9,11 @@ module RestFtpDaemon attr_accessor :status attr_accessor :url attr_accessor :job - attr_accessor :key + #attr_accessor :key def initialize url, params # Generate a random key - key = Helpers.identifier(IDENT_NOTIF_LEN) + @id = Helpers.identifier(IDENT_NOTIF_LEN) # Logger # @logger = RestFtpDaemon::Logger.new(:workers, "NOTIF #{key}")
notification identifier is now called @id instead of @key, removing its accessor
bmedici_rest-ftp-daemon
train
13c931baec1eb8b6b23ed3d7ff707b9d0bd2ea38
diff --git a/tasks/lib/po.js b/tasks/lib/po.js index <HASH>..<HASH> 100644 --- a/tasks/lib/po.js +++ b/tasks/lib/po.js @@ -81,7 +81,7 @@ PO.parse = function (contents) { } var po = new PO, - messages = contents.replace(/(?:\r\n)/g, '\n').split('\n\n'), + messages = contents.replace(/\r\n/g, '\n').split('\n\n'), headers = messages.shift(); var header = parseMessage(headers);
Cleaned up a pointless non-capturing group
warmrobot_po2json
train
d1654b9bf4da0ab6158bb0f49479eb7f01bbc7f6
diff --git a/src/Psalm/DocComment.php b/src/Psalm/DocComment.php index <HASH>..<HASH> 100644 --- a/src/Psalm/DocComment.php +++ b/src/Psalm/DocComment.php @@ -49,7 +49,8 @@ class DocComment $last = false; } elseif ($last !== false) { $old_last_line = $lines[$last]; - $lines[$last] = rtrim($old_last_line) . ($preserve_format ? "\n" . $line : ' ' . trim($line)); + $lines[$last] = rtrim($old_last_line) + . ($preserve_format || trim($old_last_line) === '@return' ? "\n" . $line : ' ' . trim($line)); if ($line_number) { $old_line_number = $line_map[$old_last_line]; diff --git a/src/Psalm/Internal/Analyzer/CommentAnalyzer.php b/src/Psalm/Internal/Analyzer/CommentAnalyzer.php index <HASH>..<HASH> 100644 --- a/src/Psalm/Internal/Analyzer/CommentAnalyzer.php +++ b/src/Psalm/Internal/Analyzer/CommentAnalyzer.php @@ -256,36 +256,7 @@ class CommentAnalyzer ? $comments['specials']['psalm-return'] : $comments['specials']['return']; - $return_block = trim((string)reset($return_specials)); - - if (!$return_block) { - throw new DocblockParseException('Missing @return type'); - } - - try { - $line_parts = self::splitDocLine($return_block); - } catch (DocblockParseException $e) { - throw $e; - } - - if (!preg_match('/\[[^\]]+\]/', $line_parts[0]) - && $line_parts[0][0] !== '{' - ) { - if ($line_parts[0][0] === '$' && !preg_match('/^\$this(\||$)/', $line_parts[0])) { - throw new IncorrectDocblockException('Misplaced variable'); - } - - $info->return_type = array_shift($line_parts); - $info->return_type_description = $line_parts ? 
implode(' ', $line_parts) : null; - - $line_number = array_keys($return_specials)[0]; - - if ($line_number) { - $info->return_type_line_number = $line_number; - } - } else { - throw new DocblockParseException('Badly-formatted @return type'); - } + self::extractReturnType((string) reset($return_specials), array_keys($return_specials)[0], $info); } if (isset($comments['specials']['param']) || isset($comments['specials']['psalm-param'])) { @@ -486,6 +457,47 @@ class CommentAnalyzer } /** + * @return void + */ + private static function extractReturnType(string $return_block, int $line_number, FunctionDocblockComment $info) + { + $return_lines = explode("\n", $return_block); + + if (!trim($return_lines[0])) { + return; + } + + $return_block = trim($return_block); + + if (!$return_block) { + return; + } + + try { + $line_parts = self::splitDocLine($return_block); + } catch (DocblockParseException $e) { + throw $e; + } + + if (!preg_match('/\[[^\]]+\]/', $line_parts[0]) + && $line_parts[0][0] !== '{' + ) { + if ($line_parts[0][0] === '$' && !preg_match('/^\$this(\||$)/', $line_parts[0])) { + throw new IncorrectDocblockException('Misplaced variable'); + } + + $info->return_type = array_shift($line_parts); + $info->return_type_description = $line_parts ? implode(' ', $line_parts) : null; + + if ($line_number) { + $info->return_type_line_number = $line_number; + } + } else { + throw new DocblockParseException('Badly-formatted @return type'); + } + } + + /** * @param string $comment * @param int $line_number * diff --git a/tests/AnnotationTest.php b/tests/AnnotationTest.php index <HASH>..<HASH> 100644 --- a/tests/AnnotationTest.php +++ b/tests/AnnotationTest.php @@ -1153,6 +1153,15 @@ class AnnotationTest extends TestCase function foo() : void {}', 'error_message' => 'InvalidDocblock', ], + 'returnTypeNewLineIsIgnored' => [ + '<?php + /** + * @return + * Some text + */ + function foo() {}', + 'error_message' => 'MissingReturnType', + ], ]; } }
Ignore @return with a newline after @return
vimeo_psalm
train
356a0689dcf419baba0aff6df3569cd389140585
diff --git a/src/Palladium/Mapper/NonceIdentity.php b/src/Palladium/Mapper/NonceIdentity.php index <HASH>..<HASH> 100644 --- a/src/Palladium/Mapper/NonceIdentity.php +++ b/src/Palladium/Mapper/NonceIdentity.php @@ -31,10 +31,10 @@ class NonceIdentity extends DataMapper $statement = $this->connection->prepare($sql); + $statement->bindValue(':type', $entity->getType()); + $statement->bindValue(':status', Entity\Identity::STATUS_ACTIVE); $statement->bindValue(':fingerprint', $entity->getFingerprint()); $statement->bindValue(':identifier', $entity->getIdentifier()); - $statement->bindValue(':status', Entity\Identity::STATUS_ACTIVE); - $statement->bindValue(':type', $entity->getType()); $statement->execute(); diff --git a/tests/unit/Palladium/Mapper/NonceIdentityTest.php b/tests/unit/Palladium/Mapper/NonceIdentityTest.php index <HASH>..<HASH> 100644 --- a/tests/unit/Palladium/Mapper/NonceIdentityTest.php +++ b/tests/unit/Palladium/Mapper/NonceIdentityTest.php @@ -57,9 +57,9 @@ final class NonceIdentityTest extends TestCase ); $pdo = $this - ->getMockBuilder(PDO::class) - ->disableOriginalConstructor() - ->getMock(); + ->getMockBuilder(PDO::class) + ->disableOriginalConstructor() + ->getMock(); $pdo->expects($this->once())->method('prepare')->will($this->returnValue($statement)); @@ -70,4 +70,80 @@ final class NonceIdentityTest extends TestCase $instance = new NonceIdentity($pdo, 'table'); $instance->store($identity); } + + + /** + * @test + */ + public function retrieve_Nonce_Identity_by_Identifier_with_No_Data() + { + $statement = $this + ->getMockBuilder(PDOStatement::class) + ->disableOriginalConstructor() + ->getMock(); + $statement + ->method('bindValue') + ->withConsecutive( + [$this->equalTo(':type'), $this->equalTo(Entity\Identity::TYPE_NONCE), null], + [$this->equalTo(':status'), $this->equalTo(Entity\Identity::STATUS_ACTIVE), null], + [$this->equalTo(':fingerprint'), 
$this->equalTo('3c9c30d9f665e74d515c842960d4a451c83a0125fd3de7392d7b37231af10c72ea58aedfcdf89a5765bf902af93ecf06'), null], + [$this->equalTo(':identifier'), $this->equalTo('foobar'), null] + ); + $statement + ->method('fetch') + ->will($this->returnValue(null)); + + $pdo = $this + ->getMockBuilder(PDO::class) + ->disableOriginalConstructor() + ->getMock(); + $pdo->expects($this->once())->method('prepare')->will($this->returnValue($statement)); + + + $identity = new Entity\NonceIdentity; + $identity->setIdentifier('foobar'); + + $instance = new NonceIdentity($pdo, 'table'); + $instance->fetch($identity); + + $this->assertNull($identity->getId()); + } + + + /** + * @test + */ + public function retrieve_Nonce_Identity_by_Identifier() + { + $statement = $this + ->getMockBuilder(PDOStatement::class) + ->disableOriginalConstructor() + ->getMock(); + $statement + ->method('bindValue') + ->withConsecutive( + [$this->equalTo(':type'), $this->equalTo(Entity\Identity::TYPE_NONCE), null], + [$this->equalTo(':status'), $this->equalTo(Entity\Identity::STATUS_ACTIVE), null], + [$this->equalTo(':fingerprint'), $this->equalTo('3c9c30d9f665e74d515c842960d4a451c83a0125fd3de7392d7b37231af10c72ea58aedfcdf89a5765bf902af93ecf06'), null], + [$this->equalTo(':identifier'), $this->equalTo('foobar'), null] + ); + $statement + ->method('fetch') + ->will($this->returnValue(['id' => 4])); + + $pdo = $this + ->getMockBuilder(PDO::class) + ->disableOriginalConstructor() + ->getMock(); + $pdo->expects($this->once())->method('prepare')->will($this->returnValue($statement)); + + + $identity = new Entity\NonceIdentity; + $identity->setIdentifier('foobar'); + + $instance = new NonceIdentity($pdo, 'table'); + $instance->fetch($identity); + + $this->assertSame(4, $identity->getId()); + } }
Covering the mapping for nonce identity
teresko_palladium
train
99f8bc304006b25eb827e7055b56ae8ae62a5594
diff --git a/tests/src/main/java/org/hibernate/beanvalidation/tck/tests/valueextraction/unwrapping/ValueExtractionUnwrappingTest.java b/tests/src/main/java/org/hibernate/beanvalidation/tck/tests/valueextraction/unwrapping/ValueExtractionUnwrappingTest.java index <HASH>..<HASH> 100644 --- a/tests/src/main/java/org/hibernate/beanvalidation/tck/tests/valueextraction/unwrapping/ValueExtractionUnwrappingTest.java +++ b/tests/src/main/java/org/hibernate/beanvalidation/tck/tests/valueextraction/unwrapping/ValueExtractionUnwrappingTest.java @@ -6,7 +6,6 @@ */ package org.hibernate.beanvalidation.tck.tests.valueextraction.unwrapping; -import static org.hibernate.beanvalidation.tck.util.ConstraintViolationAssert.assertNumberOfViolations; import static org.hibernate.beanvalidation.tck.util.ConstraintViolationAssert.assertThat; import static org.hibernate.beanvalidation.tck.util.ConstraintViolationAssert.violationOf; import static org.testng.Assert.assertEquals; @@ -122,7 +121,9 @@ public class ValueExtractionUnwrappingTest extends AbstractTCKTest { @SpecAssertion(section = Sections.CONSTRAINTDECLARATIONVALIDATIONPROCESS_VALIDATIONROUTINE_VALUEEXTRACTORRESOLUTION_IMPLICITUNWRAPPING, id = "a") public void validate_wrapper_if_unwrapping_disabled_per_constraint_for_generic_container() { Set<ConstraintViolation<WrapperWithDisabledUnwrapping>> constraintViolations = getValidatorWithValueExtractors().validate( new WrapperWithDisabledUnwrapping() ); - assertNumberOfViolations( constraintViolations, 0 ); + assertThat( constraintViolations ).containsOnlyViolations( + violationOf( Null.class ).withProperty( "integerWrapper" ) + ); } @Test @@ -152,7 +153,9 @@ public class ValueExtractionUnwrappingTest extends AbstractTCKTest { @SpecAssertion(section = Sections.VALUEEXTRACTORDEFINITION_EXTRACTEDVALUE, id = "e") public void validate_wrapper_if_unwrapping_disabled_per_constraint_for_non_generic_container() { Set<ConstraintViolation<IntegerWrapperWithDisabledUnwrapping>> constraintViolations 
= getValidatorWithValueExtractors().validate( new IntegerWrapperWithDisabledUnwrapping() ); - assertNumberOfViolations( constraintViolations, 0 ); + assertThat( constraintViolations ).containsOnlyViolations( + violationOf( Null.class ).withProperty( "integerWrapper" ) + ); } @Test @@ -183,7 +186,7 @@ public class ValueExtractionUnwrappingTest extends AbstractTCKTest { .getConstraintDescriptors() .iterator().next(); - assertEquals( minConstraintDescriptor.getAnnotation().annotationType(), NotNull.class ); + assertEquals( minConstraintDescriptor.getAnnotation().annotationType(), Null.class ); assertEquals( minConstraintDescriptor.validateUnwrappedValue(), ValidateUnwrappedValue.NO ); minConstraintDescriptor = validator.getConstraintsForClass( WrapperWithForcedUnwrapping.class ) @@ -228,8 +231,8 @@ public class ValueExtractionUnwrappingTest extends AbstractTCKTest { private class WrapperWithDisabledUnwrapping { - @NotNull(payload = { Unwrapping.Skip.class }) - private final Wrapper<Integer> integerWrapper = new Wrapper<>( null ); + @Null(payload = { Unwrapping.Skip.class }) + private final Wrapper<Integer> integerWrapper = new Wrapper<>( 5 ); } private class WrapperWithForcedUnwrapping { @@ -246,8 +249,8 @@ public class ValueExtractionUnwrappingTest extends AbstractTCKTest { private class IntegerWrapperWithDisabledUnwrapping { - @NotNull(payload = { Unwrapping.Skip.class }) - private final IntegerWrapper integerWrapper = new IntegerWrapper( null ); + @Null(payload = { Unwrapping.Skip.class }) + private final IntegerWrapper integerWrapper = new IntegerWrapper( 5 ); } private class IntegerWrapperWithForcedUnwrapping {
BVTCK-<I> Test with WrapperWithDisabledUnwrapping should be based on presence of violation
beanvalidation_beanvalidation-tck
train
1f1050e48d3f8fed7f8e475f9f977a94a4dc96a3
diff --git a/server_modules/dnsbl.js b/server_modules/dnsbl.js index <HASH>..<HASH> 100644 --- a/server_modules/dnsbl.js +++ b/server_modules/dnsbl.js @@ -26,7 +26,7 @@ module.on('irc connecting', function (event, event_data) { isBlacklisted(client_addr, function(is_blocked) { if (is_blocked) { - var err = new Error('DNSBL blocked'); + var err = new Error('DNSBL blocked (' + client_addr + ')'); err.code = 'Blacklisted'; event_data.connection.emit('error', err);
DNSBL: Include blocked address in error log
prawnsalad_KiwiIRC
train
ecf5521a665b9c0692b5447a9365f6a10b64111d
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,3 +1,4 @@ +import setuptools from distutils.core import setup import os
import setuptools to enable bdist_wheel
jgm_pandocfilters
train
34a3deacb11498642de4a0cfce3305e9f84f4107
diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java index <HASH>..<HASH> 100644 --- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java +++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java @@ -42,6 +42,7 @@ import com.orientechnologies.orient.core.db.record.OIdentifiable; import com.orientechnologies.orient.core.engine.local.OEngineLocal; import com.orientechnologies.orient.core.engine.memory.OEngineMemory; import com.orientechnologies.orient.core.exception.ODatabaseException; +import com.orientechnologies.orient.core.exception.ORecordNotFoundException; import com.orientechnologies.orient.core.exception.OSecurityAccessException; import com.orientechnologies.orient.core.exception.OSerializationException; import com.orientechnologies.orient.core.fetch.OFetchHelper; @@ -575,6 +576,10 @@ public class ONetworkProtocolBinary extends ONetworkProtocol { final ORecordInternal<?> currentRecord; if (newRecord instanceof ODocument) { currentRecord = connection.database.load(rid); + + if (currentRecord == null) + throw new ORecordNotFoundException(rid.toString()); + final ODocument doc = (ODocument) currentRecord; doc.merge((ODocument) newRecord, false, false); doc.setVersion(version);
Fixed issue <I>. Now if the updated record not exists a ORecordNotFoundException exception is thrown
orientechnologies_orientdb
train
11e56b4b2a1fe5ac1a1cfa181f33c9401316c88a
diff --git a/gulpfile.js b/gulpfile.js index <HASH>..<HASH> 100644 --- a/gulpfile.js +++ b/gulpfile.js @@ -60,7 +60,9 @@ gulp.task('mocha', function () { return gulp.src(paths.mochaSrc, {read: false}) .pipe(mocha({ reporter: 'spec' - })); + })).once('end', function () { + process.exit(); + });; }); /**
adding single run to gulp mocha
back4app_back4app-entity-mongodb
train
bda385019999309d2cd2117099223ec23802ce62
diff --git a/lib/passenger/abstract_server.rb b/lib/passenger/abstract_server.rb index <HASH>..<HASH> 100644 --- a/lib/passenger/abstract_server.rb +++ b/lib/passenger/abstract_server.rb @@ -88,11 +88,18 @@ class AbstractServer class ServerError < StandardError end + # The last time when this AbstractServer had processed a message. + attr_accessor :last_activity_time + + # An attribute, used internally. This should not be used outside Passenger. + attr_accessor :max_idle_time + def initialize @done = false @message_handlers = {} @signal_handlers = {} @orig_signal_handlers = {} + @last_activity_time = Time.now end # Start the server. This method does not block since the server runs @@ -311,6 +318,7 @@ private while !@done begin name, *args = channel.read + @last_activity_time = Time.now if name.nil? @done = true elsif @message_handlers.has_key?(name) diff --git a/lib/passenger/railz/application_spawner.rb b/lib/passenger/railz/application_spawner.rb index <HASH>..<HASH> 100644 --- a/lib/passenger/railz/application_spawner.rb +++ b/lib/passenger/railz/application_spawner.rb @@ -51,8 +51,6 @@ class ApplicationSpawner < AbstractServer # The group ID of the root user. ROOT_GID = 0 - # An attribute, used internally. This should not be used outside Passenger. - attr_accessor :time # The application root of this spawner. 
attr_reader :app_root @@ -85,7 +83,7 @@ class ApplicationSpawner < AbstractServer @lower_privilege = lower_privilege @lowest_user = lowest_user @environment = environment - self.time = Time.now + self.max_idle_time = APP_SPAWNER_MAX_IDLE_TIME assert_valid_app_root(@app_root) define_message_handler(:spawn_application, :handle_spawn_application) end diff --git a/lib/passenger/railz/framework_spawner.rb b/lib/passenger/railz/framework_spawner.rb index <HASH>..<HASH> 100644 --- a/lib/passenger/railz/framework_spawner.rb +++ b/lib/passenger/railz/framework_spawner.rb @@ -45,9 +45,6 @@ class FrameworkSpawner < AbstractServer class Error < AbstractServer::ServerError end - # An attribute, used internally. This should not be used outside Passenger. - attr_accessor :time - # Creates a new instance of FrameworkSpawner. # # Valid options: @@ -74,6 +71,7 @@ class FrameworkSpawner < AbstractServer end super() + self.max_idle_time = FRAMEWORK_SPAWNER_MAX_IDLE_TIME define_message_handler(:spawn_application, :handle_spawn_application) define_message_handler(:reload, :handle_reload) end @@ -287,7 +285,6 @@ private end @spawners[app_root] = spawner end - spawner.time = Time.now begin app = spawner.spawn_application rescue ApplicationSpawner::Error => e @@ -324,7 +321,7 @@ private # The main loop for the spawners cleaner thread. # This thread checks the spawners list every APP_SPAWNER_CLEAN_INTERVAL seconds, # and stops application spawners that have been idle for more than - # APP_SPAWNER_MAX_IDLE_TIME seconds. + # spawner.max_idle_time seconds. 
def spawners_cleaner_main_loop @spawners_lock.synchronize do while true @@ -334,7 +331,7 @@ private current_time = Time.now @spawners.keys.each do |key| spawner = @spawners[key] - if current_time - spawner.time > APP_SPAWNER_MAX_IDLE_TIME + if current_time - spawner.last_activity_time > spawner.max_idle_time spawner.stop @spawners.delete(key) end diff --git a/lib/passenger/spawn_manager.rb b/lib/passenger/spawn_manager.rb index <HASH>..<HASH> 100644 --- a/lib/passenger/spawn_manager.rb +++ b/lib/passenger/spawn_manager.rb @@ -225,7 +225,6 @@ private end @spawners[key] = spawner end - spawner.time = Time.now begin if spawner.is_a?(Railz::FrameworkSpawner) return spawner.spawn_application(app_root, lower_privilege, @@ -308,12 +307,7 @@ private current_time = Time.now @spawners.keys.each do |key| spawner = @spawners[key] - if spawner.is_a?(Railz::FrameworkSpawner) - max_idle_time = FRAMEWORK_SPAWNER_MAX_IDLE_TIME - else - max_idle_time = APP_SPAWNER_MAX_IDLE_TIME - end - if current_time - spawner.time > max_idle_time + if current_time - spawner.last_activity_time > spawner.max_idle_time if spawner.started? spawner.stop end
Refactoring: move spawn server idle time information to AbstractServer. This makes it easier in the future to have dynamic, configurable spawn server timeouts.
phusion_passenger
train