hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
92eed7228a676ff7f1e29a602209d9950b813931
|
diff --git a/fluids/compressible.py b/fluids/compressible.py
index <HASH>..<HASH> 100644
--- a/fluids/compressible.py
+++ b/fluids/compressible.py
@@ -325,8 +325,8 @@ def isentropic_efficiency(P1, P2, k, eta_s=None, eta_p=None):
{(P_2/P_1)^{\frac{k-1}{k\eta_p}}-1}
.. math::
- \eta_p = \frac{\left(k - 1\right) \log{\left (\frac{P_{2}}{P_{1}}
- \right )}}{k \log{\left (\frac{1}{\eta_{s}} \left(\eta_{s}
+ \eta_p = \frac{\left(k - 1\right) \ln{\left (\frac{P_{2}}{P_{1}}
+ \right )}}{k \ln{\left (\frac{1}{\eta_{s}} \left(\eta_{s}
+ \left(\frac{P_{2}}{P_{1}}\right)^{\frac{1}{k} \left(k - 1\right)}
- 1\right) \right )}}
diff --git a/fluids/drag.py b/fluids/drag.py
index <HASH>..<HASH> 100644
--- a/fluids/drag.py
+++ b/fluids/drag.py
@@ -821,7 +821,7 @@ def Clift(Re):
If 0.01 < Re < 20:
.. math::
- C_D = \frac{24}{Re}(1 + 0.1315Re^{0.82 - 0.05\log Re})
+ C_D = \frac{24}{Re}(1 + 0.1315Re^{0.82 - 0.05\log_{10} Re})
If 20 < Re < 260:
@@ -831,32 +831,32 @@ def Clift(Re):
If 260 < Re < 1500:
.. math::
- C_D = 10^{[1.6435 - 1.1242\log Re + 0.1558[\log Re]^2}
+ C_D = 10^{[1.6435 - 1.1242\log_{10} Re + 0.1558[\log_{10} Re]^2}
If 1500 < Re < 12000:
.. math::
- C_D = 10^{[-2.4571 + 2.5558\log Re - 0.9295[\log Re]^2 + 0.1049[\log Re]^3}
+ C_D = 10^{[-2.4571 + 2.5558\log_{10} Re - 0.9295[\log_{10} Re]^2 + 0.1049[\log_{10} Re]^3}
If 12000 < Re < 44000:
.. math::
- C_D = 10^{[-1.9181 + 0.6370\log Re - 0.0636[\log Re]^2}
+ C_D = 10^{[-1.9181 + 0.6370\log_{10} Re - 0.0636[\log_{10} Re]^2}
If 44000 < Re < 338000:
.. math::
- C_D = 10^{[-4.3390 + 1.5809\log Re - 0.1546[\log Re]^2}
+ C_D = 10^{[-4.3390 + 1.5809\log_{10} Re - 0.1546[\log_{10} Re]^2}
If 338000 < Re < 400000:
.. math::
- C_D = 9.78 - 5.3\log Re
+ C_D = 9.78 - 5.3\log_{10} Re
If 400000 < Re < 1000000:
.. math::
- C_D = 0.19\log Re - 0.49
+ C_D = 0.19\log_{10} Re - 0.49
Parameters
----------
diff --git a/fluids/geometry.py b/fluids/geometry.py
index <HASH>..<HASH> 100644
--- a/fluids/geometry.py
+++ b/fluids/geometry.py
@@ -3146,7 +3146,7 @@ class TANK(object):
"""
table = False
chebyshev = False
- __full_path__ = "%s.%s" %(__module__, __qualname__)
+ __full_path__ = "fluids.geometry.TANK"
def __repr__(self): # pragma: no cover
orient = 'Horizontal' if self.horizontal else 'Vertical'
|
Reverse the slow entropic death of Python 2 support once more
|
CalebBell_fluids
|
train
|
aac353c94e484026902e8fe1537292b12088d661
|
diff --git a/library/Imbo/Auth/AccessControl/Adapter/AdapterInterface.php b/library/Imbo/Auth/AccessControl/Adapter/AdapterInterface.php
index <HASH>..<HASH> 100644
--- a/library/Imbo/Auth/AccessControl/Adapter/AdapterInterface.php
+++ b/library/Imbo/Auth/AccessControl/Adapter/AdapterInterface.php
@@ -111,6 +111,14 @@ interface AdapterInterface {
function getPrivateKey($publicKey);
/**
+ * Get whether a public key exists or not
+ *
+ * @param string $publicKey Public key to check
+ * @return boolean
+ */
+ function publicKeyExists($publicKey);
+
+ /**
* Returns a list of resources which should be accessible for read-only public keys
*
* @return array
diff --git a/library/Imbo/Auth/AccessControl/Adapter/ArrayAdapter.php b/library/Imbo/Auth/AccessControl/Adapter/ArrayAdapter.php
index <HASH>..<HASH> 100644
--- a/library/Imbo/Auth/AccessControl/Adapter/ArrayAdapter.php
+++ b/library/Imbo/Auth/AccessControl/Adapter/ArrayAdapter.php
@@ -151,6 +151,13 @@ class ArrayAdapter extends AbstractAdapter implements AdapterInterface {
}
/**
+ * {@inheritdoc}
+ */
+ public function publicKeyExists($publicKey) {
+ return isset($this->keys[$publicKey]);
+ }
+
+ /**
* For compatibility reasons, where the configuration for Imbo has a set of
* 'public key' => 'private key' pairs - this method converts that config
* to an AccessControl-compatible format. Public key will equal the user.
diff --git a/library/Imbo/Auth/AccessControl/Adapter/MongoDB.php b/library/Imbo/Auth/AccessControl/Adapter/MongoDB.php
index <HASH>..<HASH> 100644
--- a/library/Imbo/Auth/AccessControl/Adapter/MongoDB.php
+++ b/library/Imbo/Auth/AccessControl/Adapter/MongoDB.php
@@ -206,9 +206,7 @@ class MongoDB extends AbstractAdapter implements MutableAdapterInterface {
* {@inheritdoc}
*/
public function addKeyPair($publicKey, $privateKey) {
- $publickeyExists = !!$this->getPublicKeyDetails($publicKey);
-
- if ($publickeyExists) {
+ if (!$this->publicKeyExists($publicKey)) {
throw new RuntimeException('Publickey already exist', 400);
}
@@ -265,6 +263,13 @@ class MongoDB extends AbstractAdapter implements MutableAdapterInterface {
}
/**
+ * {@inheritdoc}
+ */
+ public function publicKeyExists($publicKey) {
+ return !!$this->getPublicKeyDetails($publicKey);
+ }
+
+ /**
* Get the access control list for a given public key
*
* @param string $publicKey
|
Add public key existence check to access control adapter interface
|
imbo_imbo
|
train
|
500289660cc52ab09678fabf09ca99664d1c9200
|
diff --git a/src/Sculpin/Bundle/TwigBundle/TwigFormatter.php b/src/Sculpin/Bundle/TwigBundle/TwigFormatter.php
index <HASH>..<HASH> 100644
--- a/src/Sculpin/Bundle/TwigBundle/TwigFormatter.php
+++ b/src/Sculpin/Bundle/TwigBundle/TwigFormatter.php
@@ -54,13 +54,15 @@ class TwigFormatter implements FormatterInterface
{
try {
$this->arrayLoader->setTemplate($formatContext->templateId(), $this->massageTemplate($formatContext));
+ $data = $formatContext->data()->export();
$template = $this->twig->loadTemplate($formatContext->templateId());
- if (!count($blockNames = $template->getBlockNames())) {
- return array('content' => $template->render($formatContext->data()->export()));
+
+ if (!count($blockNames = $this->findAllBlocks($template, $data))) {
+ return array('content' => $template->render($data));
}
$blocks = array();
foreach ($blockNames as $blockName) {
- $blocks[$blockName] = $template->renderBlock($blockName, $formatContext->data()->export());
+ $blocks[$blockName] = $template->renderBlock($blockName, $data);
}
return $blocks;
@@ -69,6 +71,15 @@ class TwigFormatter implements FormatterInterface
}
}
+ public function findAllBlocks(\Twig_Template $template, array $context)
+ {
+ if (false !== $parent = $template->getParent($context)) {
+ return array_unique(array_merge($this->findAllBlocks($parent, $context), $template->getBlockNames()));
+ }
+
+ return $template->getBlockNames();
+ }
+
/**
* {@inheritdoc}
*/
|
Try to find all available blocks and render them all.
|
sculpin_sculpin
|
train
|
437211844df604454577f9e487a89721b397b4bf
|
diff --git a/cache.go b/cache.go
index <HASH>..<HASH> 100644
--- a/cache.go
+++ b/cache.go
@@ -10,11 +10,11 @@ import (
type pkgTypes struct {
ifaces map[string]string
- funcs map[string]string
+ funcs map[string]bool
}
func (p *pkgTypes) isFuncType(t string) bool {
- return p.funcs[t] != ""
+ return p.funcs[t]
}
func (p *pkgTypes) ifaceOf(t string) string {
@@ -23,9 +23,9 @@ func (p *pkgTypes) ifaceOf(t string) string {
func (p *pkgTypes) getTypes(pkg *types.Package) {
p.ifaces = make(map[string]string)
- p.funcs = make(map[string]string)
+ p.funcs = make(map[string]bool)
path := pkg.Path()
- addTypes := func(impPath string, ifs, funs map[string]string, top bool) {
+ addTypes := func(impPath string, ifs map[string]string, funs map[string]bool, top bool) {
fullName := func(name string) string {
if !top {
return impPath + "." + name
@@ -38,9 +38,9 @@ func (p *pkgTypes) getTypes(pkg *types.Package) {
p.ifaces[iftype] = fullName(name)
}
}
- for ftype, name := range funs {
+ for ftype := range funs {
// ignore non-exported func signatures too
- p.funcs[ftype] = fullName(name)
+ p.funcs[ftype] = true
}
}
for _, imp := range pkg.Imports() {
diff --git a/types.go b/types.go
index <HASH>..<HASH> 100644
--- a/types.go
+++ b/types.go
@@ -111,10 +111,9 @@ func anyInteresting(params *types.Tuple) bool {
return false
}
-func fromScope(scope *types.Scope) (ifaces, funcs map[string]string) {
+func fromScope(scope *types.Scope) (ifaces map[string]string, funcs map[string]bool) {
ifaces = make(map[string]string)
- funcs = make(map[string]string)
- ifaceFuncs := make(map[string]string)
+ funcs = make(map[string]bool)
for _, name := range scope.Names() {
tn, ok := scope.Lookup(name).(*types.TypeName)
if !ok {
@@ -132,11 +131,7 @@ func fromScope(scope *types.Scope) (ifaces, funcs map[string]string) {
if !anyInteresting(sign.Params()) {
continue
}
- s := signString(sign)
- if _, e := ifaceFuncs[s]; e {
- continue
- }
- ifaceFuncs[s] = tn.Name() + "." + f.Name()
+ funcs[signString(sign)] = true
}
s := funcMapString(iface)
if _, e := ifaces[s]; !e {
@@ -146,15 +141,7 @@ func fromScope(scope *types.Scope) (ifaces, funcs map[string]string) {
if !anyInteresting(x.Params()) {
continue
}
- s := signString(x)
- if _, e := funcs[s]; !e {
- funcs[s] = tn.Name()
- }
- }
- }
- for s, name := range ifaceFuncs {
- if _, e := funcs[s]; !e {
- funcs[s] = name
+ funcs[signString(x)] = true
}
}
return ifaces, funcs
|
Simplify func signature map
It's a set after all. Also remove the redundant ifaceFuncs.
|
mvdan_interfacer
|
train
|
22fac5c2e40b00911933040afaf9adcdd2c00ac2
|
diff --git a/src/support.js b/src/support.js
index <HASH>..<HASH> 100644
--- a/src/support.js
+++ b/src/support.js
@@ -13,24 +13,23 @@ jQuery.support = (function() {
clickFn,
div = document.createElement("div");
- // Preliminary tests
+ // Setup
div.setAttribute( "className", "t" );
div.innerHTML = " <link/><table></table><a href='/a'>a</a><input type='checkbox'/>";
+ // Support tests won't run in some limited or non-browser environments
all = div.getElementsByTagName("*");
a = div.getElementsByTagName("a")[ 0 ];
- a.style.cssText = "top:1px;float:left;opacity:.5";
-
- // Can't get basic test support
- if ( !all || !all.length ) {
+ if ( !all || !a || !all.length ) {
return {};
}
- // First batch of supports tests
+ // First batch of tests
select = document.createElement("select");
opt = select.appendChild( document.createElement("option") );
input = div.getElementsByTagName("input")[ 0 ];
+ a.style.cssText = "top:1px;float:left;opacity:.5";
support = {
// IE strips leading whitespace when .innerHTML is used
leadingWhitespace: ( div.firstChild.nodeType === 3 ),
@@ -72,7 +71,7 @@ jQuery.support = (function() {
// Test setAttribute on camelCase class. If it works, we need attrFixes when doing get/setAttribute (ie6/7)
getSetAttribute: div.className !== "t",
- // Tests for enctype support on a form(#6743)
+ // Tests for enctype support on a form (#6743)
enctype: !!document.createElement("form").enctype,
// Makes sure cloning an html5 element does not cause problems
|
Fix #<I>: be more kind to non-browser environments. Close gh-<I>.
|
jquery_jquery
|
train
|
3a1e45c4eeeb64ecf89230e552551b9cf612e39c
|
diff --git a/moztelemetry/histogram.py b/moztelemetry/histogram.py
index <HASH>..<HASH> 100644
--- a/moztelemetry/histogram.py
+++ b/moztelemetry/histogram.py
@@ -63,7 +63,7 @@ class Histogram:
histograms_definition = _fetch_histograms_definition(revision)
# TODO: implement centralized revision service which handles all the quirks...
- if name.startswith("USE_COUNTER_"):
+ if name.startswith("USE_COUNTER_") or name.startswith("USE_COUNTER2_"):
self.definition = histogram_tools.Histogram(name, {"kind": "boolean", "description": "", "expires_in_version": "never"})
else:
try:
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -18,7 +18,7 @@ class FetchExternal(setuptools.command.install.install):
setup(cmdclass={'install': FetchExternal},
name='python_moztelemetry',
- version='0.3.6.5',
+ version='0.3.6.7',
author='Roberto Agostino Vitillo',
author_email='rvitillo@mozilla.com',
description='Spark bindings for Mozilla Telemetry',
|
Add support for USE_COUNTER2_* histograms.
|
mozilla_python_moztelemetry
|
train
|
1cc190864d87d51ba776ce112455b2e03f34a14a
|
diff --git a/lib/gir_ffi/info_ext/i_signal_info.rb b/lib/gir_ffi/info_ext/i_signal_info.rb
index <HASH>..<HASH> 100644
--- a/lib/gir_ffi/info_ext/i_signal_info.rb
+++ b/lib/gir_ffi/info_ext/i_signal_info.rb
@@ -65,6 +65,15 @@ module GirFFI
end
types.unshift(:pointer).push(:pointer)
end
+
+ def return_ffi_type
+ result = super
+ if result == GLib::Boolean
+ :bool
+ else
+ result
+ end
+ end
end
end
end
diff --git a/lib/gir_ffi/type_map.rb b/lib/gir_ffi/type_map.rb
index <HASH>..<HASH> 100644
--- a/lib/gir_ffi/type_map.rb
+++ b/lib/gir_ffi/type_map.rb
@@ -18,7 +18,7 @@ module GirFFI
:array => :pointer,
:utf8 => :pointer,
:GType => gtype_type,
- :gboolean => :bool,
+ :gboolean => GLib::Boolean,
:gunichar => :uint32,
:gint8 => :int8,
:guint8 => :uint8,
diff --git a/test/gir_ffi/info_ext/i_signal_info_test.rb b/test/gir_ffi/info_ext/i_signal_info_test.rb
index <HASH>..<HASH> 100644
--- a/test/gir_ffi/info_ext/i_signal_info_test.rb
+++ b/test/gir_ffi/info_ext/i_signal_info_test.rb
@@ -2,6 +2,7 @@ require 'gir_ffi_test_helper'
describe GirFFI::InfoExt::ISignalInfo do
let(:klass) { Class.new do
+ include GirFFI::InfoExt::ICallableInfo
include GirFFI::InfoExt::ISignalInfo
end }
let(:signal_info) { klass.new }
@@ -55,4 +56,15 @@ describe GirFFI::InfoExt::ISignalInfo do
end
end
end
+
+ describe "#return_ffi_type" do
+ # FIXME: This is needed because callbacks are limited in the accepted
+ # types. This should be fixed in FFI.
+ it "returns :bool for the :gboolean type" do
+ stub(return_type_info = Object.new).to_ffitype { GLib::Boolean }
+ stub(signal_info).return_type { return_type_info }
+
+ signal_info.return_ffi_type.must_equal :bool
+ end
+ end
end
|
Make :gboolean to GLib::Boolean as an FFI type
|
mvz_gir_ffi
|
train
|
0a50285b6072dd0691ee89e68671316c1b8948d6
|
diff --git a/src/test/java/com/thinkaurelius/faunus/FaunusVertexTest.java b/src/test/java/com/thinkaurelius/faunus/FaunusVertexTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/thinkaurelius/faunus/FaunusVertexTest.java
+++ b/src/test/java/com/thinkaurelius/faunus/FaunusVertexTest.java
@@ -383,4 +383,20 @@ public class FaunusVertexTest extends BaseTest {
identicalStructure(graph, ExampleGraph.GRAPH_OF_THE_GODS_2);
reader.close();
}
+
+ public void testLargeProperty() throws Exception {
+ String value = "a24$%~bU*!";
+ for (int i = 0; i < 19; i++) {
+ value = value + value;
+ }
+ // a 5 million length string == ~10 books worth of data
+ assertTrue(value.length() > 5000000);
+
+ FaunusVertex vertex1 = new FaunusVertex(1l);
+ vertex1.setProperty("name", value);
+ ByteArrayOutputStream bytes = new ByteArrayOutputStream();
+ vertex1.write(new DataOutputStream(bytes));
+ FaunusVertex vertex2 = new FaunusVertex(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
+ assertEquals(vertex2.getProperty("name"), value);
+ }
}
\ No newline at end of file
|
Added a test case for large propert values -- 5M characters supported, <I>M throws buffer overflow problem in Kryo. Will not fix as that is an egregious amount of data. ~<I> books worth of character data.
|
thinkaurelius_titan
|
train
|
916e6cc45b073a9dace8671307b566616830e10f
|
diff --git a/d1_common_python/src/tests/test_utils.py b/d1_common_python/src/tests/test_utils.py
index <HASH>..<HASH> 100644
--- a/d1_common_python/src/tests/test_utils.py
+++ b/d1_common_python/src/tests/test_utils.py
@@ -34,13 +34,15 @@ import sys
import logging
import unittest
import codecs
+import os
from d1_common import xmlrunner
import d1_common.util
class TestUtils(unittest.TestCase):
def testEncodePathElement(self):
- ftest = 'd1_testdocs/encodingTestSet/testUnicodeStrings.utf8.txt'
+ fpath = os.path.abspath(os.path.dirname(__file__))
+ ftest = os.path.join(fpath, 'd1_testdocs/encodingTestSet/testUnicodeStrings.utf8.txt')
testfile = codecs.open(ftest, encoding='utf-8', mode='r')
testrows = testfile.readlines()
for row in testrows:
@@ -52,7 +54,8 @@ class TestUtils(unittest.TestCase):
self.assertEqual(e, d1_common.util.encodePathElement(v))
def testEncodeQueryElement(self):
- ftest = 'd1_testdocs/encodingTestSet/testUnicodeStrings.utf8.txt'
+ fpath = os.path.abspath(os.path.dirname(__file__))
+ ftest = os.path.join(fpath, 'd1_testdocs/encodingTestSet/testUnicodeStrings.utf8.txt')
testfile = codecs.open(ftest, encoding='utf-8', mode='r')
testrows = testfile.readlines()
for row in testrows:
|
Removed relative path dependency for encoding tests
|
DataONEorg_d1_python
|
train
|
47c705be9c8fb1b6e02f2fe137a13a5c2818deda
|
diff --git a/lib/websearch_webinterface.py b/lib/websearch_webinterface.py
index <HASH>..<HASH> 100644
--- a/lib/websearch_webinterface.py
+++ b/lib/websearch_webinterface.py
@@ -94,6 +94,7 @@ from invenio.webuser import getUid, page_not_authorized, get_user_preferences, \
collect_user_info, logoutUser, isUserSuperAdmin
from invenio.websubmit_webinterface import WebInterfaceFilesPages
from invenio.webcomment_webinterface import WebInterfaceCommentsPages
+from invenio.weblinkback_webinterface import WebInterfaceRecordLinkbacksPages
from invenio.bibcirculation_webinterface import WebInterfaceHoldingsPages
from invenio.webpage import page, pageheaderonly, create_error_box
from invenio.messages import gettext_set_language
@@ -988,7 +989,7 @@ class WebInterfaceRecordPages(WebInterfaceDirectory):
_exports = ['', 'files', 'reviews', 'comments', 'usage',
'references', 'export', 'citations', 'holdings', 'edit',
- 'keywords', 'multiedit', 'merge', 'plots']
+ 'keywords', 'multiedit', 'merge', 'plots', 'linkbacks']
#_exports.extend(output_formats)
@@ -1009,6 +1010,7 @@ class WebInterfaceRecordPages(WebInterfaceDirectory):
self.export = WebInterfaceRecordExport(self.recid, self.format)
self.edit = WebInterfaceEditPages(self.recid)
self.merge = WebInterfaceMergePages(self.recid)
+ self.linkbacks = WebInterfaceRecordLinkbacksPages(self.recid)
return
@@ -1084,7 +1086,7 @@ class WebInterfaceRecordRestrictedPages(WebInterfaceDirectory):
_exports = ['', 'files', 'reviews', 'comments', 'usage',
'references', 'export', 'citations', 'holdings', 'edit',
- 'keywords', 'multiedit', 'merge', 'plots']
+ 'keywords', 'multiedit', 'merge', 'plots', 'linkbacks']
#_exports.extend(output_formats)
@@ -1105,6 +1107,7 @@ class WebInterfaceRecordRestrictedPages(WebInterfaceDirectory):
self.export = WebInterfaceRecordExport(self.recid, self.format)
self.edit = WebInterfaceEditPages(self.recid)
self.merge = WebInterfaceMergePages(self.recid)
+ self.linkbacks = WebInterfaceRecordLinkbacksPages(self.recid)
return
@@ -1476,7 +1479,7 @@ class WebInterfaceSearchInterfacePages(WebInterfaceDirectory):
try:
if path[1] in ['', 'files', 'reviews', 'comments', 'usage',
'references', 'citations', 'holdings', 'edit',
- 'keywords', 'multiedit', 'merge', 'plots']:
+ 'keywords', 'multiedit', 'merge', 'plots', 'linkbacks']:
tab = path[1]
elif path[1] == 'export':
tab = ''
diff --git a/lib/websearchadminlib.py b/lib/websearchadminlib.py
index <HASH>..<HASH> 100644
--- a/lib/websearchadminlib.py
+++ b/lib/websearchadminlib.py
@@ -3399,6 +3399,7 @@ def get_detailed_page_tabs(colID=None, recID=None, ln=CFG_SITE_LANG):
'files' : {'label': _('Files'), 'visible': False, 'enabled': True, 'order': 7},
'plots' : {'label': _('Plots'), 'visible': False, 'enabled': True, 'order': 8},
'holdings' : {'label': _('Holdings'), 'visible': False, 'enabled': True, 'order': 9},
+ 'linkbacks' : {'label': _('Linkbacks'), 'visible': False, 'enabled': True, 'order': 10},
}
res = run_sql("SELECT tabs FROM collectiondetailedrecordpagetabs " + \
|
WebLinkback: initial release
* Initial release of the trackback/linkback support.
(closes #<I>) (closes #<I>) (references #<I>)
|
inveniosoftware_invenio-records
|
train
|
ee013fbbd4be5c84d5314611c637a1b1a8f66953
|
diff --git a/src/Way/Generators/Commands/ModelGeneratorCommand.php b/src/Way/Generators/Commands/ModelGeneratorCommand.php
index <HASH>..<HASH> 100644
--- a/src/Way/Generators/Commands/ModelGeneratorCommand.php
+++ b/src/Way/Generators/Commands/ModelGeneratorCommand.php
@@ -5,19 +5,19 @@ use Symfony\Component\Console\Input\InputArgument;
class ModelGeneratorCommand extends GeneratorCommand {
- /**
- * The console command name.
- *
- * @var string
- */
- protected $name = 'generate:model';
+ /**
+ * The console command name.
+ *
+ * @var string
+ */
+ protected $name = 'generate:model';
- /**
- * The console command description.
- *
- * @var string
- */
- protected $description = 'Generate a model';
+ /**
+ * The console command description.
+ *
+ * @var string
+ */
+ protected $description = 'Generate a model';
/**
* The path where the file will be created
@@ -41,29 +41,29 @@ class ModelGeneratorCommand extends GeneratorCommand {
];
}
- /**
- * Get the console command arguments.
- *
- * @return array
- */
- protected function getArguments()
- {
- return array(
- array('modelName', InputArgument::REQUIRED, 'The name of the desired Eloquent model')
- );
- }
+ /**
+ * Get the console command arguments.
+ *
+ * @return array
+ */
+ protected function getArguments()
+ {
+ return [
+ ['modelName', InputArgument::REQUIRED, 'The name of the desired Eloquent model']
+ ];
+ }
- /**
- * Get the console command options.
- *
- * @return array
- */
- protected function getOptions()
- {
- return array(
- array('path', null, InputOption::VALUE_OPTIONAL, 'Where should the file be created?', app_path('models')),
- array('templatePath', null, InputOption::VALUE_OPTIONAL, 'What is the path to the template for this generator?', __DIR__.'/../templates/model.txt')
- );
- }
+ /**
+ * Get the console command options.
+ *
+ * @return array
+ */
+ protected function getOptions()
+ {
+ return [
+ ['path', null, InputOption::VALUE_OPTIONAL, 'Where should the file be created?', app_path('models')],
+ ['templatePath', null, InputOption::VALUE_OPTIONAL, 'What is the path to the template for this generator?', __DIR__ . '/../templates/model.txt']
+ ];
+ }
}
|
Formatting and <I> arrays
|
JeffreyWay_Laravel-4-Generators
|
train
|
a4dd6745059a8690ca864df04a9e6b122a726abd
|
diff --git a/tests/ScaffoldTest.php b/tests/ScaffoldTest.php
index <HASH>..<HASH> 100644
--- a/tests/ScaffoldTest.php
+++ b/tests/ScaffoldTest.php
@@ -3,7 +3,7 @@
namespace Tests;
use Symfony\Component\Console\Application;
-use TightenCo\Jigsaw\Scaffold\PresetScaffold;
+use TightenCo\Jigsaw\Scaffold\BasicScaffold;
use org\bovigo\vfs\vfsStream;
class Scaffold extends TestCase
@@ -34,7 +34,7 @@ class Scaffold extends TestCase
];
sort($base_files);
- $scaffold = $this->app->make(PresetScaffold::class);
+ $scaffold = $this->app->make(BasicScaffold::class);
$this->assertEquals($base_files, $scaffold->getSiteFiles()->sort()->values()->toArray());
}
@@ -48,7 +48,7 @@ class Scaffold extends TestCase
self::EXISTING_SITE_FILES,
['archived' => []]
));
- $scaffold = $this->app->make(PresetScaffold::class)->setBase($vfs->url());
+ $scaffold = $this->app->make(BasicScaffold::class)->setBase($vfs->url());
$scaffold->archiveExistingSite();
@@ -65,7 +65,7 @@ class Scaffold extends TestCase
public function will_create_archived_directory_if_none_exists_when_archiving_site()
{
$vfs = vfsStream::setup('virtual', null, self::EXISTING_SITE_FILES);
- $scaffold = $this->app->make(PresetScaffold::class)->setBase($vfs->url());
+ $scaffold = $this->app->make(BasicScaffold::class)->setBase($vfs->url());
$scaffold->archiveExistingSite();
@@ -85,7 +85,7 @@ class Scaffold extends TestCase
self::EXISTING_SITE_FILES,
['archived' => ['old-file.md' => '']]
));
- $scaffold = $this->app->make(PresetScaffold::class)->setBase($vfs->url());
+ $scaffold = $this->app->make(BasicScaffold::class)->setBase($vfs->url());
$this->assertNotNull($vfs->getChild('config.php'));
$this->assertNotNull($vfs->getChild('archived/old-file.md'));
@@ -103,7 +103,7 @@ class Scaffold extends TestCase
$subset_of_base_files = self::EXISTING_SITE_FILES;
unset($subset_of_base_files['bootstrap.php']);
$vfs = vfsStream::setup('virtual', null, $subset_of_base_files);
- $scaffold = $this->app->make(PresetScaffold::class)->setBase($vfs->url());
+ $scaffold = $this->app->make(BasicScaffold::class)->setBase($vfs->url());
$scaffold->archiveExistingSite();
@@ -120,7 +120,7 @@ class Scaffold extends TestCase
public function can_delete_existing_files_and_directories()
{
$vfs = vfsStream::setup('virtual', null, self::EXISTING_SITE_FILES);
- $scaffold = $this->app->make(PresetScaffold::class)->setBase($vfs->url());
+ $scaffold = $this->app->make(BasicScaffold::class)->setBase($vfs->url());
$scaffold->deleteExistingSite();
@@ -137,7 +137,7 @@ class Scaffold extends TestCase
$subset_of_base_files = self::EXISTING_SITE_FILES;
unset($subset_of_base_files['bootstrap.php']);
$vfs = vfsStream::setup('virtual', null, $subset_of_base_files);
- $scaffold = $this->app->make(PresetScaffold::class)->setBase($vfs->url());
+ $scaffold = $this->app->make(BasicScaffold::class)->setBase($vfs->url());
$scaffold->deleteExistingSite();
|
Use BasicScaffold in ScaffoldTest
|
tightenco_jigsaw
|
train
|
8c96909414b8e156e69712a58a25069b87111103
|
diff --git a/test/clockTest.js b/test/clockTest.js
index <HASH>..<HASH> 100644
--- a/test/clockTest.js
+++ b/test/clockTest.js
@@ -56,14 +56,6 @@ function describeClockContract(name, ctr, intervalFn, timeFn, greaterThan) {
});
}
-describeClockContract('date based', zeit.DateClock, function (i) {return 10 * i;}, function () {
- return new Date();
-}, function(a, b) {return a > b});
-
-describeClockContract('moment based', zeit.MomentClock, function (i) {return moment.duration(10 * i);}, function () {
- return moment();
-}, function(a, b) {return a > b.asMilliseconds();});
-
function describeTestClockContract(name, ctrFn, timeAtSeconds, durationOfSeconds) {
describe(name, function () {
@@ -129,7 +121,15 @@ function describeTestClockContract(name, ctrFn, timeAtSeconds, durationOfSeconds
assert.momentEql(defaultClock.now(), timeAtSeconds(1));
});
});
-};
+}
+
+describeClockContract('date based', zeit.DateClock, function (i) {return 10 * i;}, function () {
+ return new Date();
+}, function(a, b) {return a >= b});
+
+describeClockContract('moment based', zeit.MomentClock, function (i) {return moment.duration(10 * i);}, function () {
+ return moment();
+}, function(a, b) {return a >= b.asMilliseconds();});
describeTestClockContract('stub moment clock', zeit.StubMomentClock, function (seconds) {
return moment(seconds * 1000)
|
added tests for get and set timeout/interval
|
daviddenton_zeit
|
train
|
601e0e5b2fcd1466e34d7bdb731f87bc074d83a2
|
diff --git a/lib/scoped_search/query_builder.rb b/lib/scoped_search/query_builder.rb
index <HASH>..<HASH> 100644
--- a/lib/scoped_search/query_builder.rb
+++ b/lib/scoped_search/query_builder.rb
@@ -89,11 +89,11 @@ module ScopedSearch
def order_by(order, &block)
order ||= definition.default_order
return nil if order.blank?
- field_name, direction_name = order.split(/\s+/, 2)
+ field_name, direction_name = order.to_s.split(/\s+/, 2)
field = definition.field_by_name(field_name)
raise ScopedSearch::QueryNotSupported, "the field '#{field_name}' in the order statement is not valid field for search" unless field
sql = field.to_sql(&block)
- direction = (direction_name.downcase.eql?('desc')) ? " DESC" : " ASC"
+ direction = (!direction_name.nil? && direction_name.downcase.eql?('desc')) ? " DESC" : " ASC"
order = sql + direction
return order
diff --git a/spec/integration/string_querying_spec.rb b/spec/integration/string_querying_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/integration/string_querying_spec.rb
+++ b/spec/integration/string_querying_spec.rb
@@ -226,6 +226,10 @@ ScopedSearch::RSpec::Database.test_databases.each do |db|
Set.new(distinct_search.map(&:explicit)).should == Set['baz', nil]
end
+
+ it 'should order using symbol' do
+ @class.search_for('',:order => :string).first.string.should eql('bar')
+ end
end
end
end
|
Allow ordering by field symbol (regression)
Fixes regression introduced by fix for Issue #<I> in commit
c4a<I>de<I>c6d<I>aa<I>afd<I>e<I>dda<I>. It wasn't documented or
tested, but previously we supported using a field symbol for the "order"
option of search_for. This commit adds that capability back (and a test).
|
wvanbergen_scoped_search
|
train
|
b7652563b1658b36b05f4af3c7098f5eba162b6f
|
diff --git a/integration/wai-aria.spec.js b/integration/wai-aria.spec.js
index <HASH>..<HASH> 100644
--- a/integration/wai-aria.spec.js
+++ b/integration/wai-aria.spec.js
@@ -260,8 +260,16 @@ describe('WAI ARIA Spec', () => {
}
});
- it('If the accordion panel associated with an accordion header is visible, and if the accordion does not permit the panel to be collapsed, the header button element has aria-disabled set to true.', () => {
- // todo
+ it('If the accordion panel associated with an accordion header is visible, and if the accordion does not permit the panel to be collapsed, the header button element has aria-disabled set to true.', async () => {
+ const [firstHeadingHandle] = await evaluateHeadings();
+ await firstHeadingHandle.click();
+
+ const headingAriaDisabled = await page.evaluate(
+ heading => heading.getAttribute('aria-disabled'),
+ firstHeadingHandle,
+ );
+
+ expect(headingAriaDisabled).toEqual('true');
});
it('Optionally, each element that serves as a container for panel content has role region and aria-labelledby with a value that refers to the button that controls display of the panel.', async () => {
|
Add integration test for ariaDisabled
|
springload_react-accessible-accordion
|
train
|
e6e6eb6b784f19b7748e649d8b5e7828e62a9bac
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,41 +1,37 @@
import re
from setuptools import find_packages, setup
-LONG_DESCRIPTION = """Django-Prometheus
+with open("README.md") as fl:
+ LONG_DESCRIPTION = fl.read()
-This library contains code to expose some monitoring metrics relevant
-to Django internals so they can be monitored by Prometheus.io.
-
-See https://github.com/korfuri/django-prometheus for usage
-instructions.
-"""
def get_version():
- version_file = open('django_prometheus/__init__.py', 'r').read()
- version_match = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', version_file, re.MULTILINE)
+ version_file = open("django_prometheus/__init__.py", "r").read()
+ version_match = re.search(
+ r'^__version__ = [\'"]([^\'"]*)[\'"]', version_file, re.MULTILINE
+ )
if version_match:
return version_match.group(1)
- raise RuntimeError('Unable to find version string.')
+ raise RuntimeError("Unable to find version string.")
+
setup(
name="django-prometheus",
version=get_version(),
author="Uriel Corfa",
author_email="uriel@corfa.fr",
- description=(
- "Django middlewares to monitor your application with Prometheus.io."),
+ description=("Django middlewares to monitor your application with Prometheus.io."),
license="Apache",
keywords="django monitoring prometheus",
url="http://github.com/korfuri/django-prometheus",
packages=find_packages(),
test_suite="django_prometheus.tests",
long_description=LONG_DESCRIPTION,
- tests_require=['pytest', 'pytest-django'],
+ long_description_content_type="text/markdown",
+ tests_require=["pytest", "pytest-django"],
setup_requires=["pytest-runner"],
options={"bdist_wheel": {"universal": "1"}},
- install_requires=[
- "prometheus-client>=0.7",
- ],
+ install_requires=["prometheus-client>=0.7",],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
|
Use README.md as a long description
|
korfuri_django-prometheus
|
train
|
9ddb92b9e72b332b77beb53aceec7151d4bb2100
|
diff --git a/lib/controller/page/type/page.summary/index.js b/lib/controller/page/type/page.summary/index.js
index <HASH>..<HASH> 100644
--- a/lib/controller/page/type/page.summary/index.js
+++ b/lib/controller/page/type/page.summary/index.js
@@ -299,6 +299,24 @@ module.exports = class SummaryController extends CommonController {
const emailFrom = defaultEmailAddress ? formatEmail(defaultEmailAddress) : '"Form Builder" <form-builder-team@digital.justice.gov.uk>'
const emailTo = formatEmail(SERVICE_OUTPUT_EMAIL)
+ let toTeamParts = emailTo.split(/,\s*/)
+
+ toTeamParts = toTeamParts
+ .map((address) => {
+ address = address.trim()
+ if (!address.match(/^e\[.+\]$/)) {
+ return address
+ }
+ const addressLookup = address.replace(/^e\[(.+)\]$/, '$1')
+ return CONSTANTS[addressLookup] || address
+ })
+ .filter((address) => address)
+ .map((address) => address.split(/,\s*/))
+
+ // flatten
+ toTeamParts = [].concat(...toTeamParts)
+ toTeamParts = [...new Set(toTeamParts)]
+
const submission = {
type: 'csv',
recipientType: 'team',
@@ -312,7 +330,9 @@ module.exports = class SummaryController extends CommonController {
attachments: []
}
- submissions.push(submission)
+ toTeamParts.forEach((to) => {
+ submissions.push(Object.assign({}, submission, { to }))
+ })
}
async postValidation (pageInstance, userData) {
diff --git a/lib/controller/page/type/page.summary/index.unit.spec.js b/lib/controller/page/type/page.summary/index.unit.spec.js
index <HASH>..<HASH> 100644
--- a/lib/controller/page/type/page.summary/index.unit.spec.js
+++ b/lib/controller/page/type/page.summary/index.unit.spec.js
@@ -528,3 +528,98 @@ test('actions contains email and json actions', async t => {
submitterClientSpy.resetHistory()
t.end()
})
+
+test('split emails from json, csv and normal submissions', async t => {
+ submitterClientSpy.resetHistory()
+
+ const PageSummaryController = proxyquire('.', {
+ '~/fb-runner-node/presenter/pdf-payload': { pdfPayload: pdfPayloadStub },
+ '~/fb-runner-node/presenter/submission-data-with-labels': { submissionDataWithLabels: submissionDataWithLabelsStub },
+ '~/fb-runner-node/page/check-submits/check-submits': { checkSubmits: checkSubmitsStub },
+ '~/fb-runner-node/constants/constants': {
+ SERVICE_OUTPUT_JSON_ENDPOINT: 'https://example.com/adaptor',
+ SERVICE_OUTPUT_JSON_KEY: 'shared_key',
+ SERVICE_OUTPUT_EMAIL: 'bob@gov.uk,bob@justice.gov.uk',
+ SERVICE_OUTPUT_CSV: 'true'
+ }
+ })
+
+ getInstancePropertyStub.withArgs('service', 'emailTemplateUser').returns('you ({firstname}) submitted!')
+
+ const userdata = getUserDataMethods({
+ input: {
+ firstname: 'bob',
+ email: 'test@emample.com'
+ }
+ })
+
+ const pageSummaryController = new PageSummaryController()
+
+ await pageSummaryController.postValidation({}, userdata)
+
+ const submissions = submitterClientSpy.getCall(0).args[0]
+
+ t.equals(submissions.actions.length, 6)
+
+ // remove deprecated fields
+ submissions.actions.map(action => {
+ delete action.attachments
+ delete action.user_answers
+ })
+
+ t.deepEquals(submissions.actions, [
+ {
+ recipientType: 'team',
+ type: 'email',
+ from: '"Form Builder" <form-builder-team@digital.justice.gov.uk>',
+ subject: 'undefined submission',
+ email_body: 'user bob submitted!',
+ include_pdf: true,
+ include_attachments: true,
+ to: 'bob@gov.uk'
+ }, {
+ recipientType: 'team',
+ type: 'email',
+ from: '"Form Builder" <form-builder-team@digital.justice.gov.uk>',
+ subject: 'undefined submission',
+ email_body: 'user bob submitted!',
+ include_pdf: true,
+ include_attachments: true,
+ to: 'bob@justice.gov.uk'
+ }, {
+ recipientType: 'user',
+ type: 'email',
+ from: '"Form Builder" <form-builder-team@digital.justice.gov.uk>',
+ subject: 'Your undefined submission',
+ email_body: 'you (bob) submitted!',
+ include_pdf: false,
+ include_attachments: false,
+ to: 'test@emample.com'
+ }, {
+ type: 'json',
+ url: 'https://example.com/adaptor',
+ encryption_key: 'shared_key'
+ }, {
+ type: 'csv',
+ recipientType: 'team',
+ from: '"Form Builder" <form-builder-team@digital.justice.gov.uk>',
+ to: 'bob@gov.uk',
+ email_body: '',
+ include_pdf: false,
+ subject: 'undefined submission',
+ include_attachments: true
+ }, {
+ type: 'csv',
+ recipientType: 'team',
+ from: '"Form Builder" <form-builder-team@digital.justice.gov.uk>',
+ to: 'bob@justice.gov.uk',
+ email_body: '',
+ include_pdf: false,
+ subject: 'undefined submission',
+ include_attachments: true
+ }
+ ])
+
+ submitterClientSpy.resetHistory()
+ t.end()
+})
|
Make sure CSV submissions can accept multiple destinations
In the runner node the to field for csv attachments does not split
multiple email addresses on the comma therefore when the payload reaches
AWS SES it throws an InvalidParameter exception.
This causes the submission to get put on the failed queue which in turn
keeps trying a number of times.
|
ministryofjustice_fb-runner-node
|
train
|
a25f5f2ec126e7ce89beb78829001e180e301e54
|
diff --git a/src/service.js b/src/service.js
index <HASH>..<HASH> 100644
--- a/src/service.js
+++ b/src/service.js
@@ -148,7 +148,7 @@
script.async = true;
script.defer = true;
script.src = 'https://www.google.com/recaptcha/api.js?onload='+provider.onLoadFunctionName+'&render=explicit';
- $document.find('body').append(script);
+ $document.find('body')[0].appendChild(script);
}
return {
diff --git a/tests/service_test.js b/tests/service_test.js
index <HASH>..<HASH> 100644
--- a/tests/service_test.js
+++ b/tests/service_test.js
@@ -103,9 +103,9 @@ describe('service', function () {
.given.onLoadFunctionName(funcName = 'my-func')
.given.mockDocument({
find: function () {
- return {
- append: appendSpy
- };
+ return [{
+ appendChild: appendSpy
+ }];
}
})
.given.mockWindow({
|
fix broken angular translate attr for meta tags in template
* when use translate-attr-content from angular for meta content, meta content remains empty when using append
* this issue might be related to jQuery, since [0] is the real DOM, without the [0], it's the jQuery wrapped DOM
* appendChild on the native DOM to avoid this angular digest issue
|
VividCortex_angular-recaptcha
|
train
|
8735378d33fdec8155709d2eeb44ad933a3b3936
|
diff --git a/src/main/java/au/com/southsky/jfreesane/SaneSession.java b/src/main/java/au/com/southsky/jfreesane/SaneSession.java
index <HASH>..<HASH> 100644
--- a/src/main/java/au/com/southsky/jfreesane/SaneSession.java
+++ b/src/main/java/au/com/southsky/jfreesane/SaneSession.java
@@ -435,7 +435,7 @@ public class SaneSession implements Closeable {
int offset = 0;
int bytesRead = 0;
- while ((bytesRead = readRecord(bigArray, offset)) > 0) {
+ while ((bytesRead = readRecord(bigArray, offset)) >= 0) {
offset += bytesRead;
}
|
Empty records are perfectly valid, they do not signify an end of stream. JFreeSane incorrectly thought they signified an end of stream.
|
sjamesr_jfreesane
|
train
|
cdb2c9fcf1d6ee75623de3e3c7cf269a25bac83b
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -18,7 +18,8 @@ this interface will replace the current epic api""",
include_package_data=True,
zip_safe=False,
install_requires=[
- 'nose'
+ 'nose',
+ 'requests'
],
entry_points="""
# -*- Entry points: -*-
|
Added requests library to setup requires section
|
EUDAT-B2SAFE_B2HANDLE
|
train
|
280c54d78a72f37154fc5bbb2578f84fae0ac6f1
|
diff --git a/tile_generator/pcf.py b/tile_generator/pcf.py
index <HASH>..<HASH> 100755
--- a/tile_generator/pcf.py
+++ b/tile_generator/pcf.py
@@ -150,7 +150,14 @@ def install_cmd(product, version):
payload = {
'to_version': version
}
- opsmgr.put('/api/installation_settings/products/' + matches[0]['guid'], payload)
+ response = opsmgr.put('/api/installation_settings/products/' + matches[0]['guid'], payload, check=False)
+ if response.status_code == 422:
+ errors = response.json()["errors"]
+ for error in errors:
+ if error.endswith(' is already in use.'):
+ print('-','version already installed')
+ return
+ opsmgr.check_response(response)
@cli.command('uninstall')
@click.argument('product')
|
pcf install succeeds when version is already installed
|
cf-platform-eng_tile-generator
|
train
|
82fc62aed230ab9441ecb4a2f44be64a155ac9a6
|
diff --git a/pkg/etwlogrus/hook.go b/pkg/etwlogrus/hook.go
index <HASH>..<HASH> 100644
--- a/pkg/etwlogrus/hook.go
+++ b/pkg/etwlogrus/hook.go
@@ -64,12 +64,10 @@ func (h *Hook) Fire(e *logrus.Entry) error {
// We could try to map Logrus levels to ETW levels, but we would lose some
// fidelity as there are fewer ETW levels. So instead we use the level
// directly.
- h.provider.WriteEvent(
+ return h.provider.WriteEvent(
"LogrusEntry",
etw.WithEventOpts(etw.WithLevel(level)),
fields)
-
- return nil
}
// Close cleans up the hook and closes the ETW provider.
|
Return error from WriteEvent in Logrus hook
|
Microsoft_go-winio
|
train
|
77985fdc98e99e0cf7aa3884e27b57c7afd31309
|
diff --git a/src/DataTablesEditor.php b/src/DataTablesEditor.php
index <HASH>..<HASH> 100644
--- a/src/DataTablesEditor.php
+++ b/src/DataTablesEditor.php
@@ -327,6 +327,29 @@ abstract class DataTablesEditor
}
/**
+ * Get dataTables model.
+ *
+ * @return Model
+ */
+ public function getModel()
+ {
+ return $this->model;
+ }
+
+ /**
+ * Set the dataTables model on runtime.
+ *
+ * @param Model $model
+ * @return DataTablesEditor
+ */
+ public function setModel(Model $model)
+ {
+ $this->model = $model;
+
+ return $this;
+ }
+
+ /**
* Display dataTables editor validation errors.
*
* @param Validator $validator
|
Add model fluent getter and setter.
Fix #<I>.
|
yajra_laravel-datatables-editor
|
train
|
4aef0e82ed1042d500c74bf50ab60a8404354060
|
diff --git a/readabilitySAX.js b/readabilitySAX.js
index <HASH>..<HASH> 100644
--- a/readabilitySAX.js
+++ b/readabilitySAX.js
@@ -142,7 +142,6 @@ var tagsToSkip = {__proto__:null,aside:true,footer:true,head:true,nav:true,noscr
unpackDivs = {__proto__:embeds,div:true,img:true},
noContent = {__proto__:formatTags,font:false,input:false,link:false,meta:false,span:false},
formatTags = {__proto__:null,br:new Element("br"),hr:new Element("hr")},
- tagsToScore = {__proto__:null,p:true,pre:true,td:true},
headerTags = {__proto__:null,h1:true,h2:true,h3:true,h4:true,h5:true,h6:true},
newLinesAfter = {__proto__:headerTags,br:true,li:true,p:true},
@@ -524,7 +523,7 @@ Readability.prototype.onclosetag = function(tagName){
elem.parent.children.push(elem);
//should node be scored?
- if(tagName in tagsToScore);
+ if(tagName === "p" || tagName === "pre" || tagName === "td");
else if(tagName === "div"){
//check if div should be converted to a p
for(i = 0, j = divToPElements.length; i < j; i++){
@@ -556,11 +555,11 @@ var getCandidateSiblings = function(candidate){
if(typeof childs[i] === "string") continue;
if(childs[i] === candidate);
- else if(candidate.attributes["class"] === childs[i].attributes["class"]){
+ else if(candidate.elementData === childs[i].elementData){ //TODO: just the class name should be checked
if((childs[i].totalScore + candidate.totalScore * .2) >= siblingScoreThreshold){
if(childs[i].name !== "p") childs[i].name = "div";
}
- else continue;
+ else continue;
} else if(childs[i].name === "p"){
if(childs[i].info.textLength >= 80 && childs[i].info.density < .25);
else if(childs[i].info.textLength < 80 && childs[i].info.density === 0 && re_sentence.test(childs[i].toString()));
|
Inlined tagsToScore, fixed bug where the attribute `class` was checked
The `class` attribute isn't present anymore, the check would have
always returned true (because undefined === undefined)
The new test is much more restrictive and doesn't reflect the behavior
of Readability, but it's better nevertheless.
|
fb55_readabilitySAX
|
train
|
76036144e4b0049575d5c2f0c64d6365529ee9e7
|
diff --git a/astroid/test_utils.py b/astroid/test_utils.py
index <HASH>..<HASH> 100644
--- a/astroid/test_utils.py
+++ b/astroid/test_utils.py
@@ -29,7 +29,7 @@ def require_version(minver: str = "0.0.0", maxver: str = "4.0.0") -> Callable:
Skip the test if older.
"""
- def parse(python_version: str) -> Tuple[int]:
+ def parse(python_version: str) -> Tuple[int, ...]:
try:
return tuple(int(v) for v in python_version.split("."))
except ValueError as e:
|
Fix got "Tuple[int, ...]", expected "Tuple[int]"
|
PyCQA_astroid
|
train
|
81560a139b0b07def52dd8af9bab71019ccc7743
|
diff --git a/lib/topsy/page.rb b/lib/topsy/page.rb
index <HASH>..<HASH> 100755
--- a/lib/topsy/page.rb
+++ b/lib/topsy/page.rb
@@ -13,6 +13,7 @@ module Topsy
class Page < Hashie::Dash
property :total
+ property :trackback_total
property :list
property :page
property :perpage
diff --git a/test/fixtures/trackbacks.json b/test/fixtures/trackbacks.json
index <HASH>..<HASH> 100644
--- a/test/fixtures/trackbacks.json
+++ b/test/fixtures/trackbacks.json
@@ -9,6 +9,7 @@
},
"response": {
"page": 1,
+ "trackback_total": 3,
"total": 3,
"perpage": 10,
"topsy_trackback_url": "http://topsy.com/tb/orrka.com/",
diff --git a/test/test_topsy.rb b/test/test_topsy.rb
index <HASH>..<HASH> 100644
--- a/test/test_topsy.rb
+++ b/test/test_topsy.rb
@@ -182,6 +182,7 @@ class TestTopsy < Test::Unit::TestCase
results = Topsy.trackbacks("http://orrka.com")
results.class.should == Topsy::Page
results.total.should == 3
+ results.trackback_total.should == 3
results.list.first.date.year.should == 2009
results.list.first.permalink_url.should == "http://twitter.com/orrka/status/6435248067"
results.list.first.date.should == Time.at(1260204073)
|
Added trackback_total property to Page
|
pengwynn_topsy
|
train
|
e48d8ce2234f3de7a0dc645eeee15d5ceec083ef
|
diff --git a/templatetags/analytics.py b/templatetags/analytics.py
index <HASH>..<HASH> 100644
--- a/templatetags/analytics.py
+++ b/templatetags/analytics.py
@@ -38,11 +38,14 @@ class AnalyticsNode(template.Node):
code = self.code
else:
return ''
-
- t = loader.get_template('google_analytics/analytics_template.html')
- c = Context({
- 'analytics_code': code,
- })
- return t.render(c)
+
+ if code.strip() != '':
+ t = loader.get_template('google_analytics/analytics_template.html')
+ c = Context({
+ 'analytics_code': code,
+ })
+ return t.render(c)
+ else:
+ return ''
register.tag('analytics', do_get_analytics)
|
If the code is left blank, nothing should be rendered
|
clintecker_django-google-analytics
|
train
|
6b5497f86e768bfcdedd28d7841d9fab8c52fabb
|
diff --git a/lib/resque_cleaner.rb b/lib/resque_cleaner.rb
index <HASH>..<HASH> 100644
--- a/lib/resque_cleaner.rb
+++ b/lib/resque_cleaner.rb
@@ -118,17 +118,20 @@ module Resque
if !block_given? || block.call(job)
index = @limiter.start_index + i - requeued
- if clear_after_requeue
- # remove job
- value = redis.lindex(:failed, index)
- redis.lrem(:failed, 1, value)
- else
- # mark retried
- job['retried_at'] = Time.now.strftime("%Y/%m/%d %H:%M:%S")
- redis.lset(:failed, @limiter.start_index+i, Resque.encode(job))
+ value = redis.lindex(:failed, index)
+ redis.multi do
+ Job.create(queue||job['queue'], job['payload']['class'], *job['payload']['args'])
+
+ if clear_after_requeue
+ # remove job
+ redis.lrem(:failed, 1, value)
+ else
+ # mark retried
+ job['retried_at'] = Time.now.strftime("%Y/%m/%d %H:%M:%S")
+ redis.lset(:failed, @limiter.start_index+i, Resque.encode(job))
+ end
end
- Job.create(queue||job['queue'], job['payload']['class'], *job['payload']['args'])
requeued += 1
end
end
|
Use a transaction to wrap this up more safely.
Also reverse the logic so the job gets created first in the code
and then removed. Given that it's now in a transaction this is
a minor point, but it reads more reasonably.
|
ono_resque-cleaner
|
train
|
eb62fb814c2ab811182dda6b19832bacd0af6f4a
|
diff --git a/src/sos/controller.py b/src/sos/controller.py
index <HASH>..<HASH> 100644
--- a/src/sos/controller.py
+++ b/src/sos/controller.py
@@ -8,7 +8,7 @@ import zmq
import time
import threading
from collections import defaultdict
-from .utils import env, ProcessKilled
+from .utils import env, ProcessKilled, get_localhost_ip
from .signatures import StepSignatures, WorkflowSignatures
from .messages import encode_msg, decode_msg
@@ -453,9 +453,12 @@ class Controller(threading.Thread):
# broker to handle the execution of substeps
self.worker_backend_socket = create_socket(
self.context, zmq.REP, 'controller backend rep') # ROUTER
- env.config['sockets'][
- 'worker_backend'] = self.worker_backend_socket.bind_to_random_port(
- 'tcp://127.0.0.1')
+ # we assume the router is always on local host, but we will use a non-localhost
+ # IP so that others can connect to it.
+ local_ip = get_localhost_ip()
+ worker_port = self.worker_backend_socket.bind_to_random_port(
+ f'tcp://{local_ip}')
+ env.config['sockets']['worker_backend'] = f'tcp://{local_ip}:{worker_port}'
# tapping
if env.config['exec_mode'] == 'master':
diff --git a/src/sos/utils.py b/src/sos/utils.py
index <HASH>..<HASH> 100644
--- a/src/sos/utils.py
+++ b/src/sos/utils.py
@@ -14,6 +14,7 @@ import os
import pickle
import re
import sys
+import socket
import tempfile
import threading
import time
@@ -1866,3 +1867,16 @@ def separate_options(options: str) -> List[str]:
pieces[idx] += '\n' + pieces[idx + 1]
pieces.pop(idx + 1)
return pieces
+
+
+def get_localhost_ip():
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ try:
+ # doesn't even have to be reachable
+ s.connect(('10.255.255.255', 1))
+ IP = s.getsockname()[0]
+ except:
+ IP = '127.0.0.1'
+ finally:
+ s.close()
+ return IP
\ No newline at end of file
diff --git a/src/sos/workers.py b/src/sos/workers.py
index <HASH>..<HASH> 100755
--- a/src/sos/workers.py
+++ b/src/sos/workers.py
@@ -14,7 +14,7 @@ import zmq
from .controller import (close_socket, connect_controllers, create_socket,
disconnect_controllers)
from .executor_utils import kill_all_subprocesses, prepare_env
-from .utils import env, ProcessKilled, short_repr
+from .utils import env, ProcessKilled, short_repr, get_localhost_ip
from .messages import encode_msg, decode_msg
def signal_handler(*args, **kwargs):
@@ -163,8 +163,9 @@ class SoS_Worker(mp.Process):
# create controller socket
env.ctrl_socket = create_socket(env.zmq_context, zmq.REQ,
'worker backend')
- env.ctrl_socket.connect(
- f'tcp://127.0.0.1:{self.config["sockets"]["worker_backend"]}')
+ # worker_backend, or the router, might be on another machine
+ env.log_to_file('WORKER', f'Connecting to router {self.config["sockets"]["worker_backend"]} from {get_localhost_ip()}')
+ env.ctrl_socket.connect(self.config["sockets"]["worker_backend"])
signal.signal(signal.SIGTERM, signal_handler)
# result socket used by substeps
|
Using real IP addresses for router address to allow remote worker connection
|
vatlab_SoS
|
train
|
1a4b1b31096dedccdd9be4b6aea2e226deeb4d94
|
diff --git a/models/points.go b/models/points.go
index <HASH>..<HASH> 100644
--- a/models/points.go
+++ b/models/points.go
@@ -1936,7 +1936,9 @@ func (p *point) Delete() {
switch {
case p.it.end == p.it.start:
case p.it.end >= len(p.fields):
- p.fields = p.fields[:p.it.start]
+ // Remove the trailing comma if there are more than one fields
+ p.fields = bytes.TrimSuffix(p.fields[:p.it.start], []byte(","))
+
case p.it.start == 0:
p.fields = p.fields[p.it.end:]
default:
diff --git a/models/points_test.go b/models/points_test.go
index <HASH>..<HASH> 100644
--- a/models/points_test.go
+++ b/models/points_test.go
@@ -2182,6 +2182,10 @@ func TestPoint_FieldIterator_Delete_Begin(t *testing.T) {
if !reflect.DeepEqual(got, exp) {
t.Fatalf("Delete failed, got %#v, exp %#v", got, exp)
}
+
+ if _, err = models.ParsePointsString(points[0].String()); err != nil {
+ t.Fatalf("Failed to parse point: %v", err)
+ }
}
func TestPoint_FieldIterator_Delete_Middle(t *testing.T) {
@@ -2203,6 +2207,10 @@ func TestPoint_FieldIterator_Delete_Middle(t *testing.T) {
if !reflect.DeepEqual(got, exp) {
t.Fatalf("Delete failed, got %#v, exp %#v", got, exp)
}
+
+ if _, err = models.ParsePointsString(points[0].String()); err != nil {
+ t.Fatalf("Failed to parse point: %v", err)
+ }
}
func TestPoint_FieldIterator_Delete_End(t *testing.T) {
@@ -2225,6 +2233,10 @@ func TestPoint_FieldIterator_Delete_End(t *testing.T) {
if !reflect.DeepEqual(got, exp) {
t.Fatalf("Delete failed, got %#v, exp %#v", got, exp)
}
+
+ if _, err = models.ParsePointsString(points[0].String()); err != nil {
+ t.Fatalf("Failed to parse point: %v", err)
+ }
}
func TestPoint_FieldIterator_Delete_Nothing(t *testing.T) {
@@ -2244,6 +2256,10 @@ func TestPoint_FieldIterator_Delete_Nothing(t *testing.T) {
if !reflect.DeepEqual(got, exp) {
t.Fatalf("Delete failed, got %#v, exp %#v", got, exp)
}
+
+ if _, err = models.ParsePointsString(points[0].String()); err != nil {
+ t.Fatalf("Failed to parse point: %v", err)
+ }
}
func TestPoint_FieldIterator_Delete_Twice(t *testing.T) {
@@ -2266,6 +2282,10 @@ func TestPoint_FieldIterator_Delete_Twice(t *testing.T) {
if !reflect.DeepEqual(got, exp) {
t.Fatalf("Delete failed, got %#v, exp %#v", got, exp)
}
+
+ if _, err = models.ParsePointsString(points[0].String()); err != nil {
+ t.Fatalf("Failed to parse point: %v", err)
+ }
}
func TestEscapeStringField(t *testing.T) {
|
Fix delete time fields creating unparseable points
If a field was named time was written and was subsequently dropped,
it could leave a trailing comma in the series key causing it to fail
to be parseable in other parts of the code.
|
influxdata_influxdb
|
train
|
556d46835b10e6f49a03fe517fe758231016acb5
|
diff --git a/pyhull/__init__.py b/pyhull/__init__.py
index <HASH>..<HASH> 100644
--- a/pyhull/__init__.py
+++ b/pyhull/__init__.py
@@ -35,8 +35,7 @@ def qhull_cmd(cmd, options, points):
Output as a list of strings. E.g., ['4', '0 2', '1 0', '2 3 ', '3 1']
"""
prep_str = [str(len(points[0])), str(len(points))]
- #This complicated expression ensures correction precision in conversion.
- prep_str.extend([' '.join(["%.17f" % i for i in row]) for row in points])
+ prep_str.extend([' '.join(map(repr, row)) for row in points])
output = getattr(hull, cmd)(options, "\n".join(prep_str))
return map(string.strip, output.strip().split("\n"))
@@ -131,10 +130,10 @@ def qhalf(options, halfspaces, interior_point):
"""
points = [list(h.normal) + [h.offset] for h in halfspaces]
data = [[len(interior_point), 1]]
- data.append(["%.17f" % i for i in interior_point])
+ data.append(map(repr, interior_point))
data.append([len(points[0])])
data.append([len(points)])
- data.extend([["%.17f" % i for i in row] for row in points])
+ data.extend([map(repr, row) for row in points])
prep_str = [" ".join(map(str, line)) for line in data]
output = getattr(hull, "qhalf")(options, "\n".join(prep_str))
return map(string.strip, output.strip().split("\n"))
|
Use repr instead of %<I>f to guarantee float precision preservation.
|
materialsvirtuallab_pyhull
|
train
|
1838e094d1acd0243b8299b4654aa455c56e6f1b
|
diff --git a/components/ekb/src/main/java/org/openengsb/core/ekb/internal/ModelRegistryService.java b/components/ekb/src/main/java/org/openengsb/core/ekb/internal/ModelRegistryService.java
index <HASH>..<HASH> 100644
--- a/components/ekb/src/main/java/org/openengsb/core/ekb/internal/ModelRegistryService.java
+++ b/components/ekb/src/main/java/org/openengsb/core/ekb/internal/ModelRegistryService.java
@@ -81,16 +81,6 @@ public final class ModelRegistryService implements ModelRegistry, BundleListener
if (event.getType() != BundleEvent.STARTED && event.getType() != BundleEvent.STOPPED) {
return false;
}
- String symbolicName = event.getBundle().getSymbolicName();
- // this two bundles contain references which would be checked during the bundle scanning,
- // which aren't there since they are optional. So the bundle scanning would throw
- // ClassNotFoundExceptions which don't make a problem, but maybe worry the user
- if (symbolicName.equals("org.apache.servicemix.bundles.xmlbeans")) {
- return false;
- }
- if (symbolicName.equals("org.ops4j.pax.wicket.service")) {
- return false;
- }
return true;
}
@@ -135,6 +125,9 @@ public final class ModelRegistryService implements ModelRegistry, BundleListener
} catch (ClassNotFoundException e) {
LOGGER.warn(String.format("Bundle could not find own class: %s", classname), e);
return false;
+ } catch (NoClassDefFoundError e) {
+ // ignore since this happens if bundle have optional imports
+ return false;
}
return OpenEngSBModel.class.isAssignableFrom(clazz);
}
|
[OPENENGSB-<I>] found a better way to catch the ClassNotFound exceptions in the model registry
|
openengsb_openengsb
|
train
|
7564695eb7b511d0914907e9188c619b2392d14c
|
diff --git a/arch/zx48k/optimizer/errors.py b/arch/zx48k/optimizer/errors.py
index <HASH>..<HASH> 100644
--- a/arch/zx48k/optimizer/errors.py
+++ b/arch/zx48k/optimizer/errors.py
@@ -7,7 +7,22 @@ class DuplicatedLabelError(Error):
""" Exception raised when a duplicated Label is found.
This should never happen.
"""
-
def __init__(self, label):
- Error.__init__(self, "Invalid mnemonic '%s'" % label)
+ Error.__init__(self, "Invalid mnemonic '{}'".format(label))
self.label = label
+
+
+class OptimizerError(Error):
+ """ Generic exception raised during the optimization phase
+ """
+ def __init__(self, msg):
+ Error.__init__(self, msg)
+
+
+class OptimizerInvalidBasicBlockError(OptimizerError):
+ """ Exception raised when a block is not correctly partitioned.
+ This should never happen.
+ """
+ def __init__(self, block):
+ Error.__init__(self, "Invalid block '{}'".format(block.id))
+ self.block = block
|
Add optimizer exceptions
Raised when unexpected error in block partitions are
encountered.
|
boriel_zxbasic
|
train
|
ca9b9f2c0490cfee56660c5120787cef5006f3d9
|
diff --git a/src/Bes/Twig/Extension/MobileDetectExtension.php b/src/Bes/Twig/Extension/MobileDetectExtension.php
index <HASH>..<HASH> 100755
--- a/src/Bes/Twig/Extension/MobileDetectExtension.php
+++ b/src/Bes/Twig/Extension/MobileDetectExtension.php
@@ -23,15 +23,15 @@ class MobileDetectExtension extends \Twig_Extension
public function getFunctions()
{
$functions = array(
- 'get_available_devices' => new \Twig_Function_Method($this, 'getAvailableDevices'),
- 'is_mobile' => new \Twig_Function_Method($this, 'isMobile'),
- 'is_tablet' => new \Twig_Function_Method($this, 'isTablet'),
+ new \Twig_SimpleFunction('get_available_devices', array($this, 'getAvailableDevices')),
+ new \Twig_SimpleFunction('is_mobile', array($this, 'isMobile')),
+ new \Twig_SimpleFunction('is_tablet', array($this, 'isTablet'))
);
foreach ($this->getAvailableDevices() as $device => $fixedName) {
$methodName = 'is'.$device;
$twigFunctionName = 'is_'.$fixedName;
- $functions[$twigFunctionName] = new \Twig_Function_Method($this, $methodName);
+ $functions[] = new \Twig_SimpleFunction($twigFunctionName, array($this, $methodName));
}
return $functions;
|
replaced deprecated methods, support twig 2.x
|
bes89_mobiledetect-twig-extension
|
train
|
f9d28540567264ad9943779b6b5e3ea64b95edf0
|
diff --git a/SoftLayer/DNS.py b/SoftLayer/DNS.py
index <HASH>..<HASH> 100644
--- a/SoftLayer/DNS.py
+++ b/SoftLayer/DNS.py
@@ -31,8 +31,9 @@ class DNSManager(object):
domain - str"""
domain = domain.lower()
- results = self.domain.getByDomainName(domain,
- mask={'resourceRecords': {}})
+ results = self.domain.getByDomainName(
+ domain,
+ mask={'resourceRecords': {}})
matches = filter(lambda x: x['name'].lower() == domain, results)
try:
@@ -45,7 +46,8 @@ class DNSManager(object):
domain - str
serial - int (default strftime(%Y%m%d01))"""
- return self.domain.createObject({'name': domain,
+ return self.domain.createObject({
+ 'name': domain,
'serial': serial or strftime('%Y%m%d01')})
def delete_zone(self, domid):
|
pep8 cleanups for DNS.py
|
softlayer_softlayer-python
|
train
|
3396208df88e0348285ed7b692d32218431e41a2
|
diff --git a/android/guava/src/com/google/common/util/concurrent/CollectionFuture.java b/android/guava/src/com/google/common/util/concurrent/CollectionFuture.java
index <HASH>..<HASH> 100644
--- a/android/guava/src/com/google/common/util/concurrent/CollectionFuture.java
+++ b/android/guava/src/com/google/common/util/concurrent/CollectionFuture.java
@@ -102,7 +102,7 @@ abstract class CollectionFuture<V extends @Nullable Object, C extends @Nullable
/** The result of a successful {@code Future}. */
private static final class Present<V extends @Nullable Object> {
- V value;
+ final V value;
Present(V value) {
this.value = value;
diff --git a/guava/src/com/google/common/util/concurrent/CollectionFuture.java b/guava/src/com/google/common/util/concurrent/CollectionFuture.java
index <HASH>..<HASH> 100644
--- a/guava/src/com/google/common/util/concurrent/CollectionFuture.java
+++ b/guava/src/com/google/common/util/concurrent/CollectionFuture.java
@@ -102,7 +102,7 @@ abstract class CollectionFuture<V extends @Nullable Object, C extends @Nullable
/** The result of a successful {@code Future}. */
private static final class Present<V extends @Nullable Object> {
- V value;
+ final V value;
Present(V value) {
this.value = value;
|
Mark a field as `final`.
I don't think there was any concurrency-related danger here from the non-`final` field. But it should have been `final` to begin with, so now it is.
PiperOrigin-RevId: <I>
|
google_guava
|
train
|
08d7c8d50427730a128097d74a20bd5b5806ee15
|
diff --git a/geocoder/baidu.py b/geocoder/baidu.py
index <HASH>..<HASH> 100644
--- a/geocoder/baidu.py
+++ b/geocoder/baidu.py
@@ -3,10 +3,13 @@
from __future__ import absolute_import
+from collections import OrderedDict
import logging
+import re
+import six
from geocoder.base import OneResult, MultipleResultsQuery
-from geocoder.keys import baidu_key
+from geocoder.keys import baidu_key, baidu_security_key
class BaiduResult(OneResult):
@@ -55,13 +58,62 @@ class BaiduQuery(MultipleResultsQuery):
def _build_params(self, location, provider_key, **kwargs):
coordtype = kwargs.get('coordtype', 'wgs84ll')
- return {
- 'address': location,
+ params = {
+ 'address': re.sub('[ ,]', '%', location),
'output': 'json',
'ret_coordtype': coordtype,
'ak': provider_key,
}
+ # adapt params to authentication method
+ self.security_key = kwargs.get('sk', baidu_security_key)
+ if self.security_key:
+ return self._encode_params(params)
+ else:
+ return params
+
+ def _encode_params(self, params):
+ # maintain the order of the parameters during signature creation when returning the results
+ # signature is added to the end of the parameters
+ ordered_params = sorted([(k, v)
+ for (k, v) in params.items() if v])
+
+ params = OrderedDict(ordered_params)
+
+ # urlencode with Chinese symbols sabotage the query
+ params['sn'] = self._sign_url(
+ '/geocoder/v2/',
+ params,
+ self.security_key
+ )
+
+ return params
+
+ def _sign_url(self, base_url, params, security_key):
+ """
+ Signs a request url with a security key.
+ """
+ import hashlib
+
+ if six.PY3:
+ from urllib.parse import urlencode, quote, quote_plus
+ else:
+ from urllib import urlencode, quote, quote_plus
+
+ if not base_url or not self.security_key:
+ return None
+
+ params = params.copy()
+ address = params.pop('address')
+
+ url = base_url + '?address=' + address + '&' + urlencode(params)
+ encoded_url = quote(url, safe="/:=&?#+!$,;'@()*[]")
+
+ signature = quote_plus(encoded_url + self.security_key).encode('utf-8')
+ encoded_signature = hashlib.md5(signature).hexdigest()
+
+ return encoded_signature
+
def _build_headers(self, provider_key, **kwargs):
return {'Referer': kwargs.get('referer', 'http://developer.baidu.com')}
diff --git a/geocoder/keys.py b/geocoder/keys.py
index <HASH>..<HASH> 100644
--- a/geocoder/keys.py
+++ b/geocoder/keys.py
@@ -13,6 +13,7 @@ geonames_username = os.environ.get('GEONAMES_USERNAME')
opencage_key = os.environ.get('OPENCAGE_API_KEY')
mapquest_key = os.environ.get('MAPQUEST_API_KEY')
baidu_key = os.environ.get('BAIDU_API_KEY')
+baidu_security_key = os.environ.get('BAIDU_SECURITY_KEY')
gaode_key = os.environ.get('GAODE_API_KEY')
w3w_key = os.environ.get('W3W_API_KEY')
mapbox_access_token = os.environ.get('MAPBOX_ACCESS_TOKEN')
|
Added developer key functionality for Baidu
|
DenisCarriere_geocoder
|
train
|
5a81e9e5578baa30cd4a64602f9c1cd65491531a
|
diff --git a/src/base/Router.php b/src/base/Router.php
index <HASH>..<HASH> 100644
--- a/src/base/Router.php
+++ b/src/base/Router.php
@@ -174,6 +174,24 @@
}
/**
+ * Function that checks if the long of the patterns match
+ * @param $routePattern
+ * @param $path
+ * @return bool
+ */
+ private function compareSlashes($routePattern, $path) {
+ $pattern_sep = count(explode('/', $routePattern));
+ if(preg_match('/\/$/', $routePattern)) {
+ $pattern_sep--;
+ }
+ $path_sep = count(explode('/', $path));
+ if(preg_match('/\/$/', $path)) {
+ $path_sep--;
+ }
+ return abs($pattern_sep - $path_sep) < 1;
+ }
+
+ /**
* Método que busca el componente que ejecuta la ruta
*
* @param string $route
@@ -189,7 +207,7 @@
foreach ($this->routing as $pattern => $action) {
list($httpMethod, $routePattern) = $this->extractHttpRoute($pattern);
$matched = $this->matchRoutePattern($routePattern, $path);
- if ($matched && ($httpMethod === "ALL" || $httpRequest === $httpMethod)) {
+ if ($matched && ($httpMethod === "ALL" || $httpRequest === $httpMethod) && $this->compareSlashes($routePattern, $path)) {
$get = $this->extractComponents($route, $routePattern);
/** @var $class \PSFS\base\types\Controller */
$class = $this->getClassToCall($action);
@@ -572,7 +590,6 @@
$expr = str_replace('###', '(.*)', $expr);
$expr2 = preg_replace('/\(\.\*\)$/', '', $expr);
$matched = preg_match('/^' . $expr . '\/?$/i', $path) || preg_match('/^' . $expr2 . '?$/i', $path);
-
return $matched;
}
@@ -589,7 +606,7 @@
list($httpMethod, $routePattern) = explode("#|#", $pattern, 2);
}
- return array($httpMethod, $routePattern);
+ return array(strtoupper($httpMethod), $routePattern);
}
/**
|
Improve router parser to allow very complex urls
|
psfs_core
|
train
|
ea350ae14d717da842105d4bfad3a6c2dee71b1d
|
diff --git a/java/client/src/org/openqa/selenium/remote/BeanToJsonConverter.java b/java/client/src/org/openqa/selenium/remote/BeanToJsonConverter.java
index <HASH>..<HASH> 100644
--- a/java/client/src/org/openqa/selenium/remote/BeanToJsonConverter.java
+++ b/java/client/src/org/openqa/selenium/remote/BeanToJsonConverter.java
@@ -198,10 +198,17 @@ public class BeanToJsonConverter {
if (res instanceof JsonElement) {
return (JsonElement) res;
}
- try {
- return new JsonParser().parse((String) res);
- } catch (JsonParseException e) {
- return new JsonPrimitive((String) res);
+
+ if (res instanceof Map) {
+ return convertObject(res);
+ } else if (res instanceof Collection) {
+ return convertObject(res);
+ } else if (res instanceof String) {
+ try {
+ return new JsonParser().parse((String) res);
+ } catch (JsonParseException e) {
+ return new JsonPrimitive((String) res);
+ }
}
} catch (ReflectiveOperationException e) {
throw new WebDriverException(e);
diff --git a/java/client/test/org/openqa/selenium/remote/BeanToJsonConverterTest.java b/java/client/test/org/openqa/selenium/remote/BeanToJsonConverterTest.java
index <HASH>..<HASH> 100644
--- a/java/client/test/org/openqa/selenium/remote/BeanToJsonConverterTest.java
+++ b/java/client/test/org/openqa/selenium/remote/BeanToJsonConverterTest.java
@@ -27,12 +27,14 @@ import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
+import com.google.gson.JsonPrimitive;
import com.google.gson.JsonSyntaxException;
import org.junit.Test;
@@ -234,6 +236,39 @@ public class BeanToJsonConverterTest {
}
@Test
+ public void toJsonMethodCanConvertibleReturnedMap() {
+ class ToJsonReturnsMap {
+ public Map<String, Object> toJson() {
+ return ImmutableMap.of("cheese", "peas");
+ }
+ }
+
+ String json = new BeanToJsonConverter().convert(new ToJsonReturnsMap());
+ JsonObject converted = new JsonParser().parse(json).getAsJsonObject();
+
+ assertEquals(1, converted.entrySet().size());
+ assertEquals("peas", converted.get("cheese").getAsString());
+ }
+
+ @Test
+ public void toJsonMethodCanConvertReturnedCollection() {
+ class ToJsonReturnsCollection {
+ public Set<String> toJson() {
+ return ImmutableSortedSet.of("cheese", "peas");
+ }
+ }
+
+ String json = new BeanToJsonConverter().convert(new ToJsonReturnsCollection());
+ JsonArray converted = new JsonParser().parse(json).getAsJsonArray();
+
+ assertEquals(2, converted.size());
+ JsonArray expected = new JsonArray();
+ expected.add(new JsonPrimitive("cheese"));
+ expected.add(new JsonPrimitive("peas"));
+ assertEquals(expected, converted);
+ }
+
+ @Test
public void testShouldCallAsMapMethodIfPresent() {
String json = new BeanToJsonConverter().convert(new Mappable1("a key", "a value"));
assertEquals("{\"a key\":\"a value\"}", json);
|
Allow the BeanToJsonConverter cleanly handle common return types
The way that `toJson` is currently handled encourages people
to take a hard dependency on gson, or attempt to handle
serialising JSON themselves. Modify how `toJson` is handled
to allow us to return other data types, such as `Map` and
anything extending `Collection`.
This opens up the possibility of an infinite loop. Best not
do that in the Real World, eh?
|
SeleniumHQ_selenium
|
train
|
8b6eaee1398e715ffe47e68723a7e7fc53f3a10b
|
diff --git a/ansible_runner/runner_config.py b/ansible_runner/runner_config.py
index <HASH>..<HASH> 100644
--- a/ansible_runner/runner_config.py
+++ b/ansible_runner/runner_config.py
@@ -26,7 +26,7 @@ import shlex
from uuid import uuid4
from collections import Mapping
-from distutils.spawn import find_executable
+#from distutils.spawn import find_executable
from six import iteritems, string_types
@@ -92,9 +92,9 @@ class RunnerConfig(object):
It's also responsible for wrapping the command with the proper ssh agent invocation
and setting early ANSIBLE_ environment variables.
"""
- ansible_path = find_executable('ansible')
- if ansible_path is None or not os.access(ansible_path, os.X_OK):
- raise ConfigurationError("Ansible not found. Make sure that it is installed.")
+ # ansible_path = find_executable('ansible')
+ # if ansible_path is None or not os.access(ansible_path, os.X_OK):
+ # raise ConfigurationError("Ansible not found. Make sure that it is installed.")
if self.private_data_dir is None:
raise ConfigurationError("Runner Base Directory is not defined")
if self.module is None and self.playbook is None: # TODO: ad-hoc mode, module and args
|
Roll back Ansible executable check
This prevents unit testing when ansible itself isn't present
|
ansible_ansible-runner
|
train
|
ff2bd052e67ad6d7f0f130eaff4d9f8a17dde1fa
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,10 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [1.1.5] - 2019-07-08
+### Changed
+- Added option to disable CSRF protection
+
## [1.1.4] - 2019-05-29
### Added
- Added form submission tracking (**database update required**)
diff --git a/Form/FormType.php b/Form/FormType.php
index <HASH>..<HASH> 100644
--- a/Form/FormType.php
+++ b/Form/FormType.php
@@ -129,6 +129,11 @@ class FormType extends AbstractType
'required' => false,
]);
+ $builder->add('disableCsrfProtection', CheckboxType::class, [
+ 'label' => 'Disable CSRF protection',
+ 'required' => false,
+ ]);
+
$builder->add('save', SubmitType::class, [
'label' => 'Save',
]);
diff --git a/FormsBundle.php b/FormsBundle.php
index <HASH>..<HASH> 100755
--- a/FormsBundle.php
+++ b/FormsBundle.php
@@ -7,7 +7,7 @@ use Symfony\Component\DependencyInjection\ContainerBuilder;
class FormsBundle extends Bundle
{
- public const VERSION = '1.1.4';
+ public const VERSION = '1.1.5';
/**
* {@inheritdoc}
diff --git a/Model/Form.php b/Model/Form.php
index <HASH>..<HASH> 100644
--- a/Model/Form.php
+++ b/Model/Form.php
@@ -53,6 +53,9 @@ final class Form extends Aggregate
/** @var bool */
public $trackSubmissions = false;
+ /** @var bool */
+ public $disableCsrfProtection = false;
+
/** @var array */
public $items;
}
diff --git a/Model/FormRead.php b/Model/FormRead.php
index <HASH>..<HASH> 100644
--- a/Model/FormRead.php
+++ b/Model/FormRead.php
@@ -128,6 +128,12 @@ class FormRead
private $trackSubmissions;
/**
+ * @var bool|null
+ * @ORM\Column(type="boolean", nullable=true)
+ */
+ private $disableCsrfProtection;
+
+ /**
* @var \DateTime
* @ORM\Column(type="datetime")
*/
@@ -480,6 +486,26 @@ class FormRead
}
/**
+ * @return bool
+ */
+ public function getDisableCsrfProtection(): bool
+ {
+ return (bool) $this->disableCsrfProtection;
+ }
+
+ /**
+ * @param bool|null $disableCsrfProtection
+ *
+ * @return FormRead
+ */
+ public function setDisableCsrfProtection(?bool $disableCsrfProtection): self
+ {
+ $this->disableCsrfProtection = (bool) $disableCsrfProtection;
+
+ return $this;
+ }
+
+ /**
* @return \DateTime
*/
public function getCreated(): \DateTime
diff --git a/Resources/translations/messages.de.yml b/Resources/translations/messages.de.yml
index <HASH>..<HASH> 100644
--- a/Resources/translations/messages.de.yml
+++ b/Resources/translations/messages.de.yml
@@ -93,3 +93,4 @@ Timelimit (in seconds): "Zeitlimit (in Sekunden)"
Timelimit Message: "Zeitlimit Nachricht"
You have already submitted the form, please try again later: "Sie haben dieses Formular bereits abgeschickt."
IP-Address: "IP-Adresse"
+Disable CSRF protection: "Formularschutz deaktivieren"
diff --git a/Services/FormService.php b/Services/FormService.php
index <HASH>..<HASH> 100644
--- a/Services/FormService.php
+++ b/Services/FormService.php
@@ -132,6 +132,7 @@ class FormService
$formRead->setSuccessText($aggregate->successText);
$formRead->setSaveSubmissions($aggregate->saveSubmissions);
$formRead->setTrackSubmissions($aggregate->trackSubmissions);
+ $formRead->setDisableCsrfProtection($aggregate->disableCsrfProtection);
$formRead->setCreated($aggregate->getCreated());
$formRead->setModified($aggregate->getModified());
@@ -173,6 +174,7 @@ class FormService
$formOptions = [
'action' => '?formular=abgeschickt',
'validation_groups' => !$ignore_validation,
+ 'csrf_protection' => !$formRead->getDisableCsrfProtection(),
];
$formBuilder = $this->formFactory->createNamedBuilder($formName, FormType::class, $data, $formOptions);
|
Added option to disable CSRF protection
|
RevisionTen_forms
|
train
|
a3eb11c7ce75ec7bc9a9319afa2eaa9871eec4c5
|
diff --git a/src/angular-materialize.js b/src/angular-materialize.js
index <HASH>..<HASH> 100644
--- a/src/angular-materialize.js
+++ b/src/angular-materialize.js
@@ -1107,7 +1107,7 @@
/* example usage:
<!-- Modal Trigger -->
- <a class='btn' href='#demoModal' modal>show Modal</a>
+ <a class='btn' data-target='demoModal' modal>show Modal</a>
<!-- Modal Structure -->
<div id="demoModal" class="modal">
<div class="modal-content">
@@ -1163,13 +1163,14 @@
ready: ready,
complete: complete,
};
- element.leanModal(options);
+ modalEl.modal(options);
+ element.modal(options);
// Setup watch for opening / closing modal programatically.
if (angular.isDefined(attrs.open) && modalEl.length > 0) {
scope.$watch('open', function(value, lastValue) {
if (!angular.isDefined(value)) { return; }
- (value === true) ? modalEl.openModal(options) : modalEl.closeModal();
+ (value === true) ? modalEl.modal('open') : modalEl.modal('close');
});
}
});
|
fix modal according to refactored version <I> from Dogfalo/materialize
|
krescruz_angular-materialize
|
train
|
ecbfce2bee13bed4bb105ffdec7983051a079c08
|
diff --git a/src/TableColumn/DualTableColumn.php b/src/TableColumn/DualTableColumn.php
index <HASH>..<HASH> 100644
--- a/src/TableColumn/DualTableColumn.php
+++ b/src/TableColumn/DualTableColumn.php
@@ -38,7 +38,7 @@ abstract class DualTableColumn extends TableColumn
*
* @var int|null
*/
- protected ?int $sortOrder2;
+ protected ?int $sortOrder2 = null;
//--------------------------------------------------------------------------------------------------------------------
/**
|
Typed property DualTableColumn::$sortOrder2 must not be accessed before initialization.
|
SetBased_php-abc-table-overview
|
train
|
b9be0dcea7b2c3160145d5fd5ab3a1927beda8bd
|
diff --git a/src/java/com/threerings/miso/client/MisoScenePanel.java b/src/java/com/threerings/miso/client/MisoScenePanel.java
index <HASH>..<HASH> 100644
--- a/src/java/com/threerings/miso/client/MisoScenePanel.java
+++ b/src/java/com/threerings/miso/client/MisoScenePanel.java
@@ -470,17 +470,8 @@ public class MisoScenePanel extends VirtualMediaPanel
}
// make the menu surround the clicked object, but with consistent size
- Rectangle mbounds = new Rectangle(scobj.bounds);
- Dimension radbox = getObjectRadialSize();
- if (mbounds.width != radbox.width) {
- mbounds.x += (mbounds.width-radbox.width)/2;
- mbounds.width = radbox.width;
- }
- if (mbounds.height != radbox.height) {
- mbounds.y += (mbounds.height-radbox.height)/2;
- mbounds.height = radbox.height;
- }
-
+ Rectangle mbounds = getRadialMenuBounds(scobj);
+
_activeMenu = menu;
_activeMenu.addActionListener(new ActionListener() {
public void actionPerformed (ActionEvent e) {
@@ -498,6 +489,26 @@ public class MisoScenePanel extends VirtualMediaPanel
}
/**
+ * Returns an appropriate set of menu bounds for the specified object.
+ * Returns a rectangle of the size specified by
+ * {@link #getObjectRadialSize} centered around the object.
+ */
+ protected Rectangle getRadialMenuBounds (SceneObject scobj)
+ {
+ Rectangle mbounds = new Rectangle(scobj.bounds);
+ Dimension radbox = getObjectRadialSize();
+ if (mbounds.width != radbox.width) {
+ mbounds.x += (mbounds.width-radbox.width)/2;
+ mbounds.width = radbox.width;
+ }
+ if (mbounds.height != radbox.height) {
+ mbounds.y += (mbounds.height-radbox.height)/2;
+ mbounds.height = radbox.height;
+ }
+ return mbounds;
+ }
+
+ /**
* Returns the size of the rectangle around which we create an
* object's radial menu. The default is a sensible size, but derived
* classes may wish to tune the value to make their menus lay out in a
|
Broke menu bounds computation out into its own method for use by subclasses.
git-svn-id: svn+ssh://src.earth.threerings.net/narya/trunk@<I> <I>f4-<I>e9-<I>-aa3c-eee0fc<I>fb1
|
threerings_narya
|
train
|
5f7637dd6d981a2b99b488948f0fc04f24c252ea
|
diff --git a/xdoctest/utils/util_notebook.py b/xdoctest/utils/util_notebook.py
index <HASH>..<HASH> 100644
--- a/xdoctest/utils/util_notebook.py
+++ b/xdoctest/utils/util_notebook.py
@@ -218,7 +218,11 @@ def execute_notebook(ipynb_fpath, timeout=None, verbose=None):
>>> print('resources = {!r}'.format(resources))
>>> print('nb = {!r}'.format(nb))
>>> for cell in nb['cells']:
- >>> assert len(cell['outputs']) == 1
+ >>> if len(cell['outputs']) != 1:
+ >>> import warnings
+ >>> warnings.warn('expected an output, is this the issue '
+ >>> 'described [here](https://github.com/nteract/papermill/issues/426)?')
+
"""
import nbformat
import logging
@@ -230,37 +234,19 @@ def execute_notebook(ipynb_fpath, timeout=None, verbose=None):
verbose = 0
if verbose > 1:
- # print('nbformat = {!r}'.format(nbformat))
- # print('ExecutePreprocessor = {!r}'.format(ExecutePreprocessor))
- # print('ep = {!r}'.format(ep))
- print('dpath = {!r}'.format(dpath))
- print('executing notebook')
- print('setting log-level to {}'.format(logging.DEBUG))
+ print('executing notebook in dpath = {!r}'.format(dpath))
ep.log.setLevel(logging.DEBUG)
- _add_debug_stream_handler(ep.log)
elif verbose > 0:
ep.log.setLevel(logging.INFO)
with open(ipynb_fpath, 'r+') as file:
nb = nbformat.read(file, as_version=nbformat.NO_CONVERT)
nb, resources = ep.preprocess(nb, {'metadata': {'path': dpath}})
- # nb, resources = ep.preprocess(nb)
# from nbconvert.preprocessors import executenb
# nb, resources = executenb(nb, cwd=dpath)
return nb, resources
-def _add_debug_stream_handler(logger):
- import logging
- s_formatter = logging.Formatter('%(levelname)s: %(message)s')
- # Add a stdout handler:
- # this allows us to print logging calls to the terminal
- stdout_handler = logging.StreamHandler(sys.stdout)
- stdout_handler.setFormatter(s_formatter)
- stdout_handler.setLevel(logging.DEBUG)
- logger.addHandler(stdout_handler)
-
-
def _make_test_notebook_fpath(fpath, cell_sources):
"""
Helper for testing
|
Make error into a warning on win<I>
|
Erotemic_xdoctest
|
train
|
5cf95eadeb323c81320bc62748151b992765f0b0
|
diff --git a/openpnm/utils/petsc.py b/openpnm/utils/petsc.py
index <HASH>..<HASH> 100644
--- a/openpnm/utils/petsc.py
+++ b/openpnm/utils/petsc.py
@@ -11,10 +11,13 @@ import scipy.sparse
from openpnm.core import Base
from openpnm.utils import logging
logger = logging.getLogger(__name__)
-import petsc4py
-# Next line must be before importing PETSc
-petsc4py.init(sys.argv)
-from petsc4py import PETSc
+try:
+ import petsc4py
+ # Next line must be before importing PETSc
+ petsc4py.init(sys.argv)
+ from petsc4py import PETSc
+except ModuleNotFoundError:
+ pass
class PETScSparseLinearSolver(Base):
|
Protect petsc import via try/except
|
PMEAL_OpenPNM
|
train
|
fb947f9067f802bc3130bfd3686aa695b932b1c6
|
diff --git a/web/concrete/js/ccm.base.js b/web/concrete/js/ccm.base.js
index <HASH>..<HASH> 100644
--- a/web/concrete/js/ccm.base.js
+++ b/web/concrete/js/ccm.base.js
@@ -53,16 +53,25 @@ ccm_activateSite = function() {
ccm_topPaneDeactivated = false;
}
+
ccm_addHeaderItem = function(item, type) {
+ // "item" might already have a "?v=", so avoid invalid query string.
+ var qschar = (item.indexOf('?') != -1 ? '' : '?ts=');
if (type == 'CSS') {
- if (!($('head').children('link[href*="' + item + '"]').length)) {
- $('head').append('<link rel="stylesheet" type="text/css" href="' + item + '?ts=' + new Date().getTime() + '" />');
+ if (navigator.userAgent.indexOf('MSIE') != -1) {
+ // Most reliable way found to force IE to apply dynamically inserted stylesheet across jQuery versions
+ var ss = document.createElement('link'), hd = document.getElementsByTagName('head')[0];
+ ss.type = 'text/css'; ss.rel = 'stylesheet'; ss.href = item; ss.media = 'screen';
+ hd.appendChild(ss);
+ } else {
+ if (!($('head').children('link[href*="' + item + '"]').length)) {
+ $('head').append('<link rel="stylesheet" media="screen" type="text/css" href="' + item + qschar + new Date().getTime() + '" />');
+ }
}
} else if (type == 'JAVASCRIPT') {
if (!($('head').children('script[src*="' + item + '"]').length)) {
- $('head').append('<script type="text/javascript" src="' + item + '?ts=' + new Date().getTime() + '"></script>');
+ $('head').append('<script type="text/javascript" src="' + item + qschar + new Date().getTime() + '"></script>');
}
-
} else {
if (!($('head').children(item).length)) {
$('head').append(item);
diff --git a/web/concrete/models/block_types.php b/web/concrete/models/block_types.php
index <HASH>..<HASH> 100644
--- a/web/concrete/models/block_types.php
+++ b/web/concrete/models/block_types.php
@@ -427,10 +427,10 @@ defined('C5_EXECUTE') or die("Access Denied.");
$dir = $dir2;
}
- // now we check to see if it's been overridden in the core and if so we do it there
+ // now we check to see if it's been overridden in the site root and if so we do it there
if ($btID > 0) {
// this is only necessary when it's an existing refresh
- if (is_dir(DIR_FILES_BLOCK_TYPES . '/' . $btHandle)) {
+ if (file_exists(DIR_FILES_BLOCK_TYPES . '/' . $btHandle . '/' . FILENAME_BLOCK_CONTROLLER)) {
$dir = DIR_FILES_BLOCK_TYPES;
}
}
@@ -545,7 +545,7 @@ defined('C5_EXECUTE') or die("Access Denied.");
return $db->ErrorMsg();
}
} else {
- return t("No block found with the handle %s found.", $btHandle);
+ return t("No block found with the handle %s.", $btHandle);
}
}
|
IE fixes to JavaScript addHeaderItem; more reliable block refreshing when a package block has been overridden in the core
Former-commit-id: <I>d<I>b<I>ea<I>e<I>d<I>c<I>bd<I>bb8
|
concrete5_concrete5
|
train
|
d74b81ecaaf76c8c2f1294b1ebf28156f535934b
|
diff --git a/org_reader/org_reader.py b/org_reader/org_reader.py
index <HASH>..<HASH> 100644
--- a/org_reader/org_reader.py
+++ b/org_reader/org_reader.py
@@ -38,10 +38,11 @@ class OrgReader(BaseReader):
]
"""
no_more_header = False
+ expr_metadata = re.compile(r'^#\+[a-zA-Z]+:.*')
header = []
content = []
for line in text_lines:
- metadata = re.match(r'^#\+[a-zA-Z]+:.*', line)
+ metadata = expr_metadata.match(line)
if metadata and not no_more_header:
header.append(line)
else:
@@ -57,7 +58,12 @@ class OrgReader(BaseReader):
Return:
A dict containing metadatas
"""
- pass
+ expr_metadata = re.compile(r'^#\+([a-zA-Z]+):(.*)')
+ return {
+ expr_metadata.match(line).group(1).lower()
+ : expr_metadata.match(line).group(2).strip()
+ for line in text_lines
+ }
def read(self, source_path):
"""
|
org_reader: Write the metadata extractor code
|
getpelican_pelican-plugins
|
train
|
696e44625e79b4437f65fdc31736f3759fbf2ee5
|
diff --git a/gulpfile.js b/gulpfile.js
index <HASH>..<HASH> 100644
--- a/gulpfile.js
+++ b/gulpfile.js
@@ -1,6 +1,4 @@
const gulp = require('gulp');
-const babel = require('gulp-babel');
-const sourcemaps = require('gulp-sourcemaps');
const changed = require('gulp-changed');
const eslint = require('gulp-eslint');
@@ -11,17 +9,8 @@ gulp.task('lint', function () {
.pipe(eslint.format());
});
-gulp.task('compile', ['lint'], () => {
- return gulp.src('src/**/*.js')
- //.pipe(changed('lib'))
- .pipe(sourcemaps.init())
- .pipe(babel())
- .pipe(sourcemaps.write())
- .pipe(gulp.dest('lib'));
-});
-
gulp.task('watch', () => {
- gulp.watch('src/**/*.js', ['lint', 'compile']);
+ gulp.watch('src/**/*.js', ['lint']);
});
-gulp.task('default', ['watch']);
+gulp.task('default', ['watch', 'lint']);
|
remove babel compile from gulp tasks
|
MostlyJS_mostly-entity
|
train
|
0c093da67f31dbf29a9e29659dbba0f422260b12
|
diff --git a/cmd/minikube/cmd/root.go b/cmd/minikube/cmd/root.go
index <HASH>..<HASH> 100644
--- a/cmd/minikube/cmd/root.go
+++ b/cmd/minikube/cmd/root.go
@@ -62,7 +62,7 @@ var RootCmd = &cobra.Command{
Long: `minikube provisions and manages local Kubernetes clusters optimized for development workflows.`,
PersistentPreRun: func(cmd *cobra.Command, args []string) {
for _, path := range dirs {
- if err := os.MkdirAll(path, 0777); err != nil {
+ if err := os.MkdirAll(path, 0o777); err != nil {
exit.Error(reason.HostHomeMkdir, "Error creating minikube directory", err)
}
}
|
revert because of IDE bug
|
kubernetes_minikube
|
train
|
fc7fbe0c1add38435e104688b4bbccd8df6c2ce2
|
diff --git a/code/controllers/EventRegisterController.php b/code/controllers/EventRegisterController.php
index <HASH>..<HASH> 100644
--- a/code/controllers/EventRegisterController.php
+++ b/code/controllers/EventRegisterController.php
@@ -122,12 +122,13 @@ class EventRegisterController extends Page_Controller {
return $this->redirect($this->Link());
}
$registration = $this->getCurrentRegistration();
- $registration = $registration->customise(array(
- 'EditLink' => $this->Link('attendee/edit'),
- 'DeleteLink' => $this->Link('attendee/delete'),
- 'Total' => $registration->obj('calculateTotal')
- ))->renderWith("AttendeesReviewTable");
-
+ $customisations = array(
+ 'EditLink' => $this->Link('attendee/edit'),
+ 'DeleteLink' => $this->Link('attendee/delete'),
+ 'Total' => $registration->obj('calculateTotal')
+ );
+ $this->extend("updateReviewTable", $registration, $customisations);
+ $registration = $registration->renderWith("AttendeesReviewTable", $customisations);
return array(
'Title' => 'Review',
'Content' => $registration,
|
Allow AttendeesReviewTable template content to be customised via extensions
|
registripe_registripe-core
|
train
|
c3cf722e92643ab84988882144977e38334ea37f
|
diff --git a/engine-rest/engine-rest/src/main/java/org/camunda/bpm/engine/rest/impl/ExternalTaskRestServiceImpl.java b/engine-rest/engine-rest/src/main/java/org/camunda/bpm/engine/rest/impl/ExternalTaskRestServiceImpl.java
index <HASH>..<HASH> 100644
--- a/engine-rest/engine-rest/src/main/java/org/camunda/bpm/engine/rest/impl/ExternalTaskRestServiceImpl.java
+++ b/engine-rest/engine-rest/src/main/java/org/camunda/bpm/engine/rest/impl/ExternalTaskRestServiceImpl.java
@@ -146,15 +146,9 @@ public class ExternalTaskRestServiceImpl extends AbstractRestProcessEngineAware
@Override
public void setRetries(SetRetriesForExternalTasksDto retriesDto) {
ExternalTaskService externalTaskService = getProcessEngine().getExternalTaskService();
- ExternalTaskQueryDto externalTaskQueryDto = retriesDto.getExternalTaskQuery();
- ExternalTaskQuery externalTaskQuery = null;
-
- if (externalTaskQueryDto != null) {
- externalTaskQuery = externalTaskQueryDto.toQuery(getProcessEngine());
- }
try {
- externalTaskService.setRetriesSync(retriesDto.getExternalTaskIds(), externalTaskQuery, retriesDto.getRetries());
+ externalTaskService.setRetries(retriesDto.getExternalTaskIds(), retriesDto.getRetries());
} catch (NotFoundException e) {
throw new InvalidRequestException(Status.NOT_FOUND, e.getMessage());
} catch (BadUserRequestException e) {
diff --git a/engine-rest/engine-rest/src/test/java/org/camunda/bpm/engine/rest/ExternalTaskRestServiceInteractionTest.java b/engine-rest/engine-rest/src/test/java/org/camunda/bpm/engine/rest/ExternalTaskRestServiceInteractionTest.java
index <HASH>..<HASH> 100644
--- a/engine-rest/engine-rest/src/test/java/org/camunda/bpm/engine/rest/ExternalTaskRestServiceInteractionTest.java
+++ b/engine-rest/engine-rest/src/test/java/org/camunda/bpm/engine/rest/ExternalTaskRestServiceInteractionTest.java
@@ -859,7 +859,7 @@ public class ExternalTaskRestServiceInteractionTest extends AbstractRestServiceT
.when()
.put(RETRIES_EXTERNAL_TASKS_SYNC_URL);
- verify(externalTaskService).setRetriesSync(externalTaskIds, null, 5);
+ verify(externalTaskService).setRetries(externalTaskIds, 5);
verifyNoMoreInteractions(externalTaskService);
}
@@ -885,7 +885,7 @@ public class ExternalTaskRestServiceInteractionTest extends AbstractRestServiceT
@Test
public void testSetRetriesForExternalTasksWithNullExternalTaskIdsSync() {
- doThrow(BadUserRequestException.class).when(externalTaskService).setRetriesSync(anyListOf(String.class), any(ExternalTaskQuery.class), anyInt());
+ doThrow(BadUserRequestException.class).when(externalTaskService).setRetries(anyListOf(String.class), anyInt());
List<String> externalTaskIds = null;
Map<String, Object> parameters = new HashMap<String, Object>();
@@ -901,7 +901,7 @@ public class ExternalTaskRestServiceInteractionTest extends AbstractRestServiceT
.when()
.put(RETRIES_EXTERNAL_TASKS_SYNC_URL);
- verify(externalTaskService).setRetriesSync(externalTaskIds, null, 5);
+ verify(externalTaskService).setRetries(externalTaskIds, 5);
verifyNoMoreInteractions(externalTaskService);
}
@@ -929,7 +929,7 @@ public class ExternalTaskRestServiceInteractionTest extends AbstractRestServiceT
@Test
public void testSetNegativeRetriesForExternalTasksSync() {
- doThrow(BadUserRequestException.class).when(externalTaskService).setRetriesSync(anyListOf(String.class), any(ExternalTaskQuery.class), anyInt());
+ doThrow(BadUserRequestException.class).when(externalTaskService).setRetries(anyListOf(String.class), anyInt());
List<String> externalTaskIds = null;
Map<String, Object> parameters = new HashMap<String, Object>();
@@ -945,7 +945,7 @@ public class ExternalTaskRestServiceInteractionTest extends AbstractRestServiceT
.when()
.put(RETRIES_EXTERNAL_TASKS_SYNC_URL);
- verify(externalTaskService).setRetriesSync(externalTaskIds, null, -5);
+ verify(externalTaskService).setRetries(externalTaskIds, -5);
verifyNoMoreInteractions(externalTaskService);
}
|
fix(rest): code cleanup
related to CAM-<I>
|
camunda_camunda-bpm-platform
|
train
|
3df074ee4ae969e4bcc87cf5aa5b2e5a25c91ec6
|
diff --git a/abstractions.py b/abstractions.py
index <HASH>..<HASH> 100644
--- a/abstractions.py
+++ b/abstractions.py
@@ -122,7 +122,7 @@ class ModelGibbsSampling(Model):
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
- def resample(self):
+ def resample_model(self):
pass
class ModelMeanField(Model):
@@ -132,4 +132,4 @@ class ModelMeanField(Model):
def MF_coordinate_descent(self,convergence_tol):
pass
-# TODO parallel gibbs sampling model algorithm
+# TODO parallel gibbs sampling model algorithm interface
diff --git a/models.py b/models.py
index <HASH>..<HASH> 100644
--- a/models.py
+++ b/models.py
@@ -33,9 +33,26 @@ class Mixture(ModelGibbsSampling, Model, Distribution):
return self.weights.log_likelihood(np.arange(len(self.components))) + \
np.concatenate([c.log_likelihood(x) for c in self.components]).T
+ def resample(self,data):
+ # acts like distribution resampling: doesn't remember data, but does
+ # update instantiated parameters
+
+ # temporarily add the passed data
+ self.add_data(data) # this does one ``resampling'' step for labels
+
+ # now resample components
+ for idx, c in enumerate(self.components):
+ c.resample(data=[l.data[l.z == idx] for l in self.labels_list])
+
+ # and weights
+ self.weights.resample([l.z for l in self.labels_list])
+
+ # remove the passed data
+ self.labels_list.pop()
+
### Gibbs sampling
- def resample(self):
+ def resample_model(self):
for l in self.labels_list:
l.resample()
|
differentiated resample and resample_model, so that models can act like
distributions! useful for when mixture model in larger model, or using
models as components in other models
|
mattjj_pybasicbayes
|
train
|
70ad3c16c3b500c507a991430ba03546843fbe6c
|
diff --git a/trunk/JLanguageTool/src/java/de/danielnaber/languagetool/gui/ConfigurationDialog.java b/trunk/JLanguageTool/src/java/de/danielnaber/languagetool/gui/ConfigurationDialog.java
index <HASH>..<HASH> 100644
--- a/trunk/JLanguageTool/src/java/de/danielnaber/languagetool/gui/ConfigurationDialog.java
+++ b/trunk/JLanguageTool/src/java/de/danielnaber/languagetool/gui/ConfigurationDialog.java
@@ -168,7 +168,9 @@ public class ConfigurationDialog implements ActionListener {
List<Object> motherTongues = new ArrayList<Object>();
motherTongues.add(NO_MOTHER_TONGUE);
for (Language lang : Language.LANGUAGES) {
- motherTongues.add(lang);
+ if (lang != Language.DEMO) {
+ motherTongues.add(lang);
+ }
}
return motherTongues.toArray();
}
diff --git a/trunk/JLanguageTool/src/java/de/danielnaber/languagetool/gui/Main.java b/trunk/JLanguageTool/src/java/de/danielnaber/languagetool/gui/Main.java
index <HASH>..<HASH> 100644
--- a/trunk/JLanguageTool/src/java/de/danielnaber/languagetool/gui/Main.java
+++ b/trunk/JLanguageTool/src/java/de/danielnaber/languagetool/gui/Main.java
@@ -97,7 +97,12 @@ class Main implements ActionListener {
panel.add(new JLabel(" in: "), buttonCons);
buttonCons.gridx = 2;
buttonCons.gridy = 0;
- langBox = new JComboBox(Language.LANGUAGES);
+ langBox = new JComboBox();
+ for (Language lang : Language.LANGUAGES) {
+ if (lang != Language.DEMO) {
+ langBox.addItem(lang);
+ }
+ }
panel.add(langBox, buttonCons);
buttonCons.gridx = 3;
buttonCons.gridy = 0;
|
don't show test language in dialog
|
languagetool-org_languagetool
|
train
|
5391a0b6f521faf175e8fbf27402c76329a6e7de
|
diff --git a/crd-generator/src/main/java/io/dekorate/crd/apt/CustomResourceAnnotationProcessor.java b/crd-generator/src/main/java/io/dekorate/crd/apt/CustomResourceAnnotationProcessor.java
index <HASH>..<HASH> 100644
--- a/crd-generator/src/main/java/io/dekorate/crd/apt/CustomResourceAnnotationProcessor.java
+++ b/crd-generator/src/main/java/io/dekorate/crd/apt/CustomResourceAnnotationProcessor.java
@@ -44,11 +44,7 @@ import javax.lang.model.type.MirroredTypeException;
@SupportedAnnotationTypes({
"io.fabric8.kubernetes.model.annotation.Group",
- "io.fabric8.kubernetes.model.annotation.Version",
- "io.fabric8.kubernetes.model.annotation.Kind",
- "io.fabric8.kubernetes.model.annotation.Plural",
- "io.fabric8.kubernetes.model.annotation.Singular",
- "io.dekorate.crd.annotation.Crd"})
+ "io.fabric8.kubernetes.model.annotation.Version"})
public class CustomResourceAnnotationProcessor extends AbstractProcessor {
private final Resources resources = new Resources();
|
fix: only trigger generation on Group / Version annotations
|
fabric8io_kubernetes-client
|
train
|
5de6df19dc7989312e5916bfd840a4f5f917308d
|
diff --git a/openstack_dashboard/dashboards/project/instances/tabs.py b/openstack_dashboard/dashboards/project/instances/tabs.py
index <HASH>..<HASH> 100644
--- a/openstack_dashboard/dashboards/project/instances/tabs.py
+++ b/openstack_dashboard/dashboards/project/instances/tabs.py
@@ -35,7 +35,18 @@ class OverviewTab(tabs.Tab):
"_detail_overview.html")
def get_context_data(self, request):
- return {"instance": self.tab_group.kwargs['instance']}
+ instance = self.tab_group.kwargs['instance']
+ if instance.volumes and not instance.image:
+ try:
+ volume = api.cinder.volume_get(
+ self.request, volume_id=instance.volumes[0].volumeId)
+ instance.image = {
+ 'id': volume.volume_image_metadata['image_id'],
+ 'name': volume.volume_image_metadata['image_name']}
+ except Exception:
+ exceptions.handle(self.request,
+ _('Failed to get attached volume.'))
+ return {"instance": instance}
class InterfacesTab(tabs.TableTab):
|
Add image data for instance with volume
If an instance was created with a volume from an image, there is
no image details in instance overview page.
This patch adds image info from volume image-metadata.
Change-Id: I1da<I>eb4a<I>ab<I>f<I>d<I>b<I>bada<I>de<I>f2
Closes-Bug: <I>
|
openstack_horizon
|
train
|
c5d8caf0240c61ed3b50fcfc9125a42ea8b9f8a7
|
diff --git a/examples/hid/device.py b/examples/hid/device.py
index <HASH>..<HASH> 100755
--- a/examples/hid/device.py
+++ b/examples/hid/device.py
@@ -90,15 +90,6 @@ class Mouse(functionfs.HIDFunction):
(GO_RIGHT_REPORT, ),
)
-class SubprocessMouse(SubprocessFunction):
- """
- Just to illustrate subclassing SubprocessFunction.
- """
- def run(self):
- print('Function ready, processing USB events.')
- super(SubprocessMouse, self).run()
- print('Function exiting normaly.')
-
def main():
"""
Entry point.
@@ -139,7 +130,7 @@ def main():
{
'function_list': [
{ # A single function
- 'function': SubprocessMouse(
+ 'function': SubprocessFunction(
getFunction=functools.partial(
Mouse,
report_descriptor=REPORT_DESCRIPTOR,
|
examples/hid/device.py: Simplify.
usbcat is a better example of why SubprocessFunction may need to be
subclassed.
|
vpelletier_python-functionfs
|
train
|
b532b9f63e70d1cc1470d3893784bf96c89bd259
|
diff --git a/src/ParseMenu.php b/src/ParseMenu.php
index <HASH>..<HASH> 100644
--- a/src/ParseMenu.php
+++ b/src/ParseMenu.php
@@ -159,7 +159,7 @@ class ParseMenu implements ParserInterface
}
// Add this item to the newly trimmed menu
- $path_menu[] = $item;
+ $path_menu[$item['menu_item_id']] = $item;
}
// Return the trimmed menu
@@ -230,7 +230,7 @@ class ParseMenu implements ParserInterface
}
// If in bounds, add the item to the new menu
- $slice_menu[] = $item;
+ $slice_menu[$item['menu_item_id']] = $item;
}
}
@@ -297,7 +297,7 @@ class ParseMenu implements ParserInterface
}
// Add this item into the menu being formed
- $full_menu[] = $item;
+ $full_menu[$item['menu_item_id']] = $item;
}
return $full_menu;
|
Set the key for the menu items as the menu_item_id to be able to traverse the array using the path
|
waynestate_parse-menu
|
train
|
13897762f2a699a8e23badf7a47ad052b27e87c1
|
diff --git a/benchmarks/bench.py b/benchmarks/bench.py
index <HASH>..<HASH> 100644
--- a/benchmarks/bench.py
+++ b/benchmarks/bench.py
@@ -410,9 +410,9 @@ def merge_v3(output, fmt, memory):
def merge_v4(output, fmt, memory):
- files = [r'test.mf4', ] * 2
+ files = [r'test.mf4', ] * 3
- with Timer('Merge 2 files',
+ with Timer('Merge 3 files',
'asammdf {} {} v4'.format(asammdf_version, memory),
fmt) as timer:
MDF.merge(files, memory=memory, outversion='4.10')
@@ -690,9 +690,9 @@ def merge_reader_v3_nodata(output, fmt):
def merge_reader_v4(output, fmt):
- files = [r'test.mf4', ] * 2
+ files = [r'test.mf4', ] * 3
- with Timer('Merge 2 files',
+ with Timer('Merge 3 files',
'mdfreader {} v4'.format(mdfreader_version),
fmt) as timer:
x1 = MDFreader(files[0])
@@ -700,14 +700,17 @@ def merge_reader_v4(output, fmt):
x2 = MDFreader(files[1])
x2.resample(0.01)
x1.merge_mdf(x2)
+ x2 = MDFreader(files[2])
+ x2.resample(0.01)
+ x1.merge_mdf(x2)
output.send([timer.output, timer.error])
def merge_reader_v4_compress(output, fmt):
- files = [r'test.mf4', ] * 2
- with Timer('Merge 2 files',
+ files = [r'test.mf4', ] * 3
+ with Timer('Merge 3 files',
'mdfreader {} compress v4'.format(mdfreader_version),
fmt) as timer:
x1 = MDFreader(files[0], compression='blosc')
@@ -715,13 +718,16 @@ def merge_reader_v4_compress(output, fmt):
x2 = MDFreader(files[1], compression='blosc')
x2.resample(0.01)
x1.merge_mdf(x2)
+ x2 = MDFreader(files[2])
+ x2.resample(0.01)
+ x1.merge_mdf(x2)
output.send([timer.output, timer.error])
def merge_reader_v4_nodata(output, fmt):
- files = [r'test.mf4', ] * 2
- with Timer('Merge 2 files',
+ files = [r'test.mf4', ] * 3
+ with Timer('Merge 3 files',
'mdfreader {} nodata v4'.format(mdfreader_version),
fmt) as timer:
x1 = MDFreader(files[0], no_data_loading=True)
@@ -729,6 +735,9 @@ def merge_reader_v4_nodata(output, fmt):
x2 = MDFreader(files[1], no_data_loading=True)
x2.resample(0.01)
x1.merge_mdf(x2)
+ x2 = MDFreader(files[2])
+ x2.resample(0.01)
+ x1.merge_mdf(x2)
output.send([timer.output, timer.error])
|
some cahnges to the benchmark script
|
danielhrisca_asammdf
|
train
|
be49a955c1ced37f3720d5410443e3ceafe946f3
|
diff --git a/libraries/joomla/application/application.php b/libraries/joomla/application/application.php
index <HASH>..<HASH> 100644
--- a/libraries/joomla/application/application.php
+++ b/libraries/joomla/application/application.php
@@ -673,13 +673,14 @@ class JApplication extends JObject
// Get the global JAuthentication object.
jimport('joomla.user.authentication');
- $response = JAuthentication::authenticate($credentials, $options);
+ $authenticate = JAuthentication::getInstance();
+ $response = $authenticate->authenticate($credentials, $options);
if ($response->status === JAuthentication::STATUS_SUCCESS)
{
// validate that the user should be able to login (different to being authenticated)
// this permits authentication plugins blocking the user
- $authorisations = JAuthentication::authorise($response, $options);
+ $authorisations = $authenticate->authorise($response, $options);
foreach ($authorisation as $authorisation)
{
$denied_states = Array(JAuthentication::STATUS_EXPIRED, JAuthentication::STATUS_DENIED);
diff --git a/tests/suite/joomla/user/JAuthenticationTest.php b/tests/suite/joomla/user/JAuthenticationTest.php
index <HASH>..<HASH> 100644
--- a/tests/suite/joomla/user/JAuthenticationTest.php
+++ b/tests/suite/joomla/user/JAuthenticationTest.php
@@ -92,9 +92,10 @@ class JAuthenticationTest extends PHPUnit_Framework_TestCase
*/
public function testAuthentication($input, $expect, $message)
{
+ $authenticate = JAuthentication::getInstance();
$this->assertEquals(
$expect,
- JAuthentication::authenticate($input),
+ $authenticate->authenticate($input),
$message
);
}
@@ -157,9 +158,10 @@ class JAuthenticationTest extends PHPUnit_Framework_TestCase
*/
public function testAuthorise($input, $expect, $message)
{
+ $authentication = JAuthentication::getInstance();
$this->assertEquals(
$expect,
- JAuthentication::authorise($input),
+ $authentication->authorise($input),
$message
);
}
diff --git a/tests/suite/joomla/user/TestStubs/JPluginHelper.php b/tests/suite/joomla/user/TestStubs/JPluginHelper.php
index <HASH>..<HASH> 100644
--- a/tests/suite/joomla/user/TestStubs/JPluginHelper.php
+++ b/tests/suite/joomla/user/TestStubs/JPluginHelper.php
@@ -2,12 +2,17 @@
class JPluginHelper
{
- public function getPlugin($type, $plugin = null)
- {
- require_once dirname(__FILE__).'/FakeAuthenticationPlugin.php';
- $testPlugin = new stdClass;
- $testPlugin->type = 'authentication';
- $testPlugin->name = 'fake';
- return array($testPlugin);
- }
-}
\ No newline at end of file
+ public function getPlugin($type, $plugin = null)
+ {
+ require_once dirname(__FILE__).'/FakeAuthenticationPlugin.php';
+ $testPlugin = new stdClass;
+ $testPlugin->type = 'authentication';
+ $testPlugin->name = 'fake';
+ return array($testPlugin);
+ }
+
+ public function importPlugin()
+ {
+ // :)
+ }
+}
|
Turn JAuthentication back from static class and updated tests accordingly to use non-static version
|
joomla_joomla-framework
|
train
|
08547379bd9a1446a6701efa8f8cedf64481a97b
|
diff --git a/src/Renderers/Bootstrap3.php b/src/Renderers/Bootstrap3.php
index <HASH>..<HASH> 100644
--- a/src/Renderers/Bootstrap3.php
+++ b/src/Renderers/Bootstrap3.php
@@ -709,7 +709,7 @@ class Bootstrap3 extends Base
*/
private function hasErrors(Field $control)
{
- foreach ($this->getErrorMessages($control->getName()) as $message)
+ foreach ($this->getErrorMessages($control->getKey()) as $message)
{
return true;
}
@@ -723,14 +723,14 @@ class Bootstrap3 extends Base
*/
private function renderErrors(Field $control)
{
- foreach ($this->getErrorMessages($control->getName()) as $message)
+ foreach ($this->getErrorMessages($control->getKey()) as $message)
{
- $name = str_replace('_', ' ', Str::snake($control->getName()));
+ $name = str_replace('_', ' ', Str::snake($control->getKey()));
$label = $control->getLabel() ? $control->getLabel() : $control->getPlaceholder();
$message = str_replace($name, $label, $message);
// Return only first error
- return $this->html()->tag('label', ['for' => $control->getName(), 'class' => 'error'], $message);
+ return $this->html()->tag('label', ['for' => $control->getKey(), 'class' => 'error'], $message);
}
return '';
diff --git a/src/Renderers/Foundation5.php b/src/Renderers/Foundation5.php
index <HASH>..<HASH> 100644
--- a/src/Renderers/Foundation5.php
+++ b/src/Renderers/Foundation5.php
@@ -669,7 +669,7 @@ class Foundation5 extends Base
*/
private function hasErrors(Field $control)
{
- foreach ($this->getErrorMessages($control->getName()) as $message)
+ foreach ($this->getErrorMessages($control->getKey()) as $message)
{
return true;
}
@@ -683,9 +683,9 @@ class Foundation5 extends Base
*/
private function renderErrors(Field $control)
{
- foreach ($this->getErrorMessages($control->getName()) as $message)
+ foreach ($this->getErrorMessages($control->getKey()) as $message)
{
- $name = str_replace('_', ' ', Str::snake($control->getName()));
+ $name = str_replace('_', ' ', Str::snake($control->getKey()));
$label = $control->getLabel() ? $control->getLabel() : $control->getPlaceholder();
$message = str_replace($name, $label, $message);
|
Fixed validation messages for nested control names.
|
inkvizytor_FluentForm
|
train
|
0dcb3da35a2b161e494b4738dd17b9e61bf0ae05
|
diff --git a/jasper.py b/jasper.py
index <HASH>..<HASH> 100755
--- a/jasper.py
+++ b/jasper.py
@@ -18,7 +18,10 @@ if not jasper_home or not os.path.exists(jasper_home):
sys.exit(0)
# Change CWD to $JASPER_HOME/jasper/client
-os.chdir(os.path.join(os.getenv("JASPER_HOME"), "jasper" , "client"))
+client_path = os.path.join(os.getenv("JASPER_HOME"), "jasper" , "client")
+os.chdir(client_path)
+# Add $JASPER_HOME/jasper/client to sys.path
+sys.path.append(client_path)
# Set $LD_LIBRARY_PATH
os.environ["LD_LIBRARY_PATH"] = "/usr/local/lib"
|
Add client folder to sys.path
Otherwise this would cause bugs in client.brain
|
benhoff_vexbot
|
train
|
9a501ac7b1f1a8c02e87dc75299c034a8cb0ae62
|
diff --git a/lib/committee/middleware/response_validation.rb b/lib/committee/middleware/response_validation.rb
index <HASH>..<HASH> 100644
--- a/lib/committee/middleware/response_validation.rb
+++ b/lib/committee/middleware/response_validation.rb
@@ -8,17 +8,12 @@ module Committee::Middleware
def call(env)
status, headers, response = @app.call(env)
request = Rack::Request.new(env)
- link_schema, type_schema =
+ link, _ =
@router.routes_request?(request, prefix: @prefix)
- if type_schema
+ if link
check_content_type!(headers)
str = response.reduce("") { |str, s| str << s }
- Committee::ResponseValidator.new(
- MultiJson.decode(str),
- @schema,
- link_schema,
- type_schema
- ).call
+ Committee::ResponseValidator.new(link).call(MultiJson.decode(str))
end
[status, headers, response]
rescue Committee::InvalidResponse
diff --git a/lib/committee/response_validator.rb b/lib/committee/response_validator.rb
index <HASH>..<HASH> 100644
--- a/lib/committee/response_validator.rb
+++ b/lib/committee/response_validator.rb
@@ -1,22 +1,17 @@
module Committee
class ResponseValidator
- def initialize(data, schema, link, type_schema)
- @data = data
- @schema = schema
+ def initialize(link)
@link = link
- @type_schema = type_schema
- @validator = JsonSchema::Validator.new(@type_schema)
+ @validator = JsonSchema::Validator.new(link.parent)
end
- def call
- data = if @link.rel == "instances"
- if !@data.is_a?(Array)
+ def call(data)
+ if @link.rel == "instances"
+ if !data.is_a?(Array)
raise InvalidResponse, "List endpoints must return an array of objects."
end
# only consider the first object during the validation from here on
- @data[0]
- else
- @data
+ data = data[0]
end
if !@validator.validate(data)
diff --git a/test/response_validator_test.rb b/test/response_validator_test.rb
index <HASH>..<HASH> 100644
--- a/test/response_validator_test.rb
+++ b/test/response_validator_test.rb
@@ -39,11 +39,6 @@ describe Committee::ResponseValidator do
private
def call
- Committee::ResponseValidator.new(
- @data,
- @schema,
- @link,
- @type_schema
- ).call
+ Committee::ResponseValidator.new(@link).call(@data)
end
end
|
Improve API for `ResponseValidator`
|
interagent_committee
|
train
|
8a2f6363d67cb78ebd8756e092a0e9b19b2b3e37
|
diff --git a/azurerm/resource_arm_network_interface.go b/azurerm/resource_arm_network_interface.go
index <HASH>..<HASH> 100644
--- a/azurerm/resource_arm_network_interface.go
+++ b/azurerm/resource_arm_network_interface.go
@@ -90,7 +90,6 @@ func resourceArmNetworkInterface() *schema.Resource {
Type: schema.TypeString,
Optional: true,
Default: string(network.IPv4),
- ForceNew: true,
ValidateFunc: validation.StringInSlice([]string{
string(network.IPv4),
string(network.IPv6),
|
Fix bug for Network Interface (#<I>)
|
terraform-providers_terraform-provider-azurerm
|
train
|
a385186b12ee7023daf777208ff071c146681a7a
|
diff --git a/lib/rack/mauth.rb b/lib/rack/mauth.rb
index <HASH>..<HASH> 100644
--- a/lib/rack/mauth.rb
+++ b/lib/rack/mauth.rb
@@ -96,11 +96,26 @@ module Medidata
@self_app_uuid, @self_private_key = config[:app_uuid], config[:private_key]
@should_authenticate_check = config[:should_authenticate_check]
+ @logger = config[:logger]
end
+ attr_writer :logger
+ # return a logger - if #logger= has set one, then that; if Rails is defined, use its logger;
+ # otherwise, a dummy that logs to /dev/null
+ def logger
+ @logger ||= begin
+ if Object.const_defined?('Rails')
+ Rails.logger
+ else
+ require 'logger'
+ ::Logger.new(File.open('/dev/null', File::WRONLY))
+ end
+ end
+ end
+
# Write to log
def log(str_to_log)
- Rails.logger.info("rack-mauth: " + str_to_log) if can_log?
+ logger.info("rack-mauth: " + str_to_log)
end
protected
@@ -113,11 +128,6 @@ module Medidata
raise ArgumentError, "mauth_baseurl: #{@mauth_baseurl} in not a valid uri"
end
end
-
- # Can we write to the Rails log
- def can_log?
- @can_log ||= (defined?(Rails) && Rails.respond_to?(:logger))
- end
end # of MAuthMiddleware
# Manages cached MAuth verifiers for use in local authentication
@@ -128,7 +138,7 @@ module Medidata
@config = config
@cached_verifiers_mutex = Mutex.new
@cached_verifiers = {}
- @mauth_signer_for_self = MAuth::Signer.new(:private_key => @config.self_private_key)
+ @mauth_signer_for_self = MAuth::Signer.new(:private_key => @config.self_private_key, :logger => @config.logger)
end
# Rack-mauth does its own authentication
|
make the logger configurable, defaulting to Rails' logger if none is set.
|
mdsol_mauth-client-ruby
|
train
|
3ac91a29d426ddb243442499f6e257cb52b4ba55
|
diff --git a/sentry/client/base.py b/sentry/client/base.py
index <HASH>..<HASH> 100644
--- a/sentry/client/base.py
+++ b/sentry/client/base.py
@@ -69,6 +69,21 @@ class SentryClient(object):
for k in ('url', 'view', 'data'):
if k not in kwargs:
kwargs[k] = record.__dict__.get(k)
+
+ request = getattr(record, 'request', None)
+ if request:
+ if not kwargs.get('data'):
+ kwargs['data'] = {}
+ kwargs['data'].update(dict(
+ META=request.META,
+ POST=request.POST,
+ GET=request.GET,
+ COOKIES=request.COOKIES,
+ ))
+
+ if not kwargs.get('url'):
+ kwargs['url'] = request.build_absolute_uri()
+
kwargs.update({
'logger': record.name,
'level': record.levelno,
diff --git a/sentry/tests/tests.py b/sentry/tests/tests.py
index <HASH>..<HASH> 100644
--- a/sentry/tests/tests.py
+++ b/sentry/tests/tests.py
@@ -28,10 +28,14 @@ from sentry import conf
from sentry.helpers import transform
from sentry.models import Message, GroupedMessage
from sentry.client.base import SentryClient
+from sentry.client.handlers import SentryHandler
from sentry.client.models import sentry_exception_handler, get_client
from models import TestModel, DuplicateKeyModel
+logger = logging.getLogger('sentry.test')
+logger.addHandler(SentryHandler())
+
class TestServerThread(threading.Thread):
"""Thread for running a http server while tests are running."""
@@ -104,7 +108,6 @@ class SentryTestCase(TestCase):
def setUpHandler(self):
self.tearDownHandler()
- from sentry.client.handlers import SentryHandler
logger = logging.getLogger()
self._handlers = logger.handlers
@@ -534,6 +537,15 @@ class SentryTestCase(TestCase):
self.assertEquals(last.view, 'sentry-tests/error.html')
+ def testRequestInLogging(self):
+ resp = self.client.get(reverse('sentry-log-request-exc'))
+ self.assertEquals(resp.status_code, 200)
+
+ last = Message.objects.get()
+
+ self.assertEquals(last.view, 'sentry.tests.views.logging_request_exc')
+ self.assertEquals(last.data['META']['REMOTE_ADDR'], '127.0.0.1')
+
class SentryViewsTest(TestCase):
urls = 'sentry.tests.urls'
fixtures = ['sentry/tests/fixtures/views.json']
@@ -552,7 +564,6 @@ class SentryViewsTest(TestCase):
def setUpHandler(self):
self.tearDownHandler()
- from sentry.client.handlers import SentryHandler
logger = logging.getLogger()
self._handlers = logger.handlers
diff --git a/sentry/tests/urls.py b/sentry/tests/urls.py
index <HASH>..<HASH> 100644
--- a/sentry/tests/urls.py
+++ b/sentry/tests/urls.py
@@ -9,5 +9,6 @@ urlpatterns = patterns('',
url(r'^trigger-500-decorated$', 'sentry.tests.views.decorated_raise_exc', name='sentry-raise-exc-decor'),
url(r'^trigger-500-django$', 'sentry.tests.views.django_exc', name='sentry-django-exc'),
url(r'^trigger-500-template$', 'sentry.tests.views.template_exc', name='sentry-template-exc'),
+ url(r'^trigger-500-log-request$', 'sentry.tests.views.logging_request_exc', name='sentry-log-request-exc'),
url(r'', include('sentry.urls')),
)
\ No newline at end of file
diff --git a/sentry/tests/views.py b/sentry/tests/views.py
index <HASH>..<HASH> 100644
--- a/sentry/tests/views.py
+++ b/sentry/tests/views.py
@@ -1,5 +1,9 @@
+from django.http import HttpResponse
from django.shortcuts import get_object_or_404, render_to_response
+import logging
+import sys
+
def django_exc(request):
return get_object_or_404(Exception, pk=1)
@@ -10,4 +14,12 @@ def decorated_raise_exc(request):
return raise_exc(request)
def template_exc(request):
- return render_to_response('sentry-tests/error.html')
\ No newline at end of file
+ return render_to_response('sentry-tests/error.html')
+
+def logging_request_exc(request):
+ logger = logging.getLogger('sentry.test')
+ try:
+ raise Exception(request.GET.get('message', 'view exception'))
+ except Exception, e:
+ logger.error(e, exc_info=sys.exc_info(), extra={'request': request})
+ return HttpResponse('')
\ No newline at end of file
|
Logging now supports passing in the request object via extra
|
elastic_apm-agent-python
|
train
|
7747bac60c164a7e1aa630e06aa13e7d92056756
|
diff --git a/protempa-test-suite/src/test/java/org/protempa/test/ProtempaTest.java b/protempa-test-suite/src/test/java/org/protempa/test/ProtempaTest.java
index <HASH>..<HASH> 100644
--- a/protempa-test-suite/src/test/java/org/protempa/test/ProtempaTest.java
+++ b/protempa-test-suite/src/test/java/org/protempa/test/ProtempaTest.java
@@ -88,10 +88,24 @@ public class ProtempaTest {
* The ground truth for the number of propositions for each key pulled from
* the sample data
*/
- private static final String TRUTH_PROP_COUNTS_FILE = TRUTH_DIR
+ private static final String PROP_COUNTS_FILE = TRUTH_DIR
+ "/db-proposition-counts.txt";
/**
+ * The ground truth for the number of forward derivations for each key
+ * processed from the sample data
+ */
+ private static final String FORWARD_DERIVATION_COUNTS_FILE = TRUTH_DIR
+ + "/forward-derivations.txt";
+
+ /**
+ * The ground truth for the number of backward derivations for each key
+ * processed from the sample data
+ */
+ private static final String BACKWARD_DERIVATION_COUNTS_FILE = TRUTH_DIR
+ + "/backward-derivations.txt";
+
+ /**
* Where to keep the persistent stores
*/
private static final String STORE_HOME = "src/test/resources/store";
@@ -228,7 +242,7 @@ public class ProtempaTest {
@Test
public void testProtempa() {
testRetrieveDataAndPersist();
- // testProcessResultsAndPersist();
+ testProcessResultsAndPersist();
// testOutputResults();
}
@@ -292,8 +306,9 @@ public class ProtempaTest {
assertEquals("data not expected size", 512, results.size());
Map<String, Integer> propCounts = getResultCounts(TRUTH_PROP_COUNTS_FILE);
for (Entry<String, List<Proposition>> r : results.entrySet()) {
- assertEquals("propositions for key " + r.getKey()
- + " not expected", propCounts.get(r.getKey()), r.getValue().size());
+ assertEquals("propositions for key " + r.getKey()
+ + " not expected", propCounts.get(r.getKey()), r
+ .getValue().size());
}
} catch (FinderException ex) {
ex.printStackTrace();
@@ -334,12 +349,17 @@ public class ProtempaTest {
Arrays.asSet(PROP_IDS), null, RETRIEVAL_STORE_NAME);
assertEquals("wrong number of working memories", 512,
results.size());
+ Map<String, Integer> forwardDerivCounts = getResultCounts(FORWARD_DERIVATION_COUNTS_FILE);
+ Map<String, Integer> backwardDerivCounts = getResultCounts(BACKWARD_DERIVATION_COUNTS_FILE);
for (Entry<String, WorkingMemory> r : results.entrySet()) {
- // assertEquals("wrong number of forward derivations", EXPECTED,
- // afh.getForwardDerivations(r.getKey()));
- // assertEquals("wrong number of backward derivations",
- // EXPECTED,
- // afh.getBackwardDerivations(r.getKey()));
+ assertEquals(
+ "wrong number of forward derivations for key "
+ + r.getKey(),
+ forwardDerivCounts.get(r.getKey()), afh
+ .getForwardDerivations(r.getKey()).size());
+ assertEquals("wrong number of backward derivations for key "
+ + r.getKey(), backwardDerivCounts.get(r.getKey()), afh
+ .getBackwardDerivations(r.getKey()).size());
}
} catch (KnowledgeSourceReadException e) {
e.printStackTrace();
@@ -350,6 +370,12 @@ public class ProtempaTest {
} catch (ProtempaException e) {
e.printStackTrace();
fail(AF_ERROR_MSG);
+ } catch (NumberFormatException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ } catch (IOException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
} finally {
afh.cleanUp();
if (results != null) {
|
added checks against the number of forward and backward derivations for each key
|
eurekaclinical_protempa
|
train
|
f71b83ffef005ccde06b4e67fcf96348ff9f81e7
|
diff --git a/plugins/svg.php b/plugins/svg.php
index <HASH>..<HASH> 100644
--- a/plugins/svg.php
+++ b/plugins/svg.php
@@ -203,7 +203,7 @@ function svg_generator ($input, $fn_name) {
// Resolve the type.
// Bail if type not recognised.
- $type = isset($raw_data['type']) ? strtolower($raw_data['type']) : 'rect';
+ $type = isset($raw_data['type']) ? strtolower($raw_data['type']) : 'path';
if (! isset($schemas[$type])) {
return '';
@@ -549,7 +549,7 @@ function svg_starpath ($cx, $cy, $points, $outer_r, $inner_r = null, $twist = 0,
$y = ( $outer_r * sin($outer_angle) ) + $cy;
if ($points != $s) {
- $d[] = "$x,$y";
+ $d[] = "$x $y";
}
// If star shape is required need inner angles too.
@@ -565,11 +565,11 @@ function svg_starpath ($cx, $cy, $points, $outer_r, $inner_r = null, $twist = 0,
$ix = ( $inner_r * cos($inner_angle) ) + $cx;
$iy = ( $inner_r * sin($inner_angle) ) + $cy;
- $d[] = "$ix,$iy";
+ $d[] = "$ix $iy";
}
}
- return 'M' . implode(' ', $d) . 'Z';
+ return 'M' . implode('L', $d) . 'Z';
}
function svg_apply_filters ($element) {
@@ -721,7 +721,6 @@ function svg_render ($element) {
$styles_data = array(
'@font-face' => $element->face_styles,
'svg' => $element->svg_styles,
- $element->tag => $element->styles,
);
foreach ($styles_data as $selector => $declarations) {
if ($declarations) {
@@ -735,24 +734,46 @@ function svg_render ($element) {
$styles = CssCrush::$process->tokens->restore($styles, 'u', true);
$styles = CssCrush::$process->tokens->restore($styles, 's');
- $attrs = Util::htmlAttributes($element->attrs);
+ // Add element styles as attributes which tend to work better with svg2png converters.
+ $attrs = Util::htmlAttributes($element->attrs + $element->styles);
+
+ // Add viewbox to help IE scale correctly.
+ if (isset($element->svg_attrs['width']) && isset($element->svg_attrs['height'])) {
+ $element->svg_attrs += array(
+ 'viewbox' => implode(' ', array(
+ 0,
+ 0,
+ $element->svg_attrs['width'],
+ $element->svg_attrs['height']
+ )),
+ );
+ }
$svg_attrs = Util::htmlAttributes($element->svg_attrs);
// Markup.
$svg[] = "<svg$svg_attrs>";
- $svg[] = '<defs>';
- $svg[] = implode($element->fills['gradients']);
- $svg[] = implode($element->fills['patterns']);
- $svg[] = implode($element->filters);
- if ($styles) {
- $cdata = preg_match('~[<>&]~', $styles);
- $svg[] = '<style type="text/css">';
- $svg[] = $cdata ? '<![CDATA[' : '';
- $svg[] = $styles;
- $svg[] = $cdata ? ']]>' : '';
- $svg[] = '</style>';
+
+ if (
+ $element->fills['gradients'] ||
+ $element->fills['patterns'] ||
+ $element->filters ||
+ $styles
+ ) {
+ $svg[] = '<defs>';
+ $svg[] = implode($element->fills['gradients']);
+ $svg[] = implode($element->fills['patterns']);
+ $svg[] = implode($element->filters);
+ if ($styles) {
+ $cdata = preg_match('~[<>&]~', $styles);
+ $svg[] = '<style type="text/css">';
+ $svg[] = $cdata ? '<![CDATA[' : '';
+ $svg[] = $styles;
+ $svg[] = $cdata ? ']]>' : '';
+ $svg[] = '</style>';
+ }
+ $svg[] = '</defs>';
}
- $svg[] = '</defs>';
+
if ($element->tag === 'text') {
$svg[] = "<text$attrs>{$element->data['text']}</text>";
}
|
Some tweaks to SVG output to better support common svg2png convertors.
Changed default SVG type from 'rect' to 'path'.
|
peteboere_css-crush
|
train
|
4c337dc9a89dfb549a3c2cb62893d74129582143
|
diff --git a/flink-connectors/flink-connector-kafka-base/src/test/java/org/apache/flink/streaming/connectors/kafka/KafkaConsumerTestBase.java b/flink-connectors/flink-connector-kafka-base/src/test/java/org/apache/flink/streaming/connectors/kafka/KafkaConsumerTestBase.java
index <HASH>..<HASH> 100644
--- a/flink-connectors/flink-connector-kafka-base/src/test/java/org/apache/flink/streaming/connectors/kafka/KafkaConsumerTestBase.java
+++ b/flink-connectors/flink-connector-kafka-base/src/test/java/org/apache/flink/streaming/connectors/kafka/KafkaConsumerTestBase.java
@@ -2121,32 +2121,6 @@ public abstract class KafkaConsumerTestBase extends KafkaTestBase {
return result;
}
- private static void printTopic(String topicName, ConsumerConfig config,
- DeserializationSchema<?> deserializationSchema,
- int stopAfter) throws IOException {
-
- List<MessageAndMetadata<byte[], byte[]>> contents = readTopicToList(topicName, config, stopAfter);
- LOG.info("Printing contents of topic {} in consumer grouo {}", topicName, config.groupId());
-
- for (MessageAndMetadata<byte[], byte[]> message: contents) {
- Object out = deserializationSchema.deserialize(message.message());
- LOG.info("Message: partition: {} offset: {} msg: {}", message.partition(), message.offset(), out.toString());
- }
- }
-
- private static void printTopic(String topicName, int elements, DeserializationSchema<?> deserializer)
- throws IOException {
- // write the sequence to log for debugging purposes
- Properties newProps = new Properties(standardProps);
- newProps.setProperty("group.id", "topic-printer" + UUID.randomUUID().toString());
- newProps.setProperty("auto.offset.reset", "smallest");
- newProps.setProperty("zookeeper.connect", standardProps.getProperty("zookeeper.connect"));
- newProps.putAll(secureProps);
-
- ConsumerConfig printerConfig = new ConsumerConfig(newProps);
- printTopic(topicName, printerConfig, deserializer, elements);
- }
-
private static class BrokerKillingMapper<T> extends RichMapFunction<T, T>
implements ListCheckpointed<Integer>, CheckpointListener {
|
[hotfix][tests] Remove unused methods in KafkaConsumerTestBase
|
apache_flink
|
train
|
5bd91ec82386836497094c377197023fdf121526
|
diff --git a/tests/NinjaMutex/Lock/LockTest.php b/tests/NinjaMutex/Lock/LockTest.php
index <HASH>..<HASH> 100644
--- a/tests/NinjaMutex/Lock/LockTest.php
+++ b/tests/NinjaMutex/Lock/LockTest.php
@@ -168,6 +168,7 @@ class LockTest extends AbstractTest
* @medium Timeout for test increased to ~5s http://stackoverflow.com/a/10535787/916440
* @runInSeparateProcess
*
+ * @expectedException PHPUnit_Framework_Error
* @dataProvider lockFabricWithExpirationProvider
* @param LockFabricWithExpirationInterface $lockFabricWithExpiration
*/
@@ -194,15 +195,14 @@ class LockTest extends AbstractTest
$this->assertTrue($lockImplementor->releaseLock($name, 0));
// Now we set null to the Mutex with lock expiration to invoke __destructor
- try {
- $lockImplementorWithExpiration = null;
- } catch (UnrecoverableMutexException $e) {
- // hhvm doesn't throw an exception here, it rather raises a fatal error,
- // so I can't check here if Exception was really raised for all builds.
- // Looks like I should always raise fatal error in __destructor for all versions rather than trying to raise exception
- // https://github.com/facebook/hhvm/blob/af329776c9f740cc1c8c4791f673ba5aa49042ce/hphp/doc/inconsistencies#L40-L48
- // http://docs.hhvm.com/manual/en/language.oop5.decon.php#language.oop5.decon.destructor
- // https://github.com/sebastianbergmann/phpunit/issues/1640
- }
+ // which throws UnrecoverableMutexException
+ $lockImplementorWithExpiration = null;
+
+ // hhvm doesn't throw an exception here, it rather raises a fatal error,
+ // so I can't check here if Exception was really raised for all builds but I can check if script ended with Fatal Error.
+ // Looks like I should always raise fatal error in __destructor for all versions rather than trying to raise exception
+ // https://github.com/facebook/hhvm/blob/af329776c9f740cc1c8c4791f673ba5aa49042ce/hphp/doc/inconsistencies#L40-L48
+ // http://docs.hhvm.com/manual/en/language.oop5.decon.php#language.oop5.decon.destructor
+ // https://github.com/sebastianbergmann/phpunit/issues/1640
}
}
|
Let's finally try to expect Fatal Error
|
arvenil_ninja-mutex
|
train
|
10a63e2a0cbdc1e8e4d7aef87afc868bd599ca94
|
diff --git a/actionpack/lib/action_controller/vendor/html-scanner/html/node.rb b/actionpack/lib/action_controller/vendor/html-scanner/html/node.rb
index <HASH>..<HASH> 100644
--- a/actionpack/lib/action_controller/vendor/html-scanner/html/node.rb
+++ b/actionpack/lib/action_controller/vendor/html-scanner/html/node.rb
@@ -38,18 +38,14 @@ module HTML #:nodoc:
private
def keys_to_strings(hash)
- hash.keys.inject({}) do |h,k|
- h[k.to_s] = hash[k]
- h
- end
+ Hash[hash.keys.map {|k| [k.to_s, hash[k]]}]
end
def keys_to_symbols(hash)
- hash.keys.inject({}) do |h,k|
+ Hash[hash.keys.map do |k|
raise "illegal key #{k.inspect}" unless k.respond_to?(:to_sym)
- h[k.to_sym] = hash[k]
- h
- end
+ [k.to_sym, hash[k]]
+ end]
end
end
|
Refactor methods in html node to avoid injects.
|
rails_rails
|
train
|
cb7a289361c846de28e14119a1bed3b7a673c5a0
|
diff --git a/tools/static_require.js b/tools/static_require.js
index <HASH>..<HASH> 100644
--- a/tools/static_require.js
+++ b/tools/static_require.js
@@ -255,7 +255,7 @@ exports.handle = function(req, res, options) {
}
res.writeHead(200, {
"Content-Type": 'application/javascript',
- "Content-Length": code.length
+ "Content-Length": Buffer.byteLength(code.length, 'utf8')
});
res.end(req.method === "HEAD" ? "" : code);
};
\ No newline at end of file
|
Content-Length should be calculated using Buffer. Otherwise some files got cut off.
|
voloko_uki
|
train
|
2849d056f9c927968ad4492f5d371d4a8a264a58
|
diff --git a/spec/opal/core/runtime/eval_spec.rb b/spec/opal/core/runtime/eval_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/opal/core/runtime/eval_spec.rb
+++ b/spec/opal/core/runtime/eval_spec.rb
@@ -1,5 +1,5 @@
describe "Opal.eval()" do
it "evaluates ruby code by compiling it to javascript and running" do
- `Opal.eval("'foo'.class")`.should == String
+ `Opal['eval']("'foo'.class")`.should == String
end
end
|
JSHint: do not use reserved keywords for identifiers (specs)
|
opal_opal
|
train
|
5a2d0a691abf88edea3fbd5a822811d779efac80
|
diff --git a/assess_update_mongo.py b/assess_update_mongo.py
index <HASH>..<HASH> 100755
--- a/assess_update_mongo.py
+++ b/assess_update_mongo.py
@@ -12,6 +12,7 @@ import sys
from deploy_stack import (
BootstrapManager,
)
+from remote import remote_from_address
from utility import (
add_basic_testing_arguments,
configure_logging,
@@ -20,15 +21,20 @@ from utility import (
log = logging.getLogger("assess_update_mongo")
+# The synlinks are shims while we wait for new packaging.
DEP_SCRIPT = """\
export DEBIAN_FRONTEND=noninteractive
+sudo apt-get update
+sudo apt-get install -y software-properties-common
sudo apt-add-repository -y ppa:juju/experimental
sudo apt-get update
sudo apt-get install -y juju-mongodb2.6 juju-mongodb3.2 juju-mongo-tools3.2
+sudo ln -s /usr/lib/juju/mongodb2.6 /usr/lib/juju/mongo2.6
+sudo ln -s /usr/lib/juju/mongodb3.2 /usr/lib/juju/mongo3
"""
-def assess_update_mongo(client, series):
+def assess_update_mongo(client, series, bootstrap_host):
charm = 'local:{}/ubuntu'.format(series)
log.info("Setting up test.")
client.deploy(charm)
@@ -37,6 +43,8 @@ def assess_update_mongo(client, series):
log.info("Test started.")
# Instrument the case where Juju can install the new mongo packages from
# Ubuntu.
+ remote = remote_from_address(bootstrap_host, series=series)
+ remote.run(DEP_SCRIPT)
client.upgrade_mongo()
# Wait for upgrade
# Verify mongo 3 runs on the server
@@ -57,7 +65,8 @@ def main(argv=None):
configure_logging(args.verbose)
bs_manager = BootstrapManager.from_args(args)
with bs_manager.booted_context(args.upload_tools):
- assess_update_mongo(bs_manager.client, args.series)
+ assess_update_mongo(
+ bs_manager.client, args.series, bs_manager.bootstrap_host)
return 0
diff --git a/tests/test_assess_update_mongo.py b/tests/test_assess_update_mongo.py
index <HASH>..<HASH> 100644
--- a/tests/test_assess_update_mongo.py
+++ b/tests/test_assess_update_mongo.py
@@ -6,10 +6,11 @@ import StringIO
from assess_update_mongo import (
assess_update_mongo,
+ DEP_SCRIPT,
parse_args,
main,
)
-from tests.test_jujupy import FakeJujuClient
+from test_jujupy import FakePopen
from tests import (
parse_error,
TestCase,
@@ -57,7 +58,8 @@ class TestMain(TestCase):
mock_e.assert_called_once_with("an-env")
mock_c.assert_called_once_with(env, "/bin/juju", debug=False)
self.assertEqual(mock_bc.call_count, 1)
- mock_assess.assert_called_once_with(client, 'trusty')
+ # This should verify bs_manager.bootstrap_host was passed, not None.
+ mock_assess.assert_called_once_with(client, 'trusty', None)
class TestAssess(TestCase):
@@ -65,8 +67,13 @@ class TestAssess(TestCase):
def test_update_mongo(self):
mock_client = Mock(
spec=["juju", "wait_for_started", "deploy", "upgrade_mongo"])
- assess_update_mongo(mock_client, 'trusty')
+ mock_remote = Mock(spec=['run'])
+ with patch('assess_update_mongo.remote_from_address',
+ autospec=True, return_value=mock_remote) as r_mock:
+ assess_update_mongo(mock_client, 'trusty', '10.0.0.2')
mock_client.deploy.assert_called_once_with('local:trusty/ubuntu')
mock_client.wait_for_started.assert_called_once_with()
mock_client.upgrade_mongo.assert_called_once_with()
+ r_mock.assert_called_once_with('10.0.0.2', series='trusty')
+ mock_remote.run.assert_called_once_with(DEP_SCRIPT)
self.assertNotIn("TODO", self.log_stream.getvalue())
diff --git a/tests/test_jujupy.py b/tests/test_jujupy.py
index <HASH>..<HASH> 100644
--- a/tests/test_jujupy.py
+++ b/tests/test_jujupy.py
@@ -662,6 +662,9 @@ class FakePopen(object):
self.returncode = self._code
return self._out, self._err
+ def poll(self):
+ return self._code
+
class TestEnvJujuClient(ClientTest):
|
Added DEP_script run via remote.
|
juju_juju
|
train
|
9edcc2db43c84b31862ad9d619ce43c249c6b8ed
|
diff --git a/src/pipeline/container.go b/src/pipeline/container.go
index <HASH>..<HASH> 100644
--- a/src/pipeline/container.go
+++ b/src/pipeline/container.go
@@ -2,7 +2,6 @@ package pipeline
import (
"io"
- "strings"
"github.com/fsouza/go-dockerclient"
)
@@ -41,14 +40,14 @@ func stopContainer(id string) error {
}
func pullImage(image string) error {
- repo_tag := strings.Split(image, ":")
+ repository, tag := docker.ParseRepositoryTag(image)
client, err := docker.NewClientFromEnv()
if err != nil {
return err
}
- opts := docker.PullImageOptions{Repository: repo_tag[0], Tag: "latest"}
- if len(repo_tag) == 2 {
- opts.Tag = repo_tag[1]
+ opts := docker.PullImageOptions{Repository: repository, Tag: "latest"}
+ if tag != "" {
+ opts.Tag = tag
}
return client.PullImage(opts, docker.AuthConfiguration{})
}
@@ -91,7 +90,7 @@ func waitContainer(id string) (int, error) {
}
func isImageLocal(image string) bool {
- repository := strings.Split(image, ":")
+ repository, _ := docker.ParseRepositoryTag(image)
client, err := docker.NewClientFromEnv()
if err != nil {
return false
@@ -102,7 +101,7 @@ func isImageLocal(image string) bool {
}
for _, image := range images {
- if image.ID == repository[0] {
+ if image.ID == repository {
return true
}
}
|
Use a go-dockerclient function to parse the image name
|
pachyderm_pachyderm
|
train
|
9d98ef698d3bb874c2809bb8bb07f8071f1e9669
|
diff --git a/gwpy/segments/tests/test_flag.py b/gwpy/segments/tests/test_flag.py
index <HASH>..<HASH> 100644
--- a/gwpy/segments/tests/test_flag.py
+++ b/gwpy/segments/tests/test_flag.py
@@ -519,17 +519,8 @@ class TestDataQualityFlag(object):
f2 = self.TEST_CLASS.read(fp)
utils.assert_flag_equal(f2, flag)
- @utils.skip_missing_dependency('glue.ligolw.lsctables')
- @pytest.mark.parametrize("ilwdchar_compat", [
- pytest.param(
- False,
- marks=utils.skip_missing_dependency("ligo.lw.lsctables"),
- ),
- pytest.param(
- True,
- marks=utils.skip_missing_dependency("glue.ligolw.lsctables"),
- ),
- ])
+ @utils.skip_missing_dependency('ligo.lw.lsctables')
+ @pytest.mark.parametrize("ilwdchar_compat", [False, True])
def test_read_write_ligolw(self, flag, ilwdchar_compat):
utils.test_read_write(
flag, "ligolw", extension="xml",
@@ -808,16 +799,8 @@ class TestDataQualityDict(object):
_read_write(autoidentify=True)
_read_write(autoidentify=True, write_kw={'overwrite': True})
- @pytest.mark.parametrize("ilwdchar_compat", [
- pytest.param(
- False,
- marks=utils.skip_missing_dependency("ligo.lw.lsctables"),
- ),
- pytest.param(
- True,
- marks=utils.skip_missing_dependency("glue.ligolw.lsctables"),
- ),
- ])
+ @utils.skip_missing_dependency('ligo.lw.lsctables')
+ @pytest.mark.parametrize("ilwdchar_compat", [False, True])
def test_read_write_ligolw(self, instance, ilwdchar_compat):
def _assert(a, b):
return utils.assert_dict_equal(a, b, utils.assert_flag_equal)
@@ -862,20 +845,8 @@ class TestDataQualityDict(object):
with pytest.raises(ValueError) as exc:
_read(on_missing='blah')
- @pytest.mark.parametrize("ilwdchar_compat", [
- pytest.param( # default `None` maps to `True` for now
- None,
- marks=utils.skip_missing_dependency("ligo.lw.lsctables"),
- ),
- pytest.param(
- False,
- marks=utils.skip_missing_dependency("ligo.lw.lsctables"),
- ),
- pytest.param(
- True,
- marks=utils.skip_missing_dependency("glue.ligolw.lsctables"),
- ),
- ])
+ @utils.skip_missing_dependency('ligo.lw.lsctables')
+ @pytest.mark.parametrize("ilwdchar_compat", [None, False, True])
def test_to_ligolw_tables(self, instance, ilwdchar_compat):
if ilwdchar_compat is None:
ctx = pytest.warns(PendingDeprecationWarning)
|
gwpy.segments: update test dependencies
LIGO_LW operations all require python-ligo-lw
|
gwpy_gwpy
|
train
|
5c76131fd352c67410fb9c1ba4dce1144644ea88
|
diff --git a/flask_appbuilder/console.py b/flask_appbuilder/console.py
index <HASH>..<HASH> 100644
--- a/flask_appbuilder/console.py
+++ b/flask_appbuilder/console.py
@@ -194,7 +194,7 @@ def babel_extract(config, input, output, target):
Babel, Extracts and updates all messages marked for translation
"""
click.echo(click.style('Starting Extractions config:{0} input:{1} output:{2}'.format(config, input, output), fg='green'))
- os.popen('pybabel extract -F {0} -k lazy_gettext -o {1} {2}'.format(config, output, input))
+ os.popen('pybabel extract -F {0} -k lazy_gettext -k gettext -o {1} {2}'.format(config, output, input))
click.echo(click.style('Starting Update target:{0}'.format(target), fg='green'))
os.popen('pybabel update -N -i {0} -d {1}'.format(output, target))
click.echo(click.style('Finish, you can start your translations', fg='green'))
|
fix babel-extract command
|
dpgaspar_Flask-AppBuilder
|
train
|
6d691e203a4f6cd8a001f236452521b4669ad340
|
diff --git a/src/main/java/com/cloudbees/jenkins/support/slowrequest/SlowRequestChecker.java b/src/main/java/com/cloudbees/jenkins/support/slowrequest/SlowRequestChecker.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/cloudbees/jenkins/support/slowrequest/SlowRequestChecker.java
+++ b/src/main/java/com/cloudbees/jenkins/support/slowrequest/SlowRequestChecker.java
@@ -73,7 +73,7 @@ public class SlowRequestChecker extends PeriodicWork {
long iota = System.currentTimeMillis();
- final long recurrencePeriosMillis = TimeUnit.SECONDS.toMillis(RECURRENCE_PERIOD_SEC);
+ final long recurrencePeriosMillis = TimeUnit.SECONDS.toMillis(RECURRENCE_PERIOD_SEC);
long thresholdMillis = recurrencePeriosMillis > THRESHOLD ?
recurrencePeriosMillis * 2 : THRESHOLD;
@@ -99,7 +99,7 @@ public class SlowRequestChecker extends PeriodicWork {
if (req.record.length() >= FileListCapComponent.MAX_FILE_SIZE)
continue;
- ThreadInfo lockedThread = ManagementFactory.getThreadMXBean().getThreadInfo(req.thread.getId());
+ ThreadInfo lockedThread = ManagementFactory.getThreadMXBean().getThreadInfo(req.thread.getId(), Integer.MAX_VALUE);
if (lockedThread != null ) {
w.println(lockedThread);
w.println(totalTime + "msec elapsed in " + lockedThread.getThreadName());
@@ -108,7 +108,7 @@ public class SlowRequestChecker extends PeriodicWork {
long lockOwnerId = lockedThread.getLockOwnerId();
if (lockOwnerId != -1) // If the thread is not locked, then getLockOwnerId returns -1.
{
- ThreadInfo threadInfo = ManagementFactory.getThreadMXBean().getThreadInfo(lockOwnerId);
+ ThreadInfo threadInfo = ManagementFactory.getThreadMXBean().getThreadInfo(lockOwnerId, Integer.MAX_VALUE);
w.println(threadInfo);
if (threadInfo != null) {
printThreadStackElements(threadInfo, w);
|
Fixed issue where no line were being published in the slow-requests. According to <URL>, the default getThreadInfo method returns a depth size of 0, so slow-requests were incomplete.
|
jenkinsci_support-core-plugin
|
train
|
e03c71a4558f872696e06168dc9b61508f9c7b8a
|
diff --git a/shared/desktop/package.desktop.js b/shared/desktop/package.desktop.js
index <HASH>..<HASH> 100644
--- a/shared/desktop/package.desktop.js
+++ b/shared/desktop/package.desktop.js
@@ -36,7 +36,8 @@ const argv = minimist(process.argv.slice(2), {string: ['appVersion']})
const appName = 'Keybase'
const shouldUseAsar = argv.asar || argv.a || false
const shouldBuildAll = argv.all || false
-const shouldBuildAnArch: string = (argv.arch: any)
+const arch = argv.arch ? argv.arch.toString() : os.arch()
+const platform = argv.platform ? argv.platform.toString() : os.platform()
const appVersion: string = (argv.appVersion: any) || '0.0.0'
const comment = argv.comment || ''
const outDir = argv.outDir || ''
@@ -135,15 +136,9 @@ function startPack() {
.catch(postPackError)
})
})
- } else if (shouldBuildAnArch) {
- // build for a specified arch on current platform only
- pack(os.platform(), shouldBuildAnArch)
- .then(postPack(os.platform(), shouldBuildAnArch))
- .catch(postPackError)
} else {
- // build for current platform only
- pack(os.platform(), os.arch())
- .then(postPack(os.platform(), os.arch()))
+ pack(platform, arch)
+ .then(postPack(platform, arch))
.catch(postPackError)
}
})
@@ -191,9 +186,13 @@ const postPackError = err => {
}
function postPack(plat, arch) {
- return filepath => {
+ return appPaths => {
+ if (!appPaths || appPaths.length === 0) {
+ console.log(`${plat}-${arch} finished with no app bundles`)
+ return
+ }
const subdir = plat === 'darwin' ? 'Keybase.app/Contents/Resources' : 'resources'
- const dir = path.join(filepath[0], subdir, 'app/desktop/dist')
+ const dir = path.join(appPaths[0], subdir, 'app/desktop/dist')
const files = ['index', 'main', 'component-loader'].map(p => p + '.bundle.js')
files.forEach(file => {
try {
|
Handle an empty app path in the packager (#<I>)
Also make it be able to build a particular arch and platform.
I used this to build bundles to test on Windows.
|
keybase_client
|
train
|
09cd35fbbfc01b96204866f62ac64d2b04b3b1ee
|
diff --git a/PHPCompatibility/Tests/Sniffs/PHP/RemovedAlternativePHPTagsSniffTest.php b/PHPCompatibility/Tests/Sniffs/PHP/RemovedAlternativePHPTagsSniffTest.php
index <HASH>..<HASH> 100644
--- a/PHPCompatibility/Tests/Sniffs/PHP/RemovedAlternativePHPTagsSniffTest.php
+++ b/PHPCompatibility/Tests/Sniffs/PHP/RemovedAlternativePHPTagsSniffTest.php
@@ -61,7 +61,7 @@ class RemovedAlternativePHPTagsSniffTest extends BaseSniffTest
public function testAlternativePHPTags($type, $snippet, $line)
{
if ($type === 'ASP' && self::$aspTags === false) {
- $this->markTestSkipped();
+ $this->markTestSkipped('ASP tags are unavailable (PHP 7+) or disabled.');
return;
}
@@ -104,7 +104,7 @@ class RemovedAlternativePHPTagsSniffTest extends BaseSniffTest
public function testMaybeASPOpenTag($line, $snippet)
{
if (self::$aspTags === true) {
- $this->markTestSkipped();
+ $this->markTestSkipped('ASP tags are unavailable (PHP 7+) or disabled.');
return;
}
|
Tests: add test skip explanations.
|
PHPCompatibility_PHPCompatibility
|
train
|
b42e0b0feffaff585a9ce49d0d12b0c93dd40935
|
diff --git a/src/thin-tree.js b/src/thin-tree.js
index <HASH>..<HASH> 100644
--- a/src/thin-tree.js
+++ b/src/thin-tree.js
@@ -60,8 +60,17 @@ ThinTree.prototype.preOrderNext = function() {
}
}
+ThinTree.prototype.preOrderPrevious = function() {
+ var thisNodeIndex = this.root.preOrderTraverse().indexOf(this);
+ if (thisNodeIndex > 0) {
+ return this.root.preOrderTraverse()[thisNodeIndex - 1];
+ } else {
+ return null;
+ }
+}
+
ThinTree.prototype.getChildren = function() {
- return this[this._key] || [];
+ return this[this._key] ? this[this._key] : (this[this._key] = []);
}
@@ -218,11 +227,6 @@ var __extends = function(proto, Parent) {
return Child;
};
-///////////////////////////////////////////////////////////////////////////////
-///
-/// Utility
-///
-///////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////
///
diff --git a/test/tree-spec.js b/test/tree-spec.js
index <HASH>..<HASH> 100644
--- a/test/tree-spec.js
+++ b/test/tree-spec.js
@@ -202,6 +202,18 @@ describe('Thin Tree', function() {
expect(rbaNode.preOrderNext()
.preOrderNext().name).to.equal("RC");
});
+
+ it('should be walkable with preOrderPrevious() from child', function() {
+ var rcNode = complexTree.getChildren()[2];
+ expect(rcNode.preOrderPrevious().name).to.equal("RBB");
+
+ expect(rcNode.preOrderPrevious()
+ .preOrderPrevious().name).to.equal("RBA");
+
+ expect(rcNode.preOrderPrevious()
+ .preOrderPrevious()
+ .preOrderPrevious().name).to.equal("RB");
+ });
})
});
|
preOrderPrevious + Changes based on review
|
evanrs_thin-tree
|
train
|
c14b38708fc523e305239b2e14733d0de9bbf1e2
|
diff --git a/clevertap-android-sdk/src/main/java/com/clevertap/android/sdk/DeviceInfo.java b/clevertap-android-sdk/src/main/java/com/clevertap/android/sdk/DeviceInfo.java
index <HASH>..<HASH> 100644
--- a/clevertap-android-sdk/src/main/java/com/clevertap/android/sdk/DeviceInfo.java
+++ b/clevertap-android-sdk/src/main/java/com/clevertap/android/sdk/DeviceInfo.java
@@ -511,33 +511,38 @@ class DeviceInfo {
}
private String getNetworkType() {
- TelephonyManager mTelephonyManager = (TelephonyManager)
- context.getSystemService(Context.TELEPHONY_SERVICE);
- if (mTelephonyManager == null) {
- return null;
- }
- int networkType = mTelephonyManager.getNetworkType();
- switch (networkType) {
- case TelephonyManager.NETWORK_TYPE_GPRS:
- case TelephonyManager.NETWORK_TYPE_EDGE:
- case TelephonyManager.NETWORK_TYPE_CDMA:
- case TelephonyManager.NETWORK_TYPE_1xRTT:
- case TelephonyManager.NETWORK_TYPE_IDEN:
- return "2G";
- case TelephonyManager.NETWORK_TYPE_UMTS:
- case TelephonyManager.NETWORK_TYPE_EVDO_0:
- case TelephonyManager.NETWORK_TYPE_EVDO_A:
- case TelephonyManager.NETWORK_TYPE_HSDPA:
- case TelephonyManager.NETWORK_TYPE_HSUPA:
- case TelephonyManager.NETWORK_TYPE_HSPA:
- case TelephonyManager.NETWORK_TYPE_EVDO_B:
- case TelephonyManager.NETWORK_TYPE_EHRPD:
- case TelephonyManager.NETWORK_TYPE_HSPAP:
- return "3G";
- case TelephonyManager.NETWORK_TYPE_LTE:
- return "4G";
- default:
+ try {
+ TelephonyManager mTelephonyManager = (TelephonyManager)
+ context.getSystemService(Context.TELEPHONY_SERVICE);
+ if (mTelephonyManager == null) {
return null;
+ }
+ int networkType = mTelephonyManager.getNetworkType();
+ switch (networkType) {
+ case TelephonyManager.NETWORK_TYPE_GPRS:
+ case TelephonyManager.NETWORK_TYPE_EDGE:
+ case TelephonyManager.NETWORK_TYPE_CDMA:
+ case TelephonyManager.NETWORK_TYPE_1xRTT:
+ case TelephonyManager.NETWORK_TYPE_IDEN:
+ return "2G";
+ case TelephonyManager.NETWORK_TYPE_UMTS:
+ case TelephonyManager.NETWORK_TYPE_EVDO_0:
+ case TelephonyManager.NETWORK_TYPE_EVDO_A:
+ case TelephonyManager.NETWORK_TYPE_HSDPA:
+ case TelephonyManager.NETWORK_TYPE_HSUPA:
+ case TelephonyManager.NETWORK_TYPE_HSPA:
+ case TelephonyManager.NETWORK_TYPE_EVDO_B:
+ case TelephonyManager.NETWORK_TYPE_EHRPD:
+ case TelephonyManager.NETWORK_TYPE_HSPAP:
+ return "3G";
+ case TelephonyManager.NETWORK_TYPE_LTE:
+ return "4G";
+ default:
+ return null;
+ }
+ }catch (SecurityException e){
+ Logger.d("Caught Security Exception, returning Network Type as NULL");
+ return null;
}
}
|
chore(androidx): Added try/catch for Android <I> Security Exception in DeviceInfo SDK-<I>
|
CleverTap_clevertap-android-sdk
|
train
|
886fcb0f7700f9cf8bb6c17f5f2f081d68f9c1a6
|
diff --git a/actionview/lib/action_view/digestor.rb b/actionview/lib/action_view/digestor.rb
index <HASH>..<HASH> 100644
--- a/actionview/lib/action_view/digestor.rb
+++ b/actionview/lib/action_view/digestor.rb
@@ -42,7 +42,7 @@ module ActionView
':variant. Please update your code to pass a Hash argument. ' \
'Support for the old method signature will be removed in Rails 5.0.'
- _options_for_digest (deprecated_args[2] || {}).merge \
+ _options_for_digest(deprecated_args[2] || {}).merge \
name: options_or_deprecated_name,
format: deprecated_args[0],
finder: deprecated_args[1]
|
Warning removed (...) interpreted as grouped expression
|
rails_rails
|
train
|
13adc4fa771d463f55156de0ccf899ce77f0d83e
|
diff --git a/pyocd/tools/pyocd.py b/pyocd/tools/pyocd.py
index <HASH>..<HASH> 100755
--- a/pyocd/tools/pyocd.py
+++ b/pyocd/tools/pyocd.py
@@ -1556,10 +1556,14 @@ Prefix line with ! to execute a shell command.""")
def print_disasm(self, code, startAddr, maxInstructions=None):
if not isCapstoneAvailable:
- print("Warning: Disassembly is not available because the Capstone library is not installed")
+ print("Warning: Disassembly is not available because the Capstone library is not installed. "
+ "To install Capstone, run 'pip install capstone'.")
return
- pc = self.target.read_core_register('pc') & ~1
+ if self.target.is_halted():
+ pc = self.target.read_core_register('pc') & ~1
+ else:
+ pc = -1
md = capstone.Cs(capstone.CS_ARCH_ARM, capstone.CS_MODE_THUMB)
addrLine = 0
|
Enable commander disasm command while target is running.
If the target is running, the disasm command doesn't try to read the PC.
|
mbedmicro_pyOCD
|
train
|
808a457ea4e9f48c34ce10e52776ae7fd7a33f84
|
diff --git a/lib/actions/form.js b/lib/actions/form.js
index <HASH>..<HASH> 100644
--- a/lib/actions/form.js
+++ b/lib/actions/form.js
@@ -71,15 +71,8 @@ export function parseUrlQueryString (params = getUrlParams()) {
})
const searchId = params.ui_activeSearch || randId()
// Convert strings to numbers/objects and dispatch
- dispatch(
- setQueryParam(
- planParamsToQuery(
- planParams,
- getState().otp.config
- ),
- searchId
- )
- )
+ planParamsToQuery(planParams, getState().otp.config)
+ .then(query => dispatch(setQueryParam(query, searchId)))
}
}
diff --git a/lib/util/query.js b/lib/util/query.js
index <HASH>..<HASH> 100644
--- a/lib/util/query.js
+++ b/lib/util/query.js
@@ -1,5 +1,6 @@
import qs from 'qs'
+import getGeocoder from './geocoder'
import { getTransitModes, hasTransit, isAccessMode, toSentenceCase } from './itinerary'
import { coordsToString, matchLatLon, stringToCoords } from './map'
import queryParams from './query-params'
@@ -149,21 +150,51 @@ export function getDefaultQuery (config) {
}
/**
+ * Geocode utility for returning the first result for the provided place name text.
+ * @param {String} text - text to search
+ * @param {Object} geocoderConfig
+ * @return {Location}
+ */
+async function getFirstGeocodeResult (text, geocoderConfig) {
+ const geocoder = getGeocoder(geocoderConfig)
+ // Attempt to geocode search text and return first result if found.
+ // TODO: Import geocoder from @opentripplanner
+ return geocoder
+ .search({ text })
+ .then((result) => {
+ const firstResult = result.features && result.features[0]
+ if (firstResult) {
+ return geocoder.getLocationFromGeocodedFeature(firstResult)
+ }
+ })
+}
+
+/**
* Create a otp query based on a the url params.
*
* @param {Object} params An object representing the parsed querystring of url
* params.
* @param config the config in the otp-rr store.
*/
-export function planParamsToQuery (params, config) {
+export async function planParamsToQuery (params, config) {
const query = {}
for (var key in params) {
switch (key) {
case 'fromPlace':
query.from = parseLocationString(params.fromPlace)
+ // If a valid location was not found, but the place name text exists,
+ // attempt to geocode the name.
+ if (!query.from && params.fromPlace) {
+ query.from = await getFirstGeocodeResult(params.fromPlace, config.geocoder)
+ }
break
case 'toPlace':
query.to = parseLocationString(params.toPlace)
+ // If a valid location was not found, but the place name text exists,
+ // attempt to geocode the name.
+ if (!query.to && params.toPlace) {
+ query.to = await getFirstGeocodeResult(params.toPlace, config.geocoder)
+ }
break
case 'arriveBy':
query.departArrive = params.arriveBy === 'true'
|
feat(query): attempt to geocode place name from query params
fix #<I>
|
opentripplanner_otp-react-redux
|
train
|
3efe8219dc58d43501213cdcb87f2dfb9663a1e1
|
diff --git a/test/vendor/load-image-scale.js b/test/vendor/load-image-scale.js
index <HASH>..<HASH> 100644
--- a/test/vendor/load-image-scale.js
+++ b/test/vendor/load-image-scale.js
@@ -236,7 +236,7 @@
if (
pixelRatio > 1 &&
// Check if image has not yet device pixel ratio applied:
- parseInt(img.style.width, 10) !== width / pixelRatio
+ parseFloat(img.style.width, 10) !== width / pixelRatio
) {
destWidth *= pixelRatio
destHeight *= pixelRatio
|
Update vendored load-image dev dependency.
|
blueimp_JavaScript-Canvas-to-Blob
|
train
|
fd6d1241aea2ab897621dcdd9f4ee6d25ab33c3b
|
diff --git a/xinclude/src/spec/java/org/etourdot/xincproc/xinclude/AbstractSuiteTest.java b/xinclude/src/spec/java/org/etourdot/xincproc/xinclude/AbstractSuiteTest.java
index <HASH>..<HASH> 100644
--- a/xinclude/src/spec/java/org/etourdot/xincproc/xinclude/AbstractSuiteTest.java
+++ b/xinclude/src/spec/java/org/etourdot/xincproc/xinclude/AbstractSuiteTest.java
@@ -54,7 +54,7 @@ public abstract class AbstractSuiteTest {
final String control = Files.toString(new File(fileResult), Charset.forName("UTF-8"));
//LOG.debug("Test control:{}", control);
return DiffBuilder.compare(new StringReader(control)).withTest(new StringReader(result)).normalizeWhitespace()
- .ignoreWhitespace().build();
+ .ignoreWhitespace().checkForSimilar().build();
}
protected void testSuccess(final URL urlTest, final URL urlResult) throws Exception
@@ -73,7 +73,7 @@ public abstract class AbstractSuiteTest {
XIncProcEngine.parse(source, urlTest.toExternalForm(), output);
final String resultat = output.toString("UTF-8");
final Diff diff = DiffBuilder.compare(Resources.toString(urlResult, Charsets.UTF_8)).withTest(resultat)
- .ignoreWhitespace().build();
+ .ignoreWhitespace().checkForSimilar().build();
source.close();
assertFalse("testSuccess:" + urlTest, diff.hasDifferences());
}
@@ -128,7 +128,7 @@ public abstract class AbstractSuiteTest {
final String expected = Resources.toString(urlTest, Charsets.UTF_8);
final Diff diff = DiffBuilder.compare(expected).withTest(output.toString("UTF-8"))
.withComparisonController(ComparisonControllers.StopWhenDifferent)
- .ignoreWhitespace().build();
+ .ignoreWhitespace().checkForSimilar().build();
result.output = StringEscapeUtils.escapeHtml4(new String(output.toByteArray()));
result.expected = StringEscapeUtils.escapeHtml4(expected);
if (diff.hasDifferences()) {
|
fix xmlunit comparison in integration tests
|
etourdot_xincproc
|
train
|
5e091bba91bffb5f7d46222ee1621e211480942c
|
diff --git a/lib/puppet/provider/package/sun.rb b/lib/puppet/provider/package/sun.rb
index <HASH>..<HASH> 100755
--- a/lib/puppet/provider/package/sun.rb
+++ b/lib/puppet/provider/package/sun.rb
@@ -61,7 +61,7 @@ Puppet::Type.type(:package).provide :sun, :parent => Puppet::Provider::Package d
cmd = [command(:pkginfo), '-l']
cmd << '-d' << device if device
cmd << @resource[:name]
- pkgs = self.class.parse_pkginfo(execute(cmd, :failonfail => false, :combine => false))
+ pkgs = self.class.parse_pkginfo(execute(cmd, :failonfail => false, :combine => true))
errmsg = case pkgs.size
when 0; 'No message'
when 1; pkgs[0]['ERROR']
diff --git a/spec/unit/provider/package/sun_spec.rb b/spec/unit/provider/package/sun_spec.rb
index <HASH>..<HASH> 100755
--- a/spec/unit/provider/package/sun_spec.rb
+++ b/spec/unit/provider/package/sun_spec.rb
@@ -35,7 +35,7 @@ describe Puppet::Type.type(:package).provider(:sun) do
end
it "should install a package if it is not present on update" do
- Puppet::Util::Execution.expects(:execute).with(['/usr/bin/pkginfo', '-l', 'dummy'], {:failonfail => false, :combine => false}).returns File.read(my_fixture('dummy.server'))
+ Puppet::Util::Execution.expects(:execute).with(['/usr/bin/pkginfo', '-l', 'dummy'], {:failonfail => false, :combine => true}).returns File.read(my_fixture('dummy.server'))
provider.expects(:pkgrm).with(['-n', 'dummy'])
provider.expects(:install)
provider.update
@@ -74,7 +74,7 @@ describe Puppet::Type.type(:package).provider(:sun) do
context '#query' do
it "should find the package on query" do
- Puppet::Util::Execution.expects(:execute).with(['/usr/bin/pkginfo', '-l', 'dummy'], {:failonfail => false, :combine => false}).returns File.read(my_fixture('dummy.server'))
+ Puppet::Util::Execution.expects(:execute).with(['/usr/bin/pkginfo', '-l', 'dummy'], {:failonfail => false, :combine => true}).returns File.read(my_fixture('dummy.server'))
provider.query.should == {
:name => 'SUNWdummy',
:category=>"system",
@@ -87,12 +87,12 @@ describe Puppet::Type.type(:package).provider(:sun) do
end
it "shouldn't find the package on query if it is not present" do
- Puppet::Util::Execution.expects(:execute).with(['/usr/bin/pkginfo', '-l', 'dummy'], {:failonfail => false, :combine => false}).returns 'ERROR: information for "dummy" not found.'
+ Puppet::Util::Execution.expects(:execute).with(['/usr/bin/pkginfo', '-l', 'dummy'], {:failonfail => false, :combine => true}).returns 'ERROR: information for "dummy" not found.'
provider.query.should == {:ensure => :absent}
end
it "unknown message should raise error." do
- Puppet::Util::Execution.expects(:execute).with(['/usr/bin/pkginfo', '-l', 'dummy'], {:failonfail => false, :combine => false}).returns 'RANDOM'
+ Puppet::Util::Execution.expects(:execute).with(['/usr/bin/pkginfo', '-l', 'dummy'], {:failonfail => false, :combine => true}).returns 'RANDOM'
lambda { provider.query }.should raise_error(Puppet::Error)
end
end
|
(#<I>) Use combine when running pkginfo
pkginfo was previously executed with combine => false so stderr was
discarded. This way puppet was unable to detect any errors the pkginfo
might print, especially the message `ERROR: information for
"some_package" was not found`.
As a result the provider failed every time it tries to check the current
state of an absent package.
|
puppetlabs_puppet
|
train
|
8c94d50c143502b2c9b7154ee2894a925796ef59
|
diff --git a/lib/ice_cube/schedule.rb b/lib/ice_cube/schedule.rb
index <HASH>..<HASH> 100644
--- a/lib/ice_cube/schedule.rb
+++ b/lib/ice_cube/schedule.rb
@@ -43,11 +43,12 @@ module IceCube
end
# Create a schedule from a hash created by instance.to_hash
- def self.from_hash(hash)
+ def self.from_hash(hash, sd_override=nil)
options = {}
options[:duration] = hash[:duration] if hash.has_key?(:duration)
options[:end_time] = TimeUtil.deserialize_time(hash[:end_time]) if hash.has_key?(:end_time)
- schedule = Schedule.new(TimeUtil.deserialize_time(hash[:start_date]), options)
+ start_date = sd_override ||= TimeUtil.deserialize_time(hash[:start_date])
+ schedule = Schedule.new(start_date, options)
hash[:rrules].each { |rr| schedule.add_recurrence_rule Rule.from_hash(rr) }
hash[:exrules].each { |ex| schedule.add_exception_rule Rule.from_hash(ex) }
hash[:rdates].each { |rd| schedule.add_recurrence_date TimeUtil.deserialize_time(rd) }
@@ -56,8 +57,8 @@ module IceCube
end
# Create a schedule from a yaml string created by instance.to_yaml
- def self.from_yaml(str)
- from_hash(YAML::load(str))
+ def self.from_yaml(str,sd_override=nil)
+ from_hash(YAML::load(str), sd_override)
end
TIME_FORMAT = '%B %e, %Y'
diff --git a/spec/examples/ice_cube_spec.rb b/spec/examples/ice_cube_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/examples/ice_cube_spec.rb
+++ b/spec/examples/ice_cube_spec.rb
@@ -707,6 +707,35 @@ describe IceCube::Schedule, 'occurs_on?' do
s.first(4).size.should == 4
s.first(5).size.should == 5
end
-
+
+
+
+ it 'should always generate the date based off the start_date_override when specified in from_yaml' do
+ start_date = DAY # zero seconds
+ schedule = IceCube::Schedule.new(start_date)
+ schedule.add_recurrence_rule IceCube::Rule.minutely
+
+ start_date_override = DAY + 20
+
+ schedule2 = IceCube::Schedule.from_yaml( schedule.to_yaml, start_date_override)
+ dates = schedule2.first(10)
+ dates.each do |date|
+ date.sec.should == start_date_override.sec
+ end
+ end
+
+ it 'should always generate the date based off the start_date_override when specified in from_hash' do
+ start_date = DAY # zero seconds
+ schedule = IceCube::Schedule.new(start_date)
+ schedule.add_recurrence_rule IceCube::Rule.minutely
+
+ start_date_override = DAY + 20
+
+ schedule2 = IceCube::Schedule.from_hash( schedule.to_hash, start_date_override)
+ dates = schedule2.first(10)
+ dates.each do |date|
+ date.sec.should == start_date_override.sec
+ end
+ end
end
|
add start date override to from_yaml and from_hash to allow recurrence rules to be applied against different start dates
|
seejohnrun_ice_cube
|
train
|
82995952b0368fb044a62a2788e833172ed7f0ef
|
diff --git a/src/main/java/com/crawljax/browser/WebDriverFirefox.java b/src/main/java/com/crawljax/browser/WebDriverFirefox.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/crawljax/browser/WebDriverFirefox.java
+++ b/src/main/java/com/crawljax/browser/WebDriverFirefox.java
@@ -66,6 +66,8 @@ public class WebDriverFirefox extends AbstractWebDriver {
profile.setPreference("network.proxy.http_port", config.getPort());
/* 1 means HTTP proxy */
profile.setPreference("network.proxy.type", 1);
+ /* use proxy for everything, including localhost */
+ profile.setPreference("network.proxy.no_proxies_on", "");
return profile;
}
|
Also use proxy for requests to localhost (firefox webdriver)
|
crawljax_crawljax
|
train
|
298ecfeb54f96be3427e43f24ff75490d318b98a
|
diff --git a/lib/interpolateName.js b/lib/interpolateName.js
index <HASH>..<HASH> 100644
--- a/lib/interpolateName.js
+++ b/lib/interpolateName.js
@@ -12,8 +12,12 @@ function encodeStringToEmoji(content, length) {
if(emojiCache[content]) return emojiCache[content];
length = length || 1;
const emojis = [];
- do {
- const index = Math.floor(Math.random() * emojiList.length);
+ do {
+ if (!emojiList.length) {
+ throw new Error("Ran out of emoji");
+ }
+
+ const index = Math.floor(Math.random() * emojiList.length);
emojis.push(emojiList[index]);
emojiList.splice(index, 1);
} while(--length > 0);
|
Have a violent reaction when out of emoji
Failing explicitly is better than silently starting to return empty strings.
|
webpack_loader-utils
|
train
|
82013bfb6ee36045dabbb4aecc730acbee716151
|
diff --git a/Repository/Vcs/GitHubDriver.php b/Repository/Vcs/GitHubDriver.php
index <HASH>..<HASH> 100644
--- a/Repository/Vcs/GitHubDriver.php
+++ b/Repository/Vcs/GitHubDriver.php
@@ -74,10 +74,7 @@ class GitHubDriver extends BaseGitHubDriver
while ($notFoundRetries) {
try {
- $composer = JsonFile::parseJson((string) $this->getContents($resource));
- if (empty($composer['content']) || $composer['encoding'] !== 'base64' || !($composer = base64_decode($composer['content']))) {
- throw new \RuntimeException('Could not retrieve ' . $this->repoConfig['filename'] . ' from '.$resource);
- }
+ $composer = $this->parseComposerContent($resource);
break;
} catch (TransportException $e) {
if (404 !== $e->getCode()) {
@@ -94,6 +91,24 @@ class GitHubDriver extends BaseGitHubDriver
}
/**
+ * Parse the composer content.
+ *
+ * @param string $resource
+ * @return array
+ *
+ * @throws \RuntimeException When the resource could not be retrieved
+ */
+ protected function parseComposerContent($resource)
+ {
+ $composer = (array) JsonFile::parseJson((string) $this->getContents($resource));
+ if (empty($composer['content']) || $composer['encoding'] !== 'base64' || !($composer = base64_decode($composer['content']))) {
+ throw new \RuntimeException('Could not retrieve ' . $this->repoConfig['filename'] . ' from '.$resource);
+ }
+
+ return $composer;
+ }
+
+ /**
* Converts json composer file to array.
*
* @param string $composer
|
Simplify getComposerContent method
|
fxpio_composer-asset-plugin
|
train
|
d7ac77a567d6cf42c49dac8f4689793a9f263fc8
|
diff --git a/nodeconductor/cloud/backend/openstack.py b/nodeconductor/cloud/backend/openstack.py
index <HASH>..<HASH> 100644
--- a/nodeconductor/cloud/backend/openstack.py
+++ b/nodeconductor/cloud/backend/openstack.py
@@ -149,7 +149,7 @@ class OpenStackBackend(object):
for image in cloud_account.images.exclude(backend_id__in=current_image_ids):
image.delete()
- logger.info('Removed stale image %s, was pointing to', image, image.backend_id)
+ logger.info('Removed stale image %s, was pointing to %s', image, image.backend_id)
def push_membership(self, membership):
try:
@@ -208,7 +208,72 @@ class OpenStackBackend(object):
logger.exception('Failed to propagate ssh public key %s to backend', key_name)
six.reraise(CloudBackendError, CloudBackendError())
+ def push_security_groups(self, membership):
+ security_groups = membership.security_groups
+ try:
+ nova = self.get_nova_client(
+ auth_url=membership.cloud.auth_url,
+ username=membership.username,
+ password=membership.password,
+ tenant_id=membership.tenant_id,
+ )
+
+ for security_group in security_groups:
+ logger.info('Synchronizing security group %s to backend', security_group.name)
+ try:
+ self.push_security_group(security_group, nova)
+ except nova_exceptions.ClientException:
+ logger.exception(
+ 'Failed to push synchronize security_group %s '
+ 'for CloudProjectMembership with id %s', security_group, membership.id)
+ logger.info('Successfully synchronized security group %s to backend', security_group.name)
+
+ except nova_exceptions.ClientException, keystone_exceptions.ClientException:
+ logger.exception('Failed to push synchronize security_groups for membership %s', membership)
+ six.reraise(CloudBackendError, CloudBackendError())
+
+ # TODO: implement after openstack backup architecture decision
+ def pull_security_groups(self, membership):
+ raise NotImplementedError('Security groups pull is not implemented yet.')
+
# Helper methods
+ def push_security_group(self, security_group, nova):
+ os_security_group, created = self.get_or_create_security_group(security_group)
+ # If security group already exists - we have to remove its rules to avoid duplication
+ if not created:
+ for rule in os_security_group.rules:
+ try:
+ nova.security_group_rules.delete(rule['id'])
+ except nova_exceptions.ClientException:
+ logger.exception('Failed to remove rule with id %s from security group %s',
+ rule['id'], security_group)
+
+ for rule in security_group.rules.all():
+ try:
+ nova.security_group_rules.create(
+ parent_group_id=security_group.os_security_group_id,
+ ip_protocol=rule.protocol,
+ from_port=rule.from_port,
+ to_port=rule.to_port,
+ cidr=rule.netmask,
+ )
+ except nova_exceptions.ClientException:
+ logger.exception('Failed to create rule %s for security group %s',
+ rule, security_group)
+
+ def create_security_group(self, security_group, nova):
+ os_security_group = nova.security_groups.create(name=security_group.name, description='')
+ security_group.os_security_group_id = os_security_group.id
+ security_group.save()
+
+ def get_or_create_security_group(self, security_group, nova):
+ if security_group.os_security_group_id is None:
+ return self.create_security_group(security_group, nova), True
+ try:
+ return nova.security_groups.get(group_id=security_group.os_security_group_id), False
+ except nova_exceptions.BadRequest:
+ return self.create_security_group(security_group, nova), True
+
def get_credentials(self, keystone_url):
nc_settings = getattr(settings, 'NODECONDUCTOR', {})
openstacks = nc_settings.get('OPENSTACK_CREDENTIALS', ())
|
opensatck backend push security groups method (nc-<I>)
|
opennode_waldur-core
|
train
|
2449c00dbe7bb9caeb234707b95601bd9f7d177c
|
diff --git a/app/lib/MyAccountsStore.js b/app/lib/MyAccountsStore.js
index <HASH>..<HASH> 100644
--- a/app/lib/MyAccountsStore.js
+++ b/app/lib/MyAccountsStore.js
@@ -77,10 +77,42 @@ export default class MyAccountsStore {
}
connectAccount (konnector, account, folder) {
- return account.id
+ const result = account.id
// TODO: replace by updateAccount
? Promise.resolve(account)
: this.addAccount(konnector, account.values)
+ result
+ // .then(account => {
+ // // TODO waiting for the back to return the permission document after konnector installation
+ // return cozy.client.fetchJSON('PATCH', `/permissions/${konnector.links.permissions}`, {
+ // data: {
+ // id: konnector.links.permissions,
+ // type: 'io.cozy.permissions',
+ // permissions: {
+ // 'add-this': {
+ // type: 'io.cozy.files',
+ // values: [folder._id]
+ // }
+ // }
+ // }
+ // })
+ // .then(() => account)
+ // })
+ // .then(account => {
+ // // TODO waiting for the back to allow folders to be referenced
+ // return cozy.client.fetchJSON('POST', `/files/${folder._id}/relationships/referenced_by`, {
+ // data: {
+ // type: 'io.cozy.konnectors',
+ // id: konnector._id
+ // }
+ // })
+ // .then(() => account)
+ // })
+ .then(account => {
+ // now try to run the connector one time
+ return konnectors.run(cozy.client, konnector.slug, account._id, folder._id)
+ })
+ return result
}
isInstalled (konnector) {
diff --git a/app/lib/konnectors.js b/app/lib/konnectors.js
index <HASH>..<HASH> 100644
--- a/app/lib/konnectors.js
+++ b/app/lib/konnectors.js
@@ -37,27 +37,62 @@ export function install (cozy, slug, source, timeout = 120000) {
if (!source) throw new Error('Missing `source` parameter for konnector')
return cozy.fetchJSON('POST', `/konnectors/${slug}?Source=${encodeURIComponent(source)}`)
- .then(konnector => new Promise((resolve, reject) => {
- const idTimeout = setTimeout(() => {
- reject(new Error('Konnector installation timed out'))
- }, timeout)
-
- // monitor the status of the connector
- // TODO: replace by a polling abstraction utility.
- const idInterval = setInterval(() => {
- cozy.data.find(KONNECTORS_DOCTYPE, konnector._id)
- .then(konnector => {
- if (konnector.state === STATE_READY) {
- clearTimeout(idTimeout)
- clearInterval(idInterval)
- resolve(konnector)
- }
- })
- .catch(error => {
- clearTimeout(idTimeout)
- clearInterval(idInterval)
- reject(error)
- })
- }, 1000)
- }))
+ .then(konnector => {
+ return waitForReady(cozy, konnector, timeout)
+ })
+}
+
+// monitor the status of the connector and resolve when the connector is ready
+function waitForReady (cozy, konnector, timeout) {
+ return new Promise((resolve, reject) => {
+ const idTimeout = setTimeout(() => {
+ reject(new Error('Konnector installation timed out'))
+ }, timeout)
+
+ const idInterval = setInterval(() => {
+ cozy.data.find(KONNECTORS_DOCTYPE, konnector._id)
+ .then(konnector => {
+ if (konnector.state === STATE_READY) {
+ clearTimeout(idTimeout)
+ clearInterval(idInterval)
+ resolve(konnector)
+ }
+ })
+ .catch(error => {
+ clearTimeout(idTimeout)
+ clearInterval(idInterval)
+ reject(error)
+ })
+ }, 1000)
+ })
+}
+
+export function run (cozy, slug, accountId, folderId, timeout = 120 * 1000) {
+ if (!slug) throw new Error('Missing `slug` parameter for konnector')
+ if (!accountId) throw new Error('Missing `accountId` parameter for konnector')
+ if (!folderId) throw new Error('Missing `folderId` parameter for konnector')
+
+ return findBySlug(cozy, slug)
+ .then(konnector => {
+ return cozy.fetchJSON('POST', '/jobs/queue/konnector', {
+ data: {
+ attributes: {
+ options: {
+ priority: 10,
+ timeout,
+ max_exec_count: 1
+ }
+ },
+ arguments: {
+ konnector: konnector._id,
+ account: accountId,
+ folderToSave: folderId
+ }
+ }
+ })
+ .then(() => konnector)
+ })
+ .then(konnector => {
+ return waitForReady(cozy, konnector, timeout)
+ })
}
diff --git a/vendor/assets/manifest.webapp b/vendor/assets/manifest.webapp
index <HASH>..<HASH> 100755
--- a/vendor/assets/manifest.webapp
+++ b/vendor/assets/manifest.webapp
@@ -36,6 +36,10 @@
"description": "Required to access folders",
"type": "io.cozy.files",
"verbs": ["GET", "POST", "PUT"]
+ },
+ "jobs": {
+ "description": "Required to run the konnectors",
+ "type": "io.cozy.jobs"
}
},
"routes": {
|
feat: now run the konnector on submit
|
cozy_cozy-home
|
train
|
fe27526a318f3832991c45801db4f94c5708f69f
|
diff --git a/driver-sync/src/test/functional/com/mongodb/client/AbstractClientEncryptionCustomEndpointTest.java b/driver-sync/src/test/functional/com/mongodb/client/AbstractClientEncryptionCustomEndpointTest.java
index <HASH>..<HASH> 100644
--- a/driver-sync/src/test/functional/com/mongodb/client/AbstractClientEncryptionCustomEndpointTest.java
+++ b/driver-sync/src/test/functional/com/mongodb/client/AbstractClientEncryptionCustomEndpointTest.java
@@ -205,7 +205,7 @@ public abstract class AbstractClientEncryptionCustomEndpointTest {
+ " key: \"arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0\",\n"
+ " endpoint: \"kms.us-east-2.amazonaws.com\"\n"
+ "}"),
- false, MongoClientException.class, MongoCryptException.class, "Credential should be scoped to a valid region"});
+ false, MongoClientException.class, MongoCryptException.class, null});
data.add(new Object[]{"6. [aws] invalid endpoint host",
"aws",
BsonDocument.parse("{\n"
|
Update AWS invalid endpoint host
Don't test against specific error text
JAVA-<I>
|
mongodb_mongo-java-driver
|
train
|
8b4c326365182498fd2963d825b29d93006c1512
|
diff --git a/compiler/docs/compiler.py b/compiler/docs/compiler.py
index <HASH>..<HASH> 100644
--- a/compiler/docs/compiler.py
+++ b/compiler/docs/compiler.py
@@ -412,11 +412,15 @@ def pyrogram_api():
Chat.unban_member
Chat.restrict_member
Chat.promote_member
+ Chat.join
+ Chat.leave
""",
user="""
User
User.archive
User.unarchive
+ User.block
+ User.unblock
""",
callback_query="""
Callback Query
|
Add missing bound methods to docs
|
pyrogram_pyrogram
|
train
|
8bd80a99a259e6409a9ac0a8a60325f94b5c5e9d
|
diff --git a/lib/puppet/node/facts.rb b/lib/puppet/node/facts.rb
index <HASH>..<HASH> 100755
--- a/lib/puppet/node/facts.rb
+++ b/lib/puppet/node/facts.rb
@@ -1,12 +1,17 @@
+require 'time'
+
require 'puppet/node'
require 'puppet/indirector'
+require 'puppet/util/pson'
+
# Manage a given node's facts. This either accepts facts and stores them, or
# returns facts for a given node.
class Puppet::Node::Facts
# Set up indirection, so that nodes can be looked for in
# the node sources.
extend Puppet::Indirector
+ extend Puppet::Util::Pson
# We want to expire any cached nodes if the facts are saved.
module NodeExpirer
@@ -62,6 +67,22 @@ class Puppet::Node::Facts
self.values[:_timestamp]
end
+ def self.from_pson(data)
+ result = new(data['name'], data['values'])
+ result.timestamp = Time.parse(data['timestamp'])
+ result.expiration = Time.parse(data['expiration'])
+ result
+ end
+
+ def to_pson(*args)
+ {
+ 'expiration' => expiration,
+ 'name' => name,
+ 'timestamp' => timestamp,
+ 'values' => strip_internal,
+ }.to_pson(*args)
+ end
+
private
# Add internal data to the facts for storage.
diff --git a/spec/unit/node/facts_spec.rb b/spec/unit/node/facts_spec.rb
index <HASH>..<HASH> 100755
--- a/spec/unit/node/facts_spec.rb
+++ b/spec/unit/node/facts_spec.rb
@@ -109,5 +109,29 @@ describe Puppet::Node::Facts, "when indirecting" do
facts = Puppet::Node::Facts.new("me", "one" => "two", "three" => "four")
facts.values[:_timestamp].should be_instance_of(Time)
end
+
+ describe "using pson" do
+ before :each do
+ @timestamp = Time.parse("Thu Oct 28 11:16:31 -0700 2010")
+ @expiration = Time.parse("Thu Oct 28 11:21:31 -0700 2010")
+ end
+
+ it "should accept properly formatted pson" do
+ pson = %Q({"name": "foo", "expiration": "#{@expiration}", "timestamp": "#{@timestamp}", "values": {"a": "1", "b": "2", "c": "3"}})
+ format = Puppet::Network::FormatHandler.format('pson')
+ facts = format.intern(Puppet::Node::Facts,pson)
+ facts.name.should == 'foo'
+ facts.expiration.should == @expiration
+ facts.values.should == {'a' => '1', 'b' => '2', 'c' => '3', :_timestamp => @timestamp}
+ end
+
+ it "should generate properly formatted pson" do
+ Time.stubs(:now).returns(@timestamp)
+ facts = Puppet::Node::Facts.new("foo", {'a' => 1, 'b' => 2, 'c' => 3})
+ facts.expiration = @expiration
+ pson = PSON.parse(facts.to_pson)
+ pson.should == {"name"=>"foo", "timestamp"=>"Thu Oct 28 11:16:31 -0700 2010", "expiration"=>"Thu Oct 28 11:21:31 -0700 2010", "values"=>{"a"=>1, "b"=>2, "c"=>3}}
+ end
+ end
end
end
|
(#<I>) Add support for PSON to facts
Previously, facts could be fetched via the REST API in PSON, but came
back as the to_s representation of a Ruby object, rather than as
proper PSON data. This patch adds to_pson and from_pson to facts, so
they can be properly used with PSON.
|
puppetlabs_puppet
|
train
|
4554a104c7d1bb29aa370b2c3b87a1d51a108445
|
diff --git a/src/scs_core/particulate/exegesis/isecen2_v001.py b/src/scs_core/particulate/exegesis/isecen2_v001.py
index <HASH>..<HASH> 100644
--- a/src/scs_core/particulate/exegesis/isecen2_v001.py
+++ b/src/scs_core/particulate/exegesis/isecen2_v001.py
@@ -11,8 +11,8 @@ Coefficients gained from Alphasense OPC-N2 (versus Palas Fidas) data at LHR2 in
method: Immediate Scaling Error / Exponential Curve (ISECE), version 1
domain: 0 <= rH <= max_rh
-model: error = ce * e ^ (cx * x)
-range: x / error
+model: error = ce * e ^ (cx * rH)
+range: PM / error
"""
from collections import OrderedDict
|
Added particle exegesis framework.
|
south-coast-science_scs_core
|
train
|
39597fb60f9e5040370307f849201c2fdbe5159a
|
diff --git a/lib/cross_validation.rb b/lib/cross_validation.rb
index <HASH>..<HASH> 100644
--- a/lib/cross_validation.rb
+++ b/lib/cross_validation.rb
@@ -3,3 +3,7 @@ $LOAD_PATH.unshift File.dirname(__FILE__)
module CrossValidation
VERSION = '0.0.1'
end
+
+%w(confusion_matrix runner).each do |fn|
+ require File.join('cross_validation', fn)
+end
|
Require the runner and confusion matrix by default
|
jmdeldin_cross_validation
|
train
|
96f7a5d9962e83a7c37cbd0a37c638bbf2065866
|
diff --git a/napalm_yang/parser.py b/napalm_yang/parser.py
index <HASH>..<HASH> 100644
--- a/napalm_yang/parser.py
+++ b/napalm_yang/parser.py
@@ -56,7 +56,7 @@ class Parser(object):
attr = getattr(attr, p)
r = attr(**m["args"])
- if isinstance(r, dict):
+ if isinstance(r, dict) and all([isinstance(x, str) for x in r.values()]):
# Some vendors like junos return commands enclosed by a key
r = "\n".join(r.values())
|
Make sure all values are string before flattening
|
napalm-automation_napalm-yang
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.