hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
2202fd1714854524ac07ef4f232297e1a124175b
|
diff --git a/lib/dryice/index.js b/lib/dryice/index.js
index <HASH>..<HASH> 100644
--- a/lib/dryice/index.js
+++ b/lib/dryice/index.js
@@ -1072,10 +1072,10 @@ copy.filter.addDefines = function(input, source) {
var module = source.isLocation ? source.path : source;
- input = input.replace(/\\/g, "\\\\").replace(/"/g, '\\"');
- input = '"' + input.replace(/\n/g, '\\n" +\n "') + '"';
+ input = input.replace(/\\/g, "\\\\").replace(/'/g, '\\\'');
+ input = '\'' + input.replace(/\n/g, '\\n\' +\n \'') + '\'';
- return 'define("text!' + module + '", [], ' + input + ');\n\n';
+ return 'define(\'text!' + module + '\', [], ' + input + ');\n\n';
};
copy.filter.addDefines.onRead = true;
|
Use ' when adding define lines rather than "
|
mozilla_dryice
|
train
|
d09467f59443ac4f52eec1a1c1a00670047a6658
|
diff --git a/thriftrw/idl/lexer.py b/thriftrw/idl/lexer.py
index <HASH>..<HASH> 100644
--- a/thriftrw/idl/lexer.py
+++ b/thriftrw/idl/lexer.py
@@ -56,6 +56,8 @@ THRIFT_KEYWORDS = (
'const',
'required',
'optional',
+ 'true',
+ 'false',
)
@@ -67,7 +69,6 @@ class LexerSpec(object):
literals = ':;,=*{}()<>[]'
tokens = (
- 'BOOLCONSTANT',
'INTCONSTANT',
'DUBCONSTANT',
'LITERAL',
@@ -103,11 +104,6 @@ class LexerSpec(object):
def t_ignore_COMMENT(self, t):
r'\/\/[^\n]*'
- def t_BOOLCONSTANT(self, t):
- r'true|false'
- t.value = t.value == 'true'
- return t
-
def t_DUBCONSTANT(self, t):
r'-?\d+\.\d*(e-?\d+)?'
t.value = float(t.value)
diff --git a/thriftrw/idl/parser.py b/thriftrw/idl/parser.py
index <HASH>..<HASH> 100644
--- a/thriftrw/idl/parser.py
+++ b/thriftrw/idl/parser.py
@@ -99,6 +99,11 @@ class ParserSpec(object):
'''
p[0] = p[1]
+ def p_const_bool(self, p):
+ '''const_bool : TRUE
+ | FALSE'''
+ p[0] = p[1] == 'true'
+
def p_const(self, p):
'''const : CONST field_type IDENTIFIER '=' const_value
| CONST field_type IDENTIFIER '=' const_value sep'''
@@ -124,7 +129,7 @@ class ParserSpec(object):
'''const_value_primitive : INTCONSTANT
| DUBCONSTANT
| LITERAL
- | BOOLCONSTANT'''
+ | const_bool'''
p[0] = ast.ConstPrimitiveValue(p[1], lineno=p.lineno(1))
def p_const_list(self, p):
|
Fix parser bug that prevented starting variable names with true or false
|
thriftrw_thriftrw-python
|
train
|
e66238e6eac296a466855647df2c38ea0b97315a
|
diff --git a/nomad/structs/structs.go b/nomad/structs/structs.go
index <HASH>..<HASH> 100644
--- a/nomad/structs/structs.go
+++ b/nomad/structs/structs.go
@@ -2421,7 +2421,15 @@ type Plan struct {
func (p *Plan) AppendUpdate(alloc *Allocation, status, desc string) {
newAlloc := new(Allocation)
*newAlloc = *alloc
- newAlloc.Job = nil // Normalize the job
+
+ // If the job is not set in the plan we are deregistering a job so we
+ // extract the job from the allocation.
+ if p.Job == nil && newAlloc.Job != nil {
+ p.Job = newAlloc.Job
+ }
+
+ // Normalize the job
+ newAlloc.Job = nil
newAlloc.DesiredStatus = status
newAlloc.DesiredDescription = desc
node := alloc.NodeID
diff --git a/scheduler/generic_sched_test.go b/scheduler/generic_sched_test.go
index <HASH>..<HASH> 100644
--- a/scheduler/generic_sched_test.go
+++ b/scheduler/generic_sched_test.go
@@ -634,6 +634,13 @@ func TestServiceSched_JobDeregister(t *testing.T) {
out, err := h.State.AllocsByJob(job.ID)
noErr(t, err)
+ // Ensure that the job field on the allocation is still populated
+ for _, alloc := range out {
+ if alloc.Job == nil {
+ t.Fatalf("bad: %#v", alloc)
+ }
+ }
+
// Ensure no remaining allocations
out = structs.FilterTerminalAllocs(out)
if len(out) != 0 {
|
Store the job on allocations that are from stopped jobs
|
hashicorp_nomad
|
train
|
21c43348d6b2d50580ea6e8898ba138a4d4f3762
|
diff --git a/tests/BreadcrumbTest.php b/tests/BreadcrumbTest.php
index <HASH>..<HASH> 100644
--- a/tests/BreadcrumbTest.php
+++ b/tests/BreadcrumbTest.php
@@ -2,6 +2,8 @@
class BreadcrumbTest extends PHPUnit_Framework_TestCase
{
+
+ /** @var \Noherczeg\Breadcrumb\Breadcrumb */
private $bread = null;
/**
|
added type hint for better IDE support
|
noherczeg_breadcrumb
|
train
|
cc551e3523b17fc874aaaaf5703ee7b44f3d453a
|
diff --git a/src/components/CheckboxGroup/__test__/checkboxList.spec.js b/src/components/CheckboxGroup/__test__/checkboxList.spec.js
index <HASH>..<HASH> 100644
--- a/src/components/CheckboxGroup/__test__/checkboxList.spec.js
+++ b/src/components/CheckboxGroup/__test__/checkboxList.spec.js
@@ -1,11 +1,12 @@
import React from 'react';
import { mount } from 'enzyme';
import CheckboxList from '../checkboxList';
+import StyledItemDescription from '../../RadioGroup/styled/itemDescription';
const options = [
- { value: 'admin', label: 'Admin', disabled: false },
- { value: 'user', label: 'User', disabled: false },
- { value: 'nobody', label: 'Anonymus', disabled: true },
+ { value: 'admin', label: 'Admin', disabled: false, description: 'Admin user' },
+ { value: 'user', label: 'User', disabled: false, description: 'Regular user' },
+ { value: 'nobody', label: 'Anonymus', disabled: true, description: 'Anonymous user' },
];
describe('<CheckboxList />', () => {
@@ -23,4 +24,15 @@ describe('<CheckboxList />', () => {
expect(checkbox.prop('label')).toBe('Admin');
expect(checkbox.prop('disabled')).toBe(false);
});
+
+ it('should render the description', () => {
+ const component = mount(<CheckboxList options={options} />);
+ expect(component.find(StyledItemDescription).length).toBe(3);
+ expect(
+ component
+ .find(StyledItemDescription)
+ .first()
+ .text(),
+ ).toBe('Admin user');
+ });
});
diff --git a/src/components/RadioGroup/__test__/radioItems.spec.js b/src/components/RadioGroup/__test__/radioItems.spec.js
index <HASH>..<HASH> 100644
--- a/src/components/RadioGroup/__test__/radioItems.spec.js
+++ b/src/components/RadioGroup/__test__/radioItems.spec.js
@@ -1,6 +1,7 @@
import React from 'react';
import { mount } from 'enzyme';
import RadioItems from '../radioItems';
+import StyledItemDescription from '../styled/itemDescription';
describe('<RadioItems />', () => {
it('should return the 3 radios when 3 options are passed', () => {
@@ -35,4 +36,19 @@ describe('<RadioItems />', () => {
}),
);
});
+ it('should render the description', () => {
+ const options = [
+ { value: 'admin', label: 'Admin', disabled: true, description: 'Admin user' },
+ { value: 'user', label: 'User', description: 'Regular user' },
+ { value: 'anonymous', label: 'Anonymous', description: 'Anonymous user' },
+ ];
+ const component = mount(<RadioItems name="items" options={options} />);
+ expect(component.find(StyledItemDescription).length).toBe(3);
+ expect(
+ component
+ .find(StyledItemDescription)
+ .first()
+ .text(),
+ ).toBe('Admin user');
+ });
});
|
test: add tests to description in Checkbox and Radio groups (#<I>)
* test: add test to RadioGroup description
* test: add test to CheckboxGroup description
|
90milesbridge_react-rainbow
|
train
|
899ecf60d27d618655fb5d19666a6863e82a0f87
|
diff --git a/eZ/Publish/API/Repository/RoleService.php b/eZ/Publish/API/Repository/RoleService.php
index <HASH>..<HASH> 100644
--- a/eZ/Publish/API/Repository/RoleService.php
+++ b/eZ/Publish/API/Repository/RoleService.php
@@ -189,7 +189,7 @@ interface RoleService
*
* @param \eZ\Publish\API\Repository\Values\User\Role $role
*
- * @return array an array of {@link RoleAssignment}
+ * @return \eZ\Publish\API\Repository\Values\User\RoleAssignment[] an array of {@link RoleAssignment}
*/
public function getRoleAssignments( Role $role );
@@ -200,7 +200,7 @@ interface RoleService
*
* @param \eZ\Publish\API\Repository\Values\User\User $user
*
- * @return array an array of {@link UserRoleAssignment}
+ * @return \eZ\Publish\API\Repository\Values\User\UserRoleAssignment[] an array of {@link UserRoleAssignment}
*/
public function getRoleAssignmentsForUser( User $user );
@@ -211,7 +211,7 @@ interface RoleService
*
* @param \eZ\Publish\API\Repository\Values\User\UserGroup $userGroup
*
- * @return array an array of {@link UserGroupRoleAssignment}
+ * @return \eZ\Publish\API\Repository\Values\User\UserGroupRoleAssignment[] an array of {@link UserGroupRoleAssignment}
*/
public function getRoleAssignmentsForUserGroup( UserGroup $userGroup );
|
Fixed: Some return types in doc comments.
|
ezsystems_ezpublish-kernel
|
train
|
45a776692cbdee9b7239b0b2453a0e1a1c896ae0
|
diff --git a/test/faker/avatar.spec.js b/test/faker/avatar.spec.js
index <HASH>..<HASH> 100644
--- a/test/faker/avatar.spec.js
+++ b/test/faker/avatar.spec.js
@@ -22,7 +22,9 @@ describe('Avatar', () => {
});
it('should return url with specified format', () => {
- expect(Avatar.image('test', '300x300', 'bmp')).to.match(/^https\:\/\/robohash\.org\/test\.bmp\?size=300x300&set=set1/);
+ ['png', 'jpg', 'bmp'].forEach(format => {
+ expect(Avatar.image('test', '300x300', format)).to.equal(`https://robohash.org/test.${format}?size=300x300&set=set1`);
+ });
});
it('should return url with specified set', () => {
|
Added testing for all supported formats in Avatar tests.
|
mrstebo_fakergem
|
train
|
7331d2b6f503eb72f4b732bf0baa05dc4f6a56b6
|
diff --git a/examples/public/index.php b/examples/public/index.php
index <HASH>..<HASH> 100644
--- a/examples/public/index.php
+++ b/examples/public/index.php
@@ -8,7 +8,10 @@ require __DIR__ . '/../../vendor/autoload.php';
$app = new \Slim\Slim();
// setup freebird Client with application keys
-$client = new Made\Services\freebird\Client('your_consumer_key', 'your_consumer_secret_key');
+$client = new Made\Services\freebird\Client();
+//$client->init_bearer_token('your_key', 'your_secret_key');
+$client->set_bearer_token('your_bearer');
+
// set response types to json
$res = $app->response();
diff --git a/lib/Made/Services/freebird/Client.php b/lib/Made/Services/freebird/Client.php
index <HASH>..<HASH> 100644
--- a/lib/Made/Services/freebird/Client.php
+++ b/lib/Made/Services/freebird/Client.php
@@ -10,13 +10,20 @@ class Client
* @param [string] $consumer_key [Twitter Application Consumer Key]
* @param [string] $consumer_secret [Twitter Application Consumer Secret Key]
*/
- public function __construct ($consumer_key, $consumer_secret) {
-
+ public function __construct ()
+ {
$this->requestHandler = new RequestHandler();
-
+ }
+
+ public function init_bearer_token ($consumer_key, $consumer_secret)
+ {
// Establishes Twitter Applications Authentication token for this session.
- $this->requestHandler->authenticateApp($consumer_key, $consumer_secret);
+ return $this->requestHandler->authenticateApp($consumer_key, $consumer_secret);
+ }
+ public function set_bearer_token ($bearer_token)
+ {
+ $this->requestHandler->set_bearer_token($bearer_token);
}
/**
@@ -25,11 +32,10 @@ class Client
* @param [array] $options [description]
* @return [json] [description]
*/
- public function api_request ($path, $options){
-
+ public function api_request ($path, $options)
+ {
$data = $this->requestHandler->request($path, $options);
return json_encode($data->json);
-
}
}
diff --git a/lib/Made/Services/freebird/RequestHandler.php b/lib/Made/Services/freebird/RequestHandler.php
index <HASH>..<HASH> 100644
--- a/lib/Made/Services/freebird/RequestHandler.php
+++ b/lib/Made/Services/freebird/RequestHandler.php
@@ -15,9 +15,14 @@ class RequestHandler
/**
* Instantiate a new RequestHandler
*/
- public function __construct()
+ public function __construct ()
{
-
+ $this->client = new \Guzzle\Http\Client('https://api.twitter.com/1.1');
+ }
+
+ public function set_bearer_token ($bearer_token)
+ {
+ $this->bearer = $bearer_token;
}
/**
@@ -56,10 +61,10 @@ class RequestHandler
// Twitter Required Body
$body = 'grant_type=client_credentials';
- $this->client = new \Guzzle\Http\Client('https://api.twitter.com/1.1');
$response = $this->client->post('/oauth2/token', $headers, $body)->send();
$data = $response->json();
$this->bearer = $data['access_token'];
+ return $this->bearer;
}
/**
|
updating for options of setting only the bearer token if already aquired
|
corbanb_freebird-php
|
train
|
8ab2b7b348a7252733e3cbd095d763f84bca433e
|
diff --git a/src/module.js b/src/module.js
index <HASH>..<HASH> 100644
--- a/src/module.js
+++ b/src/module.js
@@ -5,10 +5,17 @@
angular.module("firebase.config", []);
angular.module("firebase.auth", ["firebase.utils"]);
angular.module("firebase.database", ["firebase.utils"]);
+ angular.module("firebase.storage", []);
// Define the `firebase` module under which all AngularFire
// services will live.
- angular.module("firebase", ["firebase.utils", "firebase.config", "firebase.auth", "firebase.database"])
+ angular.module("firebase", [
+ "firebase.utils",
+ "firebase.config",
+ "firebase.auth",
+ "firebase.database",
+ "firebase.storage"
+ ])
//TODO: use $window
.value("Firebase", exports.firebase)
.value("firebase", exports.firebase);
diff --git a/src/storage/FirebaseStorage.js b/src/storage/FirebaseStorage.js
index <HASH>..<HASH> 100644
--- a/src/storage/FirebaseStorage.js
+++ b/src/storage/FirebaseStorage.js
@@ -1,4 +1,5 @@
(function() {
+ "use strict";
function FirebaseStorage() {
@@ -15,14 +16,6 @@
};
}
- FirebaseStorage._ = {
- _unwrapStorageSnapshot: unwrapStorageSnapshot,
- _$put: _$put,
- _$getDownloadURL: _$getDownloadURL,
- _isStorageRef: isStorageRef,
- _assertStorageRef: _assertStorageRef
- };
-
function unwrapStorageSnapshot(storageSnapshot) {
return {
bytesTransferred: storageSnapshot.bytesTransferred,
@@ -42,15 +35,15 @@
$progress: function $progress(callback) {
task.on('state_changed', function (storageSnap) {
return callback(unwrapStorageSnapshot(storageSnap));
- }, function (err) {}, function (storageSnap) {});
+ }, function () {}, function () {});
},
$error: function $error(callback) {
- task.on('state_changed', function (storageSnap) {}, function (err) {
+ task.on('state_changed', function () {}, function (err) {
return callback(err);
- }, function (storageSnap) {});
+ }, function () {});
},
$complete: function $complete(callback) {
- task.on('state_changed', function (storageSnap) {}, function (err) {}, function (_) {
+ task.on('state_changed', function () {}, function () {}, function () {
return callback(unwrapStorageSnapshot(task.snapshot));
});
}
@@ -72,6 +65,14 @@
}
}
+ FirebaseStorage._ = {
+ _unwrapStorageSnapshot: unwrapStorageSnapshot,
+ _$put: _$put,
+ _$getDownloadURL: _$getDownloadURL,
+ _isStorageRef: isStorageRef,
+ _assertStorageRef: _assertStorageRef
+ };
+
angular.module('firebase.storage')
.factory('$firebaseStorage', FirebaseStorage);
|
fix(storage): Linting errors
|
firebase_angularfire
|
train
|
a4d04cbb7279b9b7209e1ca14dab7722f010fe73
|
diff --git a/cli/app.go b/cli/app.go
index <HASH>..<HASH> 100644
--- a/cli/app.go
+++ b/cli/app.go
@@ -47,7 +47,10 @@ func NewApp() *cli.App {
return
}
- if len(c.Args()) == 0 {
+ args := c.Args()
+ if len(args) > 0 {
+ cli.ShowCommandHelp(c, args[0])
+ } else {
cli.ShowAppHelp(c)
}
}
|
Print help when task is not matched
|
jingweno_gotask
|
train
|
93e67edb2cac09ce16ece3ac594203d2cdaf0b5b
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -12,7 +12,7 @@ with open('README.rst') as f:
setup(
name='uber_rides',
- version='0.5.0',
+ version='0.5.1',
packages=find_packages(),
description='Official Uber Rides API Python SDK',
long_description=readme,
|
Bumped package version to <I> for release
|
uber_rides-python-sdk
|
train
|
1a9fe67b07797c5a8b2d19ce438beb8aa99f9516
|
diff --git a/chi.go b/chi.go
index <HASH>..<HASH> 100644
--- a/chi.go
+++ b/chi.go
@@ -45,9 +45,9 @@ func (ms *Middlewares) Use(middlewares ...func(http.Handler) http.Handler) Middl
}
func (ms Middlewares) Handler(h http.Handler) http.HandlerFunc {
- return chain(ms, h).ServeHTTP
+ return Chain(ms, h).ServeHTTP
}
func (ms Middlewares) HandlerFunc(h http.HandlerFunc) http.HandlerFunc {
- return chain(ms, h).ServeHTTP
+ return Chain(ms, h).ServeHTTP
}
diff --git a/mux.go b/mux.go
index <HASH>..<HASH> 100644
--- a/mux.go
+++ b/mux.go
@@ -212,7 +212,7 @@ func (mx *Mux) FileServer(path string, root http.FileSystem) {
}
func (mx *Mux) buildRouteHandler() {
- mx.handler = chain(mx.middlewares, http.HandlerFunc(mx.routeHTTP))
+ mx.handler = Chain(mx.middlewares, http.HandlerFunc(mx.routeHTTP))
}
// handle creates a chi.Handler from a chain of middlewares and an end handler,
@@ -236,7 +236,7 @@ func (mx *Mux) handle(method methodTyp, pattern string, handler http.Handler) {
var endpoint http.Handler
if mx.inline {
mx.handler = http.HandlerFunc(mx.routeHTTP)
- endpoint = chain(mx.middlewares, handler)
+ endpoint = Chain(mx.middlewares, handler)
} else {
endpoint = handler
}
diff --git a/util.go b/util.go
index <HASH>..<HASH> 100644
--- a/util.go
+++ b/util.go
@@ -15,7 +15,7 @@ func (k *contextKey) String() string {
// chain builds a http.Handler composed of middlewares and endpoint handler in the
// order they are passed.
-func chain(middlewares []func(http.Handler) http.Handler, endpoint http.Handler) http.Handler {
+func Chain(middlewares []func(http.Handler) http.Handler, endpoint http.Handler) http.Handler {
// Return ahead of time if there aren't any middlewares for the chain
if middlewares == nil || len(middlewares) == 0 {
return endpoint
|
Export Chain() method for mws and endpoint for app use
|
go-chi_chi
|
train
|
9e41bb39643757343dddd7f164d76ab755537777
|
diff --git a/sprinter/environment.py b/sprinter/environment.py
index <HASH>..<HASH> 100644
--- a/sprinter/environment.py
+++ b/sprinter/environment.py
@@ -12,7 +12,7 @@ import sprinter.lib as lib
from sprinter.core import PHASE, load_global_config, Directory, Injections, Manifest, load_manifest, FeatureDict
from sprinter.core.templates import shell_utils_template, source_template
from sprinter.lib import system
-from sprinter.exceptions import SprinterException
+from sprinter.exceptions import SprinterException, FormulaException
from sprinter.external import brew
@@ -188,7 +188,11 @@ class Environment(object):
self.instantiate_features()
self._specialize()
for feature in self.features.run_order:
- self.run_action(feature, 'sync')
+ try:
+ self.run_action(feature, 'sync')
+ except FormulaException:
+ # continue trying to removal any remaining features.
+ pass
self.clear_all()
self.directory.remove()
self.injections.commit()
@@ -512,9 +516,15 @@ class Environment(object):
(action, feature))
self.logger.debug("Exception", exc_info=sys.exc_info())
self.log_feature_error(feature, str(e))
- # any error in a feature should fail immediately
+ # any error in a feature should fail immediately - unless it occurred
+ # from the remove() method in which case continue the rest of the
+ # feature removal from there
if self.error_occured:
- raise SprinterException("%s action failed for feature %s!" % (action, feature))
+ exception_msg = "%s action failed for feature %s!" % (action, feature)
+ if self.phase == PHASE.REMOVE:
+ raise FormulaException(exception_msg)
+ else:
+ raise SprinterException(exception_msg)
def _specialize(self, reconfigure=False):
""" Add variables and specialize contexts """
diff --git a/sprinter/tests/test_environment.py b/sprinter/tests/test_environment.py
index <HASH>..<HASH> 100644
--- a/sprinter/tests/test_environment.py
+++ b/sprinter/tests/test_environment.py
@@ -252,6 +252,17 @@ env_source_rc = False
with MockEnvironment(None, test_target, mock_formulabase=formulabase) as environment:
environment.run_feature('testfeature', 'install')
+ def test_feature_run_remove_failure(self):
+ """ A feature remove should not throw SprinterException on failure - it should
+ raise a FeatureException that is handle in remove() """
+
+ with patch('sprinter.formula.base.FormulaBase', new=create_mock_formulabase()) as formulabase:
+ formulabase.sync.side_effect = Exception
+ with MockEnvironment(test_source, test_target, mock_formulabase=formulabase) as environment:
+ environment.directory = Mock(spec=environment.directory)
+ environment.directory.new = False
+ environment.remove()
+
missing_formula_config = """
[missingformula]
|
If fail in remove(), continue rest of feature removal instead of raising SprinterException
|
toumorokoshi_sprinter
|
train
|
93fad0a09ebb2e76563ef5f065f6bdcf677ec1a3
|
diff --git a/javascript/FrontEndGridField.js b/javascript/FrontEndGridField.js
index <HASH>..<HASH> 100644
--- a/javascript/FrontEndGridField.js
+++ b/javascript/FrontEndGridField.js
@@ -43,7 +43,41 @@
}
});
- $('.ss-gridfield a.edit-link, .ss-gridfield a.new-link').entwine({
+
+ //Row Click
+ $('.ss-gridfield .ss-gridfield-item:not(.ss-gridfield-no-items) td').entwine({
+ /**
+ * Function: onclick
+ */
+ onclick: function(e) {
+ var editButton=$(this).parent().find('a.edit-link, a.view-link');
+ var self=this, id='ss-ui-dialog-'+this.getGridField().getUUID();
+ var dialog=$('#'+id);
+
+ if(!dialog.length) {
+ dialog=$('<div class="ss-ui-dialog" id="'+id+'" />');
+ $('body').append(dialog);
+ }
+
+ var extraClass=(this.data('popupclass') ? this.data('popupclass'):'');
+ dialog.ssdialog({
+ title: editButton.text(),
+ iframeUrl: editButton.attr('href'),
+ autoOpen: true,
+ dialogExtraClass: extraClass,
+ close: function(e, ui) {
+ self.getGridField().reload();
+ }
+ });
+
+
+ e.preventDefault();
+ return false;
+ }
+ });
+
+ //View/Edit Button Click
+ $('.ss-gridfield a.edit-link, .ss-gridfield a.view-link').entwine({
/**
* Function: onclick
*/
@@ -58,6 +92,7 @@
var extraClass=(this.data('popupclass') ? this.data('popupclass'):'');
dialog.ssdialog({
+ title: $(this).text(),
iframeUrl: this.attr('href'),
autoOpen: true,
dialogExtraClass: extraClass,
|
Fixed issue when clicking on a row in the gridfield would not lightbox
the form
|
webbuilders-group_silverstripe-frontendgridfield
|
train
|
54284d2b2637951eb97598d4030beffdf3fd6ea6
|
diff --git a/findbugs/src/java/edu/umd/cs/findbugs/ba/ValueNumberFrame.java b/findbugs/src/java/edu/umd/cs/findbugs/ba/ValueNumberFrame.java
index <HASH>..<HASH> 100644
--- a/findbugs/src/java/edu/umd/cs/findbugs/ba/ValueNumberFrame.java
+++ b/findbugs/src/java/edu/umd/cs/findbugs/ba/ValueNumberFrame.java
@@ -62,17 +62,17 @@ public class ValueNumberFrame extends Frame<ValueNumber> {
// of ValueNumberAnalysis.
if (mine != getValue(slot)) throw new IllegalStateException();
+
+ if (mine.equals(other))
+ return mine;
+
ValueNumber mergedValue = mergedValueList.get(slot);
- if (mergedValue == null && !mine.equals(other)) {
+ if (mergedValue == null) {
mergedValue = factory.createFreshValue();
mergedValueList.set(slot, mergedValue);
- mine = mergedValue;
}
- // NOTE: if mergedValue == null, we could remember "other" as contributing
- // to the merged value. (Like input to a phi node.) As it is, we only
- // care that the merged value cannot reliably be thought to be the same
- // as any of the incoming values.
- return mine;
+
+ return mergedValue;
}
public ValueNumber getDefaultValue() {
|
Modified mergeValues() to actually do what it is supposed to do.
git-svn-id: <URL>
|
spotbugs_spotbugs
|
train
|
b620b73361a94bbacc051fb5fe9fec6ce2bc3459
|
diff --git a/katharsis-core/src/main/java/io/katharsis/jackson/serializer/include/IncludedRelationshipExtractor.java b/katharsis-core/src/main/java/io/katharsis/jackson/serializer/include/IncludedRelationshipExtractor.java
index <HASH>..<HASH> 100644
--- a/katharsis-core/src/main/java/io/katharsis/jackson/serializer/include/IncludedRelationshipExtractor.java
+++ b/katharsis-core/src/main/java/io/katharsis/jackson/serializer/include/IncludedRelationshipExtractor.java
@@ -19,10 +19,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.InvocationTargetException;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
/**
* Extracts inclusions from a resource.
@@ -201,10 +198,13 @@ public class IncludedRelationshipExtractor {
if (pathList.size() > 1) {
if (Iterable.class.isAssignableFrom(resourceProperty.getClass())) {
if (((Iterable) resourceProperty).iterator().hasNext()) {
- resourceProperty = ((Iterable) resourceProperty).iterator().next();
- fieldName = getRelationshipName(pathList.get(1), resourceProperty.getClass());
- resourceProperty = PropertyUtils.getProperty(resourceProperty, fieldName);
- populateToResourcePropertyToIncludedResources(resourceProperty, response, ContainerType.INCLUDED_NESTED, pathList.get(1), includedResources);
+ Iterator resourceProperties = ((Iterable) resourceProperty).iterator();
+ while (resourceProperties.hasNext()) {
+ resourceProperty = resourceProperties.next();
+ fieldName = getRelationshipName(pathList.get(1), resourceProperty.getClass());
+ resourceProperty = PropertyUtils.getProperty(resourceProperty, fieldName);
+ populateToResourcePropertyToIncludedResources(resourceProperty, response, ContainerType.INCLUDED_NESTED, pathList.get(1), includedResources);
+ }
}
} else {
fieldName = getRelationshipName(pathList.get(1), resourceProperty.getClass());
|
#<I> loop over resource properties that are an iterable
|
katharsis-project_katharsis-framework
|
train
|
fe954f807eedb5e2f7acfbe33c5d78c60983babb
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -47,6 +47,7 @@ function composeInto(first){
function mixInto(src, target, propHash){
var key, val, inSrc, isRequired, decorator;
+ //console.log(src.traits)
_.each(target, function(value, key){
define(value, key, src, propHash)
})
@@ -59,19 +60,23 @@ function mixInto(src, target, propHash){
* @param {object} src
*/
function define(value, key, src, propHash){
- var inSrc = _.has(src, key)
+ var inSrc = key in src
, isRequired = value === descriptors.required
, isDescriptor = value instanceof descriptors.Descriptor
, prev;
if ( !isRequired )
+ if ( inSrc && (!propHash[key] || !_.contains(propHash[key], src[key])) )
+ add(propHash, key, src[key])
+
if ( isDescriptor) {
prev = (propHash[key] || []).splice(0) //assume this descriptor is resolving all of the conflicts
- //console.log(prev, propHash[key])
return define(value.resolve.call(src, key, prev), key, src, propHash)
}
else
add(propHash, key, value)
+
+
Object.defineProperty(src, key, {
enumerable: true,
diff --git a/lib/descriptors.js b/lib/descriptors.js
index <HASH>..<HASH> 100644
--- a/lib/descriptors.js
+++ b/lib/descriptors.js
@@ -40,6 +40,16 @@ module.exports = {
merge: describe(decorators.merge),
+ reduce: function (method, target) {
+ return new Descriptor(function (key, previousValues) {
+ return _.reduce(
+ previousValues
+ , method
+ , target === undefined ? null : target
+ , this)
+ })
+ },
+
concat: describe(function(a, b){
if( !_.isArray(a) || !_.isArray(b) )
throw new TypeError('concat descriptor must be called with arrays')
@@ -60,7 +70,7 @@ function Descriptor(fn){ this.resolve = fn }
function describe(composer, trailing) {
return function (method) {
-
+
return new Descriptor(function (key, previousValues) {
if ( method !== undefined)
@@ -72,7 +82,7 @@ function describe(composer, trailing) {
return trailing
? composer(next, merged)
: composer(merged, next)
- })
+ }, null)
})
}
diff --git a/test/descriptors.js b/test/descriptors.js
index <HASH>..<HASH> 100644
--- a/test/descriptors.js
+++ b/test/descriptors.js
@@ -100,6 +100,25 @@ describe(' when using descriptors', function(){
spyA.should.have.been.calledOnce
})
+ it( 'should reduce values', function(){
+ var spanish = { greet: function(){ return "hola" } }
+ , german = { greet: function(){ return "guten morgen" } }
+
+ , result = cobble.compose(spanish, german, {
+
+ greet: cobble.reduce(function(target, next) {
+ return function(){
+ return target() + " " + next()
+ }
+ }, englishHello)
+
+ });
+
+ result.greet().should.equal("hello hola guten morgen")
+
+ function englishHello(){ return "hello" }
+ })
+
it( 'should concat array', function(){
var mixinA = { a: [1,2,3] }
, result = cobble.compose(
|
include props from src object in descriptors
|
jquense_cobble
|
train
|
14c1e9f4bb38bb2a7b01b25130b349e84947290b
|
diff --git a/src/Flint/Provider/FlintServiceProvider.php b/src/Flint/Provider/FlintServiceProvider.php
index <HASH>..<HASH> 100644
--- a/src/Flint/Provider/FlintServiceProvider.php
+++ b/src/Flint/Provider/FlintServiceProvider.php
@@ -16,8 +16,10 @@ class FlintServiceProvider implements \Silex\ServiceProviderInterface
*/
public function register(Application $app)
{
+ $app['exception_controller'] = 'Flint\\Controller\\ExceptionController::showAction';
+
$app['exception_handler'] = $app->share(function ($app) {
- return new ExceptionListener('Flint\\Controller\\ExceptionController::showAction', $app['logger']);
+ return new ExceptionListener($app['exception_controller'], $app['logger']);
});
$app->extend('resolver', function ($resolver, $app) {
|
Allow to override the controller used for excetion handling
|
flint_flint
|
train
|
56e5597ad5a0eb453f54482e563b096d48cf2b76
|
diff --git a/src/iTunes/PurchaseItem.php b/src/iTunes/PurchaseItem.php
index <HASH>..<HASH> 100644
--- a/src/iTunes/PurchaseItem.php
+++ b/src/iTunes/PurchaseItem.php
@@ -80,9 +80,9 @@ class PurchaseItem
/**
* @return array
*/
- public function getData()
+ public function getRawResponse()
{
- return $this->_data;
+ return $this->_response;
}
/**
|
Fix undefined _data attribute. Replaced with function which gives access to the raw json response
|
aporat_store-receipt-validator
|
train
|
d6e9da7ab05bdafa2b2ed38b8b4e4a3a42eaefa9
|
diff --git a/src/ossos-pipeline/ossos/gui/errorhandling.py b/src/ossos-pipeline/ossos/gui/errorhandling.py
index <HASH>..<HASH> 100644
--- a/src/ossos-pipeline/ossos/gui/errorhandling.py
+++ b/src/ossos-pipeline/ossos/gui/errorhandling.py
@@ -11,6 +11,8 @@ class DownloadErrorHandler(object):
def __init__(self, app):
self.app = app
+ self._failed_downloads = []
+
def handle_error(self, error, downloadable_item):
"""
Checks what error occured and looks for an appropriate solution.
@@ -45,11 +47,13 @@ class DownloadErrorHandler(object):
model.start_loading_images()
def handle_connection_refused(self, error_message, downloadable_item):
- self.app.get_view().show_retry_download_dialog(self, error_message,
- downloadable_item)
+ self._failed_downloads.append(downloadable_item)
+ self.app.get_view().show_retry_download_dialog(self, error_message)
- def retry_download(self, downloadable_item):
- self.app.get_model().retry_download(downloadable_item)
+ def retry_downloads(self):
+ model = self.app.get_model()
+ for downloadable_item in self._failed_downloads:
+ model.retry_download(downloadable_item)
class CertificateDialog(wx.Dialog):
@@ -137,19 +141,18 @@ class CertificateDialog(wx.Dialog):
class RetryDownloadDialog(wx.Dialog):
- def __init__(self, parent, handler, error_message, downloadable_item):
+ def __init__(self, parent, handler, error_message):
super(RetryDownloadDialog, self).__init__(parent, title="Download Error")
self.handler = handler
self.error_message = error_message
- self.downloadable_item = downloadable_item
self._init_ui()
self._do_layout()
def _init_ui(self):
- self.header_text = wx.StaticText(self, label="An image failed to "
- "download:")
+ self.header_text = wx.StaticText(self, label="One or more downloads "
+ "failed:")
self.error_text = wx.StaticText(self, label=self.error_message)
error_font = wx.Font(12, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_ITALIC,
wx.FONTWEIGHT_NORMAL)
@@ -189,7 +192,7 @@ class RetryDownloadDialog(wx.Dialog):
self.Destroy()
def on_accept(self, event):
- self.handler.retry_download(self.downloadable_item)
+ self.handler.retry_downloads()
self.Destroy()
diff --git a/src/ossos-pipeline/ossos/gui/views.py b/src/ossos-pipeline/ossos/gui/views.py
index <HASH>..<HASH> 100644
--- a/src/ossos-pipeline/ossos/gui/views.py
+++ b/src/ossos-pipeline/ossos/gui/views.py
@@ -46,6 +46,7 @@ class ApplicationView(object):
self.accept_source_dialog = None
self.reject_source_dialog = None
self.certificate_dialog = None
+ self.retry_downloads_dialog = None
self.mainframe.Show()
self.mainframe.show_image_loading_dialog()
@@ -99,10 +100,12 @@ class ApplicationView(object):
self.certificate_dialog.ShowModal()
@guithread
- def show_retry_download_dialog(self, handler, error_message,
- downloadable_item):
- RetryDownloadDialog(self.mainframe, handler, error_message,
- downloadable_item).Show()
+ def show_retry_download_dialog(self, handler, error_message):
+ # Only allow one dialog to be shown at a time
+ if not self.retry_downloads_dialog:
+ self.retry_downloads_dialog = RetryDownloadDialog(
+ self.mainframe, handler, error_message)
+ self.retry_downloads_dialog.Show()
def show_accept_source_dialog(self, preset_vals):
self.accept_source_dialog = AcceptSourceDialog(
diff --git a/src/ossos-pipeline/tests/test_ossos/test_gui/test_errorhandling.py b/src/ossos-pipeline/tests/test_ossos/test_gui/test_errorhandling.py
index <HASH>..<HASH> 100644
--- a/src/ossos-pipeline/tests/test_ossos/test_gui/test_errorhandling.py
+++ b/src/ossos-pipeline/tests/test_ossos/test_gui/test_errorhandling.py
@@ -40,7 +40,7 @@ class VOSpaceErrorHandlerTest(unittest.TestCase):
self.error_handler.handle_error(error, self.downloadable_item)
self.view.show_retry_download_dialog.assert_called_once_with(
- self.error_handler, message, self.downloadable_item)
+ self.error_handler, message)
if __name__ == '__main__':
|
Only allow one pop-up to retry downloads at a time. Selecting retry
will cause all failed downloads to be requeued.
|
OSSOS_MOP
|
train
|
54c7442de11f383e698f885466e3f9729249be8e
|
diff --git a/app/plates/errors/error.php b/app/plates/errors/error.php
index <HASH>..<HASH> 100644
--- a/app/plates/errors/error.php
+++ b/app/plates/errors/error.php
@@ -2,9 +2,14 @@
/** @var Template $this */
/** @var DataView $view */
use League\Plates\Template\Template;
+use Tuum\Form\Components\NavBar;
use Tuum\Form\DataView;
-$this->layout('layouts/layout', ['view' => $view]);
+$data = $this->data;
+if (!isset($nav)) {
+ $data['nav'] = new NavBar('errors', 'general');
+}
+$this->layout('layouts/layout', $data);
?>
diff --git a/app/plates/errors/notFound.php b/app/plates/errors/notFound.php
index <HASH>..<HASH> 100644
--- a/app/plates/errors/notFound.php
+++ b/app/plates/errors/notFound.php
@@ -2,9 +2,12 @@
/** @var Template $this */
/** @var DataView $view */
use League\Plates\Template\Template;
+use Tuum\Form\Components\NavBar;
use Tuum\Form\DataView;
-$this->layout('layouts/layout', ['view' => $view]);
+$this->layout('layouts/layout', [
+ 'nav' => new NavBar('errors', 'notFound')
+]);
?>
diff --git a/app/plates/layouts/layout.php b/app/plates/layouts/layout.php
index <HASH>..<HASH> 100644
--- a/app/plates/layouts/layout.php
+++ b/app/plates/layouts/layout.php
@@ -58,9 +58,13 @@ $nav = isset($nav) ? $nav : new NavBar(null);
<li class="<?= $nav->m('samples', 'upload');?>"><a href="/upload">Upload: uploading files</a></li>
<li class="<?= $nav->m('samples', 'content');?>"><a href="/content">Content: html rendering</a></li>
<li class="<?= $nav->m('samples', 'objGraph');?>"><a href="/objGraph">Object graph</a></li>
- <li role="separator" class="divider"></li>
- <li><a href="/not-such-file">Not Found Error</a></li>
- <li><a href="/throw">Uncaught Exception</a></li>
+ </ul>
+ </li>
+ <li class="dropdown<?= $nav->m('errors');?>">
+ <a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-haspopup="true" aria-expanded="false">Errors <span class="caret"></span></a>
+ <ul class="dropdown-menu">
+ <li class="<?= $nav->m('errors', 'general');?>"><a href="/throw">General Errors</a></li>
+ <li class="<?= $nav->m('errors', 'notFound');?>"><a href="/not-such-file">Not Found Error</a></li>
</ul>
</li>
</ul>
|
highlight navigation for error pages as well.
|
TuumPHP_Respond
|
train
|
935fd33aadd270b91dad0afc8ad934e29748155a
|
diff --git a/app/lib/security/PlayAuthenticationListener.java b/app/lib/security/PlayAuthenticationListener.java
index <HASH>..<HASH> 100644
--- a/app/lib/security/PlayAuthenticationListener.java
+++ b/app/lib/security/PlayAuthenticationListener.java
@@ -38,8 +38,8 @@ public class PlayAuthenticationListener implements AuthenticationListener {
@Override
public void onFailure(AuthenticationToken token, AuthenticationException ae) {
- final Http.Session session = Http.Context.current().session();
- session.remove("sessionid");
+// final Http.Session session = Http.Context.current().session();
+// session.remove("sessionid");
}
@Override
|
don't log people out just because the connection to server failed
fixes #<I>
|
Graylog2_graylog2-server
|
train
|
19b591da138537097f082dc2979c0b1ab05b77e4
|
diff --git a/src/Charcoal/Source/StorableTrait.php b/src/Charcoal/Source/StorableTrait.php
index <HASH>..<HASH> 100644
--- a/src/Charcoal/Source/StorableTrait.php
+++ b/src/Charcoal/Source/StorableTrait.php
@@ -8,9 +8,6 @@ use InvalidArgumentException;
// From 'charcoal-factory'
use Charcoal\Factory\FactoryInterface;
-// From 'charcoal-core'
-use Charcoal\Source\SourceInterface;
-use Charcoal\Source\StorableInterface;
/**
* Provides an object with storage interaction.
@@ -137,34 +134,6 @@ trait StorableTrait
}
/**
- * Set the datasource repository factory.
- *
- * @param FactoryInterface $factory The source factory.
- * @return self
- */
- protected function setSourceFactory(FactoryInterface $factory)
- {
- $this->sourceFactory = $factory;
- return $this;
- }
-
- /**
- * Get the datasource repository factory.
- *
- * @throws RuntimeException If the source factory was not previously set.
- * @return FactoryInterface
- */
- protected function sourceFactory()
- {
- if (!isset($this->sourceFactory)) {
- throw new RuntimeException(
- sprintf('Source factory is not set for "%s"', get_class($this))
- );
- }
- return $this->sourceFactory;
- }
-
- /**
* Set the object's datasource repository.
*
* @todo This method needs to be protected.
@@ -204,7 +173,7 @@ trait StorableTrait
* @param mixed $id The identifier to load.
* @return self
*/
- public function load($id = null)
+ final public function load($id = null)
{
if ($id === null) {
$id = $this->id();
@@ -220,7 +189,7 @@ trait StorableTrait
* @param mixed $value Value of said column.
* @return self
*/
- public function loadFrom($key = null, $value = null)
+ final public function loadFrom($key = null, $value = null)
{
$this->source()->loadItemFromKey($key, $value, $this);
return $this;
@@ -233,7 +202,7 @@ trait StorableTrait
* @param array $binds Optional. The SQL query parameters.
* @return self
*/
- public function loadFromQuery($query, array $binds = [])
+ final public function loadFromQuery($query, array $binds = [])
{
$this->source()->loadItemFromQuery($query, $binds, $this);
return $this;
@@ -244,7 +213,7 @@ trait StorableTrait
*
* @return boolean TRUE on success.
*/
- public function save()
+ final public function save()
{
$pre = $this->preSave();
if ($pre === false) {
@@ -290,7 +259,7 @@ trait StorableTrait
* @param string[] $keys If provided, only update the properties specified.
* @return boolean TRUE on success.
*/
- public function update(array $keys = null)
+ final public function update(array $keys = null)
{
$pre = $this->preUpdate($keys);
if ($pre === false) {
@@ -333,7 +302,7 @@ trait StorableTrait
*
* @return boolean TRUE on success.
*/
- public function delete()
+ final public function delete()
{
$pre = $this->preDelete();
if ($pre === false) {
@@ -372,6 +341,34 @@ trait StorableTrait
}
/**
+ * Set the datasource repository factory.
+ *
+ * @param FactoryInterface $factory The source factory.
+ * @return self
+ */
+ protected function setSourceFactory(FactoryInterface $factory)
+ {
+ $this->sourceFactory = $factory;
+ return $this;
+ }
+
+ /**
+ * Get the datasource repository factory.
+ *
+ * @throws RuntimeException If the source factory was not previously set.
+ * @return FactoryInterface
+ */
+ protected function sourceFactory()
+ {
+ if (!isset($this->sourceFactory)) {
+ throw new RuntimeException(
+ sprintf('Source factory is not set for "%s"', get_class($this))
+ );
+ }
+ return $this->sourceFactory;
+ }
+
+ /**
* Event called before {@see self::save() creating} the object.
*
* @return boolean TRUE to proceed with creation; FALSE to stop creation.
|
Set save(), update(), delete() and load() as final, even if PHP doesnt enfore final keywords for Traits...
|
locomotivemtl_charcoal-core
|
train
|
28222438af3ddffcd69a18e9c215f8e37d9fc453
|
diff --git a/lib/locomotive/steam/liquid/tags/editable/base.rb b/lib/locomotive/steam/liquid/tags/editable/base.rb
index <HASH>..<HASH> 100644
--- a/lib/locomotive/steam/liquid/tags/editable/base.rb
+++ b/lib/locomotive/steam/liquid/tags/editable/base.rb
@@ -39,10 +39,16 @@ module Locomotive
page = fetch_page(context)
block = @element_options[:block] || context['block'].try(:name)
+ # If Steam inside Wagon (test mode), we've to let the developer know
+ # that editable_**** tags don't work if the site has declared at least one section
+ if context['wagon'] && context.registers[:repositories].section.count > 0
+ Locomotive::Common::Logger.error "[#{page.fullpath}] You can't use editable elements whereas you declared section(s)".colorize(:red)
+ end
+
if element = service.find(page, block, @slug)
render_element(context, element)
else
- Locomotive::Common::Logger.error "[#{page.fullpath}] missing #{@tag_name} \"#{@slug}\" (#{context['block'].try(:name) || 'default'})"
+ Locomotive::Common::Logger.error "[#{page.fullpath}] missing #{@tag_name} \"#{@slug}\" (#{context['block'].try(:name) || 'default'})".colorize(:yellow)
super
end
end
diff --git a/spec/unit/liquid/tags/editable/text_spec.rb b/spec/unit/liquid/tags/editable/text_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/liquid/tags/editable/text_spec.rb
+++ b/spec/unit/liquid/tags/editable/text_spec.rb
@@ -93,11 +93,13 @@ describe Locomotive::Steam::Liquid::Tags::Editable::Text do
let(:live_editing) { false }
let(:element_editing) { true }
+ let(:wagon) { false }
- let(:child_page) { instance_double('Page', fullpath: 'child-page') }
- let(:element) { instance_double('EditableText', _id: 42, id: 42, content: nil, inline_editing?: element_editing, inline_editing: element_editing, format: 'html') }
- let(:services) { Locomotive::Steam::Services.build_instance(nil) }
- let(:context) { ::Liquid::Context.new({}, {}, { page: child_page, services: services, live_editing: live_editing }) }
+ let(:child_page) { instance_double('Page', fullpath: 'child-page') }
+ let(:element) { instance_double('EditableText', _id: 42, id: 42, content: nil, inline_editing?: element_editing, inline_editing: element_editing, format: 'html') }
+ let(:services) { Locomotive::Steam::Services.build_instance(nil) }
+ let(:repositories) { instance_double('Repositories') }
+ let(:context) { ::Liquid::Context.new({ 'wagon' => wagon }, {}, { page: child_page, services: services, repositories: repositories, live_editing: live_editing }) }
before { allow(services.editable_element).to receive(:find).and_return(element) }
@@ -193,6 +195,32 @@ describe Locomotive::Steam::Liquid::Tags::Editable::Text do
end
+ context 'site with sections' do
+
+ let(:repositories) { instance_double('Repositories', section: instance_double('SectionRepository', count: 1)) }
+
+ context 'in production' do
+
+ it "doesn't display the warning message" do
+ expect(Locomotive::Common::Logger).not_to receive(:error).with("[child-page] You can't use editable elements whereas you declared section(s)".colorize(:red))
+ subject
+ end
+
+ end
+
+ context 'in Wagon' do
+
+ let(:wagon) { true }
+
+ it "displays a warning message (because we don't allow editable elements and sections to be used together in the same site)" do
+ expect(Locomotive::Common::Logger).to receive(:error).with("[child-page] You can't use editable elements whereas you declared section(s)".colorize(:red))
+ subject
+ end
+
+ end
+
+ end
+
end
end
|
implement suggestions in locomotivecms/engine#<I>
|
locomotivecms_steam
|
train
|
345578deb0c6d97d0f1f71dd5270bf9061dcc723
|
diff --git a/lib/zold/node/farmers.rb b/lib/zold/node/farmers.rb
index <HASH>..<HASH> 100755
--- a/lib/zold/node/farmers.rb
+++ b/lib/zold/node/farmers.rb
@@ -124,19 +124,18 @@ for #{after.host}:#{after.port} in #{Age.new(start)}: #{after.suffixes}")
def up(score)
start = Time.now
- read, write = IO.pipe
+ stdin, stdout = IO.pipe
Process.fork do
score = score.next
- write.puts "#{score.to_s}|#{Process.pid}"
+ stdout.puts "#{score.to_s}|#{Process.pid}"
end
Process.wait
- write.close
- output = read.read
- buffer = output.split('|')[0]
- proc_pid = output.split('|')[1]
+ stdout.close
+ output = stdin.read
+ buffer, pid = output.split('|')
after = Score.parse(buffer.strip)
- read.close
- @log.debug("Next score #{after.value}/#{after.strength} found in proc ##{proc_pid.strip} \
+ stdin.close
+ @log.debug("Next score #{after.value}/#{after.strength} found in proc ##{pid.strip} \
for #{after.host}:#{after.port} in #{Age.new(start)}: #{after.suffixes}")
after
end
|
Issue #<I> - Few fixes on naming
|
zold-io_zold
|
train
|
33bf20e4499f474fb4f31a062b8a6fcd0cd5869e
|
diff --git a/business_error.gemspec b/business_error.gemspec
index <HASH>..<HASH> 100644
--- a/business_error.gemspec
+++ b/business_error.gemspec
@@ -21,6 +21,8 @@ Gem::Specification.new do |spec|
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
+ spec.add_dependency "request_store"
+
spec.add_development_dependency "bundler", "~> 1.16"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
diff --git a/lib/business_error.rb b/lib/business_error.rb
index <HASH>..<HASH> 100644
--- a/lib/business_error.rb
+++ b/lib/business_error.rb
@@ -1,4 +1,5 @@
# frozen_string_literal: true
+require 'request_store'
require 'business_error/version'
require 'business_error/error'
@@ -14,7 +15,11 @@ module BusinessError
http: _get_http,
group: _get_group,
format: @format
- define_singleton_method(name) { Error.new(name, message, code, http, format) }
+ define_singleton_method(name) do |locale = _get_locale|
+ msg = message.is_a?(Hash) ? (message[locale] || message[:en]) : message
+ Error.new(name, msg, code, http, format)
+ end
+
define_singleton_method("#{name}!") { send(name).throw! }
defs_tree[self.name] ||= { }
@@ -31,16 +36,6 @@ module BusinessError
@group_name, @code, @http, @format = group_name, code_start_at, http, format
end
- def _get_group
- @group_name || :public
- end
-
- def _get_code
- raise ArgumentError, 'Should give a code to define your business error' if (code = @code).nil?
- @code = @code < 0 ? (code - 1) : (code + 1)
- code
- end
-
def code_start_at code
@code = code
end
@@ -53,10 +48,6 @@ module BusinessError
@format = template
end
- def _get_http
- @http_status || Config.default_http_status
- end
-
def define_px name, message = '', code = _get_code, http: _get_http
group_name = name.to_s.split('_').first.to_sym
group group_name do
@@ -81,4 +72,24 @@ module BusinessError
def all
puts defs_tree.stringify_keys.to_yaml.gsub(' :', ' ')
end
+
+ # ===
+
+ def _get_group
+ @group_name || :public
+ end
+
+ def _get_code
+ raise ArgumentError, 'Should give a code to define your business error' if (code = @code).nil?
+ @code = @code < 0 ? (code - 1) : (code + 1)
+ code
+ end
+
+ def _get_http
+ @http_status || Config.default_http_status
+ end
+
+ def _get_locale
+ RequestStore.store[:err_locale] ||= 'en'
+ end
end
diff --git a/lib/business_error/version.rb b/lib/business_error/version.rb
index <HASH>..<HASH> 100644
--- a/lib/business_error/version.rb
+++ b/lib/business_error/version.rb
@@ -1,5 +1,5 @@
# frozen_string_literal: true
module BusinessError
- VERSION = '1.0.3'
+ VERSION = '1.1.0'
end
|
F / [I<I>n] Support by RequestStore
|
zhandao_business_error
|
train
|
0851ee7e8c70d22a7f1b4f3d0e3496b1b23f86e0
|
diff --git a/pkg/deploy/cmd/scale.go b/pkg/deploy/cmd/scale.go
index <HASH>..<HASH> 100644
--- a/pkg/deploy/cmd/scale.go
+++ b/pkg/deploy/cmd/scale.go
@@ -1,8 +1,6 @@
package cmd
import (
- "fmt"
- "os"
"time"
kapi "k8s.io/kubernetes/pkg/api"
@@ -11,7 +9,6 @@ import (
"k8s.io/kubernetes/pkg/util/wait"
"github.com/openshift/origin/pkg/client"
- deployapi "github.com/openshift/origin/pkg/deploy/api"
"github.com/openshift/origin/pkg/deploy/util"
)
@@ -61,17 +58,13 @@ func (scaler *DeploymentConfigScaler) Scale(namespace, name string, newSize uint
// ScaleSimple does a simple one-shot attempt at scaling - not useful on its
// own, but a necessary building block for Scale
func (scaler *DeploymentConfigScaler) ScaleSimple(namespace, name string, preconditions *kubectl.ScalePrecondition, newSize uint) error {
- dc, err := scaler.dcClient.DeploymentConfigs(namespace).Get(name)
+ scale, err := scaler.dcClient.DeploymentConfigs(namespace).GetScale(name)
if err != nil {
return err
}
- if dc.Spec.Test {
- fmt.Fprintln(os.Stderr, "Replica size for a test deployment applies only when the deployment is running.")
- }
- scale := deployapi.ScaleFromConfig(dc)
scale.Spec.Replicas = int32(newSize)
if _, err := scaler.dcClient.DeploymentConfigs(namespace).UpdateScale(scale); err != nil {
- return kubectl.ScaleError{FailureType: kubectl.ScaleUpdateFailure, ResourceVersion: dc.ResourceVersion, ActualError: err}
+ return kubectl.ScaleError{FailureType: kubectl.ScaleUpdateFailure, ResourceVersion: "Unknown", ActualError: err}
}
return nil
}
|
Scale helpers: always fetch scale object for DCs
The scale helper for DeploymentConfigs previously would fetch the raw
deployment, covert that into a scale object, and then submit that scale
object as an update to the scale subresource of the DeploymentConfig.
This caused issues when using recent clients against older API servers
since new metadata (the UID) was added to scale, and the older API
servers would choke while validating the update.
Fixes bug <I>
|
openshift_origin
|
train
|
9a30c631ec742f54b7ee638a3ca8ce1431ec4b97
|
diff --git a/src/main/java/org/dasein/cloud/cloudstack/compute/Volumes.java b/src/main/java/org/dasein/cloud/cloudstack/compute/Volumes.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/dasein/cloud/cloudstack/compute/Volumes.java
+++ b/src/main/java/org/dasein/cloud/cloudstack/compute/Volumes.java
@@ -243,11 +243,12 @@ public class Volumes extends AbstractVolumeSupport {
Param[] params;
if( product == null && snapshotId == null ) {
- params = new Param[] {
+ /*params = new Param[] {
new Param("name", options.getName()),
new Param("zoneId", ctx.getRegionId()),
new Param("size", String.valueOf(size.longValue()))
- };
+ }; */
+ throw new CloudException("A suitable snapshot or disk offering could not be found to pass to CloudStack createVolume request");
}
else if( snapshotId != null ) {
params = new Param[] {
@@ -553,13 +554,8 @@ public class Volumes extends AbstractVolumeSupport {
for( DiskOffering offering : getDiskOfferings() ) {
VolumeProduct p = toProduct(offering);
- if( p != null ) {
- if (!provider.getServiceProvider().equals(CSServiceProvider.DEMOCLOUD) && !provider.getVersion().greaterThan(CSVersion.CS3)) {
- list.add(p);
- }
- else if ( "local".equals(offering.type) ) {
- list.add(p);
- }
+ if( p != null && (!provider.getServiceProvider().equals(CSServiceProvider.DEMOCLOUD) || "local".equals(offering.type)) ) {
+ list.add(p);
}
}
products = Collections.unmodifiableList(list);
|
fixed createVolume request to use all available disk offerings
|
greese_dasein-cloud-cloudstack
|
train
|
6e179d444a981f810490abfb4f5fa4c1d5758a2d
|
diff --git a/lib/active_admin/pages/show.rb b/lib/active_admin/pages/show.rb
index <HASH>..<HASH> 100644
--- a/lib/active_admin/pages/show.rb
+++ b/lib/active_admin/pages/show.rb
@@ -24,7 +24,7 @@ module ActiveAdmin
else
default_main_content
end
- html + comments
+ html + (comments if active_admin_config.admin_notes?)
end
def comments
diff --git a/lib/active_admin/view_helpers.rb b/lib/active_admin/view_helpers.rb
index <HASH>..<HASH> 100644
--- a/lib/active_admin/view_helpers.rb
+++ b/lib/active_admin/view_helpers.rb
@@ -125,7 +125,7 @@ module ActiveAdmin
def admin_note(note)
content_tag_for(:li, note) do
- content_tag(:h4, "Posted #{note.created_at} by XXX") +
+ content_tag(:h4, "Posted at #{l note.created_at}") +
simple_format(note.body)
end
end
diff --git a/lib/generators/active_admin/install/templates/active_admin.rb b/lib/generators/active_admin/install/templates/active_admin.rb
index <HASH>..<HASH> 100644
--- a/lib/generators/active_admin/install/templates/active_admin.rb
+++ b/lib/generators/active_admin/install/templates/active_admin.rb
@@ -22,7 +22,14 @@ ActiveAdmin.setup do |config|
# To set no namespace by default, use:
# config.default_namespace = false
config.default_namespace = :admin
-
+
+ # == Admin Notes
+ #
+ # Admin notes allow you to add notes to any model
+ # Admin notes are enabled by default, but can be disabled
+ # by uncommenting this line:
+ #
+ # config.admin_notes = false
# == Controller Filters
#
|
Dont show the admin notes form if admin notes are disabled.
|
activeadmin_activeadmin
|
train
|
45cbc922867ef32b08f9b4cddb5271c1269d628a
|
diff --git a/tests/builder_test.py b/tests/builder_test.py
index <HASH>..<HASH> 100644
--- a/tests/builder_test.py
+++ b/tests/builder_test.py
@@ -892,6 +892,12 @@ class ToUfosTest(unittest.TestCase):
ufo = to_ufos(font)[0]
self.assertEqual(ufo.lib[GLYPHS_PREFIX + 'customName'], 'FooBar')
+ def test_coerce_to_bool(self):
+ font = generate_minimal_font()
+ font.customParameters['Disable Last Change'] = 'Truthy'
+ ufo = to_ufos(font)[0]
+ self.assertEqual(True, ufo.lib[GLYPHS_PREFIX + 'disablesLastChange'])
+
def _run_guideline_test(self, data_in, expected):
font = generate_minimal_font()
glyph = GSGlyph(name='a')
|
Check that 'Disable Last Change' is cast to bool
|
googlefonts_glyphsLib
|
train
|
8067b476d3153ba4b300a93c1aa83a27dea5ddac
|
diff --git a/jupyterdrive/__init__.py b/jupyterdrive/__init__.py
index <HASH>..<HASH> 100644
--- a/jupyterdrive/__init__.py
+++ b/jupyterdrive/__init__.py
@@ -2,6 +2,7 @@
import IPython
import IPython.html.nbextensions as nbe
from IPython.utils.path import locate_profile
+from IPython.utils.py3compat import cast_unicode_py2
import sys
@@ -41,8 +42,8 @@ def activate(profile):
print(config)
config['nbformat'] = 1
- with io.open(os.path.join(pdir,'ipython_notebook_config.json'),'wb') as f:
- json.dump(config,f, indent=2)
+ with io.open(os.path.join(pdir,'ipython_notebook_config.json'),'w', encoding='utf-8') as f:
+ f.write(cast_unicode_py2(json.dumps(config, indent=2)))
def deactivate(profile):
"""should be a matter of just unsetting the above keys
|
Fix writing JSON file on Python 3
Addresses gh-<I>
This is admittedly inelegant, but since the json module on Python 2 can
serialise to either bytes or unicode depending on the input, it seems
like the most robust way to write json to a file.
|
jupyter_jupyter-drive
|
train
|
1398eafa6d972e510d39a03cb594345fe4e6fdbb
|
diff --git a/modules/grid/src/test/java/integration/AbstractGridTest.java b/modules/grid/src/test/java/integration/AbstractGridTest.java
index <HASH>..<HASH> 100644
--- a/modules/grid/src/test/java/integration/AbstractGridTest.java
+++ b/modules/grid/src/test/java/integration/AbstractGridTest.java
@@ -5,12 +5,18 @@ import io.github.bonigarcia.wdm.WebDriverManager;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.openqa.selenium.grid.Main;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.UncheckedIOException;
import static com.codeborne.selenide.Configuration.timeout;
import static com.codeborne.selenide.Selenide.closeWebDriver;
import static org.openqa.selenium.net.PortProber.findFreePort;
abstract class AbstractGridTest extends IntegrationTest {
+ private final Logger log = LoggerFactory.getLogger(getClass());
+
int hubPort;
@BeforeEach
@@ -26,8 +32,17 @@ abstract class AbstractGridTest extends IntegrationTest {
}
closeWebDriver();
- hubPort = findFreePort();
- Main.main(new String[]{"standalone", "--port", String.valueOf(hubPort)});
+ for (int tries = 0; tries < 3; tries++) {
+ int port = findFreePort();
+ try {
+ Main.main(new String[]{"standalone", "--port", String.valueOf(port)});
+ hubPort = port;
+ break;
+ }
+ catch (UncheckedIOException portAlreadyUsed) {
+ log.warn("Failed to start Selenium Grid on port {}", port, portAlreadyUsed);
+ }
+ }
timeout = 4000;
}
|
stabilize flaky Selenium Grid tests
re-try to start Grid if the port is occasionally already used
|
selenide_selenide
|
train
|
e6533225d5e16290157a3f563e1aa4059db5c8ab
|
diff --git a/pyfritzhome/fritzhome.py b/pyfritzhome/fritzhome.py
index <HASH>..<HASH> 100644
--- a/pyfritzhome/fritzhome.py
+++ b/pyfritzhome/fritzhome.py
@@ -95,15 +95,18 @@ class Fritzhome(object):
def login(self):
"""Login and get a valid session ID."""
- (sid, challenge) = self._login_request()
- if sid == '0000000000000000':
- secret = self._create_login_secret(challenge, self._password)
- (sid2, challenge) = self._login_request(username=self._user,
- secret=secret)
- if sid2 == '0000000000000000':
- _LOGGER.warning("login failed %s", sid2)
- raise LoginError(self._user)
- self._sid = sid2
+ try:
+ (sid, challenge) = self._login_request()
+ if sid == '0000000000000000':
+ secret = self._create_login_secret(challenge, self._password)
+ (sid2, challenge) = self._login_request(username=self._user,
+ secret=secret)
+ if sid2 == '0000000000000000':
+ _LOGGER.warning("login failed %s", sid2)
+ raise LoginError(self._user)
+ self._sid = sid2
+ except xml.parsers.expat.ExpatError:
+ raise LoginError(self._user)
def logout(self):
"""Logout."""
|
LoginError when connect to non fritzbox
If connection is tried to an other device other then fitzbox
the returen value cannot be parsed. Throw a LoginError exception.
|
hthiery_python-fritzhome
|
train
|
3558d88cebcb2f0abc98169441482de7252574ec
|
diff --git a/pymola/backends/casadi/api.py b/pymola/backends/casadi/api.py
index <HASH>..<HASH> 100644
--- a/pymola/backends/casadi/api.py
+++ b/pymola/backends/casadi/api.py
@@ -136,7 +136,7 @@ def _save_model(model_folder: str, model_name: str, model: Model):
compiler = distutils.ccompiler.new_compiler()
- file_name = os.path.realpath(os.path.join(model_folder, library_name + '.c'))
+ file_name = os.path.relpath(os.path.join(model_folder, library_name + '.c'))
object_name = compiler.object_filenames([file_name])[0]
d.library = os.path.join(model_folder, library_name + compiler.shared_lib_extension)
try:
|
Fix extraneous compilation directory on Linux
Closes #<I>
|
pymoca_pymoca
|
train
|
70b00557253624ab0d917be146a55ca0aeba376a
|
diff --git a/db_adapter.go b/db_adapter.go
index <HASH>..<HASH> 100644
--- a/db_adapter.go
+++ b/db_adapter.go
@@ -6,9 +6,9 @@ import (
)
type dbAdapter struct {
- driverName string
+ driverName string
dataSourceName string
- db *sql.DB
+ db *sql.DB
}
func newDbAdapter(driverName string, dataSourceName string) *dbAdapter {
@@ -29,7 +29,7 @@ func (a *dbAdapter) open() {
panic(err)
}
- db, err = sql.Open("mysql", a.dataSourceName + "casbin")
+ db, err = sql.Open("mysql", a.dataSourceName+"casbin")
if err != nil {
panic(err)
}
@@ -60,10 +60,10 @@ func (a *dbAdapter) dropTable() {
func (a *dbAdapter) loadPolicy(model Model) {
var (
ptype string
- v1 string
- v2 string
- v3 string
- v4 string
+ v1 string
+ v2 string
+ v3 string
+ v4 string
)
rows, err := a.db.Query("select * from policy")
@@ -105,7 +105,7 @@ func (a *dbAdapter) writeTableLine(ptype string, rule []string) {
for i := range rule {
line += ",'" + rule[i] + "'"
}
- for i := 0; i < 4 - len(rule); i ++ {
+ for i := 0; i < 4-len(rule); i++ {
line += ",''"
}
diff --git a/enforcer.go b/enforcer.go
index <HASH>..<HASH> 100644
--- a/enforcer.go
+++ b/enforcer.go
@@ -7,13 +7,13 @@ import (
// Enforcer is the main interface for authorization enforcement and policy management.
type Enforcer struct {
- modelPath string
- adapter *fileAdapter
+ modelPath string
+ adapter *fileAdapter
- model Model
- fm FunctionMap
+ model Model
+ fm FunctionMap
- enabled bool
+ enabled bool
}
// Initialize an enforcer with a model file and a policy file.
diff --git a/enforcer_test.go b/enforcer_test.go
index <HASH>..<HASH> 100644
--- a/enforcer_test.go
+++ b/enforcer_test.go
@@ -1,10 +1,10 @@
package casbin
import (
+ "github.com/hsluoyz/casbin/util"
"log"
- "testing"
"reflect"
- "github.com/hsluoyz/casbin/util"
+ "testing"
)
func testEnforce(t *testing.T, e *Enforcer, sub string, obj string, act string, res bool) {
@@ -119,7 +119,7 @@ func TestABACModel(t *testing.T) {
}
type testUser struct {
- name string
+ name string
domain string
}
@@ -137,7 +137,7 @@ func (u *testUser) getAttribute(attributeName string) string {
}
type testResource struct {
- name string
+ name string
domain string
}
@@ -235,10 +235,10 @@ func TestGetRoles(t *testing.T) {
func testStringList(t *testing.T, title string, f func() []string, res []string) {
myRes := f()
- log.Print(title + ": ", myRes)
+ log.Print(title+": ", myRes)
if !util.ArrayEquals(res, myRes) {
- t.Error(title + ": ", myRes, ", supposed to be ", res)
+ t.Error(title+": ", myRes, ", supposed to be ", res)
}
}
diff --git a/file_adapter.go b/file_adapter.go
index <HASH>..<HASH> 100644
--- a/file_adapter.go
+++ b/file_adapter.go
@@ -1,12 +1,12 @@
package casbin
import (
- "os"
"bufio"
- "strings"
- "io"
"bytes"
"github.com/hsluoyz/casbin/util"
+ "io"
+ "os"
+ "strings"
)
type fileAdapter struct {
diff --git a/model.go b/model.go
index <HASH>..<HASH> 100644
--- a/model.go
+++ b/model.go
@@ -1,11 +1,11 @@
package casbin
import (
+ "github.com/hsluoyz/casbin/util"
"github.com/lxmgo/config"
"log"
"strconv"
"strings"
- "github.com/hsluoyz/casbin/util"
)
// Model represents the whole access control model.
@@ -64,7 +64,7 @@ func getKeySuffix(i int) string {
func loadSection(model Model, cfg config.ConfigInterface, sec string) {
i := 1
for {
- if !loadAssertion(model, cfg, sec, sec + getKeySuffix(i)) {
+ if !loadAssertion(model, cfg, sec, sec+getKeySuffix(i)) {
break
} else {
i++
diff --git a/policy.go b/policy.go
index <HASH>..<HASH> 100644
--- a/policy.go
+++ b/policy.go
@@ -1,9 +1,9 @@
package casbin
import (
+ "github.com/hsluoyz/casbin/util"
"log"
"strings"
- "github.com/hsluoyz/casbin/util"
)
func buildRoleLinks(model Model) {
@@ -44,7 +44,6 @@ func loadPolicyLine(line string, model Model) {
model[sec][key].policy = append(model[sec][key].policy, tokens[1:])
}
-
func getPolicy(model Model, sec string, ptype string) [][]string {
return model[sec][ptype].policy
}
diff --git a/util/util.go b/util/util.go
index <HASH>..<HASH> 100644
--- a/util/util.go
+++ b/util/util.go
@@ -1,9 +1,9 @@
package util
import (
- "strings"
- "regexp"
"bytes"
+ "regexp"
+ "strings"
)
func EscapeAssertion(s string) string {
|
Improve code with gofmt.
|
casbin_casbin
|
train
|
ff4e7929e1cccbb27f53e68a56a7c538fbceb658
|
diff --git a/driver/src/test/acceptance/org/mongodb/acceptancetest/crud/ReplaceAcceptanceTest.java b/driver/src/test/acceptance/org/mongodb/acceptancetest/crud/ReplaceAcceptanceTest.java
index <HASH>..<HASH> 100644
--- a/driver/src/test/acceptance/org/mongodb/acceptancetest/crud/ReplaceAcceptanceTest.java
+++ b/driver/src/test/acceptance/org/mongodb/acceptancetest/crud/ReplaceAcceptanceTest.java
@@ -23,7 +23,6 @@ import org.mongodb.MongoView;
import org.mongodb.MongoWriteException;
import static org.hamcrest.CoreMatchers.anyOf;
-import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
@@ -93,7 +92,6 @@ public class ReplaceAcceptanceTest extends DatabaseTestCase {
} catch (MongoWriteException e) {
// Then
assertThat("Error code should match one of these error codes", e.getErrorCode(), anyOf(is(13596), is(16837)));
- assertThat(e.getErrorMessage(), containsString("_id field cannot be changed"));
}
}
|
It's true, one shouldn't rely on error message strings to determine the correctness of the error.
|
mongodb_mongo-java-driver
|
train
|
9433cb992903b74e6a18e0d9b96a5da45f426d5a
|
diff --git a/salt/modules/virt.py b/salt/modules/virt.py
index <HASH>..<HASH> 100644
--- a/salt/modules/virt.py
+++ b/salt/modules/virt.py
@@ -3506,6 +3506,7 @@ def update(
# Update the kernel boot parameters
data = {k: v for k, v in locals().items() if bool(v)}
+ data["stop_on_reboot"] = stop_on_reboot
if boot_dev:
data["boot_dev"] = boot_dev.split()
diff --git a/tests/unit/modules/test_virt.py b/tests/unit/modules/test_virt.py
index <HASH>..<HASH> 100644
--- a/tests/unit/modules/test_virt.py
+++ b/tests/unit/modules/test_virt.py
@@ -2281,6 +2281,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
<memory unit='KiB'>1048576</memory>
<currentMemory unit='KiB'>1048576</currentMemory>
<vcpu placement='auto'>1</vcpu>
+ <on_reboot>restart</on_reboot>
<os>
<type arch='x86_64' machine='pc-i440fx-2.6'>hvm</type>
<boot dev="hd"/>
@@ -3389,6 +3390,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
<memory unit='KiB'>1048576</memory>
<currentMemory unit='KiB'>1048576</currentMemory>
<vcpu placement='auto'>1</vcpu>
+ <on_reboot>restart</on_reboot>
<os>
<type arch='x86_64' machine='pc-i440fx-2.6'>hvm</type>
</os>
|
Ensure virt.update stop_on_reboot is updated with its default value
While all virt.update properties default values should not be used when
updating the XML definition, the stop_on_reboot default value (False)
needs to be passed still or the user will never be able to update with
this value.
|
saltstack_salt
|
train
|
6b76c0355562890b74553c85e688223d772f1c92
|
diff --git a/core/src/main/java/org/infinispan/marshall/AbstractDelegatingMarshaller.java b/core/src/main/java/org/infinispan/marshall/AbstractDelegatingMarshaller.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/infinispan/marshall/AbstractDelegatingMarshaller.java
+++ b/core/src/main/java/org/infinispan/marshall/AbstractDelegatingMarshaller.java
@@ -29,6 +29,11 @@ public abstract class AbstractDelegatingMarshaller implements StreamingMarshalle
return marshaller.startObjectOutput(os, isReentrant, estimatedSize);
}
+ @Override @Deprecated
+ public ObjectOutput startObjectOutput(OutputStream os, boolean isReentrant) throws IOException {
+ return marshaller.startObjectOutput(os, isReentrant);
+ }
+
@Override
public void finishObjectOutput(ObjectOutput oo) {
marshaller.finishObjectOutput(oo);
diff --git a/core/src/main/java/org/infinispan/marshall/StreamingMarshaller.java b/core/src/main/java/org/infinispan/marshall/StreamingMarshaller.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/infinispan/marshall/StreamingMarshaller.java
+++ b/core/src/main/java/org/infinispan/marshall/StreamingMarshaller.java
@@ -76,6 +76,12 @@ public interface StreamingMarshaller extends Marshaller {
ObjectOutput startObjectOutput(OutputStream os, boolean isReentrant, final int estimatedSize) throws IOException;
/**
+ * Use {@link #startObjectOutput(OutputStream, boolean, int)} instead
+ */
+ @Deprecated
+ ObjectOutput startObjectOutput(OutputStream os, boolean isReentrant) throws IOException;
+
+ /**
* Finish using the given ObjectOutput. After opening a ObjectOutput and calling objectToObjectStream() multiple
* times, use this method to flush the data and close if necessary
*
diff --git a/core/src/main/java/org/infinispan/marshall/VersionAwareMarshaller.java b/core/src/main/java/org/infinispan/marshall/VersionAwareMarshaller.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/infinispan/marshall/VersionAwareMarshaller.java
+++ b/core/src/main/java/org/infinispan/marshall/VersionAwareMarshaller.java
@@ -129,6 +129,11 @@ public class VersionAwareMarshaller extends AbstractMarshaller implements Stream
return out;
}
+ @Deprecated @Override
+ public ObjectOutput startObjectOutput(OutputStream os, boolean isReentrant) throws IOException {
+ return startObjectOutput(os, isReentrant, 512);
+ }
+
@Override
public void finishObjectOutput(ObjectOutput oo) {
defaultMarshaller.finishObjectOutput(oo);
diff --git a/core/src/main/java/org/infinispan/marshall/jboss/AbstractJBossMarshaller.java b/core/src/main/java/org/infinispan/marshall/jboss/AbstractJBossMarshaller.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/infinispan/marshall/jboss/AbstractJBossMarshaller.java
+++ b/core/src/main/java/org/infinispan/marshall/jboss/AbstractJBossMarshaller.java
@@ -86,6 +86,11 @@ public abstract class AbstractJBossMarshaller extends AbstractMarshaller {
return marshaller;
}
+ final public ObjectOutput startObjectOutput(final OutputStream os, final boolean isReentrant) throws IOException {
+ return startObjectOutput(os, isReentrant, 512);
+ }
+
+
protected abstract Marshaller getMarshaller(boolean isReentrant, final int estimatedSize) throws IOException;
final public void finishObjectOutput(final ObjectOutput oo) {
diff --git a/core/src/test/java/org/infinispan/marshall/TestObjectStreamMarshaller.java b/core/src/test/java/org/infinispan/marshall/TestObjectStreamMarshaller.java
index <HASH>..<HASH> 100644
--- a/core/src/test/java/org/infinispan/marshall/TestObjectStreamMarshaller.java
+++ b/core/src/test/java/org/infinispan/marshall/TestObjectStreamMarshaller.java
@@ -51,6 +51,11 @@ public class TestObjectStreamMarshaller extends AbstractMarshaller implements St
return new ObjectOutputStream(os);
}
+ @Override @Deprecated
+ public ObjectOutput startObjectOutput(OutputStream os, boolean isReentrant) throws IOException {
+ throw new IllegalStateException("Should not invoke deprecated method anymore");
+ }
+
@Override
public void finishObjectOutput(ObjectOutput oo) {
Util.flushAndCloseOutput(oo);
|
ISPN-<I> Reintroduce previous method version deprecating it
|
infinispan_infinispan
|
train
|
59baa34e793f8ae3ed933769b76c699f5440a2dd
|
diff --git a/lib/srv/regular/sshserver.go b/lib/srv/regular/sshserver.go
index <HASH>..<HASH> 100644
--- a/lib/srv/regular/sshserver.go
+++ b/lib/srv/regular/sshserver.go
@@ -308,16 +308,12 @@ func (s *Server) isAuditedAtProxy() bool {
type ServerOption func(s *Server) error
func (s *Server) close() {
- s.Lock()
- defer s.Unlock()
-
s.cancel()
s.reg.Close()
if s.heartbeat != nil {
if err := s.heartbeat.Close(); err != nil {
s.Warningf("Failed to close heartbeat: %v", err)
}
- s.heartbeat = nil
}
if s.dynamicLabels != nil {
s.dynamicLabels.Close()
@@ -325,7 +321,6 @@ func (s *Server) close() {
if s.users != nil {
s.users.Shutdown()
- s.users = nil
}
}
@@ -368,9 +363,6 @@ func (s *Server) Serve(l net.Listener) error {
}
func (s *Server) startPeriodicOperations() {
- s.Lock()
- defer s.Unlock()
-
// If the server has dynamic labels defined, start a loop that will
// asynchronously keep them updated.
if s.dynamicLabels != nil {
|
Fix possible deadlock during server close (#<I>)
fix server close deadlock
|
gravitational_teleport
|
train
|
ac4da4156c332cbac89ba042b1f566fceb63110a
|
diff --git a/src/GameQ/Buffer.php b/src/GameQ/Buffer.php
index <HASH>..<HASH> 100644
--- a/src/GameQ/Buffer.php
+++ b/src/GameQ/Buffer.php
@@ -352,27 +352,4 @@ class Buffer
return $float['float'];
}
-
- /**
- * Conversion to float
- *
- * @access public
- *
- * @param string $string String to convert
- *
- * @return float 32 bit float
- */
- public function toFloat($string)
- {
-
- // Check length
- if (strlen($string) !== 4) {
- return 0;
- }
-
- // Convert
- $float = unpack('ffloat', $string);
-
- return $float['float'];
- }
}
|
Removed unused toFloat function. Same reasons apply as previous commit.
|
Austinb_GameQ
|
train
|
0ba636911235d28a75d3fd45bf490d3b7a019929
|
diff --git a/tests/integration/misc/usingUnboundModelsByPassingKnex.js b/tests/integration/misc/usingUnboundModelsByPassingKnex.js
index <HASH>..<HASH> 100644
--- a/tests/integration/misc/usingUnboundModelsByPassingKnex.js
+++ b/tests/integration/misc/usingUnboundModelsByPassingKnex.js
@@ -235,6 +235,21 @@ module.exports = session => {
expect(models[0].id).to.equal(3);
});
});
+
+ it('static relatedQuery', () => {
+ const query = Model1.query()
+ .findById(1)
+ .select(
+ 'Model1.*',
+ Model1.relatedQuery('model1Relation2')
+ .count()
+ .as('count')
+ );
+
+ return query.knex(session.knex).then(model => {
+ expect(model.count).to.eql(2);
+ });
+ });
});
describe('$relatedQuery', () => {
|
add test for static relatedQuery with unbound model
|
Vincit_objection.js
|
train
|
304e9323fbd9e0b53525c9ecae460978242865ca
|
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index <HASH>..<HASH> 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -4,6 +4,7 @@ require 'database_cleaner'
require 'support/factory_girl'
Mongoid.load!('./spec/config/mongoid.yml', :test)
+#Mongo::Logger.logger.level = ::Logger::DEBUG
RSpec.configure do |config|
|
Set (commented out) mongo querying logger level to default
|
neiljohari_scram
|
train
|
9aa58bc1ce3f6dffff0a687a33ab3a6079cb3290
|
diff --git a/code/libraries/joomlatools/component/koowa/template/filter/document.php b/code/libraries/joomlatools/component/koowa/template/filter/document.php
index <HASH>..<HASH> 100644
--- a/code/libraries/joomlatools/component/koowa/template/filter/document.php
+++ b/code/libraries/joomlatools/component/koowa/template/filter/document.php
@@ -108,11 +108,10 @@ class ComKoowaTemplateFilterDocument extends KTemplateFilterAbstract
}
foreach ($head['custom'] as $custom) {
- echo $custom."\n";
+ // Inject custom head scripts right before </head>
+ $text = str_replace('</head>', $custom."\n</head>", $text);
}
-
-
$head = ob_get_clean();
$text = $head.$text;
|
#<I>: Inject custom head scripts right before </head>
|
joomlatools_joomlatools-framework
|
train
|
352c4dd9c06e562eb7fcf5bc2f04068d0cc3b39c
|
diff --git a/lib/seraph.js b/lib/seraph.js
index <HASH>..<HASH> 100644
--- a/lib/seraph.js
+++ b/lib/seraph.js
@@ -248,8 +248,6 @@ Seraph.prototype._parseQueryResult = function(result) {
namedResults = namedResults.map(function(namedResult) {
return namedResult[result.columns[0]];
});
- } else if (namedResults.length == 1 && !resultsAreObjects) {
- namedResults = namedResults[0];
}
}
|
got rid of automatic array unboxing
|
brikteknologier_seraph
|
train
|
21650eb12762e1df1eb9af61b301b733501244d4
|
diff --git a/vault/datadog_checks/vault/vault.py b/vault/datadog_checks/vault/vault.py
index <HASH>..<HASH> 100644
--- a/vault/datadog_checks/vault/vault.py
+++ b/vault/datadog_checks/vault/vault.py
@@ -5,6 +5,7 @@ import warnings
from time import time as timestamp
import requests
+from simplejson import JSONDecodeError
from six import string_types
from urllib3.exceptions import InsecureRequestWarning
@@ -43,13 +44,13 @@ class Vault(AgentCheck):
api['check_leader'](config, tags)
api['check_health'](config, tags)
except ApiUnreachable:
- return
+ raise
self.service_check(self.SERVICE_CHECK_CONNECT, AgentCheck.OK, tags=tags)
def check_leader_v1(self, config, tags):
url = config['api_url'] + '/sys/leader'
- leader_data = self.access_api(url, config, tags).json()
+ leader_data = self.access_api(url, config, tags)
is_leader = is_affirmative(leader_data.get('is_self'))
tags.append('is_leader:{}'.format('true' if is_leader else 'false'))
@@ -76,7 +77,7 @@ class Vault(AgentCheck):
def check_health_v1(self, config, tags):
url = config['api_url'] + '/sys/health'
- health_data = self.access_api(url, config, tags).json()
+ health_data = self.access_api(url, config, tags)
cluster_name = health_data.get('cluster_name')
if cluster_name:
@@ -171,6 +172,18 @@ class Vault(AgentCheck):
timeout=config['timeout'],
headers=config['headers'],
)
+ response.raise_for_status()
+ json_data = response.json()
+ except requests.exceptions.HTTPError:
+ msg = 'The Vault endpoint `{}` returned {}.'.format(url, response.status_code)
+ self.service_check(self.SERVICE_CHECK_CONNECT, AgentCheck.CRITICAL, message=msg, tags=tags)
+ self.log.exception(msg)
+ raise ApiUnreachable
+ except JSONDecodeError:
+ msg = 'The Vault endpoint `{}` returned invalid json data.'.format(url)
+ self.service_check(self.SERVICE_CHECK_CONNECT, AgentCheck.CRITICAL, message=msg, tags=tags)
+ self.log.exception(msg)
+ raise ApiUnreachable
except requests.exceptions.Timeout:
msg = 'Vault endpoint `{}` timed out after {} seconds'.format(url, config['timeout'])
self.service_check(self.SERVICE_CHECK_CONNECT, AgentCheck.CRITICAL, message=msg, tags=tags)
@@ -182,4 +195,4 @@ class Vault(AgentCheck):
self.log.exception(msg)
raise ApiUnreachable
- return response
+ return json_data
diff --git a/vault/tests/common.py b/vault/tests/common.py
index <HASH>..<HASH> 100644
--- a/vault/tests/common.py
+++ b/vault/tests/common.py
@@ -1,6 +1,8 @@
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
+import requests
+
from datadog_checks.dev import get_docker_hostname
HOST = get_docker_hostname()
@@ -19,9 +21,13 @@ INSTANCES = {
class MockResponse:
- def __init__(self, j):
+ def __init__(self, j, status_code=200):
self.j = j
- self.status_code = 200
+ self.status_code = status_code
def json(self):
return self.j
+
+ def raise_for_status(self):
+ if self.status_code >= 300:
+ raise requests.exceptions.HTTPError
diff --git a/vault/tests/test_vault.py b/vault/tests/test_vault.py
index <HASH>..<HASH> 100644
--- a/vault/tests/test_vault.py
+++ b/vault/tests/test_vault.py
@@ -2,9 +2,11 @@
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import mock
+import pytest
import requests
from datadog_checks.vault import Vault
+from datadog_checks.vault.errors import ApiUnreachable
from .common import INSTANCES, MockResponse
@@ -79,7 +81,20 @@ class TestVault:
def test_service_check_connect_fail(self, aggregator):
instance = INSTANCES['bad_url']
c = Vault(Vault.CHECK_NAME, None, {}, [instance])
- c.check(instance)
+ with pytest.raises(ApiUnreachable):
+ c.check(instance)
+
+ aggregator.assert_service_check(
+ Vault.SERVICE_CHECK_CONNECT, status=Vault.CRITICAL, tags=['instance:foobar'], count=1
+ )
+
+ def test_service_check_500_fail(self, aggregator):
+ instance = INSTANCES['main']
+ c = Vault(Vault.CHECK_NAME, None, {}, [instance])
+
+ with pytest.raises(ApiUnreachable):
+ with mock.patch('requests.get', return_value=MockResponse('', status_code=500)):
+ c.check(instance)
aggregator.assert_service_check(
Vault.SERVICE_CHECK_CONNECT, status=Vault.CRITICAL, tags=['instance:foobar'], count=1
|
Submit critical service check with <I> server errors (#<I>)
|
DataDog_integrations-core
|
train
|
81818f36fc3b37a550f1aedfb218a5cb729d9ddd
|
diff --git a/plugins/providers/docker/action.rb b/plugins/providers/docker/action.rb
index <HASH>..<HASH> 100644
--- a/plugins/providers/docker/action.rb
+++ b/plugins/providers/docker/action.rb
@@ -53,9 +53,16 @@ module VagrantPlugins
next
end
- b2.use Call, GracefulHalt, :stopped, :running do |env2, b3|
+ b2.use Call, HasSSH do |env2, b3|
if !env2[:result]
b3.use Stop
+ next
+ end
+
+ b3.use Call, GracefulHalt, :stopped, :running do |env3, b4|
+ if !env3[:result]
+ b4.use Stop
+ end
end
end
end
|
providers/docker: graceful halt only if SSH is available
|
hashicorp_vagrant
|
train
|
f950373b5067d747ad949e45abeb711ccda06b88
|
diff --git a/vyper/parser/context.py b/vyper/parser/context.py
index <HASH>..<HASH> 100644
--- a/vyper/parser/context.py
+++ b/vyper/parser/context.py
@@ -1,6 +1,7 @@
import contextlib
import enum
import itertools
+from typing import Tuple
from vyper.ast import VyperNode
from vyper.exceptions import CompilerPanic
@@ -177,6 +178,31 @@ class Context:
var_pos = self.memory_allocator.allocate_memory(var_size)
return self._new_variable(name, typ, var_pos)
+ def new_sequential_vars(self, *types: NodeType) -> Tuple[int, ...]:
+ """
+ Allocate memory for multiple internal variables, ensuring they are positioned sequentially.
+
+ Arguments
+ ---------
+ types : NodeType
+ Variable types, used to determine the size of memory allocation
+
+ Returns
+ -------
+ Tuple[int,...]
+ Tuple of memory offsets for the variables
+ """
+ total_size = sum(get_size_of_type(i) for i in types) * 32
+ placeholders: Tuple[int, ...] = ()
+ var_pos = self.memory_allocator.allocate_memory(total_size)
+ for typ in types:
+ name = f"#internal_{next(self.internal_variable_count)}"
+ self._new_variable(name, typ, var_pos)
+ placeholders += (var_pos,)
+ var_pos += 32 * get_size_of_type(typ)
+
+ return placeholders
+
def parse_type(self, ast_node, location):
return self.global_ctx.parse_type(ast_node, location)
|
feat: new_sequential_vars
|
ethereum_vyper
|
train
|
c9c2d6697119fcbe9b0f4e8285852f62a4dbccda
|
diff --git a/src/stores/GuildChannelStore.js b/src/stores/GuildChannelStore.js
index <HASH>..<HASH> 100644
--- a/src/stores/GuildChannelStore.js
+++ b/src/stores/GuildChannelStore.js
@@ -34,6 +34,7 @@ class GuildChannelStore extends DataStore {
* @param {string} name The name of the new channel
* @param {Object} [options] Options
* @param {string} [options.type='text'] The type of the new channel, either `text`, `voice`, or `category`
+ * @param {string} [options.topic] The topic for the new channel
* @param {boolean} [options.nsfw] Whether the new channel is nsfw
* @param {number} [options.bitrate] Bitrate of the new channel in bits (only voice)
* @param {number} [options.userLimit] Maximum amount of users allowed in the new channel (only voice)
@@ -58,11 +59,12 @@ class GuildChannelStore extends DataStore {
* ],
* })
*/
- create(name, { type, nsfw, bitrate, userLimit, parent, overwrites, reason } = {}) {
+ create(name, { type, topic, nsfw, bitrate, userLimit, parent, overwrites, reason } = {}) {
if (parent) parent = this.client.channels.resolveID(parent);
return this.client.api.guilds(this.guild.id).channels.post({
data: {
name,
+ topic,
type: type ? ChannelTypes[type.toUpperCase()] : 'text',
nsfw,
bitrate,
|
feat(GuildChannelStore): add topic to create options (#<I>)
The API provides the ability to send the topic, or description, of the channel within the creation post. Discord.js will now send it if its provided.
|
discordjs_discord.js
|
train
|
10284c2ab78e87e3c406b4d8f568d2b94dc30389
|
diff --git a/lib/service_double/version.rb b/lib/service_double/version.rb
index <HASH>..<HASH> 100644
--- a/lib/service_double/version.rb
+++ b/lib/service_double/version.rb
@@ -1,3 +1,3 @@
module ServiceDouble
- VERSION = "0.0.2"
+ VERSION = "0.0.3"
end
|
Bump to version <I>
|
yourkarma_service_double
|
train
|
02cfcfe3d40f2759d51dd98ffddbfd2cec04c99e
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,6 +10,7 @@
* [#5234](https://github.com/bbatsov/rubocop/issues/5234): Fix a false positive for `Rails/HasManyOrHasOneDependent` when using `class_name` option. ([@koic][])
* [#5273](https://github.com/bbatsov/rubocop/issues/5273): Fix `Style/EvalWithLocation` reporting bad line offset. ([@pocke][])
* [#5228](https://github.com/bbatsov/rubocop/issues/5228): Handle overridden `Metrics/LineLength:Max` for `--auto-gen-config`. ([@jonas054][])
+* [#5261](https://github.com/bbatsov/rubocop/issues/5261): Fix a false positive for `Style/MixinUsage` when using inside class or module. ([@koic][])
### Changes
diff --git a/lib/rubocop/cop/style/mixin_usage.rb b/lib/rubocop/cop/style/mixin_usage.rb
index <HASH>..<HASH> 100644
--- a/lib/rubocop/cop/style/mixin_usage.rb
+++ b/lib/rubocop/cop/style/mixin_usage.rb
@@ -53,8 +53,9 @@ module RuboCop
def on_send(node)
include_statement(node) do |statement|
- return if node.argument?
- return if accepted_include?(node)
+ return if node.argument? ||
+ accepted_include?(node) ||
+ belongs_to_class_or_module?(node)
add_offense(node, message: format(MSG, statement: statement))
end
@@ -65,6 +66,16 @@ module RuboCop
def accepted_include?(node)
node.parent && node.macro?
end
+
+ def belongs_to_class_or_module?(node)
+ if !node.parent
+ false
+ else
+ return true if node.parent.class_type? || node.parent.module_type?
+
+ belongs_to_class_or_module?(node.parent)
+ end
+ end
end
end
end
diff --git a/spec/rubocop/cop/style/mixin_usage_spec.rb b/spec/rubocop/cop/style/mixin_usage_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/rubocop/cop/style/mixin_usage_spec.rb
+++ b/spec/rubocop/cop/style/mixin_usage_spec.rb
@@ -29,6 +29,16 @@ describe RuboCop::Cop::Style::MixinUsage do
RUBY
end
+ it 'registers an offense when using `include` in method definition ' \
+ 'outside class or module' do
+ expect_offense(<<-RUBY.strip_indent)
+ def foo
+ include M
+ ^^^^^^^^^ `include` is used at the top level. Use inside `class` or `module`.
+ end
+ RUBY
+ end
+
it 'does not register an offense when using outside class' do
expect_no_offenses(<<-RUBY.strip_indent)
Foo.include M
@@ -58,6 +68,28 @@ describe RuboCop::Cop::Style::MixinUsage do
RUBY
end
+ it 'does not register an offense when using `include` in method ' \
+ 'definition inside class' do
+ expect_no_offenses(<<-RUBY.strip_indent)
+ class X
+ def foo
+ include M
+ end
+ end
+ RUBY
+ end
+
+ it 'does not register an offense when using `include` in method ' \
+ 'definition inside module' do
+ expect_no_offenses(<<-RUBY.strip_indent)
+ module X
+ def foo
+ include M
+ end
+ end
+ RUBY
+ end
+
context 'Multiple definition classes in one' do
it 'does not register an offense when using inside class' do
expect_no_offenses(<<-RUBY.strip_indent)
|
[Fix #<I>] Fix false positive for `Style/MixinUsage` when using inside module
Fixes #<I>.
Correspondence to use case for `include` belonging to module was
insufficient. This PR added a reproduction test and fixed it.
|
rubocop-hq_rubocop
|
train
|
25cb2c4638f904eaeac41d060eae54fbee7b73dd
|
diff --git a/spec/unit/network/http/connection_spec.rb b/spec/unit/network/http/connection_spec.rb
index <HASH>..<HASH> 100755
--- a/spec/unit/network/http/connection_spec.rb
+++ b/spec/unit/network/http/connection_spec.rb
@@ -76,7 +76,46 @@ describe Puppet::Network::HTTP::Connection do
end
end
- context "when validating HTTPS requests" do
+ class ConstantErrorValidator
+ def initialize(args)
+ @fails_with = args[:fails_with]
+ @error_string = args[:error_string] || ""
+ @peer_certs = args[:peer_certs] || []
+ end
+
+ def setup_connection(connection)
+ connection.stubs(:request).with do
+ true
+ end.raises(OpenSSL::SSL::SSLError.new(@fails_with))
+ end
+
+ def peer_certs
+ @peer_certs
+ end
+
+ def verify_errors
+ [@error_string]
+ end
+ end
+
+ class NoProblemsValidator
+ def initialize(cert)
+ @cert = cert
+ end
+
+ def setup_connection(connection)
+ end
+
+ def peer_certs
+ [@cert]
+ end
+
+ def verify_errors
+ []
+ end
+ end
+
+ shared_examples_for 'ssl verifier' do
include PuppetSpec::Files
let (:host) { "my_server" }
@@ -97,11 +136,11 @@ describe Puppet::Network::HTTP::Connection do
Puppet[:confdir] = tmpdir('conf')
connection = Puppet::Network::HTTP::Connection.new(
- host, port,
- :verify => ConstantErrorValidator.new(
- :fails_with => 'hostname was not match with server certificate',
- :peer_certs => [Puppet::SSL::CertificateAuthority.new.generate(
- 'not_my_server', :dns_alt_names => 'foo,bar,baz')]))
+ host, port,
+ :verify => ConstantErrorValidator.new(
+ :fails_with => 'hostname was not match with server certificate',
+ :peer_certs => [Puppet::SSL::CertificateAuthority.new.generate(
+ 'not_my_server', :dns_alt_names => 'foo,bar,baz')]))
expect do
connection.get('request')
@@ -136,44 +175,10 @@ describe Puppet::Network::HTTP::Connection do
connection.get('request')
end
+ end
- class ConstantErrorValidator
- def initialize(args)
- @fails_with = args[:fails_with]
- @error_string = args[:error_string] || ""
- @peer_certs = args[:peer_certs] || []
- end
-
- def setup_connection(connection)
- connection.stubs(:request).with do
- true
- end.raises(OpenSSL::SSL::SSLError.new(@fails_with))
- end
-
- def peer_certs
- @peer_certs
- end
-
- def verify_errors
- [@error_string]
- end
- end
-
- class NoProblemsValidator
- def initialize(cert)
- @cert = cert
- end
-
- def setup_connection(connection)
- end
-
- def peer_certs
- [@cert]
- end
-
- def verify_errors
- []
- end
+ context "when using single use HTTPS connections" do
+ it_behaves_like 'ssl verifier' do
end
end
|
(maint) Refactor HTTPS verification tests into a shared examples
This commit refactors the spec tests that ensure puppet displays
meaningful error messages when SSL verification fails, e.g. when the
server certificate doesn't match the hostname we connected to.
|
puppetlabs_puppet
|
train
|
492d4c31343c30440ff289f0538b75e27db03b96
|
diff --git a/alerta/utils/config.py b/alerta/utils/config.py
index <HASH>..<HASH> 100644
--- a/alerta/utils/config.py
+++ b/alerta/utils/config.py
@@ -156,7 +156,8 @@ class Config(object):
handler = RotatingFileHandler(
filename=app.config['LOG_FILE'],
maxBytes=app.config['LOG_MAX_BYTES'],
- backupCount=app.config['LOG_BACKUP_COUNT']
+ backupCount=app.config['LOG_BACKUP_COUNT'],
+ encoding='utf-8'
)
handler.setLevel(log_level)
handler.setFormatter(logging.Formatter(app.config['LOG_FORMAT']))
|
Explicitly set logfile encoding to utf-8 (#<I>)
|
alerta_alerta
|
train
|
53b7b05d0d45aade047f28105af9cbb55b3284b1
|
diff --git a/shoebot/ide/ide.py b/shoebot/ide/ide.py
index <HASH>..<HASH> 100755
--- a/shoebot/ide/ide.py
+++ b/shoebot/ide/ide.py
@@ -623,7 +623,7 @@ class ShoebotEditorWindow(Gtk.Window):
)
if chooser.run() == Gtk.ResponseType.ACCEPT:
- self.open_file(chooser.filename)
+ self.open_file(chooser.get_filename())
chooser.destroy()
def save_or_save_as(self):
|
Fix bug triggered by opening a file in the IDE.
|
shoebot_shoebot
|
train
|
96e4a8500c9bd741f2873729f3183c55fdb19c2b
|
diff --git a/coursera/credentials.py b/coursera/credentials.py
index <HASH>..<HASH> 100644
--- a/coursera/credentials.py
+++ b/coursera/credentials.py
@@ -27,7 +27,7 @@ def _getenv_or_empty(s):
return os.getenv(s) or ""
-def get_config_paths(config_name): # pragma: no test
+def get_config_paths(config_name): # pragma: no test
"""
Returns a list of config files paths to try in order, given config file
name and possibly a user-specified path.
diff --git a/coursera/define.py b/coursera/define.py
index <HASH>..<HASH> 100644
--- a/coursera/define.py
+++ b/coursera/define.py
@@ -17,7 +17,7 @@ AUTH_REDIRECT_URL = 'https://class.coursera.org/{class_name}' \
ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
# define a per-user cache folder
-if os.name == "posix": # pragma: no cover
+if os.name == "posix": # pragma: no cover
import pwd
user = pwd.getpwuid(os.getuid())[0]
else:
diff --git a/coursera/utils.py b/coursera/utils.py
index <HASH>..<HASH> 100644
--- a/coursera/utils.py
+++ b/coursera/utils.py
@@ -12,7 +12,7 @@ import string
import six
# six.moves doesn’t support urlparse
-if six.PY3: # pragma: no cover
+if six.PY3: # pragma: no cover
from urllib.parse import urlparse
else:
from urlparse import urlparse
|
cosmetics: PEP-8 conventions.
|
coursera-dl_coursera-dl
|
train
|
4ba760a14ea20cb2551fdbc68ed3f93facf7c8e4
|
diff --git a/template/app/view/DocTree.js b/template/app/view/DocTree.js
index <HASH>..<HASH> 100644
--- a/template/app/view/DocTree.js
+++ b/template/app/view/DocTree.js
@@ -23,11 +23,8 @@ Ext.define('Docs.view.DocTree', {
"urlclick"
);
- // Expand the main tree
+ // Expand root
this.root.expanded = true;
- if (this.root.children[0]) {
- this.root.children[0].expanded = true;
- }
this.on("itemclick", this.onItemClick, this);
this.on("beforeitemcollapse", this.handleBeforeExpandCollapse, this);
diff --git a/template/app/view/cls/Tree.js b/template/app/view/cls/Tree.js
index <HASH>..<HASH> 100644
--- a/template/app/view/cls/Tree.js
+++ b/template/app/view/cls/Tree.js
@@ -37,15 +37,26 @@ Ext.define('Docs.view.cls.Tree', {
var selected = this.getSelectionModel().getLastSelected();
// create new treestructure
var root = tree.create();
+ this.expandLonelyNode(root);
this.setRootNode(root);
this.initNodeLinks();
- // expand first child
- this.getRootNode().getChildAt(0).expand();
+
// re-establish the previous selection
selected && this.selectUrl(selected.raw.url);
}
else {
this.root = tree.create();
+ this.expandLonelyNode(this.root);
+ }
+ },
+
+ // When only one expandable node at root level, expand it
+ expandLonelyNode: function(root) {
+ var expandableNodes = Ext.Array.filter(root.children, function(node) {
+ return node.children.length > 0;
+ });
+ if (expandableNodes.length == 1) {
+ expandableNodes[0].expanded = true;
}
}
});
|
New logic for expaning Ext namespace in class tree.
Now the expansion happens only when there's just one expandable
item at root level. Basically this means that when we have two
namespaces like Ext and Blah, then neither gets expanded by default,
but if there's just Blah, it will be expanded.
|
senchalabs_jsduck
|
train
|
ef6a4bc3b26c0fee0158ff53724677cdf2057967
|
diff --git a/classes/PodsUI.php b/classes/PodsUI.php
index <HASH>..<HASH> 100644
--- a/classes/PodsUI.php
+++ b/classes/PodsUI.php
@@ -2204,8 +2204,11 @@ class PodsUI {
$this->data = $data;
- if ( !empty( $this->data ) )
- $this->data_keys = array_keys( $this->data );
+ if ( !empty( $this->data ) ) {
+ $first_row = current( $this->data );
+
+ $this->data_keys = array_keys( $first_row );
+ }
$this->total = $this->pod->total();
$this->total_found = $this->pod->total_found();
@@ -2302,8 +2305,10 @@ class PodsUI {
$this->row = false;
if ( !empty( $this->data ) ) {
- if ( empty( $this->data_keys ) || count( $this->data ) != count( $this->data_keys ) ) {
- $this->data_keys = array_keys( $this->data );
+ $first_row = current( $this->data );
+
+ if ( empty( $this->data_keys ) || count( $first_row ) != count( $this->data_keys ) ) {
+ $this->data_keys = array_keys( $first_row );
}
if ( count( $this->data ) == $this->total && isset( $this->data_keys[ $counter ] ) && isset( $this->data[ $this->data_keys[ $counter ] ] ) ) {
|
Fix for referencing first row instead of assuming $data[ 0 ] exists
|
pods-framework_pods
|
train
|
a546844147a3261fe7f4f779602713160ae76cee
|
diff --git a/cherrypy/test/test.py b/cherrypy/test/test.py
index <HASH>..<HASH> 100644
--- a/cherrypy/test/test.py
+++ b/cherrypy/test/test.py
@@ -370,6 +370,7 @@ def run():
'test_tools',
'test_encoding',
'test_etags',
+ 'test_http',
'test_httpauth',
'test_httplib',
'test_objectmapping',
diff --git a/cherrypy/test/test_http.py b/cherrypy/test/test_http.py
index <HASH>..<HASH> 100644
--- a/cherrypy/test/test_http.py
+++ b/cherrypy/test/test_http.py
@@ -1,9 +1,4 @@
-"""Tests for managing HTTP issues (malformed requests, etc).
-
-Some of these tests check timeouts, etcetera, and therefore take a long
-time to run. Therefore, this module should probably not be included in
-the 'comprehensive' test suite (test.py).
-"""
+"""Tests for managing HTTP issues (malformed requests, etc)."""
from cherrypy.test import test
test.prefer_parent_path()
|
Moved test_http.py back into the test suite since it runs fast here.
|
cherrypy_cheroot
|
train
|
7d5ab5790ae08de4e2dda83be456a2fd297515f0
|
diff --git a/lib/ronin/ui/cli/commands/wordlist.rb b/lib/ronin/ui/cli/commands/wordlist.rb
index <HASH>..<HASH> 100644
--- a/lib/ronin/ui/cli/commands/wordlist.rb
+++ b/lib/ronin/ui/cli/commands/wordlist.rb
@@ -43,7 +43,7 @@ module Ronin
#
# ## Arguments
#
- # TEMPLATE Options word template
+ # TEMPLATE Options word template (alpha:7 numeric:1-3)
#
# @since 1.4.0
#
@@ -68,7 +68,7 @@ module Ronin
:descriptions => 'Mutations rules'
argument :template, :type => Array,
- :description => 'Options word template'
+ :description => 'Options word template (alpha:7 numeric:1-3)'
#
# Executes the wordlist command.
|
Added an example for TEMPLATE in the wordlist command.
|
ronin-ruby_ronin
|
train
|
0a414cfea17a9b8429638e4f0a194a8374cfecf3
|
diff --git a/breakerbox-service/src/main/resources/assets/js/breakerbox.js b/breakerbox-service/src/main/resources/assets/js/breakerbox.js
index <HASH>..<HASH> 100644
--- a/breakerbox-service/src/main/resources/assets/js/breakerbox.js
+++ b/breakerbox-service/src/main/resources/assets/js/breakerbox.js
@@ -111,7 +111,7 @@ Breakerbox.SyncState.prototype.inSync = function() {
type: 'GET',
dataType: 'json',
url: "/sync/" + this.serviceId + '/' + this.dependencyId,
- timeout: 2000,
+ timeout: 30000,
success: function(data) {
$('#' + self.domId)[0].innerHTML = self.createDom(data);
self.hideSpinner();
|
2s is pretty small for a sync-state request. This should really scale with the number of hosts, but for now a fixed value is fine.
|
yammer_breakerbox
|
train
|
58f11a5a7883a58edadee00a9a780b27ff6e3ab3
|
diff --git a/lib/jsduck/lexer.rb b/lib/jsduck/lexer.rb
index <HASH>..<HASH> 100644
--- a/lib/jsduck/lexer.rb
+++ b/lib/jsduck/lexer.rb
@@ -53,6 +53,9 @@ module JsDuck
#
# {:type => :ident, :value => "foo"}
#
+ # For doc-comments the full token also contains the field :linenr,
+ # pointing to the line where the doc-comment began.
+ #
def next(full=false)
tok = @tokens.shift
full ? tok : tok[:value]
@@ -82,6 +85,8 @@ module JsDuck
elsif @input.check(/\/\*\*/) then
@tokens << {
:type => :doc_comment,
+ # Calculate current line number, starting with 1
+ :linenr => @input.string[0...@input.pos].count("\n") + 1,
:value => @input.scan_until(/\*\/|\Z/)
}
elsif @input.check(/"/) then
diff --git a/spec/lexer_spec.rb b/spec/lexer_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/lexer_spec.rb
+++ b/spec/lexer_spec.rb
@@ -8,6 +8,9 @@ describe JsDuck::Lexer do
while !lex.empty?
t = lex.next(true)
tokens << [t[:type], t[:value]]
+ if t[:linenr]
+ tokens.last << t[:linenr]
+ end
end
tokens
end
@@ -96,8 +99,19 @@ describe JsDuck::Lexer do
lex("a /* foo */ b").should == [[:ident, "a"], [:ident, "b"]]
end
- it "identifies doc-comments" do
- lex("/** foo */").should == [[:doc_comment, "/** foo */"]]
+ it "identifies doc-comments together with line numbers" do
+ lex("/** foo */").should == [[:doc_comment, "/** foo */", 1]]
+ end
+
+ it "counts line numbers correctly" do
+ tokens = lex(<<-EOS)
+ foo = {
+ bar: foo,
+ /**
+ * My comment.
+ */
+ EOS
+ tokens.last.last.should == 3
end
describe "handles unfinished" do
@@ -111,7 +125,7 @@ describe JsDuck::Lexer do
end
it "doc-comment" do
- lex("/** ").should == [[:doc_comment, "/** "]]
+ lex("/** ").should == [[:doc_comment, "/** ", 1]]
end
end
|
Added line-number support to Lexer.
Currently we only need line-number information for doc-comment tokens.
Line-number detection was inspired by ruby_parser library:
<URL>
|
senchalabs_jsduck
|
train
|
65c0491af03cbdbd3ad76f494ac185ca1475dc0d
|
diff --git a/dynamic_rest/filters.py b/dynamic_rest/filters.py
index <HASH>..<HASH> 100644
--- a/dynamic_rest/filters.py
+++ b/dynamic_rest/filters.py
@@ -2,7 +2,7 @@
from django.core.exceptions import ValidationError as InternalValidationError
from django.core.exceptions import ImproperlyConfigured
-from django.db.models import Q, Prefetch
+from django.db.models import Q, Prefetch, Manager
from django.utils import six
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
@@ -567,6 +567,8 @@ class DynamicFilterBackend(BaseFilterBackend):
prefetch = prefetches.values()
if prefetch:
queryset = queryset.prefetch_related(*prefetch)
+ elif isinstance(queryset, Manager):
+ queryset = queryset.all()
if has_joins(queryset) or not is_root_level:
queryset = queryset.distinct()
|
make sure queryset isn't a Manager
|
AltSchool_dynamic-rest
|
train
|
41838c8e8632ba78791996fbc697080b2764bb6a
|
diff --git a/src/platforms/web/compiler/modules/model.js b/src/platforms/web/compiler/modules/model.js
index <HASH>..<HASH> 100644
--- a/src/platforms/web/compiler/modules/model.js
+++ b/src/platforms/web/compiler/modules/model.js
@@ -26,8 +26,19 @@ import {
function preTransformNode (el: ASTElement, options: CompilerOptions) {
if (el.tag === 'input') {
const map = el.attrsMap
- if (map['v-model'] && (map['v-bind:type'] || map[':type'])) {
- const typeBinding: any = getBindingAttr(el, 'type')
+ if (!map['v-model']) {
+ return
+ }
+
+ let typeBinding
+ if (map[':type'] || map['v-bind:type']) {
+ typeBinding = getBindingAttr(el, 'type')
+ }
+ if (!typeBinding && map['v-bind']) {
+ typeBinding = `(${map['v-bind']}).type`
+ }
+
+ if (typeBinding) {
const ifCondition = getAndRemoveAttr(el, 'v-if', true)
const ifConditionExtra = ifCondition ? `&&(${ifCondition})` : ``
const hasElse = getAndRemoveAttr(el, 'v-else', true) != null
diff --git a/test/unit/features/directives/model-dynamic.spec.js b/test/unit/features/directives/model-dynamic.spec.js
index <HASH>..<HASH> 100644
--- a/test/unit/features/directives/model-dynamic.spec.js
+++ b/test/unit/features/directives/model-dynamic.spec.js
@@ -127,6 +127,56 @@ describe('Directive v-model dynamic input type', () => {
expect(vm.data.text).toBe('foo')
}).then(done)
})
+
+ it('with v-bind', done => {
+ const vm = new Vue({
+ data: {
+ data: {
+ text: 'foo',
+ checkbox: true
+ },
+ inputs: [{ id: 'one', type: 'text' }, { id: 'two', type: 'checkbox' }]
+ },
+ template: `<div>
+ <input v-for="i in inputs" v-bind="i" v-model="data[i.type]">
+ </div>`
+ }).$mount()
+ document.body.appendChild(vm.$el)
+
+ let el1 = vm.$el.children[0]
+ expect(el1.id).toBe('one')
+ expect(el1.type).toBe('text')
+ expect(el1.value).toBe('foo')
+ el1.value = 'bar'
+ triggerEvent(el1, 'input')
+ expect(vm.data.text).toBe('bar')
+
+ let el2 = vm.$el.children[1]
+ expect(el2.id).toBe('two')
+ expect(el2.type).toBe('checkbox')
+ expect(el2.checked).toBe(true)
+ el2.click()
+ expect(vm.data.checkbox).toBe(false)
+
+ // now in reverse!
+ vm.inputs.reverse()
+ waitForUpdate(() => {
+ el1 = vm.$el.children[0]
+ expect(el1.id).toBe('two')
+ expect(el1.type).toBe('checkbox')
+ expect(el1.checked).toBe(false)
+ el1.click()
+ expect(vm.data.checkbox).toBe(true)
+
+ el2 = vm.$el.children[1]
+ expect(el2.id).toBe('one')
+ expect(el2.type).toBe('text')
+ expect(el2.value).toBe('bar')
+ el2.value = 'foo'
+ triggerEvent(el2, 'input')
+ expect(vm.data.text).toBe('foo')
+ }).then(done)
+ })
})
function assertInputWorks (vm, type, chain) {
|
feat: support v-model dynamic type binding for v-bind="object"
close #<I>
|
kaola-fed_megalo
|
train
|
bc739173c0540f58de98da08e8e5ab9b5cd0ffcd
|
diff --git a/api/models/request.go b/api/models/request.go
index <HASH>..<HASH> 100644
--- a/api/models/request.go
+++ b/api/models/request.go
@@ -52,6 +52,25 @@ type Req struct {
// PNGroup is an identifier for a pre-normalization group: data that can be pre-normalized together
type PNGroup uint64
+// NewReq creates a new request. It sets all properties minus the ones that need request planning
+func NewReq(key schema.MKey, target, patt string, from, to, maxPoints, rawInterval uint32, pngroup PNGroup, cons, consReq consolidation.Consolidator, node cluster.Node, schemaId, aggId uint16) Req {
+ return Req{
+ MKey: key,
+ Target: target,
+ Pattern: patt,
+ From: from,
+ To: to,
+ MaxPoints: maxPoints,
+ PNGroup: pngroup,
+ RawInterval: rawInterval,
+ Consolidator: cons,
+ ConsReq: consReq,
+ Node: node,
+ SchemaId: schemaId,
+ AggId: aggId,
+ }
+}
+
// Init initializes a request based on the metadata that we know of.
// It sets all properties minus the ones that need request planning
func (r *Req) Init(archive idx.Archive, cons consolidation.Consolidator, node cluster.Node) {
|
Add NewReq constructor for use by the dataprocessor.
|
grafana_metrictank
|
train
|
9b6618b9bc1a16847868cade51098244437bf617
|
diff --git a/mod/assignment/backuplib.php b/mod/assignment/backuplib.php
index <HASH>..<HASH> 100644
--- a/mod/assignment/backuplib.php
+++ b/mod/assignment/backuplib.php
@@ -45,6 +45,7 @@
fwrite ($bf,full_tag("ASSIGNMENTTYPE",4,false,$assignment->assignmenttype));
fwrite ($bf,full_tag("MAXBYTES",4,false,$assignment->maxbytes));
fwrite ($bf,full_tag("TIMEDUE",4,false,$assignment->timedue));
+ fwrite ($bf,full_tag("TIMEAVAILABLE",4,false,$assignment->timeavailable));
fwrite ($bf,full_tag("GRADE",4,false,$assignment->grade));
fwrite ($bf,full_tag("TIMEMODIFIED",4,false,$assignment->timemodified));
//if we've selected to backup users info, then execute backup_assignment_submisions
diff --git a/mod/assignment/restorelib.php b/mod/assignment/restorelib.php
index <HASH>..<HASH> 100644
--- a/mod/assignment/restorelib.php
+++ b/mod/assignment/restorelib.php
@@ -49,6 +49,7 @@
$assignment->assignmenttype = backup_todb($info['MOD']['#']['ASSIGNMENTTYPE']['0']['#']);
$assignment->maxbytes = backup_todb($info['MOD']['#']['MAXBYTES']['0']['#']);
$assignment->timedue = backup_todb($info['MOD']['#']['TIMEDUE']['0']['#']);
+ $assignment->timeavailable = backup_todb($info['MOD']['#']['TIMEAVAILABLE']['0']['#']);
$assignment->grade = backup_todb($info['MOD']['#']['GRADE']['0']['#']);
$assignment->timemodified = backup_todb($info['MOD']['#']['TIMEMODIFIED']['0']['#']);
|
timeavailable was missing in backup and restore. :-)
|
moodle_moodle
|
train
|
22e0e0354b47f13ba62949df3c1c0861a1b15446
|
diff --git a/lib/server-lib.js b/lib/server-lib.js
index <HASH>..<HASH> 100644
--- a/lib/server-lib.js
+++ b/lib/server-lib.js
@@ -26,20 +26,16 @@ function ensureConnected(remote, callback) {
*/
function isConnected(remote) {
- // if (!(remote instanceof ripple.Remote)) {
- // return false;
- // }
+ var result = false;
- var server = remote._getServer();
-
- if (!server) {
- // No connected servers
- return false;
- }
-
- return (Date.now() - server._lastLedgerClose <= module.exports.CONNECTION_TIMEOUT);
-}
+ try {
+ var server = remote._getServer();
+ var closeDiff = Date.now() - server._lastLedgerClose;
+ result = closeDiff <= module.exports.CONNECTION_TIMEOUT;
+ } catch (e) { }
+ return result;
+};
/**
* Attempt to reconnect, waiting no longer than 20
|
Safer server isConnected check
|
ripple_ripple-rest
|
train
|
42ea0d4770faeb84a13c284395626545161eb51c
|
diff --git a/pkg/apis/kops/componentconfig.go b/pkg/apis/kops/componentconfig.go
index <HASH>..<HASH> 100644
--- a/pkg/apis/kops/componentconfig.go
+++ b/pkg/apis/kops/componentconfig.go
@@ -217,7 +217,9 @@ type KubeletConfigSpec struct {
// one must set --hairpin-mode=veth-flag, because bridge assumes the
// existence of a container bridge named cbr0.
HairpinMode string `json:"hairpinMode,omitempty" flag:"hairpin-mode"`
+
// The node has babysitter process monitoring docker and kubelet.
+ // Removed as of 1.7
BabysitDaemons *bool `json:"babysitDaemons,omitempty" flag:"babysit-daemons"`
// maxPods is the number of pods that can run on this Kubelet.
diff --git a/pkg/apis/kops/v1alpha1/componentconfig.go b/pkg/apis/kops/v1alpha1/componentconfig.go
index <HASH>..<HASH> 100644
--- a/pkg/apis/kops/v1alpha1/componentconfig.go
+++ b/pkg/apis/kops/v1alpha1/componentconfig.go
@@ -216,7 +216,9 @@ type KubeletConfigSpec struct {
// one must set --hairpin-mode=veth-flag, because bridge assumes the
// existence of a container bridge named cbr0.
HairpinMode string `json:"hairpinMode,omitempty" flag:"hairpin-mode"`
+
// The node has babysitter process monitoring docker and kubelet.
+ // Removed as of 1.7
BabysitDaemons *bool `json:"babysitDaemons,omitempty" flag:"babysit-daemons"`
// maxPods is the number of pods that can run on this Kubelet.
diff --git a/pkg/apis/kops/v1alpha2/componentconfig.go b/pkg/apis/kops/v1alpha2/componentconfig.go
index <HASH>..<HASH> 100644
--- a/pkg/apis/kops/v1alpha2/componentconfig.go
+++ b/pkg/apis/kops/v1alpha2/componentconfig.go
@@ -81,7 +81,9 @@ type KubeletConfigSpec struct {
// one must set --hairpin-mode=veth-flag, because bridge assumes the
// existence of a container bridge named cbr0.
HairpinMode string `json:"hairpinMode,omitempty" flag:"hairpin-mode"`
+
// The node has babysitter process monitoring docker and kubelet.
+ // Removed as of 1.7
BabysitDaemons *bool `json:"babysitDaemons,omitempty" flag:"babysit-daemons"`
// maxPods is the number of pods that can run on this Kubelet.
diff --git a/pkg/model/components/kubelet.go b/pkg/model/components/kubelet.go
index <HASH>..<HASH> 100644
--- a/pkg/model/components/kubelet.go
+++ b/pkg/model/components/kubelet.go
@@ -57,9 +57,13 @@ func (b *KubeletOptionsBuilder) BuildOptions(o interface{}) error {
clusterSpec.Kubelet.LogLevel = fi.Int32(2)
clusterSpec.Kubelet.ClusterDNS = ip.String()
clusterSpec.Kubelet.ClusterDomain = clusterSpec.ClusterDNSDomain
- clusterSpec.Kubelet.BabysitDaemons = fi.Bool(true)
clusterSpec.Kubelet.NonMasqueradeCIDR = clusterSpec.NonMasqueradeCIDR
+ if b.Context.IsKubernetesLT("1.7") {
+ // babysit-daemons removed in 1.7
+ clusterSpec.Kubelet.BabysitDaemons = fi.Bool(true)
+ }
+
clusterSpec.MasterKubelet.RegisterSchedulable = fi.Bool(false)
// Replace the CIDR with a CIDR allocated by KCM (the default, but included for clarity)
// We _do_ allow debugging handlers, so we can do logs
|
Remove babysit-daemons flag from <I>
|
kubernetes_kops
|
train
|
c580882403d6f85ea2797ba15ef133d34c7cc3e7
|
diff --git a/mill-db-repo/src/test/java/org/duracloud/mill/auditor/jpa/JpaAuditLogStoreTest.java b/mill-db-repo/src/test/java/org/duracloud/mill/auditor/jpa/JpaAuditLogStoreTest.java
index <HASH>..<HASH> 100644
--- a/mill-db-repo/src/test/java/org/duracloud/mill/auditor/jpa/JpaAuditLogStoreTest.java
+++ b/mill-db-repo/src/test/java/org/duracloud/mill/auditor/jpa/JpaAuditLogStoreTest.java
@@ -170,7 +170,7 @@ public class JpaAuditLogStoreTest extends JpaTestBase<JpaAuditLogItem>{
JpaAuditLogItem freshItem = createMock(JpaAuditLogItem.class);
expect(repo.findOne(eq(id))).andReturn(freshItem);
freshItem.setContentProperties(serializedProps);
- expectLastCall();
+ expectLastCall().once();
expect(repo.saveAndFlush(freshItem)).andReturn(freshItem);
replayAll();
diff --git a/mill-db-repo/src/test/java/org/duracloud/mill/manifest/jpa/JpaManifestStoreTest.java b/mill-db-repo/src/test/java/org/duracloud/mill/manifest/jpa/JpaManifestStoreTest.java
index <HASH>..<HASH> 100644
--- a/mill-db-repo/src/test/java/org/duracloud/mill/manifest/jpa/JpaManifestStoreTest.java
+++ b/mill-db-repo/src/test/java/org/duracloud/mill/manifest/jpa/JpaManifestStoreTest.java
@@ -95,20 +95,20 @@ public class JpaManifestStoreTest extends JpaTestBase<ManifestItem> {
expect(returnItem.getContentChecksum()).andReturn("old checksum");
returnItem.setContentChecksum(contentChecksum);
- expectLastCall();
+ expectLastCall().once();
expect(returnItem.getContentMimetype()).andReturn("old mimetype");
returnItem.setContentMimetype(contentMimetype);
- expectLastCall();
+ expectLastCall().once();
expect(returnItem.getContentSize()).andReturn("old size");
returnItem.setContentSize(contentSize);
- expectLastCall();
+ expectLastCall().once();
returnItem.setModified(timestamp);
- expectLastCall();
+ expectLastCall().once();
expect(returnItem.isDeleted()).andReturn(true);
returnItem.setDeleted(false);
- expectLastCall();
+ expectLastCall().once();
expect(this.repo.findByAccountAndStoreIdAndSpaceIdAndContentId(account,
storeId,
@@ -160,9 +160,9 @@ public class JpaManifestStoreTest extends JpaTestBase<ManifestItem> {
expect(returnItem.isDeleted()).andReturn(false);
expect(returnItem.getModified()).andReturn(new Date(System.currentTimeMillis() - 1000));
returnItem.setModified(timestamp);
- expectLastCall();
+ expectLastCall().once();
returnItem.setDeleted(true);
- expectLastCall();
+ expectLastCall().once();
expect(this.repo.findByAccountAndStoreIdAndSpaceIdAndContentId(account,
storeId,
spaceId,
@@ -211,7 +211,7 @@ public class JpaManifestStoreTest extends JpaTestBase<ManifestItem> {
createTestSubject();
ManifestItem returnItem = createMock(ManifestItem.class);
returnItem.setMissingFromStorageProvider(true);
- expectLastCall();
+ expectLastCall().once();
expect(this.repo.findByAccountAndStoreIdAndSpaceIdAndContentId(account,
storeId,
spaceId,
|
Ensures that easymock.expectLastCall references are working properly.
|
duracloud_duracloud-db
|
train
|
eb4da830c4dd6bd5bfb9e833a45d0a01ba8f2f9b
|
diff --git a/securegraph-elasticsearch-base/src/main/java/org/securegraph/elasticsearch/ElasticSearchSearchIndexBase.java b/securegraph-elasticsearch-base/src/main/java/org/securegraph/elasticsearch/ElasticSearchSearchIndexBase.java
index <HASH>..<HASH> 100644
--- a/securegraph-elasticsearch-base/src/main/java/org/securegraph/elasticsearch/ElasticSearchSearchIndexBase.java
+++ b/securegraph-elasticsearch-base/src/main/java/org/securegraph/elasticsearch/ElasticSearchSearchIndexBase.java
@@ -251,9 +251,26 @@ public abstract class ElasticSearchSearchIndexBase implements SearchIndex {
for (Object propertyObj : properties.entrySet()) {
Map.Entry property = (Map.Entry) propertyObj;
String propertyName = (String) property.getKey();
- Map propertyAttributes = (Map) property.getValue();
- String propertyType = (String) propertyAttributes.get("type");
- propertyTypes.put(propertyName, propertyType);
+ try {
+ Map propertyAttributes = (Map) property.getValue();
+ String propertyType = (String) propertyAttributes.get("type");
+ if (propertyType != null) {
+ propertyTypes.put(propertyName, propertyType);
+ continue;
+ }
+
+ Map subProperties = (Map) propertyAttributes.get("properties");
+ if (subProperties != null) {
+ if (subProperties.containsKey("lat") && subProperties.containsKey("lon")) {
+ propertyTypes.put(propertyName, "geo_point");
+ continue;
+ }
+ }
+
+ throw new SecureGraphException("Failed to parse property type on property could not determine property type: " + propertyName);
+ } catch (Exception ex) {
+ throw new SecureGraphException("Failed to parse property type on property: " + propertyName);
+ }
}
}
} catch (IOException ex) {
|
better error checking when getting property info from es
|
lumifyio_securegraph
|
train
|
79c905e6d0b03b075eef5339b73a6e8c55acd620
|
diff --git a/core/src/main/java/jenkins/model/JenkinsLocationConfiguration.java b/core/src/main/java/jenkins/model/JenkinsLocationConfiguration.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/jenkins/model/JenkinsLocationConfiguration.java
+++ b/core/src/main/java/jenkins/model/JenkinsLocationConfiguration.java
@@ -11,11 +11,16 @@ import org.kohsuke.stapler.StaplerRequest;
import javax.mail.internet.AddressException;
import javax.mail.internet.InternetAddress;
+import javax.servlet.ServletContext;
import java.io.File;
import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
import java.util.logging.Level;
import java.util.logging.Logger;
+import static hudson.Util.fixNull;
+
/**
* Stores the location of Jenkins (e-mail address and the HTTP URL.)
*
@@ -63,6 +68,8 @@ public class JenkinsLocationConfiguration extends GlobalConfiguration {
} else {
super.load();
}
+
+ updateSecureSessionFlag();
}
public String getAdminAddress() {
@@ -91,6 +98,31 @@ public class JenkinsLocationConfiguration extends GlobalConfiguration {
url += '/';
this.jenkinsUrl = url;
save();
+ updateSecureSessionFlag();
+ }
+
+ /**
+ * If the Jenkins URL starts from "https", force the secure session flag
+ *
+ * @see <a href="https://www.owasp.org/index.php/SecureFlag">discussion of this topic in OWASP</a>
+ */
+ private void updateSecureSessionFlag() {
+ try {
+ boolean v = fixNull(jenkinsUrl).startsWith("https");
+ ServletContext context = Jenkins.getInstance().servletContext;
+ Method m = context.getClass().getMethod("getSessionCookieConfig");
+ Object sessionCookieConfig = m.invoke(context);
+
+ // not exposing session cookie to JavaScript to mitigate damage caused by XSS
+ Class scc = Class.forName("javax.servlet.SessionCookieConfig");
+ Method setHttpOnly = scc.getMethod("setHttpOnly",boolean.class);
+ setHttpOnly.invoke(sessionCookieConfig,true);
+
+ Method setSecure = scc.getMethod("setSecure",boolean.class);
+ setSecure.invoke(sessionCookieConfig,v);
+ } catch (Exception e) {
+ LOGGER.log(Level.WARNING, "Failed to set secure cookie flag. Maybe running on Servlet 2.5 and younger?", e);
+ }
}
@Override
|
[FIXED SECURITY-<I>]
If Jenkins URL is set to https, force the secure flag. Also force the
cookie to be HTTP only, which mitigates the damage that XSS can cause.
See <URL>
|
jenkinsci_jenkins
|
train
|
fbbc98f569c135add8f95a5a1e2f8f6b7ad53750
|
diff --git a/distutils/command/install.py b/distutils/command/install.py
index <HASH>..<HASH> 100644
--- a/distutils/command/install.py
+++ b/distutils/command/install.py
@@ -431,11 +431,7 @@ class install(Command):
'VIRTUAL_ENV' in os.environ
)
- # fedora:
- def is_rpm_build():
- return 'RPM_BUILD_ROOT' in os.environ
-
- if not is_virtualenv() and not is_rpm_build():
+ if not is_virtualenv():
INSTALL_SCHEMES['unix_prefix'] = INSTALL_SCHEMES['unix_local']
# debian:
|
Remove is_rpm_build, unneeded.
|
pypa_setuptools
|
train
|
2ccfaed4068a24e47a9f822eea3d60f3ff53a62d
|
diff --git a/webcam-capture/src/main/java/com/github/sarxos/webcam/ds/buildin/WebcamDefaultDevice.java b/webcam-capture/src/main/java/com/github/sarxos/webcam/ds/buildin/WebcamDefaultDevice.java
index <HASH>..<HASH> 100644
--- a/webcam-capture/src/main/java/com/github/sarxos/webcam/ds/buildin/WebcamDefaultDevice.java
+++ b/webcam-capture/src/main/java/com/github/sarxos/webcam/ds/buildin/WebcamDefaultDevice.java
@@ -43,8 +43,8 @@ public class WebcamDefaultDevice implements WebcamDevice, BufferAccess, Runnable
private static final int DEVICE_BUFFER_SIZE = 5;
/**
- * Artificial view sizes. I'm really not sure if will fit into other webcams
- * but hope that OpenIMAJ can handle this.
+ * Artificial view sizes. I'm really not sure if will fit into other webcams but hope that
+ * OpenIMAJ can handle this.
*/
private final static Dimension[] DIMENSIONS = new Dimension[] {
WebcamResolution.QQVGA.getSize(),
@@ -77,7 +77,6 @@ public class WebcamDefaultDevice implements WebcamDevice, BufferAccess, Runnable
return;
}
- grabber.setTimeout(timeout);
result.set(grabber.nextFrame());
fresh.set(true);
}
@@ -335,6 +334,11 @@ public class WebcamDefaultDevice implements WebcamDevice, BufferAccess, Runnable
throw new WebcamException("Cannot start native grabber!");
}
+ // set timeout, this MUST be done after grabber is open and before it's closed, otherwise it
+ // will result as crash
+
+ grabber.setTimeout(timeout);
+
LOG.debug("Webcam device session started");
Dimension size2 = new Dimension(grabber.getWidth(), grabber.getHeight());
@@ -377,8 +381,8 @@ public class WebcamDefaultDevice implements WebcamDevice, BufferAccess, Runnable
}
/**
- * this is to clean up all frames from device memory buffer which causes
- * initial frames to be completely blank (black images)
+ * this is to clean up all frames from device memory buffer which causes initial frames to be
+ * completely blank (black images)
*/
private void clearMemoryBuffer() {
for (int i = 0; i < DEVICE_BUFFER_SIZE; i++) {
@@ -424,8 +428,8 @@ public class WebcamDefaultDevice implements WebcamDevice, BufferAccess, Runnable
}
/**
- * Determines if device should fail when requested image size is different
- * than actually received.
+ * Determines if device should fail when requested image size is different than actually
+ * received.
*
* @param fail the fail on size mismatch flag, true or false
*/
@@ -453,6 +457,9 @@ public class WebcamDefaultDevice implements WebcamDevice, BufferAccess, Runnable
* @param timeout the timeout value in milliseconds
*/
public void setTimeout(int timeout) {
+ if (isOpen()) {
+ throw new WebcamException("Timeout must be set before webcam is open");
+ }
this.timeout = timeout;
}
|
Set timeout once, after grabber is open, fixes #<I>
|
sarxos_webcam-capture
|
train
|
1558bd25417dd42bf82177a104a4d1207eba8726
|
diff --git a/server/src/test/java/com/orientechnologies/orient/server/tx/RemoteTransactionSupportTest.java b/server/src/test/java/com/orientechnologies/orient/server/tx/RemoteTransactionSupportTest.java
index <HASH>..<HASH> 100644
--- a/server/src/test/java/com/orientechnologies/orient/server/tx/RemoteTransactionSupportTest.java
+++ b/server/src/test/java/com/orientechnologies/orient/server/tx/RemoteTransactionSupportTest.java
@@ -13,6 +13,7 @@ import com.orientechnologies.orient.core.record.OEdge;
import com.orientechnologies.orient.core.record.OElement;
import com.orientechnologies.orient.core.record.OVertex;
import com.orientechnologies.orient.core.record.impl.ODocument;
+import com.orientechnologies.orient.core.sql.executor.OResult;
import com.orientechnologies.orient.core.sql.executor.OResultSet;
import com.orientechnologies.orient.server.OServer;
import com.orientechnologies.orient.server.network.ORemoteImportTest;
@@ -209,13 +210,96 @@ public class RemoteTransactionSupportTest {
someTx.setProperty("name", "foo");
database.save(someTx);
database.save(someTx);
- assertEquals(database.getTransaction().getEntryCount(),1);
+ assertEquals(database.getTransaction().getEntryCount(), 1);
assertEquals(database.countClass("SomeTx"), 1);
database.commit();
assertEquals(database.countClass("SomeTx"), 1);
}
@Test
+ public void testDoubleSaveDoubleFlushTransaction() {
+ database.begin();
+ OElement someTx = database.newElement("SomeTx");
+ someTx.setProperty("name", "foo");
+ database.save(someTx);
+ database.save(someTx);
+ OResultSet result = database.query("select from SomeTx");
+ assertEquals(1, result.stream().count());
+ result.close();
+ database.save(someTx);
+ database.save(someTx);
+ result = database.query("select from SomeTx");
+ assertEquals(1, result.stream().count());
+ result.close();
+ assertEquals(database.getTransaction().getEntryCount(), 1);
+ assertEquals(database.countClass("SomeTx"), 1);
+ database.commit();
+ assertEquals(database.countClass("SomeTx"), 1);
+ }
+
+ @Test
+ public void testRefFlushedInTransaction() {
+ database.begin();
+ OElement someTx = database.newElement("SomeTx");
+ someTx.setProperty("name", "foo");
+ database.save(someTx);
+
+ OElement oneMore = database.newElement("SomeTx");
+ oneMore.setProperty("name", "bar");
+ oneMore.setProperty("ref", someTx);
+ OResultSet result = database.query("select from SomeTx");
+ assertEquals(1, result.stream().count());
+ result.close();
+ database.save(oneMore);
+ database.commit();
+ OResultSet result1 = database.query("select ref from SomeTx where name='bar'");
+ assertTrue(result1.hasNext());
+ assertEquals(someTx.getIdentity(), result1.next().getProperty("ref"));
+ result1.close();
+ }
+
+ @Test
+ public void testDoubleRefFlushedInTransaction() {
+ database.begin();
+ OElement someTx = database.newElement("SomeTx");
+ someTx.setProperty("name", "foo");
+ database.save(someTx);
+
+ OElement oneMore = database.newElement("SomeTx");
+ oneMore.setProperty("name", "bar");
+ oneMore.setProperty("ref", someTx.getIdentity());
+
+ OResultSet result = database.query("select from SomeTx");
+ assertEquals(1, result.stream().count());
+ result.close();
+
+ OElement ref2 = database.newElement("SomeTx");
+ ref2.setProperty("name", "other");
+ database.save(ref2);
+
+ oneMore.setProperty("ref2", ref2.getIdentity());
+ result = database.query("select from SomeTx");
+ assertEquals(2, result.stream().count());
+ result.close();
+
+ database.save(oneMore);
+ OResultSet result1 = database.query("select ref,ref2 from SomeTx where name='bar'");
+ assertTrue(result1.hasNext());
+ OResult next = result1.next();
+ assertEquals(someTx.getIdentity(), next.getProperty("ref"));
+ assertEquals(ref2.getIdentity(), next.getProperty("ref2"));
+ result1.close();
+
+ database.commit();
+ result1 = database.query("select ref,ref2 from SomeTx where name='bar'");
+ assertTrue(result1.hasNext());
+ next = result1.next();
+ assertEquals(someTx.getIdentity(), next.getProperty("ref"));
+ assertEquals(ref2.getIdentity(), next.getProperty("ref2"));
+ result1.close();
+ }
+
+ @Test
public void testUpdateCreatedInTxIndexGetTransaction() {
OIndex<?> index = database.getClass("IndexedTx").getProperty("name").getAllIndexes().iterator().next();
database.begin();
|
Add some test cases for possible corner cases.
|
orientechnologies_orientdb
|
train
|
e7c706e6b94e27d5978cc7aabe9202e0a5bd5f1e
|
diff --git a/lib/rubocop/cop/style/collection_methods.rb b/lib/rubocop/cop/style/collection_methods.rb
index <HASH>..<HASH> 100644
--- a/lib/rubocop/cop/style/collection_methods.rb
+++ b/lib/rubocop/cop/style/collection_methods.rb
@@ -19,11 +19,30 @@ module Rubocop
find_all: 'select'
}
+ def on_block(node)
+ method, _args, _body = *node
+
+ check_method_node(method)
+
+ super
+ end
+
def on_send(node)
- receiver, method_name, *_args = *node
+ _receiver, _method_name, *args = *node
+
+ if args.size == 1 && args.first.type == :block_pass
+ check_method_node(node)
+ end
+
+ super
+ end
+
+ private
+
+ def check_method_node(node)
+ _receiver, method_name, *_args = *node
- # a simple(but flawed way) to reduce false positives
- if receiver && PREFERRED_METHODS[method_name]
+ if PREFERRED_METHODS[method_name]
add_offence(
:convention,
node.loc.selector,
diff --git a/spec/rubocop/cops/style/collection_methods_spec.rb b/spec/rubocop/cops/style/collection_methods_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/rubocop/cops/style/collection_methods_spec.rb
+++ b/spec/rubocop/cops/style/collection_methods_spec.rb
@@ -6,39 +6,34 @@ module Rubocop
module Cop
module Style
describe CollectionMethods do
- let(:cm) { CollectionMethods.new }
+ let(:cop) { CollectionMethods.new }
- it 'registers an offence for collect' do
- inspect_source(cm, ['[1, 2, 3].collect { |e| e + 1 }'])
- expect(cm.offences.size).to eq(1)
- expect(cm.offences.map(&:message))
- .to eq(['Prefer map over collect.'])
- end
+ CollectionMethods::PREFERRED_METHODS.keys.each do |method|
+ it "registers an offence for #{method} with block" do
+ inspect_source(cop, ["[1, 2, 3].#{method} { |e| e + 1 }"])
+ expect(cop.offences.size).to eq(1)
+ preferred_method = CollectionMethods::PREFERRED_METHODS[method]
+ expect(cop.messages)
+ .to eq(["Prefer #{preferred_method} over #{method}."])
+ end
- it 'registers an offence for inject' do
- inspect_source(cm, ['[1, 2, 3].inject { |e| e + 1 }'])
- expect(cm.offences.size).to eq(1)
- expect(cm.offences.map(&:message))
- .to eq(['Prefer reduce over inject.'])
- end
+ it "registers an offence for #{method} with proc param" do
+ inspect_source(cop, ["[1, 2, 3].#{method}(&:test)"])
+ expect(cop.offences.size).to eq(1)
+ preferred_method = CollectionMethods::PREFERRED_METHODS[method]
+ expect(cop.messages)
+ .to eq(["Prefer #{preferred_method} over #{method}."])
+ end
- it 'registers an offence for detect' do
- inspect_source(cm, ['[1, 2, 3].detect { |e| e + 1 }'])
- expect(cm.offences.size).to eq(1)
- expect(cm.offences.map(&:message))
- .to eq(['Prefer find over detect.'])
- end
-
- it 'registers an offence for find_all' do
- inspect_source(cm, ['[1, 2, 3].find_all { |e| e + 1 }'])
- expect(cm.offences.size).to eq(1)
- expect(cm.offences.map(&:message))
- .to eq(['Prefer select over find_all.'])
- end
+ it "accepts #{method} with more than 1 param" do
+ inspect_source(cop, ["[1, 2, 3].#{method}(other, &:test)"])
+ expect(cop.offences).to be_empty
+ end
- it 'ignores find_all without an explicit receiver' do
- inspect_source(cm, ['find_all { |e| e + 1 }'])
- expect(cm.offences).to be_empty
+ it "accepts #{method} without a block" do
+ inspect_source(cop, ["[1, 2, 3].#{method}"])
+ expect(cop.offences).to be_empty
+ end
end
end
end
|
Refine CollectionMethods cop
Now we actually check if the method was passed a block or received a
single proc argument. This reduces greatly the likelihood of false positives.
|
rubocop-hq_rubocop
|
train
|
15d834bb8aef788cf0795b974720382daba29f9e
|
diff --git a/geoviews/operation/projection.py b/geoviews/operation/projection.py
index <HASH>..<HASH> 100644
--- a/geoviews/operation/projection.py
+++ b/geoviews/operation/projection.py
@@ -74,7 +74,7 @@ class project_path(_project_operation):
Handle case of iso-contour
"""
xdim, ydim = contour.kdims[:2]
- data = {k: vals[0] for k, vals in data.items()}
+ data = {k: vals for k, vals in data.items()}
# Wrap longitudes
vertices = contour.array([0, 1])
@@ -133,7 +133,6 @@ class project_path(_project_operation):
boundary = element.crs.project_geometry(Polygon(self.p.projection.boundary),
self.p.projection)
-
if isinstance(element, Polygons):
multi_type, geom_type = MultiPolygon, Polygon
else:
@@ -142,8 +141,12 @@ class project_path(_project_operation):
projected = []
paths = element.split()
for path in paths:
- data = {vd.name: path.dimension_values(vd, expanded=False) for vd in path.vdims}
- if any(len(vals) > 1 for vals in data.values()):
+ data = {}
+ for vd in path.vdims:
+ scalar = path.interface.isscalar(path, vd)
+ values = path.dimension_values(vd, expanded=not scalar)
+ data[vd.name] = values[0] if scalar else values
+ if any(vals is not None and not np.isscalar(vals) and len(vals) > 1 for vals in data.values()):
projected += self._project_path(element, path, data, boundary, geom_type, multi_type)
else:
projected += self._project_contour(element, path, data, boundary, geom_type, multi_type)
@@ -206,8 +209,9 @@ class project_points(_project_operation):
(type(element).__name__, type(element.crs).__name__,
type(self.p.projection).__name__))
- return element.clone(new_data, crs=self.p.projection,
- datatype=datatype)
+ return element.clone(tuple(new_data[d.name] for d in element.dimensions()),
+ crs=self.p.projection, datatype=datatype)
+
class project_graph(_project_operation):
|
Handle scalar values in project_path better (#<I>)
|
pyviz_geoviews
|
train
|
0f66ed6eedf8d0233d2741b134bc0a715bdff2ae
|
diff --git a/sprinter/formula/ssh.py b/sprinter/formula/ssh.py
index <HASH>..<HASH> 100644
--- a/sprinter/formula/ssh.py
+++ b/sprinter/formula/ssh.py
@@ -38,9 +38,9 @@ class SSHFormula(FormulaBase):
def prompt(self):
if self.environment.phase in (PHASE.INSTALL, PHASE.UPDATE):
if os.path.exists(ssh_config_path):
- if not self.target.has('use_global_ssh') and self.__global_ssh_key_exists():
- self.target.prompt("use_global_ssh",
- "A standard global ssh key was detected! Would you like to use the global ssh key?",
+ if not self.target.has('use_global_ssh'):
+ self.target.prompt("use_global_ssh",
+ "Would you like to use existing ssh configuration?",
default="no")
if (self.injections.in_noninjected_file(
ssh_config_path, "Host %s" % self.target.get('host')) and
@@ -88,19 +88,21 @@ class SSHFormula(FormulaBase):
"""
Install the ssh configuration
"""
- if not self.__global_ssh_key_exists() or not config.is_affirmative('use_global_ssh', default="no"):
+ if not config.is_affirmative('use_global_ssh', default="no"):
ssh_config_injection = ssh_config_template % {
'host': config.get('host'),
'hostname': config.get('hostname'),
'ssh_key_path': config.get('ssh_key_path'),
'user': config.get('user')
}
- if os.path.exists(ssh_config_path):
+ if not os.path.exists(ssh_config_path):
+
if self.injections.in_noninjected_file(ssh_config_path, "Host %s" % config.get('host')):
if config.is_affirmative('override'):
self.injections.inject(ssh_config_path, ssh_config_injection)
else:
self.injections.inject(ssh_config_path, ssh_config_injection)
+
else:
self.injections.inject(ssh_config_path, ssh_config_injection)
self.injections.commit()
@@ -110,9 +112,3 @@ class SSHFormula(FormulaBase):
ssh_contents = open(ssh_path, 'r').read().rstrip('\n')
command = command.replace('{{ssh}}', ssh_contents)
lib.call(command, shell=True, output_log_level=logging.DEBUG)
-
- def __global_ssh_key_exists(self):
- """ Check if the global ssh keys exists """
- return (os.path.exists(os.path.join(user_ssh_path, "id_dsa")) or
- os.path.exists(os.path.join(user_ssh_path, "id_ecdsa")) or
- os.path.exists(os.path.join(user_ssh_path, "id_rsa")))
diff --git a/sprinter/formula/tests/test_ssh.py b/sprinter/formula/tests/test_ssh.py
index <HASH>..<HASH> 100644
--- a/sprinter/formula/tests/test_ssh.py
+++ b/sprinter/formula/tests/test_ssh.py
@@ -3,6 +3,7 @@ import os
import shutil
import tempfile
from mock import Mock, patch
+from nose.tools import ok_
from sprinter.testtools import FormulaTest
import sprinter.lib as lib
@@ -10,10 +11,16 @@ source_config = """
"""
target_config = """
-[simple_example]
-formula = sprinter.formula.template
-source = %(temp_dir)s/in.txt
-target = %(temp_dir)s/out.txt
+[github]
+formula = sprinter.formula.ssh
+host = github.com
+keyname = github
+nopassphrase = true
+type = rsa
+hostname = github.com
+user = toumorokoshi
+create = false
+use_global_ssh = yes
"""
@@ -21,19 +28,13 @@ class TestSSHFormula(FormulaTest):
""" Tests for the unpack formula """
def setup(self):
- self.temp_dir = tempfile.mkdtemp()
- config_dict = {'temp_dir': self.temp_dir}
- super(TestSSHFormula, self).setup(source_config=(source_config % config_dict),
- target_config=(target_config % config_dict))
+ super(TestSSHFormula, self).setup(source_config=source_config,
+ target_config=target_config)
- def teardown(self):
- shutil.rmtree(self.temp_dir)
-
- def skip_simple_example(self):
- """ The template formula should grab a template and save it """
- with open(os.path.join(self.temp_dir, 'in.txt'), 'w+') as fh:
- fh.write(SIMPLE_TEMPLATE)
- self.environment.run_feature("simple_example", 'sync')
- out_file = os.path.join(self.temp_dir, 'out.txt')
- assert os.path.exists(out_file)
- assert open(out_file).read() == SIMPLE_TEMPLATE
+ def test_use_global_ssh(self):
+ """ If use_global_ssh is false, then no config should be injected into ssh config"""
+ self.environment.injections.inject = Mock()
+ self.environment.run_feature("github", "sync")
+ print(self.environment.injections.inject.call_count)
+ print(self.environment.injections.inject.call_args)
+ ok_(not self.environment.injections.inject.called)
|
Fixing #<I>, ssh always prompts now
|
toumorokoshi_sprinter
|
train
|
bf6e4feeb3e8171f26835c07cdc34c1a0ccb7156
|
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -181,7 +181,7 @@ class ClosureCompilerPlugin {
try {
errors = JSON.parse(stdErrData);
} catch (e1) {
- const exceptionIndex = stdErrData.indexOf(']java.lang.RuntimeException: INTERNAL COMPILER ERROR.');
+ const exceptionIndex = stdErrData.indexOf(']java.lang.');
if (exceptionIndex > 0) {
try {
errors = JSON.parse(stdErrData.substring(0, exceptionIndex + 1));
@@ -189,13 +189,16 @@ class ClosureCompilerPlugin {
level: 'error',
description: stdErrData.substr(exceptionIndex + 1),
});
- } catch (e2) {
- errors = [{
- level: 'error',
- description: stdErrData,
- }];
+ } catch (e2) { // eslint-disable-line no-empty
}
}
+ if (!errors) {
+ errors = errors || [];
+ errors.push({
+ level: 'error',
+ description: stdErrData,
+ });
+ }
}
ClosureCompilerPlugin.reportErrors(compilation, errors, requestShortener);
|
refactor: Better logic for when error data does not parse
|
webpack-contrib_closure-webpack-plugin
|
train
|
634e256d3de1a0f504331bcb9e6be11bd712eefc
|
diff --git a/components/Flute/src/Api/Crud.php b/components/Flute/src/Api/Crud.php
index <HASH>..<HASH> 100644
--- a/components/Flute/src/Api/Crud.php
+++ b/components/Flute/src/Api/Crud.php
@@ -39,6 +39,7 @@ use Limoncello\Flute\Http\Query\FilterParameterCollection;
use Neomerx\JsonApi\Contracts\Document\DocumentInterface;
use Neomerx\JsonApi\Exceptions\ErrorCollection;
use Neomerx\JsonApi\Exceptions\JsonApiException as E;
+use Psr\Container\ContainerInterface;
/**
* @package Limoncello\Flute
@@ -88,24 +89,22 @@ class Crud implements CrudInterface
private $paginationStrategy;
/**
- * @param FactoryInterface $factory
- * @param string $modelClass
- * @param RepositoryInterface $repository
- * @param ModelSchemeInfoInterface $modelSchemes
- * @param PaginationStrategyInterface $paginationStrategy
+ * @var ContainerInterface
*/
- public function __construct(
- FactoryInterface $factory,
- string $modelClass,
- RepositoryInterface $repository,
- ModelSchemeInfoInterface $modelSchemes,
- PaginationStrategyInterface $paginationStrategy
- ) {
- $this->factory = $factory;
+ private $container;
+
+ /**
+ * @param ContainerInterface $container
+ * @param string $modelClass
+ */
+ public function __construct(ContainerInterface $container, string $modelClass)
+ {
+ $this->factory = $container->get(FactoryInterface::class);
$this->modelClass = $modelClass;
- $this->repository = $repository;
- $this->modelSchemes = $modelSchemes;
- $this->paginationStrategy = $paginationStrategy;
+ $this->repository = $container->get(RepositoryInterface::class);
+ $this->modelSchemes = $container->get(ModelSchemeInfoInterface::class);
+ $this->paginationStrategy = $container->get(PaginationStrategyInterface::class);
+ $this->container = $container;
}
/**
@@ -380,6 +379,14 @@ class Crud implements CrudInterface
}
/**
+ * @return ContainerInterface
+ */
+ protected function getContainer(): ContainerInterface
+ {
+ return $this->container;
+ }
+
+ /**
* @return FactoryInterface
*/
protected function getFactory(): FactoryInterface
diff --git a/components/Flute/tests/Data/Api/AppCrud.php b/components/Flute/tests/Data/Api/AppCrud.php
index <HASH>..<HASH> 100644
--- a/components/Flute/tests/Data/Api/AppCrud.php
+++ b/components/Flute/tests/Data/Api/AppCrud.php
@@ -17,11 +17,7 @@
*/
use Doctrine\DBAL\Query\QueryBuilder;
-use Limoncello\Contracts\Data\ModelSchemeInfoInterface;
use Limoncello\Flute\Api\Crud;
-use Limoncello\Flute\Contracts\Adapters\PaginationStrategyInterface;
-use Limoncello\Flute\Contracts\Adapters\RepositoryInterface;
-use Limoncello\Flute\Contracts\FactoryInterface;
use Limoncello\Tests\Flute\Data\Models\Model;
use Psr\Container\ContainerInterface;
@@ -34,32 +30,11 @@ abstract class AppCrud extends Crud
const MODEL_CLASS = null;
/**
- * @var ContainerInterface|null
- */
- private $container;
-
- /**
* @inheritdoc
*/
public function __construct(ContainerInterface $container)
{
- $this->container = $container;
-
- parent::__construct(
- $this->getContainer()->get(FactoryInterface::class),
- static::MODEL_CLASS,
- $this->getContainer()->get(RepositoryInterface::class),
- $this->getContainer()->get(ModelSchemeInfoInterface::class),
- $this->getContainer()->get(PaginationStrategyInterface::class)
- );
- }
-
- /**
- * @return ContainerInterface|null
- */
- protected function getContainer()
- {
- return $this->container;
+ parent::__construct($container, static::MODEL_CLASS);
}
/**
|
Add container support to CRUD constructor.
|
limoncello-php_framework
|
train
|
cc661e0c2fcc06ea0158be364a76c5074bfaef85
|
diff --git a/executor/join_test.go b/executor/join_test.go
index <HASH>..<HASH> 100644
--- a/executor/join_test.go
+++ b/executor/join_test.go
@@ -1879,6 +1879,15 @@ func (s *testSuiteJoin2) TestNullEmptyAwareSemiJoin(c *C) {
result.Check(results[i].result)
}
}
+
+ tk.MustExec("drop table if exists t1, t2")
+ tk.MustExec("create table t1(a int)")
+ tk.MustExec("create table t2(a int)")
+ tk.MustExec("insert into t1 values(1),(2)")
+ tk.MustExec("insert into t2 values(1),(null)")
+ tk.MustQuery("select * from t1 where a not in (select a from t2 where t1.a = t2.a)").Check(testkit.Rows(
+ "2",
+ ))
}
func (s *testSuiteJoin1) TestScalarFuncNullSemiJoin(c *C) {
diff --git a/planner/core/expression_rewriter.go b/planner/core/expression_rewriter.go
index <HASH>..<HASH> 100644
--- a/planner/core/expression_rewriter.go
+++ b/planner/core/expression_rewriter.go
@@ -787,12 +787,12 @@ func (er *expressionRewriter) handleInSubquery(ctx context.Context, v *ast.Patte
// For AntiSemiJoin/LeftOuterSemiJoin/AntiLeftOuterSemiJoin, we cannot treat `in` expression as
// normal column equal condition, so we specially mark the inner operand here.
if v.Not || asScalar {
- rCol.InOperand = true
// If both input columns of `in` expression are not null, we can treat the expression
// as normal column equal condition instead.
- lCol, ok := lexpr.(*expression.Column)
- if ok && mysql.HasNotNullFlag(lCol.GetType().Flag) && mysql.HasNotNullFlag(rCol.GetType().Flag) {
- rCol.InOperand = false
+ if !mysql.HasNotNullFlag(lexpr.GetType().Flag) || !mysql.HasNotNullFlag(rCol.GetType().Flag) {
+ rColCopy := *rCol
+ rColCopy.InOperand = true
+ rexpr = &rColCopy
}
}
} else {
|
planner: make a copy of column before modifying its InOperand (#<I>)
|
pingcap_tidb
|
train
|
8ab7711ad1e3f4b0394d1f9d24064b5597961476
|
diff --git a/externs/html5.js b/externs/html5.js
index <HASH>..<HASH> 100644
--- a/externs/html5.js
+++ b/externs/html5.js
@@ -768,6 +768,12 @@ DOMApplicationCache.prototype.DOWNLOADING = 3;
DOMApplicationCache.prototype.UPDATEREADY = 4;
/**
+ * The cache is obsolete.
+ * @type {number}
+ */
+DOMApplicationCache.prototype.OBSOLETE = 5;
+
+/**
* The current status of the application cache.
* @type {number}
*/
|
Add OBSOLETE to DOMApplicationCache prototype.
R=nicksantos
DELTA=6 (6 added, 0 deleted, 0 changed)
Revision created by MOE tool push_codebase.
MOE_MIGRATION=<I>
git-svn-id: <URL>
|
google_closure-compiler
|
train
|
d79aca706226499cab1bbeb4d27431393411cd6f
|
diff --git a/src/main/java/hex/glm/GLM2.java b/src/main/java/hex/glm/GLM2.java
index <HASH>..<HASH> 100644
--- a/src/main/java/hex/glm/GLM2.java
+++ b/src/main/java/hex/glm/GLM2.java
@@ -66,7 +66,7 @@ public class GLM2 extends ModelJob {
@API(help="use lambda search starting at lambda max, given lambda is then interpreted as lambda min",filter=Default.class)
boolean lambda_search;
-
+
// API input parameters END ------------------------------------------------------------
// API output parameters BEGIN ------------------------------------------------------------
@@ -558,27 +558,18 @@ public class GLM2 extends ModelJob {
private void xvalidate(final GLMModel model, int lambdaIxd,final H2OCountedCompleter cmp){
final Key [] keys = new Key[n_folds];
- H2OCallback callback = new H2OCallback() {
+ GLM2 [] glms = new GLM2[n_folds];
+ for(int i = 0; i < n_folds; ++i)
+ glms[i] = new GLM2(this.description + "xval " + i, self(), keys[i] = Key.make(destination_key + "_" + _lambdaIdx + "_xval" + i), _dinfo.getFold(i, n_folds),_glm,new double[]{lambda[_lambdaIdx]},model.alpha,0, model.beta_eps,self(),model.norm_beta(lambdaIxd),higher_accuracy,0);
+ H2O.submitTask(new ParallelGLMs(glms,H2O.CLOUD.size(),new H2OCallback(GLM2.this) {
@Override public void callback(H2OCountedCompleter t) {
- try{
- GLMModel [] models = new GLMModel[keys.length];
- // we got the xval models, now compute their validations...
- for(int i = 0; i < models.length; ++i)models[i] = DKV.get(keys[i]).get();
- new GLMXValidationTask(model,_lambdaIdx,models, cmp).asyncExec(_dinfo._adaptedFrame);
- }catch(Throwable ex){cmp.completeExceptionally(ex);}
+ GLMModel [] models = new GLMModel[keys.length];
+ // we got the xval models, now compute their validations...
+ for(int i = 0; i < models.length; ++i)models[i] = DKV.get(keys[i]).get();
+ new GLMXValidationTask(model,_lambdaIdx,models, cmp).asyncExec(_dinfo._adaptedFrame);
}
- @Override public boolean onExceptionalCompletion(Throwable ex, CountedCompleter caller){
- cmp.completeExceptionally(ex);
- return true;
- }
- };
- callback.addToPendingCount(n_folds-1);
- double proximal_penalty = 0;
- for(int i = 0; i < n_folds; ++i)
- new GLM2(this.description + "xval " + i, self(), keys[i] = Key.make(destination_key + "_" + _lambdaIdx + "_xval" + i), _dinfo.getFold(i, n_folds),_glm,new double[]{lambda[_lambdaIdx]},model.alpha,0, model.beta_eps,self(),model.norm_beta(lambdaIxd),higher_accuracy, proximal_penalty).
- run(callback);
+ }));
}
-
// Expand grid search related argument sets
@Override protected NanoHTTPD.Response serveGrid(NanoHTTPD server, Properties parms, RequestType type) {
return superServeGrid(server, parms, type);
@@ -674,4 +665,46 @@ public class GLM2 extends ModelJob {
}
}
public boolean isDone(){return DKV.get(self()) == null;}
+
+ // class to execute multiple GLM runs in parllell
+ // (with user-given limit on how many to run in in parallel)
+ public class ParallelGLMs extends DTask {
+ transient final private GLM2 [] _glms;
+ transient final public int _maxP;
+ transient private AtomicInteger _remCnt;
+ transient private AtomicInteger _doneCnt;
+ public ParallelGLMs(GLM2 [] glms){this(glms,H2O.CLOUD.size());}
+ public ParallelGLMs(GLM2 [] glms, int maxP){ _glms = glms; _maxP = maxP;}
+ public ParallelGLMs(GLM2 [] glms, int maxP, H2OCountedCompleter cmp){super(cmp); _glms = glms; _maxP = maxP;}
+
+ private void forkDTask(int i){
+ int nodeId = i%H2O.CLOUD.size();
+ final GLM2 glm = _glms[i];
+ new RPC(H2O.CLOUD._memary[nodeId],new DTask() {
+ @Override
+ public void compute2() {
+ glm.run(this);
+ }
+ }).addCompleter(new Callback()).call();
+ }
+ class Callback extends H2OCallback<H2OCountedCompleter> {
+ public Callback(){super(GLM2.this);}
+ @Override public void callback(H2OCountedCompleter cc){
+ int i;
+ if((i = _remCnt.getAndDecrement()) > 0) // not done yet
+ forkDTask(_glms.length - i);
+ else if(_doneCnt.getAndDecrement() == 0) // am I the last guy to finish? if so complete parent.
+ ParallelGLMs.this.tryComplete();
+ // else just done myself (no more work) but others stillin progress -> just return
+ }
+ }
+ @Override public void compute2(){
+ final int n = Math.min(_maxP, _glms.length);
+ _remCnt = new AtomicInteger(_glms.length-n);
+ _doneCnt = new AtomicInteger(n-1);
+ for(int i = 0; i < n; ++i)
+ forkDTask(i);
+ }
+ }
+
}
|
Updated GLM2 xvalidation to be distributed on remote nodes.
|
h2oai_h2o-2
|
train
|
a50786fd2ba7e18fe247feb21b514a9558282bb0
|
diff --git a/indra/databases/lincs_client.py b/indra/databases/lincs_client.py
index <HASH>..<HASH> 100644
--- a/indra/databases/lincs_client.py
+++ b/indra/databases/lincs_client.py
@@ -29,6 +29,9 @@ class LincsClient(object):
def __init__(self):
with open(lincs_sm, 'r') as fh:
self._sm_data = json.load(fh)
+ extra_sm_data = load_lincs_extras()
+ self._sm_data.update(extra_sm_data)
+
with open(lincs_prot, 'r') as fh:
self._prot_data = json.load(fh)
@@ -162,6 +165,16 @@ def load_lincs_csv(url):
for line_elements in data_rows[1:]]
+def load_lincs_extras():
+ fname = os.path.join(resources, 'hms_lincs_extra.tsv')
+ with open(fname, 'r') as fh:
+ rows = [line.strip('\n').split('\t') for line in fh.readlines()]
+ return {r[0]: {'HMS LINCS ID': r[0],
+ 'Name': r[1],
+ 'ChEMBL ID': r[2] if r[2] else ''}
+ for r in rows[1:]}
+
+
# This is a set of mappings specific to HMS-LINCS that map outdated compound
# IDs appearing in HMS-LINCS to preferred compound IDs. This can be obtained
# more generally via indra.databases.pubchem_client, but this is a pre-compiled
diff --git a/indra/ontology/bio/ontology.py b/indra/ontology/bio/ontology.py
index <HASH>..<HASH> 100644
--- a/indra/ontology/bio/ontology.py
+++ b/indra/ontology/bio/ontology.py
@@ -356,10 +356,11 @@ class BioOntology(IndraOntology):
nodes = []
for hmsl_id, data in lc._sm_data.items():
- hmsl_id, suffix = hmsl_id.split('-')
+ hmsl_base_id, suffix = hmsl_id.split('-') if '-' in hmsl_id else \
+ hmsl_id, None
if suffix == '999':
continue
- nodes.append((self.label('HMS-LINCS', hmsl_id),
+ nodes.append((self.label('HMS-LINCS', hmsl_base_id),
{'name': data['Name']}))
self.add_nodes_from(nodes)
@@ -369,16 +370,17 @@ class BioOntology(IndraOntology):
edges = []
for hmsl_id, data in lc._sm_data.items():
- hmsl_id, suffix = hmsl_id.split('-')
+ hmsl_base_id, suffix = hmsl_id.split('-') if '-' in hmsl_id else \
+ hmsl_id, None
if suffix == '999':
continue
refs = lc.get_small_molecule_refs(hmsl_id)
for ref_ns, ref_id in refs.items():
- edges.append((self.label('HMS-LINCS', hmsl_id),
+ edges.append((self.label('HMS-LINCS', hmsl_base_id),
self.label(ref_ns, ref_id),
{'type': 'xref', 'source': 'hms-lincs'}))
edges.append((self.label(ref_ns, ref_id),
- self.label('HMS-LINCS', hmsl_id),
+ self.label('HMS-LINCS', hmsl_base_id),
{'type': 'xref', 'source': 'hms-lincs'}))
self.add_edges_from(edges)
|
Integrate extra naming and mappings from HMS LINCS
|
sorgerlab_indra
|
train
|
e3f3001b110baa0837f103b00f9cf732b9f2ccf6
|
diff --git a/api/cloudcontroller/ccv2/service_instance.go b/api/cloudcontroller/ccv2/service_instance.go
index <HASH>..<HASH> 100644
--- a/api/cloudcontroller/ccv2/service_instance.go
+++ b/api/cloudcontroller/ccv2/service_instance.go
@@ -37,6 +37,10 @@ type ServiceInstance struct {
// features of the service instance.
DashboardURL string
+ // RouteServiceURL is the URL of the user-provided service to which requests
+ // for bound routes will be forwarded.
+ RouteServiceURL string
+
// LastOperation is the status of the last operation requested on the service
// instance.
LastOperation LastOperation
@@ -59,6 +63,7 @@ func (serviceInstance *ServiceInstance) UnmarshalJSON(data []byte) error {
Type string `json:"type"`
Tags []string `json:"tags"`
DashboardURL string `json:"dashboard_url"`
+ RouteServiceURL string `json:"route_service_url"`
LastOperation LastOperation `json:"last_operation"`
}
}
@@ -75,6 +80,7 @@ func (serviceInstance *ServiceInstance) UnmarshalJSON(data []byte) error {
serviceInstance.Type = constant.ServiceInstanceType(ccServiceInstance.Entity.Type)
serviceInstance.Tags = ccServiceInstance.Entity.Tags
serviceInstance.DashboardURL = ccServiceInstance.Entity.DashboardURL
+ serviceInstance.RouteServiceURL = ccServiceInstance.Entity.RouteServiceURL
serviceInstance.LastOperation = ccServiceInstance.Entity.LastOperation
return nil
}
diff --git a/command/v6/service_command.go b/command/v6/service_command.go
index <HASH>..<HASH> 100644
--- a/command/v6/service_command.go
+++ b/command/v6/service_command.go
@@ -183,6 +183,7 @@ func (cmd ServiceCommand) displayUserProvidedServiceInstanceSummary(serviceInsta
table := [][]string{
{cmd.UI.TranslateText("name:"), serviceInstanceSummary.Name},
{cmd.UI.TranslateText("service:"), cmd.UI.TranslateText("user-provided")},
+ {cmd.UI.TranslateText("route service url:"), serviceInstanceSummary.RouteServiceURL},
{cmd.UI.TranslateText("tags:"), strings.Join(serviceInstanceSummary.Tags, ", ")},
}
cmd.UI.DisplayKeyValueTable("", table, 3)
|
Fix for CF cli not displaying route service url on "cf service <service name>"
Github issue: <URL>
|
cloudfoundry_cli
|
train
|
b723fdfafd0c0ae90c3d3e587f99f54334309592
|
diff --git a/lib/EarthIT/CMIPREST/ActionUnauthorized.php b/lib/EarthIT/CMIPREST/ActionUnauthorized.php
index <HASH>..<HASH> 100644
--- a/lib/EarthIT/CMIPREST/ActionUnauthorized.php
+++ b/lib/EarthIT/CMIPREST/ActionUnauthorized.php
@@ -2,6 +2,8 @@
class EarthIT_CMIPREST_ActionUnauthorized extends Exception
{
+ /** The message without notes and stuff glommed on. */
+ protected $simpleMessage;
protected $action;
protected $context;
protected $notes;
@@ -11,6 +13,7 @@ class EarthIT_CMIPREST_ActionUnauthorized extends Exception
$this->context = $ctx;
$this->notes = $notes;
$message = "You are not authorized to ".$action->getActionDescription();
+ $this->simpleMessage = $message;
if( $notes ) {
$message .= " because:";
foreach( $notes as $note ) {
@@ -22,6 +25,7 @@ class EarthIT_CMIPREST_ActionUnauthorized extends Exception
parent::__construct( $message );
}
+ public function getSimpleMessage() { return $this->simpleMessage; }
public function getAction() { return $this->action; }
public function getContext() { return $this->context; }
public function getNotes() { return $this->notes; }
diff --git a/lib/EarthIT/CMIPREST/RESTer.php b/lib/EarthIT/CMIPREST/RESTer.php
index <HASH>..<HASH> 100644
--- a/lib/EarthIT/CMIPREST/RESTer.php
+++ b/lib/EarthIT/CMIPREST/RESTer.php
@@ -537,7 +537,7 @@ class EarthIT_CMIPREST_RESTer
$preAuth = $this->authorizer->preAuthorizeSimpleAction($act, $ctx, $authorizationExplanation);
if( $preAuth === false ) {
- throw new EarthIT_CMIPREST_ActionUnauthorized($act, $authorizationExplanation);
+ throw new EarthIT_CMIPREST_ActionUnauthorized($act, $ctx, $authorizationExplanation);
}
if( $act instanceof EarthIT_CMIPREST_RESTAction_SearchAction ) {
diff --git a/lib/EarthIT/CMIPREST/Util.php b/lib/EarthIT/CMIPREST/Util.php
index <HASH>..<HASH> 100644
--- a/lib/EarthIT/CMIPREST/Util.php
+++ b/lib/EarthIT/CMIPREST/Util.php
@@ -203,7 +203,7 @@ class EarthIT_CMIPREST_Util
if( $e instanceof EarthIT_CMIPREST_ActionUnauthorized ) {
$act = $e->getAction();
$status = $userIsAuthenticated ? 403 : 401;
- return EarthIT_CMIPREST_Util::singleErrorResponse( $status, $act->getActionDescription(), $e->getNotes() );
+ return EarthIT_CMIPREST_Util::singleErrorResponse( $status, $e->getSimpleMessage(), $e->getNotes() );
} else if( $e instanceof EarthIT_CMIPREST_ActionInvalid ) {
return EarthIT_CMIPREST_Util::multiErrorResponse( 409, $e->getErrorDetails() );
} else if( $e instanceof EarthIT_Schema_NoSuchResourceClass ) {
|
Fix ActionUnauthorized throwing and handling.
|
EarthlingInteractive_PHPCMIPREST
|
train
|
209dab26595a75444b5b916c997e96a7e196438e
|
diff --git a/src/Controller/ConfigurableBase.php b/src/Controller/ConfigurableBase.php
index <HASH>..<HASH> 100644
--- a/src/Controller/ConfigurableBase.php
+++ b/src/Controller/ConfigurableBase.php
@@ -160,7 +160,7 @@ abstract class ConfigurableBase extends Base
$callbackResolver = $this->callbackResolver;
return function (Request $request) use ($callback, $callbackResolver) {
- if (!substr($callback, 0, 2) === '::') {
+ if (!is_string($callback) || substr($callback, 0, 2) !== '::') {
return $callbackResolver->resolveCallback($callback);
}
@@ -168,12 +168,15 @@ abstract class ConfigurableBase extends Base
if (is_array($controller)) {
list($cls, $_) = $controller;
} elseif (is_string($controller)) {
- list($cls, $_) = explode('::', $controller);
+ if (strpos($controller, '::') !== false) {
+ list($cls, $_) = explode('::', $controller);
+ } else {
+ $cls = $controller;
+ }
} else {
return null;
}
$callback = [$cls, substr($callback, 2)];
- $callback = $callbackResolver->resolveCallback($callback);
return $callback;
};
|
Fix middleware parsing for arrays, strings not starting with '::', and for class names that have __invoke method.
|
bolt_bolt
|
train
|
cf16ccb24e2f244b6202f7b6e48dd287f58b7df7
|
diff --git a/controls/radio-image/class-kirki-control-radio-image.php b/controls/radio-image/class-kirki-control-radio-image.php
index <HASH>..<HASH> 100644
--- a/controls/radio-image/class-kirki-control-radio-image.php
+++ b/controls/radio-image/class-kirki-control-radio-image.php
@@ -95,8 +95,13 @@ class Kirki_Control_Radio_Image extends WP_Customize_Control {
}
$this->json['inputAttrs'] = '';
+ $this->json['labelStyle'] = '';
foreach ( $this->input_attrs as $attr => $value ) {
- $this->json['inputAttrs'] .= $attr . '="' . esc_attr( $value ) . '" ';
+ if ( 'style' !== $attr ) {
+ $this->json['inputAttrs'] .= $attr . '="' . esc_attr( $value ) . '" ';
+ } else {
+ $this->json['labelStyle'] = 'style="' . esc_attr( $value ) . '" ';
+ }
}
}
@@ -124,7 +129,7 @@ class Kirki_Control_Radio_Image extends WP_Customize_Control {
<div id="input_{{ data.id }}" class="image">
<# for ( key in data.choices ) { #>
<input {{{ data.inputAttrs }}} class="image-select" type="radio" value="{{ key }}" name="_customize-radio-{{ data.id }}" id="{{ data.id }}{{ key }}" {{{ data.link }}}<# if ( data.value === key ) { #> checked="checked"<# } #>>
- <label for="{{ data.id }}{{ key }}">
+ <label for="{{ data.id }}{{ key }}" {{{ data.labelStyle }}}>
<img src="{{ data.choices[ key ] }}">
<span class="image-clickable"></span>
</label>
|
Allow defining a style for the label on radio-image controls
|
aristath_kirki
|
train
|
9681cfe47d8e7413e0d8aed520666679bcfeca23
|
diff --git a/src/commands.js b/src/commands.js
index <HASH>..<HASH> 100644
--- a/src/commands.js
+++ b/src/commands.js
@@ -34,14 +34,18 @@ commands.insertHardBreak = pm => {
return pm.apply({name: "insertInline", pos: pos, type: "hard_break"})
}
-commands.makeStrong = pm => pm.setInlineStyle(style.strong, true)
-commands.removeStrong = pm => pm.setInlineStyle(style.strong, false)
+commands.setStrong = pm => pm.setInlineStyle(style.strong, true)
+commands.unsetStrong = pm => pm.setInlineStyle(style.strong, false)
commands.toggleStrong = pm => pm.setInlineStyle(style.strong, null)
-commands.makeEm = pm => pm.setInlineStyle(style.em, true)
-commands.removeEm = pm => pm.setInlineStyle(style.em, false)
+commands.setEm = pm => pm.setInlineStyle(style.em, true)
+commands.unsetEm = pm => pm.setInlineStyle(style.em, false)
commands.toggleEm = pm => pm.setInlineStyle(style.em, null)
+commands.setCode = pm => pm.setInlineStyle(style.code, true)
+commands.unsetCode = pm => pm.setInlineStyle(style.code, false)
+commands.toggleCode = pm => pm.setInlineStyle(style.code, null)
+
function blockBefore(pos) {
for (let i = pos.path.length - 1; i >= 0; i--) {
let offset = pos.path[i] - 1
diff --git a/src/defaultkeymap.js b/src/defaultkeymap.js
index <HASH>..<HASH> 100644
--- a/src/defaultkeymap.js
+++ b/src/defaultkeymap.js
@@ -5,6 +5,7 @@ export default normalizeKeymap({
"Ctrl-Enter": "insertHardBreak",
"Ctrl-B": "toggleStrong",
"Ctrl-I": "toggleEm",
+ "Ctrl-`": "toggleCode",
"Backspace": "delBackward",
"Delete": "delForward",
"Ctrl-Z": "undo",
@@ -22,7 +23,7 @@ export default normalizeKeymap({
"Ctrl-H '5'": "makeH5",
"Ctrl-H '6'": "makeH6",
"Ctrl-P": "makeParagraph",
- "Ctrl-`": "makeCodeBlock",
+ "Ctrl-\\": "makeCodeBlock",
"Ctrl-Space": "insertRule"
})
diff --git a/src/input.js b/src/input.js
index <HASH>..<HASH> 100644
--- a/src/input.js
+++ b/src/input.js
@@ -35,7 +35,6 @@ export class Input {
}
storeInlineStyle(styles) {
- console.log("storing", styles)
this.storedStyles = styles
this.storedStylesAt = this.pm.markState(true)
}
|
Add commands for code-styling inline text
|
ProseMirror_prosemirror-markdown
|
train
|
4fc7c3a3dd2b3bd14dd52a200f4950cae88f777c
|
diff --git a/src/Contao/View/Contao2BackendView/Widget/FileTree.php b/src/Contao/View/Contao2BackendView/Widget/FileTree.php
index <HASH>..<HASH> 100644
--- a/src/Contao/View/Contao2BackendView/Widget/FileTree.php
+++ b/src/Contao/View/Contao2BackendView/Widget/FileTree.php
@@ -243,7 +243,7 @@ class FileTree extends AbstractWidget
$this->orderId = $this->orderField . \str_replace($this->strField, '', $this->strId);
$this->orderFieldValue = (!empty($value) && \is_array($value)) ? \array_filter($value) : [];
- $this->rootDir = System::getContainer()->getParameter('kernel.root_dir');
+ $this->rootDir = \dirname(System::getContainer()->getParameter('kernel.root_dir'));
}
/**
|
I had the app directory instead of the root directory
|
contao-community-alliance_dc-general
|
train
|
ed3f86087dd5d648a0397e18bdfa645797ea0419
|
diff --git a/findbugs/src/java/edu/umd/cs/findbugs/BugInstance.java b/findbugs/src/java/edu/umd/cs/findbugs/BugInstance.java
index <HASH>..<HASH> 100644
--- a/findbugs/src/java/edu/umd/cs/findbugs/BugInstance.java
+++ b/findbugs/src/java/edu/umd/cs/findbugs/BugInstance.java
@@ -185,7 +185,7 @@ public class BugInstance implements Comparable<BugInstance>, XMLWriteable, Seria
BugPattern p = DetectorFactoryCollection.instance().lookupBugPattern(type);
if (p == null) {
- if (missingBugTypes.add(type)) {
+ if (!"MISSING".equals(type) && missingBugTypes.add(type)) {
String msg = "Can't find definition of bug type " + type;
AnalysisContext.logError(msg, new IllegalArgumentException(msg));
}
|
Don't complain about do being able to find bug type MISSING
git-svn-id: <URL>
|
spotbugs_spotbugs
|
train
|
07aaa3f99d2062ee1a35d013eb91cb3efbcedfaa
|
diff --git a/src/Psalm/Internal/Codebase/Populator.php b/src/Psalm/Internal/Codebase/Populator.php
index <HASH>..<HASH> 100644
--- a/src/Psalm/Internal/Codebase/Populator.php
+++ b/src/Psalm/Internal/Codebase/Populator.php
@@ -444,8 +444,8 @@ class Populator
$this->populateClassLikeStorage($mixin_storage, $dependent_classlikes);
- $this->inheritMethodsFromParent($storage, $mixin_storage);
- $this->inheritPropertiesFromParent($storage, $mixin_storage, false);
+ $this->inheritMethodsFromParent($storage, $mixin_storage, true);
+ $this->inheritPropertiesFromParent($storage, $mixin_storage, true);
}
private static function extendType(
@@ -1043,7 +1043,8 @@ class Populator
*/
protected function inheritMethodsFromParent(
ClassLikeStorage $storage,
- ClassLikeStorage $parent_storage
+ ClassLikeStorage $parent_storage,
+ bool $is_mixin = false
) {
$fq_class_name = $storage->name;
@@ -1090,6 +1091,10 @@ class Populator
// register where they're declared
foreach ($parent_storage->inheritable_method_ids as $method_name => $declaring_method_id) {
+ if ($is_mixin && isset($storage->declaring_method_ids[$method_name])) {
+ continue;
+ }
+
if ($method_name !== '__construct') {
if ($parent_storage->is_trait) {
$declaring_class = explode('::', $declaring_method_id)[0];
@@ -1152,7 +1157,7 @@ class Populator
private function inheritPropertiesFromParent(
ClassLikeStorage $storage,
ClassLikeStorage $parent_storage,
- bool $include_protected = true
+ bool $is_mixin = false
) {
// register where they appear (can never be in a trait)
foreach ($parent_storage->appearing_property_ids as $property_name => $appearing_property_id) {
@@ -1163,7 +1168,7 @@ class Populator
if (!$parent_storage->is_trait
&& isset($parent_storage->properties[$property_name])
&& ($parent_storage->properties[$property_name]->visibility === ClassLikeAnalyzer::VISIBILITY_PRIVATE
- || (!$include_protected
+ || ($is_mixin
&& $parent_storage->properties[$property_name]->visibility
=== ClassLikeAnalyzer::VISIBILITY_PROTECTED))
) {
@@ -1185,7 +1190,7 @@ class Populator
if (!$parent_storage->is_trait
&& isset($parent_storage->properties[$property_name])
&& ($parent_storage->properties[$property_name]->visibility === ClassLikeAnalyzer::VISIBILITY_PRIVATE
- || (!$include_protected
+ || ($is_mixin
&& $parent_storage->properties[$property_name]->visibility
=== ClassLikeAnalyzer::VISIBILITY_PROTECTED))
) {
@@ -1200,7 +1205,7 @@ class Populator
if (!$parent_storage->is_trait
&& isset($parent_storage->properties[$property_name])
&& ($parent_storage->properties[$property_name]->visibility === ClassLikeAnalyzer::VISIBILITY_PRIVATE
- || (!$include_protected
+ || ($is_mixin
&& $parent_storage->properties[$property_name]->visibility
=== ClassLikeAnalyzer::VISIBILITY_PROTECTED))
) {
diff --git a/tests/MixinAnnotationTest.php b/tests/MixinAnnotationTest.php
index <HASH>..<HASH> 100644
--- a/tests/MixinAnnotationTest.php
+++ b/tests/MixinAnnotationTest.php
@@ -53,6 +53,8 @@ class MixinAnnotationTest extends TestCase
$this->b = new B();
}
+ public function c(string $s) : void {}
+
/**
* @param array<mixed> $arguments
* @return mixed
@@ -67,6 +69,8 @@ class MixinAnnotationTest extends TestCase
public function b(): void {
echo "b";
}
+
+ public function c(int $s) : void {}
}
$a = new A();
|
Don’t complain about method mismatches for @mixin
|
vimeo_psalm
|
train
|
740cf3a600ee73baffb3ce0c914f4388d7c8101d
|
diff --git a/ofp/v0x01/common/port.py b/ofp/v0x01/common/port.py
index <HASH>..<HASH> 100644
--- a/ofp/v0x01/common/port.py
+++ b/ofp/v0x01/common/port.py
@@ -148,7 +148,7 @@ class OFPPortFeatures(enum.Enum):
# Classes (Structs)
-class OFPPort(base.GenericStruct):
+class Port(base.GenericStruct):
"""
Description of a physical port.
|
Renamed class OFPPort to Port, according to our convention
|
kytos_python-openflow
|
train
|
213fcacf4d634896d978e049f381767d65c90fa0
|
diff --git a/shared/desktop/webpack.config.babel.js b/shared/desktop/webpack.config.babel.js
index <HASH>..<HASH> 100644
--- a/shared/desktop/webpack.config.babel.js
+++ b/shared/desktop/webpack.config.babel.js
@@ -26,6 +26,7 @@ const config = (_, {mode}) => {
loader: 'babel-loader',
options: {
cacheDirectory: true,
+ ignore: [/\.(native|ios|android)\.js$/],
plugins: [...(isHot ? ['react-hot-loader/babel'] : [])],
presets: [['@babel/preset-env', {debug: false, modules: false, targets: {electron: '1.8.4'}}]],
},
|
ignore native files on webpack so errors are easier to understand (#<I>)
|
keybase_client
|
train
|
2f24d02a02b44f32e8ce0ba32cf396b3a79e037c
|
diff --git a/features/s3/step_definitions.rb b/features/s3/step_definitions.rb
index <HASH>..<HASH> 100644
--- a/features/s3/step_definitions.rb
+++ b/features/s3/step_definitions.rb
@@ -31,11 +31,11 @@ def create_bucket(options = {})
end
When(/^I force path style requests$/) do
- @client = Aws.s3(force_path_style:true)
+ @client = Aws::S3::Client.new(force_path_style: true)
end
Given(/^I am using the S3 "(.*?)" region$/) do |region|
- @client = Aws.s3(region: region)
+ @client = Aws::S3::Client.new(region: region)
end
When(/^I create a bucket$/) do
|
Fixed a few more deprecation warnings in the integration tests for S3.
|
aws_aws-sdk-ruby
|
train
|
df0f37ce1132a9310ef65ebcf45a3fccb16d3fe3
|
diff --git a/lib/mongoid/multi_parameter_attributes.rb b/lib/mongoid/multi_parameter_attributes.rb
index <HASH>..<HASH> 100644
--- a/lib/mongoid/multi_parameter_attributes.rb
+++ b/lib/mongoid/multi_parameter_attributes.rb
@@ -51,7 +51,7 @@ module Mongoid
def process_attributes(attrs = nil, role = :default, guard_protected_attributes = true)
if attrs
errors = []
- attributes = {}
+ attributes = attrs.class.new
multi_parameter_attributes = {}
attrs.each_pair do |key, value|
@@ -79,7 +79,7 @@ module Mongoid
raise Errors::MultiparameterAssignmentErrors.new(errors),
"#{errors.size} error(s) on assignment of multiparameter attributes"
end
-
+
super attributes, role, guard_protected_attributes
else
super
|
made attributes class same as attrs
|
mongodb_mongoid
|
train
|
f875c794d0763efa9ce270a51c743acacb82ed57
|
diff --git a/lib/hiki2latex.rb b/lib/hiki2latex.rb
index <HASH>..<HASH> 100644
--- a/lib/hiki2latex.rb
+++ b/lib/hiki2latex.rb
@@ -41,7 +41,7 @@ module Hiki2latex
def plain_doc(file)
if @listings==true then
- puts listings_str
+ puts listings_preamble
elsif @pre==nil then
puts "\\documentclass[12pt,a4paper]{jsarticle}"
puts "\\usepackage[dvipdfmx]{graphicx}"
@@ -50,16 +50,46 @@ module Hiki2latex
end
puts "\\begin{document}"
puts File.read(@head) if @head!=nil
- puts HikiDoc.to_latex(File.read(file),{:listings=>@listings})
+ plain_tex = HikiDoc.to_latex(File.read(file),{:listings=>@listings})
+ puts mod_abstract(plain_tex)
puts File.read(@post) if @post!=nil
puts "\\end{document}"
end
def bare_doc(file)
- puts HikiDoc.to_latex(File.read(file),{:level=>@level,:listings=>@listings})
+ bare_doc = HikiDoc.to_latex(File.read(file),{:level=>@level,:listings=>@listings})
+ puts kill_head_tableofcontents(bare_doc)
end
- def listings_str
+ def kill_head_tableofcontents(text)
+ text.gsub!(/^\\tableofcontents/,'')
+ end
+
+ def mod_abstract(text)
+ abstract = []
+ content = ""
+ section = []
+ text.split("\n").each do |line|
+ case line
+ when /\\section(.+)/
+ section.push $1
+ end
+
+ case section[-1]
+ when /.+abstract.+/
+ abstract << line+"\n"
+ else
+ content << line+"\n"
+ end
+ end
+ abstract.delete_at(0)
+ content.gsub!(/\\tableofcontents/){|text|
+ tt="\n\\abstract\{\n#{abstract.join}\}\n\\tableofcontents"
+ }
+ return content
+ end
+
+ def listings_preamble
str = <<"EOS"
\\documentclass[12pt,a4paper]{jsarticle}
\\usepackage[dvipdfmx]{graphicx}
@@ -91,6 +121,14 @@ module Hiki2latex
language={ruby},
numbers=left,
}
+\\lstdefinestyle{customTex}{
+ language={tex},
+ numbers=none,
+}
+\\lstdefinestyle{customJava}{
+ language={java},
+ numbers=left,
+}
EOS
end
diff --git a/lib/hiki2latex/hiki2latex.rb b/lib/hiki2latex/hiki2latex.rb
index <HASH>..<HASH> 100755
--- a/lib/hiki2latex/hiki2latex.rb
+++ b/lib/hiki2latex/hiki2latex.rb
@@ -50,7 +50,7 @@ class LatexOutput
def headline(level, title)
title = escape_snake_names(title)
tmp=title.split(/:/)
- if tmp.size!=1 then
+ if tmp.size==2 then
case tmp[0]
when 'title','author','date','abstract'
@head << "\\#{tmp[0]}\{#{tmp[1]}\}\n"
@@ -62,6 +62,7 @@ class LatexOutput
@f << "\\section*\{#{tmp[1]}\}\n"
else
@f << "\\#{tmp[0]}\{#{tmp[1]}\}\n"
+# @f << "#{title}\n"
end
return
end
diff --git a/lib/hiki2latex/version.rb b/lib/hiki2latex/version.rb
index <HASH>..<HASH> 100644
--- a/lib/hiki2latex/version.rb
+++ b/lib/hiki2latex/version.rb
@@ -1,3 +1,3 @@
module Hiki2latex
- VERSION = "0.9.7"
+ VERSION = "0.9.8"
end
|
put mod_abstract for plain_tex, and kill_head_tableofcontents for bare_doc.
|
daddygongon_hiki2latex
|
train
|
2bf06c149ca10020b59f7286029c8b8f7ab737ec
|
diff --git a/structr-neo4j-bolt-driver/src/main/java/org/structr/bolt/BoltDatabaseService.java b/structr-neo4j-bolt-driver/src/main/java/org/structr/bolt/BoltDatabaseService.java
index <HASH>..<HASH> 100644
--- a/structr-neo4j-bolt-driver/src/main/java/org/structr/bolt/BoltDatabaseService.java
+++ b/structr-neo4j-bolt-driver/src/main/java/org/structr/bolt/BoltDatabaseService.java
@@ -231,7 +231,9 @@ public class BoltDatabaseService extends AbstractDatabaseService implements Grap
sessions.set(session);
} catch (ServiceUnavailableException ex) {
- throw new NetworkException(ex.getMessage(), ex);
+
+ logger.warn("ServiceUnavailableException in BoltDataBaseService.beginTx(). Retrying with timeout.");
+ return beginTx(1);
} catch (ClientException cex) {
logger.warn("Cannot connect to Neo4j database server at {}: {}", databaseUrl, cex.getMessage());
}
|
Adds a retry with timeout to beginTx for the BoltDatabaseService.
|
structr_structr
|
train
|
b9a9abe10d569f15755bf8a6833adc9f425a2b2d
|
diff --git a/searx/engines/gigablast.py b/searx/engines/gigablast.py
index <HASH>..<HASH> 100644
--- a/searx/engines/gigablast.py
+++ b/searx/engines/gigablast.py
@@ -19,11 +19,21 @@ from time import time
# engine dependent config
categories = ['general']
paging = True
-number_of_results = 5
+number_of_results = 10
+language_support = True
+safesearch = True
-# search-url, invalid HTTPS certificate
+# search-url
base_url = 'https://gigablast.com/'
-search_string = 'search?{query}&n={number_of_results}&s={offset}&format=xml&qh=0&rxiyd={rxiyd}&rand={rand}'
+search_string = 'search?{query}'\
+ '&n={number_of_results}'\
+ '&s={offset}'\
+ '&format=xml'\
+ '&qh=0'\
+ '&rxiyd={rxiyd}'\
+ '&rand={rand}'\
+ '&qlang={lang}'\
+ '&ff={safesearch}'
# specific xpath variables
results_xpath = '//response//result'
@@ -36,12 +46,23 @@ content_xpath = './/sum'
def request(query, params):
offset = (params['pageno'] - 1) * number_of_results
- search_path = search_string.format(
- query=urlencode({'q': query}),
- offset=offset,
- number_of_results=number_of_results,
- rxiyd=randint(10000, 10000000),
- rand=int(time()))
+ if params['language'] == 'all':
+ language = 'xx'
+ else:
+ language = params['language'][0:2]
+
+ if params['safesearch'] >= 1:
+ safesearch = 1
+ else:
+ safesearch = 0
+
+ search_path = search_string.format(query=urlencode({'q': query}),
+ offset=offset,
+ number_of_results=number_of_results,
+ rxiyd=randint(10000, 10000000),
+ rand=int(time()),
+ lang=language,
+ safesearch=safesearch)
params['url'] = base_url + search_path
diff --git a/searx/tests/engines/test_gigablast.py b/searx/tests/engines/test_gigablast.py
index <HASH>..<HASH> 100644
--- a/searx/tests/engines/test_gigablast.py
+++ b/searx/tests/engines/test_gigablast.py
@@ -10,6 +10,7 @@ class TestGigablastEngine(SearxTestCase):
query = 'test_query'
dicto = defaultdict(dict)
dicto['pageno'] = 0
+ dicto['language'] = 'all'
params = gigablast.request(query, dicto)
self.assertTrue('url' in params)
self.assertTrue(query in params['url'])
|
[enh] improve gigablast engine
add language and safesearch support
|
asciimoo_searx
|
train
|
a7f079d5b950404970dc2bd98cc68de823b10753
|
diff --git a/client/allocrunner/taskrunner/task_runner.go b/client/allocrunner/taskrunner/task_runner.go
index <HASH>..<HASH> 100644
--- a/client/allocrunner/taskrunner/task_runner.go
+++ b/client/allocrunner/taskrunner/task_runner.go
@@ -999,6 +999,11 @@ func (tr *TaskRunner) buildTaskConfig() *drivers.TaskConfig {
memoryLimit = max
}
+ cpusetCpus := make([]string, len(taskResources.Cpu.ReservedCores))
+ for i, v := range taskResources.Cpu.ReservedCores {
+ cpusetCpus[i] = fmt.Sprintf("%d", v)
+ }
+
return &drivers.TaskConfig{
ID: fmt.Sprintf("%s/%s/%s", alloc.ID, task.Name, invocationid),
Name: task.Name,
@@ -1013,6 +1018,7 @@ func (tr *TaskRunner) buildTaskConfig() *drivers.TaskConfig {
LinuxResources: &drivers.LinuxResources{
MemoryLimitBytes: memoryLimit * 1024 * 1024,
CPUShares: taskResources.Cpu.CpuShares,
+ CpusetCpus: strings.Join(cpusetCpus, ","),
PercentTicks: float64(taskResources.Cpu.CpuShares) / float64(tr.clientConfig.Node.NodeResources.Cpu.CpuShares),
},
Ports: &ports,
|
tr: set cpuset cpus if reserved
|
hashicorp_nomad
|
train
|
09205f9e8e62a9d8af36812665009e5fcce9e175
|
diff --git a/salt/modules/state.py b/salt/modules/state.py
index <HASH>..<HASH> 100644
--- a/salt/modules/state.py
+++ b/salt/modules/state.py
@@ -293,6 +293,7 @@ def request(mods=None,
salt '*' state.request test
salt '*' state.request test,pkgs
'''
+ kwargs['test'] = True
ret = apply_(mods, **kwargs)
notify_path = os.path.join(__opts__['cachedir'], 'req_state.p')
serial = salt.payload.Serial(__opts__)
@@ -400,6 +401,8 @@ def run_request(name='default', **kwargs):
if 'mods' not in n_req or 'kwargs' not in n_req:
return {}
req[name]['kwargs'].update(kwargs)
+ if 'test' in n_req['kwargs']:
+ n_req['kwargs'].pop('test')
if req:
ret = apply_(n_req['mods'], **n_req['kwargs'])
try:
|
Fix error in state.request
We were not running the initial request in test=True.
This changes things so we now run the state in testing mode when it is requested and then actually apply it when it is run.
Partially reverts #<I>
|
saltstack_salt
|
train
|
d707222b9e7ec5dcd51992037809d22ee241d316
|
diff --git a/javamelody-core/src/main/java/net/bull/javamelody/JsfActionHelper.java b/javamelody-core/src/main/java/net/bull/javamelody/JsfActionHelper.java
index <HASH>..<HASH> 100644
--- a/javamelody-core/src/main/java/net/bull/javamelody/JsfActionHelper.java
+++ b/javamelody-core/src/main/java/net/bull/javamelody/JsfActionHelper.java
@@ -19,6 +19,7 @@
package net.bull.javamelody;
import javax.faces.context.FacesContext;
+import javax.faces.event.ActionListener;
/**
* Helper pour l'ActionListener JSF RI (Mojarra).
@@ -43,13 +44,15 @@ final class JsfActionHelper {
// <application><action-listener>net.bull.javamelody.JsfActionListener</action-listener></application>
// et on ne peut pas avoir un fichier META-INF/faces-config.xml dans le jar de javamelody avec cet action-listener
// car dans Apache MyFaces, cela ferait certainement une ClassNotFoundException rendant javamelody inutilisable
- final JsfActionListener jsfActionListener = new JsfActionListener();
+ final ActionListener delegateActionListener = facesContext.getApplication()
+ .getActionListener();
+ final JsfActionListener jsfActionListener = new JsfActionListener(
+ delegateActionListener);
facesContext.getApplication().setActionListener(jsfActionListener);
}
} catch (final Exception e) {
// issue 204: initialisation du JsfActionListener échouée, tant pis, il n'y aura pas les statistiques pour JSF
LOG.info("initialization of jsf action listener failed, skipping", e);
}
-
}
}
diff --git a/javamelody-core/src/main/java/net/bull/javamelody/JsfActionListener.java b/javamelody-core/src/main/java/net/bull/javamelody/JsfActionListener.java
index <HASH>..<HASH> 100644
--- a/javamelody-core/src/main/java/net/bull/javamelody/JsfActionListener.java
+++ b/javamelody-core/src/main/java/net/bull/javamelody/JsfActionListener.java
@@ -20,37 +20,38 @@ package net.bull.javamelody;
import javax.faces.component.ActionSource2;
import javax.faces.event.ActionEvent;
-
-import com.sun.faces.application.ActionListenerImpl;
+import javax.faces.event.ActionListener;
/**
* ActionListener JSF RI (Mojarra) pour avoir les temps moyens des actions JSF.
* @author Emeric Vernat
*/
-public class JsfActionListener extends ActionListenerImpl {
+public class JsfActionListener implements ActionListener {
private static final Counter JSF_COUNTER = MonitoringProxy.getJsfCounter();
private static final boolean COUNTER_HIDDEN = Parameters.isCounterHidden(JSF_COUNTER.getName());
private static final boolean DISABLED = Boolean.parseBoolean(Parameters
.getParameter(Parameter.DISABLED));
+ private final ActionListener delegateActionListener;
/**
* Constructeur.
+ * @param delegateActionListener ActionListener
*/
- public JsfActionListener() {
+ public JsfActionListener(ActionListener delegateActionListener) {
super();
// quand cet ActionListener est utilisé, le compteur est affiché
// sauf si le paramètre displayed-counters dit le contraire
JSF_COUNTER.setDisplayed(!COUNTER_HIDDEN);
JSF_COUNTER.setUsed(true);
LOG.debug("jsf action listener initialized");
+ this.delegateActionListener = delegateActionListener;
}
/** {@inheritDoc} */
- @Override
public void processAction(ActionEvent event) { // throws FacesException
// cette méthode est appelée par JSF RI (Mojarra)
if (DISABLED || !JSF_COUNTER.isDisplayed()) {
- super.processAction(event);
+ delegateActionListener.processAction(event);
}
boolean systemError = false;
@@ -59,7 +60,7 @@ public class JsfActionListener extends ActionListenerImpl {
JSF_COUNTER.bindContextIncludingCpu(actionName);
- super.processAction(event);
+ delegateActionListener.processAction(event);
} catch (final Error e) {
// on catche Error pour avoir les erreurs systèmes
|
fix for issue <I> and issue <I>: delegate the JSF ActionListener to the next JSF ActionListener if one was defined in the faces-config.xml file
|
javamelody_javamelody
|
train
|
0594046ea04be724556fcaa12d76534fbc099a42
|
diff --git a/lib/chef/mixin/powershell_out.rb b/lib/chef/mixin/powershell_out.rb
index <HASH>..<HASH> 100644
--- a/lib/chef/mixin/powershell_out.rb
+++ b/lib/chef/mixin/powershell_out.rb
@@ -85,7 +85,7 @@ class Chef
"-NoProfile",
# always set the ExecutionPolicy flag
# see http://technet.microsoft.com/en-us/library/ee176961.aspx
- "-ExecutionPolicy RemoteSigned",
+ "-ExecutionPolicy Unrestricted",
# Powershell will hang if STDIN is redirected
# http://connect.microsoft.com/PowerShell/feedback/details/572313/powershell-exe-can-hang-if-stdin-is-redirected
"-InputFormat None"
diff --git a/spec/unit/mixin/powershell_out_spec.rb b/spec/unit/mixin/powershell_out_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/mixin/powershell_out_spec.rb
+++ b/spec/unit/mixin/powershell_out_spec.rb
@@ -23,7 +23,7 @@ describe Chef::Mixin::PowershellOut do
subject(:object) { shell_out_class.new }
let(:architecture) { "something" }
let(:flags) {
- "-NoLogo -NonInteractive -NoProfile -ExecutionPolicy RemoteSigned -InputFormat None"
+ "-NoLogo -NonInteractive -NoProfile -ExecutionPolicy Unrestricted -InputFormat None"
}
describe "#powershell_out" do
|
change executionpolicy to unrestricted
|
chef_chef
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.