Dataset columns:
hash: stringlengths, 40 to 40
diff: stringlengths, 131 to 114k
message: stringlengths, 7 to 980
project: stringlengths, 5 to 67
split: stringclasses, 1 value
e09e277771ca02991f25663e4ead87d77a6bb31a
diff --git a/gotestwin.py b/gotestwin.py index <HASH>..<HASH> 100755 --- a/gotestwin.py +++ b/gotestwin.py @@ -34,11 +34,11 @@ def main(): join(scripts, 'gotesttarfile.py'), join(scripts, 'utility.py'), ]}, - 'command': [ + 'command': [ 'python', 'ci/gotesttarfile.py', '-v', '-g', 'go.exe', '-p', args.package, '--remove', 'ci/{}'.format(tarfile) - ] - }, temp_file) + ]}, + temp_file) juju_home = os.environ.get('JUJU_HOME', join(dirname(scripts), 'cloud-city')) subprocess.check_call([ diff --git a/test_schedule_hetero_control.py b/test_schedule_hetero_control.py index <HASH>..<HASH> 100644 --- a/test_schedule_hetero_control.py +++ b/test_schedule_hetero_control.py @@ -6,7 +6,7 @@ import json import os from unittest import TestCase -from mock import patch, call +from mock import patch from jujuci import Credentials from schedule_hetero_control import ( @@ -48,41 +48,22 @@ class TestBuildJobs(TestCase): self.assertEqual(version, '1.24.3') def test_calculate_jobs(self): - find_candidates = 'schedule_hetero_control.find_candidates' with temp_dir() as root: - with temp_dir(parent=root) as old_juju_dir: - with temp_dir(parent=root) as candidates_dir: - with temp_dir(parent=old_juju_dir) as releases: - with temp_dir(parent=candidates_dir) as candidate_dir: - self.make_build_var_file(candidate_dir) - build_vars = os.path.join( - candidate_dir, 'buildvars.json') - with patch( - 'schedule_hetero_control.os.path.join', - autospec=True, - side_effect=[ - old_juju_dir, old_juju_dir, - candidates_dir, build_vars]) as g_mock: - with patch( - find_candidates, autospec=True, - return_value=[candidate_dir]) as c: - jobs = [] - for job in calculate_jobs(root): - jobs.append(job) - release_path = os.path.split(releases)[1] + release_path = os.path.join(root, 'old-juju', '1.20.11') + os.makedirs(release_path) + candidate_path = os.path.join(root, 'candidate', '1.22') + os.makedirs(candidate_path) + jobs = [] + self.make_build_var_file(candidate_path) + for job in calculate_jobs(root): + jobs.append(job) expected = [{'new_to_old': 'true', - 'old_version': release_path, - 'candidate': u'1.24.3'}, + 'old_version': '1.20.11', + 'candidate': '1.24.3'}, {'new_to_old': 'false', - 'old_version': release_path, - 'candidate': u'1.24.3'}] + 'old_version': '1.20.11', + 'candidate': '1.24.3'}] self.assertItemsEqual(jobs, expected) - calls = [call(root, 'old-juju'), - call(old_juju_dir, release_path), - call(root, 'candidate'), - call(candidate_dir, 'buildvars.json')] - self.assertEqual(g_mock.mock_calls, calls) - c.assert_called_once_with(root) def make_build_var_file(self, dir_path): build_vars = {"version": "1.24.3", "revision_build": "2870"}
Updated unit test in test_schedule_hetero_control.
juju_juju
train
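The commit above replaces deeply nested `os.path.join` and `find_candidates` mocking with a real on-disk fixture, which is why the test shrinks so much. A minimal sketch of the same pattern using the modern stdlib (`calculate_jobs` is the project's own function, imported as in the test):

```python
import json
import os
import tempfile
import unittest

from schedule_hetero_control import calculate_jobs  # the project's module, as imported in the test above


class TestCalculateJobs(unittest.TestCase):
    def test_calculate_jobs(self):
        # Build the real directory layout instead of patching os.path.join.
        with tempfile.TemporaryDirectory() as root:
            os.makedirs(os.path.join(root, 'old-juju', '1.20.11'))
            candidate = os.path.join(root, 'candidate', '1.22')
            os.makedirs(candidate)
            with open(os.path.join(candidate, 'buildvars.json'), 'w') as f:
                json.dump({'version': '1.24.3', 'revision_build': '2870'}, f)
            jobs = list(calculate_jobs(root))
        # Assert on behavior rather than on the exact sequence of mocked calls.
        self.assertEqual({job['candidate'] for job in jobs}, {'1.24.3'})
```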
dd4c5878fa6013a80ca6369fffd6903df20e3518
diff --git a/src/Connection.php b/src/Connection.php index <HASH>..<HASH> 100644 --- a/src/Connection.php +++ b/src/Connection.php @@ -267,8 +267,10 @@ class Connection if (!isset($config['simpleStorageTz'])) { $config['simpleStorageTz'] = 'UTC'; } - if (!isset($config['connectionTz'])) { + if (!isset($config['connectionTz']) || $config['connectionTz'] === IDriver::TIMEZONE_AUTO_PHP_NAME) { $config['connectionTz'] = date_default_timezone_get(); + } elseif ($config['connectionTz'] === IDriver::TIMEZONE_AUTO_PHP_OFFSET) { + $config['connectionTz'] = date('P'); } return $config; } diff --git a/src/Drivers/IDriver.php b/src/Drivers/IDriver.php index <HASH>..<HASH> 100644 --- a/src/Drivers/IDriver.php +++ b/src/Drivers/IDriver.php @@ -22,6 +22,9 @@ interface IDriver const TYPE_IDENTIFIER = 4; const TYPE_STRING = 5; + const TIMEZONE_AUTO_PHP_NAME = 'auto'; + const TIMEZONE_AUTO_PHP_OFFSET = 'auto-offset'; + /** * Connects the driver to database.
connection: allow php auto-offset for connection timezone
nextras_dbal
train
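The same sentinel-resolution logic, sketched in Python rather than PHP. The constant names mirror the diff; `time.strftime('%z')` only approximates PHP's `date('P')` (it yields `+0200` rather than `+02:00`), and `time.tzname` approximates the default timezone name:

```python
import time

TIMEZONE_AUTO_PHP_NAME = 'auto'
TIMEZONE_AUTO_PHP_OFFSET = 'auto-offset'

def normalize_connection_tz(config):
    tz = config.get('connectionTz')
    if tz is None or tz == TIMEZONE_AUTO_PHP_NAME:
        # Resolve the sentinel to the process's default timezone name.
        config['connectionTz'] = time.tzname[time.daylight]
    elif tz == TIMEZONE_AUTO_PHP_OFFSET:
        # Resolve to the current UTC offset instead of a zone name.
        config['connectionTz'] = time.strftime('%z')
    return config
```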
f092c4200900526cf50423f4e667489e3861c3b5
diff --git a/simra/peer/peer.go b/simra/peer/peer.go index <HASH>..<HASH> 100644 --- a/simra/peer/peer.go +++ b/simra/peer/peer.go @@ -66,13 +66,14 @@ var glPeer = &GLPeer{} // GLPeer represents gl context. // Singleton. type GLPeer struct { - glctx gl.Context - startTime time.Time - images *glutil.Images - fps *debug.FPS - eng sprite.Engine - scene *sprite.Node - mu sync.Mutex + glctx gl.Context + startTime time.Time + images *glutil.Images + fps *debug.FPS + eng sprite.Engine + scene *sprite.Node + mu sync.Mutex + spritecontainer SpriteContainerer } // GetGLPeer returns a instance of GLPeer. @@ -98,6 +99,8 @@ func (glpeer *GLPeer) Initialize(glctx gl.Context) { glpeer.images = glutil.NewImages(glctx) glpeer.fps = debug.NewFPS(glpeer.images) glpeer.initEng() + glpeer.spritecontainer = GetSpriteContainer() + glpeer.spritecontainer.Initialize() LogDebug("OUT") } @@ -229,7 +232,7 @@ func (glpeer *GLPeer) Finalize() { glpeer.mu.Lock() defer glpeer.mu.Unlock() - GetSpriteContainer().RemoveSprites() + glpeer.spritecontainer.Initialize() glpeer.eng.Release() glpeer.fps.Release() glpeer.images.Release() @@ -269,8 +272,9 @@ func (glpeer *GLPeer) Reset() { LogDebug("IN") glpeer.mu.Lock() defer glpeer.mu.Unlock() - GetSpriteContainer().RemoveSprites() + glpeer.spritecontainer.RemoveSprites() glpeer.initEng() + glpeer.spritecontainer.Initialize() LogDebug("OUT") }
define spritecontainer as a member of GLPeer
pankona_gomo-simra
train
d28877fb6c6c79448e4cd99c033fa477c813fb64
diff --git a/skiplist.go b/skiplist.go index <HASH>..<HASH> 100644 --- a/skiplist.go +++ b/skiplist.go @@ -309,23 +309,19 @@ func (list *SkipList) randLevel() int { func getScore(key interface{}, reversed bool) (score float64) { switch t := key.(type) { case []byte: + var result uint64 data := []byte(t) l := len(data) // only use first 8 bytes - if l > 7 { - data = data[:7] - l = 7 + if l > 8 { + l = 8 } - var result uint64 - for i := 0; i < l; i++ { - result |= uint64(data[i]) - result <<= 8 + result |= uint64(data[i]) << uint(8 * (7 - i)) } - result <<= uint(7-l) * 8 score = float64(result) case float32: @@ -350,23 +346,19 @@ func getScore(key interface{}, reversed bool) (score float64) { score = float64(t) case string: + var result uint64 data := []byte(t) l := len(data) // only use first 8 bytes - if l > 7 { - data = data[:7] - l = 7 + if l > 8 { + l = 8 } - var result uint64 - for i := 0; i < l; i++ { - result |= uint64(data[i]) - result <<= 8 + result |= uint64(data[i]) << uint(8 * (7 - i)) } - result <<= uint(7-l) * 8 score = float64(result) case uint:
fix scoring bug. thanks @glenn-brown
huandu_skiplist
train
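The fix packs the first 8 key bytes big-endian into a uint64. The old loop ORed the byte before shifting and capped the prefix at 7 bytes, so the lowest byte slot was always zero and keys differing only in the 8th byte scored equal. The corrected placement, sketched in Python (equivalent to `int.from_bytes` on the prefix, left-aligned in 8 bytes):

```python
def key_score(key: bytes) -> float:
    prefix = key[:8]  # only the first 8 bytes contribute to the score
    result = 0
    for i, b in enumerate(prefix):
        result |= b << (8 * (7 - i))  # big-endian placement, no trailing shift
    return float(result)

assert key_score(b'abc') < key_score(b'abd')  # byte order preserved
assert key_score(b'a') < key_score(b'ab')     # shorter prefix sorts first
```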
83d8dec8c7f793468117310dccb4368b76cadeab
diff --git a/src/account_call_builder.js b/src/account_call_builder.js index <HASH>..<HASH> 100644 --- a/src/account_call_builder.js +++ b/src/account_call_builder.js @@ -1,4 +1,4 @@ -import {CallBuilder} from "./callBuilder"; +import {CallBuilder} from "./call_builder"; /** * @class AccountCallBuilder diff --git a/src/effect_call_builder.js b/src/effect_call_builder.js index <HASH>..<HASH> 100644 --- a/src/effect_call_builder.js +++ b/src/effect_call_builder.js @@ -1,4 +1,4 @@ -import {CallBuilder} from "./callBuilder"; +import {CallBuilder} from "./call_builder"; /** * @class EffectCallBuilder diff --git a/src/ledger_call_builder.js b/src/ledger_call_builder.js index <HASH>..<HASH> 100644 --- a/src/ledger_call_builder.js +++ b/src/ledger_call_builder.js @@ -1,4 +1,4 @@ -import {CallBuilder} from "./callBuilder"; +import {CallBuilder} from "./call_builder"; /** * @class LedgerCallBuilder diff --git a/src/offer_call_builder.js b/src/offer_call_builder.js index <HASH>..<HASH> 100644 --- a/src/offer_call_builder.js +++ b/src/offer_call_builder.js @@ -1,5 +1,5 @@ -import {CallBuilder} from "./callBuilder"; -import {OrderbookCallBuilder} from "./orderbookCallBuilder"; +import {CallBuilder} from "./call_builder"; +import {OrderbookCallBuilder} from "./orderbook_call_builder"; import {BadRequestError} from "./errors"; let URI = require("URIjs"); diff --git a/src/operation_call_builder.js b/src/operation_call_builder.js index <HASH>..<HASH> 100644 --- a/src/operation_call_builder.js +++ b/src/operation_call_builder.js @@ -1,4 +1,4 @@ -import {CallBuilder} from "./callBuilder"; +import {CallBuilder} from "./call_builder"; /** * @class OperationCallBuilder diff --git a/src/orderbook_call_builder.js b/src/orderbook_call_builder.js index <HASH>..<HASH> 100644 --- a/src/orderbook_call_builder.js +++ b/src/orderbook_call_builder.js @@ -1,4 +1,4 @@ -import {CallBuilder} from "./callBuilder"; +import {CallBuilder} from "./call_builder"; /** * @class OrderbookCallBuilder diff --git a/src/payment_call_builder.js b/src/payment_call_builder.js index <HASH>..<HASH> 100644 --- a/src/payment_call_builder.js +++ b/src/payment_call_builder.js @@ -1,4 +1,4 @@ -import {CallBuilder} from "./callBuilder"; +import {CallBuilder} from "./call_builder"; /** * @class PaymentCallBuilder diff --git a/src/server.js b/src/server.js index <HASH>..<HASH> 100644 --- a/src/server.js +++ b/src/server.js @@ -1,14 +1,14 @@ import {TransactionResult} from "./transaction_result"; import {NotFoundError, NetworkError, BadRequestError} from "./errors"; -import {AccountCallBuilder} from "./accountCallBuilder"; -import {LedgerCallBuilder} from "./ledgerCallBuilder"; -import {TransactionCallBuilder} from "./transactionCallBuilder"; -import {OperationCallBuilder} from "./operationCallBuilder"; -import {OfferCallBuilder} from "./offerCallBuilder"; -import {OrderbookCallBuilder} from "./orderbookCallBuilder"; -import {PaymentCallBuilder} from "./paymentCallBuilder"; -import {EffectCallBuilder} from "./effectCallBuilder"; +import {AccountCallBuilder} from "./account_call_builder"; +import {LedgerCallBuilder} from "./ledger_call_builder"; +import {TransactionCallBuilder} from "./transaction_call_builder"; +import {OperationCallBuilder} from "./operation_call_builder"; +import {OfferCallBuilder} from "./offer_call_builder"; +import {OrderbookCallBuilder} from "./orderbook_call_builder"; +import {PaymentCallBuilder} from "./payment_call_builder"; +import {EffectCallBuilder} from "./effect_call_builder"; import {xdr, Account} from "stellar-base"; let axios = require("axios"); diff --git a/src/transaction_call_builder.js b/src/transaction_call_builder.js index <HASH>..<HASH> 100644 --- a/src/transaction_call_builder.js +++ b/src/transaction_call_builder.js @@ -1,4 +1,4 @@ -import {CallBuilder} from "./callBuilder"; +import {CallBuilder} from "./call_builder"; /** * @class TransactionCallBuilder
Fix imports for new snake_cased files.
stellar_js-stellar-sdk
train
40f7185247b82bc2298d3515fc2262bd28f4ff86
diff --git a/migrator.go b/migrator.go index <HASH>..<HASH> 100644 --- a/migrator.go +++ b/migrator.go @@ -12,7 +12,7 @@ import ( "github.com/pkg/errors" ) -var mrx = regexp.MustCompile(`(\d+)_([^\.]+)(\.[a-z0-9]+)?\.(up|down)\.(sql|fizz)$`) +var mrx = regexp.MustCompile(`^(\d+)_([^\.]+)(\.[a-z0-9]+)?\.(up|down)\.(sql|fizz)$`) // NewMigrator returns a new "blank" migrator. It is recommended // to use something like MigrationBox or FileMigrator. A "blank"
Fix #<I>: ensure the migration file name starts with a number (#<I>) This prevents pop from grabbing temp/backup files.
gobuffalo_pop
train
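Anchoring the pattern with `^` is the whole fix: Go's regexp (like Python's `re.search`) matches anywhere in the string by default, so an editor backup file whose name merely ends in a valid migration name would still be picked up. A quick illustration of the same pitfall in Python (the backup filename is made up):

```python
import re

unanchored = re.compile(r'(\d+)_([^.]+)(\.[a-z0-9]+)?\.(up|down)\.(sql|fizz)$')
anchored = re.compile(r'^(\d+)_([^.]+)(\.[a-z0-9]+)?\.(up|down)\.(sql|fizz)$')

name = '.#20190101000000_create_users.up.sql'  # hypothetical temp/backup file
assert unanchored.search(name)    # matched: the backup file would be run as a migration
assert not anchored.search(name)  # rejected once the pattern is anchored at the start
```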
27809e15058c9c84088d481cf5b22ad62bf0a091
diff --git a/y/file_dsync.go b/y/file_dsync.go index <HASH>..<HASH> 100644 --- a/y/file_dsync.go +++ b/y/file_dsync.go @@ -1,4 +1,4 @@ -// +build !dragonfly,!freebsd,!windows +// +build !dragonfly,!freebsd,!windows,!darwin /* * Copyright 2017 Dgraph Labs, Inc. and Contributors diff --git a/y/file_nodsync.go b/y/file_nodsync.go index <HASH>..<HASH> 100644 --- a/y/file_nodsync.go +++ b/y/file_nodsync.go @@ -1,4 +1,4 @@ -// +build dragonfly freebsd windows +// +build dragonfly freebsd windows darwin /* * Copyright 2017 Dgraph Labs, Inc. and Contributors
Tweak build flags as per #<I>
dgraph-io_badger
train
b81ce912e156795b04ac6e9a664764f9d331906c
diff --git a/src/Keboola/Json/Parser.php b/src/Keboola/Json/Parser.php index <HASH>..<HASH> 100755 --- a/src/Keboola/Json/Parser.php +++ b/src/Keboola/Json/Parser.php @@ -1,4 +1,20 @@ <?php +/** + * JSON to CSV data analyzer and parser/converter + * + * Use to convert JSON data into CSV file(s). + * Creates multiple files if the JSON contains arrays + * to store values of child nodes in a separate table, + * linked by JSON_parentId column. + * + * @author Ondrej Vana (kachna@keboola.com) + * @package keboola/json-parser + * @copyright Copyright (c) 2014 Keboola Data Services (www.keboola.com) + * @license GPL-3.0 + * @link https://github.com/keboola/php-jsonparser + * + * @TODO Ensure the column&table name don't exceed MySQL limits + */ namespace Keboola\Json; @@ -7,9 +23,6 @@ use Keboola\Temp\Temp; use Monolog\Logger; use Keboola\Json\Exception\JsonParserException as Exception; -/** - * @TODO Ensure the column&table name don't exceed MySQL limits - */ class Parser { protected $struct; protected $headers = array(); @@ -27,6 +40,16 @@ class Parser { protected $log; /** @var Temp */ protected $temp; + /** + * @var array + * Mapping of types that can be "upgraded" + */ + protected $typeUpgrades = array( + array( + "slave" => "integer", + "master" => "double" + ) + ); public function __construct(Logger $logger, array $struct = array(), $analyzeRows = 500) { @@ -299,11 +322,31 @@ class Parser { if (empty($this->struct[$type][$diffKey]) || $this->struct[$type][$diffKey] == "NULL") { // Assign if the field is new $this->struct[$type][$diffKey] = $struct[$diffKey]; + } elseif ( + $struct[$diffKey] == "NULL" + || $struct[$diffKey] == $this->struct[$type][$diffKey] + || in_array(array( + "slave" => $struct[$diffKey], + "master" => $this->struct[$type][$diffKey] + ), $this->typeUpgrades) + ) { + // If new type is null, unchanged, or the master of a master-slave pair, + // do nothing and keep the originally stored type! + } elseif (in_array(array( + "slave" => $this->struct[$type][$diffKey], + "master" => $struct[$diffKey] + ), $this->typeUpgrades) + ) { + // When current values are in the "master-slave" array + // and the "slave" is stored, upgrade type to the "master" type + $this->struct[$type][$diffKey] = $struct[$diffKey]; } elseif ($struct[$diffKey] != "NULL") { - // If the current field type is NULL, just keep the original, otherwise throw an Exception 'cos of a type mismatch + // Throw an Exception 'cos of a type mismatch $old = json_encode($this->struct[$type][$diffKey]); $new = json_encode($struct[$diffKey]); - throw new Exception("Unhandled type change between (previous){$old} and (new){$new} in {$diffKey}"); // 500 + $e = new Exception("Unhandled type change from {$old} to {$new} in '{$type}.{$diffKey}'"); // 500 + $e->setData(array("newValue" => json_encode($row->{$diffKey}))); + throw $e; } } }
Allow an int value in a "double" field
keboola_php-jsonparser
train
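The parser keeps one inferred type per column and now widens it along declared master/slave pairs instead of failing. A compact sketch of that merge rule, assuming the same single upgrade pair (integer widens to double) as in the diff:

```python
TYPE_UPGRADES = [{'slave': 'integer', 'master': 'double'}]

def merge_type(current, new, column):
    if current in (None, 'NULL'):
        return new        # first real type wins
    if new in ('NULL', current):
        return current    # null or unchanged: keep the stored type
    if {'slave': new, 'master': current} in TYPE_UPGRADES:
        return current    # an int value in a "double" column: keep double
    if {'slave': current, 'master': new} in TYPE_UPGRADES:
        return new        # stored int column upgraded to double
    raise TypeError(f"Unhandled type change from {current!r} to {new!r} in {column!r}")

assert merge_type('double', 'integer', 'price') == 'double'
assert merge_type('integer', 'double', 'price') == 'double'
```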
02f05301180ad8277669c078a559eb0e7de594f9
diff --git a/HISTORY.rst b/HISTORY.rst index <HASH>..<HASH> 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -8,6 +8,10 @@ Changelog * Fix bug in form validation signaling required field missing on choices where the value is falsy, e.g. 0 +* Implement djangos uniqueness validation on save in forms/views.py + +* Fixed input type on url field + 4.0.0 (2016-09-14) ~~~~~~~~~~~~~~~~~~ diff --git a/lib/tri/form/__init__.py b/lib/tri/form/__init__.py index <HASH>..<HASH> 100644 --- a/lib/tri/form/__init__.py +++ b/lib/tri/form/__init__.py @@ -704,7 +704,7 @@ class Field(Frozen, FieldBase): setdefaults_path( kwargs, - input_type='email', + input_type='url', parse=url_parse, ) return Field(**kwargs) diff --git a/lib/tri/form/views.py b/lib/tri/form/views.py index <HASH>..<HASH> 100644 --- a/lib/tri/form/views.py +++ b/lib/tri/form/views.py @@ -2,6 +2,7 @@ from __future__ import unicode_literals, absolute_import import json +from django.core.exceptions import ValidationError from django.http import HttpResponse from django.http import HttpResponseRedirect from django.shortcuts import render_to_response @@ -96,21 +97,31 @@ def create_or_edit_object( model_verbose_name = kwargs.get('model_verbose_name', model._meta.verbose_name.replace('_', ' ')) if request.method == 'POST' and form.is_valid(): + if is_create: assert instance is None instance = model() - for field in form.fields: + for field in form.fields: # two phase save for creation in django, have to save main object before related stuff if not field.extra.get('django_related_field', False): form.apply_field(field=field, instance=instance) + try: + instance.validate_unique() + except ValidationError as e: + form.errors.update(set(e.messages)) + form._valid = False + + if form.is_valid(): + if is_create: # two phase save for creation in django... + instance.save() + + form.apply(instance) instance.save() - form.apply(instance) - instance.save() - kwargs['instance'] = instance - on_save(**kwargs) + kwargs['instance'] = instance + on_save(**kwargs) - return create_or_edit_object_redirect(is_create, redirect_to, request, redirect, form) + return create_or_edit_object_redirect(is_create, redirect_to, request, redirect, form) c = { 'form': form,
* Implement Django's uniqueness validation on save in forms/views.py * Fixed input type on url field
TriOptima_tri.form
train
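The view now runs Django's model-level uniqueness check before saving and folds any failure back into the form. A stripped-down sketch of that flow (a real Django model instance is assumed, and a form whose `errors` is a set that supports `update`, as in the diff):

```python
from django.core.exceptions import ValidationError

def save_checking_uniqueness(form, instance):
    try:
        instance.validate_unique()  # Django's uniqueness validation, run pre-save
    except ValidationError as e:
        form.errors.update(set(e.messages))
        return None  # leave the form invalid; nothing is written to the database
    instance.save()
    return instance
```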
e866e9682fec83461c28940507b8f956db04d3fa
diff --git a/pyaxo.py b/pyaxo.py index <HASH>..<HASH> 100644 --- a/pyaxo.py +++ b/pyaxo.py @@ -311,6 +311,8 @@ class Axolotl: HKr = '' if not self.state['HKr'] else self.state['HKr'] CKs = '' if not self.state['CKs'] else self.state['CKs'] CKr = '' if not self.state['CKr'] else self.state['CKr'] + bobs_first_message = True if self.state['bobs_first_message'] else False + mode = True if self.mode else False db = sqlite3.connect('axolotl.db') with db: cur = db.cursor() @@ -326,12 +328,16 @@ class Axolotl: NHKr TEXT, \ CKs TEXT, \ CKr TEXT, \ + DHIs_priv TEXT, \ DHIs TEXT, \ DHIr TEXT, \ + DHRs_priv TEXT, \ + DHRs TEXT, \ Ns INTEGER, \ Nr INTEGER, \ PNs INTEGER, \ - skipped_HK_MK TEXT \ + bobs_first_message INTEGER, \ + mode INTEGER \ )') cur.execute('INSERT INTO conversations ( \ my_identity, \ @@ -344,12 +350,17 @@ class Axolotl: NHKr, \ CKs, \ CKr, \ + DHIs_priv, \ DHIs, \ DHIr, \ + DHRs_priv, \ + DHRs, \ Ns, \ Nr, \ - PNs \ - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', \ + PNs, \ + bobs_first_message, \ + mode \ + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', \ ( self.state['name'], \ self.state['other_name'], \ self.mkey, \ @@ -360,9 +371,50 @@ class Axolotl: binascii.b2a_base64(self.state['NHKr']).strip(), \ binascii.b2a_base64(CKs).strip(), \ binascii.b2a_base64(CKr).strip(), \ + str(self.state['DHIs_priv']), \ str(self.state['DHIs']), \ str(self.state['DHIr']), \ + str(self.state['DHRs_priv']), \ + str(self.state['DHRs']), \ self.state['Ns'], \ self.state['Nr'], \ - self.state['PNs'] \ + self.state['PNs'], \ + self.state['bobs_first_message'], \ + mode \ )) + def loadState(self, name, other_name): + db = sqlite3.connect('axolotl.db') + + with db: + cur = db.cursor() + cur.execute('SELECT * FROM conversations') + rows = cur.fetchall() + for row in rows: + if row[1] == name and row[2] == other_name: + self.state = \ + { 'name': row[1], + 'other_name': row[2], + 'RK': binascii.a2b_base64(row[4]), + 'HKs': binascii.a2b_base64(row[5]), + 'HKr': binascii.a2b_base64(row[6]), + 'NHKs': binascii.a2b_base64(row[7]), + 'NHKr': binascii.a2b_base64(row[8]), + 'CKs': binascii.a2b_base64(row[9]), + 'CKr': binascii.a2b_base64(row[10]), + 'DHIs_priv': int(row[11]), + 'DHIs': int(row[12]), + 'DHIr': int(row[13]), + 'DHRs_priv': int(row[14]), + 'DHRs': int(row[15]), + 'Ns': row[16], + 'Nr': row[17], + 'PNs': row[18], + 'bobs_first_message': row[19] + } + self.state['bobs_first_message'] = True if self.state['bobs_first_message'] == 1 else False + self.mkey = row[3] + mode = row[20] + self.mode = True if mode == 1 else False + print "state loaded for " + self.state['name'] + " -> " + self.state['other_name'] + return # exit at first match + return False # if no matches diff --git a/test3.py b/test3.py index <HASH>..<HASH> 100755 --- a/test3.py +++ b/test3.py @@ -7,9 +7,9 @@ from pyaxo import Axolotl a = Axolotl('Alice') b = Axolotl('Bob') -# initialize their states -a.initState('Bob', b.identityPKey, b.handshakePKey, b.ratchetPKey) -b.initState('Alice', a.identityPKey, a.handshakePKey, a.ratchetPKey) +# load states +a.loadState('Alice', 'Bob') +b.loadState('Bob', 'Alice') # tell who is who if a.mode:
added loadState() method - see test3.py
rxcomm_pyaxo
train
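The new `loadState()` is the inverse of the save path: the same columns written with `INSERT` are read back by matching the `(name, other_name)` pair. A much-reduced sketch of the round trip with `sqlite3`, using a toy two-column state instead of the full Axolotl state:

```python
import sqlite3

def save_state(db, name, other_name, rk):
    with db:
        db.execute('CREATE TABLE IF NOT EXISTS conversations '
                   '(my_identity TEXT, other_identity TEXT, RK TEXT)')
        db.execute('INSERT INTO conversations VALUES (?, ?, ?)', (name, other_name, rk))

def load_state(db, name, other_name):
    for row in db.execute('SELECT * FROM conversations'):
        if row[0] == name and row[1] == other_name:
            return {'name': row[0], 'other_name': row[1], 'RK': row[2]}
    return False  # mirrors the original: falsy result when no match is found

db = sqlite3.connect(':memory:')
save_state(db, 'Alice', 'Bob', 'rootkey')
assert load_state(db, 'Alice', 'Bob')['RK'] == 'rootkey'
```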
8a5ce9c6a50f8e806fd11b50f648206cbfa9feb4
diff --git a/pkg/fs/roots.go b/pkg/fs/roots.go index <HASH>..<HASH> 100644 --- a/pkg/fs/roots.go +++ b/pkg/fs/roots.go @@ -41,7 +41,8 @@ type rootsDir struct { mu sync.Mutex // guards following lastQuery time.Time - m map[string]blob.Ref // ent name => permanode + m map[string]blob.Ref // ent name => permanode + children map[string]fuse.Node // ent name => child node } func (n *rootsDir) isRO() bool { @@ -100,6 +101,7 @@ func (n *rootsDir) Remove(req *fuse.RemoveRequest, intr fuse.Intr) fuse.Error { } delete(n.m, req.Name) + delete(n.children, req.Name) return nil } @@ -162,6 +164,8 @@ func (n *rootsDir) Rename(req *fuse.RenameRequest, newDir fuse.Node, intr fuse.I panic("Race.") } delete(n.m, req.OldName) + delete(n.children, req.OldName) + delete(n.children, req.NewName) n.m[req.NewName] = target n.mu.Unlock() @@ -179,7 +183,12 @@ func (n *rootsDir) Lookup(name string, intr fuse.Intr) (fuse.Node, fuse.Error) { if !br.Valid() { return nil, fuse.ENOENT } - var nod fuse.Node + + nod, ok := n.children[name] + if ok { + return nod, nil + } + if n.isRO() { nod = newRODir(n.fs, br, name, n.at) } else { @@ -189,6 +198,8 @@ func (n *rootsDir) Lookup(name string, intr fuse.Intr) (fuse.Node, fuse.Error) { name: name, } } + n.children[name] = nod + return nod, nil } @@ -230,6 +241,7 @@ func (n *rootsDir) condRefresh() fuse.Error { } n.m = make(map[string]blob.Ref) + n.children = make(map[string]fuse.Node) // Roots for _, wi := range rootRes.WithAttr {
fs: memoize roots children If we ever return a particular value for a root, we should continue to return the same value to provide a consistent view. camlistore.org/issue/<I> Change-Id: I7cfad<I>ad<I>b0e<I>e<I>f<I>d<I>ba<I>d<I>a6
perkeep_perkeep
train
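The fuse node cache in this commit is plain memoization keyed by entry name, invalidated on Remove/Rename and on refresh. The core of the pattern, sketched without the fuse machinery:

```python
class RootsDir:
    def __init__(self, make_node):
        self._make_node = make_node  # factory for new child nodes
        self.children = {}           # ent name => child node

    def lookup(self, name):
        node = self.children.get(name)
        if node is None:
            node = self._make_node(name)
            self.children[name] = node  # same node on every lookup => consistent view
        return node

    def remove(self, name):
        self.children.pop(name, None)  # drop the memo when the entry goes away
```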
28e0ee26675ac97c877dbe2e24a5d0f1dd0f3854
diff --git a/lib/undo/config.rb b/lib/undo/config.rb index <HASH>..<HASH> 100644 --- a/lib/undo/config.rb +++ b/lib/undo/config.rb @@ -8,7 +8,7 @@ module Undo attribute :storage, Object, default: ->(config, _) { Undo::Storage::MemoryAdapter.new } attribute :mutator_methods, Array[Symbol], default: [:update, :delete, :destroy] attribute :serializer, Object, default: nil - attribute :uuid_generator, Proc, default: -> (config, _) { -> { SecureRandom.uuid } } + attribute :uuid_generator, Proc, default: -> (config, _) { ->(object) { SecureRandom.uuid } } def with(attribute_updates = {}, &block) diff --git a/lib/undo/model.rb b/lib/undo/model.rb index <HASH>..<HASH> 100644 --- a/lib/undo/model.rb +++ b/lib/undo/model.rb @@ -19,7 +19,7 @@ module Undo attr_reader :object def generate_uuid - config.uuid_generator.call + config.uuid_generator.call object end def store diff --git a/spec/undo/model_spec.rb b/spec/undo/model_spec.rb index <HASH>..<HASH> 100644 --- a/spec/undo/model_spec.rb +++ b/spec/undo/model_spec.rb @@ -21,6 +21,15 @@ describe Undo::Model do expect(SecureRandom).to receive(:uuid) { "123" } expect(model.uuid).to eq "123" end + + it "passes object to custom uuid gerenator" do + uuid_generator = double :uuid_generator + expect(uuid_generator).to receive(:call).with(object) + + model = subject.new object, uuid_generator: uuid_generator + model.uuid + end + end end
Passes object to custom uuid_generator
AlexParamonov_undo
train
d91c2f2714674df2adbf9f4052bb22fd04855525
diff --git a/fantasy_football_auction/auction.py b/fantasy_football_auction/auction.py index <HASH>..<HASH> 100644 --- a/fantasy_football_auction/auction.py +++ b/fantasy_football_auction/auction.py @@ -198,6 +198,7 @@ class Auction: # nomination successful, bidding time self.nominee = nominated_player self.bid = bid + self.tickbids[owner_id] = bid def scores(self, starter_value): """ diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup setup(name='fantasy_football_auction', packages=['fantasy_football_auction'], - version='0.9.2', + version='0.9.3', description='Python library simulating a fantasy football auction. Intended to be used for AI, but you should be ' 'able to use this for other purposes as well. This task assumes that each draftable player has a ' 'specific value (for example, looking at the ratings from FantasyPros).',
fix bug where nomination bid wasn't updated
chairbender_fantasy-football-auction
train
6fb1b400b00436769fd50708a4af71f3c294c110
diff --git a/lib/flex/configuration.rb b/lib/flex/configuration.rb index <HASH>..<HASH> 100644 --- a/lib/flex/configuration.rb +++ b/lib/flex/configuration.rb @@ -50,7 +50,7 @@ module Flex :flex_dir => './flex', :http_client => load_http_client, :http_client_options => {}, - :debug => true, + :debug => false, :debug_result => true, :debug_to_curl => false, :raise_proc => proc{|response| response.status >= 400} diff --git a/lib/flex/rails/engine.rb b/lib/flex/rails/engine.rb index <HASH>..<HASH> 100644 --- a/lib/flex/rails/engine.rb +++ b/lib/flex/rails/engine.rb @@ -7,7 +7,7 @@ module Flex config.flex.variables[:index] = [self.class.name.split('::').first.underscore, ::Rails.env].join('_') config.flex.config_file = ::Rails.root.join('config', 'flex.yml').to_s config.flex.flex_dir = ::Rails.root.join('app', 'flex').to_s - end + config.flex.debug = ::Rails.env.development? end ActiveSupport.on_load(:after_initialize) do diff --git a/lib/generators/flex/setup/templates/flex_initializer.rb.erb b/lib/generators/flex/setup/templates/flex_initializer.rb.erb index <HASH>..<HASH> 100644 --- a/lib/generators/flex/setup/templates/flex_initializer.rb.erb +++ b/lib/generators/flex/setup/templates/flex_initializer.rb.erb @@ -17,8 +17,8 @@ Flex::Configuration.configure do |config| # The custom url of your ElasticSearch server # config.base_uri = 'http://localhost:9200' - # Set it to false to skip the logging of the debug infos - # config.debug = true + # Set it to true to log the debug infos (true by default in development mode) + # config.debug = false # Debug info are actually valid curl commands # config.debug_to_curl = false
config.debug false by default (true by default in Rails.env.development?)
elastics_elastics
train
ae336b5ee7d3e816ac084295da84875f41bd5f8a
diff --git a/caas/broker.go b/caas/broker.go index <HASH>..<HASH> 100644 --- a/caas/broker.go +++ b/caas/broker.go @@ -201,6 +201,8 @@ type Broker interface { type Upgrader interface { // Upgrade sets the OCI image for the app to the specified version. Upgrade(appName string, vers version.Number) error + // OperatorVersion return operator version for the specified application. + OperatorVersion(appName string) (version.Number, error) } // StorageValidator provides methods to validate storage. diff --git a/caas/kubernetes/provider/k8s.go b/caas/kubernetes/provider/k8s.go index <HASH>..<HASH> 100644 --- a/caas/kubernetes/provider/k8s.go +++ b/caas/kubernetes/provider/k8s.go @@ -1295,6 +1295,39 @@ func (k *kubernetesClient) EnsureService( return nil } +func getOperatorPodSelector(appName string) string { + if appName == JujuControllerStackName { + return applicationSelector(appName) + } + return operatorSelector(appName) +} + +func (k *kubernetesClient) OperatorVersion(appName string) (ver version.Number, err error) { + pods, err := k.client().CoreV1().Pods(k.namespace).List( + v1.ListOptions{ + LabelSelector: getOperatorPodSelector(appName), + IncludeUninitialized: false, + FieldSelector: fmt.Sprintf("status.phase==%v", core.PodRunning), + }, + ) + if k8serrors.IsNotFound(err) || pods.Size() == 0 { + return ver, errors.NotFoundf("operator for %q", appName) + } + if err != nil { + return ver, errors.Trace(err) + } + if pods.Size() > 1 { + // this should never happen. + logger.Warningf("found %d operator pods for %q", pods.Size(), appName) + } + for _, c := range pods.Items[0].Spec.Containers { + if ver, err = podcfg.ParseOperatorImageTagVersion(c.Image); err == nil { + return ver, nil + } + } + return ver, errors.NotFoundf("operator for %q", appName) +} + // Upgrade sets the OCI image for the app's operator to the specified version. func (k *kubernetesClient) Upgrade(appName string, vers version.Number) error { var resourceName string diff --git a/cloudconfig/podcfg/image.go b/cloudconfig/podcfg/image.go index <HASH>..<HASH> 100644 --- a/cloudconfig/podcfg/image.go +++ b/cloudconfig/podcfg/image.go @@ -7,6 +7,7 @@ import ( "fmt" "strings" + "github.com/juju/errors" "github.com/juju/version" "github.com/juju/juju/controller" @@ -58,6 +59,25 @@ func RebuildOldOperatorImagePath(imagePath string, ver version.Number) string { return tagImagePath(imagePath, ver) } +// ParseOperatorImageTagVersion parses operator image path and returns operator version. +func ParseOperatorImageTagVersion(p string) (ver version.Number, err error) { + err = errors.NotValidf("Operator image path %q", p) + if !IsJujuOCIImage(p) { + return ver, err + } + splittedPath := strings.Split(p, ":") + if len(splittedPath) != 2 { + return ver, err + } + var e error + ver, e = version.Parse(splittedPath[1]) + if e != nil { + return ver, err + } + ver.Build = 0 + return ver, nil +} + func tagImagePath(path string, ver version.Number) string { var verString string splittedPath := strings.Split(path, ":")
Add a new OperatorVersion method to the broker interface to fetch the operator version for a specified application
juju_juju
train
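The version is recovered by splitting the OCI image path on `:` and parsing the tag, with the build number zeroed so only the release version is compared. A sketch of that parse step in Python; the image path is an example, and version comparison is left to a plain tuple instead of juju's version type:

```python
def parse_operator_image_tag_version(path: str) -> tuple:
    parts = path.split(':')
    if len(parts) != 2:
        raise ValueError(f'operator image path {path!r} not valid')
    repo, tag = parts
    major, minor, patch = (int(x) for x in tag.split('.'))  # e.g. "2.6.5"
    return (major, minor, patch)  # build component deliberately dropped, as in the diff

assert parse_operator_image_tag_version('jujusolutions/jujud-operator:2.6.5') == (2, 6, 5)
```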
bb91fae6c7187541f8898e4c001f4955dfd8658f
diff --git a/tests/test_publish.py b/tests/test_publish.py index <HASH>..<HASH> 100644 --- a/tests/test_publish.py +++ b/tests/test_publish.py @@ -76,9 +76,13 @@ def test_publish( release_description='The first flight' ) - release_notes_path = Path('docs').joinpath('releases').joinpath('0.0.2-2017-11-06-Icarus.md') + release_notes_path = Path( + 'docs/releases/0.0.2-{}-Icarus.md'.format( + date.today().isoformat() + ) + ) assert release_notes_path.exists() - + publish.publish() pre = textwrap.dedent( diff --git a/tests/test_stage.py b/tests/test_stage.py index <HASH>..<HASH> 100644 --- a/tests/test_stage.py +++ b/tests/test_stage.py @@ -37,6 +37,11 @@ def test_stage_draft( changes.initialise() stage.stage(draft=True) + release_notes_path = Path( + 'docs/releases/0.0.2-{}.md'.format( + date.today().isoformat() + ) + ) expected_output = textwrap.dedent( """\ Staging [fix] release for version 0.0.2... @@ -44,7 +49,7 @@ def test_stage_draft( Generating Release... Would have created {}:... """.format( - Path('docs').joinpath('releases').joinpath('0.0.2-2017-11-06.md') + release_notes_path ) ) @@ -62,7 +67,7 @@ def test_stage_draft( assert expected_output.splitlines() + expected_release_notes_content == out.splitlines() - assert not Path('docs/releases/0.0.2.md').exists() + assert not release_notes_path.exists() @responses.activate @@ -94,7 +99,11 @@ def test_stage( release_description='The first flight' ) - release_notes_path = Path('docs/releases/0.0.2-2017-11-06-Icarus.md') + release_notes_path = Path( + 'docs/releases/0.0.2-{}-Icarus.md'.format( + date.today().isoformat() + ) + ) expected_output = textwrap.dedent( """\ Staging [fix] release for version 0.0.2... @@ -164,7 +173,11 @@ def test_stage_discard( release_description='The first flight' ) - release_notes_path = Path('docs').joinpath('releases').joinpath('0.0.2-2017-11-06-Icarus.md') + release_notes_path = Path( + 'docs/releases/0.0.2-{}-Icarus.md'.format( + date.today().isoformat() + ) + ) assert release_notes_path.exists() result = git(shlex.split('-c color.status=false status --short --branch')) @@ -194,7 +207,7 @@ def test_stage_discard( Running: git checkout -- version.txt .bumpversion.cfg... Running: rm {release_notes_path}... """.format( - release_notes_path=Path('docs').joinpath('releases').joinpath('0.0.2-2017-11-06-Icarus.md') + release_notes_path=release_notes_path ) ) out, _ = capsys.readouterr() @@ -225,9 +238,7 @@ def test_stage_discard_nothing_staged( expected_output = textwrap.dedent( """\ No staged release to discard... - """.format( - Path('docs').joinpath('releases').joinpath('0.0.2.md') - ) + """ ) out, _ = capsys.readouterr() assert expected_output == out
Use current date for release date in release notes path
michaeljoseph_changes
train
845dfa5dce5f8eede9652fba0976afff649c3528
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -169,13 +169,6 @@ RSpec.configure do |config| DatabaseCleaner.start end - if example.metadata[:type] == :view - # View tests should not hit any services. This ensures the tests are unit - # testing only the views and run fast. - WebMock.disable_net_connect!(allow_localhost: false) - else - WebMock.disable_net_connect!(allow_localhost: true) - end # using :workflow is preferable to :clean_repo, use the former if possible # It's important that this comes after DatabaseCleaner.start ensure_deposit_available_for(user) if example.metadata[:workflow] @@ -195,7 +188,15 @@ RSpec.configure do |config| end config.include(ControllerLevelHelpers, type: :view) - config.before(:each, type: :view) { initialize_controller_helpers(view) } + + config.before(:each, type: :view) do + initialize_controller_helpers(view) + WebMock.disable_net_connect!(allow_localhost: false) + end + + config.after(:each, type: :view) do + WebMock.disable_net_connect!(allow_localhost: true) + end config.before(:all, type: :feature) do # Assets take a long time to compile. This causes two problems:
Allow localhost connections in WebMock after view tests View tests turn off localhost connections explicitly; this is mostly just a safeguard against slow network requests within the tests. Rather than rely on the next test to turn them back on, we can simply allow the view test to reset the status after it runs. This is more robust to test order problems.
samvera_hyrax
train
855f1e4f52a7ab05fdf04f57a1c81e1e47edf22d
diff --git a/lib/waterline/utils/normalize.js b/lib/waterline/utils/normalize.js index <HASH>..<HASH> 100644 --- a/lib/waterline/utils/normalize.js +++ b/lib/waterline/utils/normalize.js @@ -99,14 +99,24 @@ var normalize = module.exports = { // Move Limit, Skip, sort outside the where criteria if(hop(criteria, 'where') && criteria.where !== null && hop(criteria.where, 'limit')) { - criteria.limit = _.clone(criteria.where.limit); + criteria.limit = parseInt(_.clone(criteria.where.limit), 10); + if(criteria.limit < 0) criteria.limit = 0; delete criteria.where.limit; } + else if(hop(criteria, 'limit')) { + criteria.limit = parseInt(criteria.limit, 10); + if(criteria.limit < 0) criteria.limit = 0; + } if(hop(criteria, 'where') && criteria.where !== null && hop(criteria.where, 'skip')) { - criteria.skip = _.clone(criteria.where.skip); + criteria.skip = parseInt(_.clone(criteria.where.skip), 10); + if(criteria.skip < 0) criteria.skip = 0; delete criteria.where.skip; } + else if(hop(criteria, 'skip')) { + criteria.skip = parseInt(criteria.skip, 10); + if(criteria.skip < 0) criteria.skip = 0; + } if(hop(criteria, 'where') && criteria.where !== null && hop(criteria.where, 'sort')) { criteria.sort = _.clone(criteria.where.sort);
If -1 (or any negative value) is used for limit or skip, normalize it to zero
balderdashy_waterline
train
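The normalization both hoists limit/skip out of the where clause and clamps negatives to zero. The same rule in a few lines of Python, with a plain dict standing in for Waterline's criteria object:

```python
def normalize_paging(criteria):
    where = criteria.get('where') or {}
    for key in ('limit', 'skip'):
        if key in where:
            criteria[key] = where.pop(key)  # move paging out of the where clause
        if key in criteria:
            criteria[key] = max(int(criteria[key]), 0)  # -1 (or any negative) becomes 0
    return criteria

assert normalize_paging({'where': {'limit': -1}}) == {'where': {}, 'limit': 0}
```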
271f8a3fe372edb2b857a32a419b03c7764bb492
diff --git a/lib/que/poller.rb b/lib/que/poller.rb index <HASH>..<HASH> 100644 --- a/lib/que/poller.rb +++ b/lib/que/poller.rb @@ -269,6 +269,13 @@ module Que private def any_priority_satisfied?(priorities, jobs) + worker_jobs = allocate_jobs_to_workers(priorities, jobs) + priorities.any? do |worker_priority, waiting_workers_count| + worker_jobs[worker_priority].length == waiting_workers_count + end + end + + def allocate_jobs_to_workers(priorities, jobs) jobs = jobs.sort_by { |job| job.fetch(:priority) } worker_jobs = {} priorities.each do |worker_priority, waiting_workers_count| @@ -282,9 +289,7 @@ module Que end end end - priorities.any? do |worker_priority, waiting_workers_count| - worker_jobs[worker_priority].length == waiting_workers_count - end + worker_jobs end end end
Refactor Poller#any_priority_satisfied? to extract #allocate_jobs_to_workers
chanks_que
train
559bc4b03ee02adf0f17367527bf73279a7a5ba4
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -55,6 +55,7 @@ function configureComponent(configPath, component, componentPath) { // Register the variant config.variants.push({ name: variant, + label: component.label || ((variant === 'default') ? component.name : variant), context: { config: component.config, parameters: component.parameters || {}, @@ -63,6 +64,16 @@ function configureComponent(configPath, component, componentPath) { }, }); + config.variants = config.variants.sort((a, b) => { + const aName = a.label.toLowerCase(); + const bName = b.label.toLowerCase(); + if (aName > bName) { + return 1; + } + return (aName < bName) ? -1 : 0; + }); + config.variants.forEach((v, i) => v.order = i); + // Write the configuration file fs.writeFileSync(configPath, JSON.stringify(config, null, 4)); } @@ -126,19 +137,6 @@ function registerComponent(component) { if (!createCollection(app.components.get('path'), [...componentPath, componentName])) { throw new Error(`Could not create component directory ${componentDirectory}`); } - // try { - // if (!fs.statSync(componentDirectory).isDirectory()) { - // throw new Error(`Could not create component directory ${componentDirectory}`); - // } - // } catch (e) { - // // if (!mkdirp(componentDirectory)) { - // if (!createCollection(app.components.get('path'), [...componentPath, componentName])) { - // throw new Error(`Could not create component directory ${componentDirectory}`); - // } - // } - // if (!fs.statSync(componentDirectory).isDirectory() && !mkdirp(componentDirectory)) { - // throw new Error(`Could not create component directory ${componentDirectory}`); - // } // Write out the template file const componentVariantName = componentName + (component.variant ? `--${slug(component.variant, { lower: true })}` : '');
Added explicit component / variant label
tollwerk_fractal-typo3
train
9c57c30e57e96af8dae35f7b3cc5b2a61348fe69
diff --git a/plugins/redis/redis.go b/plugins/redis/redis.go index <HASH>..<HASH> 100644 --- a/plugins/redis/redis.go +++ b/plugins/redis/redis.go @@ -74,12 +74,12 @@ var ErrProtocolError = errors.New("redis protocol error") // Reads stats from all configured servers accumulates stats. // Returns one of the errors encountered while gather stats (if any). -func (g *Redis) Gather(acc plugins.Accumulator) error { - if len(g.Servers) == 0 { +func (r *Redis) Gather(acc plugins.Accumulator) error { + if len(r.Servers) == 0 { url := &url.URL{ Host: ":6379", } - g.gatherServer(url, acc) + r.gatherServer(url, acc) return nil } @@ -87,7 +87,7 @@ func (g *Redis) Gather(acc plugins.Accumulator) error { var outerr error - for _, serv := range g.Servers { + for _, serv := range r.Servers { u, err := url.Parse(serv) if err != nil { return fmt.Errorf("Unable to parse to address '%s': %s", serv, err) @@ -100,7 +100,7 @@ func (g *Redis) Gather(acc plugins.Accumulator) error { wg.Add(1) go func(serv string) { defer wg.Done() - outerr = g.gatherServer(u, acc) + outerr = r.gatherServer(u, acc) }(serv) } @@ -111,8 +111,8 @@ func (g *Redis) Gather(acc plugins.Accumulator) error { const defaultPort = "6379" -func (g *Redis) gatherServer(addr *url.URL, acc plugins.Accumulator) error { - if g.c == nil { +func (r *Redis) gatherServer(addr *url.URL, acc plugins.Accumulator) error { + if r.c == nil { _, _, err := net.SplitHostPort(addr.Host) if err != nil { @@ -141,12 +141,12 @@ func (g *Redis) gatherServer(addr *url.URL, acc plugins.Accumulator) error { } } - g.c = c + r.c = c } - g.c.Write([]byte("info\r\n")) + r.c.Write([]byte("info\r\n")) - r := bufio.NewReader(g.c) + r := bufio.NewReader(r.c) line, err := r.ReadString('\n') if err != nil {
Redis plugin internal names consistency fix, g -> r
influxdata_telegraf
train
d48c4923c3cc86b607432f8b1a42f79dde47d7eb
diff --git a/tpl/scripts/extract/changeset.php b/tpl/scripts/extract/changeset.php index <HASH>..<HASH> 100644 --- a/tpl/scripts/extract/changeset.php +++ b/tpl/scripts/extract/changeset.php @@ -13,6 +13,8 @@ $object = $vehicle['object']; if (isset($object['changeset'])) { $changeSet = $object['changeset']; $removeRead = (isset($object['remove_read']) && !empty($object['remove_read'])) ? true : false; + + $GLOBALS['modx'] =& $this->modx; $this->modx->getService('registry', 'registry.modRegistry'); $this->modx->registry->getRegister('changes', 'registry.modDbRegister', array('directory' => 'changes'));
Make sure modx will be available in callback functions
modxcms_teleport
train
81027d5ceb02c598b8b480d7ddcedd2601e57bf1
diff --git a/build_package.py b/build_package.py index <HASH>..<HASH> 100755 --- a/build_package.py +++ b/build_package.py @@ -181,10 +181,22 @@ def build_binary(dsc_path, location, series, arch, ppa=None, verbose=False): return 0 +def build_source(tar_file, location, series, bugs, + debemail=None, debfullname=None, gpgcmd=None, + branch=None, upatch=None, verbose=False): + pass + + def main(argv): """Execute the commands from the command line.""" exitcode = 0 args = get_args(argv) + if args.command == 'source': + exitcode = build_source( + args.tar_file, args.location, args.series, args.bugs, + debemail=args.debemail, debfullname=args.debfullname, + gpgcmd=args.gpgcmd, branch=args.branch, upatch=args.upatch, + verbose=args.verbose) if args.command == 'binary': exitcode = build_binary( args.dsc, args.location, args.series, args.arch, @@ -207,8 +219,8 @@ def get_args(argv=None): '--debfullname', default=os.environ.get("DEBFULLNAME"), help="Your full name; Environment: DEBFULLNAME.") src_parser.add_argument( - '--gpgcmd', default=os.environ.get("GPGCMD"), - help="Path to an alternate gpg signing command; Environment: GPGCMD.") + '--gpgcmd', default='/usr/bin/gpg', + help="Path to an alternate gpg signing command; default /usr/bin/gpg.") src_parser.add_argument( '--branch', default=DEFAULT_SPB, help="The base/previous source package branch.") diff --git a/tests/test_build_package.py b/tests/test_build_package.py index <HASH>..<HASH> 100644 --- a/tests/test_build_package.py +++ b/tests/test_build_package.py @@ -13,6 +13,7 @@ from build_package import ( BUILD_DEB_TEMPLATE, build_in_lxc, CREATE_LXC_TEMPLATE, + DEFAULT_SPB, get_args, main, move_debs, @@ -90,6 +91,18 @@ class BuildPackageTestCase(unittest.TestCase): self.assertEqual(['123', '456'], args.bugs) self.assertFalse(args.verbose) + def test_main_source(self): + with patch('build_package.build_source', autospec=True, + return_value=0) as bs_mock: + code = main([ + 'prog', 'source', '--debemail', 'email', '--debfullname', 'me', + 'my.tar.gz', '~/workspace', 'trusty', '123', '456']) + self.assertEqual(0, code) + bs_mock.assert_called_with( + 'my.tar.gz', '~/workspace', 'trusty', ['123', '456'], + debemail='email', debfullname='me', gpgcmd='/usr/bin/gpg', + branch=DEFAULT_SPB, upatch=0, verbose=False) + def test_get_args_binary(self): args = get_args( ['prog', 'binary', 'my.dsc', '~/workspace', 'trusty', 'i386'])
Added stub for build_source.
juju_juju
train
b129ad91c7e4fc6440af71708d87c1a80afae34e
diff --git a/ara/plugins/callback/ara_default.py b/ara/plugins/callback/ara_default.py index <HASH>..<HASH> 100644 --- a/ara/plugins/callback/ara_default.py +++ b/ara/plugins/callback/ara_default.py @@ -163,6 +163,12 @@ class CallbackModule(CallbackBase): self._end_task() self._end_play() + # Load variables to verify if there is anything relevant for ara + play_vars = play._variable_manager.get_vars(play=play)["vars"] + for key in play_vars.keys(): + if key == "ara_playbook_name": + self._set_playbook_name(name=play_vars[key]) + # Record all the files involved in the play self._load_files(play._loader._FILE_CACHE.keys()) @@ -252,6 +258,10 @@ class CallbackModule(CallbackBase): "/api/v1/playbooks/%s" % self.playbook["id"], status=status, ended=datetime.datetime.now().isoformat() ) + def _set_playbook_name(self, name): + if self.playbook["name"] != name: + self.playbook = self.client.patch("/api/v1/playbooks/%s" % self.playbook["id"], name=name) + def _get_one_item(self, endpoint, **query): """ Searching with the API returns a list of results. This method is used
Implement support for naming playbooks This allows users to give arbitrary names to their playbooks based on the "ara_playbook_name" Ansible variable that can be supplied either as a literal Ansible variable or as an extra_var. Change-Id: I8bbe<I>a1a<I>a<I>effdee<I>c6f
ansible-community_ara
train
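The callback inspects play variables at the start of each play and renames the playbook through the API when `ara_playbook_name` is present. A sketch of that hook; `client.patch` is assumed to behave as in the diff:

```python
def maybe_set_playbook_name(client, playbook, play_vars):
    name = play_vars.get('ara_playbook_name')
    if name is not None and playbook['name'] != name:
        # PATCH only when the name actually changes, as in the callback.
        playbook = client.patch('/api/v1/playbooks/%s' % playbook['id'], name=name)
    return playbook
```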
f8145cef3a3bc1716ae002af74aede8b6ed59299
diff --git a/minium-webelements-config/src/main/java/minium/web/config/WebElementsConfiguration.java b/minium-webelements-config/src/main/java/minium/web/config/WebElementsConfiguration.java index <HASH>..<HASH> 100644 --- a/minium-webelements-config/src/main/java/minium/web/config/WebElementsConfiguration.java +++ b/minium-webelements-config/src/main/java/minium/web/config/WebElementsConfiguration.java @@ -69,8 +69,8 @@ public class WebElementsConfiguration { @Autowired @Bean - @Order(Ordered.HIGHEST_PRECEDENCE) @Lazy + @Order(Ordered.HIGHEST_PRECEDENCE) public WebModule defaultWebModule(WebDriver wd) { return combine(defaultModule(wd), debugModule()); } @@ -78,7 +78,7 @@ public class WebElementsConfiguration { @Autowired @Bean @Lazy - public Browser<DefaultWebElements> browser(@Lazy WebDriver webDriver, List<WebModule> modules) { + public Browser<DefaultWebElements> browser(WebDriver webDriver, List<WebModule> modules) { WebModule combinedWebModule = WebModules.combine(modules); return new WebDriverBrowser<DefaultWebElements>(webDriver, DefaultWebElements.class, combinedWebModule); }
Unnecessary @Lazy annotations removed
viltgroup_minium
train
526a9faf15c88d59c8e166586b96fcac7456cbf2
diff --git a/core/server/master/src/main/java/alluxio/master/file/DefaultFileSystemMaster.java b/core/server/master/src/main/java/alluxio/master/file/DefaultFileSystemMaster.java index <HASH>..<HASH> 100644 --- a/core/server/master/src/main/java/alluxio/master/file/DefaultFileSystemMaster.java +++ b/core/server/master/src/main/java/alluxio/master/file/DefaultFileSystemMaster.java @@ -3069,12 +3069,17 @@ public final class DefaultFileSystemMaster extends AbstractMaster implements Fil // statusCache stores uri to ufsstatus mapping that is used to construct fingerprint Map<AlluxioURI, UfsStatus> statusCache = new HashMap<>(); listOptions.setRecursive(syncDescendantType == DescendantType.ALL); - UfsStatus[] children = ufs.listStatus(ufsUri.toString(), listOptions); - if (children != null) { - for (UfsStatus childStatus : children) { - statusCache.put(inodePath.getUri().joinUnsafe(childStatus.getName()), - childStatus); + try { + UfsStatus[] children = ufs.listStatus(ufsUri.toString(), listOptions); + if (children != null) { + for (UfsStatus childStatus : children) { + statusCache.put(inodePath.getUri().joinUnsafe(childStatus.getName()), + childStatus); + } } + } catch (Exception e) { + LOG.debug("ListStatus failed as an preparation step for syncMetadata {}", + inodePath.getUri(), e); } SyncResult result = syncInodeMetadata(rpcContext, inodePath, syncDescendantType, statusCache);
[SMALLFIX] Wrap listStatus call in an exception handler to treat it as an optimi… (#<I>) * Wrap listStatus call in an exception handler to treat it as an optimization. * Update debug log message
Alluxio_alluxio
train
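Because the status cache only primes later lookups, a listing failure should degrade to a cache miss rather than abort the whole metadata sync. A sketch of that best-effort shape; `ufs.list_status` is a hypothetical stand-in for the real UFS client call:

```python
import logging

log = logging.getLogger(__name__)

def prefill_status_cache(ufs, uri, recursive):
    cache = {}
    try:
        children = ufs.list_status(uri, recursive=recursive)  # hypothetical client call
        for child in children or []:
            cache[f'{uri}/{child.name}'] = child
    except Exception:
        # Listing is only an optimization; metadata sync proceeds without it.
        log.debug('ListStatus failed as a preparation step for syncMetadata %s', uri,
                  exc_info=True)
    return cache
```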
3feee8b8164ad92a8d38c32b94a1ea597c4c1ff7
diff --git a/ghost/admin/app/helpers/event-name.js b/ghost/admin/app/helpers/event-name.js index <HASH>..<HASH> 100644 --- a/ghost/admin/app/helpers/event-name.js +++ b/ghost/admin/app/helpers/event-name.js @@ -11,6 +11,8 @@ export const AVAILABLE_EVENTS = [ {event: 'post.published', name: 'Post published', group: 'Posts'}, {event: 'post.published.edited', name: 'Published post updated', group: 'Posts'}, {event: 'post.unpublished', name: 'Post unpublished', group: 'Posts'}, + {event: 'post.scheduled', name: 'Post scheduled', group: 'Posts'}, + {event: 'post.unscheduled', name: 'Post unscheduled', group: 'Posts'}, {event: 'post.tag.attached', name: 'Tag added to post', group: 'Posts'}, {event: 'post.tag.detached', name: 'Tag removed from post', group: 'Posts'},
Added missing schedule events to webhook dropdown (#<I>) - these webhook events are not available to choose from in Ghost admin, even though they exist and work
TryGhost_Ghost
train
1f181eceb1622f221b22203b2fbeb349d8ad74c9
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -64,8 +64,8 @@ setup( keywords='crypto pki x509 certificate ocsp', install_requires=[ - 'asn1crypto>=0.21.0', - 'oscrypto>=0.18.0' + 'asn1crypto>=0.20.0', + 'oscrypto>=0.17.3' ], packages=find_packages(exclude=['tests*', 'dev*']),
Reduce asn1crypto and oscrypto version specs so CI can complete
wbond_ocspbuilder
train
2d0505b8ed566a5c3eafb095fa5fc9a0e46f71e1
diff --git a/bootstrap/environment.php b/bootstrap/environment.php index <HASH>..<HASH> 100755 --- a/bootstrap/environment.php +++ b/bootstrap/environment.php @@ -5,9 +5,9 @@ error_reporting(E_ALL); // system settings ini_set('display_errors', env('ERRORS_DISPLAY', 'On')); -ini_set('log_errors', env('ERRORS_DISPLAY', 'On')); -ini_set('track_errors', env('ERRORS_DISPLAY', 'Off')); -ini_set('html_errors', env('ERRORS_DISPLAY', 'Off')); +ini_set('log_errors', env('ERRORS_LOG', 'On')); +ini_set('track_errors', env('ERRORS_TRACK', 'Off')); +ini_set('html_errors', env('ERRORS_HTML', 'Off')); // native types const TYPE_BOOLEAN = 'boolean'; @@ -23,8 +23,9 @@ const TYPE_UNKNOWN_TYPE = 'unknown type'; // custom types const TYPE_DATE = 'date'; -const __AND__ = 'AND'; -const __OR__ = 'OR'; +const __AND__ = ' AND '; +const __OR__ = ' OR '; +const __COMMA__ = ', '; // used to compose path do generator define('TEMPLATE_DIR', 'kernel/resources/templates'); @@ -39,6 +40,7 @@ if (!function_exists('error_handler')) { */ function error_handler($code, $message, $file, $line) { + throw new ErrorException($message, $code, 1, $file, $line); } set_error_handler("error_handler");
Environment variables - add new constants to project (AND, OR, COMMA)
phpzm_kernel
train
858082f9750aa430c9f60c06dfce6a7d6703c707
diff --git a/superset/models/schedules.py b/superset/models/schedules.py index <HASH>..<HASH> 100644 --- a/superset/models/schedules.py +++ b/superset/models/schedules.py @@ -29,17 +29,17 @@ from superset.models.helpers import AuditMixinNullable, ImportMixin metadata = Model.metadata # pylint: disable=no-member -class ScheduleType(enum.Enum): +class ScheduleType(str, enum.Enum): slice = "slice" dashboard = "dashboard" -class EmailDeliveryType(enum.Enum): +class EmailDeliveryType(str, enum.Enum): attachment = "Attachment" inline = "Inline" -class SliceEmailReportFormat(enum.Enum): +class SliceEmailReportFormat(str, enum.Enum): visualization = "Visualization" data = "Raw data" diff --git a/superset/views/schedules.py b/superset/views/schedules.py index <HASH>..<HASH> 100644 --- a/superset/views/schedules.py +++ b/superset/views/schedules.py @@ -156,7 +156,7 @@ class EmailScheduleView( class DashboardEmailScheduleView( EmailScheduleView ): # pylint: disable=too-many-ancestors - schedule_type = ScheduleType.dashboard.value + schedule_type = ScheduleType.dashboard schedule_type_model = Dashboard add_title = _("Schedule Email Reports for Dashboards") @@ -215,7 +215,7 @@ class DashboardEmailScheduleView( class SliceEmailScheduleView(EmailScheduleView): # pylint: disable=too-many-ancestors - schedule_type = ScheduleType.slice.value + schedule_type = ScheduleType.slice schedule_type_model = Slice add_title = _("Schedule Email Reports for Charts") edit_title = add_title
Fix email reports (#<I>) * Fix email reports * Address comments and inherit from str
apache_incubator-superset
train
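Mixing `str` into the enum is what lets the view code pass enum members where strings are expected: members compare equal to their raw values and JSON-serialize cleanly. A self-contained demonstration:

```python
import enum
import json

class ScheduleType(str, enum.Enum):
    slice = 'slice'
    dashboard = 'dashboard'

assert ScheduleType.slice == 'slice'                        # str mixin: equal to the raw value
assert json.dumps(ScheduleType.dashboard) == '"dashboard"'  # serializes as its value
assert ScheduleType('slice') is ScheduleType.slice          # lookup by value still works
```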
5633a6d7ba0c2531db3f0187853cab2a160de352
diff --git a/spec/helper.js b/spec/helper.js index <HASH>..<HASH> 100644 --- a/spec/helper.js +++ b/spec/helper.js @@ -1,3 +1,5 @@ +/* eslint no-console:0 */ + var os = require('os'); var crypto = require('crypto'); var fse = require('fs-extra'); @@ -5,20 +7,27 @@ var fse = require('fs-extra'); var originalCwd = process.cwd(); var createdDirectories = []; +process.on('exit', function () { + createdDirectories.forEach(function (path) { + try { + fse.removeSync(path); + } catch (err) { + console.error('Failed attempt to delete temp directory' + path); + console.error(err); + } + }); +}); + exports.setCleanTestCwd = function () { var random = crypto.randomBytes(16).toString('hex'); var path = os.tmpdir() + '/fs-jetpack-test-' + random; fse.mkdirSync(path); - process.chdir(path); createdDirectories.push(path); + process.chdir(path); }; exports.switchBackToCorrectCwd = function () { process.chdir(originalCwd); - createdDirectories = createdDirectories.filter(function (path) { - fse.removeSync(path); - return false; - }); }; exports.parseMode = function (modeAsNumber) {
Remove all temp directories in bulk after tests are finished
szwacz_fs-jetpack
train
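The same end-of-process bulk cleanup, sketched with Python's `atexit` in place of Node's `process.on('exit')`:

```python
import atexit
import os
import shutil
import tempfile

created_directories = []

@atexit.register
def _cleanup():
    # Remove all temp directories in bulk once the test process is finished.
    for path in created_directories:
        try:
            shutil.rmtree(path)
        except OSError as err:
            print(f'Failed attempt to delete temp directory {path}: {err}')

def set_clean_test_cwd():
    path = tempfile.mkdtemp(prefix='fs-jetpack-test-')
    created_directories.append(path)  # register before chdir, as in the fix
    os.chdir(path)
```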
7a3ed8add4cf05a5abe717e7c566c98326db6532
diff --git a/src/drivers/npm/driver.js b/src/drivers/npm/driver.js index <HASH>..<HASH> 100644 --- a/src/drivers/npm/driver.js +++ b/src/drivers/npm/driver.js @@ -721,7 +721,7 @@ class Site { await this.promiseTimeout( page.evaluateHandle(() => // eslint-disable-next-line unicorn/prefer-text-content - document.body.innerText.replace(/\s+/g, ' ') + document.body.innerText.replace(/\s+/g, ' ').slice(0, 25000) ), { jsonValue: () => '' }, 'Timeout (text)' diff --git a/src/drivers/webextension/js/content.js b/src/drivers/webextension/js/content.js index <HASH>..<HASH> 100644 --- a/src/drivers/webextension/js/content.js +++ b/src/drivers/webextension/js/content.js @@ -178,7 +178,7 @@ const Content = { // Text // eslint-disable-next-line unicorn/prefer-text-content - const text = document.body.innerText.replace(/\s+/g, ' ') + const text = document.body.innerText.replace(/\s+/g, ' ').slice(0, 25000) // CSS rules let css = [] diff --git a/src/drivers/webextension/js/driver.js b/src/drivers/webextension/js/driver.js index <HASH>..<HASH> 100644 --- a/src/drivers/webextension/js/driver.js +++ b/src/drivers/webextension/js/driver.js @@ -19,6 +19,8 @@ const hostnameIgnoreList = const xhrDebounce = [] +let xhrAnalyzed = {} + const scriptsPending = [] function getRequiredTechnologies(name, categoryId) { @@ -458,16 +460,34 @@ const Driver = { return } + let originHostname + + try { + ;({ hostname: originHostname } = new URL(request.originUrl)) + } catch (error) { + return + } + if (!xhrDebounce.includes(hostname)) { xhrDebounce.push(hostname) setTimeout(async () => { xhrDebounce.splice(xhrDebounce.indexOf(hostname), 1) - Driver.onDetect( - request.originUrl || request.initiator, - await analyze({ xhr: hostname }) - ).catch(Driver.error) + xhrAnalyzed[originHostname] = xhrAnalyzed[originHostname] || [] + + if (!xhrAnalyzed[originHostname].includes(hostname)) { + xhrAnalyzed[originHostname].push(hostname) + + if (Object.keys(xhrAnalyzed).length > 500) { + xhrAnalyzed = {} + } + + Driver.onDetect( + request.originUrl || request.initiator, + await analyze({ xhr: hostname }) + ).catch(Driver.error) + } }, 1000) } }, @@ -886,6 +906,8 @@ const Driver = { Driver.cache.hostnames = {} Driver.cache.tabs = {} + xhrAnalyzed = {} + await setOption('hostnames', {}) },
Prevent repeated analysis of identical XHR requests
AliasIO_Wappalyzer
train
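The driver now remembers which (origin, XHR hostname) pairs it has already analyzed, with a crude size cap on the whole map so it cannot grow without bound. That bookkeeping, reduced to Python:

```python
xhr_analyzed = {}  # origin hostname => list of xhr hostnames already analyzed

def should_analyze(origin, hostname, cap=500):
    seen = xhr_analyzed.setdefault(origin, [])
    if hostname in seen:
        return False             # identical request: skip repeat analysis
    seen.append(hostname)
    if len(xhr_analyzed) > cap:  # crude memory bound: throw the whole map away
        xhr_analyzed.clear()
    return True

assert should_analyze('example.org', 'api.example.com') is True
assert should_analyze('example.org', 'api.example.com') is False
```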
7b0fb8639e5da5fd143bdec3f5db01382685554f
diff --git a/leaflet-providers.js b/leaflet-providers.js index <HASH>..<HASH> 100644 --- a/leaflet-providers.js +++ b/leaflet-providers.js @@ -123,8 +123,18 @@ }, variants: { OpenCycleMap: 'cycle', - Transport: 'transport', - TransportDark: 'transport-dark', + Transport: { + options: { + variant: 'transport', + maxZoom: 19 + } + }, + TransportDark: { + options: { + variant: 'transport-dark', + maxZoom: 19 + } + }, Landscape: 'landscape', Outdoors: 'outdoors' }
Thunderforest transport and transport-dark maxZoom <I>
leaflet-extras_leaflet-providers
train
7e7b29c02cc8a1015fb56a7dac6488cf24873922
diff --git a/gitlab.py b/gitlab.py index <HASH>..<HASH> 100644 --- a/gitlab.py +++ b/gitlab.py @@ -584,6 +584,7 @@ class ProjectBranch(GitlabObject): canCreate = False requiredGetAttrs = ['project_id'] requiredListAttrs = ['project_id'] + _constructorTypes = {'commit': 'ProjectCommit'} def protect(self, protect=True): url = self._url % {'project_id': self.project_id}
ProjectBranch: commit is another object
python-gitlab_python-gitlab
train
bded09bbebdc79c5c438c969107f035ebf4e57db
diff --git a/script-test/subtitles/subtitles.js b/script-test/subtitles/subtitles.js index <HASH>..<HASH> 100644 --- a/script-test/subtitles/subtitles.js +++ b/script-test/subtitles/subtitles.js @@ -47,7 +47,7 @@ require( describe('construction', function () { it('initialises with the legacy subtitles module', function () { - Subtitles(); + Subtitles(null, 'http://some-url', null, null); expect(subtitlesContainer).toHaveBeenCalled(); }); @@ -56,7 +56,7 @@ require( loadUrlMock.and.callFake(function (url, callbackObject) { callbackObject.onError(); }); - Subtitles(); + Subtitles(null, 'http://some-url', null, null); expect(pluginsMock.interface.onSubtitlesLoadError).toHaveBeenCalled(); }); @@ -69,6 +69,12 @@ require( expect(pluginsMock.interface.onSubtitlesTransformError).toHaveBeenCalled(); }); + + it('does not attempt to load subtitles if there is no captions url', function () { + Subtitles(null, undefined, null, null); + + expect(loadUrlMock).not.toHaveBeenCalled(); + }); }); describe('enable', function () { diff --git a/script/subtitles/subtitles.js b/script/subtitles/subtitles.js index <HASH>..<HASH> 100644 --- a/script/subtitles/subtitles.js +++ b/script/subtitles/subtitles.js @@ -13,24 +13,26 @@ define('bigscreenplayer/subtitles/subtitles', var subtitlesEnabled = autoStart; var subtitlesAvailable = !!url; - DebugTool.info('Loading subtitles from: ' + url); - LoadURL(url, { - onLoad: function (responseXML, responseText, status) { - if (!responseXML) { - DebugTool.info('Error: responseXML is invalid.'); - Plugins.interface.onSubtitlesTransformError(); - return; - } + if (subtitlesAvailable) { + DebugTool.info('Loading subtitles from: ' + url); + LoadURL(url, { + onLoad: function (responseXML, responseText, status) { + if (!responseXML) { + DebugTool.info('Error: responseXML is invalid.'); + Plugins.interface.onSubtitlesTransformError(); + return; + } - if (status === 200) { - subtitlesContainer = SubtitlesContainer(mediaPlayer, responseXML, autoStart, playbackElement); + if (status === 200) { + subtitlesContainer = SubtitlesContainer(mediaPlayer, responseXML, autoStart, playbackElement); + } + }, + onError: function (error) { + DebugTool.info('Error loading subtitles data: ' + error); + Plugins.interface.onSubtitlesLoadError(); } - }, - onError: function (error) { - DebugTool.info('Error loading subtitles data: ' + error); - Plugins.interface.onSubtitlesLoadError(); - } - }); + }); + } function start () { subtitlesEnabled = true;
Bug fix - only attempt to load the subtitles file when there is a URL to do so (#<I>)
bbc_bigscreen-player
train
4796e618f88b7938819724d2a4212bf1249317da
diff --git a/lib/svtplay_dl/postprocess/__init__.py b/lib/svtplay_dl/postprocess/__init__.py index <HASH>..<HASH> 100644 --- a/lib/svtplay_dl/postprocess/__init__.py +++ b/lib/svtplay_dl/postprocess/__init__.py @@ -124,7 +124,7 @@ class postprocess: if self.config.get("merge_subtitle"): if self.subfixes and len(self.subfixes) >= 2: for subfix in self.subfixes: - subfile = orig_filename.parent / (orig_filename.name + "." + subfix + ".srt") + subfile = orig_filename.parent / (orig_filename.stem + "." + subfix + ".srt") os.remove(subfile) else: os.remove(subfile)
postprocess: we should use the filename without extension
spaam_svtplay-dl
train
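The one-line svtplay-dl fix swaps pathlib's name (basename with extension) for stem (basename without extension) when building the sidecar subtitle path. The difference, shown with a throwaway filename:

from pathlib import Path

orig = Path("episode.mp4")
subfix = "sv"
wrong = orig.parent / (orig.name + "." + subfix + ".srt")  # episode.mp4.sv.srt
right = orig.parent / (orig.stem + "." + subfix + ".srt")  # episode.sv.srt
print(wrong, right)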
2d981b36c9875277098397b2938e9a83ce02795a
diff --git a/staging/src/k8s.io/legacy-cloud-providers/azure/azure_instances.go b/staging/src/k8s.io/legacy-cloud-providers/azure/azure_instances.go index <HASH>..<HASH> 100644 --- a/staging/src/k8s.io/legacy-cloud-providers/azure/azure_instances.go +++ b/staging/src/k8s.io/legacy-cloud-providers/azure/azure_instances.go @@ -21,7 +21,7 @@ package azure import ( "context" "fmt" - "os" + "strconv" "strings" v1 "k8s.io/api/core/v1" @@ -231,18 +231,22 @@ func (az *Cloud) InstanceShutdownByProviderID(ctx context.Context, providerID st } func (az *Cloud) isCurrentInstance(name types.NodeName, metadataVMName string) (bool, error) { - var err error nodeName := mapNodeNameToVMName(name) + if az.VMType == vmTypeVMSS { - // VMSS vmName is not same with hostname, use hostname instead. - metadataVMName, err = os.Hostname() - if err != nil { - return false, err + // VMSS vmName is not same with hostname, construct the node name "{computer-name-prefix}{base-36-instance-id}". + // Refer https://docs.microsoft.com/en-us/azure/virtual-machine-scale-sets/virtual-machine-scale-sets-instance-ids#scale-set-vm-computer-name. + if ssName, instanceID, err := extractVmssVMName(metadataVMName); err == nil { + instance, err := strconv.ParseInt(instanceID, 10, 64) + if err != nil { + return false, fmt.Errorf("failed to parse VMSS instanceID %q: %v", instanceID, err) + } + metadataVMName = fmt.Sprintf("%s%06s", ssName, strconv.FormatInt(instance, 36)) } } metadataVMName = strings.ToLower(metadataVMName) - return (metadataVMName == nodeName), err + return (metadataVMName == nodeName), nil } // InstanceID returns the cloud provider ID of the specified instance. diff --git a/staging/src/k8s.io/legacy-cloud-providers/azure/azure_instances_test.go b/staging/src/k8s.io/legacy-cloud-providers/azure/azure_instances_test.go index <HASH>..<HASH> 100644 --- a/staging/src/k8s.io/legacy-cloud-providers/azure/azure_instances_test.go +++ b/staging/src/k8s.io/legacy-cloud-providers/azure/azure_instances_test.go @@ -361,3 +361,56 @@ func TestNodeAddresses(t *testing.T) { } } } + +func TestIsCurrentInstance(t *testing.T) { + cloud := &Cloud{ + Config: Config{ + VMType: vmTypeVMSS, + }, + } + testcases := []struct { + nodeName string + metadataVMName string + expected bool + expectError bool + }{ + { + nodeName: "node1", + metadataVMName: "node1", + expected: true, + }, + { + nodeName: "node1", + metadataVMName: "node2", + expected: false, + }, + { + nodeName: "vmss000001", + metadataVMName: "vmss_1", + expected: true, + }, + { + nodeName: "vmss_2", + metadataVMName: "vmss000000", + expected: false, + }, + { + nodeName: "vmss123456", + metadataVMName: "vmss_$123", + expected: false, + expectError: true, + }, + } + + for i, test := range testcases { + real, err := cloud.isCurrentInstance(types.NodeName(test.nodeName), test.metadataVMName) + if test.expectError { + if err == nil { + t.Errorf("Test[%d] unexpected nil err", i) + } + } + if real != test.expected { + t.Errorf("Test[%d] unexpected isCurrentInstance result %v != %v", i, real, test.expected) + } + } +}
Fix isCurrentInstance for Windows by removing the dependency on hostname
kubernetes_kubernetes
train
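The Kubernetes fix above reconstructs a VMSS computer name as "{computer-name-prefix}{base-36 instance id, zero-padded to six characters}" instead of calling os.Hostname(). A Python sketch of that naming rule (illustrative only; see the linked Azure docs for the authoritative format):

import string

DIGITS = string.digits + string.ascii_lowercase  # 0-9a-z, i.e. base 36

def to_base36(n):
    if n == 0:
        return "0"
    out = []
    while n:
        n, r = divmod(n, 36)
        out.append(DIGITS[r])
    return "".join(reversed(out))

def vmss_computer_name(prefix, instance_id):
    # Mirrors fmt.Sprintf("%s%06s", ssName, strconv.FormatInt(instance, 36))
    return prefix + format(to_base36(instance_id), "0>6")

assert vmss_computer_name("vmss", 1) == "vmss000001"   # matches the test case above
assert vmss_computer_name("vmss", 46) == "vmss00001a"  # 46 = "1a" in base 36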
f06bc781efebf29d9da237b231454a95694d495e
diff --git a/examples/bme280_normal_mode.py b/examples/bme280_normal_mode.py index <HASH>..<HASH> 100644 --- a/examples/bme280_normal_mode.py +++ b/examples/bme280_normal_mode.py @@ -18,7 +18,7 @@ bme280 = adafruit_bme280.Adafruit_BME280_I2C(i2c) #bme_cs = digitalio.DigitalInOut(board.D10) #bme280 = adafruit_bme280.Adafruit_BME280_SPI(spi, bme_cs) -# change this to match the location's pressure (hPa) at sea level +# Change this to match the location's pressure (hPa) at sea level bme280.sea_level_pressure = 1013.25 bme280.mode = adafruit_bme280.MODE_NORMAL bme280.standby_period = adafruit_bme280.STANDBY_TC_500 @@ -26,7 +26,7 @@ bme280.iir_filter = adafruit_bme280.IIR_FILTER_X16 bme280.overscan_pressure = adafruit_bme280.OVERSCAN_X16 bme280.overscan_humidity = adafruit_bme280.OVERSCAN_X1 bme280.overscan_temperature = adafruit_bme280.OVERSCAN_X2 -#The sensor will need a moment to gather inital readings +# The sensor will need a moment to gather initial readings time.sleep(1) while True:
Minor comment cleanup A typo and a couple of trivial formatting tweaks for consistency. No code changes.
adafruit_Adafruit_CircuitPython_BME280
train
ce4d752cf9808fb10526a0a30c5a5b8a8657b533
diff --git a/parallizer.js b/parallizer.js index <HASH>..<HASH> 100644 --- a/parallizer.js +++ b/parallizer.js @@ -53,12 +53,12 @@ function Parallel(max, col, paused){ if(!(this instanceof Parallel)) return new Parallel(max, col, paused); var pm = parseInt(max, 10); this._max = (!isNaN(pm) && pm > 0) ? pm : 1; - if(typeof col === 'function') this._col = new Collector(col); - else if(col instanceof Collector) this._col = col; - else this._col = null; this._running = 0; this._queue = []; this._paused = !!paused; + this._col = null; + if(typeof col === 'function') this._col = new Collector(col); + if(col instanceof Collector) this._col = col; } Parallel.prototype.add = function(fn, args, cb, scope, high){ @@ -72,8 +72,7 @@ Parallel.prototype.add = function(fn, args, cb, scope, high){ self._check(); }); var fno = new Func(fn, args, scope); - if(high) self._queue.unshift(fno); - else self._queue.push(fno); + self._queue[high ? 'unshift': 'push'](fno); if(self._col) self._col.start(); self._check(); }; @@ -118,4 +117,4 @@ if(typeof window === 'object'){ window.parallizer = parallizer; } -})(); \ No newline at end of file +})();
refactored parallizer.Parallel constructor
christophwitzko_parallizer
train
de09bffd84fc2299eeca50c05998706eeef633ac
diff --git a/control/Controller.php b/control/Controller.php index <HASH>..<HASH> 100644 --- a/control/Controller.php +++ b/control/Controller.php @@ -173,6 +173,8 @@ class Controller extends RequestHandler implements TemplateGlobalProvider { * If $Action isn't given, it will use "index" as a default. */ protected function handleAction($request, $action) { + $this->extend('beforeCallActionHandler', $request, $action); + foreach($request->latestParams() as $k => $v) { if($v || !isset($this->urlParams[$k])) $this->urlParams[$k] = $v; }
Update Controller to allow extension in handleAction() Controller's parent class (RequestHandler) has two extension hooks in its handleAction() method that are obscured by Controller's implementation.
silverstripe_silverstripe-framework
train
66d51f6ff1cf4d438808299cffb4a067edbaa384
diff --git a/webapps/ui/tasklist/client/scripts/task/directives/cam-tasklist-task-meta.js b/webapps/ui/tasklist/client/scripts/task/directives/cam-tasklist-task-meta.js index <HASH>..<HASH> 100644 --- a/webapps/ui/tasklist/client/scripts/task/directives/cam-tasklist-task-meta.js +++ b/webapps/ui/tasklist/client/scripts/task/directives/cam-tasklist-task-meta.js @@ -41,8 +41,10 @@ module.exports = [ taskMetaData.observe('task', function(task) { $scope.task = angular.copy(task); - $scope.task.followUp = unfixDate($scope.task.followUp); - $scope.task.due = unfixDate($scope.task.due); + if ($scope.task) { + $scope.task.followUp = unfixDate($scope.task.followUp); + $scope.task.due = unfixDate($scope.task.due); + } }); taskMetaData.observe('assignee', function(assignee) {
fix(tasklist): add null check to prevent unresolvable error related to CAM-<I>
camunda_camunda-bpm-platform
train
517f7c3d92af36aeefd92959d55519b09bf7c3b3
diff --git a/spec/unit/knife/cookbook_upload_spec.rb b/spec/unit/knife/cookbook_upload_spec.rb index <HASH>..<HASH> 100644 --- a/spec/unit/knife/cookbook_upload_spec.rb +++ b/spec/unit/knife/cookbook_upload_spec.rb @@ -42,7 +42,8 @@ describe Chef::Knife::CookbookUpload do describe 'run' do before(:each) do - @knife.stub!(:upload).and_return(true) + @cookbook_uploader = stub(:upload_cookbooks => nil) + Chef::CookbookUploader.stub(:new => @cookbook_uploader) Chef::CookbookVersion.stub(:list_all_versions).and_return({}) end @@ -164,7 +165,8 @@ describe Chef::Knife::CookbookUpload do describe 'when a frozen cookbook exists on the server' do it 'should fail to replace it' do - @knife.stub!(:upload).and_raise(Chef::Exceptions::CookbookFrozen) + @cookbook_uploader.should_receive(:upload_cookbooks). + and_raise(Net::HTTPServerException.new('message', stub(:code => "409"))) @knife.ui.should_receive(:error).with(/Failed to upload 1 cookbook/) lambda { @knife.run }.should raise_error(SystemExit) end @@ -180,4 +182,4 @@ describe Chef::Knife::CookbookUpload do end end end # run -end # Chef::Knife::CookbookUpload +end
[CHEF-<I>] Failing spec for frozen cookbook upload
chef_chef
train
6c74952e8eca9890a7d2379c76998191d07d3e73
diff --git a/src/Mime/Html.php b/src/Mime/Html.php index <HASH>..<HASH> 100644 --- a/src/Mime/Html.php +++ b/src/Mime/Html.php @@ -24,10 +24,10 @@ class Html implements MimeInterface { public function extract(array $replacements, UrlInterface $url) { if (preg_match('#<title[^>]*>(.*?)</title>#is', $url->getBody(), $match)) { - $replacements['%composed-title%'] = $replacements['%title%'] = html_entity_decode(preg_replace('/[\s\v]+/', ' ', trim($match[1]))); + $replacements['%composed-title%'] = $replacements['%title%'] = preg_replace('/[\s\v]+/', ' ', trim(html_entity_decode($match[1]))); } return $replacements; } -} \ No newline at end of file +}
html_entity_decode should happen first Some control characters (like the newline) will break Phergie when trying to process the event. Decoding the string first prevents that.
phergie_phergie-irc-plugin-react-url
train
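The ordering matters because html_entity_decode can introduce whitespace (e.g. &#10; becomes a newline) that the collapse step must then normalize. The same decode-first ordering in Python, with a made-up input string:

import html
import re

raw = "  Multi&#10;line &amp; title  "
# Decode entities first so &#10; becomes a real newline that the
# whitespace collapse can then normalize; collapsing before decoding
# would leave that newline in the final string.
title = re.sub(r"\s+", " ", html.unescape(raw).strip())
print(title)  # Multi line & title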
c24e4a48d109e95f86d06769b6cac0c62cefd371
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-oidc-provider', - version='0.3.6', + version='0.3.7', packages=[ 'oidc_provider', 'oidc_provider/lib', 'oidc_provider/lib/endpoints', 'oidc_provider/lib/utils', 'oidc_provider/tests', 'oidc_provider/tests/app',
Update version to <I> after `at_hash` fix Using semantic versioning, a backward compatible bug fix deserves a PATCH increment.
juanifioren_django-oidc-provider
train
ba02e65f74212002bb3c55e2769d76897974c261
diff --git a/lib/collections/VoiceConnectionCollection.js b/lib/collections/VoiceConnectionCollection.js index <HASH>..<HASH> 100644 --- a/lib/collections/VoiceConnectionCollection.js +++ b/lib/collections/VoiceConnectionCollection.js @@ -50,13 +50,21 @@ class VoiceConnectionCollection extends BaseArrayCollection { var gatewaySocket = e.socket.gatewaySocket; var guildId = voiceSocket.guildId; - var pending = this._pendingConnections.get(guildId); - if (!pending) return; - gatewaySocket.disconnectVoice(guildId); + const awaitingEndpoint = + e.error && e.error.message == Errors.VOICE_CHANGING_SERVER; - this._pendingConnections.delete(guildId); - e ? pending.reject(e.error) : pending.reject(); + if (awaitingEndpoint) { + this._createPending(guildId); + } else { + var pending = this._pendingConnections.get(guildId); + if (!pending) return; + + gatewaySocket.disconnectVoice(guildId); + + this._pendingConnections.delete(guildId); + e ? pending.reject(e.error) : pending.reject(); + } }); // process voice connections
Fix pending voice connection creation while awaiting endpoint
qeled_discordie
train
cf97ef7f0f8e59aa8e5a26aa60460ebe631e24bb
diff --git a/devices.js b/devices.js index <HASH>..<HASH> 100755 --- a/devices.js +++ b/devices.js @@ -6028,6 +6028,16 @@ const devices = [ exposes.numeric('battery').withUnit('%'), ], }, + { + zigbeeModel: ['FNB56-DOS07FB3.1'], + model: 'HGZB-13A', + vendor: 'Nue / 3A', + description: 'Door/window sensor', + supports: 'contact', + fromZigbee: [fz.ias_contact_alarm_1], + toZigbee: [], + exposes: [exposes.boolean('contact'), exposes.boolean('battery_low'), exposes.boolean('tamper')], + }, // Smart Home Pty {
Add support for Nue/3A Door sensor HGZB-<I>A (#<I>) * feat: Add support for Nue/3A Door sensor The Nue / 3A door and window sensor is relatively simple and just exposes a binary contact sensor. * Update devices.js
Koenkk_zigbee-shepherd-converters
train
069d53a4862c1c87cb4e9bf10633bd53468de6c0
diff --git a/cleaner.go b/cleaner.go index <HASH>..<HASH> 100644 --- a/cleaner.go +++ b/cleaner.go @@ -37,11 +37,16 @@ var removeNodesRegEx = regexp.MustCompile("" + "^widget$|" + "ajoutVideo|" + "articleheadings|" + + "_articles|" + + "author|" + "author-dropdown|" + + "banner|" + + "^bar$|" + "blog-pager|" + "breadcrumbs|" + "byline|" + "cabecalho|" + + "carousel|" + "cnnStryHghLght|" + "cnn_html_slideshow|" + "cnn_strycaptiontxt|" + @@ -53,6 +58,7 @@ var removeNodesRegEx = regexp.MustCompile("" + "contact|" + "contentTools2|" + "controls|" + + "cookie|" + "^date$|" + "detail_new_|" + "detail_related_|" + @@ -64,36 +70,49 @@ var removeNodesRegEx = regexp.MustCompile("" + "js_replies|" + "[Kk]ona[Ff]ilter|" + "leading|" + + "legend|" + "legende|" + "links|" + "mediaarticlerelated|" + + "menu|" + "menucontainer|" + "meta$|" + + "moreNews|" + "navbar|" + + "newsUnder|" + + "panelss2|" + + "panesCity|" + "pagetools|" + "popup|" + "post-attributes|" + "post-title|" + "relacionado|" + "retweet|" + + "rightBlock|" + + "rss|" + "runaroundLeft|" + "shoutbox|" + + "site_box|" + "site_nav|" + "socialNetworking|" + "social_|" + "socialnetworking|" + "socialtools|" + + "source|" + "sponsor|" + "sub_nav|" + "subscribe|" + + "tabsCity|" + "tag_|" + "tags|" + + "teaser|" + "the_answers|" + "timestamp|" + "tools|" + "vcard|" + "welcome_form|" + "wp-caption-text") + var captionsRegEx = regexp.MustCompile("^caption$") var googleRegEx = regexp.MustCompile(" google ") var moreRegEx = regexp.MustCompile("^[^entry-]more.*$")
DEV-<I> new cleaning classes
advancedlogic_GoOse
train
9d3232eb5681aa50fc818700e44a96a6325c510c
diff --git a/src/constants.js b/src/constants.js index <HASH>..<HASH> 100644 --- a/src/constants.js +++ b/src/constants.js @@ -10,7 +10,7 @@ math.PI = Math.PI; math.SQRT1_2 = Math.SQRT1_2; math.SQRT2 = Math.SQRT2; -math.I = new Complex(0, -1); +math.I = new Complex(0, 1); // lower case constants math.pi = math.PI;
Fixed an incredibly stupid bug with the constant i
josdejong_mathjs
train
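Why the sign matters: both i and -i square to -1, so the bug is invisible to that check, but the principal square root of -1 is +i. A quick check with Python's built-in complex type:

import cmath

i = complex(0, 1)                        # the fix: 0 + 1j, not 0 - 1j
assert i ** 2 == -1                      # -i would pass this test too ...
assert abs(cmath.sqrt(-1) - i) < 1e-12   # ... but not this one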
8ee84708b579f1045d8460483d07f48ce682dbcc
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -46,9 +46,8 @@ def mock_app(extra_stubs={}) ) end -def test_app(name=nil) +def test_app(name=:default) time = Time.now - name ||= :default app = Dragonfly::App[name] app.datastore = Dragonfly::DataStorage::MemoryDataStore.new app.log = Logger.new(LOG_FILE)
not sure why I wasn't using default args
markevans_dragonfly
train
f2c22946629467eb06036eb50c3dcd2df5111581
diff --git a/view/files/html.php b/view/files/html.php index <HASH>..<HASH> 100644 --- a/view/files/html.php +++ b/view/files/html.php @@ -15,6 +15,11 @@ */ class ComFilesViewFilesHtml extends ComKoowaViewHtml { + /** + * @var string The root path + */ + protected $_root_path = ''; + protected function _initialize(KObjectConfig $config) { $config->auto_fetch = false; @@ -22,6 +27,28 @@ class ComFilesViewFilesHtml extends ComKoowaViewHtml parent::_initialize($config); } + /** + * Root path setter + * + * @param string $path The root path + * + * @return $this + */ + public function setRootPath($path) + { + $this->_root_path = (string) $path; + } + + /** + * Root path getter + * + * @return string The root path + */ + public function getRootPath() + { + return $this->_root_path; + } + protected function _fetchData(KViewContext $context) { $state = $this->getModel()->getState(); @@ -40,8 +67,8 @@ class ComFilesViewFilesHtml extends ComKoowaViewHtml 'initial_response' => true ))->append($this->getConfig()->config); - if (isset($query['folder'])) { - $config->append(array('active' => $query['folder'], 'root_path' => $query['folder'])); + if ($root_path = $this->getRootPath()) { + $config->append(array('active' => $root_path, 'root_path' => $root_path)); } if ($config->initial_response === true)
#<I> Add root path setter and getter
joomlatools_joomlatools-framework
train
f6ed07a9cc287d2f7d4b826adad4e70b65504bb0
diff --git a/choria/framework.go b/choria/framework.go index <HASH>..<HASH> 100644 --- a/choria/framework.go +++ b/choria/framework.go @@ -266,6 +266,10 @@ func (self *Framework) SetupLogging(debug bool) (err error) { self.log.SetLevel(log.DebugLevel) } + log.SetFormatter(self.log.Formatter) + log.SetLevel(self.log.Level) + log.SetOutput(self.log.Out) + return }
(#<I>) configure global logger options We'd like to move to reusing an instance of the logger rather than each caller using its own instance and the general chaos that's there now, but while this is happening the old global-level logger still needs to be configured. Previously we made the logger instance on the framework but did not also configure the global logger, thus all logging went to stdout by default
choria-io_go-choria
train
8151b56459c942965e18e9b3ab0b33521f437a43
diff --git a/lib/active_admin/view_helpers.rb b/lib/active_admin/view_helpers.rb index <HASH>..<HASH> 100644 --- a/lib/active_admin/view_helpers.rb +++ b/lib/active_admin/view_helpers.rb @@ -97,6 +97,8 @@ module ActiveAdmin def active_admin_filters_form_for(search, options = {}, &block) options[:builder] ||= ActiveAdmin::FilterFormBuilder options[:url] ||= collection_path + options[:html] ||= {} + options[:html][:method] = :get form_for :q, search, options, &block end diff --git a/spec/active_admin/filter_form_builder_spec.rb b/spec/active_admin/filter_form_builder_spec.rb index <HASH>..<HASH> 100644 --- a/spec/active_admin/filter_form_builder_spec.rb +++ b/spec/active_admin/filter_form_builder_spec.rb @@ -21,6 +21,10 @@ describe Admin::PostsController do end end + it "should generate a form which submits via get" do + response.should have_tag("form", :attributes => { :method => 'get' }) + end + it "should generate a search field for a string attribute" do response.should have_tag("input", :attributes => { :name => "q[title_contains]"}) end @@ -30,7 +34,6 @@ describe Admin::PostsController do end it "should generate a search field for a text attribute" do - puts response.body response.should have_tag("input", :attributes => { :name => "q[body_contains]"}) end
Ensure that the filter form is a GET request
activeadmin_activeadmin
train
fe1908551514ae53163509691e1fd302f5d0ce7b
diff --git a/lib/coverband.rb b/lib/coverband.rb index <HASH>..<HASH> 100644 --- a/lib/coverband.rb +++ b/lib/coverband.rb @@ -23,11 +23,10 @@ require 'coverband/collectors/coverage' require 'coverband/reporters/base' require 'coverband/reporters/html_report' require 'coverband/reporters/console_report' -require 'coverband/integrations/background' -require 'coverband/integrations/rack_server_check' require 'coverband/reporters/web' -require 'coverband/integrations/background_middleware' require 'coverband/integrations/background' +require 'coverband/integrations/background_middleware' +require 'coverband/integrations/rack_server_check' module Coverband @@configured = false
Remove repeated integrations/background require from coverband.rb
danmayer_coverband
train
af6eaeb1a38d9f3aeacbded9391a0ed159bb47cc
diff --git a/pymc3/stats.py b/pymc3/stats.py index <HASH>..<HASH> 100644 --- a/pymc3/stats.py +++ b/pymc3/stats.py @@ -271,10 +271,10 @@ def compare(traces, models, ic='WAIC'): Parameters ---------- - traces: list of PyMC3 traces - models: list of PyMC3 models + traces : list of PyMC3 traces + models : list of PyMC3 models in the same order as traces. - ic: string + ic : string Information Criterion (WAIC or LOO) used to compare models. Default WAIC. @@ -282,25 +282,25 @@ def compare(traces, models, ic='WAIC'): ------- A DataFrame, ordered from lowest to highest IC. The index reflects the order in which the models are passed to this function. The columns are: - IC: Information Criteria (WAIC or LOO). + IC : Information Criteria (WAIC or LOO). Smaller IC indicates higher out-of-sample predictive fit ("better" model). Default WAIC. - pIC: Estimated effective number of parameters. - dIC: Relative difference between each IC (WAIC or LOO) + pIC : Estimated effective number of parameters. + dIC : Relative difference between each IC (WAIC or LOO) and the lowest IC (WAIC or LOO). It's always 0 for the top-ranked model. weight: Akaike weights for each model. This can be loosely interpreted as the probability of each model (among the compared model) given the data. Be careful that these weights are based on point estimates of the IC (uncertainty is ignored). - SE: Standard error of the IC estimate. + SE : Standard error of the IC estimate. For a "large enough" sample size this is an estimate of the uncertainty in the computation of the IC. - dSE: Standard error of the difference in IC between each model and + dSE : Standard error of the difference in IC between each model and the top-ranked model. It's always 0 for the top-ranked model. - warning: A value of 1 indicates that the computation of the IC may not be - reliable see http://arxiv.org/abs/1507.04544 for details + warning : A value of 1 indicates that the computation of the IC may not be + reliable see http://arxiv.org/abs/1507.04544 for details. """ if ic == 'WAIC': ic_func = waic
docstring update, fix missing space between varname and description
pymc-devs_pymc
train
3ca38e4549d6cb7ef75082ac2cf102706d4e9be9
diff --git a/tests/test_genrequest.py b/tests/test_genrequest.py index <HASH>..<HASH> 100644 --- a/tests/test_genrequest.py +++ b/tests/test_genrequest.py @@ -645,3 +645,74 @@ class TestGenrequest(TestCase): response.get_response(3), "Querying an invalid requestId didn't return None" ) + + def test_genrequest_batch_invalid_xml(self): + + """ Create a batchrequest only using the Communication-object, + send it and request an invalid request id (xml) + """ + + config = get_config() + + if config.getboolean("genrequest_test", "enabled"): + + # Run only if enabled + + comm = Communication(config.get("genrequest_test", "url")) + + token = authenticate( + config.get("genrequest_test", "url"), + config.get("genrequest_test", "account"), + config.get("genrequest_test", "preauthkey") + ) + + self.assertNotEqual( + token, + None, + "Cannot authenticate." + ) + + request = comm.gen_request( + request_type="xml", + token=token, + set_batch=True + ) + + self.assertEqual( + type(request), + RequestXml, + "Generated request wasn't an json-request, which should be " + "the default." + ) + + request.add_request( + "NoOpRequest", + { + + }, + "urn:zimbraMail" + ) + + request.add_request( + "NoOpRequest", + { + + }, + "urn:zimbraMail" + ) + + response = comm.send_request(request) + + if response.is_fault(): + + self.fail( + "Reponse failed: (%s) %s" % ( + response.get_fault_code(), + response.get_fault_message() + ) + ) + + self.assertIsNone( + response.get_response(3), + "Querying an invalid requestId didn't return None" + )
* Added last Batchrequest test in an XML version (#<I>)
Zimbra-Community_python-zimbra
train
65ab73f9df07039cfce7e7c1271c33e7b2b067a3
diff --git a/Neos.Fusion.Afx/Classes/Parser/Lexer.php b/Neos.Fusion.Afx/Classes/Parser/Lexer.php index <HASH>..<HASH> 100644 --- a/Neos.Fusion.Afx/Classes/Parser/Lexer.php +++ b/Neos.Fusion.Afx/Classes/Parser/Lexer.php @@ -49,7 +49,7 @@ class Lexer public function __construct($string) { $this->string = $string; - $this->currentCharacter = ($string !== '') ? $string{0} : null; + $this->currentCharacter = ($string !== '') ? $string[0] : null; $this->characterPosition = 0; } @@ -250,7 +250,7 @@ class Lexer */ public function rewind(): void { - $this->currentCharacter = $this->string{--$this->characterPosition}; + $this->currentCharacter = $this->string[--$this->characterPosition]; } /** @@ -277,7 +277,7 @@ class Lexer { $c = $this->currentCharacter; if ($this->characterPosition < strlen($this->string) - 1) { - $this->currentCharacter = $this->string{++$this->characterPosition}; + $this->currentCharacter = $this->string[++$this->characterPosition]; } else { $this->currentCharacter = null; } diff --git a/Neos.Fusion.Afx/Classes/Service/AfxService.php b/Neos.Fusion.Afx/Classes/Service/AfxService.php index <HASH>..<HASH> 100644 --- a/Neos.Fusion.Afx/Classes/Service/AfxService.php +++ b/Neos.Fusion.Afx/Classes/Service/AfxService.php @@ -216,7 +216,7 @@ class AfxService // seperate between attributes (before the first spread), meta attributes // spreads and attributes lists between and after spreads foreach ($attributes as $attribute) { - if ($attribute['type'] === 'prop' && $attribute['payload']['identifier']{0} === '@') { + if ($attribute['type'] === 'prop' && $attribute['payload']['identifier'][0] === '@') { $metaAttributes[] = $attribute; } elseif ($attribute['type'] === 'prop' && $spreadIsPresent === false) { $fusionAttributes[] = $attribute;
Task: Remove deprecated curly brace syntax for accessing string offsets This syntax is deprecated as of PHP <I> in favor of square brackets.
neos_neos-development-collection
train
db5d81d4a09ca0f0f028fcc0499502b44ab67266
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,6 @@ # Event tickets for SilverStripe Event Calendar +[![Scrutinizer Code Quality](https://scrutinizer-ci.com/g/TheBnl/event-tickets/badges/quality-score.png?b=master)](https://scrutinizer-ci.com/g/TheBnl/event-tickets/?branch=master) +[![Build Status](https://scrutinizer-ci.com/g/TheBnl/event-tickets/badges/build.png?b=master)](https://scrutinizer-ci.com/g/TheBnl/event-tickets/build-status/master) Add tickets to Event Calendar events. Payments handled trough the SilverStripe Omnipay module. diff --git a/code/checkout/PaymentProcessor.php b/code/checkout/PaymentProcessor.php index <HASH>..<HASH> 100644 --- a/code/checkout/PaymentProcessor.php +++ b/code/checkout/PaymentProcessor.php @@ -10,9 +10,10 @@ namespace Broarm\EventTickets; use Object; use Payment; -use SilverStripe\Omnipay\GatewayFieldsFactory; +use SilverStripe\Omnipay\Exception\Exception; use SilverStripe\Omnipay\GatewayInfo; use SilverStripe\Omnipay\Service\ServiceFactory; +use SilverStripe\Omnipay\Service\ServiceResponse; /** * Class PaymentProcessor @@ -64,6 +65,11 @@ class PaymentProcessor extends Object 'description' => 'test' ); + /** + * PaymentProcessor constructor. + * + * @param Reservation $reservation + */ public function __construct(Reservation $reservation) { $this->reservation = $reservation; @@ -73,7 +79,7 @@ class PaymentProcessor extends Object /** * Create a payment trough the given payment gateway * - * @param $gateway + * @param string $gateway * * @return Payment */ @@ -101,6 +107,12 @@ class PaymentProcessor extends Object return $this->payment; } + /** + * Create the service factory + * Catch any exceptions that might occur + * + * @return null|ServiceResponse + */ public function createServiceFactory() { $factory = ServiceFactory::create(); @@ -108,7 +120,7 @@ class PaymentProcessor extends Object try { $serviceResponse = $service->initiate($this->getGatewayData()); - } catch (SilverStripe\Omnipay\Exception\Exception $ex) { + } catch (Exception $ex) { // error out when an exception occurs user_error($ex->getMessage(), E_USER_WARNING); return null; @@ -117,13 +129,23 @@ class PaymentProcessor extends Object return $serviceResponse; } + /** + * Set and merges the gateway data + * + * @param array $data + */ public function setGatewayData($data = array()) { array_merge($data, $this->gatewayData); } + /** + * Get the gateway data + * + * @return array + */ public function getGateWayData() { return $this->gatewayData; } -} \ No newline at end of file +} diff --git a/code/model/Reservation.php b/code/model/Reservation.php index <HASH>..<HASH> 100644 --- a/code/model/Reservation.php +++ b/code/model/Reservation.php @@ -19,7 +19,6 @@ use Folder; use GridField; use GridFieldConfig_RecordViewer; use HasManyList; -use LiteralField; use ManyManyList; use Member; use ReadonlyField; @@ -30,7 +29,7 @@ use TabSet; use ViewableData; /** - * Class EventReservation + * Class Reservation * * @package Broarm\EventTickets * @@ -261,7 +260,7 @@ class Reservation extends DataObject /** * Create the folder for the qr code and ticket file * - * @return Folder|null + * @return Folder|DataObject|null */ public function fileFolder() {
added badges and documentation to payment processor
TheBnl_event-tickets
train
c112bc6069879ee2e8843299af215ea2aef4fef2
diff --git a/mod/forum/lib.php b/mod/forum/lib.php index <HASH>..<HASH> 100644 --- a/mod/forum/lib.php +++ b/mod/forum/lib.php @@ -2951,7 +2951,7 @@ function forum_subscribed_users($course, $forum, $groupid=0, $context = null, $f */ function forum_get_course_forum($courseid, $type) { // How to set up special 1-per-course forums - global $CFG, $DB, $OUTPUT; + global $CFG, $DB, $OUTPUT, $USER; if ($forums = $DB->get_records_select("forum", "course = ? AND type = ?", array($courseid, $type), "id ASC")) { // There should always only be ONE, but with the right combination of @@ -2965,6 +2965,9 @@ function forum_get_course_forum($courseid, $type) { $forum = new stdClass(); $forum->course = $courseid; $forum->type = "$type"; + if (!empty($USER->htmleditor)) { + $forum->introformat = $USER->htmleditor; + } switch ($forum->type) { case "news": $forum->name = get_string("namenews", "forum");
MDL-<I> Use the correct editor format for course forums
moodle_moodle
train
870f4f89481e1e4791f9c8a05127b29272ef8bd7
diff --git a/primus.js b/primus.js index <HASH>..<HASH> 100644 --- a/primus.js +++ b/primus.js @@ -1228,7 +1228,7 @@ Primus.prototype.uri = function uri(options) { options = options || {}; options.protocol = 'protocol' in options ? options.protocol : 'http'; options.query = url.search && 'query' in options ? (url.search.charAt(0) === '?' ? url.search.slice(1) : url.search) : false; - options.secure = 'secure' in options ? options.secure : url.protocol === 'https:'; + options.secure = 'secure' in options ? options.secure : (url.protocol === 'https:' || url.protocol === 'wss:'); options.auth = 'auth' in options ? options.auth : url.auth; options.pathname = 'pathname' in options ? options.pathname : this.pathname.slice(1); options.port = 'port' in options ? options.port : url.port || (options.secure ? 443 : 80);
[fix] Set the secure flag when the protocol in the connection URL is `wss:`
primus_primus
train
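The Primus fix treats wss: like https: when inferring the secure flag, which in turn drives the default port (443 vs 80, as in the diff). A hypothetical Python equivalent of that inference:

from urllib.parse import urlparse

def connection_defaults(url, secure=None):
    # Both https: and wss: imply a secure connection unless the caller
    # overrides it; the default port then follows from that flag.
    parts = urlparse(url)
    if secure is None:
        secure = parts.scheme in ("https", "wss")
    port = parts.port or (443 if secure else 80)
    return secure, port

assert connection_defaults("wss://example.com/primus") == (True, 443)
assert connection_defaults("ws://example.com/primus") == (False, 80)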
af895f4f91fe3d453c68620ddfbc7ee1386fea6b
diff --git a/drools-eclipse/org.drools.eclipse/src/main/java/org/drools/eclipse/debug/AuditView.java b/drools-eclipse/org.drools.eclipse/src/main/java/org/drools/eclipse/debug/AuditView.java index <HASH>..<HASH> 100644 --- a/drools-eclipse/org.drools.eclipse/src/main/java/org/drools/eclipse/debug/AuditView.java +++ b/drools-eclipse/org.drools.eclipse/src/main/java/org/drools/eclipse/debug/AuditView.java @@ -120,14 +120,12 @@ public class AuditView extends AbstractDebugView { ObjectInputStream in = xstream.createObjectInputStream( new FileReader(logFileName) ); try { while (true) { - Object object = in.readObject(); + Object object = in.readObject(); if (object instanceof WorkingMemoryLog) { WorkingMemoryLog log = (WorkingMemoryLog) object; - eventList = log.getEvents(); - phreak = log.getEngine().equalsIgnoreCase("PHREAK"); - break; - } - if (object instanceof LogEvent) { + eventList.addAll(log.getEvents()); + phreak |= log.getEngine().equalsIgnoreCase("PHREAK"); + } else if (object instanceof LogEvent) { eventList.add((LogEvent) object); } else if (object instanceof List) { drools4 = true;
[BZ-<I>] make audit view show all sequences of events when parsing a threaded audit log
kiegroup_droolsjbpm-tools
train
808e24aa428d434974678a627b80f9eacaf4373c
diff --git a/engine/score-facade/src/main/java/com/hp/score/facade/entities/Execution.java b/engine/score-facade/src/main/java/com/hp/score/facade/entities/Execution.java index <HASH>..<HASH> 100644 --- a/engine/score-facade/src/main/java/com/hp/score/facade/entities/Execution.java +++ b/engine/score-facade/src/main/java/com/hp/score/facade/entities/Execution.java @@ -20,14 +20,10 @@ public class Execution implements Serializable { protected Map<String, Serializable> contexts; protected SystemContext systemContext = new SystemContext(); - //This context is an internal action context for sharing serializable data between actions on the same execution - //TODO Orit- remove - should be part of contexts!! - protected Map<String, Serializable> serializableSessionContext; public Execution() { this.mustGoToQueue = false; this.contexts = new HashMap<>(); - this.serializableSessionContext = new HashMap<>(); } public Execution(Long executionId, Long runningExecutionPlanId, Long position, Map<String, ? extends Serializable> contexts, Map<String, Serializable> systemContext) { @@ -88,10 +84,6 @@ public class Execution implements Serializable { return systemContext; } - public Map<String, Serializable> getSerializableSessionContext() { - return serializableSessionContext; - } - @Override public boolean equals(Object o) { if (this == o) diff --git a/worker/worker-execution/score-worker-execution-impl/src/main/java/com/hp/score/worker/execution/services/ExecutionServiceImpl.java b/worker/worker-execution/score-worker-execution-impl/src/main/java/com/hp/score/worker/execution/services/ExecutionServiceImpl.java index <HASH>..<HASH> 100644 --- a/worker/worker-execution/score-worker-execution-impl/src/main/java/com/hp/score/worker/execution/services/ExecutionServiceImpl.java +++ b/worker/worker-execution/score-worker-execution-impl/src/main/java/com/hp/score/worker/execution/services/ExecutionServiceImpl.java @@ -396,7 +396,6 @@ public final class ExecutionServiceImpl implements ExecutionService { data.putAll(execution.getContexts()); data.put(ExecutionParametersConsts.SYSTEM_CONTEXT, execution.getSystemContext()); data.put(ExecutionParametersConsts.EXECUTION_RUNTIME_SERVICES, execution.getSystemContext()); - data.put(ExecutionParametersConsts.SERIALIZABLE_SESSION_CONTEXT, execution.getSerializableSessionContext()); data.put(ExecutionParametersConsts.EXECUTION, execution); data.put(ExecutionParametersConsts.EXECUTION_CONTEXT, execution.getContexts()); data.put(ExecutionParametersConsts.RUNNING_EXECUTION_PLAN_ID, execution.getRunningExecutionPlanId());
Remove SerializableSessionContext from the Execution and from score
CloudSlang_score
train
e9f8bda944c3e6f729f7b1658befc0ec4703dd9b
diff --git a/aeron-driver/src/main/java/io/aeron/driver/PublicationImage.java b/aeron-driver/src/main/java/io/aeron/driver/PublicationImage.java index <HASH>..<HASH> 100644 --- a/aeron-driver/src/main/java/io/aeron/driver/PublicationImage.java +++ b/aeron-driver/src/main/java/io/aeron/driver/PublicationImage.java @@ -22,7 +22,6 @@ import io.aeron.driver.status.SystemCounters; import io.aeron.logbuffer.TermRebuilder; import io.aeron.protocol.DataHeaderFlyweight; import io.aeron.protocol.RttMeasurementFlyweight; -import org.agrona.UnsafeAccess; import org.agrona.collections.ArrayUtil; import org.agrona.concurrent.EpochClock; import org.agrona.concurrent.status.AtomicCounter; @@ -39,6 +38,7 @@ import static io.aeron.driver.LossDetector.rebuildOffset; import static io.aeron.driver.PublicationImage.Status.ACTIVE; import static io.aeron.driver.status.SystemCounterDescriptor.*; import static io.aeron.logbuffer.LogBufferDescriptor.*; +import static org.agrona.UnsafeAccess.UNSAFE; class PublicationImagePadding1 { @@ -503,7 +503,7 @@ public class PublicationImage final long smPosition = nextSmPosition; final int receiverWindowLength = nextSmReceiverWindowLength; - UnsafeAccess.UNSAFE.loadFence(); // LoadLoad required so previous loads don't move past version check below. + UNSAFE.loadFence(); // LoadLoad required so previous loads don't move past version check below. if (changeNumber == beginSmChange) { @@ -540,7 +540,7 @@ public class PublicationImage final int termOffset = lossTermOffset; final int length = lossLength; - UnsafeAccess.UNSAFE.loadFence(); // LoadLoad required so previous loads don't move past version check below. + UNSAFE.loadFence(); // LoadLoad required so previous loads don't move past version check below. if (changeNumber == beginLossChange) {
[Java] Add static import for UnsafeAccess.
real-logic_aeron
train
ec9b9323fa28e4b3bec86dc0cf7929a0f33ab7fa
diff --git a/hazelcast/src/main/java/com/hazelcast/config/security/LdapAuthenticationConfig.java b/hazelcast/src/main/java/com/hazelcast/config/security/LdapAuthenticationConfig.java index <HASH>..<HASH> 100644 --- a/hazelcast/src/main/java/com/hazelcast/config/security/LdapAuthenticationConfig.java +++ b/hazelcast/src/main/java/com/hazelcast/config/security/LdapAuthenticationConfig.java @@ -211,8 +211,8 @@ public class LdapAuthenticationConfig implements AuthenticationConfig { public LoginModuleConfig[] asLoginModuleConfigs() { boolean useSystemUser = !isNullOrEmpty(systemUserDn); LoginModuleConfig loginModuleConfig = new LoginModuleConfig( - useSystemUser ? "com.hazelcast.security.impl.LdapLoginModule" - : "com.hazelcast.security.impl.BasicLdapLoginModule", + useSystemUser ? "com.hazelcast.security.loginimpl.LdapLoginModule" + : "com.hazelcast.security.loginimpl.BasicLdapLoginModule", LoginModuleUsage.REQUIRED); Properties props = loginModuleConfig.getProperties(); diff --git a/hazelcast/src/main/java/com/hazelcast/config/security/TlsAuthenticationConfig.java b/hazelcast/src/main/java/com/hazelcast/config/security/TlsAuthenticationConfig.java index <HASH>..<HASH> 100644 --- a/hazelcast/src/main/java/com/hazelcast/config/security/TlsAuthenticationConfig.java +++ b/hazelcast/src/main/java/com/hazelcast/config/security/TlsAuthenticationConfig.java @@ -39,7 +39,7 @@ public class TlsAuthenticationConfig implements AuthenticationConfig { @Override public LoginModuleConfig[] asLoginModuleConfigs() { - LoginModuleConfig loginModuleConfig = new LoginModuleConfig("com.hazelcast.security.impl.X509CertificateLoginModule", + LoginModuleConfig loginModuleConfig = new LoginModuleConfig("com.hazelcast.security.loginimpl.X509CertificateLoginModule", LoginModuleUsage.REQUIRED); if (roleAttribute != null) { loginModuleConfig.getProperties().setProperty("roleAttribute", roleAttribute);
Move login modules from impl to loginimpl package so the JavaDoc is generated for them.
hazelcast_hazelcast
train
24adb90f0b84bab412c365b0ddb0af55f4df53ca
diff --git a/angr/engines/engine.py b/angr/engines/engine.py index <HASH>..<HASH> 100644 --- a/angr/engines/engine.py +++ b/angr/engines/engine.py @@ -48,7 +48,7 @@ class SimEngine(object): successors = SimSuccessors(addr, old_state) - new_state._inspect('engine_process', when=BP_BEFORE, sim_engine=self, sim_successors=successors) + new_state._inspect('engine_process', when=BP_BEFORE, sim_engine=self, sim_successors=successors, address=addr) successors = new_state._inspect_getattr('sim_successors', successors) try: self._process(new_state, successors, *args, **kwargs) @@ -57,7 +57,7 @@ class SimEngine(object): raise old_state.project._simos.handle_exception(successors, self, *sys.exc_info()) - new_state._inspect('engine_process', when=BP_AFTER, sim_successors=successors) + new_state._inspect('engine_process', when=BP_AFTER, sim_successors=successors, address=addr) successors = new_state._inspect_getattr('sim_successors', successors) # downsizing
Provide the address for the engine_process breakpoint
angr_angr
train
edf69cf61abdc9222bf847226140c99c287fb277
diff --git a/addon/components/basic-dropdown.js b/addon/components/basic-dropdown.js index <HASH>..<HASH> 100644 --- a/addon/components/basic-dropdown.js +++ b/addon/components/basic-dropdown.js @@ -45,6 +45,11 @@ export default Component.extend({ const rootSelector = Ember.testing ? '#ember-testing' : getOwner(this).lookup('application:main').rootElement; return self.document.querySelector(rootSelector); }), + + dropdownId: computed(function() { + return `ember-basic-dropdown-content-${this.elementId}`; + }), + tabIndex: computed('disabled', function() { return !this.get('disabled') ? (this.get('tabindex') || '0') : null; }), diff --git a/addon/templates/components/basic-dropdown.hbs b/addon/templates/components/basic-dropdown.hbs index <HASH>..<HASH> 100644 --- a/addon/templates/components/basic-dropdown.hbs +++ b/addon/templates/components/basic-dropdown.hbs @@ -5,6 +5,7 @@ aria-expanded={{publicAPI.isOpen}} aria-disabled={{disabled}} aria-haspopup="true" + aria-owns={{dropdownId}} aria-labelledby={{ariaLabelledBy}} aria-describedby={{ariaDescribedBy}} aria-required={{ariaRequired}} @@ -16,7 +17,7 @@ </div> {{#if opened}} {{#ember-wormhole to=_wormholeDestination renderInPlace=renderInPlace}} - <div class="ember-basic-dropdown-content {{dropdownClass}} {{_verticalPositionClass}} {{_horizontalPositionClass}}" dir={{dir}}> + <div id={{dropdownId}} class="ember-basic-dropdown-content {{dropdownClass}} {{_verticalPositionClass}} {{_horizontalPositionClass}}" dir={{dir}}> {{yield publicAPI}} </div> {{/ember-wormhole}} diff --git a/tests/integration/components/basic-dropdown-test.js b/tests/integration/components/basic-dropdown-test.js index <HASH>..<HASH> 100644 --- a/tests/integration/components/basic-dropdown-test.js +++ b/tests/integration/components/basic-dropdown-test.js @@ -507,6 +507,25 @@ test('The trigger can be customized with custom id and class', function(assert) assert.equal($trigger.attr('id'), 'foo-id', 'The trigger has the given id'); }); + +test('The content of the dropdown has a unique ID and the trigger has `aria-owns=that-id`', function(assert) { + assert.expect(2); + + this.render(hbs` + {{#basic-dropdown}} + <h3>Content of the dropdown</h3> + {{else}} + <button>Press me</button> + {{/basic-dropdown}} + `); + + Ember.run(() => this.$('.ember-basic-dropdown-trigger').trigger('mousedown')); + let dropdownId = $('.ember-basic-dropdown-content').attr('id'); + assert.ok(dropdownId.match(/^ember-basic-dropdown-content-ember\d+$/), 'The dropdown has a unique id'); + let $trigger = this.$('.ember-basic-dropdown-trigger'); + assert.equal($trigger.attr('aria-owns'), dropdownId, 'The trigger aria-owns=<id-of-the-dropdown-content>'); +}); + function triggerKeydown(domElement, k) { var oEvent = document.createEvent("Events"); oEvent.initEvent('keydown', true, true);
Dropdown popup has a unique ID and the trigger has aria-owns=<that-id>
cibernox_ember-basic-dropdown
train
29294bdb42e7eaaf5846d9a9d2ca037b2068a20f
diff --git a/openquake/calculators/hazard/disagg/core.py b/openquake/calculators/hazard/disagg/core.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/hazard/disagg/core.py +++ b/openquake/calculators/hazard/disagg/core.py @@ -104,6 +104,17 @@ class DisaggHazardCalculator(haz_general.BaseHazardCalculatorNext): self.record_init_stats() def task_arg_gen(self, block_size): + """ + Generate task args for disaggregation calculations. + + First, args are generated for hazard curve computation. Once those are + through, args are generated for disagg histogram computation. + + :param int block_size: + The number of items per task. In this case, this the number of + sources for hazard curve calc task, or number of sites for disagg + calc tasks. + """ realizations = models.LtRealization.objects.filter( hazard_calculation=self.hc, is_complete=False)
calcs/hazard/disagg/core: Added doc for the disagg `task_arg_gen`. Former-commit-id: <I>bf9c<I>aa<I>acbc<I>b0e<I>bbc<I>d<I>b1
gem_oq-engine
train
20f52908ab716082a90c1006544905d4b40a4225
diff --git a/lib/dependencies/CommonJsImportsParserPlugin.js b/lib/dependencies/CommonJsImportsParserPlugin.js index <HASH>..<HASH> 100644 --- a/lib/dependencies/CommonJsImportsParserPlugin.js +++ b/lib/dependencies/CommonJsImportsParserPlugin.js @@ -282,7 +282,7 @@ class CommonJsImportsParserPlugin { dep.asiSafe = !parser.isAsiPosition(expr.range[0]); dep.optional = !!parser.scope.inTry; dep.loc = expr.loc; - parser.state.module.addDependency(dep); + parser.state.current.addDependency(dep); return true; } }; @@ -299,7 +299,7 @@ class CommonJsImportsParserPlugin { dep.asiSafe = !parser.isAsiPosition(expr.range[0]); dep.optional = !!parser.scope.inTry; dep.loc = expr.callee.loc; - parser.state.module.addDependency(dep); + parser.state.current.addDependency(dep); parser.walkExpressions(expr.arguments); return true; } diff --git a/lib/dependencies/URLPlugin.js b/lib/dependencies/URLPlugin.js index <HASH>..<HASH> 100644 --- a/lib/dependencies/URLPlugin.js +++ b/lib/dependencies/URLPlugin.js @@ -82,7 +82,7 @@ class URLPlugin { relative ); dep.loc = expr.loc; - parser.state.module.addDependency(dep); + parser.state.current.addDependency(dep); InnerGraph.onUsage(parser.state, e => (dep.usedByExports = e)); return true; });
attach require with property dependencies to the correct DependenciesBlock
webpack_webpack
train
818db48db96b96e5204e291060cf9c6b40046b1b
diff --git a/test/com/opera/core/systems/OperaLauncherRunnerTest.java b/test/com/opera/core/systems/OperaLauncherRunnerTest.java index <HASH>..<HASH> 100644 --- a/test/com/opera/core/systems/OperaLauncherRunnerTest.java +++ b/test/com/opera/core/systems/OperaLauncherRunnerTest.java @@ -80,8 +80,12 @@ public class OperaLauncherRunnerTest @Test public void testOperaLauncherRunnerConstructorWithSettings() { + OperaPaths paths = new OperaPaths(); settings.setRunOperaLauncherFromOperaDriver(true); settings.setOperaBinaryArguments(""); + settings.setOperaBinaryLocation(paths.operaPath()); + settings.setOperaLauncherBinary(paths.launcherPath()); + runner = new OperaLauncherRunner(settings); Assert.assertNotNull(runner); } @@ -101,6 +105,7 @@ public class OperaLauncherRunnerTest @Test public void testStartOpera() { + Assert.assertNotNull(runner); runner.startOpera(); Assert.assertTrue(runner.isOperaRunning()); } @@ -127,13 +132,11 @@ public class OperaLauncherRunnerTest } } - // BEGIN TESTING WITH DIFFERENT OPERA ARGUMENTS - @Test public void testOperaLauncherRunnerConstructorWithSettings2() { settings.setRunOperaLauncherFromOperaDriver(true); - settings.setOperaBinaryArguments("-nowindow"); + settings.setOperaBinaryArguments("-geometry 1024x768"); runner = new OperaLauncherRunner(settings); runner.startOpera(); Assert.assertTrue(runner.isOperaRunning()); @@ -145,32 +148,6 @@ public class OperaLauncherRunnerTest runner.stopOpera(); runner.shutdown(); - try - { - runner.isOperaRunning(); - fail("This test should have generated a OperaRunnerException as we tried to check on the Opera process after being shutdown..."); - } - catch (OperaRunnerException e) - { - } - } - - @Test - public void testOperaLauncherRunnerConstructorWithSettings3() - { - settings.setRunOperaLauncherFromOperaDriver(true); - settings.setOperaBinaryArguments(" -geometry 1024x768 -nohw"); - runner = new OperaLauncherRunner(settings); - runner.startOpera(); - Assert.assertTrue(runner.isOperaRunning()); - } - - @Test - public void testShutdown3() - { - runner.stopOpera(); - runner.shutdown(); - // verify that a second shutdown call doesn't do any harm (shouldn't) runner.shutdown(); } @@ -178,7 +155,7 @@ public class OperaLauncherRunnerTest @Test public void testStartAndStopOperaTenTimesRoundOneStart() { - settings.setOperaBinaryArguments(" -geometry 640x480 -nohw"); + settings.setOperaBinaryArguments("-geometry 640x480"); runner = new OperaLauncherRunner(settings); runner.startOpera(); Assert.assertTrue(runner.isOperaRunning());
Correct OperaLauncherRunner tests and remove repeated one
operasoftware_operaprestodriver
train
43482decacec7821edac8a44f0d1c26381c24ccb
diff --git a/stl/__about__.py b/stl/__about__.py index <HASH>..<HASH> 100644 --- a/stl/__about__.py +++ b/stl/__about__.py @@ -1,6 +1,6 @@ __package_name__ = 'numpy-stl' __import_name__ = 'stl' -__version__ = '1.9.1' +__version__ = '2.0.0' __author__ = 'Rick van Hattem' __author_email__ = 'Wolph@Wol.ph' __description__ = '''
greatly improved tests and added Cython support
WoLpH_numpy-stl
train
87b525e7ac3ffb02cb1ca06124b7b29973c82cbf
diff --git a/spec/lib/secure_headers/view_helpers_spec.rb b/spec/lib/secure_headers/view_helpers_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/secure_headers/view_helpers_spec.rb +++ b/spec/lib/secure_headers/view_helpers_spec.rb @@ -68,15 +68,35 @@ module SecureHeaders let(:app) { lambda { |env| [200, env, "app"] } } let(:middleware) { Middleware.new(app) } let(:request) { Rack::Request.new("HTTP_USER_AGENT" => USER_AGENTS[:chrome]) } + let(:filename) { "app/views/asdfs/index.html.erb" } - before(:each) do + before(:all) do Configuration.default do |config| config.csp[:script_src] = %w('self') config.csp[:style_src] = %w('self') end end - it "raises an error when attempting to hash unknown content" do + after(:each) do + Configuration.instance_variable_set(:@script_hashes, nil) + Configuration.instance_variable_set(:@style_hashes, nil) + end + + it "raises an error when using hashed content without precomputed hashes" do + expect { + Message.new(request).result + }.to raise_error(ViewHelpers::UnexpectedHashedScriptException) + end + + it "raises an error when using hashed content with precomputed hashes, but none for the given file" do + Configuration.instance_variable_set(:@script_hashes, filename.reverse => ["'sha256-123'"]) + expect { + Message.new(request).result + }.to raise_error(ViewHelpers::UnexpectedHashedScriptException) + end + + it "raises an error when using previously unknown hashed content with precomputed hashes for a given file" do + Configuration.instance_variable_set(:@script_hashes, filename => ["'sha256-123'"]) expect { Message.new(request).result }.to raise_error(ViewHelpers::UnexpectedHashedScriptException) @@ -87,9 +107,9 @@ module SecureHeaders allow(SecureRandom).to receive(:base64).and_return("abc123") expected_hash = "sha256-3/URElR9+3lvLIouavYD/vhoICSNKilh15CzI/nKqg8=" - Configuration.instance_variable_set(:@script_hashes, "app/views/asdfs/index.html.erb" => ["'#{expected_hash}'"]) + Configuration.instance_variable_set(:@script_hashes, filename => ["'#{expected_hash}'"]) expected_style_hash = "sha256-7oYK96jHg36D6BM042er4OfBnyUDTG3pH1L8Zso3aGc=" - Configuration.instance_variable_set(:@style_hashes, "app/views/asdfs/index.html.erb" => ["'#{expected_style_hash}'"]) + Configuration.instance_variable_set(:@style_hashes, filename => ["'#{expected_style_hash}'"]) # render erb that calls out to helpers. Message.new(request).result @@ -99,9 +119,6 @@ module SecureHeaders expect(env[CSP::HEADER_NAME]).to match(/script-src[^;]*'nonce-abc123'/) expect(env[CSP::HEADER_NAME]).to match(/style-src[^;]*'nonce-abc123'/) expect(env[CSP::HEADER_NAME]).to match(/style-src[^;]*'#{Regexp.escape(expected_style_hash)}'/) - ensure - Configuration.instance_variable_set(:@script_hashes, nil) - Configuration.instance_variable_set(:@style_hashes, nil) end end end
add more tests around unknown hash behavior
twitter_secure_headers
train
ca630e5351fb014de61f3f6fee16f844688ab19a
diff --git a/src/Symfony/Component/Mime/Part/Multipart/FormDataPart.php b/src/Symfony/Component/Mime/Part/Multipart/FormDataPart.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Component/Mime/Part/Multipart/FormDataPart.php +++ b/src/Symfony/Component/Mime/Part/Multipart/FormDataPart.php @@ -56,11 +56,20 @@ final class FormDataPart extends AbstractMultipartPart private function prepareFields(array $fields): array { $values = []; - array_walk_recursive($fields, function ($item, $key) use (&$values) { - if (!\is_array($item)) { - $values[] = $this->preparePart($key, $item); + + $prepare = function ($item, $key, $root = null) use (&$values, &$prepare) { + $fieldName = $root ? sprintf('%s[%s]', $root, $key) : $key; + + if (\is_array($item)) { + array_walk($item, $prepare, $fieldName); + + return; } - }); + + $values[] = $this->preparePart($fieldName, $item); + }; + + array_walk($fields, $prepare); return $values; } diff --git a/src/Symfony/Component/Mime/Tests/Part/Multipart/FormDataPartTest.php b/src/Symfony/Component/Mime/Tests/Part/Multipart/FormDataPartTest.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Component/Mime/Tests/Part/Multipart/FormDataPartTest.php +++ b/src/Symfony/Component/Mime/Tests/Part/Multipart/FormDataPartTest.php @@ -47,6 +47,34 @@ class FormDataPartTest extends TestCase $this->assertEquals([$t, $b, $c], $f->getParts()); } + public function testNestedArrayParts() + { + $p1 = new TextPart('content', 'utf-8', 'plain', '8bit'); + $f = new FormDataPart([ + 'foo' => clone $p1, + 'bar' => [ + 'baz' => [ + clone $p1, + 'qux' => clone $p1, + ], + ], + ]); + + $this->assertEquals('multipart', $f->getMediaType()); + $this->assertEquals('form-data', $f->getMediaSubtype()); + + $p1->setName('foo'); + $p1->setDisposition('form-data'); + + $p2 = clone $p1; + $p2->setName('bar[baz][0]'); + + $p3 = clone $p1; + $p3->setName('bar[baz][qux]'); + + $this->assertEquals([$p1, $p2, $p3], $f->getParts()); + } + public function testToString() { $p = DataPart::fromPath($file = __DIR__.'/../../Fixtures/mimetypes/test.gif');
Changing the multipart form-data behavior to use the form name as an array, which makes it recognizable as an array by PHP in the $_POST globals when it comes from the HttpClient component
symfony_symfony
train
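The Symfony change walks nested arrays and emits PHP-style bracketed field names (bar[baz][0], bar[baz][qux]) so $_POST parses them back into arrays. A standalone Python sketch of that flattening, using illustrative data:

def flatten_fields(fields, root=None):
    # Expand nested dicts/lists into PHP-style bracketed field names.
    pairs = []
    items = fields.items() if isinstance(fields, dict) else enumerate(fields)
    for key, value in items:
        name = "{}[{}]".format(root, key) if root else str(key)
        if isinstance(value, (dict, list)):
            pairs.extend(flatten_fields(value, name))
        else:
            pairs.append((name, value))
    return pairs

print(flatten_fields({"foo": "1", "bar": {"baz": ["a", {"qux": "b"}]}}))
# [('foo', '1'), ('bar[baz][0]', 'a'), ('bar[baz][1][qux]', 'b')]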
2d0591abd59467740ca07b1b92afece2a72c713c
diff --git a/librosa/core/spectrum.py b/librosa/core/spectrum.py index <HASH>..<HASH> 100644 --- a/librosa/core/spectrum.py +++ b/librosa/core/spectrum.py @@ -229,7 +229,7 @@ def stft( elif n_fft > y.shape[-1]: raise ParameterError( - "n_fft={} is too small for input signal of length={}".format( + "n_fft={} is too large for input signal of length={}".format( n_fft, y.shape[-1] ) )
stft issues with large FFT size (#<I>) * stft issues with large FFT size * Revert the removal of the userwarning and test
librosa_librosa
train
e56e5a74e511ee2b8e46928f47dbb5a771008851
diff --git a/activiti-spring-boot-starter/src/test/java/org/activiti/spring/boot/HistoryConfigurationTest.java b/activiti-spring-boot-starter/src/test/java/org/activiti/spring/boot/HistoryConfigurationTest.java index <HASH>..<HASH> 100644 --- a/activiti-spring-boot-starter/src/test/java/org/activiti/spring/boot/HistoryConfigurationTest.java +++ b/activiti-spring-boot-starter/src/test/java/org/activiti/spring/boot/HistoryConfigurationTest.java @@ -68,8 +68,6 @@ public class HistoryConfigurationTest { @Autowired private ApplicationEventPublisher applicationEventPublisher; - private ApplicationEventPublisher eventPublisher; - @Autowired private ProcessCleanUpUtil processCleanUpUtil; @@ -80,7 +78,7 @@ public class HistoryConfigurationTest { @Before public void init() { - eventPublisher = spy(applicationEventPublisher); + ApplicationEventPublisher eventPublisher = spy(applicationEventPublisher); spy(new ProcessRuntimeImpl(repositoryService, processDefinitionConverter,
Fix for codacy #<I>
Activiti_Activiti
train
3aaf17f22e47b984fd5c3b4204447b930381b202
diff --git a/client/hotrod-client/src/main/java/org/infinispan/client/hotrod/impl/TopologyInfo.java b/client/hotrod-client/src/main/java/org/infinispan/client/hotrod/impl/TopologyInfo.java index <HASH>..<HASH> 100644 --- a/client/hotrod-client/src/main/java/org/infinispan/client/hotrod/impl/TopologyInfo.java +++ b/client/hotrod-client/src/main/java/org/infinispan/client/hotrod/impl/TopologyInfo.java @@ -125,14 +125,10 @@ public final class TopologyInfo { } // Stop accepting topology updates from old requests - caches.replaceAll((name, oldInfo) -> { - CacheInfo newInfo = oldInfo.withNewServers(newTopologyAge, HotRodConstants.SWITCH_CLUSTER_TOPOLOGY, - newCluster.getInitialServers()); - // Updates the balancer in both infos - newInfo.updateBalancerServers(); - // Update the topology ref for both infos and ongoing operations - newInfo.getTopologyIdRef().set(HotRodConstants.SWITCH_CLUSTER_TOPOLOGY); - return newInfo; + caches.forEach((name, oldCacheInfo) -> { + CacheInfo newCacheInfo = oldCacheInfo.withNewServers(newTopologyAge, HotRodConstants.SWITCH_CLUSTER_TOPOLOGY, + newCluster.getInitialServers()); + updateCacheInfo(name, oldCacheInfo, newCacheInfo); }); // Update the topology age for new requests so that the topology updates from their responses are accepted @@ -148,14 +144,11 @@ public final class TopologyInfo { public void reset(WrappedBytes cacheName) { if (log.isTraceEnabled()) log.tracef("Switching to initial server list for cache %s, cluster %s", cacheName, cluster.getName()); - caches.computeIfPresent(cacheName, (name, oldInfo) -> { - CacheInfo newInfo = oldInfo.withNewServers(cluster.getTopologyAge(), HotRodConstants.DEFAULT_CACHE_TOPOLOGY, - cluster.getInitialServers()); - // Updates the balancer in both infos - newInfo.updateBalancerServers(); - // Update the topology ref for both infos and ongoing operations - newInfo.getTopologyIdRef().set(newInfo.getTopologyId()); - return newInfo; + caches.forEach((name, oldCacheInfo) -> { + CacheInfo newCacheInfo = oldCacheInfo.withNewServers(cluster.getTopologyAge(), + HotRodConstants.DEFAULT_CACHE_TOPOLOGY, + cluster.getInitialServers()); + updateCacheInfo(cacheName, oldCacheInfo, newCacheInfo); }); } @@ -175,6 +168,8 @@ public final class TopologyInfo { // The new CacheInfo doesn't have a new balancer instance, so the server update affects both newCacheInfo.updateBalancerServers(); + // Update the topology id for new requests + newCacheInfo.getTopologyIdRef().set(newCacheInfo.getTopologyId()); } public void forEachCache(BiConsumer<WrappedBytes, CacheInfo> action) { diff --git a/client/hotrod-client/src/main/java/org/infinispan/client/hotrod/impl/transport/netty/ChannelFactory.java b/client/hotrod-client/src/main/java/org/infinispan/client/hotrod/impl/transport/netty/ChannelFactory.java index <HASH>..<HASH> 100644 --- a/client/hotrod-client/src/main/java/org/infinispan/client/hotrod/impl/transport/netty/ChannelFactory.java +++ b/client/hotrod-client/src/main/java/org/infinispan/client/hotrod/impl/transport/netty/ChannelFactory.java @@ -327,7 +327,6 @@ public class ChannelFactory { newCacheInfo = cacheInfo.withNewServers(responseTopologyAge, responseTopologyId, addressList); } updateCacheInfo(wrappedCacheName, newCacheInfo, false); - newCacheInfo.getTopologyIdRef().set(responseTopologyId); } else { if (log.isTraceEnabled()) log.tracef("[%s] Ignoring outdated topology: topology id = %s, topology age = %s, servers = %s",
ISPN-<I> Set the request topology id after updating the cache info
infinispan_infinispan
train
fb0f51f84eab7126333d331b19c74ed83d2d3535
diff --git a/src/materials/AdaptiveLuminanceMaterial.js b/src/materials/AdaptiveLuminanceMaterial.js index <HASH>..<HASH> 100644 --- a/src/materials/AdaptiveLuminanceMaterial.js +++ b/src/materials/AdaptiveLuminanceMaterial.js @@ -94,6 +94,30 @@ export class AdaptiveLuminanceMaterial extends ShaderMaterial { } /** + * Returns the lowest possible luminance value. + * + * @return {Number} The minimum luminance. + */ + + getMinLuminance() { + + return this.uniforms.minLuminance.value; + + } + + /** + * Sets the minimum luminance. + * + * @param {Number} value - The minimum luminance. + */ + + setMinLuminance(value) { + + this.uniforms.minLuminance.value = value; + + } + + /** * Returns the luminance adaptation rate. * * @return {Number} The adaptation rate.
Add accessors for min luminance Added getMinLuminance() and setMinLuminance().
vanruesc_postprocessing
train
3d2bb91bd20962e71d4cd58b2267db1839da5ee4
diff --git a/commerce-api/src/main/java/com/liferay/commerce/model/CPDefinitionInventory.java b/commerce-api/src/main/java/com/liferay/commerce/model/CPDefinitionInventory.java index <HASH>..<HASH> 100644 --- a/commerce-api/src/main/java/com/liferay/commerce/model/CPDefinitionInventory.java +++ b/commerce-api/src/main/java/com/liferay/commerce/model/CPDefinitionInventory.java @@ -55,4 +55,6 @@ public interface CPDefinitionInventory extends CPDefinitionInventoryModel, return CPDefinitionInventory.class; } }; + + public int[] getAllowedOrderQuantitiesArray(); } \ No newline at end of file diff --git a/commerce-api/src/main/java/com/liferay/commerce/model/CPDefinitionInventoryWrapper.java b/commerce-api/src/main/java/com/liferay/commerce/model/CPDefinitionInventoryWrapper.java index <HASH>..<HASH> 100644 --- a/commerce-api/src/main/java/com/liferay/commerce/model/CPDefinitionInventoryWrapper.java +++ b/commerce-api/src/main/java/com/liferay/commerce/model/CPDefinitionInventoryWrapper.java @@ -228,6 +228,11 @@ public class CPDefinitionInventoryWrapper implements CPDefinitionInventory, return _cpDefinitionInventory.getAllowedOrderQuantities(); } + @Override + public int[] getAllowedOrderQuantitiesArray() { + return _cpDefinitionInventory.getAllowedOrderQuantitiesArray(); + } + /** * Returns the back orders of this cp definition inventory. *
COMMERCE-0 Regenerate (gw buildService)
liferay_com-liferay-commerce
train
2cafb9df164e97384e661bb4e7c8060c5b47e18d
diff --git a/test/test.js b/test/test.js index <HASH>..<HASH> 100644 --- a/test/test.js +++ b/test/test.js @@ -6,6 +6,15 @@ describe('Text formatter', function () { setHeader: function () {} }; + var mockResponseWithFunction = function(obj) { + return { + setHeader: function (key, value) { + obj.key = key; + obj.value = value; + } + }; + }; + it('serializes a JSON object type of body', function (){ var body = {}; expect(formatter(mockRequest, mockResponse, body)).to.be.eql('{}'); @@ -19,34 +28,22 @@ describe('Text formatter', function () { describe('sets the content-length header', function() { it('to zero', function () { var body = ''; - var header; - var value; - var response = { - setHeader: function (_header, _value) { - header = _header; - value = _value; - } - }; + var header = {}; + var response = mockResponseWithFunction(header); formatter(mockRequest, response, body); - expect(header).to.be.eql('Content-Length'); - expect(value).to.be.eql(0); + expect(header.key).to.be.eql('Content-Length'); + expect(header.value).to.be.eql(0); }); it('of a JSON body', function () { var body = { test: 1 }; - var header; - var value; - var response = { - setHeader: function (_header, _value) { - header = _header; - value = _value; - } - }; + var header = {}; + var response = mockResponseWithFunction(header); formatter(mockRequest, response, body); - expect(header).to.be.eql('Content-Length'); - expect(value).to.be.eql(10); + expect(header.key).to.be.eql('Content-Length'); + expect(header.value).to.be.eql(10); }); });
mockResponseWithFunction added
restify_formatter-text
train
7c2999709cd8d97be13915fac19565c900f64aba
diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go
index <HASH>..<HASH> 100644
--- a/pkg/tsdb/cloudwatch/metric_find_query.go
+++ b/pkg/tsdb/cloudwatch/metric_find_query.go
@@ -62,7 +62,7 @@ var metricsMap = map[string][]string{
 	"AWS/DDoSProtection": {"DDoSDetected", "DDoSAttackBitsPerSecond", "DDoSAttackPacketsPerSecond", "DDoSAttackRequestsPerSecond"},
 	"AWS/DMS": {"CDCChangesDiskSource", "CDCChangesDiskTarget", "CDCChangesMemorySource", "CDCChangesMemoryTarget", "CDCIncomingChanges", "CDCLatencySource", "CDCLatencyTarget", "CDCThroughputBandwidthSource", "CDCThroughputBandwidthTarget", "CDCThroughputRowsSource", "CDCThroughputRowsTarget", "CPUUtilization", "FreeStorageSpace", "FreeableMemory", "FullLoadThroughputBandwidthSource", "FullLoadThroughputBandwidthTarget", "FullLoadThroughputRowsSource", "FullLoadThroughputRowsTarget", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "ReadIOPS", "ReadLatency", "ReadThroughput", "SwapUsage", "WriteIOPS", "WriteLatency", "WriteThroughput"},
 	"AWS/DocDB": {"BackupRetentionPeriodStorageUsed", "BufferCacheHitRatio", "ChangeStreamLogSize", "CPUUtilization", "DatabaseConnections", "DBInstanceReplicaLag", "DBClusterReplicaLagMaximum", "DBClusterReplicaLagMinimum", "DiskQueueDepth", "EngineUptime", "FreeableMemory", "FreeLocalStorage", "NetworkReceiveThroughput", "NetworkThroughput", "NetworkTransmitThroughput", "ReadIOPS", "ReadLatency", "ReadThroughput", "SnapshotStorageUsed", "SwapUsage", "TotalBackupStorageBilled", "VolumeBytesUsed", "VolumeReadIOPs", "VolumeWriteIOPs", "WriteIOPS", "WriteLatency", "WriteThroughput"},
-	"AWS/DX": {"ConnectionBpsEgress", "ConnectionBpsIngress", "ConnectionCRCErrorCount", "ConnectionLightLevelRx", "ConnectionLightLevelTx", "ConnectionPpsEgress", "ConnectionPpsIngress", "ConnectionState"},
+	"AWS/DX": {"ConnectionBpsEgress", "ConnectionBpsIngress", "ConnectionCRCErrorCount", "ConnectionLightLevelRx", "ConnectionLightLevelTx", "ConnectionPpsEgress", "ConnectionPpsIngress", "ConnectionState", "VirtualInterfaceBpsEgress", "VirtualInterfaceBpsIngress", "VirtualInterfacePpsEgress", "VirtualInterfacePpsIngress"},
 	"AWS/DAX": {"CPUUtilization", "NetworkPacketsIn", "NetworkPacketsOut", "GetItemRequestCount", "BatchGetItemRequestCount", "BatchWriteItemRequestCount", "DeleteItemRequestCount", "PutItemRequestCount", "UpdateItemRequestCount", "TransactWriteItemsCount", "TransactGetItemsCount", "ItemCacheHits", "ItemCacheMisses", "QueryCacheHits", "QueryCacheMisses", "ScanCacheHits", "ScanCacheMisses", "TotalRequestCount", "ErrorRequestCount", "FaultRequestCount", "FailedRequestCount", "QueryRequestCount", "ScanRequestCount", "ClientConnections", "EstimatedDbSize", "EvictedSize"},
 	"AWS/DynamoDB": {"ConditionalCheckFailedRequests", "ConsumedReadCapacityUnits", "ConsumedWriteCapacityUnits", "OnlineIndexConsumedWriteCapacity", "OnlineIndexPercentageProgress", "OnlineIndexThrottleEvents", "PendingReplicationCount", "ProvisionedReadCapacityUnits", "ProvisionedWriteCapacityUnits", "ReadThrottleEvents", "ReplicationLatency", "ReturnedBytes", "ReturnedItemCount", "ReturnedRecordsCount", "SuccessfulRequestLatency", "SystemErrors", "ThrottledRequests", "TimeToLiveDeletedItemCount", "UserErrors", "WriteThrottleEvents"},
 	"AWS/EBS": {"BurstBalance", "VolumeConsumedReadWriteOps", "VolumeIdleTime", "VolumeQueueLength", "VolumeReadBytes", "VolumeReadOps", "VolumeThroughputPercentage", "VolumeTotalReadTime", "VolumeTotalWriteTime", "VolumeWriteBytes", "VolumeWriteOps"},
@@ -155,7 +155,7 @@ var dimensionsMap = map[string][]string{
 	"AWS/DDoSProtection": {"ResourceArn", "AttackVector"},
 	"AWS/DMS": {"ReplicationInstanceIdentifier", "ReplicationTaskIdentifier"},
 	"AWS/DocDB": {"DBClusterIdentifier", "DBInstanceIdentifier", "Role"},
-	"AWS/DX": {"ConnectionId"},
+	"AWS/DX": {"ConnectionId", "OpticalLaneNumber", "VirtualInterfaceId"},
 	"AWS/DAX": {"Account", "ClusterId", "NodeId"},
 	"AWS/DynamoDB": {"GlobalSecondaryIndexName", "Operation", "ReceivingRegion", "StreamLabel", "TableName"},
 	"AWS/EBS": {"VolumeId"},
Cloudwatch: Add support for AWS DirectConnect virtual interface metrics and add missing dimensions (#<I>) * Cloudwatch: Add support for AWS DirectConnect virtual interface metrics. * Cloudwatch: Adding dimensions to the AWS/DX namespace for virtual network interface based DirectConnect.
grafana_grafana
train
a66888d05e299eb9030c9c80dd0ad16d4f0836ec
diff --git a/sucks.py b/sucks.py
index <HASH>..<HASH> 100644
--- a/sucks.py
+++ b/sucks.py
@@ -190,7 +190,7 @@ class VacBot(ClientXMPP):
         c.send()
 
     def wrap_command(self, ctl):
-        q = self.make_iq_query(xmlns=u'com:ctl', ito=self.vacuum + '@126.ecorobot.net/atom',
+        q = self.make_iq_query(xmlns=u'com:ctl', ito=self.vacuum['did'] + '@' + self.vacuum['class'] + '.ecorobot.net/atom',
                                ifrom=self.user + '@' + self.domain + '/' + self.resource)
         q['type'] = 'set'
         for child in q.xml:
@@ -396,8 +396,8 @@ def run(actions, charge, debug):
     if actions:
         config = read_config()
         api = EcoVacsAPI(config['device_id'], config['email'], config['password_hash'])
-        vacuum_id = api.devices()[0]['did']
-        vacbot = VacBot(api.uid, api.REALM, api.resource, api.user_access_token, vacuum_id)
+        vacuum = api.devices()[0]
+        vacbot = VacBot(api.uid, api.REALM, api.resource, api.user_access_token, vacuum)
         vacbot.connect_and_wait_until_ready()
 
         for action in actions:
diff --git a/test_sucks.py b/test_sucks.py
index <HASH>..<HASH> 100644
--- a/test_sucks.py
+++ b/test_sucks.py
@@ -75,6 +75,21 @@ def test_should_run():
     assert_almost_equal(count, 9000, delta=200)
 
 
+def test_wrap_command():
+    v = VacBot('20170101abcdefabcdefa', 'ecouser.net', 'abcdef12', 'A1b2C3d4efghijklmNOPQrstuvwxyz12',
+               {"did": "E0000000001234567890", "class": "126", "nick": "bob"})
+    c = str(v.wrap_command(Clean(1).to_xml()))
+    assert_true(re.search(r'from="20170101abcdefabcdefa@ecouser.net/abcdef12"', c))
+    assert_true(re.search(r'to="E0000000001234567890@126.ecorobot.net/atom"', c))
+
+
+def test_model_variation():
+    v = VacBot('20170101abcdefabcdefa', 'ecouser.net', 'abcdef12', 'A1b2C3d4efghijklmNOPQrstuvwxyz12',
+               {"did": "E0000000001234567890", "class": "141", "nick": "bob"})
+    c = str(v.wrap_command(Clean(1).to_xml()))
+    assert_true(re.search(r'to="E0000000001234567890@141.ecorobot.net/atom"', c))
+b
 
 def test_main_api_called():
     with requests_mock.mock() as m:
         r1 = m.get(re.compile('user/login'),
@@ -92,22 +107,24 @@ def test_main_api_called():
 def test_device_lookup():
     api = make_api()
     with requests_mock.mock() as m:
-        device_id = 'E0000693817603910264'
+        device_id = 'E0000001234567890123'
         r = m.post(re.compile('user.do'),
                    text='{"todo": "result", "devices": [{"did": "%s", "class": "126", "nick": "bob"}], "result": "ok"}' % device_id)
         d = api.devices()
         assert_equals(r.call_count, 1)
         assert_equals(len(d), 1)
-        assert_equals(d[0]['did'], device_id)
+        vacuum = d[0]
+        assert_equals(vacuum['did'], device_id)
+        assert_equals(vacuum['class'], '126')
 
 
 def make_api():
     with requests_mock.mock() as m:
         m.get(re.compile('user/login'),
-              text='{"time": 1511200804243, "data": {"accessToken": "7a375650b0b1efd780029284479c4e41", "uid": "2017102559f0ee63c588d", "username": null, "email": "william-ecovacs@pota.to", "country": "us"}, "code": "0000", "msg": "X"}')
+              text='{"time": 1511200804243, "data": {"accessToken": "0123456789abcdef0123456789abcdef", "uid": "20170101abcdefabcdefa", "username": null, "email": "username@example.com", "country": "us"}, "code": "0000", "msg": "X"}')
         m.get(re.compile('user/getAuthCode'),
-              text='{"time": 1511200804607, "data": {"authCode": "5c28dac1ff580210e11292df57e87bef"}, "code": "0000", "msg": "X"}')
+              text='{"time": 1511200804607, "data": {"authCode": "abcdef01234567890abcdef012345678"}, "code": "0000", "msg": "X"}')
         m.post(re.compile('user.do'),
-              text='{"todo": "result", "token": "jt5O7oDR3gPHdVKCeb8Czx8xw8mDXM6s", "result": "ok", "userId": "2017102559f0ee63c588d", "resource": "f8d99c4d"}')
+              text='{"todo": "result", "token": "base64base64base64base64base64ba", "result": "ok", "userId": "20170101abcdefabcdefa", "resource": "abcdef12"}')
         return EcoVacsAPI("long_device_id", "account_id", "password_hash")
Possibly adding support for other models.
wpietri_sucks
train
b2cd152e5add7e5971a78dd249d8995cef999daf
diff --git a/lib/bibknowledge_dblayer.py b/lib/bibknowledge_dblayer.py index <HASH>..<HASH> 100644 --- a/lib/bibknowledge_dblayer.py +++ b/lib/bibknowledge_dblayer.py @@ -60,6 +60,19 @@ def get_kbs_info(kbtypeparam="", searchkbname=""): out.append(mydict) return out + +def get_all_kb_names(): + """Returns all knowledge base names + @return list of names + """ + out = [] + res = run_sql("""SELECT name FROM knwKB""") + for row in res: + out.append(row[0]) + return out + + + def get_kb_id(kb_name): """Returns the id of the kb with given name""" res = run_sql("""SELECT id FROM knwKB WHERE name LIKE %s""",
BibIndex: centralisation of synonym treatment * Removes CFG_BIBINDEX_SYNONYM_KBRS variable and moves the per-index synonym definitions to the database. Adapts BibIndex Admin interface accordingly. (addresses #<I>)
inveniosoftware-attic_invenio-knowledge
train
7c91b2e8f3686b6b765ea5771d903ce2bb55f8f3
diff --git a/camera.js b/camera.js index <HASH>..<HASH> 100644 --- a/camera.js +++ b/camera.js @@ -58,20 +58,27 @@ vglModule.camera = function() { var m_computeProjectMatrixTime = ogs.vgl.timestamp(); /** - * Set position of the camera - */ - this.setPosition = function(x, y, z) { - m_position = vec3.create([ x, y, z ]); - this.modified(); - }; - - /** * Get position of the camera */ this.position = function() { return m_position; }; + /** + * Set position of the camera + */ + this.setPosition = function(x, y, z) { + m_position = vec3.create([ x, y, z ]); + this.modified(); + }; + + /** + * Get focal point of the camera + */ + this.focalPoint = function() { + return m_focalPoint; + }; + /** * Set focal point of the camera */ @@ -81,11 +88,11 @@ vglModule.camera = function() { }; /** - * Get focal point of the camera + * Get view-up direction of camera */ - this.focalPoint = function() { - return m_focalPoint; - }; + this.viewUpDirection = function() { + return m_viewUp; + } /** * Set view-up direction of the camera @@ -96,6 +103,21 @@ vglModule.camera = function() { }; /** + * Get center of rotation for camera + */ + this.centerOfRotation = function() { + return m_centerOrRotation; + } + + /** + * Set center of rotation for camera + */ + this.setCenterOfRotation = function(centerOfRotation) { + m_centerOrRotation = centerOfRotation; + this.modified(); + } + + /** * Get clipping range of the camera */ this.getClippingRange = function() {
Added API to get center of rotation
OpenGeoscience_vgl
train
a51b66a6a0038c09be1c54d90efdbc843f588012
diff --git a/View/Index/EntitiesToIndexViewTransformer.php b/View/Index/EntitiesToIndexViewTransformer.php index <HASH>..<HASH> 100644 --- a/View/Index/EntitiesToIndexViewTransformer.php +++ b/View/Index/EntitiesToIndexViewTransformer.php @@ -149,8 +149,9 @@ class EntitiesToIndexViewTransformer extends AbstractEntityToViewTransformer $head->addItem($field, $headItem); } - - $head->addItem('action_widgets', new HeadItem('interface.actions')); + if (!empty($configuration['view']['index']['action_widgets'])) { + $head->addItem('action_widgets', new HeadItem('interface.actions')); + } return $head; } @@ -185,15 +186,16 @@ class EntitiesToIndexViewTransformer extends AbstractEntityToViewTransformer $bodyRow->addItem($field, new BodyRowItem($content)); } + if (!empty($configuration['view']['index']['action_widgets'])) { + $actionWidgets = ''; - $actionWidgets = ''; + foreach ($configuration['view']['index']['action_widgets'] as $widgetGeneratorAlias) { + $actionWidgets .= $this->widgetGeneratorPool->getWidgetGenerator($widgetGeneratorAlias)->generate($entity); + } - foreach ($configuration['view']['index']['action_widgets'] as $widgetGeneratorAlias) { - $actionWidgets .= $this->widgetGeneratorPool->getWidgetGenerator($widgetGeneratorAlias)->generate($entity); + $bodyRow->addItem('action_widgets', new BodyRowItem($actionWidgets)); } - $bodyRow->addItem('action_widgets', new BodyRowItem($actionWidgets)); - $body->addRow($bodyRow); }
Render action widgets column only if it is not empty.
DarvinStudio_DarvinAdminBundle
train
c2b3b06f8a0c7c4b062b8b0d04a41a04572fe6be
diff --git a/citrination_client/search/dataset/query/dataset_query.py b/citrination_client/search/dataset/query/dataset_query.py
index <HASH>..<HASH> 100644
--- a/citrination_client/search/dataset/query/dataset_query.py
+++ b/citrination_client/search/dataset/query/dataset_query.py
@@ -10,7 +10,7 @@ class DatasetQuery(Serializable):
     """
 
     def __init__(self, logic=None, simple=None, id=None, is_featured=None, name=None, description=None,
-                 owner=None, email=None, query=None, **kwargs):
+                 owner=None, email=None, updated_at=None, query=None, **kwargs):
         """
         Constructor.
 
@@ -22,6 +22,8 @@ class DatasetQuery(Serializable):
         :param description: One or more :class:`Filter` objects with filters against the description field.
         :param owner: One or more :class:`Filter` objects with filters against the owner field.
         :param email: One or more :class:`Filter` objects with filters against the email field.
+        :param updated_at: One or more :class:`Filter` objects with filters against the time that the dataset was
+            last updated.
         :param query: One or more :class:`DatasetQuery` objects with nested queries.
         """
         self._logic = None
@@ -40,6 +42,8 @@ class DatasetQuery(Serializable):
         self.owner = owner
         self._email = None
         self.email = email
+        self._updated_at = None
+        self.updated_at = updated_at
         self._query = None
         self.query = query
 
@@ -140,6 +144,18 @@ class DatasetQuery(Serializable):
         self._email = None
 
     @property
+    def updated_at(self):
+        return self._updated_at
+
+    @updated_at.setter
+    def updated_at(self, updated_at):
+        self._updated_at = self._get_object(Filter, updated_at)
+
+    @updated_at.deleter
+    def updated_at(self):
+        self._updated_at = None
+
+    @property
     def query(self):
         return self._query
 
diff --git a/citrination_client/search/pif/query/pif_system_query.py b/citrination_client/search/pif/query/pif_system_query.py
index <HASH>..<HASH> 100644
--- a/citrination_client/search/pif/query/pif_system_query.py
+++ b/citrination_client/search/pif/query/pif_system_query.py
@@ -18,9 +18,9 @@ class PifSystemQuery(BaseObjectQuery):
    """

    def __init__(self, logic=None, simple=None, extract_as=None, extract_all=None, extract_when_missing=None,
-                 tags=None, length=None, offset=None, uid=None, names=None, ids=None, classifications=None,
-                 source=None, quantity=None, chemical_formula=None, composition=None, properties=None,
-                 preparation=None, references=None, sub_systems=None, query=None, **kwargs):
+                 tags=None, length=None, offset=None, uid=None, updated_at=None, names=None, ids=None,
+                 classifications=None, source=None, quantity=None, chemical_formula=None, composition=None,
+                 properties=None, preparation=None, references=None, sub_systems=None, query=None, **kwargs):
        """
        Constructor.

@@ -34,6 +34,8 @@ class PifSystemQuery(BaseObjectQuery):
        :param length: One or more :class:`FieldQuery` operations against the length field.
        :param offset: One or more :class:`FieldQuery` operations against the offset field.
        :param uid: One or more :class:`Filter` objects with the filters against the uid field.
+        :param updated_at: One or more :class:`Filter` objects with filters against the time that the PIF record was
+            last updated.
        :param names: One or more :class:`FieldQuery` objects with queries against the names field.
        :param ids: One or more :class:`IdQuery` objects with queries against the ids field.
        :param classifications: One or more :class:`ClassificationQuery` objects with queries against the
@@ -54,6 +56,8 @@ class PifSystemQuery(BaseObjectQuery):
                          extract_when_missing=extract_when_missing, tags=tags, length=length, offset=offset, **kwargs)
        self._uid = None
        self.uid = uid
+        self._updated_at = None
+        self.updated_at = updated_at
        self._names = None
        self.names = names
        self._ids = None
@@ -92,6 +96,18 @@ class PifSystemQuery(BaseObjectQuery):
        self._uid = None

    @property
+    def updated_at(self):
+        return self._updated_at
+
+    @updated_at.setter
+    def updated_at(self, updated_at):
+        self._updated_at = self._get_object(Filter, updated_at)
+
+    @updated_at.deleter
+    def updated_at(self):
+        self._updated_at = None
+
+    @property
    def names(self):
        return self._names
add support for updatedAt in queries
CitrineInformatics_python-citrination-client
train
cb0f0dfe989c6a38de8e73e6378407c142348c0a
diff --git a/src/org/opencms/xml/containerpage/CmsContainerElementBean.java b/src/org/opencms/xml/containerpage/CmsContainerElementBean.java index <HASH>..<HASH> 100644 --- a/src/org/opencms/xml/containerpage/CmsContainerElementBean.java +++ b/src/org/opencms/xml/containerpage/CmsContainerElementBean.java @@ -270,7 +270,8 @@ public class CmsContainerElementBean implements Cloneable { String targetFolder, Map<String, String> individualSettings, boolean isCopyModels, - Locale locale) throws CmsException { + Locale locale) + throws CmsException { if (!(resourceType instanceof CmsResourceTypeXmlContent)) { throw new IllegalArgumentException(); @@ -371,6 +372,9 @@ public class CmsContainerElementBean implements Cloneable { */ public String editorHash() { + if (m_editorHash == null) { + m_editorHash = m_elementId.toString() + getSettingsHash(); + } return m_editorHash; }
Fixing flex caching issue when using macro formatters.
alkacon_opencms-core
train
9bfff650a4f6666b486c4ad48170c264607a208a
diff --git a/bench/format/plot.py b/bench/format/plot.py index <HASH>..<HASH> 100644 --- a/bench/format/plot.py +++ b/bench/format/plot.py @@ -166,6 +166,9 @@ class TimeSeriesCollection(): fig.set_size_inches(5,3.7) fig.set_dpi(90) plt.savefig(out_fname) + fig.set_size_inches(20,14.8) + fig.set_dpi(300) + plt.savefig(out_fname + '_large') def plot(self, out_fname, normalize = False): assert self.data @@ -203,6 +206,9 @@ class TimeSeriesCollection(): fig.set_size_inches(5,3.7) fig.set_dpi(90) plt.savefig(out_fname, bbox_inches="tight") + fig.set_size_inches(20,14.8) + fig.set_dpi(300) + plt.savefig(out_fname + '_large', bbox_inches="tight") def stats(self): res = {} @@ -276,6 +282,9 @@ class PlotCollection(): fig.set_size_inches(5,3.7) fig.set_dpi(90) plt.savefig(out_fname, bbox_inches="tight") + fig.set_size_inches(20,14.8) + fig.set_dpi(300) + plt.savefig(out_fname + '_large') #A few useful derivation functions #take discret derivative of a series (shorter by 1)
Push large versions of graphs too.
rethinkdb_rethinkdb
train
4f0e254ccd1cac3f339d2bbb0d1ef6725da05722
diff --git a/nodeshot/views.py b/nodeshot/views.py index <HASH>..<HASH> 100755 --- a/nodeshot/views.py +++ b/nodeshot/views.py @@ -139,24 +139,23 @@ def info(request): devices = [] entry = {} for d in Device.objects.all().order_by('node__status'): - try: - entry['status'] = "on" if d.node.status == 'a' else "off" - entry['device_type'] = d.type - entry['node_name'] = d.node.name - entry['name'] = d.name - entry['ips'] = [ip['ipv4_address'] for ip in d.interface_set.values('ipv4_address')] if d.interface_set.count() > 0 else "" - entry['macs'] = [mac['mac_address'] if mac['mac_address'] != None else '' for mac in d.interface_set.values('mac_address')] if d.interface_set.count() > 0 else "" - # heuristic count for good representation of the signal bar (from 0 to 100) - entry['signal_bar'] = signal_to_bar(d.max_signal) if d.max_signal < 0 else 0 - entry['signal'] = d.max_signal - links = Link.objects.filter(from_interface__device = d) - for l in links: - l.signal_bar = signal_to_bar(l.dbm) if l.to_interface.mac_address not in entry['macs'] else links.remove(l) - entry['links'] = links - entry['ssids'] = [ssid['ssid'] for ssid in d.interface_set.values('ssid')] if d.interface_set.count() > 0 else "" - entry['nodeid'] = d.node.id - devices.append(entry) - except: - pass + entry['status'] = "on" if d.node.status == 'a' else "off" + entry['device_type'] = d.type + entry['node_name'] = d.node.name + entry['name'] = d.name + entry['ips'] = [ip['ipv4_address'] for ip in d.interface_set.values('ipv4_address')] if d.interface_set.count() > 0 else "" + entry['macs'] = [mac['mac_address'] if mac['mac_address'] != None else '' for mac in d.interface_set.values('mac_address')] if d.interface_set.count() > 0 else "" + # heuristic count for good representation of the signal bar (from 0 to 100) + #entry['signal_bar'] = signal_to_bar(d.max_signal) if d.max_signal < 0 else 0 + #entry['signal'] = d.max_signal + links = Link.objects.filter(from_interface__device = d) + # convert QuerySet in list + links = list(links) + for l in links: + l.signal_bar = signal_to_bar(l.dbm) if l.to_interface.mac_address not in entry['macs'] else links.remove(l) + entry['links'] = links + entry['ssids'] = [ssid['ssid'] for ssid in d.interface_set.values('ssid')] if d.interface_set.count() > 0 else "" + entry['nodeid'] = d.node.id + devices.append(entry) entry = {} return render_to_response('info.html',{'devices': devices} ,context_instance=RequestContext(request))
Fixed info_tab bug caused by max_signal field
ninuxorg_nodeshot
train
0f08343ade640f4d4c6e4c48b4ff5726152b66cf
diff --git a/script/debugger/debugview.js b/script/debugger/debugview.js index <HASH>..<HASH> 100644 --- a/script/debugger/debugview.js +++ b/script/debugger/debugview.js @@ -28,8 +28,8 @@ define('bigscreenplayer/debugger/debugview', staticBox.id = 'staticBox'; staticBox.style.position = 'absolute'; - staticBox.style.width = '26%'; - staticBox.style.right = '5%'; + staticBox.style.width = '30%'; + staticBox.style.right = '1%'; staticBox.style.top = '15%'; staticBox.style.bottom = '25%'; staticBox.style.backgroundColor = '#1D1D1D'; @@ -68,24 +68,39 @@ define('bigscreenplayer/debugger/debugview', var dynamicLogs = logData.dynamic; var LINES_TO_DISPLAY = 29; if (dynamicLogs.length === 0) { - logContainer.innerHTML = ''; + logContainer.textContent = ''; } dynamicLogs = dynamicLogs.slice(-LINES_TO_DISPLAY); - logContainer.innerHTML = dynamicLogs.join('\n'); + logContainer.textContent = dynamicLogs.join('\n'); - var staticLogString = ''; - logData.static.forEach(function (log) { - staticLogString = staticLogString + log.key + ': ' + log.value + '\n'; - }); + logData.static.forEach(updateStaticElements); + } - staticContainer.innerHTML = staticLogString; + function updateStaticElements (log) { + var existingElement = document.getElementById(log.key); + var text = log.key + ': ' + log.value; + if (existingElement) { + if (text !== existingElement.textContent) { + existingElement.textContent = text; + } + } else { + createNewStaticElement(log.key, log.value); + } } - function tearDown () { - var logBox = document.getElementById('logBox'); - var staticBox = document.getElementById('staticBox'); + function createNewStaticElement (key, value) { + var staticLog = document.createElement('div'); + + staticLog.id = key; + staticLog.style.paddingBottom = '1%'; + staticLog.style.borderBottom = '1px solid white'; + staticLog.textContent = key + ': ' + value; + staticContainer.appendChild(staticLog); + } + + function tearDown () { DOMHelpers.safeRemoveElement(logBox); DOMHelpers.safeRemoveElement(staticBox); diff --git a/script/mediasources.js b/script/mediasources.js index <HASH>..<HASH> 100644 --- a/script/mediasources.js +++ b/script/mediasources.js @@ -252,10 +252,10 @@ define('bigscreenplayer/mediasources', function updateDebugOutput () { DebugTool.keyValue({key: 'available cdns', value: availableCdns()}); - DebugTool.keyValue({key: 'url', value: getCurrentUrl()}); + DebugTool.keyValue({key: 'url', value: stripQueryParamsAndHash(getCurrentUrl())}); DebugTool.keyValue({key: 'available subtitle cdns', value: availableSubtitlesCdns()}); - DebugTool.keyValue({key: 'subtitles url', value: getCurrentSubtitlesUrl()}); + DebugTool.keyValue({key: 'subtitles url', value: stripQueryParamsAndHash(getCurrentSubtitlesUrl())}); } return {
Debug tool readability improvements (#<I>) * Improved debug-tool readability. * Refactored log rendering
bbc_bigscreen-player
train
d0043925217efb044d92736b1ec138d1d1be634b
diff --git a/src/Illuminate/Cache/Repository.php b/src/Illuminate/Cache/Repository.php index <HASH>..<HASH> 100755 --- a/src/Illuminate/Cache/Repository.php +++ b/src/Illuminate/Cache/Repository.php @@ -477,7 +477,7 @@ class Repository implements ArrayAccess, CacheContract */ public function tags($names) { - if (! method_exists($this->store, 'tags')) { + if (! $this->supportsTags()) { throw new BadMethodCallException('This cache store does not support tagging.'); } @@ -502,6 +502,33 @@ class Repository implements ArrayAccess, CacheContract } /** + * Calculate the number of seconds for the given TTL. + * + * @param \DateTimeInterface|\DateInterval|int $ttl + * @return int + */ + protected function getSeconds($ttl) + { + $duration = $this->parseDateInterval($ttl); + + if ($duration instanceof DateTimeInterface) { + $duration = Carbon::now()->diffInRealSeconds($duration, false); + } + + return (int) $duration > 0 ? $duration : 0; + } + + /** + * Determine if the current store supports tags. + * + * @return bool + */ + public function supportsTags() + { + return method_exists($this->store, 'tags'); + } + + /** * Get the default cache time. * * @return int|null @@ -614,23 +641,6 @@ class Repository implements ArrayAccess, CacheContract } /** - * Calculate the number of seconds for the given TTL. - * - * @param \DateTimeInterface|\DateInterval|int $ttl - * @return int - */ - protected function getSeconds($ttl) - { - $duration = $this->parseDateInterval($ttl); - - if ($duration instanceof DateTimeInterface) { - $duration = Carbon::now()->diffInRealSeconds($duration, false); - } - - return (int) $duration > 0 ? $duration : 0; - } - - /** * Handle dynamic calls into macros or pass missing methods to the store. * * @param string $method diff --git a/tests/Cache/CacheRepositoryTest.php b/tests/Cache/CacheRepositoryTest.php index <HASH>..<HASH> 100755 --- a/tests/Cache/CacheRepositoryTest.php +++ b/tests/Cache/CacheRepositoryTest.php @@ -7,8 +7,10 @@ use DateInterval; use DateTime; use DateTimeImmutable; use Illuminate\Cache\ArrayStore; +use Illuminate\Cache\FileStore; use Illuminate\Cache\RedisStore; use Illuminate\Cache\Repository; +use Illuminate\Cache\TaggableStore; use Illuminate\Container\Container; use Illuminate\Contracts\Cache\Store; use Illuminate\Events\Dispatcher; @@ -312,6 +314,22 @@ class CacheRepositoryTest extends TestCase $repo->tags('foo', 'bar', 'baz'); } + public function testTaggableRepositoriesSupportTags() + { + $taggable = m::mock(TaggableStore::class); + $taggableRepo = new Repository($taggable); + + $this->assertTrue($taggableRepo->supportsTags()); + } + + public function testNonTaggableRepositoryDoesNotSupportTags() + { + $nonTaggable = m::mock(FileStore::class); + $nonTaggableRepo = new Repository($nonTaggable); + + $this->assertFalse($nonTaggableRepo->supportsTags()); + } + protected function getRepository() { $dispatcher = new Dispatcher(m::mock(Container::class));
[8.x] Implement supportsTags() on the Cache Repository (#<I>) * Implement supportsTags() on the Cache Repository in order to find out more elegantly if the currently chosen cache store is taggable. * fixed test variable names * Update Repository.php
laravel_framework
train
8b15b5ba064c014bda3b907d13443f8c17129697
diff --git a/raven/base.py b/raven/base.py index <HASH>..<HASH> 100644 --- a/raven/base.py +++ b/raven/base.py @@ -235,7 +235,7 @@ class Client(object): __excepthook__ = sys.excepthook def handle_exception(*exc_info): - self.captureException(exc_info=exc_info) + self.captureException(exc_info=exc_info, level='fatal') __excepthook__(*exc_info) sys.excepthook = handle_exception
Mark process errors as fatal (#<I>) * Mark process errors as fatal @getsentry/python
getsentry_raven-python
train
8f977548b579cdb53877e5000b6048ceb6d46963
diff --git a/lib/hako/container.rb b/lib/hako/container.rb index <HASH>..<HASH> 100644 --- a/lib/hako/container.rb +++ b/lib/hako/container.rb @@ -18,7 +18,6 @@ module Hako cpu memory links - mount_points ].each do |name| define_method(name) do @definition[name] @@ -33,6 +32,16 @@ module Hako @expanded_env ||= expand_env(@definition.fetch('env', {})) end + def mount_points + @definition['mount_points'].map do |mount_point| + { + source_volume: mount_point.fetch('source_volume'), + container_path: mount_point.fetch('container_path'), + read_only: mount_point.fetch('read_only', false), + } + end + end + private PROVIDERS_KEY = '$providers' diff --git a/lib/hako/schedulers/ecs_definition_comparator.rb b/lib/hako/schedulers/ecs_definition_comparator.rb index <HASH>..<HASH> 100644 --- a/lib/hako/schedulers/ecs_definition_comparator.rb +++ b/lib/hako/schedulers/ecs_definition_comparator.rb @@ -6,9 +6,10 @@ module Hako @expected_container = expected_container end - CONTAINER_KEYS = %i[image cpu memory links docker_labels mount_points].freeze + CONTAINER_KEYS = %i[image cpu memory links docker_labels].freeze PORT_MAPPING_KEYS = %i[container_port host_port protocol].freeze ENVIRONMENT_KEYS = %i[name value].freeze + MOUNT_POINT_KEYS = %i[source_volume container_path read_only].freeze def different?(actual_container) unless actual_container @@ -34,6 +35,15 @@ module Hako end end + if @expected_container[:mount_points].size != actual_container.mount_points.size + return true + end + @expected_container[:mount_points].zip(actual_container.mount_points) do |e, a| + if different_members?(e, a, MOUNT_POINT_KEYS) + return true + end + end + false end
Fix mount_points normalization and comparison
eagletmt_hako
train
3890d97504d5dcac832f74c13f2aa5615fd60187
diff --git a/salesforce/models.py b/salesforce/models.py index <HASH>..<HASH> 100644 --- a/salesforce/models.py +++ b/salesforce/models.py @@ -54,6 +54,7 @@ class SalesforceModelBase(ModelBase): result = super(SalesforceModelBase, cls).__new__(cls, name, bases, attrs, **kwargs) if models.Model not in bases and supplied_db_table is None: result._meta.db_table = result._meta.concrete_model._meta.object_name + result._meta.original_attrs['db_table'] = result._meta.db_table return result def add_to_class(cls, name, value):
Fixed bug in makemigrations if the db_table name is omitted in Meta. It was not possible to omit the db_table attribute when running a test with an alternate database: SALESFORCE_DB_ALIAS='default'. The command makemigrations must be run again after the fix if any previous migration was created incorrectly without a db_table attribute.
django-salesforce_django-salesforce
train
36ee2cedcc9a03e31fadf2f6dacd4197b7a82b1d
diff --git a/itests/src/test/java/org/openengsb/itests/remoteclient/SecureSampleConnector.java b/itests/src/test/java/org/openengsb/itests/remoteclient/SecureSampleConnector.java index <HASH>..<HASH> 100644 --- a/itests/src/test/java/org/openengsb/itests/remoteclient/SecureSampleConnector.java +++ b/itests/src/test/java/org/openengsb/itests/remoteclient/SecureSampleConnector.java @@ -50,8 +50,7 @@ import org.slf4j.LoggerFactory; */ public final class SecureSampleConnector { - private static final String registerMessage = - "" + private static final String registerMessage = "" + "{\n" + " \"principal\" : \"admin\",\n" + " \"credentials\" : {\n" @@ -64,12 +63,14 @@ public final class SecureSampleConnector { + " },\n" + " \"message\" : {\n" + " \"methodCall\" : {\n" - + " \"classes\" : [ \"org.openengsb.core.api.model.ConnectorId\", \"org.openengsb.core.api.model.ConnectorDescription\" ],\n" + + " \"classes\" : [ \"org.openengsb.core.api.model.ConnectorDefinition\", " + + " \"org.openengsb.core.api.model.ConnectorDescription\" ],\n" + " \"methodName\" : \"create\",\n" - + " \"realClassImplementation\" : [ \"org.openengsb.core.api.model.ConnectorId\", \"org.openengsb.core.api.model.ConnectorDescription\" ],\n" + + " \"realClassImplementation\" : [ \"org.openengsb.core.api.model.ConnectorDefinition\", " + + " \"org.openengsb.core.api.model.ConnectorDescription\" ],\n" + " \"args\" : [ {\n" - + " \"domainType\" : \"example\",\n" - + " \"connectorType\" : \"external-connector-proxy\",\n" + + " \"domainId\" : \"example\",\n" + + " \"connectorId\" : \"external-connector-proxy\",\n" + " \"instanceId\" : \"example-remote\"\n" + " }, {\n" + " \"properties\" : {\n" @@ -104,12 +105,12 @@ public final class SecureSampleConnector { + " },\n" + " \"message\" : {\n" + " \"methodCall\" : {\n" - + " \"classes\" : [ \"org.openengsb.core.api.model.ConnectorId\" ],\n" + + " \"classes\" : [ \"org.openengsb.core.api.model.ConnectorDefinition\" ],\n" + " \"methodName\" : \"delete\",\n" - + " \"realClassImplementation\" : [ \"org.openengsb.core.api.model.ConnectorId\" ],\n" + + " \"realClassImplementation\" : [ \"org.openengsb.core.api.model.ConnectorDefinition\" ],\n" + " \"args\" : [ {\n" - + " \"domainType\" : \"example\",\n" - + " \"connectorType\" : \"external-connector-proxy\",\n" + + " \"domainId\" : \"example\",\n" + + " \"connectorId\" : \"external-connector-proxy\",\n" + " \"instanceId\" : \"example-remote\"\n" + " } ],\n" + " \"metaData\" : {\n"
[OPENENGSB-<I>] fixed failing integration test
openengsb_openengsb
train
c5b388f5392d42915aceef275dd53bb444e41722
diff --git a/fastlane/lib/fastlane/actions/scan.rb b/fastlane/lib/fastlane/actions/scan.rb index <HASH>..<HASH> 100644 --- a/fastlane/lib/fastlane/actions/scan.rb +++ b/fastlane/lib/fastlane/actions/scan.rb @@ -77,6 +77,20 @@ module Fastlane workspace: "App.xcworkspace", scheme: "MyTests", clean: false + )', + '#Build For Testing + scan( + derived_data_path: "my_folder", + build_for_testing: true + )', + '# run tests using derived data from prev. build + scan( + derived_data_path: "my_folder", + test_without_building: true + )', + '# or run it from an existing xctestrun package + scan( + xctestrun: "/path/to/mytests.xctestrun" )' ] end
[scan] improve docs for build-for-testing, test-without-building and xctestrun (#<I>)
fastlane_fastlane
train
5435375a782c5bc80963df5906edcf2e53284041
diff --git a/src/txkube/_model.py b/src/txkube/_model.py index <HASH>..<HASH> 100644 --- a/src/txkube/_model.py +++ b/src/txkube/_model.py @@ -8,14 +8,20 @@ state. from zope.interface import implementer -from pyrsistent import CheckedPSet, PClass, field, pmap_field, freeze +from pyrsistent import CheckedPSet, PClass, field, pmap_field, pset, freeze from . import IObject from ._invariants import instance_of, provider_of class ObjectMetadata(PClass): + _required = pset({u"name", u"uid"}) + items = pmap_field(unicode, object) + __invariant__ = lambda m: ( + len(m._required - pset(m.items)) == 0, + u"Required metadata missing: {}".format(m._required - pset(m.items)), + ) @property def name(self): @@ -27,6 +33,8 @@ class ObjectMetadata(PClass): class NamespacedObjectMetadata(ObjectMetadata): + _required = ObjectMetadata._required.add(u"namespace") + @property def namespace(self): return self.items[u"namespace"] @@ -46,15 +54,6 @@ class Namespace(PClass): ) @classmethod - def list_location(cls): - return (u"api", u"v1", u"namespaces", u"") - - - def create_location(self): - return (u"api", u"v1", u"namespaces", u"") - - - @classmethod def default(cls): """ Get the default namespace. @@ -85,15 +84,6 @@ class ConfigMap(PClass): ) @classmethod - def list_location(cls): - return (u"api", u"v1", u"configmaps", u"") - - - def create_location(self): - return (u"api", u"v1", u"namespaces", self.metadata.namespace, u"configmaps", u"") - - - @classmethod def from_raw(cls, raw): return cls( metadata=ObjectMetadata( diff --git a/src/txkube/_network.py b/src/txkube/_network.py index <HASH>..<HASH> 100644 --- a/src/txkube/_network.py +++ b/src/txkube/_network.py @@ -66,7 +66,7 @@ class _NetworkClient(object): """ Issue a I{POST} to create the given object. """ - url = self.kubernetes.base_url.child(*obj.create_location()) + url = self.kubernetes.base_url.child(*collection_location(obj)) d = self._post(url, { u"metadata": thaw(obj.metadata.items), }) @@ -81,7 +81,7 @@ class _NetworkClient(object): """ Issue a I{GET} to retrieve objects of a given kind. """ - url = self.kubernetes.base_url.child(*kind.list_location()) + url = self.kubernetes.base_url.child(*collection_location(kind)) d = self._get(url) d.addCallback(check_status) d.addCallback(readBody) @@ -98,6 +98,25 @@ class _NetworkClient(object): return d +def collection_location(obj): + """ + Get the URL for the collection of objects like ``obj``. + + :param obj: Either a type representing a Kubernetes object kind or an + instance of such a type. + + :return tuple[unicode]: Some path segments to stick on to a base URL to + construct the location of the collection of objects like the one + given. + """ + collection = obj.kind.lower() + u"s" + try: + namespace = obj.metadata.namespace + except AttributeError: + return (u"api", u"v1", collection, u"") + return (u"api", u"v1", u"namespaces", namespace, collection, u"") + + @implementer(IKubernetes) @attr.s(frozen=True) class _NetworkKubernetes(object):
Get rid of list_location and create_location Hopefully we will not need this on each "kind" Python type.
LeastAuthority_txkube
train
6989ae2fa4a1c6f95e95c1bb950bffea4b22de5c
diff --git a/src/MvcCore/Ext/Views/Helpers/DataUrlHelper.php b/src/MvcCore/Ext/Views/Helpers/DataUrlHelper.php index <HASH>..<HASH> 100644 --- a/src/MvcCore/Ext/Views/Helpers/DataUrlHelper.php +++ b/src/MvcCore/Ext/Views/Helpers/DataUrlHelper.php @@ -7,7 +7,7 @@ * For the full copyright and license information, please view * the LICENSE.md file that are distributed with this source code. * - * @copyright Copyright (c) 2016 Tom Flídr (https://github.com/mvccore/mvccore) + * @copyright Copyright (c) 2016 Tom Flidr (https://github.com/mvccore) * @license https://mvccore.github.io/docs/mvccore/5.0.0/LICENCE.md */ @@ -20,18 +20,18 @@ namespace MvcCore\Ext\Views\Helpers; * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs * @method \MvcCore\Ext\Views\Helpers\DataUrlHelper GetInstance() */ -class DataUrlHelper implements \MvcCore\Ext\Views\Helpers\IHelper -{ +class DataUrlHelper implements \MvcCore\Ext\Views\Helpers\IHelper { + /** * MvcCore Extension - View Helper - Assets - version: * Comparison by PHP function version_compare(); * @see http://php.net/manual/en/function.version-compare.php */ - const VERSION = '5.0.0-alpha'; + const VERSION = '5.0.0'; /** * Currently rendered view instance. - * @var \MvcCore\View|\MvcCore\IView|NULL + * @var \MvcCore\View|NULL */ protected $view = NULL; @@ -58,7 +58,7 @@ class DataUrlHelper implements \MvcCore\Ext\Views\Helpers\IHelper * This method sets these protected object references: * - `DataUrlHelper::$view` as `\MvcCore\View|\MvcCore\IView` * - `DataUrlHelper::$request` as `\MvcCore\Request|\MvcCore\IRequest` - * @param \MvcCore\View|\MvcCore\IView $view + * @param \MvcCore\View $view * @return \MvcCore\Ext\Views\Helpers\DataUrlHelper */ public function SetView (\MvcCore\IView $view) {
Fixed copyright heading, fixed php docs comments with unnecessary interfaces, upgraded version constant to <I>
mvccore_ext-view-helper-dataurl
train
ea866f6590a0a5f3c6a7724459871b3d5912f50e
diff --git a/client.js b/client.js index <HASH>..<HASH> 100644 --- a/client.js +++ b/client.js @@ -94,7 +94,7 @@ RingpopClient.prototype.destroy = function destroy(callback) { RingpopClient.prototype._request = function _request(opts, endpoint, head, body, callback) { var self = this; - if (this.subChannel.destroyed) { + if (this.subChannel && this.subChannel.destroyed) { process.nextTick(function onTick() { callback(ChannelDestroyedError({ endpoint: endpoint, @@ -104,7 +104,9 @@ RingpopClient.prototype._request = function _request(opts, endpoint, head, body, return; } - if (this.subChannel.topChannel.destroyed) { + if (this.subChannel && + this.subChannel.topChannel && + this.subChannel.topChannel.destroyed) { process.nextTick(function onTick() { callback(ChannelDestroyedError({ endpoint: endpoint,
Check for subChannel and topChannel
esatterwhite_skyring
train
c27e14960e71e647675ff9a05f4b3236aa9ba1e9
diff --git a/lib/client/client.js b/lib/client/client.js index <HASH>..<HASH> 100644 --- a/lib/client/client.js +++ b/lib/client/client.js @@ -282,13 +282,13 @@ Client.prototype.bind = function bind (name, controls: controls }) - // While we are binding to the server, register the callback as error handler + // Connection errors will be reported to the bind callback too (useful when the LDAP server is not available) var self = this function callbackWrapper (err, ret) { - self.removeListener('error', callbackWrapper) + self.removeListener('connectError', callbackWrapper) callback(err, ret) } - this.addListener('error', callbackWrapper) + this.addListener('connectError', callbackWrapper) return this._send(req, [errors.LDAP_SUCCESS], null, callbackWrapper, _bypass) }
Avoid handling all errors now that blocking errors are handled properly
joyent_node-ldapjs
train
a2070db2a918305daebf88ce74abd669d78292f8
diff --git a/src/org/openscience/cdk/config/ElementPTFactory.java b/src/org/openscience/cdk/config/ElementPTFactory.java
index <HASH>..<HASH> 100644
--- a/src/org/openscience/cdk/config/ElementPTFactory.java
+++ b/src/org/openscience/cdk/config/ElementPTFactory.java
@@ -23,16 +23,16 @@
 */
package org.openscience.cdk.config;

+import org.openscience.cdk.PeriodicTableElement;
+import org.openscience.cdk.config.elements.ElementPTReader;
+import org.openscience.cdk.tools.LoggingTool;
+
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OptionalDataException;
import java.util.Vector;

-import org.openscience.cdk.PeriodicTableElement;
-import org.openscience.cdk.config.elements.ElementPTReader;
-import org.openscience.cdk.tools.LoggingTool;
-
/**
 * Used to store and return data of a particular chemicalElement. As this class is a
 * singleton class, one gets an instance with:
@@ -48,7 +48,7 @@ public class ElementPTFactory
{

	private static ElementPTFactory efac = null;

-	private Vector elements = null;
+	private Vector<PeriodicTableElement> elements = null;
	private boolean debug = false;
	private LoggingTool logger;

@@ -120,20 +120,18 @@ public class ElementPTFactory
	 *@param  symbol  An element symbol to search for
	 *@return         An array of element that matches the given element symbol
	 */
-	public PeriodicTableElement getElement(String symbol)
-	{
-		for (int f = 0; f < elements.size(); f++)
-		{
-			if (((PeriodicTableElement) elements.elementAt(f)).getSymbol().equals(symbol))
-			try {
-				return (PeriodicTableElement) ((PeriodicTableElement) elements.elementAt(f)).clone();
-			} catch (CloneNotSupportedException e) {
-				logger.error("Could not clone PeriodicTableElement: ", e.getMessage());
 				logger.debug(e);
-			}
-
-		}
-		return null;
+	public PeriodicTableElement getElement(String symbol) {
+		for (PeriodicTableElement element : elements) {
+			if (element.getSymbol().equals(symbol)) {
+				try {
+					return (PeriodicTableElement) element.clone();
+				} catch (CloneNotSupportedException e) {
+					logger.error("Could not clone PeriodicTableElement: ", e.getMessage());
				logger.debug(e);
+				}
+			}
+		}
+		return null;
	}

	/**
diff --git a/src/org/openscience/cdk/config/elements/ElementPTHandler.java b/src/org/openscience/cdk/config/elements/ElementPTHandler.java
index <HASH>..<HASH> 100644
--- a/src/org/openscience/cdk/config/elements/ElementPTHandler.java
+++ b/src/org/openscience/cdk/config/elements/ElementPTHandler.java
@@ -23,13 +23,13 @@
 */
package org.openscience.cdk.config.elements;

-import java.util.Vector;
-
import org.openscience.cdk.PeriodicTableElement;
import org.openscience.cdk.tools.LoggingTool;
import org.xml.sax.Attributes;
import org.xml.sax.helpers.DefaultHandler;

+import java.util.Vector;
+
/**
 * Reads an element list in CML2 format. An example definition is:
 * <pre>
@@ -61,7 +61,7 @@ public class ElementPTHandler extends DefaultHandler
	private int scalarType;
	private LoggingTool logger;
	private String currentChars;
-	private Vector elements;
+	private Vector<PeriodicTableElement> elements;

	public PeriodicTableElement elementType;
	public String currentElement;
@@ -77,7 +77,7 @@ public class ElementPTHandler extends DefaultHandler
	 *
	 * @return A Vector object with all isotopes
	 */
-	public Vector getElements()
+	public Vector<PeriodicTableElement> getElements()
	{
		return elements;
	}
@@ -86,7 +86,7 @@ public class ElementPTHandler extends DefaultHandler

	public void startDocument()
	{
-		elements = new Vector();
+		elements = new Vector<PeriodicTableElement>();
		scalarType = SCALAR_UNSET;
		elementType = null;
	}
diff --git a/src/org/openscience/cdk/config/elements/ElementPTReader.java b/src/org/openscience/cdk/config/elements/ElementPTReader.java
index <HASH>..<HASH> 100644
--- a/src/org/openscience/cdk/config/elements/ElementPTReader.java
+++ b/src/org/openscience/cdk/config/elements/ElementPTReader.java
@@ -27,15 +27,16 @@
 */
package org.openscience.cdk.config.elements;

-import java.io.IOException;
-import java.io.Reader;
-import java.util.Vector;
-
+import org.openscience.cdk.PeriodicTableElement;
import org.openscience.cdk.tools.LoggingTool;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;

+import java.io.IOException;
+import java.io.Reader;
+import java.util.Vector;
+
/**
 * Reader that instantiates an XML parser and customized handler to process
 * the isotope information in the CML2 isotope data file. The Reader first
@@ -119,8 +120,8 @@ public class ElementPTReader {
	 * @return a Vector of Isotope's. Returns an empty vector is some reading error
	 * occured.
	 */
-	public Vector readElements() {
-		Vector elements = new Vector();
+	public Vector<PeriodicTableElement> readElements() {
+		Vector<PeriodicTableElement> elements = new Vector<PeriodicTableElement>();
		try {
			parser.setFeature("http://xml.org/sax/features/validation", false);
			logger.info("Deactivated validation");
Converted to typed collections and simplified code since lists now specify the type of object being stored git-svn-id: <URL>
cdk_cdk
train
fad3895b919f0d489cf6e485562892841f900dd8
diff --git a/spec/host_name_spec.rb b/spec/host_name_spec.rb index <HASH>..<HASH> 100644 --- a/spec/host_name_spec.rb +++ b/spec/host_name_spec.rb @@ -16,7 +16,10 @@ describe HostName do describe "resolv" do it "should resolv the IP Address for the host name" do - subject.resolv.address.should == example_ip + ip = subject.resolv + + ip.should_not be_nil + ip.address.should == example_ip end it "should return nil for unresolved host names" do @@ -26,9 +29,10 @@ describe HostName do describe "resolv_all" do it "should resolv all IP Addresses for the host name" do - ips = subject.resolv_all.map { |ip| ip.address } + ips = subject.resolv_all - ips.should include(example_ip) + ips.should_not be_empty + ips.find { |ip| ip.address == example_ip }.should_not be_nil end it "should return an empty Array for unresolved host names" do diff --git a/spec/ip_address.rb b/spec/ip_address.rb index <HASH>..<HASH> 100644 --- a/spec/ip_address.rb +++ b/spec/ip_address.rb @@ -17,6 +17,7 @@ describe IPAddress do it "should resolve host-names to IP Addresses" do ip = IPAddress.resolv(example_domain) + ip.should_not be_nil ip.address.should == example_ip end @@ -27,9 +28,13 @@ describe IPAddress do describe "IPAddress.resolv_all" do it "should resolve host-names to multiple IP Addresses" do - ips = IPAddress.resolv_all(example_domain).map { |ip| ip.address } + ips = IPAddress.resolv_all(example_domain) - ips.should include(example_ip) + ips.should_not be_empty + + ips.find { |ip| + ip.address == example_ip + }.should_not be_nil end it "should return an empty Array for unresolved domain names" do @@ -40,8 +45,10 @@ describe IPAddress do describe "resolv" do it "should reverse lookup the host-name for an IP Address" do ip = IPAddress.new(:address => example_ip) - - ip.resolv.address.should == example_domain + host_name = ip.resolv + + host_name.should_not be_nil + host_name.address.should == example_domain end it "should return nil for unresolved domain names" do @@ -54,10 +61,13 @@ describe IPAddress do describe "resolv_all" do it "should reverse lookup the host-names for an IP Address" do ip = IPAddress.new(:address => example_ip) + host_names = ip.resolv_all + + host_names.should_not be_empty? - ip.resolv_all.any? { |host_name| + host_names.find { |host_name| host_name.address == example_domain - }.should == true + }.should_not be_nil end it "should return an empty Array for unresolved domain names" do
Add more nil checks to specs.
ronin-ruby_ronin
train
0807024439a526db3b74f32f7e45e5e8e5044bb8
diff --git a/tests/logs_unittest.py b/tests/logs_unittest.py index <HASH>..<HASH> 100644 --- a/tests/logs_unittest.py +++ b/tests/logs_unittest.py @@ -14,37 +14,59 @@ # version 3 along with OpenQuake. If not, see # <http://www.gnu.org/licenses/lgpl-3.0.txt> for a copy of the LGPLv3 License. - -from amqplib import client_0_8 as amqp -import logging -import os -import multiprocessing -import sys +import logging.handlers import unittest -import jpype - -from openquake import flags from openquake import java -from openquake import logs -from openquake import job -from openquake.utils import config - -from tests.utils.helpers import cleanup_loggers -class LogsTestCase(unittest.TestCase): +class JavaLogsTestCase(unittest.TestCase): def setUp(self): - java.jvm() + self.jvm = java.jvm() + self.handler = logging.handlers.BufferingHandler(capacity=float('inf')) + self.logger = logging.getLogger('java') + self.logger.addHandler(self.handler) + self.logger.setLevel(logging.DEBUG) + + def tearDown(self): + self.logger.removeHandler(self.handler) + self.logger.setLevel(logging.NOTSET) def test_java_logging(self): - msg = 'This is a test java log entry' - root_logger = jpype.JClass("org.apache.log4j.Logger").getRootLogger() - other_logger = jpype.JClass("org.apache.log4j.Logger").getLogger('grr') + jlogger_class = self.jvm.JClass("org.apache.log4j.Logger") + root_logger = jlogger_class.getRootLogger() + other_logger = jlogger_class.getLogger('other_logger') - root_logger.error(msg) + root_logger.error('java error msg') other_logger.warn('warning message') other_logger.debug('this is verbose debug info') - root_logger.fatal('something bad has happend') + root_logger.fatal('something bad has happened') root_logger.info('information message') - 1/0 + + records = self.handler.buffer + self.assertEqual(records[0].levelno, logging.ERROR) + self.assertEqual(records[0].levelname, 'ERROR') + self.assertEqual(records[0].name, 'java') + self.assertEqual(records[0].msg, 'java error msg') + self.assertEqual(records[0].threadName, 'main') + self.assertEqual(records[0].processName, 'java') + + self.assertEqual(records[1].levelno, logging.WARNING) + self.assertEqual(records[1].levelname, 'WARNING') + self.assertEqual(records[1].name, 'java.other_logger') + self.assertEqual(records[1].msg, 'warning message') + + self.assertEqual(records[2].levelno, logging.DEBUG) + self.assertEqual(records[2].levelname, 'DEBUG') + self.assertEqual(records[2].name, 'java.other_logger') + self.assertEqual(records[2].msg, 'this is verbose debug info') + + self.assertEqual(records[3].levelno, logging.CRITICAL) + self.assertEqual(records[3].levelname, 'CRITICAL') + self.assertEqual(records[3].name, 'java') + self.assertEqual(records[3].msg, 'something bad has happened') + + self.assertEqual(records[4].levelno, logging.INFO) + self.assertEqual(records[4].levelname, 'INFO') + self.assertEqual(records[4].name, 'java') + self.assertEqual(records[4].msg, 'information message')
added real java logging unittest
gem_oq-engine
train