diff
stringlengths
65
26.7k
message
stringlengths
7
9.92k
diff --git a/Event/StrictDispatcher.php b/Event/StrictDispatcher.php index <HASH>..<HASH> 100644 --- a/Event/StrictDispatcher.php +++ b/Event/StrictDispatcher.php @@ -11,7 +11,7 @@ namespace Claroline\CoreBundle\Event; -use Symfony\Component\EventDispatcher\EventDispatcher; +use Symfony\Component\EventDispatcher\EventDispatcherInterface; use JMS\DiExtraBundle\Annotation as DI; /** @@ -35,7 +35,7 @@ class StrictDispatcher * "ed" = @DI\Inject("event_dispatcher") * }) */ - public function __construct(EventDispatcher $ed) + public function __construct(EventDispatcherInterface $ed) { $this->eventDispatcher = $ed; }
[CoreBundle] Fixing StrictDispatcher.
diff --git a/builder/vmware/iso/step_create_vmx.go b/builder/vmware/iso/step_create_vmx.go index <HASH>..<HASH> 100644 --- a/builder/vmware/iso/step_create_vmx.go +++ b/builder/vmware/iso/step_create_vmx.go @@ -477,13 +477,12 @@ func (s *stepCreateVMX) Run(_ context.Context, state multistep.StateBag) multist // try and convert the specified network to a device. device, err := netmap.NameIntoDevice(network) - // success. so we know that it's an actual network type inside netmap.conf if err == nil { + // success. so we know that it's an actual network type inside netmap.conf templateData.Network_Type = network templateData.Network_Device = device - - // otherwise, we were unable to find the type, so assume its a custom device. } else { + // otherwise, we were unable to find the type, so assume its a custom device. templateData.Network_Type = "custom" templateData.Network_Device = network }
move comments so indentation is more logical
diff --git a/udiskie/cli.py b/udiskie/cli.py index <HASH>..<HASH> 100644 --- a/udiskie/cli.py +++ b/udiskie/cli.py @@ -49,10 +49,9 @@ def get_backend(clsname, version=None): try: return udisks1() except DBusException: - msg = sys.exc_info()[1].get_dbus_message() log = logging.getLogger(__name__) - log.warning('Failed to connect UDisks1 dbus service: %s.\n' - 'Falling back to UDisks2 [experimental].' % (msg,)) + log.warning('Failed to connect UDisks1 dbus service.\n' + 'Falling back to UDisks2 [experimental].') return udisks2() elif version == 1: return udisks1()
Fix incorrect usage of GLib.GError GLib.GError has no method `.get_dbus_message()` like dbus.DBusException had. Instead, it has an attribute `.message`. But this is not very useful for the user anyway in the case, so we just leave it out.
diff --git a/salt/tops/varstack.py b/salt/tops/varstack.py index <HASH>..<HASH> 100644 --- a/salt/tops/varstack.py +++ b/salt/tops/varstack.py @@ -49,11 +49,9 @@ from __future__ import absolute_import import logging try: - HAS_VARSTACK = False import varstack - HAS_VARSTACK = True except ImportError: - pass + varstack = None # Set up logging log = logging.getLogger(__name__) @@ -63,10 +61,7 @@ __virtualname__ = 'varstack' def __virtual__(): - if not HAS_VARSTACK: - log.error("Can't find varstack master_top") - return False - return __virtualname__ + return (False, 'varstack not installed') if varstack is None else __virtualname__ def top(**kwargs):
Squelch error logging when varstack is not installed This gets rid of spurious errors when varstack isn't installed. Most people won't use this so these errors are not helpful.
diff --git a/wps.php b/wps.php index <HASH>..<HASH> 100644 --- a/wps.php +++ b/wps.php @@ -54,6 +54,10 @@ $whoops_handler = new PrettyPageHandler; $whoops_handler->addDataTableCallback( 'WP', function () { global $wp; + if ( ! $wp instanceof \WP ) { + return array(); + } + $output = get_object_vars( $wp ); unset( $output['private_query_vars'] ); unset( $output['public_query_vars'] ); @@ -68,6 +72,10 @@ $whoops_handler->addDataTableCallback( 'backtrace', function () { $whoops_handler->addDataTableCallback( 'WP_Query', function () { global $wp_query; + if ( ! $wp_query instanceof \WP_Query ) { + return array(); + } + $output = get_object_vars( $wp_query ); $output['query_vars'] = array_filter( $output['query_vars'] ); unset( $output['posts'] );
Added checks on global objects to make sense. Fixes #1
diff --git a/fritzconnection/core/processor.py b/fritzconnection/core/processor.py index <HASH>..<HASH> 100644 --- a/fritzconnection/core/processor.py +++ b/fritzconnection/core/processor.py @@ -326,7 +326,7 @@ class Service: values. Caches the dictionary once retrieved from _scpd. """ if self._state_variables is None: - self._state_variables = self._scpd.actions + self._state_variables = self._scpd.state_variables return self._state_variables def load_scpd(self, address, port):
Bugfix in service to return state_variables instead of actions.
diff --git a/cmd/xl-v1-multipart.go b/cmd/xl-v1-multipart.go index <HASH>..<HASH> 100644 --- a/cmd/xl-v1-multipart.go +++ b/cmd/xl-v1-multipart.go @@ -387,15 +387,16 @@ func (xl xlObjects) PutObjectPart(bucket, object, uploadID string, partID int, s // Initialize md5 writer. md5Writer := md5.New() + lreader := data // Limit the reader to its provided size > 0. if size > 0 { // This is done so that we can avoid erroneous clients sending // more data than the set content size. - data = io.LimitReader(data, size) + lreader = io.LimitReader(data, size) } // else we read till EOF. // Construct a tee reader for md5sum. - teeReader := io.TeeReader(data, md5Writer) + teeReader := io.TeeReader(lreader, md5Writer) // Erasure code data and write across all disks. sizeWritten, checkSums, err := erasureCreateFile(onlineDisks, minioMetaBucket, tmpPartPath, teeReader, xlMeta.Erasure.BlockSize, xl.dataBlocks, xl.parityBlocks, bitRotAlgo, xl.writeQuorum)
Layer LimitReader responsibly allowing sign verification to work (#<I>)
diff --git a/lib/rails3-jquery-autocomplete/simple_form_plugin.rb b/lib/rails3-jquery-autocomplete/simple_form_plugin.rb index <HASH>..<HASH> 100644 --- a/lib/rails3-jquery-autocomplete/simple_form_plugin.rb +++ b/lib/rails3-jquery-autocomplete/simple_form_plugin.rb @@ -49,27 +49,27 @@ module SimpleForm # label_method, value_method = detect_collection_methods association = object.send(reflection.name) - autocomplete_options[:value] = association.send(label_method) if association.respond_to? label_method - puts "label_method:#{label_method}" - puts "value: #{autocomplete_options[:value]}" - # + if association.respond_to? label_method + autocomplete_options[:value] = association.send(label_method) + end out = @builder.autocomplete_field( attribute_name, options[:url], autocomplete_options ) + hidden_options = if association.respond_to? value_method + new_options = {} + new_options[:value] = association.send(value_method) + input_html_options.merge new_options + else + input_html_options + end out << @builder.hidden_field( attribute_name, - rewrite_hidden_option(association.send(value_method)) if association.respond_to? value_method + hidden_options ) out.html_safe end - - def rewrite_hidden_option(value) - new_options = {} - new_options[:value] = value - input_html_options.merge new_options - end end end
before sending methods, check respond_to?
diff --git a/src/repl.js b/src/repl.js index <HASH>..<HASH> 100644 --- a/src/repl.js +++ b/src/repl.js @@ -139,11 +139,11 @@ function completer(text) { return [[], text]; } -var PROMPT_MULTI = chalk.bold("... "); -var PROMPT_SINGLE = chalk.bold("squiggle> "); - function prompt() { - return isMultiline ? PROMPT_MULTI : PROMPT_SINGLE; + var text = isMultiline ? + "......... " : + "squiggle> "; + return chalk.bold(text); } function interruptMessage() { @@ -162,6 +162,9 @@ function toggleMultilineMode(rl) { isMultiline = !isMultiline; rl.setPrompt(prompt()); rl.prompt(); + if (currentCode.length > 0 || rl.line.length > 0) { + rl.write("\n"); + } } function keybindHandler(rl, c, k) {
Tweaks the prompt a bit
diff --git a/drivers/k8055/driver-k8055.rb b/drivers/k8055/driver-k8055.rb index <HASH>..<HASH> 100644 --- a/drivers/k8055/driver-k8055.rb +++ b/drivers/k8055/driver-k8055.rb @@ -98,12 +98,14 @@ class K8055DigitalOutput < K8055Pin def write(value) begin @k8055.synchronize do - if value + if not value and not @state + # workaround to a libk8055 bug ? + elsif value @k8055.digital_on @index - else + elsif not value @k8055.digital_off @index end - @state = value + if value then @state = true else @state = false end end rescue puts "K8055 Error"
Workaround for a libk<I> bug in set_digital
diff --git a/lib/arjdbc/db2/adapter.rb b/lib/arjdbc/db2/adapter.rb index <HASH>..<HASH> 100644 --- a/lib/arjdbc/db2/adapter.rb +++ b/lib/arjdbc/db2/adapter.rb @@ -500,12 +500,13 @@ module ArJdbc change_column_null(table_name, column_name, options[:null]) end end - + # http://publib.boulder.ibm.com/infocenter/db2luw/v9r7/topic/com.ibm.db2.luw.admin.dbobj.doc/doc/t0020132.html - def remove_column(table_name, column_name) #:nodoc: - sql = "ALTER TABLE #{table_name} DROP COLUMN #{column_name}" - - as400? ? execute_and_auto_confirm(sql) : execute(sql) + def remove_column(table_name, *column_names) #:nodoc: + for column_name in column_names.flatten + sql = "ALTER TABLE #{table_name} DROP COLUMN #{column_name}" + as400? ? execute_and_auto_confirm(sql) : execute(sql) + end reorg_table(table_name) end
fix DB2 remove_column not supporting multiple column_names (since <I>)
diff --git a/src/wyil/transforms/VerificationCheck.java b/src/wyil/transforms/VerificationCheck.java index <HASH>..<HASH> 100644 --- a/src/wyil/transforms/VerificationCheck.java +++ b/src/wyil/transforms/VerificationCheck.java @@ -101,7 +101,7 @@ public class VerificationCheck implements Transform { } public static int getLimit() { - return 100; + return 500; } public void setLimit(int limit) {
I may have to have a separate process, or something which I can genuinely control
diff --git a/pkg/httputil/httputil.go b/pkg/httputil/httputil.go index <HASH>..<HASH> 100644 --- a/pkg/httputil/httputil.go +++ b/pkg/httputil/httputil.go @@ -27,6 +27,7 @@ import ( "net" "net/http" "net/url" + "os" "path" "strconv" "strings" @@ -63,7 +64,7 @@ func RequestEntityTooLargeError(conn http.ResponseWriter) { func ServeError(conn http.ResponseWriter, req *http.Request, err error) { conn.WriteHeader(http.StatusInternalServerError) - if IsLocalhost(req) { + if IsLocalhost(req) || os.Getenv("CAMLI_DEV_CAMLI_ROOT") != "" { fmt.Fprintf(conn, "Server error: %s\n", err) return }
httputil: allow full error serving when with devcam When hacking on e.g. importers, I sometimes want to use devcam with -hostname, to test with a non local context. In which case, when hitting an error httputil.ServeError would not print the full error, which makes it harder to debug. Change-Id: I2fb8c<I>d4f<I>fbf<I>addd6e7af<I>
diff --git a/mrcrowbar/common.py b/mrcrowbar/common.py index <HASH>..<HASH> 100644 --- a/mrcrowbar/common.py +++ b/mrcrowbar/common.py @@ -46,6 +46,9 @@ def serialise( obj, fields ): def file_path_recurse( *root_list ): for root in root_list: + if os.path.isfile( root ): + yield root + continue for path, dirs, files in os.walk( root ): for item in files: file_path = os.path.join( path, item )
common.file_path_recurse: fix case where root is a file
diff --git a/languagetool-wikipedia/src/main/java/org/languagetool/dev/wikipedia/WikipediaQuickCheck.java b/languagetool-wikipedia/src/main/java/org/languagetool/dev/wikipedia/WikipediaQuickCheck.java index <HASH>..<HASH> 100644 --- a/languagetool-wikipedia/src/main/java/org/languagetool/dev/wikipedia/WikipediaQuickCheck.java +++ b/languagetool-wikipedia/src/main/java/org/languagetool/dev/wikipedia/WikipediaQuickCheck.java @@ -233,7 +233,8 @@ public class WikipediaQuickCheck { final URL url = new URL(urlString); final String mediaWikiContent = check.getMediaWikiContent(url); final String plainText = check.getPlainText(mediaWikiContent); - final WikipediaQuickCheckResult checkResult = check.checkPage(plainText, new German()); + final Language lang = check.getLanguage(url); + final WikipediaQuickCheckResult checkResult = check.checkPage(plainText, lang); final ContextTools contextTools = new ContextTools(); contextTools.setContextSize(CONTEXT_SIZE); for (RuleMatch ruleMatch : checkResult.getRuleMatches()) {
main method: use the language from the URL, not German
diff --git a/lib/solargraph/pin/reference.rb b/lib/solargraph/pin/reference.rb index <HASH>..<HASH> 100644 --- a/lib/solargraph/pin/reference.rb +++ b/lib/solargraph/pin/reference.rb @@ -12,6 +12,7 @@ module Solargraph def resolve api_map unless @resolved + @resolved = true @name = api_map.find_fully_qualified_namespace(@name, pin.namespace) end end
Avoid infinite recursion when resolving reference pins.
diff --git a/jquery.csv.js b/jquery.csv.js index <HASH>..<HASH> 100755 --- a/jquery.csv.js +++ b/jquery.csv.js @@ -72,7 +72,7 @@ }; /** - * jQuery.CSV2Array(csvString) + * jQuery.CSVEntry2Array(csvString) * Converts a CSV string to a javascript array. * * @param {String} csv The string containing the raw CSV data. @@ -115,4 +115,5 @@ return a; }; -})( jQuery ); \ No newline at end of file +})( jQuery ); +
jquery.csv.js - added a quick documentation fix - added a newline at the end of the file
diff --git a/src/ProfileConfig.php b/src/ProfileConfig.php index <HASH>..<HASH> 100644 --- a/src/ProfileConfig.php +++ b/src/ProfileConfig.php @@ -31,7 +31,7 @@ class ProfileConfig extends Config 'listen' => '::', 'enableLog' => false, 'enableAcl' => false, - 'aclGroupList' => [], + 'aclPermissionList' => [], 'managementIp' => '127.0.0.1', 'reject4' => false, 'reject6' => false,
rename aclGroupList to aclPermissionList
diff --git a/src/main/java/org/dynjs/runtime/builtins/types/array/IsArray.java b/src/main/java/org/dynjs/runtime/builtins/types/array/IsArray.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/dynjs/runtime/builtins/types/array/IsArray.java +++ b/src/main/java/org/dynjs/runtime/builtins/types/array/IsArray.java @@ -15,9 +15,10 @@ public class IsArray extends AbstractNativeFunction { return true; } - if ( args[0] instanceof JSObject && ((JSObject)args[0]).hasExternalIndexedData() ) { - return true; - } + // TODO: Nodyn buffers should not be considered an array according to Array.isArray +// if ( args[0] instanceof JSObject && ((JSObject)args[0]).hasExternalIndexedData() ) { +// return true; +// } return false; }
Array.isArray should not return true for non-arrays. Removed the code that caused Array.isArray to return true for any JSObject that has externally indexed data. This feature was added for V8 parity, specifically when dealing with Node.js buffers, however Array.isArray(buffer) in node <I>.x returns false.
diff --git a/py/selenium/webdriver/common/desired_capabilities.py b/py/selenium/webdriver/common/desired_capabilities.py index <HASH>..<HASH> 100644 --- a/py/selenium/webdriver/common/desired_capabilities.py +++ b/py/selenium/webdriver/common/desired_capabilities.py @@ -27,22 +27,22 @@ class DesiredCapabilities(object): CHROME = {"browserName": "chrome", "version": "", - "platform": "any", + "platform": "ANY", "javascriptEnabled": True } HTMLUNIT = {"browserName": "htmlunit", "version": "", - "platform": "windows", + "platform": "ANY", "javascriptEnabled": True } IPHONE = {"browserName": "iphone", "version": "", - "platform": "windows", + "platform": "MAC", "javascriptEnabled": True } ANDROID = {"browserName": "android", "version": "", - "platform": "windows", + "platform": "LINUX", "javascriptEnabled": True }
DavidBurns corrected platforms for desired capabilities r<I>
diff --git a/internal/service/appflow/flow.go b/internal/service/appflow/flow.go index <HASH>..<HASH> 100644 --- a/internal/service/appflow/flow.go +++ b/internal/service/appflow/flow.go @@ -1113,12 +1113,11 @@ func ResourceFlow() *schema.Resource { }, }, "task_properties": { - Type: schema.TypeMap, - Optional: true, - ValidateFunc: validation.StringInSlice(appflow.OperatorPropertiesKeys_Values(), false), + Type: schema.TypeMap, + Optional: true, Elem: &schema.Schema{ Type: schema.TypeString, - ValidateFunc: validation.All(validation.StringMatch(regexp.MustCompile(`\S+`), "must not contain any whitespace characters"), validation.StringLenBetween(0, 2048)), + ValidateFunc: validation.StringLenBetween(0, 2048), }, }, "task_type": {
appflow: amend ValidateFunc for task_properties to avoid type conflict
diff --git a/lhc/file_format/vcf_/merger.py b/lhc/file_format/vcf_/merger.py index <HASH>..<HASH> 100644 --- a/lhc/file_format/vcf_/merger.py +++ b/lhc/file_format/vcf_/merger.py @@ -102,7 +102,7 @@ class VcfMerger(object): return sorted_tops def _updateSorting(self, sorted_tops, entry, idx): - key = (entry.chr, entry.pos) + key = (Chromosome.getIdentifier(entry.chr), entry.pos) sorted_tops.get(key, []).append(idx) def _mergeHeaders(self, hdrs):
fixed vcf merge chromosome out-of-order bug
diff --git a/prometheus_flask_exporter/__init__.py b/prometheus_flask_exporter/__init__.py index <HASH>..<HASH> 100644 --- a/prometheus_flask_exporter/__init__.py +++ b/prometheus_flask_exporter/__init__.py @@ -11,7 +11,7 @@ from flask import Flask, Response from flask import request, make_response, current_app from flask.views import MethodViewType from prometheus_client import Counter, Histogram, Gauge, Summary -from prometheus_client.exposition import choose_encoder +from prometheus_client.exposition import choose_formatter from werkzeug.serving import is_running_from_reloader if sys.version_info[0:2] >= (3, 4): @@ -278,7 +278,7 @@ class PrometheusMetrics(object): if 'PROMETHEUS_MULTIPROC_DIR' in os.environ or 'prometheus_multiproc_dir' in os.environ: multiprocess.MultiProcessCollector(registry) - generate_latest, content_type = choose_encoder(request.headers.get("Accept")) + generate_latest, content_type = choose_formatter(request.headers.get("Accept")) headers = {'Content-Type': content_type} return generate_latest(registry), 200, headers
change choose_encoder to choose_formatter
diff --git a/benchmarks/src/test/java/zipkin2/server/ServerIntegratedBenchmark.java b/benchmarks/src/test/java/zipkin2/server/ServerIntegratedBenchmark.java index <HASH>..<HASH> 100644 --- a/benchmarks/src/test/java/zipkin2/server/ServerIntegratedBenchmark.java +++ b/benchmarks/src/test/java/zipkin2/server/ServerIntegratedBenchmark.java @@ -138,7 +138,7 @@ class ServerIntegratedBenchmark { .withNetworkAliases("backend") .withCommand("backend") .withExposedPorts(9000) - .waitingFor(Wait.forHttp("/health")); + .waitingFor(Wait.forHttp("/actuator/health")); closer.register(backend::stop); GenericContainer<?> frontend = new GenericContainer<>("openzipkin/example-sleuth-webmvc") @@ -146,7 +146,7 @@ class ServerIntegratedBenchmark { .withNetworkAliases("frontend") .withCommand("frontend") .withExposedPorts(8081) - .waitingFor(Wait.forHttp("/health")); + .waitingFor(Wait.forHttp("/actuator/health")); closer.register(frontend::stop); GenericContainer<?> prometheus = new GenericContainer<>("prom/prometheus")
Actually sleuth is using actuator health..
diff --git a/docs/src/pages/discover-more/languages/Languages.js b/docs/src/pages/discover-more/languages/Languages.js index <HASH>..<HASH> 100644 --- a/docs/src/pages/discover-more/languages/Languages.js +++ b/docs/src/pages/discover-more/languages/Languages.js @@ -32,7 +32,7 @@ function Languages() { variant="body2" color="secondary" data-no-link="true" - href={`/${language.code === 'en' ? '' : language.code}/`} + href={language.code === 'en' ? '/' : `/${language.code}/`} > Documentation </Link>
[docs] Fix english language link (#<I>)
diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/document/ODatabaseDocumentTx.java b/core/src/main/java/com/orientechnologies/orient/core/db/document/ODatabaseDocumentTx.java index <HASH>..<HASH> 100755 --- a/core/src/main/java/com/orientechnologies/orient/core/db/document/ODatabaseDocumentTx.java +++ b/core/src/main/java/com/orientechnologies/orient/core/db/document/ODatabaseDocumentTx.java @@ -1206,16 +1206,7 @@ public class ODatabaseDocumentTx extends OListenerManger<ODatabaseListener> impl private void clearOwner() { final Thread current = Thread.currentThread(); - final Thread o = owner.get(); - - if (o == null) { - throw new IllegalStateException("Database is going to be closed but was not opened"); - } - - if (o != current || !owner.compareAndSet(current, null)) { - throw new IllegalStateException("Database was opened in other thread `" + o.getName() + "' thread but closed in `" + - current.getName() + "' thread"); - } + owner.compareAndSet(current, null); } @Override
Issue #<I> was fixed.
diff --git a/tensorforce/util/agent_util.py b/tensorforce/util/agent_util.py index <HASH>..<HASH> 100644 --- a/tensorforce/util/agent_util.py +++ b/tensorforce/util/agent_util.py @@ -25,7 +25,6 @@ from tensorforce.config import Config from tensorforce.exceptions.tensorforce_exceptions import TensorForceValueError from tensorforce.rl_agents.random_agent import RandomAgent from tensorforce.rl_agents.dqn_agent import DQNAgent -from tensorforce.rl_agents.double_dqn_agent import DoubleDQNAgent from tensorforce.rl_agents.naf_agent import NAFAgent @@ -63,6 +62,5 @@ def get_default_config(agent_type): agents = { 'RandomAgent': RandomAgent, 'DQNAgent': DQNAgent, - 'DoubleDQNAgent': DoubleDQNAgent, 'NAFAgent': NAFAgent }
removed ddqn agent from agent util
diff --git a/wal_e/piper.py b/wal_e/piper.py index <HASH>..<HASH> 100644 --- a/wal_e/piper.py +++ b/wal_e/piper.py @@ -124,7 +124,10 @@ def popen_sp(*args, **kwargs): # to the gevent hub. for fp_symbol in ['stdin', 'stdout', 'stderr']: value = getattr(proc, fp_symbol) + if value is not None: + # this branch is only taken if a descriptor is sent in + # with 'PIPE' mode. setattr(proc, fp_symbol, NonBlockPipeFileWrap(value)) return proc
Explain why there is a None guard
diff --git a/src/playbacks/hls/hls.js b/src/playbacks/hls/hls.js index <HASH>..<HASH> 100644 --- a/src/playbacks/hls/hls.js +++ b/src/playbacks/hls/hls.js @@ -153,7 +153,7 @@ export default class HLS extends HTML5VideoPlayback { fillLevels() { this._levels = this.hls.levels.map((level, index) => { - return {id: index , label: `${level.bitrate/1000}Kbps` + return {id: index, level: level, label: `${level.bitrate/1000}Kbps` }}) this.trigger(Events.PLAYBACK_LEVELS_AVAILABLE, this._levels) }
send the entire level object (including height and bitrate) to the PLAYBACK_LEVELS_AVAILABLE event
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -12,6 +12,7 @@ setup( description='Run commands and manipulate files locally or over SSH using the same interface', long_description=read("README.rst"), author='Michael Williamson', + author_email='mike@zwobble.org', url='http://github.com/mwilliamson/spur.py', keywords="ssh shell subprocess process", packages=['spur'],
Add author_email to setup.py
diff --git a/resources/lang/nl-NL/dashboard.php b/resources/lang/nl-NL/dashboard.php index <HASH>..<HASH> 100644 --- a/resources/lang/nl-NL/dashboard.php +++ b/resources/lang/nl-NL/dashboard.php @@ -35,6 +35,7 @@ return [ 'failure' => 'Er is een fout opgetreden bij het wijzigen van de incident update', ], ], + 'reported_by' => 'Reported by :user', 'add' => [ 'title' => 'Meld een incident', 'success' => 'Incident toegevoegd.',
New translations dashboard.php (Dutch)
diff --git a/pyam_analysis/core.py b/pyam_analysis/core.py index <HASH>..<HASH> 100644 --- a/pyam_analysis/core.py +++ b/pyam_analysis/core.py @@ -16,6 +16,12 @@ import seaborn as sns # ignore warnings warnings.filterwarnings('ignore') +try: + import ixmp + has_ix = True +except Exception: + has_ix = False + # disable autoscroll in Jupyter notebooks try: get_ipython().run_cell_magic(u'javascript', u'',
try to import 'ixmp' package (for reading timeseries from ix object)
diff --git a/database/migrations/2018_01_01_000000_create_permission_tables.php b/database/migrations/2018_01_01_000000_create_permission_tables.php index <HASH>..<HASH> 100644 --- a/database/migrations/2018_01_01_000000_create_permission_tables.php +++ b/database/migrations/2018_01_01_000000_create_permission_tables.php @@ -30,7 +30,7 @@ class CreatePermissionTables extends Migration }); Schema::create($tableNames['model_has_permissions'], function (Blueprint $table) use ($tableNames) { - $table->increments('id'); + $table->bigInteger('id'); $table->unsignedInteger('permission_id'); $table->morphs('model'); @@ -43,7 +43,7 @@ class CreatePermissionTables extends Migration }); Schema::create($tableNames['model_has_roles'], function (Blueprint $table) use ($tableNames) { - $table->increments('id'); + $table->bigInteger('id'); $table->unsignedInteger('role_id'); $table->morphs('model'); @@ -56,7 +56,7 @@ class CreatePermissionTables extends Migration }); Schema::create($tableNames['role_has_permissions'], function (Blueprint $table) use ($tableNames) { - $table->increments('id'); + $table->bigInteger('id'); $table->unsignedInteger('permission_id'); $table->unsignedInteger('role_id');
found issue in migration create_permission_table
diff --git a/yarl/__init__.py b/yarl/__init__.py index <HASH>..<HASH> 100644 --- a/yarl/__init__.py +++ b/yarl/__init__.py @@ -164,10 +164,17 @@ class URL: if host is None: raise ValueError( "Invalid URL: host is required for abolute urls.") + + try: + port = val.port + except ValueError: + raise ValueError( + "Invalid URL: port can't be converted to integer") + netloc = cls._make_netloc(val.username, val.password, host, - val.port, + port, encode=True) path = cls._PATH_QUOTER(val[2]) if netloc:
Issue#<I>: give friendlier error when port cant be converted to int (#<I>)
diff --git a/fastlane/lib/fastlane/actions/gradle.rb b/fastlane/lib/fastlane/actions/gradle.rb index <HASH>..<HASH> 100644 --- a/fastlane/lib/fastlane/actions/gradle.rb +++ b/fastlane/lib/fastlane/actions/gradle.rb @@ -185,8 +185,23 @@ module Fastlane "versionName" => "1.0.0", # ... } + ) + ``` + + You can use this to automatically [sign and zipalign](https://developer.android.com/studio/publish/app-signing.html) your app: + ```ruby + gradle( + task: "assemble", + build_type: "Release", + print_command: false, + properties: { + "android.injected.signing.store.file" => "keystore.jks", + "android.injected.signing.store.password" => "store_password", + "android.injected.signing.key.alias" => "key_alias", + "android.injected.signing.key.password" => "key_password", + } )', - '# If you need to pass sensitive information through the `gradle` action, and don"t want the generated command to be printed before it is run, you can suppress that: + '# If you need to pass sensitive information through the `gradle` action, and don\'t want the generated command to be printed before it is run, you can suppress that: gradle( # ... print_command: false
[gradle] example how to sign and zipalign app (#<I>) * [gradle] example how to sign and zipalign app - code example how to sign and zipalign the app with `gradle` - replace wrong " with correct (and escaped) \' * fix code example
diff --git a/tests/unittests/helpers.py b/tests/unittests/helpers.py index <HASH>..<HASH> 100644 --- a/tests/unittests/helpers.py +++ b/tests/unittests/helpers.py @@ -131,7 +131,7 @@ class ShoebotTestCase(TestCase): seed(0) - bot.run(code, verbose=True) + bot.run(code, verbose=verbose) def run_filename(self, filename, outputfile, windowed=False, namespace=None, verbose=True): """
Tests: Pass verbose option through to run_code
diff --git a/openquake/risklib/asset.py b/openquake/risklib/asset.py index <HASH>..<HASH> 100644 --- a/openquake/risklib/asset.py +++ b/openquake/risklib/asset.py @@ -775,7 +775,7 @@ class Exposure(object): expected_header = self._csv_header() fnames = [os.path.join(dirname, f) for f in csvnames.split()] for fname in fnames: - with open(fname) as f: + with open(fname, encoding='utf-8') as f: fields = next(csv.reader(f)) header = set(fields) if len(header) < len(fields): @@ -788,7 +788,7 @@ class Exposure(object): (fname, sorted(expected_header), sorted(header))) occupancy_periods = self.occupancy_periods.split() for fname in fnames: - with open(fname) as f: + with open(fname, encoding='utf-8') as f: for i, dic in enumerate(csv.DictReader(f), 1): asset = Node('asset', lineno=i) with context(fname, asset):
Force the exposure to be read as UTF-8
diff --git a/skyfield/tests/test_timelib.py b/skyfield/tests/test_timelib.py index <HASH>..<HASH> 100644 --- a/skyfield/tests/test_timelib.py +++ b/skyfield/tests/test_timelib.py @@ -46,6 +46,17 @@ def test_indexing_julian_date(): assert jd.ut1[0] == jd0.ut1 assert jd.delta_t == jd0.delta_t +def test_slicing_julian_date(): + jd = JulianDate(utc=(1974, 10, range(1, 6))) + assert jd.shape == (5,) + jd24 = jd[2:4] + assert jd24.shape == (2,) + assert (jd.tai[2:4] == jd24.tai).all() + assert (jd.tt[2:4] == jd24.tt).all() + assert (jd.tdb[2:4] == jd24.tdb).all() + assert (jd.ut1[2:4] == jd24.ut1).all() + assert jd.delta_t == jd24.delta_t + def test_early_utc(): jd = JulianDate(utc=(1915, 12, 2, 3, 4, 5.6786786)) assert abs(jd.tt - 2420833.6283317441) < epsilon
Confirm that we can also slice JulianDates
diff --git a/salt/states/cron.py b/salt/states/cron.py index <HASH>..<HASH> 100644 --- a/salt/states/cron.py +++ b/salt/states/cron.py @@ -24,7 +24,7 @@ parameters used by Salt to define the various timing values for a cron job: the cron job is for another user, it is necessary to specify that user with the ``user`` parameter. -In a time, a long ago when making changes to an existing cron job, +In a time, a long ago (before 2014.2) when making changes to an existing cron job, the name declaration is the parameter used to uniquely identify the job, so if an existing cron that looks like this: @@ -50,6 +50,7 @@ then a new cron job will be added to the user's crontab. The current behavior is still relying on that mecanism, but you can also specify an identifier to identify your crontabs: +.. versionadded:: 2014.2 .. code-block:: yaml date > /tmp/crontest: @@ -60,6 +61,7 @@ specify an identifier to identify your crontabs: - hour: 2 And, some monthes later, you modify it: +.. versionadded:: 2014.2 .. code-block:: yaml superscript > /tmp/crontest:
add some version infos for cron
diff --git a/tests/test_function_manager.py b/tests/test_function_manager.py index <HASH>..<HASH> 100644 --- a/tests/test_function_manager.py +++ b/tests/test_function_manager.py @@ -82,6 +82,9 @@ def test_call_to(): def find_symbol_name(self, *args, **kwargs): return 'unknown' + def is_hooked(self, addr): + return False + project = dummy() project.arch = ArchAMD64()
Fix the test_function_manager project test stub to have a is_hooked method
diff --git a/test/client/sync-mediator.spec.js b/test/client/sync-mediator.spec.js index <HASH>..<HASH> 100644 --- a/test/client/sync-mediator.spec.js +++ b/test/client/sync-mediator.spec.js @@ -55,8 +55,7 @@ describe('Test the sync via mediator', function() { it('create works.', function() { var ts = new Date().getTime(); - mediator.publish('sync:'+config.datasetId+':create', {id:1, value:'test1'}, ts); - return mediator.promise('done:sync:'+config.datasetId+':create:'+ts) + return mediator.request('sync:'+config.datasetId+':create', [{id:1, value:'test1'}, ts], {uid: ts}) .then(function() { return mediator.request('sync:'+config.datasetId+':list:load') })
Used the mediator.request promise, passing in the create arguments as an array
diff --git a/lib/poolparty/chef.rb b/lib/poolparty/chef.rb index <HASH>..<HASH> 100644 --- a/lib/poolparty/chef.rb +++ b/lib/poolparty/chef.rb @@ -56,9 +56,7 @@ module PoolParty def node_bootsrapped?(remote_instance) # "(gem list; dpkg -l chef) | grep -q chef && echo 'chef installed'" - remote_instance.ssh([ - 'if [ -z "$(gem list | grep chef)" ]; then echo ""; else echo "chef installed"; fi' - ], :do_sudo => false).empty? + remote_instance.ssh(['if [ ! -n "$(gem list 2>/dev/null | grep chef)" ]; then echo "chef installed"; fi'], :do_sudo => false).empty? rescue false end def node_bootstrap!(remote_instance) remote_instance.ssh([ @@ -76,7 +74,11 @@ module PoolParty end def method_missing(m,*args,&block) - cloud.send(m,*args,&block) if cloud.respond_to?(m) + if cloud.respond_to?(m) + cloud.send(m,*args,&block) + else + super + end end end
Adding checking to see if chef is installed on the remote instance
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ from setuptools import setup setup(name='lifxlan', - version='0.2', + version='0.2.1', description='API for local communication with LIFX devices over a LAN.', url='http://github.com/mclarkk/lifxlan', author='Meghan Clark',
<I> for IP addr support
diff --git a/cnxpublishing/db.py b/cnxpublishing/db.py index <HASH>..<HASH> 100644 --- a/cnxpublishing/db.py +++ b/cnxpublishing/db.py @@ -1241,10 +1241,9 @@ def _upsert_persons(cursor, person_ids, lookup_func): # Check for existing records to update. cursor.execute("SELECT personid from persons where personid = ANY (%s)", (person_ids,)) - try: - existing_person_ids = [x[0] for x in cursor.fetchall()] - except TypeError: - existing_person_ids = [] + + existing_person_ids = [x[0] for x in cursor.fetchall()] + new_person_ids = [p for p in person_ids if p not in existing_person_ids] # Update existing records. @@ -1277,10 +1276,9 @@ def _upsert_users(cursor, user_ids, lookup_func): # Check for existing records to update. cursor.execute("SELECT username from users where username = ANY (%s)", (user_ids,)) - try: - existing_user_ids = [x[0] for x in cursor.fetchall()] - except TypeError: - existing_user_ids = [] + + existing_user_ids = [x[0] for x in cursor.fetchall()] + new_user_ids = [u for u in user_ids if u not in existing_user_ids] # Update existing records.
removing unused exception b/c cursor.fetchall() will always return a list
diff --git a/pulley.js b/pulley.js index <HASH>..<HASH> 100755 --- a/pulley.js +++ b/pulley.js @@ -59,7 +59,7 @@ empty: false, hidden: true }], function( err, result ) { - var auth = result.username + ":" + result.password; + var auth = encodeURIComponent( result.username ) + ":" + encodeURIComponent( result.password ); request.post("https://" + auth + "@api.github.com/authorizations", { json: true, body: {
Update pulley.js: Add encodeURIComponent for auth. Passwords can contain symbols such as '@' (which are not allowed in that position and breaks the request).
diff --git a/lib/action_cable/connection/base.rb b/lib/action_cable/connection/base.rb index <HASH>..<HASH> 100644 --- a/lib/action_cable/connection/base.rb +++ b/lib/action_cable/connection/base.rb @@ -180,11 +180,6 @@ module ActionCable end end - def allowed_origins_match? origin - allowed_origins = Array(server.config.allowed_request_origins) - allowed_origins.any? { |allowed_origin| allowed_origin.is_a?(Regexp) ? allowed_origin =~ origin : allowed_origin == origin } - end - def respond_to_successful_request websocket.rack_response end
Remove unused method allowed_origins in Connection::Base
diff --git a/actioncable/lib/action_cable/server/base.rb b/actioncable/lib/action_cable/server/base.rb index <HASH>..<HASH> 100644 --- a/actioncable/lib/action_cable/server/base.rb +++ b/actioncable/lib/action_cable/server/base.rb @@ -54,7 +54,7 @@ module ActionCable # The worker pool is where we run connection callbacks and channel actions. We do as little as possible on the server's main thread. # The worker pool is an executor service that's backed by a pool of threads working from a task queue. The thread pool size maxes out - # at 4 worker threads by default. Tune the size yourself with `config.action_cable.worker_pool_size`. + # at 4 worker threads by default. Tune the size yourself with <tt>config.action_cable.worker_pool_size</tt>. # # Using Active Record, Redis, etc within your channel actions means you'll get a separate connection from each thread in the worker pool. # Plan your deployment accordingly: 5 servers each running 5 Puma workers each running an 8-thread worker pool means at least 200 database
[ci skip] Fix formatting of documentation of worker_pool method from AC::Server::Base
diff --git a/lib/puppet/type/pfile.rb b/lib/puppet/type/pfile.rb index <HASH>..<HASH> 100644 --- a/lib/puppet/type/pfile.rb +++ b/lib/puppet/type/pfile.rb @@ -34,7 +34,34 @@ module Puppet desc "Whether files should be backed up before being replaced. If a filebucket_ is specified, files will be backed up there; else, they will be backed up in the same directory - with a ``.puppet-bak`` extension." + with a ``.puppet-bak`` extension. + + To use filebuckets, you must first create a filebucket in your + configuration:: + + filebucket { main: + server => puppet + } + + ``puppetmasterd`` creates a filebucket by default, so you can + usually back up to your main server with this configuration. Once + you've described the bucket in your configuration, you can use + it in any file:: + + file { \"/my/file\": + source => \"/path/in/nfs/or/something\", + backup => main + } + + This will back the file up to the central server. + + At this point, the only benefits to doing so are that you do not + have backup files lying around on each of your machines, a given + version of a file is only backed up once, and you can restore + any given file manually, no matter how old. Eventually, + transactional support will be able to automatically restore + filebucketed files. + " attr_reader :bucket defaultto true
Ooops, did not save the docs before committing. git-svn-id: <URL>
diff --git a/lib/how_is/sources/github/issues.rb b/lib/how_is/sources/github/issues.rb index <HASH>..<HASH> 100644 --- a/lib/how_is/sources/github/issues.rb +++ b/lib/how_is/sources/github/issues.rb @@ -156,11 +156,9 @@ module HowIs::Sources last_cursor = fetch_last_cursor return @data if last_cursor.nil? - after = fetch_issues(after, last_cursor) + after, data = fetch_issues(after, data, last_cursor) - @data = @data.select!(&method(:issue_is_relevant?)) - - @data + @data = data.select(&method(:issue_is_relevant?)) end def issue_is_relevant?(issue) @@ -196,7 +194,8 @@ module HowIs::Sources end end - def fetch_issues(after, last_cursor) + def fetch_issues(after, data, last_cursor) + data ||= [] chunk_size = 100 after_str = ", after: #{after.inspect}" unless after.nil? @@ -236,10 +235,10 @@ module HowIs::Sources node } - @data += new_data + data += new_data end - current_last_cursor + [current_last_cursor, data] end def date_le(left, right)
More refactoring. (#2)
diff --git a/scoop/launch/workerLaunch.py b/scoop/launch/workerLaunch.py index <HASH>..<HASH> 100644 --- a/scoop/launch/workerLaunch.py +++ b/scoop/launch/workerLaunch.py @@ -40,6 +40,7 @@ def localWorker(workerNum, size, pythonExecutable, executable, args, c.append("--profile") c.append(executable) c.extend(args) + logging.debug("localWorker: going to start %s" % c) return subprocess.Popen(c) class remoteWorker(subprocessHandling.baseRemote): @@ -52,7 +53,8 @@ class remoteWorker(subprocessHandling.baseRemote): pythonpath = ("export PYTHONPATH={0} " "&&".format(pythonPath) if pythonPath else '') broker = "127.0.0.1" if brokerIsLocalhost else brokerHostname - self.command += [( + + c = ( "{pythonpath} cd {remotePath} && {nice} {pythonExecutable} " "-m scoop.bootstrap.__main__ " "{echoGroup}" @@ -78,7 +80,9 @@ class remoteWorker(subprocessHandling.baseRemote): n=size, executable=executable, arguments=" ".join(args) - )] + ) + logging.debug("addWorker: adding %s" % c) + self.command.append(c) def getCommand(self): return self.command \ No newline at end of file
add more debug info for local and remote worker
diff --git a/update/result.go b/update/result.go index <HASH>..<HASH> 100644 --- a/update/result.go +++ b/update/result.go @@ -3,6 +3,7 @@ package update import ( "fmt" "sort" + "strings" "github.com/weaveworks/flux" ) @@ -44,14 +45,23 @@ func (r Result) ImageIDs() []string { } // Error returns the error for this release (if any) -// TODO: should we concat them here? or what if there are multiple? func (r Result) Error() string { - for _, serviceResult := range r { - if serviceResult.Error != "" { - return serviceResult.Error + var errIds []string + var errStr string + for id, serviceResult := range r { + if serviceResult.Status == ReleaseStatusFailed { + errIds = append(errIds, id.String()) + errStr = serviceResult.Error } } - return "" + switch { + case len(errIds) == 0: + return "" + case len(errIds) == 1: + return fmt.Sprintf("%s failed: %s", errIds[0], errStr) + default: + return fmt.Sprintf("Multiple services failed: %s", strings.Join(errIds, ", ")) + } } type ServiceResult struct {
Only report a release as failed if >0 service failed
diff --git a/osbs/core.py b/osbs/core.py index <HASH>..<HASH> 100755 --- a/osbs/core.py +++ b/osbs/core.py @@ -29,7 +29,7 @@ class OpenshiftException(Exception): def check_response(response): - if response.status_code != httplib.OK: + if response.status_code not in (httplib.OK, httplib.CREATED): raise OpenshiftException(response.status_code) diff --git a/osbs/http.py b/osbs/http.py index <HASH>..<HASH> 100644 --- a/osbs/http.py +++ b/osbs/http.py @@ -87,7 +87,7 @@ class Response(object): def _check_status_code(self): if self.status_code == 0: self.status_code = self.curl.getinfo(pycurl.HTTP_CODE) - if self.status_code != 0 and self.status_code != httplib.OK: + if self.status_code not in (0, httplib.OK, httplib.CREATED): if self.curl: url = self.curl.url else:
Don't treat HTTP <I> ("Created") as an error.
diff --git a/frontends/default/javascripts/jquery/active_scaffold.js b/frontends/default/javascripts/jquery/active_scaffold.js index <HASH>..<HASH> 100644 --- a/frontends/default/javascripts/jquery/active_scaffold.js +++ b/frontends/default/javascripts/jquery/active_scaffold.js @@ -622,7 +622,7 @@ var ActiveScaffold = { if (typeof(element) == 'string') element = '#' + element; var element = $(element); if (options.singular == false) { - if (!(options.id && $(options.id))) { + if (!(options.id && $('#' + options.id).size() > 0)) { element.append(content); } } else {
Bugfix: jquery create-associated_record_form used prototype code; issue <I> reported by clyfe
diff --git a/mdata/store_mock.go b/mdata/store_mock.go index <HASH>..<HASH> 100644 --- a/mdata/store_mock.go +++ b/mdata/store_mock.go @@ -26,9 +26,8 @@ func NewMockStore() *MockStore { // add a chunk to be returned on Search() func (c *MockStore) AddMockResult(metric string, itgen chunk.IterGen) { - if itgens, ok := c.results[metric]; !ok { - itgens = make([]chunk.IterGen, 0) - c.results[metric] = itgens + if _, ok := c.results[metric]; !ok { + c.results[metric] = make([]chunk.IterGen, 0) } c.results[metric] = append(c.results[metric], itgen)
fix itgens not being used thx ineffassign
diff --git a/cslbot/commands/metar.py b/cslbot/commands/metar.py index <HASH>..<HASH> 100644 --- a/cslbot/commands/metar.py +++ b/cslbot/commands/metar.py @@ -37,7 +37,7 @@ def cmd(send, msg, args): return if isinstance(cmdargs.stations, list): cmdargs.stations = ','.join(cmdargs.stations) - req = get('https://aviationweather.gov/adds/dataserver_current/httpparam', + req = get('http://aviationweather.gov/adds/dataserver_current/httpparam', params={'datasource': 'metars', 'requestType': 'retrieve', 'format': 'xml', 'mostRecentForEachStation': 'constraint', 'hoursBeforeNow': '1.25', 'stationString': cmdargs.stations})
seems metar broke their ssl
diff --git a/src/frontend/org/voltdb/iv2/Initiator.java b/src/frontend/org/voltdb/iv2/Initiator.java index <HASH>..<HASH> 100644 --- a/src/frontend/org/voltdb/iv2/Initiator.java +++ b/src/frontend/org/voltdb/iv2/Initiator.java @@ -61,6 +61,4 @@ public interface Initiator /** Write a viable replay set to the command log */ public void enableWritingIv2FaultLog(); - - public long getCurrentTxnId(); }
Clean up multi-merge leftover interface cruft.
diff --git a/topologies/replset.js b/topologies/replset.js index <HASH>..<HASH> 100644 --- a/topologies/replset.js +++ b/topologies/replset.js @@ -1196,7 +1196,11 @@ function executeWriteOperation(args, options, callback) { } const willRetryWrite = - !args.retrying && options.retryWrites && options.session && isRetryableWritesSupported(self); + !args.retrying && + options.retryWrites && + options.session && + isRetryableWritesSupported(self) && + !options.session.inTransaction(); if (!self.s.replicaSetState.hasPrimary()) { if (self.s.disconnectHandler) {
refactor(replset): don't retry writes if in a transaction
diff --git a/lib/model/activity.js b/lib/model/activity.js index <HASH>..<HASH> 100644 --- a/lib/model/activity.js +++ b/lib/model/activity.js @@ -570,6 +570,9 @@ Activity.prototype.applyJoin = function(callback) { }, function(err, post) { if (err) throw err; + if (!post) { + throw new AppError("Can't join group: no access"); + } post.checkRecipient(act.actor, this); }, function(err, isRecipient) {
Handle the case where there is no post activity for a group
diff --git a/bibliopixel/layout/matrix.py b/bibliopixel/layout/matrix.py index <HASH>..<HASH> 100644 --- a/bibliopixel/layout/matrix.py +++ b/bibliopixel/layout/matrix.py @@ -66,7 +66,7 @@ class Matrix(MultiLayout): serpentine=serpentine, rotation=rotation, y_flip=vert_flip) - else: + elif self.drivers: raise TypeError( "Must provide coord_map if using multiple drivers!")
Now layout.Matrix supports having 0 drivers
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ f = open('README.rst') __doc__ = f.read() f.close() -VERSION = "1.0.6" +VERSION = "1.0.7" classifiers = [ "Development Status :: 5 - Production/Stable",
Leakage of update from setup.py
diff --git a/src/main/resources/META-INF/resources/primefaces-extensions/timepicker/1-timepicker.js b/src/main/resources/META-INF/resources/primefaces-extensions/timepicker/1-timepicker.js index <HASH>..<HASH> 100644 --- a/src/main/resources/META-INF/resources/primefaces-extensions/timepicker/1-timepicker.js +++ b/src/main/resources/META-INF/resources/primefaces-extensions/timepicker/1-timepicker.js @@ -151,12 +151,14 @@ PrimeFacesExt.widget.TimePicker = PrimeFaces.widget.BaseWidget.extend({ mouseup: function(){ $(this).removeClass('ui-state-active'); }, - mousedown: function(){ + mousedown: function(e){ var el = $(this); el.addClass('ui-state-active'); var dir = el.hasClass('pe-timepicker-up') ? 1 : -1; _self.spin(dir); + + e.preventDefault(); } }); },
Fixed issue #<I> (timePicker issue in Chrome)
diff --git a/lib/restful_acl.rb b/lib/restful_acl.rb index <HASH>..<HASH> 100644 --- a/lib/restful_acl.rb +++ b/lib/restful_acl.rb @@ -9,9 +9,9 @@ module RestfulAcl def has_permission? begin - # Load the Model based on the controller name passed in - klass = params[:controller].classify.constantize - + # Load the Model based on the controller name + klass = self.controller_name.classify.constantize + # Load the object requested if the param[:id] exists object = klass.find(params[:id]) unless params[:id].blank? @@ -49,4 +49,4 @@ module RestfulAcl end end -end \ No newline at end of file +end
Refactored the way the controller name is found Thanks to Devon at kuxuesoft.com!
diff --git a/pkg/oc/cli/admin/diagnostics/diagnostics/cluster/network/results.go b/pkg/oc/cli/admin/diagnostics/diagnostics/cluster/network/results.go index <HASH>..<HASH> 100644 --- a/pkg/oc/cli/admin/diagnostics/diagnostics/cluster/network/results.go +++ b/pkg/oc/cli/admin/diagnostics/diagnostics/cluster/network/results.go @@ -14,6 +14,7 @@ import ( "github.com/openshift/source-to-image/pkg/tar" s2ifs "github.com/openshift/source-to-image/pkg/util/fs" + corev1 "k8s.io/api/core/v1" kerrs "k8s.io/apimachinery/pkg/util/errors" kapi "k8s.io/kubernetes/pkg/apis/core" "k8s.io/kubernetes/pkg/kubectl/polymorphichelpers" @@ -99,7 +100,7 @@ func (d *NetworkDiagnostic) copyNetworkPodInfo(pod *kapi.Pod) error { func (d *NetworkDiagnostic) getNetworkPodLogs(pod *kapi.Pod) error { bytelim := int64(1024000) - opts := &kapi.PodLogOptions{ + opts := &corev1.PodLogOptions{ TypeMeta: pod.TypeMeta, Container: pod.Name, Follow: true,
Fix provided options object is not a PodLogOptions error in network diags This seems fallout from <I> rebase (<URL>)
diff --git a/src/test-environment-maker.js b/src/test-environment-maker.js index <HASH>..<HASH> 100644 --- a/src/test-environment-maker.js +++ b/src/test-environment-maker.js @@ -22,12 +22,12 @@ function init(rawSyncFunction, syncFunctionFile) { try { environmentTemplate = fs.readFileSync(__dirname + '/templates/test-environment-template.js', 'utf8').trim(); } catch (ex) { - console.log('ERROR: Unable to read the test helper environment template: ' + ex); + console.log('ERROR: Unable to read the test environment template: ' + ex); throw ex; } - // The test helper environment includes a placeholder string called "%SYNC_FUNC_PLACEHOLDER%" that is to be replaced with the contents of + // The test environment includes a placeholder string called "%SYNC_FUNC_PLACEHOLDER%" that is to be replaced with the contents of // the sync function var environmentString = environmentTemplate.replace( '%SYNC_FUNC_PLACEHOLDER%', @@ -37,7 +37,7 @@ function init(rawSyncFunction, syncFunctionFile) { // valid statement. var environmentStatement = '(' + environmentString + ');'; - // Compile the test helper environment function within the current virtual machine context so it can share access to the "requireAccess", + // Compile the test environment function within the current virtual machine context so it can share access to the "requireAccess", // "channel", "customActionStub", etc. stubs with the test-helper module var environmentFunction = vm.runInThisContext(environmentStatement, options);
Issue #<I>: Minor text changes for test environment maker
diff --git a/volume/volume.go b/volume/volume.go index <HASH>..<HASH> 100644 --- a/volume/volume.go +++ b/volume/volume.go @@ -5,9 +5,6 @@ import ( "os" "runtime" "strings" - - "github.com/Sirupsen/logrus" - "github.com/docker/docker/pkg/system" ) // DefaultDriverName is the driver name used for the driver @@ -79,8 +76,7 @@ func (m *MountPoint) Setup() (string, error) { return "", err } if runtime.GOOS != "windows" { // Windows does not have deprecation issues here - logrus.Warnf("Auto-creating non-existent volume host path %s, this is deprecated and will be removed soon", m.Source) - if err := system.MkdirAll(m.Source, 0755); err != nil { + if err := os.MkdirAll(m.Source, 0755); err != nil { return "", err } }
Remove deprecation warning Auto-creation of non-existing host directories is no longer deprecated (9d5c<I>bed2ac<I>e<I>d<I>e3f5), so this warning is no longer relevant. This removes the deprecation warning. Also removes the "system" package here, because it's only used on non-Windows, so basically just called os.MkdirAll()
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -103,7 +103,7 @@ setup( keywords="read reader edit editor parse parser asam mdf measurement", # You can just specify the packages manually here if your project is # simple. Or you can use find_packages(). - packages=find_packages(exclude=["contrib", "docs", "test"]), + packages=find_packages(include=("asammdf*",)), # Alternatively, if you want to distribute just a my_module.py, uncomment # this: # py_modules=["my_module"], @@ -112,14 +112,14 @@ setup( # requirements files see: # https://packaging.python.org/en/latest/requirements.html install_requires=[ - "numpy>=1.16.1", - "pandas", - "numexpr", - "wheel", "canmatrix[arxml, dbc]>=0.8", - "natsort", "lxml", "lz4", + "natsort", + "numexpr", + "numpy>=1.16.1", + "pandas", + "wheel", ], # List additional groups of dependencies here (e.g. development # dependencies). You can install these using the following syntax,
only include asammdf package
diff --git a/pysat/instruments/methods/general.py b/pysat/instruments/methods/general.py index <HASH>..<HASH> 100644 --- a/pysat/instruments/methods/general.py +++ b/pysat/instruments/methods/general.py @@ -285,4 +285,3 @@ def load_csv_data(fnames, read_csv_kwargs=None): data = pds.DataFrame() if len(fdata) == 0 else pds.concat(fdata, axis=0) return data -
STY: fixed flake8 Removed extra line from the end of the file.
diff --git a/jmetal-core/src/main/java/org/uma/jmetal/util/solutionattribute/impl/GenericSolutionAttribute.java b/jmetal-core/src/main/java/org/uma/jmetal/util/solutionattribute/impl/GenericSolutionAttribute.java index <HASH>..<HASH> 100755 --- a/jmetal-core/src/main/java/org/uma/jmetal/util/solutionattribute/impl/GenericSolutionAttribute.java +++ b/jmetal-core/src/main/java/org/uma/jmetal/util/solutionattribute/impl/GenericSolutionAttribute.java @@ -35,7 +35,7 @@ public class GenericSolutionAttribute <S extends Solution<?>, V> implements Solu } @Override - public static Object getAttributeID() { + public Object getAttributeID() { return this.getClass() ; } }
undo changes in GenericSolutionAttribute
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -54,7 +54,7 @@ function coearseAddress (address) { //opts must have appKey module.exports = function (opts) { - var appKey = opts.appKey + var appKey = (opts && opts.caps && opts.caps.shs || opts.appKey) var defaultTimeout = ( opts.defaultTimeout || 5e3 // 5 seconds. ) @@ -109,10 +109,11 @@ module.exports = function (opts) { manifest: 'sync', }, init: function (api, opts, permissions, manifest) { + var shsCap = (opts.caps && opts.caps.shs) || opts.appKey || appKey var shs = Shs({ keys: opts.keys && toSodiumKeys(opts.keys), seed: opts.seed, - appKey: toBuffer(opts.appKey || appKey), + appKey: toBuffer(shsCap), //**************************************** timeout: timeout_handshake, @@ -187,7 +188,6 @@ module.exports = function (opts) { close: function (err, cb) { if(isFunction(err)) cb = err, err = null api.closed = true - ;(server.close || server)(function (err) { api.emit('close', err) cb && cb(err) @@ -207,3 +207,7 @@ module.exports = function (opts) { } + + + +
take cap in consistent way with other uses of caps in ssb-*
diff --git a/core/Http.php b/core/Http.php index <HASH>..<HASH> 100644 --- a/core/Http.php +++ b/core/Http.php @@ -444,6 +444,7 @@ class Http // only get header info if not saving directly to file CURLOPT_HEADER => is_resource($file) ? false : true, CURLOPT_CONNECTTIMEOUT => $timeout, + CURLOPT_TIMEOUT => $timeout, ); // Case core:archive command is triggering archiving on https:// and the certificate is not valid if ($acceptInvalidSslCertificate) {
Fix curl timeout not set completely: this could result in Piwik hang indefinitely on an HTTP request More particularly, this resulted in the installation hanging at the second step (system check) when using PHP's built-in webserver.
diff --git a/handlers/datasets.js b/handlers/datasets.js index <HASH>..<HASH> 100644 --- a/handlers/datasets.js +++ b/handlers/datasets.js @@ -21,7 +21,7 @@ export default { create(req, res) { counter.getNext('datasets', (datasetNumber) => { - let offset = 100; + let offset = 1000; datasetNumber += offset; datasetNumber = 'ds' + ('000000' + datasetNumber).substr(-6,6); req.body._id = datasetNumber; @@ -96,4 +96,4 @@ export default { }); } -}; \ No newline at end of file +};
Bump reserved range for dataset accession numbers to 0-<I>.
diff --git a/javascript/CMSMain.AddForm.js b/javascript/CMSMain.AddForm.js index <HASH>..<HASH> 100644 --- a/javascript/CMSMain.AddForm.js +++ b/javascript/CMSMain.AddForm.js @@ -42,7 +42,7 @@ var hints = this.find('.hints').data('hints'), metadata = this.find('#ParentID .TreeDropdownField').data('metadata'), id = this.find('#ParentID .TreeDropdownField').getValue(), - newClassName = metadata[0].ClassName, + newClassName = metadata.ClassName, disallowedChildren = hints[newClassName ? newClassName : 'Root'].disallowedChildren || [], defaultChildClass = hints[newClassName ? newClassName : 'Root'].defaultChild || null;
MINOR Fixed reading of javascript metadata in CMSMain.AddForm.js
diff --git a/lib/conceptql/annotate_grapher.rb b/lib/conceptql/annotate_grapher.rb index <HASH>..<HASH> 100644 --- a/lib/conceptql/annotate_grapher.rb +++ b/lib/conceptql/annotate_grapher.rb @@ -54,7 +54,7 @@ module ConceptQL edge_options = {} opts = from.last[:annotation] - types(from).tap { |t| puts [from.first, from[1], t].inspect}.each do |type| + types(from).each do |type| type_opts = opts[:counts][type] || {} #next unless (type_opts = (opts[:counts][type])).is_a?(Hash) n = type_opts[:n]
Remove some debug output from AnnotateGrapher
diff --git a/lib/oxidized/model/procurve.rb b/lib/oxidized/model/procurve.rb index <HASH>..<HASH> 100644 --- a/lib/oxidized/model/procurve.rb +++ b/lib/oxidized/model/procurve.rb @@ -22,8 +22,6 @@ class Procurve < Oxidized::Model new_cfg end - cmd 'show running-config' - cmd 'show version' do |cfg| comment cfg end @@ -32,9 +30,11 @@ class Procurve < Oxidized::Model comment cfg end + cmd 'show running-config' + cfg :telnet do - username /^\r?Username:/ - password /Password: / + username /Username:/ + password /Password:/ end cfg :telnet, :ssh do
Be more liberal about username prompt ^\r? was too strict Also move non-config above of config, rancid-style. Also Procurve is unbelievably shitty crapbox, screen drawing is shit, telnet password is maximum <I> chars, ssh password maximum <I> chars, que?
diff --git a/tests/modularinputs/test_modularinput.js b/tests/modularinputs/test_modularinput.js index <HASH>..<HASH> 100644 --- a/tests/modularinputs/test_modularinput.js +++ b/tests/modularinputs/test_modularinput.js @@ -309,6 +309,10 @@ exports.setup = function() { }, "ModularInput Input Validation fails": function(test) { + // Make logger noop so testoutput is cleaner + var loggerErrorBackup = Logger.error; + Logger.error = function(){}; + exports.getScheme = function() { return null; }; @@ -484,6 +488,7 @@ exports.setup = function() { test.equal(5, expectedChildren.length); test.equal(expectedChildren.length, foundChildren.length); + test.ok(asObject); test.ok(testUtils.XMLCompare(ET.parse(expected).getroot(), ET.parse(found).getroot())); test.strictEqual(0, scriptStatus); test.done();
Suppress modinput logging to console for tests
diff --git a/python_modules/dagster/dagster/core/definitions/solid.py b/python_modules/dagster/dagster/core/definitions/solid.py index <HASH>..<HASH> 100644 --- a/python_modules/dagster/dagster/core/definitions/solid.py +++ b/python_modules/dagster/dagster/core/definitions/solid.py @@ -531,15 +531,6 @@ class CompositeSolidDefinition(ISolidDefinition, IContainSolids): return self._config_mapping is not None @property - def has_descendant_config_mapping(self): - return any( - ( - isinstance(solid, CompositeSolidDefinition) and solid.has_config_mapping - for solid in self.iterate_solid_defs() - ) - ) - - @property def has_config_entry(self): has_child_solid_config = any([solid.definition.has_config_entry for solid in self.solids]) return (
(make-pipeline-a-solid-1) Delete has_descendant_config_mapping Summary: Dead code. Test Plan: BK Reviewers: alangenfeld Reviewed By: alangenfeld Differential Revision: <URL>
diff --git a/internal/export/distro/zeromq.go b/internal/export/distro/zeromq.go index <HASH>..<HASH> 100644 --- a/internal/export/distro/zeromq.go +++ b/internal/export/distro/zeromq.go @@ -22,17 +22,18 @@ type zeroMQEventPublisher struct { mux sync.Mutex } -func newZeroMQEventPublisher() zeroMQEventPublisher { +func newZeroMQEventPublisher() sender { newPublisher, _ := zmq.NewSocket(zmq.PUB) LoggingClient.Info("Connecting to analytics 0MQ at: " + Configuration.AnalyticsQueue.Uri()) newPublisher.Bind(Configuration.AnalyticsQueue.Uri()) LoggingClient.Info("Connected to analytics outbound 0MQ") - return zeroMQEventPublisher{ + sender := &zeroMQEventPublisher{ publisher: newPublisher, } + return sender } -func (sender zeroMQEventPublisher) Send(data []byte, event *models.Event) bool { +func (sender *zeroMQEventPublisher) Send(data []byte, event *models.Event) bool { sender.mux.Lock() defer sender.mux.Unlock() LoggingClient.Debug("Sending data to 0MQ: " + string(data[:]))
fix Send method on 0MQ publisher to be pointer
diff --git a/lib/translate_routes.rb b/lib/translate_routes.rb index <HASH>..<HASH> 100755 --- a/lib/translate_routes.rb +++ b/lib/translate_routes.rb @@ -89,7 +89,8 @@ module ActionController @@original_named_routes = Routes.named_routes.routes.dup # Hash {:name => :route} @@original_names = @@original_named_routes.keys - Routes.clear! + #don't delete original routes + #Routes.clear! new_routes = [] new_named_routes = {} @@ -106,8 +107,8 @@ module ActionController new_routes.concat(trans_routes) end - - Routes.routes = new_routes + #merge old routes with new ones + Routes.routes |= new_routes new_named_routes.each { |name, r| Routes.named_routes.add name, r } @@original_names.each{ |old_name| add_untranslated_helpers_to_controllers_and_views(old_name) }
Keep original routes as we still need them
diff --git a/bin/formats/cmd.js b/bin/formats/cmd.js index <HASH>..<HASH> 100644 --- a/bin/formats/cmd.js +++ b/bin/formats/cmd.js @@ -7,18 +7,17 @@ var child_process = require('child_process'); var fs = require('fs'); module.exports = { - exec: function(command) { + exec: function(command) { + if (fs.existsSync('done')) { + fs.unlinkSync('done'); + } // Run the command in a subshell child_process.exec(command + ' 2>&1 1>output && echo done! > done'); // Block the event loop until the command has executed. while (!fs.existsSync('done')) { // Do nothing } - // Read the output - var output = fs.readFileSync('output'); - // Delete temporary files. - fs.unlinkSync('output'); - fs.unlinkSync('done'); - return { stdout: output }; + // Output + return { stdout: fs.readFileSync('output') }; } }; \ No newline at end of file
fix : avoid errors when test crash (with temp files)
diff --git a/server.go b/server.go index <HASH>..<HASH> 100644 --- a/server.go +++ b/server.go @@ -137,6 +137,9 @@ func (u *Upgrader) Upgrade(w http.ResponseWriter, r *http.Request, responseHeade } var rw *bufio.ReadWriter netConn, rw, err = h.Hijack() + if err != nil { + return u.returnError(w, r, http.StatusInternalServerError, err.Error()) + } br = rw.Reader if br.Buffered() > 0 {
Check and handle error return from hijack.
diff --git a/storage/smiles/src/test/java/org/openscience/cdk/smiles/SmilesParserTest.java b/storage/smiles/src/test/java/org/openscience/cdk/smiles/SmilesParserTest.java index <HASH>..<HASH> 100644 --- a/storage/smiles/src/test/java/org/openscience/cdk/smiles/SmilesParserTest.java +++ b/storage/smiles/src/test/java/org/openscience/cdk/smiles/SmilesParserTest.java @@ -2575,4 +2575,10 @@ public class SmilesParserTest extends CDKTestCase { return parser.parseSmiles(smi); } + public void testNoTitle() throws InvalidSmilesException { + SmilesParser parser = new SmilesParser(SilentChemObjectBuilder.getInstance()); + IAtomContainer mol = parser.parseSmiles("CCC"); + Assert.assertNull(mol.getProperty("cdk:Title")); + } + }
Unit test to make sure parsing a SMILES does not set a null cdk:Title (as <I> does, as found in Bioclipse)
diff --git a/lib/gelf/logger.rb b/lib/gelf/logger.rb index <HASH>..<HASH> 100644 --- a/lib/gelf/logger.rb +++ b/lib/gelf/logger.rb @@ -1,6 +1,9 @@ module GELF # Methods for compatibility with Ruby Logger. module LoggerCompatibility + + attr_accessor :formatter + # Does nothing. def close end
Added formatter field to Logger
diff --git a/lib/mail_chimp.rb b/lib/mail_chimp.rb index <HASH>..<HASH> 100644 --- a/lib/mail_chimp.rb +++ b/lib/mail_chimp.rb @@ -6,15 +6,15 @@ module MailChimp after_filter :create_in_mailchimp, :only => [:create] after_filter :update_in_mailchimp, :only => [:update] destroy.after :remove_from_mailchimp # can use r_c? - end - end - def self.hominid - @hominid ||= Hominid::Base.new({:api_key => Spree::Config.get(:mailchimp_api_key)}) - end + def hominid + @hominid ||= Hominid::Base.new({:api_key => Spree::Config.get(:mailchimp_api_key)}) + end - def self.mc_list_id - Spree::Config.get(:mailchimp_list_id) + def mc_list_id + Spree::Config.get(:mailchimp_list_id) + end + end end def create_in_mailchimp
dont use Class methods in a controller action, it doesnt work
diff --git a/nbp/src/main/java/jlibs/nbp/Feeder.java b/nbp/src/main/java/jlibs/nbp/Feeder.java index <HASH>..<HASH> 100644 --- a/nbp/src/main/java/jlibs/nbp/Feeder.java +++ b/nbp/src/main/java/jlibs/nbp/Feeder.java @@ -17,6 +17,7 @@ package jlibs.nbp; import java.io.IOException; import java.nio.CharBuffer; +import java.nio.channels.ReadableByteChannel; import java.nio.charset.CharacterCodingException; /** @@ -49,6 +50,10 @@ public class Feeder{ charBuffer.clear(); } + public final ReadableByteChannel byteChannel(){ + return channel instanceof NBChannel ? ((NBChannel)channel).getChannel() : null; + } + protected Feeder child; private Feeder parent; public final void setChild(Feeder child){
byteChannel() method added
diff --git a/setuptools/extension.py b/setuptools/extension.py index <HASH>..<HASH> 100644 --- a/setuptools/extension.py +++ b/setuptools/extension.py @@ -1,4 +1,6 @@ import sys +import re +import functools import distutils.core import distutils.extension @@ -37,13 +39,10 @@ class Extension(_Extension): if have_pyrex(): # the build has Cython, so allow it to compile the .pyx files return - def pyx_to_target(source): - lang = self.language or '' - target_ext = '.cpp' if lang.lower() == 'c++' else '.c' - if source.endswith('.pyx'): - source = source[:-4] + target_ext - return source - self.sources = list(map(pyx_to_target, self.sources)) + lang = self.language or '' + target_ext = '.cpp' if lang.lower() == 'c++' else '.c' + sub = functools.partial(re.sub, '.pyx$', target_ext) + self.sources = list(map(sub, self.sources)) class Library(Extension): """Just like a regular Extension, but built as a library instead"""
Use functools.partial and re.sub to construct the substitution function.
diff --git a/padatious/__init__.py b/padatious/__init__.py index <HASH>..<HASH> 100644 --- a/padatious/__init__.py +++ b/padatious/__init__.py @@ -15,4 +15,4 @@ from .intent_container import IntentContainer from .match_data import MatchData -__version__ = '0.3.7' # Also change in setup.py +__version__ = '0.3.8' # Also change in setup.py diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ with open(join(dirname(abspath(__file__)), 'requirements.txt')) as f: setup( name='padatious', - version='0.3.7', # Also change in padatious/__init__.py + version='0.3.8', # Also change in padatious/__init__.py description='A neural network intent parser', url='http://github.com/MycroftAI/padatious', author='Matthew Scholefield',
Increment version to <I>
diff --git a/frontend/controllers/ProductController.php b/frontend/controllers/ProductController.php index <HASH>..<HASH> 100755 --- a/frontend/controllers/ProductController.php +++ b/frontend/controllers/ProductController.php @@ -102,7 +102,8 @@ class ProductController extends Controller */ private function setSeoData($model) { - $this->view->title = $model->translation->seoTitle ?? $model->translation->title ?? ''; + $this->view->title = !empty(($model->translation->seoTitle)) ? + strip_tags($model->translation->seoTitle) : strip_tags($model->translation->title); $this->view->registerMetaTag([ 'name' => 'description', 'content' => strip_tags($model->translation->seoDescription) ?? ''
Changes getting seo title in ProductController.
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -161,7 +161,7 @@ html_theme = 'alabaster' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = [] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied
docs: Get rid of warning about missing _static/ dir
diff --git a/pyqt_distutils/build_ui.py b/pyqt_distutils/build_ui.py index <HASH>..<HASH> 100644 --- a/pyqt_distutils/build_ui.py +++ b/pyqt_distutils/build_ui.py @@ -92,10 +92,10 @@ class build_ui(Command): try: subprocess.check_output(cmd.split(' ')) except subprocess.CalledProcessError as e: - write_message(cmd, 'red') + write_message(cmd, 'yellow') write_message(e.output, 'red') except OSError as e: - write_message(cmd, 'red') + write_message(cmd, 'yellow') write_message(str(e), 'red') else: write_message(cmd, 'green')
Tweak colors Print failing command in yellow and error message in red
diff --git a/pyspectral/rayleigh.py b/pyspectral/rayleigh.py index <HASH>..<HASH> 100644 --- a/pyspectral/rayleigh.py +++ b/pyspectral/rayleigh.py @@ -174,6 +174,8 @@ class Rayleigh(object): clip_angle = np.rad2deg(np.arccos(1. / 25)) sun_zenith = np.clip(np.asarray(sun_zenith), 0, clip_angle) sunzsec = 1. / np.cos(np.deg2rad(sun_zenith)) + clip_angle = np.rad2deg(np.arccos(1. / 3.)) + sat_zenith = np.clip(np.asarray(sat_zenith), 0, clip_angle) satzsec = 1. / np.cos(np.deg2rad(np.asarray(sat_zenith))) shape = sun_zenith.shape
Clip satellite-zenith angles outside range
diff --git a/lib/coverband/collectors/coverage.rb b/lib/coverband/collectors/coverage.rb index <HASH>..<HASH> 100644 --- a/lib/coverband/collectors/coverage.rb +++ b/lib/coverband/collectors/coverage.rb @@ -47,7 +47,11 @@ module Coverband @logger.info 'coverage report: ' @logger.info @file_line_usage.inspect end - rescue RuntimeError => err + # StandardError might be better option + # coverband previously had RuntimeError here + # but runtime error can let a large number of error crash this method + # and this method is currently in a ensure block in middleware + rescue StandardError => err failed! if @verbose @logger.info 'coverage missing' @@ -83,7 +87,11 @@ module Coverband if previous_results new_results = {} current_coverage.each_pair do |file, line_counts| - new_results[file] = array_diff(line_counts, previous_results[file]) + if previous_results[file] + new_results[file] = array_diff(line_counts, previous_results[file]) + else + new_results[file] = line_counts + end end else new_results = current_coverage @@ -138,7 +146,7 @@ module Coverband ::Coverage.start unless ::Coverage.running? else ::Coverage.start - end + end @semaphore = Mutex.new @@previous_results = nil reset_instance
skip calculations when previous results are missing, capture better error
diff --git a/ksamsok/ksamsok.py b/ksamsok/ksamsok.py index <HASH>..<HASH> 100644 --- a/ksamsok/ksamsok.py +++ b/ksamsok/ksamsok.py @@ -135,7 +135,7 @@ class KSamsok: uri = re.sub('rdf/', '', uri) uri = re.sub('html/', '', uri) uri = re.sub('jsonld/', '', uri) - + uri = re.sub('museumdat/', '', uri) # get position of the last / try:
support museumdat as input URI
diff --git a/types/plugin.go b/types/plugin.go index <HASH>..<HASH> 100644 --- a/types/plugin.go +++ b/types/plugin.go @@ -26,10 +26,11 @@ type PluginConfig struct { // Plugin represents a Docker plugin for the remote API type Plugin struct { - ID string `json:"Id,omitempty"` - Name string - Tag string - Active bool + ID string `json:"Id,omitempty"` + Name string + Tag string + // Enabled is true when the plugin is running, is false when the plugin is not running, only installed. + Enabled bool Config PluginConfig Manifest PluginManifest }
replace .Active by .Enabled
diff --git a/ehforwarderbot/__version__.py b/ehforwarderbot/__version__.py index <HASH>..<HASH> 100644 --- a/ehforwarderbot/__version__.py +++ b/ehforwarderbot/__version__.py @@ -1,3 +1,3 @@ # coding=utf-8 -__version__ = "2.1.0.dev3" +__version__ = "2.1.0"
bump: bumping version: <I>.dev3 -> <I>
diff --git a/test/phpunitBootstrap.php b/test/phpunitBootstrap.php index <HASH>..<HASH> 100644 --- a/test/phpunitBootstrap.php +++ b/test/phpunitBootstrap.php @@ -1,11 +1,9 @@ <?php - -define('ARTAX_SYSDIR', dirname(__DIR__)); spl_autoload_register(function($cls) { if (0 === strpos($cls, 'Artax\\')) { $cls = str_replace('\\', '/', $cls); - require ARTAX_SYSDIR . "/src/$cls.php"; + require dirname(__DIR__) . "/src/$cls.php"; } -}); +});
removed constant from testing bootstrap for better multi-process testing
diff --git a/libandroid-navigation-ui/src/main/java/com/mapbox/services/android/navigation/ui/v5/NavigationView.java b/libandroid-navigation-ui/src/main/java/com/mapbox/services/android/navigation/ui/v5/NavigationView.java index <HASH>..<HASH> 100644 --- a/libandroid-navigation-ui/src/main/java/com/mapbox/services/android/navigation/ui/v5/NavigationView.java +++ b/libandroid-navigation-ui/src/main/java/com/mapbox/services/android/navigation/ui/v5/NavigationView.java @@ -240,7 +240,9 @@ public class NavigationView extends CoordinatorLayout implements LifecycleObserv @Override public void setCameraTrackingEnabled(boolean isEnabled) { - camera.setCameraTrackingLocation(isEnabled); + if (camera != null) { + camera.setCameraTrackingLocation(isEnabled); + } } @Override
Null check camera tracking (#<I>)
diff --git a/sark/code/instruction.py b/sark/code/instruction.py index <HASH>..<HASH> 100644 --- a/sark/code/instruction.py +++ b/sark/code/instruction.py @@ -100,4 +100,4 @@ class Instruction(object): @property def regs(self): - return [operand.reg for operand in self.operands] \ No newline at end of file + return set(operand.reg for operand in self.operands) \ No newline at end of file
Removed repetitions in `Instruction.regs`.
diff --git a/windpowerlib/wind_farm.py b/windpowerlib/wind_farm.py index <HASH>..<HASH> 100644 --- a/windpowerlib/wind_farm.py +++ b/windpowerlib/wind_farm.py @@ -21,7 +21,7 @@ class WindFarm(object): Parameters ---------- - wind_turbine_fleet : :pandas:`pandas.DataFrame<frame>` or list(dict) + wind_turbine_fleet : :pandas:`pandas.DataFrame<frame>` or list() Wind turbines of wind farm. DataFrame/Dictionaries must have 'wind_turbine' containing a :class:`~.wind_turbine.WindTurbine` object and either 'number_of_turbines' (number of wind turbines of the same @@ -38,11 +38,11 @@ class WindFarm(object): Attributes ---------- - wind_turbine_fleet : list(dict) - Wind turbines of wind farm. Dictionaries must have 'wind_turbine' + wind_turbine_fleet : :pandas:`pandas.DataFrame<frame>` + Wind turbines of wind farm. DataFrame must have 'wind_turbine' (contains a :class:`~.wind_turbine.WindTurbine` object) and 'number_of_turbines' (number of wind turbines of the same turbine type - in the wind farm) as keys. + in the wind farm) as columns. efficiency : float or :pandas:`pandas.DataFrame<frame>` or None Efficiency of the wind farm. Either constant (float) power efficiency curve (pd.DataFrame) containing 'wind_speed' and 'efficiency'
Adapt docstring of WindFarm
diff --git a/tests/test_project.py b/tests/test_project.py index <HASH>..<HASH> 100644 --- a/tests/test_project.py +++ b/tests/test_project.py @@ -136,6 +136,7 @@ class TestProject(unittest.TestCase): """ uses dummy_function as func to execute tasks """ + proj13 = project.Project(name='Maths Calc', fldr=root_folder, desc='run simple maths functions') t1 = project.Task(1, 'task_1', dummy_function) t1.add_param(param_key='a_number', param_val=3) @@ -148,8 +149,11 @@ class TestProject(unittest.TestCase): proj13.add_task(t1) proj13.add_task(t2) proj13.add_detail('info', 'This project runs a seris of calculations') - proj13.execute_tasks() + dummy_projects = project.Projects() + dummy_projects.add_project(proj13) + #proj13.execute_tasks() + dummy_projects.run() if __name__ == '__main__': unittest.main() \ No newline at end of file
using projects.run instead of project.execute to test coverage
diff --git a/compat/__init__.py b/compat/__init__.py index <HASH>..<HASH> 100644 --- a/compat/__init__.py +++ b/compat/__init__.py @@ -15,7 +15,11 @@ try: except ImportError: import six - +# get_indent +if six.PY3: + from threading import get_ident +else: + from thread import get_ident # noqa try: from django.conf.urls import url, patterns, include, handler404, handler500 @@ -255,6 +259,7 @@ __all__ = [ 'get_model_name', 'get_user_model', 'get_username_field', + 'get_indent', 'import_string', 'user_model_label', 'url',
Added get_indent compatibility.