hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
219787a39a8ea98bc69e2c664b26df54a10fd30d
|
diff --git a/core/src/main/java/jlibs/core/lang/StringUtil.java b/core/src/main/java/jlibs/core/lang/StringUtil.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/jlibs/core/lang/StringUtil.java
+++ b/core/src/main/java/jlibs/core/lang/StringUtil.java
@@ -106,6 +106,25 @@ public class StringUtil{
}
}
+ /**
+ * Converts first character in <code>str</code> to uppercase.
+ * <p>
+ * This method can be called on string of any length.
+ *
+ * @param str string to be converted
+ * @return string with first letter changed to uppercase
+ */
+ public static String capitalize(String str){
+ switch(str.length()){
+ case 0:
+ return str;
+ case 1:
+ return str.toUpperCase();
+ default:
+ return Character.toUpperCase(str.charAt(0))+str.substring(1);
+ }
+ }
+
/*-------------------------------------------------[ Array Join ]---------------------------------------------------*/
public static <T> String join(T[] array){
|
BeanUtil.firstLetterToUpperCase() is moved here as capitalize()
|
santhosh-tekuri_jlibs
|
train
|
c515a8147d715565721c0726653bf5e5deac3b8b
|
diff --git a/src/deployer.js b/src/deployer.js
index <HASH>..<HASH> 100644
--- a/src/deployer.js
+++ b/src/deployer.js
@@ -111,7 +111,7 @@ class Deployer {
let filename = this.config.fileList.pop()
let fileStream = fs.readFileSync(filename)
let fileKey = filename.replace(this.config.fullAssetPath, '').replace(/\\/g, '/')
- let fullFileKey = `${this.config.deployPath}${fileKey}`
+ let fullFileKey = (`${this.config.deployPath}${fileKey}`).replace(/\/\//g, '/');
let pwaSupportForFile = this.config.options.pwa && this.config.options.pwaFiles.split(',').indexOf(fileKey) > -1
let gzip = this.config.options.gzip && globby.sync(this.config.options.gzipFilePattern, { cwd: this.config.fullAssetPath })
|
#<I>: Strip extra forward slashes from full file path.
|
multiplegeorges_vue-cli-plugin-s3-deploy
|
train
|
f7c8ebbdf1fe3438d954a571b0a977044ae65bb6
|
diff --git a/bokeh/charts/_attributes.py b/bokeh/charts/_attributes.py
index <HASH>..<HASH> 100644
--- a/bokeh/charts/_attributes.py
+++ b/bokeh/charts/_attributes.py
@@ -8,7 +8,7 @@ from bokeh.models.sources import ColumnDataSource
from bokeh.charts import DEFAULT_PALETTE
from bokeh.charts._properties import ColumnLabel
from bokeh.charts.utils import marker_types
-from ..util.serialization import make_id
+
class AttrSpec(HasProps):
"""A container for assigning attributes to values and retrieving them as needed.
@@ -135,7 +135,7 @@ class ColorAttr(AttrSpec):
class MarkerAttr(AttrSpec):
name = 'marker'
- iterable = List(String, default=marker_types.keys())
+ iterable = List(String, default=list(marker_types.keys()))
def __init__(self, **kwargs):
iterable = kwargs.pop('markers', None)
diff --git a/bokeh/charts/_chart.py b/bokeh/charts/_chart.py
index <HASH>..<HASH> 100644
--- a/bokeh/charts/_chart.py
+++ b/bokeh/charts/_chart.py
@@ -20,6 +20,7 @@ the generation of several outputs (file, server, notebook).
from __future__ import absolute_import
+from six import iteritems
import numpy as np
from collections import defaultdict
@@ -87,7 +88,7 @@ class Chart(Plot):
# sets overridden defaults
# ToDo: allow Chart/Plot properties as well as ChartOptions
- for option, value in option_props.iteritems():
+ for option, value in iteritems(option_props):
if value != default_props[option]:
setattr(self._options, option, value)
diff --git a/bokeh/charts/_data_source.py b/bokeh/charts/_data_source.py
index <HASH>..<HASH> 100644
--- a/bokeh/charts/_data_source.py
+++ b/bokeh/charts/_data_source.py
@@ -15,6 +15,7 @@ methods.
from __future__ import absolute_import
+from six import iteritems
from six.moves import zip
from operator import itemgetter
from itertools import islice, product
@@ -90,7 +91,7 @@ def groupby(df, **specs):
for name, data in df.groupby(spec_cols):
attrs = {}
- for spec_name, spec in specs.iteritems():
+ for spec_name, spec in iteritems(specs):
if spec.columns is not None:
# get index of the unique column values grouped on for this spec
name_idx = tuple([spec_cols.index(col) for col in spec.columns])
@@ -114,7 +115,7 @@ def groupby(df, **specs):
# collect up the defaults from the attribute specs
else:
attrs = {}
- for spec_name, spec in specs.iteritems():
+ for spec_name, spec in iteritems(specs):
attrs[spec_name] = spec[None]
yield DataGroup(label='all', data=df, attr_specs=attrs)
@@ -302,7 +303,7 @@ class ChartDataSource(object):
required_dims = self._required_dims
selections = self._selections
- dims = [dim for dim, sel in selections.iteritems() if sel is not None]
+ dims = [dim for dim, sel in iteritems(selections) if sel is not None]
# look for a match for selections to dimensional requirements
if len(required_dims) > 0:
@@ -323,7 +324,7 @@ class ChartDataSource(object):
'\n\nAvailable columns are: %s'
req_str = [' and '.join(['%s = <Any Column>' % dim for dim in required_dim])
for required_dim in required_dims]
- selection_str = ['%s = %s' % (str(dim), str(sel)) for dim, sel in selections.iteritems() if sel is not None]
+ selection_str = ['%s = %s' % (str(dim), str(sel)) for dim, sel in iteritems(selections) if sel is not None]
raise ValueError(error_str % (' or '.join(req_str), ', '.join(selection_str), ', '.join(self.columns)))
else:
diff --git a/bokeh/charts/glyphs.py b/bokeh/charts/glyphs.py
index <HASH>..<HASH> 100644
--- a/bokeh/charts/glyphs.py
+++ b/bokeh/charts/glyphs.py
@@ -1,5 +1,6 @@
from __future__ import absolute_import
+from six import iteritems
from collections import defaultdict
import numpy as np
@@ -119,7 +120,7 @@ class AggregateGlyph(NestedCompositeGlyph):
filtered_glyphs = self.filter_glyphs(glyphs)
grouped = self.groupby(filtered_glyphs, 'label')
- for index, group in grouped.iteritems():
+ for index, group in iteritems(grouped):
group = sorted(group, key=lambda x: x.stack_label)
shift = []
for i, glyph in enumerate(group):
@@ -139,7 +140,7 @@ class AggregateGlyph(NestedCompositeGlyph):
width = min(0.2, (1. / len(grouped.keys())) ** 1.1)
# set bar attributes and re-aggregate
- for i, (index, group) in enumerate(grouped.iteritems()):
+ for i, (index, group) in enumerate(iteritems(grouped)):
for glyph in group:
glyph.dodge_shift = step[i + 1]
glyph.width = width
|
Fix python 3 support items Bryan provided.
|
bokeh_bokeh
|
train
|
b308a2bee857826a7f57a13dd7e815af9749073f
|
diff --git a/jenkins/plugin/src/test/java/com/hp/octane/plugins/jenkins/events/EventsTest.java b/jenkins/plugin/src/test/java/com/hp/octane/plugins/jenkins/events/EventsTest.java
index <HASH>..<HASH> 100644
--- a/jenkins/plugin/src/test/java/com/hp/octane/plugins/jenkins/events/EventsTest.java
+++ b/jenkins/plugin/src/test/java/com/hp/octane/plugins/jenkins/events/EventsTest.java
@@ -126,7 +126,6 @@ public class EventsTest {
}
@Test
- //@Ignore
public void testEventsA() throws Exception {
FreeStyleProject p = rule.createFreeStyleProject(projectName);
|
tech: fixing events test and unignoring it
|
hpsa_hpe-application-automation-tools-plugin
|
train
|
53fc4b513eace912fa0b668dc264ac9a51292f5d
|
diff --git a/sinatra-contrib/lib/sinatra/content_for.rb b/sinatra-contrib/lib/sinatra/content_for.rb
index <HASH>..<HASH> 100644
--- a/sinatra-contrib/lib/sinatra/content_for.rb
+++ b/sinatra-contrib/lib/sinatra/content_for.rb
@@ -106,7 +106,7 @@ module Sinatra
#
# <% clear_content_for :head %>
def clear_content_for(key)
- content_blocks.delete(key.to_sym) if content_blocks[key.to_sym].any?
+ content_blocks.delete(key.to_sym) if content_for?(key)
end
# Render the captured blocks for a given key. For example:
|
Refactor to use content_for?()
|
sinatra_sinatra
|
train
|
50a9ebb7d60d372e5e23db053e070e300f06219f
|
diff --git a/wro4j-maven-plugin/src/main/java/ro/isdc/wro/maven/plugin/AbstractLinterMojo.java b/wro4j-maven-plugin/src/main/java/ro/isdc/wro/maven/plugin/AbstractLinterMojo.java
index <HASH>..<HASH> 100644
--- a/wro4j-maven-plugin/src/main/java/ro/isdc/wro/maven/plugin/AbstractLinterMojo.java
+++ b/wro4j-maven-plugin/src/main/java/ro/isdc/wro/maven/plugin/AbstractLinterMojo.java
@@ -60,7 +60,7 @@ public abstract class AbstractLinterMojo<T>
* Contains errors found during jslint processing which will be reported eventually.
*/
private LintReport<T> lintReport;
- private final ProgressIndicator progressIndicator = new ProgressIndicator(getLog());
+ private ProgressIndicator progressIndicator;
/**
* Add a single report to the registry of found errors.
@@ -77,6 +77,7 @@ public abstract class AbstractLinterMojo<T>
*/
@Override
protected void onBeforeExecute() {
+ progressIndicator = new ProgressIndicator(getLog());
getLog().info("failNever: " + failNever);
progressIndicator.reset();
diff --git a/wro4j-maven-plugin/src/main/java/ro/isdc/wro/maven/plugin/support/ProgressIndicator.java b/wro4j-maven-plugin/src/main/java/ro/isdc/wro/maven/plugin/support/ProgressIndicator.java
index <HASH>..<HASH> 100644
--- a/wro4j-maven-plugin/src/main/java/ro/isdc/wro/maven/plugin/support/ProgressIndicator.java
+++ b/wro4j-maven-plugin/src/main/java/ro/isdc/wro/maven/plugin/support/ProgressIndicator.java
@@ -6,6 +6,7 @@ import org.apache.maven.plugin.logging.Log;
import ro.isdc.wro.model.resource.Resource;
+
/**
* Responsible for logging progress related details. Useful to find the best balance between "to much" vs "none"
* details. This implementation will indicate the state of the progress after a given period of time (ex: each 5
@@ -72,11 +73,9 @@ public class ProgressIndicator {
*/
public void onProcessingResource(final Resource resource) {
totalResources++;
- if (resource != null) {
- log.debug("processing resource: " + resource.getUri());
- if (isLogRequired()) {
- log.info("Resources processed: " + getTotalResources() + ". Last processed: " + resource.getUri());
- }
+ log.debug("processing resource: " + resource.getUri());
+ if (isLogRequired()) {
+ log.info("Resources processed: " + getTotalResources() + ". Last processed: " + resource.getUri());
}
updateLastInvocation();
}
@@ -87,6 +86,7 @@ public class ProgressIndicator {
/**
* This method has a side effect of incrementing the number of resources containing errors.
+ *
* @param errorsToAdd
* number of errors found during processing. This number will be added to the counter holding total number of
* found errors.
diff --git a/wro4j-maven-plugin/src/test/java/ro/isdc/wro/maven/plugin/AbstractTestLinterMojo.java b/wro4j-maven-plugin/src/test/java/ro/isdc/wro/maven/plugin/AbstractTestLinterMojo.java
index <HASH>..<HASH> 100644
--- a/wro4j-maven-plugin/src/test/java/ro/isdc/wro/maven/plugin/AbstractTestLinterMojo.java
+++ b/wro4j-maven-plugin/src/test/java/ro/isdc/wro/maven/plugin/AbstractTestLinterMojo.java
@@ -8,6 +8,7 @@ import java.net.URISyntaxException;
import java.net.URL;
import org.apache.maven.plugin.MojoExecutionException;
+import org.apache.maven.plugin.testing.SilentLog;
import org.apache.maven.project.MavenProject;
import org.junit.Before;
import org.junit.Test;
@@ -33,6 +34,7 @@ public abstract class AbstractTestLinterMojo {
public void setUp()
throws Exception {
mojo = newLinterMojo();
+ mojo.setLog(new SilentLog());
mojo.setIgnoreMissingResources(false);
setWroWithValidResources();
mojo.setTargetGroups("g1");
|
suppress warnings in mojo while running tests
|
wro4j_wro4j
|
train
|
bd2c430199887f59b3bf9c339f1296525337565d
|
diff --git a/lib/passenger/application_spawner.rb b/lib/passenger/application_spawner.rb
index <HASH>..<HASH> 100644
--- a/lib/passenger/application_spawner.rb
+++ b/lib/passenger/application_spawner.rb
@@ -137,6 +137,69 @@ class ApplicationSpawner < AbstractServer
raise Error, "The application spawner server exited unexpectedly"
end
+ # Spawn an instance of the RoR application. When successful, an Application object
+ # will be returned, which represents the spawned RoR application.
+ #
+ # Unlike spawn_application, this method may be called even when the ApplicationSpawner
+ # server isn't started.
+ #
+ # Raises:
+ # - SystemCallError: Something went wrong.
+ # - IOError: Something went wrong.
+ def spawn_application!
+ # Double fork to prevent zombie processes.
+ a, b = UNIXSocket.pair
+ pid = fork do
+ begin
+ pid = fork do
+ begin
+ $0 = "Rails: #{@app_root}"
+ a.close
+ channel = MessageChannel.new(b)
+ reader, writer = IO.pipe
+ begin
+ handler = RequestHandler.new(reader)
+ channel.write(Process.pid, handler.socket_name,
+ handler.using_abstract_namespace?)
+ channel.send_io(writer)
+ writer.close
+ channel.close
+ handler.main_loop
+ ensure
+ channel.close rescue nil
+ writer.close rescue nil
+ handler.cleanup rescue nil
+ end
+ rescue SignalException => signal
+ if e.message != RequestHandler::HARD_TERMINATION_SIGNAL &&
+ e.message != RequestHandler::SOFT_TERMINATION_SIGNAL
+ print_exception('application', e)
+ end
+ rescue Exception => e
+ print_exception('application', e)
+ ensure
+ exit!
+ end
+ end
+ rescue Exception => e
+ print_exception(self.class.to_s, e)
+ ensure
+ exit!
+ end
+ end
+ b.close
+ Process.waitpid(pid)
+
+ channel = MessageChannel.new(a)
+ pid, socket_name, using_abstract_namespace = channel.read
+ if pid.nil?
+ raise IOError, "Connection closed"
+ end
+ owner_pipe = server.recv_io
+ return Application.new(@app_root, pid, socket_name,
+ using_abstract_namespace == "true", owner_pipe)
+ end
+
# Overrided from AbstractServer#start.
#
# May raise these additional exceptions:
|
Begin implementing conservative spawning support.
|
phusion_passenger
|
train
|
7a3db450a786bc2030d6395ff50b72bd7bc30561
|
diff --git a/scripts/make-release.py b/scripts/make-release.py
index <HASH>..<HASH> 100644
--- a/scripts/make-release.py
+++ b/scripts/make-release.py
@@ -16,6 +16,8 @@ import re
from datetime import datetime, date
from subprocess import Popen, PIPE
+_date_clean_re = re.compile(r'(\d+)(st|nd|rd|th)')
+
def parse_changelog():
with open('CHANGES') as f:
@@ -52,8 +54,7 @@ def bump_version(version):
def parse_date(string):
- string = string.replace('th ', ' ').replace('nd ', ' ') \
- .replace('rd ', ' ').replace('st ', ' ')
+ string = _date_clean_re.sub(r'\1', string)
return datetime.strptime(string, '%B %d %Y')
|
Fixed release script. Backported changes from the flask release script
|
pallets_werkzeug
|
train
|
43f5a1c0252172dedf9dc7c0bf45f540282b02c7
|
diff --git a/lib/Doctrine/MongoDB/Collection.php b/lib/Doctrine/MongoDB/Collection.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/MongoDB/Collection.php
+++ b/lib/Doctrine/MongoDB/Collection.php
@@ -20,7 +20,12 @@
namespace Doctrine\MongoDB;
use Doctrine\Common\EventManager,
- Doctrine\MongoDB\Event\EventArgs;
+ Doctrine\MongoDB\Event\EventArgs,
+ Doctrine\MongoDB\Event\DistinctEventArgs,
+ Doctrine\MongoDB\Event\GroupEventArgs,
+ Doctrine\MongoDB\Event\NearEventArgs,
+ Doctrine\MongoDB\Event\MapReduceEventArgs,
+ Doctrine\MongoDB\Event\UpdateEventArgs;
/**
* Wrapper for the PHP MongoCollection class.
@@ -145,7 +150,7 @@ class Collection
public function update($query, array $newObj, array $options = array())
{
if ($this->eventManager->hasListeners(Events::preUpdate)) {
- $this->eventManager->dispatchEvent(Events::preUpdate, new CollectionUpdateEventArgs($this, $query, $newObj));
+ $this->eventManager->dispatchEvent(Events::preUpdate, new UpdateEventArgs($this, $query, $newObj));
}
$result = $this->doUpdate($query, $newObj, $options);
@@ -251,7 +256,7 @@ class Collection
public function findAndUpdate(array $query, array $newObj, array $options = array())
{
if ($this->eventManager->hasListeners(Events::preFindAndUpdate)) {
- $this->eventManager->dispatchEvent(Events::preFindAndUpdate, new CollectionUpdateEventArgs($this, $query, $query));
+ $this->eventManager->dispatchEvent(Events::preFindAndUpdate, new UpdateEventArgs($this, $query, $query));
}
$document = $this->doFindAndUpdate($query, $newObj, $options);
@@ -277,7 +282,7 @@ class Collection
public function near(array $near, array $query = array(), array $options = array())
{
if ($this->eventManager->hasListeners(Events::preNear)) {
- $this->eventManager->dispatchEvent(Events::preNear, new CollectionNearEventArgs($this, $near, $query));
+ $this->eventManager->dispatchEvent(Events::preNear, new NearEventArgs($this, $near, $query));
}
$result = $this->doNear($near, $query, $options);
@@ -303,7 +308,7 @@ class Collection
public function distinct($field, array $query = array(), array $options = array())
{
if ($this->eventManager->hasListeners(Events::preDistinct)) {
- $this->eventManager->dispatchEvent(Events::preDistinct, new CollectionDistinctEventArgs($this, $field, $query));
+ $this->eventManager->dispatchEvent(Events::preDistinct, new DistinctEventArgs($this, $field, $query));
}
$result = $this->doDistinct($field, $query, $options);
@@ -326,16 +331,16 @@ class Collection
return new ArrayIterator(isset($result['values']) ? $result['values'] : array());
}
- public function mapReduce($map, $reduce, array $query = array(), array $options = array())
+ public function mapReduce($map, $reduce, array $query = array(), array $out = array('inline' => 1), array $options = array())
{
- if ($this->eventManager->hasListeners(Events::preDistinct)) {
- $this->eventManager->dispatchEvent(Events::preDistinct, new CollectionMapReduceEventArgs($this, $map, $reduce, $query));
+ if ($this->eventManager->hasListeners(Events::preMapReduce)) {
+ $this->eventManager->dispatchEvent(Events::preMapReduce, new MapReduceEventArgs($this, $map, $reduce, $query, $out));
}
- $result = $this->doMapReduce($map, $reduce, $query, $options);
+ $result = $this->doMapReduce($map, $reduce, $query, $out, $options);
- if ($this->eventManager->hasListeners(Events::postDistinct)) {
- $this->eventManager->dispatchEvent(Events::postDistinct, new EventArgs($this, $result));
+ if ($this->eventManager->hasListeners(Events::postMapReduce)) {
+ $this->eventManager->dispatchEvent(Events::postMapReduce, new EventArgs($this, $result));
}
return $result;
@@ -462,7 +467,7 @@ class Collection
public function group($keys, array $initial, $reduce, array $options = array())
{
if ($this->eventManager->hasListeners(Events::preGroup)) {
- $this->eventManager->dispatchEvent(Events::preGroup, new CollectionGroupEventArgs($this, $keys, $initial, $reduce));
+ $this->eventManager->dispatchEvent(Events::preGroup, new GroupEventArgs($this, $keys, $initial, $reduce));
}
$result = $this->doGroup($keys, $initial, $reduce, $options);
diff --git a/lib/Doctrine/MongoDB/Events.php b/lib/Doctrine/MongoDB/Events.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/MongoDB/Events.php
+++ b/lib/Doctrine/MongoDB/Events.php
@@ -74,6 +74,9 @@ final class Events
const preDistinct = 'collectionPreDistinct';
const postDistinct = 'collectionPostDistinct';
+ const preMapReduce = 'preMapReduce';
+ const postMapReduce = 'postMapReduce';
+
const preNear = 'collectionPreNear';
const postNear = 'collectionPostNear';
|
fix event names and event arg class references
|
doctrine_mongodb
|
train
|
195c679a7c1ec179986cd9a227a34b564d463ad5
|
diff --git a/lib/shelly/app.rb b/lib/shelly/app.rb
index <HASH>..<HASH> 100644
--- a/lib/shelly/app.rb
+++ b/lib/shelly/app.rb
@@ -194,10 +194,6 @@ module Shelly
attributes["state"]
end
- def trial?
- !!attributes["trial"]
- end
-
def credit
attributes["credit"]
end
diff --git a/lib/shelly/cli/main.rb b/lib/shelly/cli/main.rb
index <HASH>..<HASH> 100644
--- a/lib/shelly/cli/main.rb
+++ b/lib/shelly/cli/main.rb
@@ -109,7 +109,7 @@ module Shelly
say "Creating Cloudfile", :green
app.create_cloudfile
- if app.trial?
+ if app.credit > 0
say_new_line
say "Billing information", :green
say "Cloud created with #{app.credit} Euro credit."
diff --git a/spec/shelly/app_spec.rb b/spec/shelly/app_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/shelly/app_spec.rb
+++ b/spec/shelly/app_spec.rb
@@ -133,7 +133,6 @@ describe Shelly::App do
before do
@response = {"web_server_ip" => "192.0.2.1",
"state" => "running",
- "trial" => true,
"credit" => 23.0,
"git_info" => {
"deployed_commit_message" => "Commit message",
@@ -143,17 +142,6 @@ describe Shelly::App do
@client.stub(:app).and_return(@response)
end
- describe "#trial?" do
- it "should return true if app is trial" do
- @app.should be_trial
- end
-
- it "should return false if app is not trial" do
- @client.stub(:app).and_return("trial" => false)
- @app.should_not be_trial
- end
- end
-
describe "#credit" do
it "should return freecredit that app has" do
@app.credit.should == 23.0
diff --git a/spec/shelly/cli/main_spec.rb b/spec/shelly/cli/main_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/shelly/cli/main_spec.rb
+++ b/spec/shelly/cli/main_spec.rb
@@ -291,7 +291,7 @@ describe Shelly::CLI::Main do
Shelly::App.stub(:inside_git_repository?).and_return(true)
Shelly::App.stub(:new).and_return(@app)
@client.stub(:token).and_return("abc")
- @app.stub(:attributes).and_return({"trial" => false})
+ @app.stub(:attributes).and_return({"credit" => 0})
@app.stub(:git_remote_exist?).and_return(false)
@main.stub(:check => true)
@main.stub(:ask_for_organization)
@@ -409,8 +409,8 @@ More info at http://git-scm.com/book/en/Git-Basics-Getting-a-Git-Repository\e[0m
end
end
- it "should create the app on shelly cloud and show trial information" do
- @app.stub(:attributes).and_return({"trial" => true, "credit" => 40, "organization_name" => "example"})
+ it "should create the app on shelly cloud and show credit information" do
+ @app.stub(:attributes).and_return({"credit" => 40, "organization_name" => "example"})
@client.stub(:shellyapp_url).and_return("http://example.com")
@app.should_receive(:create)
$stdout.should_receive(:puts).with(green "Billing information")
|
Removed trial [#<I>]
|
Ragnarson_shelly
|
train
|
b27498a55beef1d85e1f676311fc0f04ddbb2de1
|
diff --git a/source/rafcon/mvc/controllers/graphical_editor.py b/source/rafcon/mvc/controllers/graphical_editor.py
index <HASH>..<HASH> 100644
--- a/source/rafcon/mvc/controllers/graphical_editor.py
+++ b/source/rafcon/mvc/controllers/graphical_editor.py
@@ -2160,6 +2160,7 @@ class GraphicalEditorController(ExtendedController):
if react_to_event(self.view, self.view.editor, event):
logger.debug("copy selection")
global_clipboard.copy(self.model.selection)
+ return True
def _cut_selection(self, *event):
"""Cuts the current selection and copys it to the clipboard.
@@ -2167,6 +2168,7 @@ class GraphicalEditorController(ExtendedController):
if react_to_event(self.view, self.view.editor, event):
logger.debug("cut selection")
global_clipboard.cut(self.model.selection)
+ return True
def _paste_clipboard(self, *event):
"""Paste the current clipboard into the current selection if the current selection is a container state.
@@ -2212,3 +2214,4 @@ class GraphicalEditorController(ExtendedController):
self._resize_state(state_copy_m, new_corner_pos, keep_ratio=True, resize_content=True, publish_changes=True)
self._redraw()
+ return True
diff --git a/source/rafcon/mvc/controllers/graphical_editor_gaphas.py b/source/rafcon/mvc/controllers/graphical_editor_gaphas.py
index <HASH>..<HASH> 100644
--- a/source/rafcon/mvc/controllers/graphical_editor_gaphas.py
+++ b/source/rafcon/mvc/controllers/graphical_editor_gaphas.py
@@ -159,6 +159,7 @@ class GraphicalEditorController(ExtendedController):
if react_to_event(self.view, self.view.editor, event):
logger.debug("copy selection")
global_clipboard.copy(self.model.selection)
+ return True
def _cut_selection(self, *event):
"""Cuts the current selection and copys it to the clipboard.
@@ -166,6 +167,7 @@ class GraphicalEditorController(ExtendedController):
if react_to_event(self.view, self.view.editor, event):
logger.debug("cut selection")
global_clipboard.cut(self.model.selection)
+ return True
def _paste_clipboard(self, *event):
"""Paste the current clipboard into the current selection if the current selection is a container state.
@@ -223,6 +225,7 @@ class GraphicalEditorController(ExtendedController):
new_state_v.resize_all_children(old_size, True)
self._meta_data_changed(new_state_v, state_copy_m, 'all', True)
+ return True
def _update_selection_from_gaphas(self, view, selected_items):
selected_items = self.view.editor.selected_items
|
graphical editor: add return values for successful copy, cut and paste actions
|
DLR-RM_RAFCON
|
train
|
4d2a38880925b8d4cf03b59eacad53707e2cd6f7
|
diff --git a/zappa/cli.py b/zappa/cli.py
index <HASH>..<HASH> 100644
--- a/zappa/cli.py
+++ b/zappa/cli.py
@@ -73,9 +73,10 @@ class ZappaCLI(object):
"""
+ cmd_list = "'deploy', 'update', 'undeploy', 'schedule', 'unschedule', 'tail' and 'rollback'"
parser = argparse.ArgumentParser(description='Zappa - Deploy Python applications to AWS Lambda and API Gateway.\n')
parser.add_argument('command_env', metavar='U', type=str, nargs='*',
- help="Command to execute. Can be one of 'deploy', 'update', 'tail' and 'rollback'.")
+ help="Command to execute. Can be one of {}.".format(cmd_list)
parser.add_argument('-n', '--num-rollback', type=int, default=0,
help='The number of versions to rollback.')
parser.add_argument('-s', '--settings_file', type=str, default='zappa_settings.json',
@@ -89,7 +90,7 @@ class ZappaCLI(object):
vargs_nosettings = vargs.copy()
vargs_nosettings.pop('settings_file')
if not any(vargs_nosettings.values()): # pragma: no cover
- parser.error("Please supply a command to execute. Can be one of 'deploy', 'update', 'tail', rollback', 'invoke'.'")
+ parser.error("Please supply a command to execute. Can be one of {}.".format(cmd_list))
return
# Version requires no arguments
|
expand cli help test to include full command list
|
Miserlou_Zappa
|
train
|
79e9879979d564e21fa1e962938fc522984745b2
|
diff --git a/test/example-test.js b/test/example-test.js
index <HASH>..<HASH> 100644
--- a/test/example-test.js
+++ b/test/example-test.js
@@ -2,9 +2,7 @@ var _ = require('lodash')
var helper = require('./support/helper')
var assert = require('assert')
-ogConsoleLog = console.log
-var result = helper.run('example/test/lib/**/*.js'),
- log = helper.log()
+var result = helper.run('example/test/lib/**/*.js')
assert.equal(result, false)
helper.assertLog(
@@ -21,6 +19,6 @@ helper.assertLog(
"not ok 3 - \"blueIsRed\" - test #1 in `example/test/lib/single-function.js`",
" ---",
" message: 'blue' == 'red'",
- / stacktrace: AssertionError: \'blue\' == \'red\'\n at blueIsRed /,
+ / stacktrace: AssertionError: 'blue' == 'red'/,
" ..."
)
|
Make the example test less erratic
|
testdouble_teenytest
|
train
|
d02b4e4c315f9d82b8a77465fab18f718fbeec8d
|
diff --git a/lib/y2r/ast/ruby.rb b/lib/y2r/ast/ruby.rb
index <HASH>..<HASH> 100644
--- a/lib/y2r/ast/ruby.rb
+++ b/lib/y2r/ast/ruby.rb
@@ -255,7 +255,7 @@ module Y2R
# TODO: Use parens only when needed.
class UnaryOperator < Node
def to_ruby(context)
- "#{op}#{expression.to_ruby_enclosed(context)}"
+ "#{op}#{expression.to_ruby_enclosed(context.indented(op.size))}"
end
protected
diff --git a/spec/y2r/ast/ruby_spec.rb b/spec/y2r/ast/ruby_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/y2r/ast/ruby_spec.rb
+++ b/spec/y2r/ast/ruby_spec.rb
@@ -11,6 +11,15 @@ def node_width_mock(width)
mock
end
+def node_width_mock_enclosed(width)
+ mock = double
+ mock.should_receive(:to_ruby_enclosed) do |context|
+ context.width.should == width
+ ""
+ end
+ mock
+end
+
module Y2R::AST::Ruby
RSpec.configure do |c|
c.before :each, :type => :ruby do
@@ -1028,6 +1037,17 @@ module Y2R::AST::Ruby
node.to_ruby(@context_default).should == "+(42 + 43)"
end
end
+
+ describe "formatting" do
+ it "passes correct available width to expression" do
+ node = UnaryOperator.new(
+ :op => "+",
+ :expression => node_width_mock_enclosed(79),
+ )
+
+ node.to_ruby(@context_default)
+ end
+ end
end
end
|
Y2R::AST::Ruby::UnaryOperator: Pass correct available width to expression
|
yast_y2r
|
train
|
747d22358fc45e3aade7f6af7235f55f1f6efe45
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,9 @@
+6.7.1
+-----
+
+- Parse forensic email samples with non-standard date headers
+- Graceful handling of a failure to download the GeoIP database (issue #123)
+
6.7.0
-----
diff --git a/parsedmarc/__init__.py b/parsedmarc/__init__.py
index <HASH>..<HASH> 100644
--- a/parsedmarc/__init__.py
+++ b/parsedmarc/__init__.py
@@ -33,7 +33,7 @@ from parsedmarc.utils import is_outlook_msg, convert_outlook_msg
from parsedmarc.utils import timestamp_to_human, human_timestamp_to_datetime
from parsedmarc.utils import parse_email
-__version__ = "6.7.0"
+__version__ = "6.7.1"
logging.basicConfig(
format='%(levelname)8s:%(filename)s:%(lineno)d:'
diff --git a/parsedmarc/utils.py b/parsedmarc/utils.py
index <HASH>..<HASH> 100644
--- a/parsedmarc/utils.py
+++ b/parsedmarc/utils.py
@@ -17,6 +17,7 @@ import base64
import platform
import atexit
import mailbox
+import re
import dateparser
import dns.reversename
@@ -32,6 +33,8 @@ USER_AGENT = "Mozilla/5.0 ((0 {1})) parsedmarc".format(
platform.release(),
)
+parenthesis_regex = re.compile(r'\s*\(.*\)\s*')
+
null_file = open(os.devnull, "w")
logger = logging.getLogger("parsedmarc")
mailparser_logger = logging.getLogger("mailparser")
@@ -236,6 +239,8 @@ def human_timestamp_to_datetime(human_timestamp, to_utc=False):
DateTime: The converted timestamp
"""
+ human_timestamp = human_timestamp.replace("-0000", "")
+ human_timestamp = parenthesis_regex.sub("", human_timestamp)
settings = {}
if to_utc:
@@ -320,14 +325,22 @@ def get_ip_address_country(ip_address, parallel=False, offline=False):
if db_path is None:
db_path = os.path.join(tempdir, "GeoLite2-Country.mmdb")
if not os.path.exists(db_path):
- download_country_database(db_path)
+ try:
+ download_country_database()
+ except Exception as e:
+ logger.error(e.__str__())
+ return None
if not os.path.exists(db_path):
return None
else:
db_age = datetime.now() - datetime.fromtimestamp(
os.stat(db_path).st_mtime)
if db_age > timedelta(days=7):
- download_country_database()
+ try:
+ download_country_database()
+ except Exception as e:
+ logger.error(e.__str__())
+ return None
db_path = db_path
db_reader = geoip2.database.Reader(db_path)
diff --git a/requirements.txt b/requirements.txt
index <HASH>..<HASH> 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -10,7 +10,7 @@ xmltodict>=0.12.0
geoip2>=2.9.0
imapclient>=2.1.0
mail-parser>=3.9.2
-dateparser>=0.7.1
+dateparser>=0.7.2
elasticsearch>=6.3.1,<7.0.0
elasticsearch-dsl>=6.3.1,<7.0.0
kafka-python>=1.4.4
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@ from setuptools import setup
from codecs import open
from os import path
-__version__ = "6.7.0"
+__version__ = "6.7.1"
description = "A Python package and CLI for parsing aggregate and " \
"forensic DMARC reports"
@@ -99,7 +99,7 @@ setup(
'geoip2>=2.9.0', 'urllib3<1.25,>=1.21.1',
'requests>=2.2.16.0', 'imapclient>=2.1.0',
'mail-parser>=3.9.2',
- 'dateparser>=0.7.1',
+ 'dateparser>=0.7.2',
'mailsuite>=1.3.0',
'elasticsearch>=6.3.1,<7.0.0',
'elasticsearch-dsl>=6.3.1,<7.0.0',
|
<I>
- Parse forensic email samples with non-standard date headers
- Graceful handling of a failure to download the GeoIP database (issue #<I>)
|
domainaware_parsedmarc
|
train
|
5ecb46f65d751b00d18b7ca10f4be772349ece90
|
diff --git a/src/Koloader/Autoloader.php b/src/Koloader/Autoloader.php
index <HASH>..<HASH> 100644
--- a/src/Koloader/Autoloader.php
+++ b/src/Koloader/Autoloader.php
@@ -29,6 +29,9 @@ class Autoloader {
/** @var array List of found paths for each of the autoloadable tokens. **/
protected $cachedPaths = [];
+ /** @var bool Did Autoloader already recreate cache during current runtime? */
+ protected $recreated = false;
+
public function __construct($cache = null) {
// User must provide ither instance of ICache
@@ -96,7 +99,7 @@ class Autoloader {
return true;
- } else {
+ } elseif (!$this->recreated) {
// The token was not found in any of the
// cached paths, so recreate the cache.
@@ -130,7 +133,8 @@ class Autoloader {
// Save into cache.
$this->cache->save($this->cacheKey, json_encode($this->cachedPaths));
-
+ $this->recreated = true;
+
}
|
Optimize rescanning for multiple missing things.
Recreate cache only once per runtime.
|
smuuf_koloader
|
train
|
dbc781d51f47c8c866cf2c12d69b15bcf0561584
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,10 +8,8 @@ setup(
packages=['django_webpack'],
package_data={
'django_webpack': [
- '*.js',
- '*.json',
- 'tests/*.py'
- 'tests/test_bundle.js'
+ 'bundle.js',
+ 'package.json',
]
},
install_requires=[
|
Removed the test harness from the PyPI package.
|
markfinger_python-webpack
|
train
|
7d51e61e4e95685fe00957b1c4e5936ee6dbeffa
|
diff --git a/hazelcast/src/main/java/com/hazelcast/spi/OperationService.java b/hazelcast/src/main/java/com/hazelcast/spi/OperationService.java
index <HASH>..<HASH> 100644
--- a/hazelcast/src/main/java/com/hazelcast/spi/OperationService.java
+++ b/hazelcast/src/main/java/com/hazelcast/spi/OperationService.java
@@ -35,16 +35,32 @@ public interface OperationService {
* Runs an operation in the calling thread.
*
* @param op the operation to execute in the calling thread
+ * @deprecated since 3.7. Use {@link #run(Operation)}
*/
void runOperationOnCallingThread(Operation op);
/**
+ * Runs an operation in the calling thread.
+ *
+ * @param op the operation to execute in the calling thread
+ */
+ void run(Operation op);
+
+ /**
* Executes an operation in the operation executor pool.
*
* @param op the operation to execute in the operation executor pool.
+ * @deprecated since 3.7. Use {@link #execute(Operation)}.
*/
void executeOperation(Operation op);
+ /**
+ * Executes an operation in the operation executor pool.
+ *
+ * @param op the operation to execute in the operation executor pool.
+ */
+ void execute(Operation op);
+
<E> InternalCompletableFuture<E> invokeOnPartition(String serviceName, Operation op, int partitionId);
/**
diff --git a/hazelcast/src/main/java/com/hazelcast/spi/impl/operationservice/impl/OperationServiceImpl.java b/hazelcast/src/main/java/com/hazelcast/spi/impl/operationservice/impl/OperationServiceImpl.java
index <HASH>..<HASH> 100644
--- a/hazelcast/src/main/java/com/hazelcast/spi/impl/operationservice/impl/OperationServiceImpl.java
+++ b/hazelcast/src/main/java/com/hazelcast/spi/impl/operationservice/impl/OperationServiceImpl.java
@@ -255,11 +255,21 @@ public final class OperationServiceImpl implements InternalOperationService, Met
@Override
public void runOperationOnCallingThread(Operation op) {
+ run(op);
+ }
+
+ @Override
+ public void run(Operation op) {
operationExecutor.run(op);
}
@Override
public void executeOperation(Operation op) {
+ execute(op);
+ }
+
+ @Override
+ public void execute(Operation op) {
operationExecutor.execute(op);
}
|
Renamed OperationService executeOperation to execute; runOperationOnCallingThread to run
These names are in line with the OperationExecutor. Otherwise we have naming inconsistencies
which make the code more difficult to understand than needs to be.
|
hazelcast_hazelcast
|
train
|
fdd458d2fe03be275327514f0a6ef88d44511bec
|
diff --git a/cmd/syncthing/gui.go b/cmd/syncthing/gui.go
index <HASH>..<HASH> 100644
--- a/cmd/syncthing/gui.go
+++ b/cmd/syncthing/gui.go
@@ -70,7 +70,15 @@ func startGUI(cfg config.GUIConfiguration, assetDir string, m *model.Model) erro
if err != nil {
l.Infoln("Loading HTTPS certificate:", err)
l.Infoln("Creating new HTTPS certificate")
- newCertificate(confDir, "https-")
+
+ // When generating the HTTPS certificate, use the system host name per
+ // default. If that isn't available, use the "syncthing" default.
+ name, err := os.Hostname()
+ if err != nil {
+ name = tlsDefaultCommonName
+ }
+
+ newCertificate(confDir, "https-", name)
cert, err = loadCert(confDir, "https-")
}
if err != nil {
@@ -78,7 +86,20 @@ func startGUI(cfg config.GUIConfiguration, assetDir string, m *model.Model) erro
}
tlsCfg := &tls.Config{
Certificates: []tls.Certificate{cert},
- ServerName: "syncthing",
+ MinVersion: tls.VersionTLS10, // No SSLv3
+ CipherSuites: []uint16{
+ // No RC4
+ tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
+ tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
+ tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
+ tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
+ tls.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
+ tls.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
+ tls.TLS_RSA_WITH_AES_128_CBC_SHA,
+ tls.TLS_RSA_WITH_AES_256_CBC_SHA,
+ tls.TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA,
+ tls.TLS_RSA_WITH_3DES_EDE_CBC_SHA,
+ },
}
rawListener, err := net.Listen("tcp", cfg.Address)
diff --git a/cmd/syncthing/main.go b/cmd/syncthing/main.go
index <HASH>..<HASH> 100644
--- a/cmd/syncthing/main.go
+++ b/cmd/syncthing/main.go
@@ -273,7 +273,7 @@ func main() {
l.Warnln("Key exists; will not overwrite.")
l.Infoln("Device ID:", protocol.NewDeviceID(cert.Certificate[0]))
} else {
- newCertificate(dir, "")
+ newCertificate(dir, "", tlsDefaultCommonName)
cert, err = loadCert(dir, "")
myID = protocol.NewDeviceID(cert.Certificate[0])
if err != nil {
@@ -370,7 +370,7 @@ func syncthingMain() {
// Ensure that that we have a certificate and key.
cert, err = loadCert(confDir, "")
if err != nil {
- newCertificate(confDir, "")
+ newCertificate(confDir, "", tlsDefaultCommonName)
cert, err = loadCert(confDir, "")
if err != nil {
l.Fatalln("load cert:", err)
@@ -909,7 +909,7 @@ next:
// the certificate and used another name.
certName := deviceCfg.CertName
if certName == "" {
- certName = "syncthing"
+ certName = tlsDefaultCommonName
}
err := remoteCert.VerifyHostname(certName)
if err != nil {
diff --git a/cmd/syncthing/tls.go b/cmd/syncthing/tls.go
index <HASH>..<HASH> 100644
--- a/cmd/syncthing/tls.go
+++ b/cmd/syncthing/tls.go
@@ -33,8 +33,8 @@ import (
)
const (
- tlsRSABits = 3072
- tlsName = "syncthing"
+ tlsRSABits = 3072
+ tlsDefaultCommonName = "syncthing"
)
func loadCert(dir string, prefix string) (tls.Certificate, error) {
@@ -43,8 +43,8 @@ func loadCert(dir string, prefix string) (tls.Certificate, error) {
return tls.LoadX509KeyPair(cf, kf)
}
-func newCertificate(dir string, prefix string) {
- l.Infoln("Generating RSA key and certificate...")
+func newCertificate(dir, prefix, name string) {
+ l.Infof("Generating RSA key and certificate for %s...", name)
priv, err := rsa.GenerateKey(rand.Reader, tlsRSABits)
if err != nil {
@@ -57,7 +57,7 @@ func newCertificate(dir string, prefix string) {
template := x509.Certificate{
SerialNumber: new(big.Int).SetInt64(mr.Int63()),
Subject: pkix.Name{
- CommonName: tlsName,
+ CommonName: name,
},
NotBefore: notBefore,
NotAfter: notAfter,
|
Protect GUI HTTPS from some attacks
- Disable SSLv3 against POODLE
- Disable RC4 as a weak cipher
- Set the CommonName to the system host name
|
syncthing_syncthing
|
train
|
41ba4b9a97ea650a617b4a941be6a3cb8ae1a5a2
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -76,12 +76,10 @@ tests_require = [
setup(
name='ConfigArgParse',
- version="0.14.1",
+ version="0.15",
description='A drop-in replacement for argparse that allows options to '
'also be set via config files and/or environment variables.',
long_description=long_description,
- author='Zorro',
- author_email='zorro3.github@gmail.com',
url='https://github.com/bw2/ConfigArgParse',
py_modules=['configargparse'],
include_package_data=True,
|
incremented version to <I>
|
bw2_ConfigArgParse
|
train
|
107fee32a024f056ab08026777250a1b7020b7bf
|
diff --git a/tests/UserDefinedFormControllerTest.php b/tests/UserDefinedFormControllerTest.php
index <HASH>..<HASH> 100644
--- a/tests/UserDefinedFormControllerTest.php
+++ b/tests/UserDefinedFormControllerTest.php
@@ -140,7 +140,7 @@ class UserDefinedFormControllerTest extends FunctionalTest {
$actions = $controller->getFormActions();
$expected = new FieldList(new FormAction('process', 'Custom Button'));
- $expected->push(new ResetFormAction("clearForm"));
+ $expected->push(new ResetFormAction("clearForm", "Clear"));
$this->assertEquals($actions, $expected);
}
|
Make sure tests pass after adding clear button.
|
silverstripe_silverstripe-userforms
|
train
|
a629ea37bc859698150cb55b36984768fa2b5384
|
diff --git a/src/audio/acss_renderer.js b/src/audio/acss_renderer.js
index <HASH>..<HASH> 100644
--- a/src/audio/acss_renderer.js
+++ b/src/audio/acss_renderer.js
@@ -121,6 +121,7 @@ sre.AcssRenderer.prototype.prosodyElement = function(key, value) {
switch (key) {
case sre.Engine.personalityProps.RATE:
return '(richness . ' + value + ')';
+ break;
case sre.Engine.personalityProps.PITCH:
return '(average-pitch . ' + value + ')';
break;
|
Adds missing break. Fixes issue #<I>.
|
zorkow_speech-rule-engine
|
train
|
6dd3a8fcae37a35851cb4b38bb574d7191121bd3
|
diff --git a/Classes/Cache/IdentifierBuilder.php b/Classes/Cache/IdentifierBuilder.php
index <HASH>..<HASH> 100644
--- a/Classes/Cache/IdentifierBuilder.php
+++ b/Classes/Cache/IdentifierBuilder.php
@@ -12,6 +12,7 @@ use SFC\Staticfilecache\Service\CacheService;
use SFC\Staticfilecache\Service\ConfigurationService;
use SFC\Staticfilecache\StaticFileCacheObject;
use TYPO3\CMS\Core\Utility\GeneralUtility;
+use TYPO3\CMS\Core\Utility\StringUtility;
/**
* IdentifierBuilder
@@ -45,7 +46,14 @@ class IdentifierBuilder extends StaticFileCacheObject
$parts['path'] = rawurldecode($parts['path']);
}
- return GeneralUtility::makeInstance(CacheService::class)->getAbsoluteBaseDirectory() . \implode('/', $parts);
+ $absoluteBasePath = GeneralUtility::makeInstance(CacheService::class)->getAbsoluteBaseDirectory();
+ $resultPath = GeneralUtility::resolveBackPath($absoluteBasePath . \implode('/', $parts));
+
+ if (!StringUtility::beginsWith($resultPath, $absoluteBasePath)) {
+ throw new \Exception('The generated filename "' . $resultPath . '" should start with the cache directory "' . $absoluteBasePath . '"', 123781);
+ }
+
+ return $resultPath;
}
/**
diff --git a/Classes/Generator/HtaccessGenerator.php b/Classes/Generator/HtaccessGenerator.php
index <HASH>..<HASH> 100644
--- a/Classes/Generator/HtaccessGenerator.php
+++ b/Classes/Generator/HtaccessGenerator.php
@@ -98,9 +98,9 @@ class HtaccessGenerator extends AbstractGenerator
*
* @param string $templateName
* @param array $variables
- * @param string $fileName
+ * @param string $htaccessFile
*/
- protected function renderTemplateToFile(string $templateName, array $variables, string $fileName)
+ protected function renderTemplateToFile(string $templateName, array $variables, string $htaccessFile)
{
/** @var StandaloneView $renderer */
$renderer = GeneralUtility::makeInstance(StandaloneView::class);
@@ -108,7 +108,7 @@ class HtaccessGenerator extends AbstractGenerator
$renderer->assignMultiple($variables);
$content = \trim((string)$renderer->render());
// Note: Create even empty htaccess files (do not check!!!), so the delete is in sync
- GeneralUtility::writeFile($fileName, $content);
+ GeneralUtility::writeFile($htaccessFile, $content);
}
/**
|
Fix #<I> - Check if generated filenames are part of the cache folder
|
lochmueller_staticfilecache
|
train
|
4b7e3a78baa392492753d47d73a8a42198e9f239
|
diff --git a/app/libraries/Utilities/CustomTemplate.php b/app/libraries/Utilities/CustomTemplate.php
index <HASH>..<HASH> 100644
--- a/app/libraries/Utilities/CustomTemplate.php
+++ b/app/libraries/Utilities/CustomTemplate.php
@@ -72,11 +72,11 @@ class CustomTemplate
* @param int $post_id
*
* @since 0.6.0
- * @access private
+ * @access public
*
* @return string
*/
- private function slug(int $post_id = null): string
+ public function slug(int $post_id = null): string
{
return (string)\get_page_template_slug($post_id);
}
@@ -87,11 +87,11 @@ class CustomTemplate
* @param array $type
*
* @since 0.6.0
- * @access private
+ * @access public
*
* @return bool
*/
- private function is(array $type): bool
+ public function is(array $type): bool
{
return $this->utilities->page->is('page_template', $type);
}
|
Change visibility for `is()` and `slug()` of custom templates utility
They are handy methods that can be used by a child theme.
|
GrottoPress_jentil
|
train
|
7146b67397ab3a4764e618eb1f0cd14ee6b4be04
|
diff --git a/mygeotab/api.py b/mygeotab/api.py
index <HASH>..<HASH> 100644
--- a/mygeotab/api.py
+++ b/mygeotab/api.py
@@ -141,6 +141,22 @@ class API(object):
"""
return self.call('Get', type_name, **parameters)
+ def search(self, type_name, **parameters):
+ """
+ Searches for entities using the API. Shortcut for using get() with a search.
+ :param type_name: The type of entity
+ :param parameters: Additional parameters to send.
+ :return: The JSON result (decoded into a dict) from the server
+ :raise MyGeotabException: Raises when an exception occurs on the MyGeotab server
+ """
+ if parameters:
+ results_limit = parameters.get('resultsLimit', None)
+ if results_limit is not None:
+ del parameters['resultsLimit']
+ parameters = dict(search=parameters)
+ return self.call('Get', type_name, resultsLimit=results_limit, **parameters)
+ return self.get(type_name)
+
def add(self, type_name, entity):
"""
Adds an entity using the API. Shortcut for using call() with the 'Add' method.
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -18,9 +18,9 @@ setup(
description='An unofficial Python client for the MyGeotab API',
long_description=open('README.rst').read(),
install_requires=[
- 'requests',
- 'click',
- 'pytz'
+ 'requests>=2.6',
+ 'click>=3.3',
+ 'pytz>=2014.7'
],
extras_require={
'ipy': ['ipython']
|
Cleaned up requirements.
Added a "search" method, shortcutting both "get" and "call"
|
Geotab_mygeotab-python
|
train
|
fe3fa08bb7d8029b7f489eb2fadffb1aa145bb75
|
diff --git a/lib/influxdb/client.rb b/lib/influxdb/client.rb
index <HASH>..<HASH> 100644
--- a/lib/influxdb/client.rb
+++ b/lib/influxdb/client.rb
@@ -51,7 +51,7 @@ module InfluxDB
# +:retry+:: number of times a failed request should be retried. Defaults to infinite.
def initialize(database = nil, **opts)
opts[:database] = database if database.is_a? String
- @config = InfluxDB::Config.new(opts)
+ @config = InfluxDB::Config.new(**opts)
@stopped = false
@writer = find_writer
diff --git a/lib/influxdb/version.rb b/lib/influxdb/version.rb
index <HASH>..<HASH> 100644
--- a/lib/influxdb/version.rb
+++ b/lib/influxdb/version.rb
@@ -1,3 +1,3 @@
module InfluxDB # :nodoc:
- VERSION = "0.7.0".freeze
+ VERSION = "0.7.1".freeze
end
|
Adjust for <I>-style keyword arguments
lib/influxdb/client.rb:<I>: warning: Using the last argument as keyword parameters is deprecated; maybe ** should be added to the call
lib/influxdb/config.rb:<I>: warning: The called method `initialize' is defined here
|
influxdata_influxdb-ruby
|
train
|
30319b165f275ddf61d0679cc2aa0983dc530215
|
diff --git a/lib/treeize.js b/lib/treeize.js
index <HASH>..<HASH> 100644
--- a/lib/treeize.js
+++ b/lib/treeize.js
@@ -222,11 +222,11 @@ Treeize.prototype.grow = function(data, options) {
data.forEach(function(row) {
this.data.seed.push(row);
var trails = {}; // LUT for trails (find parent of new node in trails path)
- var trail = root = this.data.tree; // OPTIMIZATION: do we need to reset this trail for each row?
+ var trail = base = this.data.tree; // OPTIMIZATION: do we need to reset this trail for each row?
this.log('CURRENT TRAIL STATUS>', trail);
var t = null;
- // set initial root object path for non-array datasets
+ // set initial base object path for non-array datasets
if (opt.output.resultsAsObject) {
trails[''] = trail;
}
@@ -282,17 +282,17 @@ Treeize.prototype.grow = function(data, options) {
// ONLY INSERT IF NOT PRUNED
if (!opt.output.prune || !_.isEmpty(blueprintExtended)) {
- // IF 0 DEPTH AND RESULTSASOBJECT, EXTEND ROOT
+ // IF 0 DEPTH AND RESULTSASOBJECT, EXTEND base
if (opt.output.resultsAsObject && node.depth === 0) {
- _.extend(trails[node.path] = trail = root, blueprintExtended);
- this.log('extending blueprint onto root>', trail);
+ _.extend(trails[node.path] = trail = base, blueprintExtended);
+ this.log('extending blueprint onto base>', trail);
- // IF ROOT TRAIL IS NOT YET MAPPED
+ // IF base TRAIL IS NOT YET MAPPED
} else if (node.isCollection && !(trail = trails[node.parent])) {
- this.log('PARENT TRAIL NOT FOUND (ROOT?)');
+ this.log('PARENT TRAIL NOT FOUND (base?)');
// set up target node if doesn't exist
- if (!(trail = _.findWhere(root, blueprint))) {
- root.push(trail = blueprintExtended);
+ if (!(trail = _.findWhere(base, blueprint))) {
+ base.push(trail = blueprintExtended);
} else {
_.extend(trail, blueprintExtended);
}
@@ -300,7 +300,7 @@ Treeize.prototype.grow = function(data, options) {
// NORMAL NODE TRAVERSAL
} else {
- // NOT ROOT CASE
+ // NOT base CASE
if (node.isCollection) {
// handle collection nodes
this.log('inserting into collection node', trail);
@@ -320,9 +320,9 @@ Treeize.prototype.grow = function(data, options) {
}
} else {
// handle non-collection nodes
- if (trail == root && node.parent === '') {
- root.push(trails[node.parent] = trail = {});
- this.log('root insertion');
+ if (trail == base && node.parent === '') {
+ base.push(trails[node.parent] = trail = {});
+ this.log('base insertion');
}
trail = trails[node.parent];
|
renamed "root" variable to "base" to avoid deprecation error
|
kwhitley_treeize
|
train
|
a6684eeb786663839cf383ea54b681d429a83177
|
diff --git a/railties/lib/generators/test_unit/mailer/templates/functional_test.rb b/railties/lib/generators/test_unit/mailer/templates/functional_test.rb
index <HASH>..<HASH> 100644
--- a/railties/lib/generators/test_unit/mailer/templates/functional_test.rb
+++ b/railties/lib/generators/test_unit/mailer/templates/functional_test.rb
@@ -7,7 +7,6 @@ class <%= class_name %>Test < ActionMailer::TestCase
@expected.to = "to@example.org"
@expected.from = "from@example.com"
@expected.body = read_fixture("<%= action %>")
- @expected.date = Time.now
assert_equal @expected, <%= class_name %>.<%= action %>
end
|
don't set @expected.date in generated mailer test
|
rails_rails
|
train
|
b2965eaf3510908583063342bbc3b0e5be33005c
|
diff --git a/Mixtape/cluster/base.py b/Mixtape/cluster/base.py
index <HASH>..<HASH> 100644
--- a/Mixtape/cluster/base.py
+++ b/Mixtape/cluster/base.py
@@ -67,7 +67,7 @@ class MultiSequenceClusterMixin(object):
def _concat(self, sequences):
self.__lengths = [len(s) for s in sequences]
if len(sequences) > 0 and isinstance(sequences[0], np.ndarray):
- concat = np.concatenate(sequences).copy()
+ concat = np.ascontiguousarray(np.concatenate(sequences))
elif isinstance(sequences[0], md.Trajectory):
# if the input sequences are not numpy arrays, we need to guess
# how to concatenate them. this operation below works for mdtraj
diff --git a/Mixtape/decomposition/base.py b/Mixtape/decomposition/base.py
index <HASH>..<HASH> 100644
--- a/Mixtape/decomposition/base.py
+++ b/Mixtape/decomposition/base.py
@@ -64,7 +64,7 @@ class MultiSequenceDecompositionMixin(BaseEstimator):
def _concat(self, sequences):
self.__lengths = [len(s) for s in sequences]
if len(sequences) > 0 and isinstance(sequences[0], np.ndarray):
- concat = np.concatenate(sequences).copy()
+ concat = np.concatenate(sequences)
else:
# if the input sequences are not numpy arrays, we need to guess
# how to concatenate them. this operation below works for mdtraj
|
decomposition.base no longer worries about contiguous arrays, cluster.base now uses np.ascontiguousarray to make it contiguous
|
msmbuilder_msmbuilder
|
train
|
214278702b297c011b2c5b5495c3efcc17d7d1dd
|
diff --git a/lib/genericNodeProvider.js b/lib/genericNodeProvider.js
index <HASH>..<HASH> 100644
--- a/lib/genericNodeProvider.js
+++ b/lib/genericNodeProvider.js
@@ -76,19 +76,25 @@ function GenericNodeProvider(options) {
* @param {Object} providerOptions - Provider specific options.
* @param {String} providerOptions.propertyPathId - Object property path ('.' separated)
* that describes how to fetch the ID
- * from the provided resource.
+ * from the provided resource. An empty
+ * property name in the path fetches all
+ * keys in the current object.
*
* "example.uuid",
*
* @param {String} providerOptions.propertyPathIpPrivate - Object property path ('.' separated)
* that describes how to fetch the private
- * IP from the provided resource.
+ * IP from the provided resource. An empty
+ * property name in the path fetches all
+ * keys in the current object.
*
* "example.address.private"
*
* @param {String} [providerOptions.propertyPathIpPublic] - Object property path ('.' separated)
* that describes how to fetch the public
- * IP from the provided resource.
+ * IP from the provided resource. An empty
+ * property name in the path fetches all
+ * keys in the current object.
*
* "example.address.public"
*
@@ -180,7 +186,8 @@ GenericNodeProvider.prototype.getNodesFromUri = function getNodesFromUri(uri, op
};
/**
- * Gets the value from an Object's property using a provided Object property path.
+ * Gets the value from an Object's property using a provided Object property path. An empty
+ * property name in the path fetches all keys in the current Object.
*
* @param {String[]} pathArray - Array of properties to traverse through.
* @param {Object} obj - Object to traverse.
@@ -195,6 +202,9 @@ function getDataFromPropPath(pathArray, obj) {
if (typeof result !== 'object' || result === null) {
return {};
}
+ if (prop === '') {
+ return result;
+ }
return result[prop];
}, obj);
}
|
Add support for propertyPath wildcard
|
F5Networks_f5-cloud-libs
|
train
|
76ec4bf91082ef66c62713e1d5a6eff78c24606a
|
diff --git a/indra/databases/uniprot_client.py b/indra/databases/uniprot_client.py
index <HASH>..<HASH> 100644
--- a/indra/databases/uniprot_client.py
+++ b/indra/databases/uniprot_client.py
@@ -265,6 +265,56 @@ def get_gene_name(protein_id, web_fallback=True):
return gene_name
return None
+
+def get_gene_alt_labels(protein_id):
+ """Return a list of other names for the protein."""
+ g = query_protein(protein_id)
+ if g is None:
+ return None
+ query = rdf_prefixes + """
+ SELECT ?name
+ WHERE {
+ ?gene skos:altLabel ?name .
+ }
+ """
+ res = g.query(query)
+ if res:
+ return [r[0].toPython() for r in res]
+ return None
+
+
+def get_gene_alt_names(protein_id):
+ """Get the structured names of the gene."""
+ g = query_protein(protein_id)
+ if g is None:
+ return None
+ query = rdf_prefixes + """
+ SELECT ?name
+ WHERE {
+ ?gene :fullName | :shortName ?name .
+ }
+ """
+ res = g.query(query)
+ if res:
+ return [r[0].toPython() for r in res]
+ return None
+
+
+def get_all_gene_names(protein_id):
+ """Get all names associated with the protein id."""
+ ret = []
+ gene_name = get_gene_name(protein_id)
+ if gene_name:
+ ret.append(gene_name)
+ alt_labels = get_gene_alt_labels(protein_id)
+ if alt_labels:
+ ret.extend(alt_labels)
+ alt_names = get_gene_alt_names(protein_id)
+ if alt_names:
+ ret.extend(alt_names)
+ return ret
+
+
@lru_cache(maxsize=1000)
def get_sequence(protein_id):
try:
diff --git a/indra/tests/test_uniprot_client.py b/indra/tests/test_uniprot_client.py
index <HASH>..<HASH> 100644
--- a/indra/tests/test_uniprot_client.py
+++ b/indra/tests/test_uniprot_client.py
@@ -59,6 +59,16 @@ def test_get_gene_name_no_table_entry():
'''
+def test_get_synonyms():
+ synonyms = uniprot_client.get_gene_alt_labels('P31938')
+ assert synonyms, synonyms
+ other_names = uniprot_client.get_gene_alt_names('P31938')
+ assert other_names, other_names
+ all_names = uniprot_client.get_all_gene_names('P31938')
+ assert (set(synonyms) & set(other_names)).issubset(set(all_names)), \
+ all_names
+
+
def test_get_gene_name_nonhuman():
gene_name = uniprot_client.get_gene_name('P31938')
assert(gene_name == 'Map2k1')
|
Add synonym capabilities to the uniprot client.
|
sorgerlab_indra
|
train
|
e473c007d374c1198c5dcb1669cd019f9fd5f3ad
|
diff --git a/cassandra/decoder.py b/cassandra/decoder.py
index <HASH>..<HASH> 100644
--- a/cassandra/decoder.py
+++ b/cassandra/decoder.py
@@ -1,31 +1,17 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
from binascii import hexlify
from collections import namedtuple
-try:
- from collections import OrderedDict
-except ImportError: # Python <2.7
- from cassandra.util import OrderedDict # NOQA
-
import datetime
import logging
+import re
import socket
import types
from uuid import UUID
+
+try:
+ from collections import OrderedDict
+except ImportError: # Python <2.7
+ from cassandra.util import OrderedDict # NOQA
+
try:
from cStringIO import StringIO
except ImportError:
@@ -60,12 +46,20 @@ HEADER_DIRECTION_TO_CLIENT = 0x80
HEADER_DIRECTION_MASK = 0x80
+NON_ALPHA_REGEX = re.compile('\W')
+END_UNDERSCORE_REGEX = re.compile('^_*(\w*[a-zA-Z0-9])_*$')
+
+
+def _clean_column_name(name):
+ return END_UNDERSCORE_REGEX.sub("\g<1>", NON_ALPHA_REGEX.sub("_", name))
+
+
def tuple_factory(colnames, rows):
return rows
def named_tuple_factory(colnames, rows):
- Row = namedtuple('Row', colnames)
+ Row = namedtuple('Row', map(_clean_column_name, colnames))
return [Row(*row) for row in rows]
diff --git a/tests/unit/test_types.py b/tests/unit/test_types.py
index <HASH>..<HASH> 100644
--- a/tests/unit/test_types.py
+++ b/tests/unit/test_types.py
@@ -1,10 +1,9 @@
import unittest
import datetime
import cassandra
-from cassandra.cqltypes import CassandraType, BooleanType, lookup_casstype_simple, lookup_casstype, \
- AsciiType, LongType, DecimalType, DoubleType, FloatType, Int32Type, UTF8Type, IntegerType, SetType, cql_typename
-
-from cassandra.cluster import Cluster
+from cassandra.cqltypes import (BooleanType, lookup_casstype_simple, lookup_casstype,
+ LongType, DecimalType, SetType, cql_typename)
+from cassandra.decoder import named_tuple_factory
class TypeTests(unittest.TestCase):
@@ -105,3 +104,12 @@ class TypeTests(unittest.TestCase):
self.assertEqual(cql_typename('DateType'), 'timestamp')
self.assertEqual(cql_typename('org.apache.cassandra.db.marshal.ListType(IntegerType)'), 'list<varint>')
+
+ def test_named_tuple_colname_substitution(self):
+ colnames = ("func(abc)", "[applied]", "func(func(abc))", "foo_bar")
+ rows = [(1, 2, 3, 4)]
+ result = named_tuple_factory(colnames, rows)[0]
+ self.assertEqual(result[0], result.func_abc)
+ self.assertEqual(result[1], result.applied)
+ self.assertEqual(result[2], result.func_func_abc)
+ self.assertEqual(result[3], result.foo_bar)
|
Sanitize column names for named_tuple_factory
Fixes PYTHON-<I>
|
datastax_python-driver
|
train
|
c7c8f5612bb726ca8fe85389c688bb5d412e8fdb
|
diff --git a/server/server.go b/server/server.go
index <HASH>..<HASH> 100644
--- a/server/server.go
+++ b/server/server.go
@@ -96,6 +96,7 @@ type BgpServer struct {
fsmStateCh chan *FsmMsg
acceptCh chan *net.TCPConn
+ mgmtCh chan func()
GrpcReqCh chan *GrpcRequest
policy *table.RoutingPolicy
listeners []*TCPListener
@@ -114,6 +115,7 @@ func NewBgpServer() *BgpServer {
policy: table.NewRoutingPolicy(),
roaManager: roaManager,
watchers: newWatcherManager(),
+ mgmtCh: make(chan func(), 1),
}
}
@@ -216,6 +218,8 @@ func (server *BgpServer) Serve() {
CONT:
select {
+ case f := <-server.mgmtCh:
+ f()
case rmsg := <-server.roaManager.ReceiveROA():
server.roaManager.HandleROAEvent(rmsg)
case conn := <-server.acceptCh:
|
add management task channel
replace GrpcRequest/Response. Simpler and handy golang-native APIs.
|
osrg_gobgp
|
train
|
be11c6b8656e5427c416431e9d551076b942367c
|
diff --git a/app/controllers/sessions_controller.rb b/app/controllers/sessions_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/sessions_controller.rb
+++ b/app/controllers/sessions_controller.rb
@@ -3,11 +3,11 @@ class SessionsController < ApplicationController
auth = request.env["omniauth.auth"]
account = ::Account.from_omniauth(auth)
session[:account_id] = account.id
- redirect_to root_url, notice: "Signed in!"
+ redirect_to after_bookingsync_sign_in_path, notice: "Signed in!"
end
def destroy
session[:account_id] = nil
- redirect_to root_url, notice: "Signed out"
+ redirect_to after_bookingsync_sign_out_path, notice: "Signed out"
end
end
diff --git a/lib/bookingsync/engine/helpers.rb b/lib/bookingsync/engine/helpers.rb
index <HASH>..<HASH> 100644
--- a/lib/bookingsync/engine/helpers.rb
+++ b/lib/bookingsync/engine/helpers.rb
@@ -8,6 +8,14 @@ module BookingSync::Engine::Helpers
private
+ def after_bookingsync_sign_in_path
+ root_path
+ end
+
+ def after_bookingsync_sign_out_path
+ root_path
+ end
+
def handle_oauth_error(error)
if error.code == "Not authorized"
if current_account
|
Applications can override after sign in/out paths
They default to root_path, can be changed by defining
after_bookingsync_sign_in_path and after_bookingsync_sign_out_path
in ApplicationController.
|
BookingSync_bookingsync-engine
|
train
|
914c8a5aabd3e5bf74ace66bb5f308d2f8427549
|
diff --git a/noodles/__init__.py b/noodles/__init__.py
index <HASH>..<HASH> 100644
--- a/noodles/__init__.py
+++ b/noodles/__init__.py
@@ -2,5 +2,4 @@ from .decorator import *
from .datamodel import *
from .run import *
from .run_parallel import run_parallel
-from .run_fireworks import *
from .utility import *
|
removing fireworks runner from ``__init__.py``
|
NLeSC_noodles
|
train
|
fe39aba238ca2dc3d1359f7a0309ead3de4739f0
|
diff --git a/src/front-door/azext_front_door/custom.py b/src/front-door/azext_front_door/custom.py
index <HASH>..<HASH> 100644
--- a/src/front-door/azext_front_door/custom.py
+++ b/src/front-door/azext_front_door/custom.py
@@ -417,17 +417,21 @@ def list_fd_backends(cmd, resource_group_name, front_door_name, backend_pool_nam
def remove_fd_backend(cmd, resource_group_name, front_door_name, backend_pool_name, index):
+ from knack.util import CLIError
client = cf_frontdoor(cmd.cli_ctx, None)
frontdoor = client.get(resource_group_name, front_door_name)
backend_pool = next((x for x in frontdoor.backend_pools if x.name == backend_pool_name), None)
if not backend_pool:
- from knack.util import CLIError
raise CLIError("Backend pool '{}' could not be found on frontdoor '{}'".format(
backend_pool_name, front_door_name))
try:
- backend_pool.backends.pop(index - 1)
+ if index > 0:
+ backend_pool.backends.pop(index - 1)
+ elif index < 0:
+ backend_pool.backends.pop(index)
+ else:
+ raise CLIError('invalid index. Index can range from 1 to {}'.format(len(backend_pool.backends)))
except IndexError:
- from knack.util import CLIError
raise CLIError('invalid index. Index can range from 1 to {}'.format(len(backend_pool.backends)))
client.create_or_update(resource_group_name, front_door_name, frontdoor).result()
diff --git a/src/front-door/setup.py b/src/front-door/setup.py
index <HASH>..<HASH> 100644
--- a/src/front-door/setup.py
+++ b/src/front-door/setup.py
@@ -8,7 +8,7 @@
from codecs import open
from setuptools import setup, find_packages
-VERSION = "1.0.3"
+VERSION = "1.0.4"
CLASSIFIERS = [
'Development Status :: 4 - Beta',
|
fix negative index (#<I>)
|
Azure_azure-cli-extensions
|
train
|
cc15efdad83acd50f21373035029accad786aade
|
diff --git a/plugin/wfs/wfs/src/main/java/org/geomajas/gwt2/plugin/wfs/client/service/WfsService.java b/plugin/wfs/wfs/src/main/java/org/geomajas/gwt2/plugin/wfs/client/service/WfsService.java
index <HASH>..<HASH> 100644
--- a/plugin/wfs/wfs/src/main/java/org/geomajas/gwt2/plugin/wfs/client/service/WfsService.java
+++ b/plugin/wfs/wfs/src/main/java/org/geomajas/gwt2/plugin/wfs/client/service/WfsService.java
@@ -54,7 +54,9 @@ public interface WfsService {
* @author Jan De Moerloose
*/
public enum WfsRequest {
- GETCAPABILITIES("GetCapabilities");
+ GET_CAPABILITIES("GetCapabilities"),
+ DESCRIBE_FEATURE_TYPE("DescribeFeatureType"),
+ GET_FEATURE("GetFeature");
private String request;
diff --git a/plugin/wfs/wfs/src/main/java/org/geomajas/gwt2/plugin/wfs/client/service/WfsServiceImpl.java b/plugin/wfs/wfs/src/main/java/org/geomajas/gwt2/plugin/wfs/client/service/WfsServiceImpl.java
index <HASH>..<HASH> 100644
--- a/plugin/wfs/wfs/src/main/java/org/geomajas/gwt2/plugin/wfs/client/service/WfsServiceImpl.java
+++ b/plugin/wfs/wfs/src/main/java/org/geomajas/gwt2/plugin/wfs/client/service/WfsServiceImpl.java
@@ -176,7 +176,7 @@ public class WfsServiceImpl implements WfsService {
// Parameter: request type
url.append("&request=GetCapabilities");
- return finishUrl(WfsRequest.GETCAPABILITIES, url);
+ return finishUrl(WfsRequest.GET_CAPABILITIES, url);
}
// ------------------------------------------------------------------------
|
GBE-<I>: missing requests in enum
|
geomajas_geomajas-project-client-gwt2
|
train
|
f3f4f4aeb62c6893d3932b0c68f1e4cabe3461a5
|
diff --git a/plugins/pushes/harmony/plugin.rb b/plugins/pushes/harmony/plugin.rb
index <HASH>..<HASH> 100644
--- a/plugins/pushes/harmony/plugin.rb
+++ b/plugins/pushes/harmony/plugin.rb
@@ -12,13 +12,25 @@ module VagrantPlugins
config(:harmony, :push) do
require File.expand_path("../config", __FILE__)
+ init!
Config
end
push(:harmony) do
require File.expand_path("../push", __FILE__)
+ init!
Push
end
+
+ protected
+
+ def self.init!
+ return if defined?(@_init)
+ I18n.load_path << File.expand_path(
+ "templates/locales/TODO.yml", Vagrant.source_root)
+ I18n.reload!
+ @_init = true
+ end
end
end
end
|
pushes/harmony: stub I<I>n
|
hashicorp_vagrant
|
train
|
d7824550d2147565eeeaff8803d95f822888e22f
|
diff --git a/lib/media/drm_engine.js b/lib/media/drm_engine.js
index <HASH>..<HASH> 100644
--- a/lib/media/drm_engine.js
+++ b/lib/media/drm_engine.js
@@ -968,9 +968,10 @@ shaka.media.DrmEngine.prototype.onKeyStatusesChange_ = function(event) {
/**
* Returns a Promise to a map of EME support for well-known key systems.
*
+ * @param {boolean} persistentStateRequired
* @return {!Promise.<!Object.<string, boolean>>}
*/
-shaka.media.DrmEngine.support = function() {
+shaka.media.DrmEngine.support = function(persistentStateRequired) {
// Every object in the support hierarchy has a "basic" member.
// All "basic" members must be true for the library to be usable.
var basic =
@@ -994,8 +995,14 @@ shaka.media.DrmEngine.support = function() {
'com.adobe.primetime'
];
+ var config = {};
+ if (persistentStateRequired) {
+ config.persistentState = 'required';
+ config.sessionTypes = ['persistent-license'];
+ }
+
testKeySystems.forEach(function(keySystem) {
- var p = navigator.requestMediaKeySystemAccess(keySystem, [{}])
+ var p = navigator.requestMediaKeySystemAccess(keySystem, [config])
.then(function() {
support[keySystem] = true;
}, function() {
diff --git a/lib/player.js b/lib/player.js
index <HASH>..<HASH> 100644
--- a/lib/player.js
+++ b/lib/player.js
@@ -251,7 +251,7 @@ shaka.Player.support = function() {
if (basic) {
var manifest = shaka.media.ManifestParser.support();
var media = shaka.media.MediaSourceEngine.support();
- return shaka.media.DrmEngine.support().then(function(drm) {
+ return shaka.media.DrmEngine.support(false).then(function(drm) {
/** @type {!shakaExtern.SupportType} */
var support = {
manifest: manifest,
@@ -807,21 +807,14 @@ shaka.Player.prototype.selectTrack = function(track, opt_clearBuffer) {
if (!this.streamingEngine_)
return;
- /** @type {shakaExtern.Stream} */
- var stream;
var period = this.streamingEngine_.getCurrentPeriod();
- period.streamSets.forEach(function(streamSet) {
- streamSet.streams.forEach(function(curStream) {
- if (curStream.id == track.id)
- stream = curStream;
- });
- });
-
- if (!stream) {
+ var data = shaka.util.StreamUtils.findStreamForTrack(period, track);
+ if (!data) {
shaka.log.error('Unable to find the track with id "' + track.id +
'"; did we change Periods?');
return;
}
+ var stream = data.stream;
// Double check that the track is allowed to be played.
if (!stream.allowedByApplication || !stream.allowedByKeySystem) {
diff --git a/lib/util/stream_utils.js b/lib/util/stream_utils.js
index <HASH>..<HASH> 100644
--- a/lib/util/stream_utils.js
+++ b/lib/util/stream_utils.js
@@ -176,6 +176,26 @@ shaka.util.StreamUtils.getTracks = function(period, activeStreams) {
/**
+ * Find the stream and stream set for the given track.
+ *
+ * @param {shakaExtern.Period} period
+ * @param {shakaExtern.Track} track
+ * @return {?{stream: shakaExtern.Stream, streamSet: shakaExtern.StreamSet}}
+ */
+shaka.util.StreamUtils.findStreamForTrack = function(period, track) {
+ for (var i = 0; i < period.streamSets.length; i++) {
+ var streamSet = period.streamSets[i];
+ for (var j = 0; j < streamSet.streams.length; j++) {
+ var stream = streamSet.streams[j];
+ if (stream.id == track.id)
+ return {stream: stream, streamSet: streamSet};
+ }
+ }
+ return null;
+};
+
+
+/**
* Determines if the given stream set has any playable streams.
* @param {shakaExtern.StreamSet} streamSet
* @return {boolean}
diff --git a/test/drm_engine_integration.js b/test/drm_engine_integration.js
index <HASH>..<HASH> 100644
--- a/test/drm_engine_integration.js
+++ b/test/drm_engine_integration.js
@@ -44,9 +44,9 @@ describe('DrmEngine', function() {
var originalTimeout;
beforeAll(function(done) {
- var supportTest = shaka.media.DrmEngine.support().then(function(result) {
- support = result;
- }).catch(fail);
+ var supportTest = shaka.media.DrmEngine.support(false)
+ .then(function(result) { support = result; })
+ .catch(fail);
originalTimeout = jasmine.DEFAULT_TIMEOUT_INTERVAL;
jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000; // ms
|
Drm support testing can be used to detect offline support.
Change-Id: I4a9ce<I>e<I>e2ff<I>f<I>a<I>bdc5b<I>e3c
|
google_shaka-player
|
train
|
ffd6b008eecbcfddcd90abd7830ecc5c967860e8
|
diff --git a/js/editors/keycontrol.js b/js/editors/keycontrol.js
index <HASH>..<HASH> 100644
--- a/js/editors/keycontrol.js
+++ b/js/editors/keycontrol.js
@@ -6,11 +6,11 @@ var keyboardHelpVisible = false;
$body.keydown(keycontrol);
var panelShortcuts = {
- 49: 'javascript',
- 50: 'css',
- 51: 'html',
- 52: 'console',
- 53: 'live'
+ 49: 'javascript', // 1
+ 50: 'css', // 2
+ 51: 'html', // 3
+ 52: 'console', // 4
+ 53: 'live' // 5
};
function keycontrol(event) {
@@ -156,30 +156,29 @@ function normalise(event) {
altKey: event.altKey,
orig: event
};
-
+
if ( event.which == null && (event.charCode != null || event.keyCode != null) ) {
myEvent.which = event.charCode != null ? event.charCode : event.keyCode;
}
-
- // this is retarded - I'm having to mess with the event just to get Firefox
- // to send through the right value. i.e. when you include a shift key modifier
- // in Firefox, if it's punctuation - event.which is zero :(
- // Note that I'm only doing this for the ? symbol
- if (event.which === 47 && event.type == 'keypress') {
- myEvent.type = 'keydown';
- myEvent.which = event.which == 47 ? 191 : 0;
- }
// Add metaKey to non-Mac browsers (use ctrl for PC's and Meta for Macs)
if ( !event.metaKey && event.ctrlKey ) {
myEvent.metaKey = event.ctrlKey;
}
-
+
+ // this is retarded - I'm having to mess with the event just to get Firefox
+ // to send through the right value. i.e. when you include a shift key modifier
+ // in Firefox, if it's punctuation - event.which is zero :(
+ // Note that I'm only doing this for the ? symbol + ctrl + shift
+ if (event.which === 0 && event.ctrlKey === true && event.shiftKey === true && event.type == 'keydown') {
+ myEvent.which = 191;
+ }
+
var oldStop = event.stop;
myEvent.stop = function () {
myEvent.stopping = true;
oldStop && oldStop.call(event);
};
-
+
return myEvent;
}
\ No newline at end of file
|
Fixed help key shortcut in Firefox. Still madness.
|
jsbin_jsbin
|
train
|
b92011507a0f3aae09427d8929ed9971c88223b7
|
diff --git a/engine/src/main/java/org/camunda/bpm/engine/impl/telemetry/dto/Internals.java b/engine/src/main/java/org/camunda/bpm/engine/impl/telemetry/dto/Internals.java
index <HASH>..<HASH> 100644
--- a/engine/src/main/java/org/camunda/bpm/engine/impl/telemetry/dto/Internals.java
+++ b/engine/src/main/java/org/camunda/bpm/engine/impl/telemetry/dto/Internals.java
@@ -27,10 +27,12 @@ public class Internals {
public static final String SERIALIZED_APPLICATION_SERVER = "application-server";
public static final String SERIALIZED_CAMUNDA_INTEGRATION = "camunda-integration";
+ public static final String SERIALIZED_LICENSE_KEY = "license-key";
protected Database database;
@SerializedName(value = SERIALIZED_APPLICATION_SERVER)
protected ApplicationServer applicationServer;
+ @SerializedName(value = SERIALIZED_LICENSE_KEY)
protected LicenseKeyData licenseKey;
protected Map<String, Command> commands;
@SerializedName(value = SERIALIZED_CAMUNDA_INTEGRATION)
diff --git a/engine/src/main/java/org/camunda/bpm/engine/impl/telemetry/dto/LicenseKeyData.java b/engine/src/main/java/org/camunda/bpm/engine/impl/telemetry/dto/LicenseKeyData.java
index <HASH>..<HASH> 100644
--- a/engine/src/main/java/org/camunda/bpm/engine/impl/telemetry/dto/LicenseKeyData.java
+++ b/engine/src/main/java/org/camunda/bpm/engine/impl/telemetry/dto/LicenseKeyData.java
@@ -18,11 +18,18 @@ package org.camunda.bpm.engine.impl.telemetry.dto;
import java.util.Map;
+import com.google.gson.annotations.SerializedName;
+
public class LicenseKeyData {
+ public static final String SERIALIZED_VALID_UNTIL = "valid-until";
+ public static final String SERIALIZED_IS_UNLIMITED = "unlimited";
+
protected String customer;
protected String type;
+ @SerializedName(value = SERIALIZED_VALID_UNTIL)
protected String validUntil;
+ @SerializedName(value = SERIALIZED_IS_UNLIMITED)
protected Boolean isUnlimited;
protected Map<String, String> features;
protected String raw;
|
adjust serialized names of telemetry license data
|
camunda_camunda-bpm-platform
|
train
|
947d6e8f34094be2f2347869bd35c7a4bdeefdda
|
diff --git a/claripy/operations.py b/claripy/operations.py
index <HASH>..<HASH> 100644
--- a/claripy/operations.py
+++ b/claripy/operations.py
@@ -305,6 +305,9 @@ def boolean_and_simplifier(*args):
if len(args) == 1:
return args[0]
+ if any(a.is_false() for a in args):
+ return ast.all_operations.false
+
if any(a.is_true() for a in args):
new_args = tuple(a for a in args if not a.is_true())
if len(new_args) > 0:
@@ -316,6 +319,9 @@ def boolean_or_simplifier(*args):
if len(args) == 1:
return args[0]
+ if any(a.is_true() for a in args):
+ return ast.all_operations.true
+
if any(a.is_false() for a in args):
new_args = tuple(a for a in args if not a.is_false())
if len(new_args) > 0:
|
the other obvious case for and/or simplification
|
angr_claripy
|
train
|
60576d2a2ebb31df5bed3268a192c823818b15bb
|
diff --git a/tests/distance/test_distance_anderberg.py b/tests/distance/test_distance_anderberg.py
index <HASH>..<HASH> 100644
--- a/tests/distance/test_distance_anderberg.py
+++ b/tests/distance/test_distance_anderberg.py
@@ -49,7 +49,7 @@ class AnderbergTestCases(unittest.TestCase):
self.assertEqual(self.cmp.sim('', 'a'), 0.0)
self.assertEqual(self.cmp.sim('abc', ''), 0.0)
self.assertEqual(self.cmp.sim('', 'abc'), 0.0)
- self.assertEqual(self.cmp.sim('abc', 'abc'), 0.00510204081632653)
+ self.assertEqual(self.cmp.sim('abc', 'abc'), 0.01020408163265306)
self.assertEqual(self.cmp.sim('abcd', 'efgh'), 0.0)
self.assertAlmostEqual(self.cmp.sim('Nigel', 'Niall'), 0.0)
@@ -57,7 +57,26 @@ class AnderbergTestCases(unittest.TestCase):
self.assertAlmostEqual(self.cmp.sim('Colin', 'Coiln'), 0.0)
self.assertAlmostEqual(self.cmp.sim('Coiln', 'Colin'), 0.0)
self.assertAlmostEqual(
- self.cmp.sim('ATCAACGAGT', 'AACGATTAG'), 0.0044642857
+ self.cmp.sim('ATCAACGAGT', 'AACGATTAG'), 0.0089285714
+ )
+
+ def test_anderberg_sim_score(self):
+ """Test abydos.distance.Anderberg.sim_score."""
+ # Base cases
+ self.assertEqual(self.cmp.sim_score('', ''), 0.0)
+ self.assertEqual(self.cmp.sim_score('a', ''), 0.0)
+ self.assertEqual(self.cmp.sim_score('', 'a'), 0.0)
+ self.assertEqual(self.cmp.sim_score('abc', ''), 0.0)
+ self.assertEqual(self.cmp.sim_score('', 'abc'), 0.0)
+ self.assertEqual(self.cmp.sim_score('abc', 'abc'), 0.00510204081632653)
+ self.assertEqual(self.cmp.sim_score('abcd', 'efgh'), 0.0)
+
+ self.assertAlmostEqual(self.cmp.sim_score('Nigel', 'Niall'), 0.0)
+ self.assertAlmostEqual(self.cmp.sim_score('Niall', 'Nigel'), 0.0)
+ self.assertAlmostEqual(self.cmp.sim_score('Colin', 'Coiln'), 0.0)
+ self.assertAlmostEqual(self.cmp.sim_score('Coiln', 'Colin'), 0.0)
+ self.assertAlmostEqual(
+ self.cmp.sim_score('ATCAACGAGT', 'AACGATTAG'), 0.0044642857
)
|
added sim_score/corrected sim values
|
chrislit_abydos
|
train
|
a2cd2a015ae5d497ad7ea74837f3d69095666854
|
diff --git a/datascience/tables.py b/datascience/tables.py
index <HASH>..<HASH> 100644
--- a/datascience/tables.py
+++ b/datascience/tables.py
@@ -639,12 +639,12 @@ class Table(collections.abc.MutableMapping):
binned[label + ' ' + tag] = np.append(counts, 0)
return binned
- ##################
- # Export/Display #
- ##################
+ ##########################
+ # Exporting / Displaying #
+ ##########################
def __repr__(self):
- return '<{0}({1} rows): | {3} |>'.format(
+ return '<{0}({1} cols, {2} rows): | {3} |>'.format(
type(self).__name__,
len(self),self.num_rows,
" | ".join(map(str, self.column_labels)))
|
Make repr more accurate
It was confusing rows with columns before.
|
data-8_datascience
|
train
|
e13bb3cfb415d2cc024e9456da66bbddb0095fa6
|
diff --git a/lib/core/mixin/action/commit.rb b/lib/core/mixin/action/commit.rb
index <HASH>..<HASH> 100644
--- a/lib/core/mixin/action/commit.rb
+++ b/lib/core/mixin/action/commit.rb
@@ -28,6 +28,12 @@ module Commit
false
end
end
+
+ #---
+
+ def commit_ignore
+ [ :commit, :allow_empty, :propogate_commit, :message, :author ]
+ end
#-----------------------------------------------------------------------------
# Operations
diff --git a/lib/core/mixin/action/project.rb b/lib/core/mixin/action/project.rb
index <HASH>..<HASH> 100644
--- a/lib/core/mixin/action/project.rb
+++ b/lib/core/mixin/action/project.rb
@@ -18,6 +18,12 @@ module Project
]
register_str :project_revision, :master, 'nucleon.core.action.project.options.project_revision'
end
+
+ #---
+
+ def project_ignore
+ [ :project_provider, :project_reference, :project_revision ]
+ end
#-----------------------------------------------------------------------------
# Operations
diff --git a/lib/core/mixin/action/push.rb b/lib/core/mixin/action/push.rb
index <HASH>..<HASH> 100644
--- a/lib/core/mixin/action/push.rb
+++ b/lib/core/mixin/action/push.rb
@@ -22,6 +22,12 @@ module Push
register_str :remote, :edit, 'nucleon.core.mixin.action.push.options.remote'
register_str :revision, :master, 'nucleon.core.mixin.action.push.options.revision'
end
+
+ #---
+
+ def push_ignore
+ [ :push, :pull, :propogate_push, :remote, :revision ]
+ end
#-----------------------------------------------------------------------------
# Operations
|
Adding ignore methods to each of the action mixins.
|
coralnexus_nucleon
|
train
|
6f0e8fb3bbeb75a5f0f04d0d879332b2612f3570
|
diff --git a/src/missing-support.js b/src/missing-support.js
index <HASH>..<HASH> 100644
--- a/src/missing-support.js
+++ b/src/missing-support.js
@@ -5,22 +5,38 @@ let formatBrowserName = require('./util').formatBrowserName
let caniuse = require('caniuse-db/fulldata-json/data-1.0')
-function filterStats (browsers, stats) {
- return _.transform(stats, (resultStats, versionData, browser) => {
+function filterStats(browsers, stats) {
+ return _.reduce(stats, function (resultStats, versionData, browser) {
// filter only versions of selected browsers that don't support this
// feature (i.e., don't have 'y' in their stats)
- let versWithoutSupport = _.transform(versionData, (result, support, ver) => {
- let selected = browsers.test(browser, ver)
- if (selected && (!/(^|\s)y($|\s)/.test(support))) {
- result[selected[1]] = support
+ const feature = _.reduce(versionData, function (result, support, ver) {
+ const selected = browsers.test(browser, ver);
+ if (selected) {
+ if(!(/(^|\s)y($|\s)/.test(support))) {
+ const testprop = (/(^|\s)a($|\s)/.test(support) ? 'partial' : 'missing');
+ if(!result[testprop][browser]) {
+ result[testprop][browser] = {};
+ }
+ result[testprop][browser][selected[1]] = support;
+ }
}
- })
- // filter out browsers for which there are *no* (selected) versions lacking
- // support.
- if (_.keys(versWithoutSupport).length !== 0) {
- resultStats[browser] = versWithoutSupport
+ return result;
+ }, { missing: {}, partial: {}});
+
+ if (_.keys(feature.missing).length !== 0) {
+ resultStats.missing = feature.missing;
+ }
+ if (_.keys(feature.partial).length !== 0) {
+ resultStats.partial = feature.partial;
}
- })
+ return resultStats;
+ }, { missing: {}, partial: {}});
+}
+function lackingBrowsers(browserStats) {
+ return _.reduce(browserStats, function (res, versions, browser) {
+ res.push(formatBrowserName(browser, _.keys(versions)))
+ return res;
+ }, []).join(", ");
}
/**
@@ -31,14 +47,18 @@ function filterStats (browsers, stats) {
* ```
* {
* 'feature-name': {
- * title: 'Title of feature',
- * missing: "IE (8), Chrome (31)",
- * partial: "IE (7), Firefox (29)",
+ * title: 'Title of feature'
+ * missing: "IE (8), Chrome (31)"
* missingData: {
* // map of browser -> version -> (lack of)support code
* ie: { '8': 'n' },
* chrome: { '31': 'n' }
- * },
+ * }
+ * partialData: {
+ * // map of browser -> version -> (partial)support code
+ * ie: { '7': 'a' },
+ * ff: { '29': 'a #1' }
+ * }
* caniuseData: {
* // caniuse-db json data for this feature
* }
@@ -49,43 +69,39 @@ function filterStats (browsers, stats) {
*
* `feature-name` is a caniuse-db slug.
*/
-function missing (browserRequest) {
- let browsers = new BrowserSelection(browserRequest)
-
- let result = {}
-
- Object.keys(features).forEach((feature) => {
- let featureData = caniuse.data[feature]
- let missingData = filterStats(browsers, featureData.stats)
+function missing(browserRequest) {
+ const browsers = new BrowserSelection(browserRequest);
+ let result = {};
- // browsers missing support for this feature
- let lackOfSupport = _.reduce(missingData, function (res, versions, browser) {
- const support = versions[_.keys(versions)[0]];
- const browserName = formatBrowserName(browser, _.keys(versions));
- const partial = /(^|\s)a($|\s)/.test(support);
- res[partial ? 'partial' : 'missing'].push(browserName);
- return res;
- }, {missing: [], partial: []});
+ Object.keys(features).forEach(function (feature) {
+ const featureData = caniuse.data[feature];
+ const lackData = filterStats(browsers, featureData.stats);
+ const missingData = lackData.missing;
+ const partialData = lackData.partial;
+ // browsers with missing or partial support for this feature
+ const missing = lackingBrowsers(missingData);
+ const partial = lackingBrowsers(partialData);
- if (lackOfSupport.missing.length > 0 || lackOfSupport.partial.length > 0) {
+ if (missing.length > 0 || partial.length > 0) {
result[feature] = {
title: featureData.title,
- missingData: missingData,
caniuseData: featureData
};
- if (lackOfSupport.missing.length > 0) {
- result[feature].missing = lackOfSupport.missing.join(", ");
+ if (missing.length > 0) {
+ result[feature].missingData = missingData;
+ result[feature].missing = missing;
}
- if (lackOfSupport.partial.length > 0) {
- result[feature].partial = lackOfSupport.partial.join(", ");
+ if (partial.length > 0) {
+ result[feature].partialData = partialData;
+ result[feature].partial = partial;
}
}
- })
+ });
return {
browsers: browsers.list(),
features: result
- }
+ };
}
module.exports = missing
\ No newline at end of file
|
now also included partial data, not only their names
|
anandthakker_doiuse
|
train
|
0548f392a9867683c4cff8dc98d4e75e5f9874b5
|
diff --git a/lib/azure-assets.js b/lib/azure-assets.js
index <HASH>..<HASH> 100644
--- a/lib/azure-assets.js
+++ b/lib/azure-assets.js
@@ -103,7 +103,11 @@ module.exports = Adapter.extend({
contentType: mime.lookup(resolvedFile)
}
- blobService.createBlockBlobFromLocalFile(AZURE_CONTAINER_NAME, targetFile, resolvedFile, function(error, result, response){
+ if(this.config.assets.gzip !== false) {
+ options["contentEncoding"] = "gzip";
+ }
+
+ blobService.createBlockBlobFromLocalFile(AZURE_CONTAINER_NAME, targetFile, resolvedFile, options, function(error, result, response){
if(!error){
// file uploaded
} else {
|
Serve gzipped content from Azure if requested
|
duizendnegen_ember-cli-deploy-azure
|
train
|
21d0e595fe16a1d77bec43d555efde0405b52801
|
diff --git a/kernel/src/main/java/com/qspin/qtaste/config/StaticConfiguration.java b/kernel/src/main/java/com/qspin/qtaste/config/StaticConfiguration.java
index <HASH>..<HASH> 100644
--- a/kernel/src/main/java/com/qspin/qtaste/config/StaticConfiguration.java
+++ b/kernel/src/main/java/com/qspin/qtaste/config/StaticConfiguration.java
@@ -37,7 +37,7 @@ public class StaticConfiguration {
public static final String JYTHON_LIB = JYTHON_HOME + "/Lib";
public static final String TEST_SCRIPT_FILENAME = "TestScript.py";
public static final String TEST_DATA_FILENAME = "TestData.csv";
- public static final String TEST_REQUIREMENTS_FILENAME = "TestRequirements.xml";
+ public static final String TEST_REQUIREMENTS_FILENAME = "Req.xml";
public static final String TEST_SCRIPT_DOC_TOOLS_DIR = QTASTE_ROOT + "/tools/TestScriptDoc";
public static final String TEST_SCRIPT_DOC_XML_FILENAME = "TestScript-doc.xml";
public static final String TEST_SCRIPT_DOC_HTML_FILENAME = "TestScript-doc.html";
diff --git a/kernel/src/main/java/com/qspin/qtaste/ui/xmleditor/TestRequirementEditor.java b/kernel/src/main/java/com/qspin/qtaste/ui/xmleditor/TestRequirementEditor.java
index <HASH>..<HASH> 100644
--- a/kernel/src/main/java/com/qspin/qtaste/ui/xmleditor/TestRequirementEditor.java
+++ b/kernel/src/main/java/com/qspin/qtaste/ui/xmleditor/TestRequirementEditor.java
@@ -190,7 +190,8 @@ public class TestRequirementEditor extends JPanel {
String outputFile = path + File.separator
+ StaticConfiguration.TEST_REQUIREMENTS_FILENAME;
output = new BufferedWriter(new FileWriter(new File(outputFile)));
-
+ output.write("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>");
+ output.newLine();
output.write("<" + XMLFile.ROOT_ELEMENT + ">");
for (TestRequirement req : m_TestRequirementModel.getRequirements()) {
output.newLine();
|
[KERNEL] Change requirement xml file name.
|
qspin_qtaste
|
train
|
14c94a3b2cfdaf7a4b94ee9e460abc430ec761ee
|
diff --git a/cfgrib/dataset.py b/cfgrib/dataset.py
index <HASH>..<HASH> 100644
--- a/cfgrib/dataset.py
+++ b/cfgrib/dataset.py
@@ -257,14 +257,7 @@ def build_geography_coordinates(index, encode_geography, log=LOG):
return geo_dims, geo_shape, geo_coord_vars
-def build_data_var_components(
- index,
- encode_parameter=False, encode_time=False, encode_geography=False, encode_vertical=False,
- log=LOG,
-):
- data_var_attrs_keys = DATA_ATTRIBUTES_KEYS[:]
- data_var_attrs_keys.extend(GRID_TYPE_MAP.get(index.getone('gridType'), []))
- data_var_attrs = enforce_unique_attributes(index, data_var_attrs_keys)
+def do_encode_first(data_var_attrs, coords_map, encode_parameter, encode_time, encode_vertical):
if encode_parameter:
if 'GRIB_cfName' in data_var_attrs:
data_var_attrs['standard_name'] = data_var_attrs['GRIB_cfName']
@@ -272,8 +265,6 @@ def build_data_var_components(
data_var_attrs['long_name'] = data_var_attrs['GRIB_name']
if 'GRIB_units' in data_var_attrs:
data_var_attrs['units'] = data_var_attrs['GRIB_units']
-
- coords_map = HEADER_COORDINATES_MAP[:]
if encode_time:
coords_map.extend(REF_TIME_COORDINATE_MAP)
else:
@@ -282,6 +273,20 @@ def build_data_var_components(
coords_map.extend(PLEV_COORDINATE_MAP)
else:
coords_map.extend(VERTICAL_COORDINATE_MAP)
+
+
+def build_data_var_components(
+ index,
+ encode_parameter=False, encode_time=False, encode_geography=False, encode_vertical=False,
+ log=LOG,
+):
+ data_var_attrs_keys = DATA_ATTRIBUTES_KEYS[:]
+ data_var_attrs_keys.extend(GRID_TYPE_MAP.get(index.getone('gridType'), []))
+ data_var_attrs = enforce_unique_attributes(index, data_var_attrs_keys)
+ coords_map = HEADER_COORDINATES_MAP[:]
+
+ do_encode_first(data_var_attrs, coords_map, encode_parameter, encode_time, encode_vertical)
+
coord_vars = collections.OrderedDict()
for coord_key, increasing in coords_map:
values = sorted(index[coord_key], reverse=not increasing)
|
Move some encode complexity to own function.
|
ecmwf_cfgrib
|
train
|
d657a8c2878ba6335c5e8bce306f2f7b9fccadf8
|
diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
# lettuce
-> Version 0.1.24 - barium
+> Version 0.1.25 - barium
## On release names
diff --git a/lettuce/__init__.py b/lettuce/__init__.py
index <HASH>..<HASH> 100644
--- a/lettuce/__init__.py
+++ b/lettuce/__init__.py
@@ -15,7 +15,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-version = '0.1.24'
+version = '0.1.25'
release = 'barium'
import os
diff --git a/tests/unit/test_main.py b/tests/unit/test_main.py
index <HASH>..<HASH> 100644
--- a/tests/unit/test_main.py
+++ b/tests/unit/test_main.py
@@ -21,7 +21,7 @@ from mox import Mox
def test_has_version():
"A nice python module is supposed to have a version"
- assert_equals(lettuce.version, '0.1.24')
+ assert_equals(lettuce.version, '0.1.25')
def test_import():
"lettuce importer does import"
|
bump to <I> applying pull requests from @garron and @jlsnpi. Thank you guys
|
aloetesting_aloe_django
|
train
|
8a58b2e27506f3344b375d5237f627c8129b96ad
|
diff --git a/app/controllers/neighborly/balanced/bankaccount/routing_numbers_controller.rb b/app/controllers/neighborly/balanced/bankaccount/routing_numbers_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/neighborly/balanced/bankaccount/routing_numbers_controller.rb
+++ b/app/controllers/neighborly/balanced/bankaccount/routing_numbers_controller.rb
@@ -1,6 +1,5 @@
module Neighborly::Balanced::Bankaccount
class RoutingNumbersController < ApplicationController
- skip_before_filter :force_http
def show
routing_number = RoutingNumber.where(number: params[:id]).first
|
Remove the skip filter for force_http
Now the hole apllication is on ssl!
|
FromUte_dune-balanced-bankaccount
|
train
|
9bfc5d9ef1c8acf133aafce723e242dc91ccf41f
|
diff --git a/ChangeLog b/ChangeLog
index <HASH>..<HASH> 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -43,6 +43,9 @@ ChangeLog for PyLint
* #112698: fixed crashes related to non-inferable __all__ attributes and
invalid __all__ contents (patch by Torsten Marek)
+ * Include full warning id for I0020 and I0021 and make sure to flush
+ warnings after each module, not at the end of the pylint run.
+ (patch by Torsten Marek)
2012-10-05 -- 0.26.0
* #106534: add --ignore-imports option to code similarity checking
diff --git a/lint.py b/lint.py
index <HASH>..<HASH> 100644
--- a/lint.py
+++ b/lint.py
@@ -585,6 +585,7 @@ This is used by the global evaluation report (RP0004).'}),
# if it's actually a c extension)
self.current_file = astng.file
self.check_astng_module(astng, walker, rawcheckers)
+ self._add_suppression_messages()
# notify global end
self.set_current_module('')
self.stats['statement'] = walker.nbstatements
@@ -680,7 +681,6 @@ This is used by the global evaluation report (RP0004).'}),
if persistent run, pickle results for later comparison
"""
- self._add_suppression_messages()
if self.base_name is not None:
# load previous results if any
previous_stats = config.load_results(self.base_name)
@@ -705,11 +705,13 @@ This is used by the global evaluation report (RP0004).'}),
for warning, lines in self._raw_module_msgs_state.iteritems():
for line, enable in lines.iteritems():
if not enable and (warning, line) not in self._ignored_msgs:
- self.add_message('I0021', line, None, (warning,))
+ self.add_message('I0021', line, None,
+ (self.get_msg_display_string(warning),))
for (warning, from_), lines in self._ignored_msgs.iteritems():
for line in lines:
- self.add_message('I0020', line, None, (warning, from_))
+ self.add_message('I0020', line, None,
+ (self.get_msg_display_string(warning), from_))
def report_evaluation(self, sect, stats, previous_stats):
"""make the global evaluation report"""
diff --git a/utils.py b/utils.py
index <HASH>..<HASH> 100644
--- a/utils.py
+++ b/utils.py
@@ -264,6 +264,17 @@ class MessagesHandlerMixIn:
except KeyError:
raise UnknownMessage('No such message id %s' % msgid)
+ def get_msg_display_string(self, msgid):
+ """Generates a user-consumable representation of a message.
+
+ Can be just the message ID or the ID and the symbol.
+ """
+ if self.config.symbols:
+ symbol = self.check_message_id(msg_id).symbol
+ if symbol:
+ msgid += '(%s)' % symbol
+ return msgid
+
def get_message_state_scope(self, msgid, line=None):
"""Returns the scope at which a message was enabled/disabled."""
try:
|
Two small fixes for suppression warnings I<I>{2,1}:
- make sure to include the full message name (with symbol, if requested) in the output
- flush I<I>{0,1} warnings after each module, not after complete run
|
PyCQA_pylint
|
train
|
dfd810a9a1021bd2ed0936b8393f2b457d7f778b
|
diff --git a/av/container.pyx b/av/container.pyx
index <HASH>..<HASH> 100644
--- a/av/container.pyx
+++ b/av/container.pyx
@@ -42,9 +42,6 @@ cdef class Container(object):
self.name = name
self.proxy = ContainerProxy()
- if format_name is not None:
- self.proxy.ptr.iformat = self.format.in_
- self.proxy.ptr.oformat = self.format.out
def __repr__(self):
return '<av.%s %r>' % (self.__class__.__name__, self.name)
@@ -53,8 +50,14 @@ cdef class Container(object):
cdef class InputContainer(Container):
def __cinit__(self, *args, **kwargs):
+
err_check(
- lib.avformat_open_input(&self.proxy.ptr, self.name, NULL, NULL),
+ lib.avformat_open_input(
+ &self.proxy.ptr,
+ self.name,
+ self.format.in_ if self.format else NULL,
+ NULL
+ ),
self.name,
)
err_check(lib.avformat_find_stream_info(self.proxy.ptr, NULL))
@@ -172,13 +175,13 @@ cdef class OutputContainer(Container):
def __cinit__(self, *args, **kwargs):
- cdef lib.AVOutputFormat* container_format = lib.av_guess_format(NULL, self.name, NULL)
- if not container_format:
+ cdef lib.AVOutputFormat* format = self.format.out if self.format else lib.av_guess_format(NULL, self.name, NULL)
+ if not format:
raise ValueError("Could not deduce output format")
err_check(lib.avformat_alloc_output_context2(
&self.proxy.ptr,
- container_format,
+ format,
NULL,
self.name,
))
diff --git a/examples/decode.py b/examples/decode.py
index <HASH>..<HASH> 100644
--- a/examples/decode.py
+++ b/examples/decode.py
@@ -17,6 +17,7 @@ def format_time(time, time_base):
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('path')
+arg_parser.add_argument('-f', '--format')
arg_parser.add_argument('-a', '--audio', action='store_true')
arg_parser.add_argument('-v', '--video', action='store_true')
arg_parser.add_argument('-s', '--subs', action='store_true')
@@ -28,7 +29,7 @@ args = arg_parser.parse_args()
proc = None
-video = open(args.path)
+video = open(args.path, format=args.format)
print 'container:', video
print '\tformat:', video.format
diff --git a/examples/filmstrip.py b/examples/filmstrip.py
index <HASH>..<HASH> 100644
--- a/examples/filmstrip.py
+++ b/examples/filmstrip.py
@@ -1,3 +1,4 @@
+import argparse
import os
import sys
import pprint
@@ -8,18 +9,28 @@ from PIL import Image
from av import open
+parser = argparse.ArgumentParser()
+parser.add_argument('-f', '--format')
+parser.add_argument('-n', '--frames', type=int, default=0)
+parser.add_argument('path', nargs='+')
+args = parser.parse_args()
+
max_size = 24 * 60 # One minute's worth.
def frame_iter(video):
+ count = 0
streams = [s for s in video.streams if s.type == b'video']
streams = [streams[0]]
for packet in video.demux(streams):
for frame in packet.decode():
yield frame
+ count += 1
+ if args.frames and count > args.frames:
+ return
-for src_path in sys.argv[1:]:
+for src_path in args.path:
print src_path
@@ -28,7 +39,7 @@ for src_path in sys.argv[1:]:
if not os.path.exists(dir_name):
os.makedirs(dir_name)
- video = open(src_path)
+ video = open(src_path, format=args.format)
frames = frame_iter(video)
for chunk_i in itertools.count(1):
@@ -40,7 +51,7 @@ for src_path in sys.argv[1:]:
if chunk is None:
chunk = Image.new("RGB", (max_size, frame.height))
- img = Image.frombuffer("RGB", (frame.width, frame.height), frame.to_rgb(), "raw", "RGB", 0, 1)
+ img = frame.to_image()
img = img.resize((1, frame.height), Image.ANTIALIAS)
chunk.paste(img, (frame_i, 0))
|
Properly hook requested formats into InputContainer (OutputContainer untested)
Finally some progress on #<I> and #<I>.
|
mikeboers_PyAV
|
train
|
ba9be92632f959a38abe25296cfab425826d8851
|
diff --git a/src/Resolver.php b/src/Resolver.php
index <HASH>..<HASH> 100644
--- a/src/Resolver.php
+++ b/src/Resolver.php
@@ -144,7 +144,9 @@ class Resolver{
// $resolvedRoute->setOrigin( ... ); // set previous route
return $resolvedRoute; // return augmented route object clone
}
- throw new ResolverException( 'Route is not resolvable' );
+ if( $strict )
+ throw new ResolverException( 'Route is not resolvable' );
+ return FALSE;
}
}
?>
|
Support non-strict mode in resolver.
|
CeusMedia_Router
|
train
|
21e662c77468d1e3fc4efaa049021e2ad0e49b4d
|
diff --git a/integration-cli/docker_cli_search_test.go b/integration-cli/docker_cli_search_test.go
index <HASH>..<HASH> 100644
--- a/integration-cli/docker_cli_search_test.go
+++ b/integration-cli/docker_cli_search_test.go
@@ -44,54 +44,6 @@ func (s *DockerSuite) TestSearchStarsOptionWithWrongParameter(c *check.C) {
assert.Assert(c, strings.Contains(out, "invalid syntax"), "couldn't find the invalid value warning")
}
-func (s *DockerSuite) TestSearchCmdOptions(c *check.C) {
- testRequires(c, Network, DaemonIsLinux)
-
- out, _ := dockerCmd(c, "search", "--help")
- assert.Assert(c, strings.Contains(out, "Usage:\tdocker search [OPTIONS] TERM"))
-
- outSearchCmd, _ := dockerCmd(c, "search", "busybox")
- assert.Assert(c, strings.Count(outSearchCmd, "\n") > 3, outSearchCmd)
- outSearchCmdNotrunc, _ := dockerCmd(c, "search", "--no-trunc=true", "busybox")
- assert.Assert(c, len(outSearchCmd) <= len(outSearchCmdNotrunc), "The no-trunc option can't take effect.")
-
- outSearchCmdautomated, _ := dockerCmd(c, "search", "--filter", "is-automated=true", "busybox") //The busybox is a busybox base image, not an AUTOMATED image.
- outSearchCmdautomatedSlice := strings.Split(outSearchCmdautomated, "\n")
- for i := range outSearchCmdautomatedSlice {
- assert.Assert(c, !strings.HasPrefix(outSearchCmdautomatedSlice[i], "busybox "), "The busybox is not an AUTOMATED image: %s", outSearchCmdautomated)
- }
-
- outSearchCmdNotOfficial, _ := dockerCmd(c, "search", "--filter", "is-official=false", "busybox") //The busybox is a busybox base image, official image.
- outSearchCmdNotOfficialSlice := strings.Split(outSearchCmdNotOfficial, "\n")
- for i := range outSearchCmdNotOfficialSlice {
- assert.Assert(c, !strings.HasPrefix(outSearchCmdNotOfficialSlice[i], "busybox "), "The busybox is not an OFFICIAL image: %s", outSearchCmdNotOfficial)
- }
-
- outSearchCmdOfficial, _ := dockerCmd(c, "search", "--filter", "is-official=true", "busybox") //The busybox is a busybox base image, official image.
- outSearchCmdOfficialSlice := strings.Split(outSearchCmdOfficial, "\n")
- assert.Equal(c, len(outSearchCmdOfficialSlice), 3) // 1 header, 1 line, 1 carriage return
- assert.Assert(c, strings.HasPrefix(outSearchCmdOfficialSlice[1], "busybox "), "The busybox is an OFFICIAL image: %s", outSearchCmdOfficial)
-
- outSearchCmdStars, _ := dockerCmd(c, "search", "--filter", "stars=10", "busybox")
- assert.Assert(c, strings.Count(outSearchCmdStars, "\n") <= strings.Count(outSearchCmd, "\n"), "Number of images with 10+ stars should be less than that of all images:\noutSearchCmdStars: %s\noutSearch: %s\n", outSearchCmdStars, outSearchCmd)
-
- dockerCmd(c, "search", "--filter", "is-automated=true", "--filter", "stars=2", "--no-trunc=true", "busybox")
-
- // --automated deprecated since Docker 1.13
- outSearchCmdautomated1, _ := dockerCmd(c, "search", "--automated=true", "busybox") //The busybox is a busybox base image, not an AUTOMATED image.
- outSearchCmdautomatedSlice1 := strings.Split(outSearchCmdautomated1, "\n")
- for i := range outSearchCmdautomatedSlice1 {
- assert.Assert(c, !strings.HasPrefix(outSearchCmdautomatedSlice1[i], "busybox "), "The busybox is not an AUTOMATED image: %s", outSearchCmdautomated)
- }
-
- // -s --stars deprecated since Docker 1.13
- outSearchCmdStars1, _ := dockerCmd(c, "search", "--stars=2", "busybox")
- assert.Assert(c, strings.Count(outSearchCmdStars1, "[OK]") <= strings.Count(outSearchCmd, "[OK]"), "The quantity of images with stars should be less than that of all images: %s", outSearchCmdStars1)
-
- // -s --stars deprecated since Docker 1.13
- dockerCmd(c, "search", "--stars=2", "--automated=true", "--no-trunc=true", "busybox")
-}
-
// search for repos which start with "ubuntu-" on the central registry
func (s *DockerSuite) TestSearchOnCentralRegistryWithDash(c *check.C) {
testRequires(c, Network, DaemonIsLinux)
|
Remove TestSearchCmdOptions test
This test is dependent on the search results returned by Docker Hub, which
can change at any moment, and causes this test to be unpredictable.
Removing this test instead of trying to catch up with Docker Hub any time
the results change, because it's effectively testing Docker Hub, and not
the daemon.
Unit tests are already in place to test the core functionality of the daemon,
so it should be safe to remove this test.
|
moby_moby
|
train
|
20e15fde559065bccae45fca03a8c8e6ae44a468
|
diff --git a/lib/Doctrine/Common/ClassLoader.php b/lib/Doctrine/Common/ClassLoader.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/Common/ClassLoader.php
+++ b/lib/Doctrine/Common/ClassLoader.php
@@ -234,9 +234,13 @@ class ClassLoader
} else if (is_string($loader) && $loader($className)) { // "MyClass::loadClass"
return true;
}
+
+ if (class_exists($className, false) || interface_exists($className, false)) {
+ return true;
+ }
}
- return class_exists($className, false) || interface_exists($className, false);
+ return false;
}
/**
|
Providing fix for silent loaders support
|
doctrine_common
|
train
|
2db63b2d793706fc84e7d11ab8bc84ca6a9e97a7
|
diff --git a/enterprise/src/test/java/org/graylog/plugins/enterprise/search/elasticsearch/ElasticsearchBackendTest.java b/enterprise/src/test/java/org/graylog/plugins/enterprise/search/elasticsearch/ElasticsearchBackendTest.java
index <HASH>..<HASH> 100644
--- a/enterprise/src/test/java/org/graylog/plugins/enterprise/search/elasticsearch/ElasticsearchBackendTest.java
+++ b/enterprise/src/test/java/org/graylog/plugins/enterprise/search/elasticsearch/ElasticsearchBackendTest.java
@@ -1,11 +1,14 @@
package org.graylog.plugins.enterprise.search.elasticsearch;
import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import org.graylog.plugins.enterprise.search.Parameter;
import org.graylog.plugins.enterprise.search.Query;
import org.graylog.plugins.enterprise.search.QueryInfo;
import org.graylog.plugins.enterprise.search.QueryParameter;
+import org.graylog.plugins.enterprise.search.Search;
+import org.graylog.plugins.enterprise.search.SearchJob;
import org.graylog.plugins.enterprise.search.SearchType;
import org.graylog.plugins.enterprise.search.elasticsearch.searchtypes.ESDateHistogram;
import org.graylog.plugins.enterprise.search.elasticsearch.searchtypes.ESMessageList;
@@ -19,9 +22,11 @@ import org.junit.BeforeClass;
import org.junit.Test;
import javax.inject.Provider;
+import java.util.Collections;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.Assert.fail;
public class ElasticsearchBackendTest {
@@ -54,4 +59,25 @@ public class ElasticsearchBackendTest {
Maps.immutableEntry("foo", QueryParameter.any("foo")));
}
+ @Test
+ public void unboundParameter() throws Exception {
+ try {
+ final Query query = Query.builder()
+ .id("query1")
+ .timerange(RelativeRange.create(600))
+ .query(ElasticsearchQueryString.builder().queryString("_exists_:$TESTPARAM$").build())
+ .searchTypes(ImmutableSet.of(MessageList.builder().id("1").build()))
+ .build();
+ final Search search = Search.builder()
+ .id("search1")
+ .queries(ImmutableSet.of(query))
+ .build();
+ final SearchJob job = new SearchJob("job1", search);
+
+ backend.generate(job, query, Collections.emptySet());
+ fail("Must throw exception");
+ } catch (IllegalStateException e) {
+ assertThat(e).hasMessageContaining("TESTPARAM");
+ }
+ }
}
\ No newline at end of file
|
using an unbound, implicitly required, parameter should throw a proper exception (Graylog2/graylog-plugin-enterprise#<I>)
this will eventually be included in a proper error response, but for now we simply
throw something more specific than the NPE before
fixes Graylog2/graylog-plugin-enterprise#<I>
|
Graylog2_graylog2-server
|
train
|
1463ee306893d41c0e83636d398f06506a7f4b8c
|
diff --git a/xmantissa/webapp.py b/xmantissa/webapp.py
index <HASH>..<HASH> 100644
--- a/xmantissa/webapp.py
+++ b/xmantissa/webapp.py
@@ -401,7 +401,7 @@ def upgradePrivateApplication1To2(oldApp):
privateKey=oldApp.privateKey,
privateIndexPage=oldApp.privateIndexPage)
newApp.installedOn.findOrCreate(
- CustomizedPublicPage,
- prefixURL=u'').installOn(newApp.installedOn)
+ CustomizedPublicPage).installOn(newApp.installedOn)
return newApp
+
upgrade.registerUpgrader(upgradePrivateApplication1To2, 'private_web_application', 1, 2)
|
remove prefixURL assignment in CustomizedPublicPage's installation
|
twisted_mantissa
|
train
|
bfa89682e782d9d7d62c168869739792147230b3
|
diff --git a/packages/neos-ui/src/Containers/LeftSideBar/NodeTree/Node/index.js b/packages/neos-ui/src/Containers/LeftSideBar/NodeTree/Node/index.js
index <HASH>..<HASH> 100644
--- a/packages/neos-ui/src/Containers/LeftSideBar/NodeTree/Node/index.js
+++ b/packages/neos-ui/src/Containers/LeftSideBar/NodeTree/Node/index.js
@@ -25,8 +25,8 @@ export default class Node extends PureComponent {
currentDocumentNodeContextPath: PropTypes.string,
focusedNodeContextPath: PropTypes.string,
toggledNodeContextPaths: PropTypes.object,
- hiddenContextPaths: PropTypes.arrayOf(PropTypes.string),
- intermediateContextPaths: PropTypes.arrayOf(PropTypes.string),
+ hiddenContextPaths: PropTypes.object,
+ intermediateContextPaths: PropTypes.object,
loadingNodeContextPaths: PropTypes.object,
errorNodeContextPaths: PropTypes.object,
canBeInserted: PropTypes.bool,
|
BUGFIX: hiddenContextPaths and intermediateContextPaths are objects not arrays
|
neos_neos-ui
|
train
|
ba2eacc10431951d84832c442f945290292f64e3
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -24,7 +24,7 @@ Copyright:
from setuptools import setup
setup(name='bio_utils',
- version='0.7.19a12',
+ version='0.7.19a13',
description='library of common bioinformatic tasks',
classifiers=[
'Development Status :: 3 - Alpha',
@@ -40,7 +40,7 @@ setup(name='bio_utils',
keywords='bioinformatics iterators verifiers verify iterate utilities',
url='https://github.com/Brazelton-Lab/bio_utils/',
download_url='https://github.com/Brazelton-Lab/metameta/tarball/'
- '0.7.19a12',
+ '0.7.19a13',
author='Alex Hyer',
author_email='theonehyer@gmail.com',
license='GPLv3',
diff --git a/source/conf.py b/source/conf.py
index <HASH>..<HASH> 100644
--- a/source/conf.py
+++ b/source/conf.py
@@ -34,7 +34,7 @@ extensions = [
'sphinx.ext.intersphinx',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
- 'sphinx.ext.githubpages',
+ #'sphinx.ext.githubpages',
]
# Add any paths that contain templates here, relative to this directory.
@@ -63,7 +63,7 @@ author = 'William Brazelton, Alex Hyer, Christopher Thornton'
# The short X.Y version.
version = '0.7'
# The full version, including alpha/beta/rc tags.
-release = '0.7.19a12'
+release = '0.7.19a13'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
|
Removed line from conf.py
Removed line to attempt to use RTD
|
Brazelton-Lab_bio_utils
|
train
|
a73b86fb8579b8d5ec9d06c0a37b8f67e967251e
|
diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -36,9 +36,6 @@ module.exports = {
get DSAPI() {
return require('./dsapi');
},
- get UsageAPI() {
- return require('./usageapi');
- },
get SAPI() {
return require('./sapi');
},
|
RELENG-<I>: retire usageapi builds/refs
|
joyent_node-sdc-clients
|
train
|
7c671269b64ccb60234b0be4777d418f27e9c501
|
diff --git a/tests/create_test.py b/tests/create_test.py
index <HASH>..<HASH> 100644
--- a/tests/create_test.py
+++ b/tests/create_test.py
@@ -235,7 +235,7 @@ class CreateTest(unittest.TestCase):
self.assertIn('multipart/form-data', req.headers['Content-Type'])
bdy = req._request.body
- messages = req.text.split('--'+bdy)
+ messages = req.text.split('--'+str(bdy))
for msg in messages:
msg = msg.strip()
|
can py3 just stop crying?
|
Alveo_pyalveo
|
train
|
4c0a267e17dba6e205079c397a05026c887b6352
|
diff --git a/tests/MaxMind/Db/Test/Reader/MetadataTest.php b/tests/MaxMind/Db/Test/Reader/MetadataTest.php
index <HASH>..<HASH> 100644
--- a/tests/MaxMind/Db/Test/Reader/MetadataTest.php
+++ b/tests/MaxMind/Db/Test/Reader/MetadataTest.php
@@ -44,7 +44,7 @@ class MetadataTest extends TestCase
public function testTooManyConstructorArgs(): void
{
$this->expectException(ArgumentCountError::class);
- $this->expectExceptionMessage('MaxMind\Db\Reader\Metadata::__construct() expects exactly 1 parameter, 2 given');
+ $this->expectExceptionMessage('MaxMind\Db\Reader\Metadata::__construct() expects exactly 1');
new Metadata([], 1);
}
diff --git a/tests/MaxMind/Db/Test/ReaderTest.php b/tests/MaxMind/Db/Test/ReaderTest.php
index <HASH>..<HASH> 100644
--- a/tests/MaxMind/Db/Test/ReaderTest.php
+++ b/tests/MaxMind/Db/Test/ReaderTest.php
@@ -302,7 +302,7 @@ class ReaderTest extends TestCase
public function testTooManyConstructorArgs(): void
{
$this->expectException(ArgumentCountError::class);
- $this->expectExceptionMessage('MaxMind\Db\Reader::__construct() expects exactly 1 parameter, 2 given');
+ $this->expectExceptionMessage('MaxMind\Db\Reader::__construct() expects exactly 1');
new Reader('README.md', 1);
}
@@ -318,7 +318,7 @@ class ReaderTest extends TestCase
public function testTooManyGetArgs(): void
{
$this->expectException(ArgumentCountError::class);
- $this->expectExceptionMessage('MaxMind\Db\Reader::get() expects exactly 1 parameter, 2 given');
+ $this->expectExceptionMessage('MaxMind\Db\Reader::get() expects exactly 1');
$reader = new Reader(
'tests/data/test-data/MaxMind-DB-test-decoder.mmdb'
);
@@ -340,7 +340,7 @@ class ReaderTest extends TestCase
public function testMetadataArgs(): void
{
$this->expectException(ArgumentCountError::class);
- $this->expectExceptionMessage('MaxMind\Db\Reader::metadata() expects exactly 0 parameters, 1 given');
+ $this->expectExceptionMessage('MaxMind\Db\Reader::metadata() expects exactly 0');
$reader = new Reader(
'tests/data/test-data/MaxMind-DB-test-decoder.mmdb'
);
@@ -360,7 +360,7 @@ class ReaderTest extends TestCase
public function testCloseArgs(): void
{
$this->expectException(ArgumentCountError::class);
- $this->expectExceptionMessage('MaxMind\Db\Reader::close() expects exactly 0 parameters, 1 given');
+ $this->expectExceptionMessage('MaxMind\Db\Reader::close() expects exactly 0');
$reader = new Reader(
'tests/data/test-data/MaxMind-DB-test-decoder.mmdb'
);
|
relax test for <I>RC1
|
maxmind_MaxMind-DB-Reader-php
|
train
|
e0e903291a305043516fa7493c6e76a70bc7ce7a
|
diff --git a/mod/chat/lang/en/chat.php b/mod/chat/lang/en/chat.php
index <HASH>..<HASH> 100644
--- a/mod/chat/lang/en/chat.php
+++ b/mod/chat/lang/en/chat.php
@@ -98,7 +98,7 @@ $string['nomessages'] = 'No messages yet';
$string['normalkeepalive'] = 'KeepAlive';
$string['normalstream'] = 'Stream';
$string['noscheduledsession'] = 'No scheduled session';
-$string['notallowenter'] = 'You are not allow to enter the chat room.';
+$string['notallowenter'] = 'You are not allowed to enter the chat room.';
$string['notlogged'] = 'You are not logged in!';
$string['nopermissiontoseethechatlog'] = 'You don\'t have permission to see the chat logs.';
$string['oldping'] = 'Disconnect timeout';
|
MDL-<I>: Corrected language string when not allowed to enter the chat room.
|
moodle_moodle
|
train
|
2ead1fdb540d4fc6023bfa7294869ea7a18bcbc9
|
diff --git a/tests/test.js b/tests/test.js
index <HASH>..<HASH> 100644
--- a/tests/test.js
+++ b/tests/test.js
@@ -315,7 +315,7 @@ describe('integration tests', function () {
});
});
- it('should throw if trying to order by OneToMany relation properties', function () {
+ it('should throw if trying to order by HasManyRelation relation properties', function () {
expect(function () {
objectionFind(Person).build({"orderBy": "movies.name"});
diff --git a/tests/utils.js b/tests/utils.js
index <HASH>..<HASH> 100644
--- a/tests/utils.js
+++ b/tests/utils.js
@@ -166,7 +166,7 @@ function createModels(knex) {
Person.relationMappings = {
parent: {
- relation: objection.OneToOneRelation,
+ relation: objection.BelongsToOneRelation,
modelClass: Person,
join: {
from: 'Person.pid',
@@ -175,7 +175,7 @@ function createModels(knex) {
},
pets: {
- relation: objection.OneToManyRelation,
+ relation: objection.HasManyRelation,
modelClass: Animal,
join: {
from: 'Person.id',
|
Update tests with refactored method names
|
Vincit_objection-find
|
train
|
f7950b480c0acf7fdc7be948a58dabbefc105855
|
diff --git a/actionview/test/template/output_safety_helper_test.rb b/actionview/test/template/output_safety_helper_test.rb
index <HASH>..<HASH> 100644
--- a/actionview/test/template/output_safety_helper_test.rb
+++ b/actionview/test/template/output_safety_helper_test.rb
@@ -94,12 +94,13 @@ class OutputSafetyHelperTest < ActionView::TestCase
end
test "to_sentence is not affected by $," do
+ separator_was = $,
$, = "|"
begin
assert_equal "one and two", to_sentence(["one", "two"])
assert_equal "one, two, and three", to_sentence(["one", "two", "three"])
ensure
- $, = nil
+ $, = separator_was
end
end
end
|
Keep the value of `$,` and restore it
As unit tests, we do not know the value of `$,` when this
test case started. It's better to keep the value when the
test case finished.
|
rails_rails
|
train
|
3e97cb7a73c526b596417ce0d1e08331fba95f01
|
diff --git a/shardingsphere-scaling/shardingsphere-scaling-core/src/test/java/org/apache/shardingsphere/scaling/core/api/impl/ScalingAPIImplTest.java b/shardingsphere-scaling/shardingsphere-scaling-core/src/test/java/org/apache/shardingsphere/scaling/core/api/impl/ScalingAPIImplTest.java
index <HASH>..<HASH> 100644
--- a/shardingsphere-scaling/shardingsphere-scaling-core/src/test/java/org/apache/shardingsphere/scaling/core/api/impl/ScalingAPIImplTest.java
+++ b/shardingsphere-scaling/shardingsphere-scaling-core/src/test/java/org/apache/shardingsphere/scaling/core/api/impl/ScalingAPIImplTest.java
@@ -17,6 +17,7 @@
package org.apache.shardingsphere.scaling.core.api.impl;
+import lombok.SneakyThrows;
import org.apache.shardingsphere.governance.repository.api.config.GovernanceCenterConfiguration;
import org.apache.shardingsphere.governance.repository.api.config.GovernanceConfiguration;
import org.apache.shardingsphere.scaling.core.api.JobInfo;
@@ -28,6 +29,8 @@ import org.apache.shardingsphere.scaling.core.fixture.EmbedTestingServer;
import org.apache.shardingsphere.scaling.core.job.JobStatus;
import org.apache.shardingsphere.scaling.core.job.progress.JobProgress;
import org.apache.shardingsphere.scaling.core.util.JobConfigurationUtil;
+import org.apache.shardingsphere.scaling.core.util.ReflectionUtil;
+import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -42,18 +45,14 @@ import static org.junit.Assert.assertTrue;
public final class ScalingAPIImplTest {
- private final ScalingAPI scalingAPI = ScalingAPIFactory.getScalingAPI();
+ private static ScalingAPI scalingAPI;
@BeforeClass
+ @SneakyThrows(ReflectiveOperationException.class)
public static void init() {
EmbedTestingServer.start();
- ScalingContext.getInstance().init(mockServerConfig());
- }
-
- private static ServerConfiguration mockServerConfig() {
- ServerConfiguration result = new ServerConfiguration();
- result.setGovernanceConfig(new GovernanceConfiguration("test", new GovernanceCenterConfiguration("Zookeeper", EmbedTestingServer.getConnectionString(), new Properties()), true));
- return result;
+ ReflectionUtil.setFieldValue(ScalingContext.getInstance(), "serverConfig", mockServerConfig());
+ scalingAPI = ScalingAPIFactory.getScalingAPI();
}
@Test
@@ -106,4 +105,16 @@ public final class ScalingAPIImplTest {
Map<Integer, JobProgress> jobProgressMap = scalingAPI.getProgress(jobId.get());
assertThat(jobProgressMap.size(), is(2));
}
+
+ @AfterClass
+ @SneakyThrows(ReflectiveOperationException.class)
+ public static void afterClass() {
+ ReflectionUtil.setFieldValue(ScalingContext.getInstance(), "serverConfig", null);
+ }
+
+ private static ServerConfiguration mockServerConfig() {
+ ServerConfiguration result = new ServerConfiguration();
+ result.setGovernanceConfig(new GovernanceConfiguration("test", new GovernanceCenterConfiguration("Zookeeper", EmbedTestingServer.getConnectionString(), new Properties()), true));
+ return result;
+ }
}
|
Optimize ScalingAPIImplTest (#<I>)
* Optimize ScalingAPIImplTest
Co-authored-by: qiulu3 <Lucas<I>>
|
apache_incubator-shardingsphere
|
train
|
5aee90f31357ba4850063d8de292c7258fce6528
|
diff --git a/actors/v2actions/route.go b/actors/v2actions/route.go
index <HASH>..<HASH> 100644
--- a/actors/v2actions/route.go
+++ b/actors/v2actions/route.go
@@ -1,13 +1,15 @@
package v2actions
-import (
- "fmt"
-
- "code.cloudfoundry.org/cli/api/cloudcontroller/ccv2"
-)
+import "fmt"
// Route represents a CLI Route.
-type Route ccv2.Route
+type Route struct {
+ GUID string
+ Host string
+ Domain string
+ Path string
+ Port int
+}
// OrphanedRoutesNotFoundError is an error wrapper that represents the case
// when no orphaned routes are found.
@@ -40,7 +42,7 @@ func (actor Actor) GetOrphanedRoutesBySpace(spaceGUID string) ([]Route, Warnings
}
if len(apps) == 0 {
- domain, warnings, err := actor.GetDomain(route.DomainFields.GUID)
+ domain, warnings, err := actor.GetDomain(route.DomainGUID)
allWarnings = append(allWarnings, warnings...)
if err != nil {
return nil, allWarnings, err
diff --git a/actors/v2actions/route_test.go b/actors/v2actions/route_test.go
index <HASH>..<HASH> 100644
--- a/actors/v2actions/route_test.go
+++ b/actors/v2actions/route_test.go
@@ -44,12 +44,12 @@ var _ = Describe("Route Actions", func() {
BeforeEach(func() {
fakeCloudControllerClient.GetSpaceRoutesReturns([]ccv2.Route{
{
- GUID: "orphaned-route-guid-1",
- DomainFields: ccv2.Domain{GUID: "some-domain-guid", Name: " some-domain.com"},
+ GUID: "orphaned-route-guid-1",
+ DomainGUID: "some-domain-guid",
},
{
- GUID: "orphaned-route-guid-2",
- DomainFields: ccv2.Domain{GUID: "some-other-domain-guid", Name: "some-other-domain.com"},
+ GUID: "orphaned-route-guid-2",
+ DomainGUID: "some-other-domain-guid",
},
{
GUID: "not-orphaned-route-guid-3",
diff --git a/api/cloudcontroller/ccv2/route.go b/api/cloudcontroller/ccv2/route.go
index <HASH>..<HASH> 100644
--- a/api/cloudcontroller/ccv2/route.go
+++ b/api/cloudcontroller/ccv2/route.go
@@ -9,12 +9,11 @@ import (
// Route represents a Cloud Controller Route.
type Route struct {
- GUID string
- Host string
- Domain string
- Path string
- Port int
- DomainFields Domain
+ GUID string
+ Host string
+ Path string
+ Port int
+ DomainGUID string
}
// UnmarshalJSON helps unmarshal a Cloud Controller Route response.
@@ -36,7 +35,7 @@ func (route *Route) UnmarshalJSON(data []byte) error {
route.Host = ccRoute.Entity.Host
route.Path = ccRoute.Entity.Path
route.Port = ccRoute.Entity.Port
- route.DomainFields.GUID = ccRoute.Entity.DomainGUID
+ route.DomainGUID = ccRoute.Entity.DomainGUID
return nil
}
diff --git a/api/cloudcontroller/ccv2/route_test.go b/api/cloudcontroller/ccv2/route_test.go
index <HASH>..<HASH> 100644
--- a/api/cloudcontroller/ccv2/route_test.go
+++ b/api/cloudcontroller/ccv2/route_test.go
@@ -100,32 +100,32 @@ var _ = Describe("Route", func() {
Expect(err).NotTo(HaveOccurred())
Expect(routes).To(ConsistOf([]Route{
{
- GUID: "route-guid-1",
- Host: "host-1",
- Path: "path",
- Port: 0,
- DomainFields: Domain{GUID: "some-http-domain"},
+ GUID: "route-guid-1",
+ Host: "host-1",
+ Path: "path",
+ Port: 0,
+ DomainGUID: "some-http-domain",
},
{
- GUID: "route-guid-2",
- Host: "host-2",
- Path: "",
- Port: 3333,
- DomainFields: Domain{GUID: "some-tcp-domain"},
+ GUID: "route-guid-2",
+ Host: "host-2",
+ Path: "",
+ Port: 3333,
+ DomainGUID: "some-tcp-domain",
},
{
- GUID: "route-guid-3",
- Host: "host-3",
- Path: "path",
- Port: 0,
- DomainFields: Domain{GUID: "some-http-domain"},
+ GUID: "route-guid-3",
+ Host: "host-3",
+ Path: "path",
+ Port: 0,
+ DomainGUID: "some-http-domain",
},
{
- GUID: "route-guid-4",
- Host: "host-4",
- Path: "",
- Port: 333,
- DomainFields: Domain{GUID: "some-tcp-domain"},
+ GUID: "route-guid-4",
+ Host: "host-4",
+ Path: "",
+ Port: 333,
+ DomainGUID: "some-tcp-domain",
},
}))
Expect(warnings).To(ConsistOf(Warnings{"this is a warning", "this is another warning"}))
|
separated out the ccv2.Route struct from the v2actions.Route struct
- ccv2.Route had extra fields that were only used in the v2actions
package
[#<I>, #<I>]
|
cloudfoundry_cli
|
train
|
296b74e01a9409beb593a69ae885b30875031bb2
|
diff --git a/training/deepspeech_training/util/sample_collections.py b/training/deepspeech_training/util/sample_collections.py
index <HASH>..<HASH> 100644
--- a/training/deepspeech_training/util/sample_collections.py
+++ b/training/deepspeech_training/util/sample_collections.py
@@ -18,6 +18,7 @@ from .audio import (
get_audio_type_from_extension,
write_wav
)
+from .io import open_remote
BIG_ENDIAN = 'big'
INT_SIZE = 4
@@ -80,7 +81,7 @@ def load_sample(filename, label=None):
audio_type = get_audio_type_from_extension(ext)
if audio_type is None:
raise ValueError('Unknown audio type extension "{}"'.format(ext))
- with open(filename, 'rb') as audio_file:
+ with open_remote(filename, 'rb') as audio_file:
if label is None:
return Sample(audio_type, audio_file.read(), sample_id=filename)
return LabeledSample(audio_type, audio_file.read(), label, sample_id=filename)
@@ -119,7 +120,7 @@ class DirectSDBWriter:
raise ValueError('Audio type "{}" not supported'.format(audio_type))
self.audio_type = audio_type
self.bitrate = bitrate
- self.sdb_file = open(sdb_filename, 'wb', buffering=buffering)
+ self.sdb_file = open_remote(sdb_filename, 'wb', buffering=buffering)
self.offsets = []
self.num_samples = 0
@@ -215,7 +216,7 @@ class SDB: # pylint: disable=too-many-instance-attributes
"""
self.sdb_filename = sdb_filename
self.id_prefix = sdb_filename if id_prefix is None else id_prefix
- self.sdb_file = open(sdb_filename, 'rb', buffering=REVERSE_BUFFER_SIZE if reverse else buffering)
+ self.sdb_file = open_remote(sdb_filename, 'rb', buffering=REVERSE_BUFFER_SIZE if reverse else buffering)
self.offsets = []
if self.sdb_file.read(len(MAGIC)) != MAGIC:
raise RuntimeError('No Sample Database')
@@ -345,7 +346,7 @@ class CSVWriter: # pylint: disable=too-many-instance-attributes
self.labeled = labeled
if labeled:
fieldnames.append('transcript')
- self.csv_file = open(csv_filename, 'w', encoding='utf-8', newline='')
+ self.csv_file = open_remote(csv_filename, 'w', encoding='utf-8', newline='')
self.csv_writer = csv.DictWriter(self.csv_file, fieldnames=fieldnames)
self.csv_writer.writeheader()
self.counter = 0
@@ -399,7 +400,7 @@ class TarWriter: # pylint: disable=too-many-instance-attributes
include : str[]
List of files to include into tar root.
"""
- self.tar = tarfile.open(tar_filename, 'w:gz' if gz else 'w')
+ self.tar = tarfile.open_remote(tar_filename, 'w:gz' if gz else 'w')
samples_dir = tarfile.TarInfo('samples')
samples_dir.type = tarfile.DIRTYPE
self.tar.addfile(samples_dir)
@@ -499,7 +500,7 @@ class CSV(SampleList):
"""
rows = []
csv_dir = Path(csv_filename).parent
- with open(csv_filename, 'r', encoding='utf8') as csv_file:
+ with open_remote(csv_filename, 'r', encoding='utf8') as csv_file:
reader = csv.DictReader(csv_file)
if 'transcript' in reader.fieldnames:
if labeled is None:
|
Remote I/O for sample_collections
|
mozilla_DeepSpeech
|
train
|
68b12695c5660c5c2c92fb5f999e58f377b31aa6
|
diff --git a/sample-mail/src/main/java/com/hannesdorfmann/mosby/sample/mail/search/SearchFragment.java b/sample-mail/src/main/java/com/hannesdorfmann/mosby/sample/mail/search/SearchFragment.java
index <HASH>..<HASH> 100644
--- a/sample-mail/src/main/java/com/hannesdorfmann/mosby/sample/mail/search/SearchFragment.java
+++ b/sample-mail/src/main/java/com/hannesdorfmann/mosby/sample/mail/search/SearchFragment.java
@@ -144,6 +144,7 @@ public class SearchFragment extends BaseMailsFragment<SearchView, SearchPresente
}
@Override public void showSearchNotStartedYet() {
+ loadingView.setVisibility(View.GONE);
contentView.setVisibility(View.GONE);
errorView.setVisibility(View.GONE);
authView.setVisibility(View.GONE);
diff --git a/sample-mail/src/main/java/com/hannesdorfmann/mosby/sample/mail/search/SearchPresenter.java b/sample-mail/src/main/java/com/hannesdorfmann/mosby/sample/mail/search/SearchPresenter.java
index <HASH>..<HASH> 100644
--- a/sample-mail/src/main/java/com/hannesdorfmann/mosby/sample/mail/search/SearchPresenter.java
+++ b/sample-mail/src/main/java/com/hannesdorfmann/mosby/sample/mail/search/SearchPresenter.java
@@ -1,5 +1,6 @@
package com.hannesdorfmann.mosby.sample.mail.search;
+import android.text.TextUtils;
import com.hannesdorfmann.mosby.mvp.rx.lce.scheduler.AndroidSchedulerTransformer;
import com.hannesdorfmann.mosby.sample.mail.base.presenter.BaseRxMailPresenter;
import com.hannesdorfmann.mosby.sample.mail.model.mail.Mail;
@@ -58,6 +59,13 @@ public class SearchPresenter extends BaseRxMailPresenter<SearchView, List<Mail>>
public void searchFor(String query, boolean pullToRefresh) {
+ // If searching for empty string, then do nothing
+ if (isViewAttached() && TextUtils.isEmpty(query)) {
+ unsubscribe();
+ getView().showSearchNotStartedYet();
+ return;
+ }
+
// in case the previous action was load more we have to reset the view
if (isViewAttached()) {
getView().showLoadMore(false);
@@ -73,15 +81,4 @@ public class SearchPresenter extends BaseRxMailPresenter<SearchView, List<Mail>>
}
}
- @Override protected void onError(Throwable e, boolean pullToRefresh) {
- super.onError(e, pullToRefresh);
- }
-
- @Override protected void onNext(List<Mail> data) {
- super.onNext(data);
- }
-
- @Override protected void onCompleted() {
- super.onCompleted();
- }
}
diff --git a/sample-mail/src/main/res/layout/fragment_search.xml b/sample-mail/src/main/res/layout/fragment_search.xml
index <HASH>..<HASH> 100644
--- a/sample-mail/src/main/res/layout/fragment_search.xml
+++ b/sample-mail/src/main/res/layout/fragment_search.xml
@@ -38,6 +38,7 @@
android:layout_height="wrap_content"
android:hint="Search word ..."
android:textCursorDrawable="@drawable/text_cursor"
+ android:textSize="14sp"
app:met_baseColor="@color/white"
app:met_floatingLabel="none"
app:met_primaryColor="@color/white"
diff --git a/viewstate/src/main/java/com/hannesdorfmann/mosby/mvp/viewstate/ViewStateManager.java b/viewstate/src/main/java/com/hannesdorfmann/mosby/mvp/viewstate/ViewStateManager.java
index <HASH>..<HASH> 100644
--- a/viewstate/src/main/java/com/hannesdorfmann/mosby/mvp/viewstate/ViewStateManager.java
+++ b/viewstate/src/main/java/com/hannesdorfmann/mosby/mvp/viewstate/ViewStateManager.java
@@ -133,7 +133,7 @@ public class ViewStateManager<V extends MvpView> {
}
// Save the viewstate
- if (viewState != null && viewState instanceof RestoreableViewState) {
+ if (viewState != null && viewState instanceof RestoreableViewState && !retainingInstanceState) {
((RestoreableViewState) viewState).saveInstanceState(outState);
}
|
ViewStateManager respects retain instance state
|
sockeqwe_mosby
|
train
|
2c95d829e0761319588043e1943e33476976b51b
|
diff --git a/tests/dummy/app/app.js b/tests/dummy/app/app.js
index <HASH>..<HASH> 100644
--- a/tests/dummy/app/app.js
+++ b/tests/dummy/app/app.js
@@ -3,7 +3,7 @@ import Resolver from './resolver';
import loadInitializers from 'ember-load-initializers';
import config from './config/environment';
-if (!window.AudioContext) {
+if (!window.AudioContext && !window.webkitAudioContext) {
document.write(`Oh poo. Looks like this browser doesn't support the Web Audio API.<br><br>`);
document.write('<a href="http://caniuse.com/#feat=audio-api">See supported browsers.</a><br><br>');
document.write('<a href="http://lmgtfy.com/?q=web+audio+api+audiocontext+polyfill">There are polyfills, but I have not tested them.</a>');
|
still display dummy app for safari
|
sethbrasile_ember-audio
|
train
|
292a66dd0f014179bbfe367948d5a2d91daf7b29
|
diff --git a/src/jottalib/JFS.py b/src/jottalib/JFS.py
index <HASH>..<HASH> 100644
--- a/src/jottalib/JFS.py
+++ b/src/jottalib/JFS.py
@@ -1011,6 +1011,15 @@ class JFS(object):
elif o.tag == 'filedirlist': return JFSFileDirList(o, jfs=self, parentpath=parent)
raise JFSError("invalid object: %s <- %s" % (repr(o), url_or_requests_response))
+ def getLatest(self, files=10, sort=None):
+ 'Yield a list of the n latest files, optionally sorted by `sort`.'
+ url = posixpath.join(self.rootpath,
+ '/Jotta/Latest?sort=updated&max=%i&web=true' % files)
+ result = self.getObject(url)
+ for _f in result.files():
+ yield _f
+
+
def stream(self, url, chunk_size=64*1024):
'Iterator to get remote content by chunk_size (bytes)'
r = self.request(url)
diff --git a/tests/test_JFS.py b/tests/test_JFS.py
index <HASH>..<HASH> 100644
--- a/tests/test_JFS.py
+++ b/tests/test_JFS.py
@@ -191,6 +191,14 @@ class TestJFS:
assert jfs_f.path == clean_room_path
jfs_f.delete()
+ def test_latest_files(self):
+ # ensure we can get latest files, and that they look "sane"
+ assert len(list(jfs.getLatest(files=0))) == 0
+ assert len(list(jfs.getLatest(files=1))) == 1
+ for f in jfs.getLatest(files=20):
+ assert isinstance(f, (JFS.JFSFile, JFS.JFSIncompleteFile, JFS.JFSCorruptFile))
+
+
class TestJFSDevice:
def test_xml(self):
@@ -440,7 +448,6 @@ class TestJFSFile:
#TODO: test file operations: .stream(), .rename(), .read(), .read_partial, .delete etc
#TODO: test revisions
- #@pytest.mark.xfail # TODO: figure out the best API for writing unicode strings
def test_unicode_contents(self):
data = six.StringIO(u'123abcæøå')
p = "/Jotta/Archive/testfile_unicode_contents.txt"
|
Add method to get latest files. With test. Closes #<I>
|
havardgulldahl_jottalib
|
train
|
c50154fe3a6134f8f2f8da5db396cd5d8226cb02
|
diff --git a/framework/yii/bootstrap/ButtonGroup.php b/framework/yii/bootstrap/ButtonGroup.php
index <HASH>..<HASH> 100644
--- a/framework/yii/bootstrap/ButtonGroup.php
+++ b/framework/yii/bootstrap/ButtonGroup.php
@@ -17,7 +17,7 @@ use yii\helpers\Html;
*
* ```php
* // a button group with items configuration
- * echo ButtonGroup::::widget([
+ * echo ButtonGroup::widget([
* 'buttons' => [
* ['label' => 'A'],
* ['label' => 'B'],
@@ -25,7 +25,7 @@ use yii\helpers\Html;
* ]);
*
* // button group with an item as a string
- * echo ButtonGroup::::widget([
+ * echo ButtonGroup::widget([
* 'buttons' => [
* Button::widget(['label' => 'A']),
* ['label' => 'B'],
|
Fixes issue #<I>: Typo in comment
|
yiisoft_yii2-bootstrap4
|
train
|
5a4f1af776756a6b5be68fbaeea464121da96a2e
|
diff --git a/lib/daemon.js b/lib/daemon.js
index <HASH>..<HASH> 100644
--- a/lib/daemon.js
+++ b/lib/daemon.js
@@ -7,7 +7,6 @@ import loglevelMessagePrefix from 'loglevel-message-prefix';
import Configuration from './Configuration';
import Importer from './Importer';
-import ModuleFinder from './ModuleFinder';
import rerouteConsoleLog from './rerouteConsoleLog';
import version from './version';
@@ -35,18 +34,6 @@ export default function daemon(parentPid) {
});
originalConsoleLog(
`ImportJS (v${version()}) DAEMON active. Logs will go to: ${pathToLogFile}`);
-
- const moduleFinder = ModuleFinder.getForWorkingDirectory(
- process.cwd(), config.get('excludes'));
- moduleFinder.initializeStorage(config.get('cacheLocation'))
- .then(() => moduleFinder.startWatcher())
- .then(() => {
- loglevel.info(`ModuleFinder is enabled for ${process.cwd()}`);
- })
- .catch((err: Object) => {
- throw new Error(err);
- });
-
if (parentPid) {
// Editor plugins should provide a `--parent-pid=<pid>` argument on startup,
// so that we can check that the daemon process hasn't turned into a zombie
diff --git a/lib/importjs.js b/lib/importjs.js
index <HASH>..<HASH> 100644
--- a/lib/importjs.js
+++ b/lib/importjs.js
@@ -1,14 +1,27 @@
// @flow
+import console from 'console';
import fs from 'fs';
import program from 'commander';
import Configuration from './Configuration';
import Importer from './Importer';
+import ModuleFinder from './ModuleFinder';
import daemon, { pathToLogFile } from './daemon';
import packageJson from '../package.json';
+function initializeModuleFinder(): Promise {
+ const config = new Configuration('importjs');
+ const moduleFinder = ModuleFinder.getForWorkingDirectory(
+ process.cwd(), config.get('excludes'));
+ return moduleFinder.initializeStorage(config.get('cacheLocation'))
+ .then((): Promise => moduleFinder.startWatcher())
+ .catch((err: Object) => {
+ throw new Error(err);
+ });
+}
+
function stdoutWrite(str: string) {
process.stdout.write(`${str}\n`);
}
@@ -43,19 +56,23 @@ function runCommand(
pathToFile: string,
{ overwrite }: { overwrite: boolean }
) {
- getLines(pathToFile, (lines: Array<string>) => {
- const importer = new Importer(lines, pathToFile);
- executor(importer).then((result: Object) => {
- if (overwrite) {
- fs.writeFile(pathToFile, result.fileContent, (err: Error) => {
- if (err) throw err;
- });
- } else {
- stdoutWrite(JSON.stringify(result));
- }
- }).catch((error: Object) => {
- console.error(error); // eslint-disable-line no-console
- process.exit(1);
+ initializeModuleFinder().then(() => {
+ getLines(pathToFile, (lines: Array<string>) => {
+ const importer = new Importer(lines, pathToFile);
+ executor(importer).then((result: Object) => {
+ if (overwrite) {
+ fs.writeFile(pathToFile, result.fileContent, (err: Error) => {
+ if (err) throw err;
+ process.exit(0);
+ });
+ } else {
+ stdoutWrite(JSON.stringify(result));
+ process.exit(0);
+ }
+ }).catch((error: Object) => {
+ console.error(error); // eslint-disable-line no-console
+ process.exit(1);
+ });
});
});
}
@@ -101,9 +118,12 @@ program.command('add <imports> <pathToFile>')
program.command('goto <word> <pathToFile>')
.action((word: string, pathToFile: string) => {
- getLines(pathToFile, (lines: Array<string>) => {
- new Importer(lines, pathToFile).goto(word).then((result: Object) => {
- stdoutWrite(JSON.stringify(result));
+ initializeModuleFinder().then(() => {
+ getLines(pathToFile, (lines: Array<string>) => {
+ new Importer(lines, pathToFile).goto(word).then((result: Object) => {
+ stdoutWrite(JSON.stringify(result));
+ process.exit(0);
+ });
});
});
});
@@ -112,7 +132,9 @@ program.command('start')
.description('start a daemon')
.option('--parent-pid <n>', parseInt)
.action(({ parentPid }: Object) => {
- daemon(parentPid);
+ initializeModuleFinder().then(() => {
+ daemon(parentPid);
+ });
});
program.command('cachepath')
|
Start ModuleFinder for regular CLI calls
Now that we only have the ModuleFinder to find imports, we need to make
sure it's started whenever we import something.
|
Galooshi_import-js
|
train
|
e8b8b0afefbbaccb02cdcbcf745e675fdf7d1ac7
|
diff --git a/zipline/data/history_loader.py b/zipline/data/history_loader.py
index <HASH>..<HASH> 100644
--- a/zipline/data/history_loader.py
+++ b/zipline/data/history_loader.py
@@ -34,6 +34,7 @@ from zipline.lib.adjustment import Float64Multiply, Float64Add
from zipline.utils.cache import ExpiringCache
from zipline.utils.memoize import lazyval
from zipline.utils.numpy_utils import float64_dtype
+from zipline.utils.pandas_utils import find_in_sorted_index
class HistoryCompatibleUSEquityAdjustmentReader(object):
@@ -376,14 +377,10 @@ class HistoryLoader(with_metaclass(ABCMeta)):
size = len(dts)
asset_windows = {}
needed_assets = []
+ cal = self._calendar
assets = self._asset_finder.retrieve_all(assets)
-
- try:
- end_ix = self._calendar.searchsorted(end)
- except KeyError:
- raise KeyError("{0} not in calendar [{1}...{2}]".format(
- end, self._calendar[0], self._calendar[-1]))
+ end_ix = find_in_sorted_index(cal, end)
for asset in assets:
try:
@@ -401,15 +398,9 @@ class HistoryLoader(with_metaclass(ABCMeta)):
asset_windows[asset] = window
if needed_assets:
- start = dts[0]
-
offset = 0
- try:
- start_ix = self._calendar.searchsorted(start)
- except KeyError:
- raise KeyError("{0} not in calendar [{1}...{2}]".format(
- start, self._calendar[0], self._calendar[-1]))
- cal = self._calendar
+ start_ix = find_in_sorted_index(cal, dts[0])
+
prefetch_end_ix = min(end_ix + self._prefetch_length, len(cal) - 1)
prefetch_end = cal[prefetch_end_ix]
prefetch_dts = cal[start_ix:prefetch_end_ix + 1]
diff --git a/zipline/utils/pandas_utils.py b/zipline/utils/pandas_utils.py
index <HASH>..<HASH> 100644
--- a/zipline/utils/pandas_utils.py
+++ b/zipline/utils/pandas_utils.py
@@ -91,6 +91,39 @@ def mask_between_time(dts, start, end, include_start=True, include_end=True):
)
+def find_in_sorted_index(dts, dt):
+ """
+ Find the index of ``dt`` in ``dts``.
+
+ This function should be used instead of `dts.get_loc(dt)` if the index is
+ large enough that we don't want to initialize a hash table in ``dts``. In
+ particular, this should always be used on minutely trading calendars.
+
+ Parameters
+ ----------
+ dts : pd.DatetimeIndex
+ Index in which to look up ``dt``. **Must be sorted**.
+ dt : pd.Timestamp
+ ``dt`` to be looked up.
+
+ Returns
+ -------
+ ix : int
+ Integer index such that dts[ix] == dt.
+
+ Raises
+ ------
+ KeyError
+ If dt is not in ``dts``.
+ """
+ ix = dts.searchsorted(dt)
+ if dts[ix] != dt:
+ raise KeyError(
+ "{0} is not in calendar [{1} ... {2}]".format(dt, dts[0], dts[-1])
+ )
+ return ix
+
+
def nearest_unequal_elements(dts, dt):
"""
Find values in ``dts`` closest but not equal to ``dt``.
|
BUG: Fix bad error handling in history loader.
Fixes a bug where we'd fail to raise an error if the start/end of a
history window call don't aren't in the loader's calendar.
We were started dropping this error after a previous change swapped out
calls to `index.get_loc` with calls to `index.searchsorted` to avoid
creating hash tables in pandas.
|
quantopian_zipline
|
train
|
c9ca1774c7672bae249fa3f7113b8e26d71cb3ef
|
diff --git a/cgroupspy/controllers.py b/cgroupspy/controllers.py
index <HASH>..<HASH> 100644
--- a/cgroupspy/controllers.py
+++ b/cgroupspy/controllers.py
@@ -38,6 +38,7 @@ class Controller(object):
"""
tasks = MultiLineIntegerFile("tasks")
+ procs = MultiLineIntegerFile("cgroup.procs")
notify_on_release = FlagFile("notify_on_release")
clone_children = FlagFile("cgroup.clone_children")
@@ -91,7 +92,7 @@ class CpuAcctController(Controller):
"""
acct_stat = DictFile("cpuacct.stat", readonly=True)
usage = IntegerFile("cpuacct.usage")
- usage_percpu = IntegerListFile("cpuacct.usage_percpu")
+ usage_percpu = IntegerListFile("cpuacct.usage_percpu", readonly=True)
class CpuSetController(Controller):
diff --git a/cgroupspy/interfaces.py b/cgroupspy/interfaces.py
index <HASH>..<HASH> 100644
--- a/cgroupspy/interfaces.py
+++ b/cgroupspy/interfaces.py
@@ -124,12 +124,16 @@ class IntegerListFile(ListFile):
"""
ex: 253237230463342 317756630269369 247294096796305 289833051422078
"""
- readonly = True
def sanitize_get(self, value):
value_list = super(IntegerListFile, self).sanitize_get(value)
return map(int, value_list)
+ def sanitize_set(self, value):
+ if value is None:
+ value = -1
+ return int(value)
+
class CommaDashSetFile(BaseFileInterface):
@@ -156,12 +160,16 @@ class CommaDashSetFile(BaseFileInterface):
class MultiLineIntegerFile(BaseFileInterface):
- readonly = True
def sanitize_get(self, value):
int_list = [int(val) for val in value.strip().split("\n") if val]
return int_list
+ def sanitize_set(self, value):
+ if value is None:
+ value = -1
+ return int(value)
+
class SplitValueFile(BaseFileInterface):
"""
|
Add support for cgroup.procs (thread groups) in addition to tasks (threads)
Also make IntegerListFile and MultiLineIntegerFile writable so procs and tasks can be written
|
cloudsigma_cgroupspy
|
train
|
e29494088e3983bac7049873db68d84e49e41c1d
|
diff --git a/lib/activatable/instance_methods.rb b/lib/activatable/instance_methods.rb
index <HASH>..<HASH> 100644
--- a/lib/activatable/instance_methods.rb
+++ b/lib/activatable/instance_methods.rb
@@ -13,5 +13,9 @@ module Activatable
self.update(field_name => true)
end
end
+
+ def deactivate_by(field_name)
+ self.update(field_name => false)
+ end
end
end
\ No newline at end of file
diff --git a/lib/activatable/version.rb b/lib/activatable/version.rb
index <HASH>..<HASH> 100644
--- a/lib/activatable/version.rb
+++ b/lib/activatable/version.rb
@@ -1,3 +1,3 @@
module Activatable
- VERSION = "0.0.2"
+ VERSION = "0.0.3"
end
|
Added #deactivate_by
|
kimrgrey_activatable
|
train
|
b3146683f24b7e9b2dcc19483857848f27ed1798
|
diff --git a/src/tilesource.js b/src/tilesource.js
index <HASH>..<HASH> 100644
--- a/src/tilesource.js
+++ b/src/tilesource.js
@@ -65,7 +65,7 @@ $.TileSource = function( width, height, tileSize, tileOverlap, minLevel, maxLeve
height: args[1],
tileSize: args[2],
tileOverlap: args[3],
- minlevel: args[4],
+ minLevel: args[4],
maxLevel: args[5]
};
}
|
applying patch provided by eikeon for position parameter constructor of TileSource. At some point I hope to deprecate most of these constructors that have more than two positional parameters.
|
openseadragon_openseadragon
|
train
|
eebbf8e4349f1d69da00e4409cec8db029d26265
|
diff --git a/explorer/templates.go b/explorer/templates.go
index <HASH>..<HASH> 100644
--- a/explorer/templates.go
+++ b/explorer/templates.go
@@ -400,6 +400,9 @@ func makeTemplateFuncMap(params *chaincfg.Params) template.FuncMap {
"threeSigFigs": threeSigFigs,
"remaining": func(idx int, max, t int64) string {
x := (max - int64(idx)) * t
+ if x == 0 {
+ return "imminent"
+ }
allsecs := int(time.Duration(x).Seconds())
str := ""
if allsecs > 604799 {
|
when time remaining is 0, show "imminent"
|
decred_dcrdata
|
train
|
b0a0aac95a8177bf7dce64d85db33560654198c9
|
diff --git a/phoebe/parameters/parameters.py b/phoebe/parameters/parameters.py
index <HASH>..<HASH> 100644
--- a/phoebe/parameters/parameters.py
+++ b/phoebe/parameters/parameters.py
@@ -2459,9 +2459,9 @@ class ParameterSet(object):
:parameter xunit: unit to plot the x-array (will default based on x if not provided)
:type xunit: str or astropy.unit.Unit
- :parameter xunit: unit to plot the y-array (will default based on y if not provided)
+ :parameter yunit: unit to plot the y-array (will default based on y if not provided)
:type yunit: str or astropy.unit.Unit
- :parameter xunit: unit to plot the z-array (will default based on z if not provided)
+ :parameter zunit: unit to plot the z-array (will default based on z if not provided)
:type zunit: str or astropy.unit.Unit
|
fixed typo in docstring for PS.plot
|
phoebe-project_phoebe2
|
train
|
6fe12fc9916d6586a62b23ef81fc0d136ecd3183
|
diff --git a/packages/mjml-group/src/index.js b/packages/mjml-group/src/index.js
index <HASH>..<HASH> 100644
--- a/packages/mjml-group/src/index.js
+++ b/packages/mjml-group/src/index.js
@@ -5,18 +5,7 @@ import widthParser from 'mjml-core/lib/helpers/widthParser'
export default class MjGroup extends BodyComponent {
static allowedAttributes = {
'background-color': 'color',
- border: 'unit(px)',
- 'border-bottom': 'unit(px)',
- 'border-left': 'unit(px)',
- 'border-radius': 'unit(px)',
- 'border-right': 'unit(px)',
- 'border-top': 'unit(px)',
direction: 'enum(ltr,rtl)',
- 'padding-bottom': 'unit(px,%)',
- 'padding-left': 'unit(px,%)',
- 'padding-right': 'unit(px,%)',
- 'padding-top': 'unit(px,%)',
- padding: 'unit(px,%){1,4}',
'vertical-align': 'string',
width: 'unit(px,%)',
}
@@ -64,12 +53,6 @@ export default class MjGroup extends BodyComponent {
direction: this.getAttribute('direction'),
'vertical-align': this.getAttribute('vertical-align'),
'background-color': this.getAttribute('background-color'),
- border: this.getAttribute('border'),
- 'border-bottom': this.getAttribute('border-bottom'),
- 'border-left': this.getAttribute('border-left'),
- 'border-radius': this.getAttribute('border-radius'),
- 'border-right': this.getAttribute('border-right'),
- 'border-top': this.getAttribute('border-top'),
},
tdOutlook: {
'vertical-align': this.getAttribute('vertical-align'),
|
remove actually not allowed attributes on mj-group
|
mjmlio_mjml
|
train
|
9dbc17dac5aef81c5dbbd3d61d1ea00602cc72ed
|
diff --git a/workspaces/scripts/src/commands/__tests__/run-tests.test.js b/workspaces/scripts/src/commands/__tests__/run-tests.test.js
index <HASH>..<HASH> 100644
--- a/workspaces/scripts/src/commands/__tests__/run-tests.test.js
+++ b/workspaces/scripts/src/commands/__tests__/run-tests.test.js
@@ -1,15 +1,19 @@
// @flow
import { spawn } from 'promisify-child-process';
+import { hasWorkspaces } from '../utils/workspaces';
import { CONFIG, JEST_PATH } from '../run-tests';
import { cli } from '../../test-utils';
jest.mock('promisify-child-process');
+jest.mock('../utils/workspaces');
+jest.mock('@freighter/logger');
describe('test', () => {
beforeEach(() => {
jest.clearAllMocks();
+ (hasWorkspaces: Function).mockResolvedValue(true);
(spawn: Function).mockResolvedValue({
stderr: null,
stdout: null,
@@ -40,4 +44,11 @@ describe('test', () => {
code: 15,
});
});
+
+ it('skips the tests if no workspaces exist', async () => {
+ (hasWorkspaces: Function).mockResolvedValue(false);
+ await cli('test');
+
+ expect(spawn).not.toHaveBeenCalled();
+ });
});
diff --git a/workspaces/scripts/src/commands/run-tests.js b/workspaces/scripts/src/commands/run-tests.js
index <HASH>..<HASH> 100644
--- a/workspaces/scripts/src/commands/run-tests.js
+++ b/workspaces/scripts/src/commands/run-tests.js
@@ -1,6 +1,8 @@
// @flow
import { spawn } from 'promisify-child-process';
+import logger from '@freighter/logger';
+import { hasWorkspaces } from './utils/workspaces';
import { command, exit } from './decorator';
export const JEST_PATH = require.resolve('jest/bin/jest');
@@ -18,6 +20,12 @@ type Options = {
};
export const test = async (cmd: Options) => {
+ const repoPath = process.cwd();
+ if (!(await hasWorkspaces(repoPath))) {
+ logger.warn('No workspaces found. Skipping tests.');
+ return;
+ }
+
const givenArgs = [];
if (cmd.watch) {
givenArgs.push('--watch', '--collectCoverage=false');
|
Skip tests if no workspaces exist
Objective: test, lint, flow, and ci commands should pass successfully
out of the box. For both eslint and Jest, they don't like it when a glob
doesn't match any patterns, and there won't be any patterns until the
user adds a workspace.
This quits tests early with a warning if no workspaces were found.
|
PsychoLlama_freighter
|
train
|
a8ea63f5c6aa5f37462685278fc937c831ef8424
|
diff --git a/ldapdao/plugins/auth/ldapdao/ldapdao.auth.php b/ldapdao/plugins/auth/ldapdao/ldapdao.auth.php
index <HASH>..<HASH> 100644
--- a/ldapdao/plugins/auth/ldapdao/ldapdao.auth.php
+++ b/ldapdao/plugins/auth/ldapdao/ldapdao.auth.php
@@ -203,8 +203,6 @@ class ldapdaoAuthDriver extends jAuthDriverBase implements jIAuthDriver {
$connect = $this->_bindLdapAdminUser();
// check if he is in our database
- $dao = jDao::get($this->_params['dao'], $this->_params['profile']);
- $user = $dao->getByLogin($login);
if (!$user) {
// it's a new user, let's create it
|
Fix duplicated code, user data were loaded twice
|
jelix_ldapdao-module
|
train
|
f57c87ecb7f4416556c5c843500da2d34852896f
|
diff --git a/lib/releaf/acts_as_node.rb b/lib/releaf/acts_as_node.rb
index <HASH>..<HASH> 100644
--- a/lib/releaf/acts_as_node.rb
+++ b/lib/releaf/acts_as_node.rb
@@ -9,6 +9,9 @@ module ActsAsNode
end
def self.classes
+ # eager load in dev env
+ Rails.application.eager_load! if Rails.env.development?
+
@classes
end
|
eager load in dev env
|
cubesystems_releaf
|
train
|
05028020b6149efd771a1e3e8ad9d683495f4984
|
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/BackwardChainingTest.java b/drools-compiler/src/test/java/org/drools/integrationtests/BackwardChainingTest.java
index <HASH>..<HASH> 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/BackwardChainingTest.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/BackwardChainingTest.java
@@ -885,7 +885,7 @@ public class BackwardChainingTest {
// from http://kti.mff.cuni.cz/~bartak/prolog/genealogy.html
String str = "" +
- "package org.drools.test \n" +
+ "package org.drools.test2 \n" +
"global java.util.List list\n" +
"dialect \"mvel\"\n" +
|
changed package name, so it no longer conflicts
|
kiegroup_drools
|
train
|
37fdc78fb6fff080afd868f8723acc002eeaa135
|
diff --git a/DependencyInjection/SonataDoctrineORMAdminExtension.php b/DependencyInjection/SonataDoctrineORMAdminExtension.php
index <HASH>..<HASH> 100644
--- a/DependencyInjection/SonataDoctrineORMAdminExtension.php
+++ b/DependencyInjection/SonataDoctrineORMAdminExtension.php
@@ -83,6 +83,8 @@ class SonataDoctrineORMAdminExtension extends Extension
'integer' => 'SonataAdminBundle:CRUD:base_list_field.html.twig',
'decimal' => 'SonataAdminBundle:CRUD:base_list_field.html.twig',
'identifier' => 'SonataAdminBundle:CRUD:base_list_field.html.twig',
+ 'currency' => 'SonataAdminBundle:CRUD:list_currency.html.twig',
+ 'percent' => 'SonataAdminBundle:CRUD:list_percent.html.twig',
),
'show' => array(
'array' => 'SonataAdminBundle:CRUD:show_array.html.twig',
@@ -96,6 +98,8 @@ class SonataDoctrineORMAdminExtension extends Extension
'bigint' => 'SonataAdminBundle:CRUD:base_show_field.html.twig',
'integer' => 'SonataAdminBundle:CRUD:base_show_field.html.twig',
'decimal' => 'SonataAdminBundle:CRUD:base_show_field.html.twig',
+ 'currency' => 'SonataAdminBundle:CRUD:base_currency.html.twig',
+ 'percent' => 'SonataAdminBundle:CRUD:base_percent.html.twig',
)
)
)
@@ -110,6 +114,8 @@ class SonataDoctrineORMAdminExtension extends Extension
'bigint' => 'SonataIntlBundle:CRUD:list_decimal.html.twig',
'integer' => 'SonataIntlBundle:CRUD:list_decimal.html.twig',
'decimal' => 'SonataIntlBundle:CRUD:list_decimal.html.twig',
+ 'currency' => 'SonataIntlBundle:CRUD:list_currency.html.twig',
+ 'percent' => 'SonataIntlBundle:CRUD:list_percent.html.twig',
));
$defaultConfig['templates']['types']['show'] = array_merge($defaultConfig['templates']['types']['show'], array(
@@ -119,6 +125,8 @@ class SonataDoctrineORMAdminExtension extends Extension
'bigint' => 'SonataIntlBundle:CRUD:show_decimal.html.twig',
'integer' => 'SonataIntlBundle:CRUD:show_decimal.html.twig',
'decimal' => 'SonataIntlBundle:CRUD:show_decimal.html.twig',
+ 'currency' => 'SonataIntlBundle:CRUD:show_currency.html.twig',
+ 'percent' => 'SonataIntlBundle:CRUD:show_percent.html.twig',
));
}
|
configure new types available in the AdminBundle
|
sonata-project_SonataDoctrineORMAdminBundle
|
train
|
5e0c50d814b08cbdd46f1fcce77b34210d546669
|
diff --git a/src/connect/connect.js b/src/connect/connect.js
index <HASH>..<HASH> 100644
--- a/src/connect/connect.js
+++ b/src/connect/connect.js
@@ -101,4 +101,4 @@ export function createConnect({
}
}
-export default createConnect()
+export default /*#__PURE__*/ createConnect()
diff --git a/src/hooks/useDispatch.js b/src/hooks/useDispatch.js
index <HASH>..<HASH> 100644
--- a/src/hooks/useDispatch.js
+++ b/src/hooks/useDispatch.js
@@ -38,4 +38,4 @@ export function createDispatchHook(context = ReactReduxContext) {
* )
* }
*/
-export const useDispatch = createDispatchHook()
+export const useDispatch = /*#__PURE__*/ createDispatchHook()
diff --git a/src/hooks/useSelector.js b/src/hooks/useSelector.js
index <HASH>..<HASH> 100644
--- a/src/hooks/useSelector.js
+++ b/src/hooks/useSelector.js
@@ -131,4 +131,4 @@ export function createSelectorHook(context = ReactReduxContext) {
* return <div>{counter}</div>
* }
*/
-export const useSelector = createSelectorHook()
+export const useSelector = /*#__PURE__*/ createSelectorHook()
diff --git a/src/hooks/useStore.js b/src/hooks/useStore.js
index <HASH>..<HASH> 100644
--- a/src/hooks/useStore.js
+++ b/src/hooks/useStore.js
@@ -34,4 +34,4 @@ export function createStoreHook(context = ReactReduxContext) {
* return <div>{store.getState()}</div>
* }
*/
-export const useStore = createStoreHook()
+export const useStore = /*#__PURE__*/ createStoreHook()
|
Add pure annotations to help with DCE
|
reduxjs_react-redux
|
train
|
818e518966a3e5403b7a4993a88698e96c653f10
|
diff --git a/test/integration/cloud/permissions.js b/test/integration/cloud/permissions.js
index <HASH>..<HASH> 100644
--- a/test/integration/cloud/permissions.js
+++ b/test/integration/cloud/permissions.js
@@ -43,4 +43,10 @@ describe('Permission checking', () => {
.update({ hacked: true });
});
});
+
+ describe('When alice tries to read bobs timeline', () => {
+ ctx.requestShouldError(403, async () => {
+ ctx.response = await ctx.alice.feed('timeline', ctx.bob.userId).get();
+ });
+ });
});
|
Test that you can't read other peoples timelines
|
GetStream_stream-js
|
train
|
5b36da106f532a31c76cf4c804a3d51b8bcb418b
|
diff --git a/src/saml2/__init__.py b/src/saml2/__init__.py
index <HASH>..<HASH> 100644
--- a/src/saml2/__init__.py
+++ b/src/saml2/__init__.py
@@ -590,7 +590,7 @@ class SamlBase(ExtensionContainer):
"""
keys = ['text']
keys.extend([n for (n, t, r) in self.c_attributes.values()])
- keys.extend([v[1] for v in self.c_children.values()])
+ keys.extend([v[0] for v in self.c_children.values()])
return keys
def children_with_values(self):
|
Real bug, but then noone seems to have used the method
|
IdentityPython_pysaml2
|
train
|
b66c6116a2979dff90518e070db5931f8bdff42a
|
diff --git a/lib/slimmer/test.rb b/lib/slimmer/test.rb
index <HASH>..<HASH> 100644
--- a/lib/slimmer/test.rb
+++ b/lib/slimmer/test.rb
@@ -10,6 +10,7 @@ module Slimmer
<script src="http://static.preview.alphagov.co.uk/javascripts/libs/jquery/jquery-1.6.2.min.js"></script><!-- no defer on jquery -->
<script src="http://static.preview.alphagov.co.uk/javascripts/libs/jquery/jquery-ui-1.8.16.custom.min.js" defer></script>
<script src="http://static.preview.alphagov.co.uk/javascripts/libs/jquery/plugins/jquery.base64.js" defer></script>
+ <script src="http://static.preview.alphagov.co.uk/javascripts/libs/jquery/plugins/jquery.mustache.js" defer></script>
<script src="http://static.preview.alphagov.co.uk/javascripts/search.js" defer></script>
<script src="http://static.preview.alphagov.co.uk/javascripts/devolution.js" defer></script>
<script src="http://static.preview.alphagov.co.uk/javascripts/popup.js" defer></script>
|
jQuery mustache is provided.
|
alphagov_slimmer
|
train
|
c745b299de0c184111a9530ca300d55c4886e1c1
|
diff --git a/lib/js/src/jest.js b/lib/js/src/jest.js
index <HASH>..<HASH> 100644
--- a/lib/js/src/jest.js
+++ b/lib/js/src/jest.js
@@ -285,7 +285,7 @@ function testAll(name, inputs, callback) {
function beforeAllAsync(timeout, callback) {
beforeAll((function (finish) {
- Curry._1(callback, (function () {
+ Curry._1(callback, (function (param) {
return Curry._1(finish, /* () */0);
}));
return undefined;
@@ -302,7 +302,7 @@ function beforeAllPromise(timeout, callback) {
function beforeEachAsync(timeout, callback) {
beforeEach((function (finish) {
- Curry._1(callback, (function () {
+ Curry._1(callback, (function (param) {
return Curry._1(finish, /* () */0);
}));
return undefined;
@@ -319,7 +319,7 @@ function beforeEachPromise(timeout, callback) {
function afterAllAsync(timeout, callback) {
afterAll((function (finish) {
- Curry._1(callback, (function () {
+ Curry._1(callback, (function (param) {
return Curry._1(finish, /* () */0);
}));
return undefined;
@@ -328,7 +328,7 @@ function afterAllAsync(timeout, callback) {
}
function afterAllPromise(timeout, callback) {
- afterAll((function () {
+ afterAll((function (param) {
return Promise.resolve(Curry._1(callback, /* () */0));
}), Js_undefined.fromOption(timeout));
return /* () */0;
@@ -336,7 +336,7 @@ function afterAllPromise(timeout, callback) {
function afterEachAsync(timeout, callback) {
afterEach((function (finish) {
- Curry._1(callback, (function () {
+ Curry._1(callback, (function (param) {
return Curry._1(finish, /* () */0);
}));
return undefined;
@@ -397,12 +397,12 @@ var Only = /* module */[
/* testAll */testAll$1
];
-function testAsync$2(name, _, callback) {
+function testAsync$2(name, param, callback) {
it.skip(name, callback);
return /* () */0;
}
-function testPromise$2(name, _, callback) {
+function testPromise$2(name, param, callback) {
it.skip(name, (function () {
return Curry._1(callback, /* () */0);
}));
@@ -433,7 +433,7 @@ function expect$1(a) {
function expectFn(f, a) {
return /* `Just */[
826472012,
- (function () {
+ (function (param) {
return Curry._1(f, a);
})
];
@@ -809,7 +809,7 @@ function Runner(funarg) {
};
var beforeAllAsync = function (timeout, callback) {
beforeAll((function (finish) {
- Curry._1(callback, (function () {
+ Curry._1(callback, (function (param) {
return Curry._1(finish, /* () */0);
}));
return undefined;
@@ -824,7 +824,7 @@ function Runner(funarg) {
};
var beforeEachAsync = function (timeout, callback) {
beforeEach((function (finish) {
- Curry._1(callback, (function () {
+ Curry._1(callback, (function (param) {
return Curry._1(finish, /* () */0);
}));
return undefined;
@@ -839,7 +839,7 @@ function Runner(funarg) {
};
var afterAllAsync = function (timeout, callback) {
afterAll((function (finish) {
- Curry._1(callback, (function () {
+ Curry._1(callback, (function (param) {
return Curry._1(finish, /* () */0);
}));
return undefined;
@@ -847,14 +847,14 @@ function Runner(funarg) {
return /* () */0;
};
var afterAllPromise = function (timeout, callback) {
- afterAll((function () {
+ afterAll((function (param) {
return Promise.resolve(Curry._1(callback, /* () */0));
}), Js_undefined.fromOption(timeout));
return /* () */0;
};
var afterEachAsync = function (timeout, callback) {
afterEach((function (finish) {
- Curry._1(callback, (function () {
+ Curry._1(callback, (function (param) {
return Curry._1(finish, /* () */0);
}));
return undefined;
@@ -908,11 +908,11 @@ function Runner(funarg) {
/* testPromise */testPromise$1,
/* testAll */testAll$1
];
- var testAsync$2 = function (name, _, callback) {
+ var testAsync$2 = function (name, param, callback) {
it.skip(name, callback);
return /* () */0;
};
- var testPromise$2 = function (name, _, callback) {
+ var testPromise$2 = function (name, param, callback) {
it.skip(name, (function () {
return Curry._1(callback, /* () */0);
}));
|
update js artifacts to <I>
|
glennsl_bs-jest
|
train
|
b7d1585bd334a763cc730b213d1ca20f5796a3ba
|
diff --git a/endesive/__init__.py b/endesive/__init__.py
index <HASH>..<HASH> 100644
--- a/endesive/__init__.py
+++ b/endesive/__init__.py
@@ -2,6 +2,6 @@
__author__ = 'Grzegorz Makarewicz'
__license__ = 'MIT'
-__version__ = '1.2.1'
+__version__ = '1.2.2'
__all__ = [__author__, __license__, __version__]
diff --git a/endesive/pdf/cms.py b/endesive/pdf/cms.py
index <HASH>..<HASH> 100644
--- a/endesive/pdf/cms.py
+++ b/endesive/pdf/cms.py
@@ -205,7 +205,7 @@ class SignedData(object):
/Type
/Annot
/Subtype
-/FreeText
+%s
/AP <</N %d 0 R>>
/BS <</S /S /Type /Border /W 0>>
/C []
@@ -216,13 +216,17 @@ class SignedData(object):
/P %d 0 R
/FT
/Sig
+%s
/T(Signature%d)
/V %d 0 R
''' % (
+ b'/Widget' if udct.get(b'sigbutton', False) else b'/FreeText',
no + 4, pdfa.Contents.encode('latin1'),
pdfa.AP.N.Resources.Font.keys()[0].encode('latin1'),
pdfar,
- page, nsig, no + 5)),
+ page,
+ b'/SM(TabletPOSinline)' if udct.get(b'sigbutton', False) else b'',
+ nsig, no + 5)),
self.makeobj(no + 4, b'''
/BBox %s
diff --git a/examples/pdf-sign-cms.py b/examples/pdf-sign-cms.py
index <HASH>..<HASH> 100755
--- a/examples/pdf-sign-cms.py
+++ b/examples/pdf-sign-cms.py
@@ -10,6 +10,8 @@ from endesive import pdf
def main():
dct = {
b'sigflags': 3,
+ b'sigpage': 1,
+ b'sigbutton': True,
b'contact': b'mak@trisoft.com.pl',
b'location': b'Szczecin',
b'signingdate': b'20180731082642+02\'00\'',
|
next option for pdf signer- sigbutton
|
m32_endesive
|
train
|
5569becded4fd1d2c921d8ddf05964b9b29ed358
|
diff --git a/framework/Commands/Migrate.php b/framework/Commands/Migrate.php
index <HASH>..<HASH> 100644
--- a/framework/Commands/Migrate.php
+++ b/framework/Commands/Migrate.php
@@ -30,7 +30,14 @@ class Migrate
if (!$this->isInstalled()) {
$this->install();
}
- $migrations = $this->getMigrationsAfter($this->getLastMigrationTime());
+
+ $lastMigrationTime = $this->getLastMigrationTime();
+
+ if (!empty($notExecutedMigrations = $this->getNotExecutedMigrationsBefore($lastMigrationTime))) {
+ throw new Exception('Not executed earlier migrations found: ' . implode(', ', $notExecutedMigrations));
+ }
+
+ $migrations = $this->getMigrationsAfter($lastMigrationTime);
foreach ($migrations as $migration) {
$this->writeLn($migration->getName() . ' up...');
$migration->up();
@@ -78,6 +85,31 @@ class Migrate
return $migrations;
}
+ protected function getNotExecutedMigrationsBefore($time)
+ {
+ $migrationsBefore = [];
+
+ foreach (glob($this->getMigrationsPath() . DS . sprintf(self::SEARCH_FILE_NAME_PATTERN, '*', '*') . '.php') as $fileName) {
+ $className = self::MIGRATIONS_NAMESPACE . '\\' . pathinfo($fileName, PATHINFO_FILENAME);
+ /** @var Migration $migration */
+ $migration = new $className;
+
+ if ($migration->getTimestamp() < $time) {
+ $query = (new Query())
+ ->select('COUNT(*)')
+ ->from(self::TABLE_NAME)
+ ->where('time = :time')
+ ->params([':time' => $migration->getTimestamp()]);
+
+ if (0 === $this->app->db->default->query($query)->fetchScalar()) {
+ $migrationsBefore[] = $migration->getName();
+ }
+ }
+ }
+
+ return $migrationsBefore;
+ }
+
protected function getMigrationsPath($module = null)
{
if (null == $module) {
|
ONCE-<I>: edited migrate command
|
pr-of-it_t4
|
train
|
5a281c511d9c232ac2e4508278c1a8d63c41b386
|
diff --git a/backbone.js b/backbone.js
index <HASH>..<HASH> 100644
--- a/backbone.js
+++ b/backbone.js
@@ -990,6 +990,7 @@
// routes can be defined at the bottom of the route map.
_bindRoutes: function() {
if (!this.routes) return;
+ this.routes = _.result(this, 'routes');
var route, routes = _.keys(this.routes);
while ((route = routes.pop()) != null) {
this.route(route, this.routes[route]);
|
Make Router.routes accept a function
[#<I>]
|
jashkenas_backbone
|
train
|
09eb51ecc48b7143bf89abec31e53b0cd4abb0cd
|
diff --git a/gbdxtools/ipe/graph.py b/gbdxtools/ipe/graph.py
index <HASH>..<HASH> 100644
--- a/gbdxtools/ipe/graph.py
+++ b/gbdxtools/ipe/graph.py
@@ -21,5 +21,9 @@ def register_ipe_graph(conn, ipe_graph):
def get_ipe_metadata(conn, ipe_id, node='toa_reflectance'):
meta = {}
meta['image'] = conn.get(VIRTUAL_IPE_URL + "/metadata/idaho-virtual/{}/{}/image.json".format(ipe_id, node)).json()
- meta['georef'] = conn.get(VIRTUAL_IPE_URL + "/metadata/idaho-virtual/{}/{}/georeferencing.json".format(ipe_id, node)).json()
+ meta['rpcs'] = conn.get(VIRTUAL_IPE_URL + "/metadata/idaho-virtual/{}/{}/rpcs.json".format(ipe_id, node)).json()
+ try:
+ meta['georef'] = conn.get(VIRTUAL_IPE_URL + "/metadata/idaho-virtual/{}/{}/georeferencing.json".format(ipe_id, node)).json()
+ except:
+ meta['georef'] = None
return meta
|
include rpcs in ipe metadata
|
DigitalGlobe_gbdxtools
|
train
|
b4339073c5c87e7df1399f70d44eb73bfda35d0b
|
diff --git a/Tests/LdapClientTest.php b/Tests/LdapClientTest.php
index <HASH>..<HASH> 100644
--- a/Tests/LdapClientTest.php
+++ b/Tests/LdapClientTest.php
@@ -24,7 +24,7 @@ class LdapClientTest extends TestCase
* Sets up the fixture, for example, opens a network connection.
* This method is called before a test is executed.
*
- * @return void
+ * @return void
*/
protected function setUp()
{
@@ -39,6 +39,19 @@ class LdapClientTest extends TestCase
}
/**
+ * Tears down the fixture, for example, close a network connection.
+ * This method is called after a test is executed.
+ *
+ * @return void
+ */
+ protected function tearDown()
+ {
+ unset($this->object);
+
+ parent::tearDown();
+ }
+
+ /**
* @covers Joomla\Ldap\Ldap::connect
*/
public function testConnect()
@@ -47,42 +60,36 @@ class LdapClientTest extends TestCase
}
/**
- * Test...
- *
- * @todo Implement testClose().
- *
- * @return void
+ * @covers Joomla\Ldap\Ldap::setDn
+ * @uses Joomla\Ldap\Ldap::getDn
*/
- public function testClose()
+ public function testSetDnWithNoUserDn()
{
- // Remove the following lines when you implement this test.
- $this->markTestIncomplete('This test has not been implemented yet.');
+ $dn = 'cn=admin,dc=joomla,dc=org';
+
+ $this->object->setDn($dn);
+
+ $this->assertSame($dn, $this->object->getDn());
}
/**
- * Test...
- *
- * @todo Implement testSetDn().
- *
- * @return void
+ * @covers Joomla\Ldap\Ldap::setDn
+ * @uses Joomla\Ldap\Ldap::getDn
*/
- public function testSetDn()
+ public function testSetDnWithUserDn()
{
- // Remove the following lines when you implement this test.
- $this->markTestIncomplete('This test has not been implemented yet.');
+ $this->object->setDn('uid=[username],cn=admin,dc=joomla,dc=org');
+ $this->object->setDn('admin');
+
+ $this->assertSame('uid=admin,cn=admin,dc=joomla,dc=org', $this->object->getDn());
}
/**
- * Test...
- *
- * @todo Implement testGetDn().
- *
- * @return void
+ * @covers Joomla\Ldap\Ldap::getDn
*/
public function testGetDn()
{
- // Remove the following lines when you implement this test.
- $this->markTestIncomplete('This test has not been implemented yet.');
+ $this->assertNull($this->object->getDn());
}
/**
@@ -94,6 +101,12 @@ class LdapClientTest extends TestCase
*/
public function testAnonymous_bind()
{
+ if (!$this->object->connect())
+ {
+ $this->markTestSkipped('Could not connect to LDAP server');
+ }
+
+ var_dump($this->object->anonymous_bind());
// Remove the following lines when you implement this test.
$this->markTestIncomplete('This test has not been implemented yet.');
}
diff --git a/src/LdapClient.php b/src/LdapClient.php
index <HASH>..<HASH> 100644
--- a/src/LdapClient.php
+++ b/src/LdapClient.php
@@ -124,6 +124,16 @@ class LdapClient
}
/**
+ * Class destructor.
+ *
+ * @since __DEPLOY_VERSION__
+ */
+ public function __destruct()
+ {
+ $this->close();
+ }
+
+ /**
* Connect to server
*
* @return boolean True if successful
@@ -179,7 +189,12 @@ class LdapClient
*/
public function close()
{
- @ ldap_close($this->resource);
+ if ($this->resource && is_resource($this->resource))
+ {
+ @ldap_close($this->resource);
+ }
+
+ $this->resource = null;
}
/**
@@ -229,9 +244,7 @@ class LdapClient
*/
public function anonymous_bind()
{
- $bindResult = @ldap_bind($this->resource);
-
- return $bindResult;
+ return @ldap_bind($this->resource);
}
/**
|
Add destructor to client, expand close method a bit, tests for DN methods, dump anon bind
|
joomla-framework_ldap
|
train
|
076745c4dfaaa29c16b008698aaf6f4c7ce307a3
|
diff --git a/lib.go b/lib.go
index <HASH>..<HASH> 100644
--- a/lib.go
+++ b/lib.go
@@ -55,7 +55,8 @@ type Sheet struct {
type File struct {
worksheets map[string]*zip.File
referenceTable []string
- Sheets []*Sheet
+ Sheets []*Sheet // sheet access by index
+ Sheet map[string]*Sheet // sheet access by name
}
// getRangeFromString is an internal helper function that converts
@@ -278,7 +279,7 @@ func readRowsFromSheet(Worksheet *xlsxWorksheet, reftable []string) []*Row {
// readSheetsFromZipFile is an internal helper function that loops
// over the Worksheets defined in the XSLXWorkbook and loads them into
// Sheet objects stored in the Sheets slice of a xlsx.File struct.
-func readSheetsFromZipFile(f *zip.File, file *File) ([]*Sheet, error) {
+func readSheetsFromZipFile(f *zip.File, file *File) ([]*Sheet, []string, error) {
var workbook *xlsxWorkbook
var error error
var rc io.ReadCloser
@@ -286,24 +287,26 @@ func readSheetsFromZipFile(f *zip.File, file *File) ([]*Sheet, error) {
workbook = new(xlsxWorkbook)
rc, error = f.Open()
if error != nil {
- return nil, error
+ return nil, nil, error
}
decoder = xml.NewDecoder(rc)
error = decoder.Decode(workbook)
if error != nil {
- return nil, error
+ return nil, nil, error
}
sheets := make([]*Sheet, len(workbook.Sheets.Sheet))
+ names := make([]string, len(workbook.Sheets.Sheet))
for i, rawsheet := range workbook.Sheets.Sheet {
worksheet, error := getWorksheetFromSheet(rawsheet, file.worksheets)
if error != nil {
- return nil, error
+ return nil, nil, error
}
sheet := new(Sheet)
sheet.Rows = readRowsFromSheet(worksheet, file.referenceTable)
sheets[i] = sheet
+ names[i] = rawsheet.Name
}
- return sheets, nil
+ return sheets, names, nil
}
// readSharedStringsFromZipFile() is an internal helper function to
@@ -341,6 +344,8 @@ func OpenFile(filename string) (x *File, e error) {
var reftable []string
var worksheets map[string]*zip.File
f, error = zip.OpenReader(filename)
+ var sheetMap map[string]*Sheet
+
if error != nil {
return nil, error
}
@@ -371,7 +376,7 @@ func OpenFile(filename string) (x *File, e error) {
return nil, error
}
file.referenceTable = reftable
- sheets, error := readSheetsFromZipFile(workbook, file)
+ sheets, names, error := readSheetsFromZipFile(workbook, file)
if error != nil {
return nil, error
}
@@ -381,6 +386,11 @@ func OpenFile(filename string) (x *File, e error) {
return nil, error
}
file.Sheets = sheets
+ sheetMap = make(map[string]*Sheet,len(names))
+ for i := 0; i < len(names); i++ {
+ sheetMap[names[i]] = sheets[i]
+ }
+ file.Sheet = sheetMap
f.Close()
return file, nil
}
|
access by name
add sheet map so we can access by name
|
tealeg_xlsx
|
train
|
85343cfd5fece3f698b579d6177aa6f3a07853e0
|
diff --git a/boot/settings.js b/boot/settings.js
index <HASH>..<HASH> 100644
--- a/boot/settings.js
+++ b/boot/settings.js
@@ -81,7 +81,8 @@ settings.response_types_supported = [
'id_token',
'token id_token',
'id_token token',
- 'code id_token token'
+ 'code id_token token',
+ 'none'
]
/**
diff --git a/models/Client.js b/models/Client.js
index <HASH>..<HASH> 100644
--- a/models/Client.js
+++ b/models/Client.js
@@ -63,7 +63,8 @@ var Client = Modinha.define('clients', {
enum: [
'code',
'id_token',
- 'id_token token'
+ 'id_token token',
+ 'none'
]
},
diff --git a/oidc/authorize.js b/oidc/authorize.js
index <HASH>..<HASH> 100644
--- a/oidc/authorize.js
+++ b/oidc/authorize.js
@@ -27,7 +27,8 @@ function authorize (req, res, next) {
var params = req.connectParams
var responseTypes = params.response_type.split(' ')
var responseMode = params.response_mode ||
- (params.response_type === 'code') ? '?' : '#'
+ (params.response_type === 'code' ||
+ params.response_type === 'none') ? '?' : '#'
// ACCESS GRANTED
if (params.authorize === 'true') {
diff --git a/oidc/requireSignin.js b/oidc/requireSignin.js
index <HASH>..<HASH> 100644
--- a/oidc/requireSignin.js
+++ b/oidc/requireSignin.js
@@ -13,7 +13,8 @@ function requireSignin (req, res, next) {
var params = req.connectParams
var prompt = params.prompt
var responseMode = params.response_mode ||
- (params.response_type === 'code') ? '?' : '#'
+ (params.response_type === 'code' ||
+ params.response_type === 'none') ? '?' : '#'
// redirect with error if unauthenticated
// and prompt is "none"
diff --git a/oidc/validateAuthorizationParams.js b/oidc/validateAuthorizationParams.js
index <HASH>..<HASH> 100644
--- a/oidc/validateAuthorizationParams.js
+++ b/oidc/validateAuthorizationParams.js
@@ -15,7 +15,8 @@ var responseTypes = [
'id_token', // implicit flow
'token id_token', // implicit flow
'id_token token', // implicit flow
- 'code id_token token' // hybrid flow
+ 'code id_token token', // hybrid flow
+ 'none' //
]
/**
diff --git a/test/unit/oidc/authorize.coffee b/test/unit/oidc/authorize.coffee
index <HASH>..<HASH> 100644
--- a/test/unit/oidc/authorize.coffee
+++ b/test/unit/oidc/authorize.coffee
@@ -415,6 +415,52 @@ describe 'Authorize', ->
)
+ describe 'with consent and "none" response type', ->
+
+ before (done) ->
+ sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
+ code: '1234'
+ })
+
+ req =
+ session: {}
+ client:
+ _id: 'uuid1'
+ user:
+ _id: 'uuid2'
+ connectParams:
+ authorize: 'true'
+ response_type: 'none'
+ redirect_uri: 'https://host/callback'
+ state: 'r4nd0m'
+ res =
+ redirect: sinon.spy()
+ next = sinon.spy()
+
+ authorize req, res, next
+ done()
+
+ after ->
+ AuthorizationCode.insert.restore()
+
+ it 'should redirect to the redirect_uri', ->
+ res.redirect.should.have.been.calledWith sinon.match(
+ req.connectParams.redirect_uri
+ )
+
+ it 'should provide a query string', ->
+ res.redirect.should.have.been.calledWith sinon.match('?')
+
+ it 'should not provide authorization code', ->
+ res.redirect.should.not.have.been.calledWith sinon.match 'code=1234'
+
+ it 'should provide state', ->
+ res.redirect.should.have.been.calledWith sinon.match 'state=r4nd0m'
+
+ it 'should not provide session_state', ->
+ res.redirect.should.not.have.been.calledWith sinon.match('session_state=')
+
+
describe 'with consent and response mode param', ->
|
feat: Support `none` response_type (#<I>)
|
anvilresearch_connect
|
train
|
78acd5f39a3054f05e782471de83257067d144e6
|
diff --git a/src/Rocketeer/Commands/DeployCommand.php b/src/Rocketeer/Commands/DeployCommand.php
index <HASH>..<HASH> 100644
--- a/src/Rocketeer/Commands/DeployCommand.php
+++ b/src/Rocketeer/Commands/DeployCommand.php
@@ -1,13 +1,12 @@
<?php
namespace Rocketeer\Commands;
-use Illuminate\Console\Command;
use Rocketeer\Rocketeer;
/**
* Your interface to deploying your projects
*/
-class DeployCommand extends Command
+class DeployCommand extends BaseDeployCommand
{
/**
|
Make DeployCommand extends BaseDeployCommand now that it's lighter
|
rocketeers_rocketeer
|
train
|
f7b65926510e85bb27e51a185dc357a9ee5fa9b8
|
diff --git a/Space/Field/__init__.py b/Space/Field/__init__.py
index <HASH>..<HASH> 100644
--- a/Space/Field/__init__.py
+++ b/Space/Field/__init__.py
@@ -119,5 +119,6 @@ class SuperposedField(Field):
else:
raise ValueError('at least 3 coordinates are needed for point')
for field in self.fields:
- total_field += field.vector_field(field.to_local_coordinate_system(xyz))
+ vector_field_local = field.vector_field(field.to_local_coordinate_system(xyz))
+ total_field += field.to_global_coordinate_system(vector_field_local)
return total_field
|
Fixed nasty bug in Superposition Vector Field calculation
|
bond-anton_BDSpace
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.