hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
b07d7c492009fd080bef523b63b8948efaabe7d9
|
diff --git a/lib/chatrix/matrix.rb b/lib/chatrix/matrix.rb
index <HASH>..<HASH> 100644
--- a/lib/chatrix/matrix.rb
+++ b/lib/chatrix/matrix.rb
@@ -189,17 +189,21 @@ module Chatrix
#
# @param params [Hash{String=>String},nil] Query parameters to add to
# the options hash.
- # @param content [Hash,nil] Request content to add to the options hash.
+ # @param content [Hash,#read,nil] Request content. Can be a hash,
+ # stream, or `nil`.
# @return [Hash] Options hash ready to be passed into a server request.
- def make_request_options(params, content)
- options = {
- query: @access_token ? { access_token: @access_token } : {}
- }
-
- options[:query].merge!(params) if params.is_a? Hash
- options[:body] = content.to_json if content.is_a? Hash
+ def make_request_options(params, content, headers = {})
+ { headers: headers }.tap do |o|
+ o[:query] = @access_token ? { access_token: @access_token } : {}
+ o[:query].merge!(params) if params.is_a? Hash
+ o.merge! make_request_body content
+ end
+ end
- options
+ def make_request_body(content)
+ key = content.respond_to?(:read) ? :body_stream : :body
+ value = content.is_a? Hash ? content.to_json : content
+ { key => value }
end
# Parses a HTTParty Response object and returns it if it was successful.
|
Allow passing of a stream object as request body
|
Sharparam_chatrix
|
train
|
45286c28f261f9493062291e8741c2e7d5972b56
|
diff --git a/annis-service/src/main/java/annis/administration/CorpusAdministration.java b/annis-service/src/main/java/annis/administration/CorpusAdministration.java
index <HASH>..<HASH> 100644
--- a/annis-service/src/main/java/annis/administration/CorpusAdministration.java
+++ b/annis-service/src/main/java/annis/administration/CorpusAdministration.java
@@ -190,6 +190,8 @@ public class CorpusAdministration
catch(IOException ex)
{
log.error("Could not find any corpus in " + f.getPath(), ex);
+ importStats.setStatus(false);
+ importStats.addException(f.getAbsolutePath(), ex);
}
}
} // end for each given path
diff --git a/annis-service/src/main/java/annis/utils/RelANNISHelper.java b/annis-service/src/main/java/annis/utils/RelANNISHelper.java
index <HASH>..<HASH> 100644
--- a/annis-service/src/main/java/annis/utils/RelANNISHelper.java
+++ b/annis-service/src/main/java/annis/utils/RelANNISHelper.java
@@ -96,6 +96,11 @@ public class RelANNISHelper
}
}
+ if (result.isEmpty())
+ {
+ throw new IOException("no corpus found");
+ }
+
return result;
}
|
Also throw an IO exception when the file traverser does not find any corpus.
closes #<I>
|
korpling_ANNIS
|
train
|
88c8921d3a74f869ebbbd8aef9b14956c49acceb
|
diff --git a/tests/metrics_test.py b/tests/metrics_test.py
index <HASH>..<HASH> 100644
--- a/tests/metrics_test.py
+++ b/tests/metrics_test.py
@@ -58,6 +58,47 @@ def test_timing(setup):
assert expected == actual
+def test_timing_handling(setup):
+ with mock.patch('time.time', return_value=42.0):
+ with pytest.raises(Exception) as excinfo:
+ with uwsgi_metrics.timing(__name__, 'exc_timer', handle=True):
+ raise Exception('testing exception handling')
+ emit(None)
+ assert 'testing exception handling' == str(excinfo.value)
+
+ actual = uwsgi_metrics.view()
+ expected = {
+ 'version': __version__,
+ 'counters': {},
+ 'gauges': {},
+ 'histograms': {},
+ 'meters': {},
+ 'timers': {
+ 'tests.metrics_test.exc_timer': {
+ 'count': 1,
+ 'max': 0.0,
+ 'mean': 0.0,
+ 'min': 0.0,
+ 'p50': 0.0,
+ 'p75': 0.0,
+ 'p95': 0.0,
+ 'p98': 0.0,
+ 'p99': 0.0,
+ 'p999': 0.0,
+ 'stddev': 0.0,
+ 'm15_rate': 0.0,
+ 'm1_rate': 0.0,
+ 'm5_rate': 0.0,
+ 'mean_rate': 0.0,
+ 'duration_units': 'milliseconds',
+ 'rate_units': 'calls/second',
+ }
+ }
+ }
+
+ assert expected == actual
+
+
def test_timer(setup):
with mock.patch('time.time', return_value=42.0):
uwsgi_metrics.timer(__name__, 'my_timer', 0.0)
diff --git a/uwsgi_metrics/metrics.py b/uwsgi_metrics/metrics.py
index <HASH>..<HASH> 100644
--- a/uwsgi_metrics/metrics.py
+++ b/uwsgi_metrics/metrics.py
@@ -158,19 +158,31 @@ def view():
@contextlib.contextmanager
-def timing(module, name):
+def timing(module, name, handle=False):
"""
Context manager to time a section of code::
with timing(__name__, 'my_timer'):
do_some_operation()
+
+ If handle is set to True, then this will log results even when
+ the caller raises exceptions (which is not the default behavior).
"""
+ raise_exception = False
start_time_s = time.time()
- yield
+ if handle:
+ try:
+ yield
+ except:
+ raise_exception = True
+ else:
+ yield
end_time_s = time.time()
delta_s = end_time_s - start_time_s
delta_ms = delta_s * 1000
timer(module, name, delta_ms)
+ if raise_exception:
+ raise
@uwsgidecorators.mulefunc(1)
|
Added exception handling to metrics.timing context manager.
|
Yelp_uwsgi_metrics
|
train
|
0b0b89eb476911c01bd7fa18fc4d345e5381aea1
|
diff --git a/datatableview/datatables.py b/datatableview/datatables.py
index <HASH>..<HASH> 100644
--- a/datatableview/datatables.py
+++ b/datatableview/datatables.py
@@ -393,8 +393,10 @@ class Datatable(six.with_metaclass(DatatableMetaclass)):
i_begin = self.config['start_offset']
i_end = self.config['start_offset'] + self.config['page_length']
object_list = self._records[i_begin:i_end]
+ else:
+ object_list = self._records
- return self._records
+ return object_list
def get_records(self):
"""
|
Fixed broken pagination due to last change
|
pivotal-energy-solutions_django-datatable-view
|
train
|
31596ff3befa3de2d6e4b05fef9f06fd9a9d4700
|
diff --git a/app/inputs/custom_inputs/surround_input.rb b/app/inputs/custom_inputs/surround_input.rb
index <HASH>..<HASH> 100644
--- a/app/inputs/custom_inputs/surround_input.rb
+++ b/app/inputs/custom_inputs/surround_input.rb
@@ -3,8 +3,7 @@ module CustomInputs
include UiBibz::Ui::Core
def input(wrapper_options)
- options = options || {}
- options = options.merge({ builder: @builder })
+ options = @options.merge({ builder: @builder })
UiBibz::Ui::Core::SurroundField.new(attribute_name, options, input_html_options).render
end
|
fix surround_input for simple_form
|
thooams_Ui-Bibz
|
train
|
7603ebe3e61955771ed2118f8cba67f98b954866
|
diff --git a/osbs/utils.py b/osbs/utils.py
index <HASH>..<HASH> 100644
--- a/osbs/utils.py
+++ b/osbs/utils.py
@@ -74,7 +74,7 @@ def checkout_git_repo(git_uri, git_ref, git_branch):
def get_df_parser(git_uri, git_ref, git_branch):
with checkout_git_repo(git_uri, git_ref, git_branch) as code_dir:
- dfp = DockerfileParser(os.path.join(code_dir, 'Dockerfile'), cache_content=True)
+ dfp = DockerfileParser(os.path.join(code_dir), cache_content=True)
return dfp
|
Don't pass 'Dockerfile' explicitly to DockerfileParser
|
projectatomic_osbs-client
|
train
|
d51fca0588fe9085a1e6eec7e5c4c1ad80d66c4b
|
diff --git a/web/src/main/java/com/graphhopper/http/GraphHopperWeb.java b/web/src/main/java/com/graphhopper/http/GraphHopperWeb.java
index <HASH>..<HASH> 100644
--- a/web/src/main/java/com/graphhopper/http/GraphHopperWeb.java
+++ b/web/src/main/java/com/graphhopper/http/GraphHopperWeb.java
@@ -203,7 +203,22 @@ public class GraphHopperWeb implements GraphHopperAPI
Instruction instr;
if (sign == Instruction.USE_ROUNDABOUT || sign == Instruction.LEAVE_ROUNDABOUT)
{
- instr = new RoundaboutInstruction(sign, text, ia, instPL);
+ RoundaboutInstruction ri = new RoundaboutInstruction(sign, text, ia, instPL);
+
+ if (jsonObj.has("exit_number"))
+ {
+ ri.setExitNumber(jsonObj.getInt("exit_number"));
+ }
+
+ if (jsonObj.has("turn_angle"))
+ {
+ // TODO provide setTurnAngle setter
+ double angle = jsonObj.getDouble("turn_angle");
+ ri.setDirOfRotation(angle);
+ ri.setRadian((angle < 0 ? -Math.PI : Math.PI) - angle);
+ }
+
+ instr = ri;
} else if (sign == Instruction.REACHED_VIA)
{
ViaInstruction tmpInstr = new ViaInstruction(text, ia, instPL);
|
fixing bug in GraphHopperWeb
|
graphhopper_graphhopper
|
train
|
7b70cc1e3871810fb720f04d678261f92a8dc27d
|
diff --git a/lib/assets.js b/lib/assets.js
index <HASH>..<HASH> 100644
--- a/lib/assets.js
+++ b/lib/assets.js
@@ -353,21 +353,25 @@ module.exports = {
// are retroactive to the very first use of the mixin. So apostrophe-ui-2
// can alter decisions made in the apostrophe module, for instance.
return self.forAllAssetScenesAndUpgrades(function(scene, callback) {
- var masterWeb = '/css/master-' + scene + '.less';
+ var masterWeb = '/css/master-' + scene + '-' + self._generation + '.less';
var masterFile = self.options.rootDir + '/public' + masterWeb;
var stylesheets = self.filterAssets(self._assets.stylesheets, scene, true);
- fs.writeFileSync(masterFile, _.map(stylesheets, function(stylesheet) {
- // Cope with the way we push .css but actually write .less
- // because of the middleware. TODO: think about killing that
- var importName = stylesheet.web.replace('.css', '.less');
- if (!fs.existsSync(self.options.rootDir + '/public' + importName)) {
- importName = stylesheet.web;
- }
- // For import what we need is a relative path which will work on
- // the filesystem too thanks to the symbolic links for modules
- var relPath = path.relative(path.dirname(masterWeb), importName);
- return '@import \'' + relPath + '\';';
- }).join("\n"));
+ // Avoid race conditions, if apostrophe:generation created
+ // the file already leave it alone
+ if (!fs.existsSync(masterFile)) {
+ fs.writeFileSync(masterFile, _.map(stylesheets, function(stylesheet) {
+ // Cope with the way we push .css but actually write .less
+ // because of the middleware. TODO: think about killing that
+ var importName = stylesheet.web.replace('.css', '.less');
+ if (!fs.existsSync(self.options.rootDir + '/public' + importName)) {
+ importName = stylesheet.web;
+ }
+ // For import what we need is a relative path which will work on
+ // the filesystem too thanks to the symbolic links for modules
+ var relPath = path.relative(path.dirname(masterWeb), importName);
+ return '@import \'' + relPath + '\';';
+ }).join("\n"));
+ }
self._lessMasters[scene] = {
// The nature of the LESS middleware is that it expects you to
// request a CSS file and uses LESS to render it if available
@@ -436,6 +440,12 @@ module.exports = {
fs.mkdirSync(dir);
}
var filename = dir + '/' + scene + '-' + self._generation + '.' + typeMap[type];
+ // Avoid race conditions - don't try to write the
+ // same file again if apostrophe:generation already
+ // created it for us
+ if (fs.existsSync(filename)) {
+ return;
+ }
if ((type === 'stylesheets') && self.options.bless) {
var bless = require('bless');
var output = path.dirname(filename);
@@ -444,8 +454,10 @@ module.exports = {
options: {}
}).parse(content.toString(), function (err, files) {
if (files.length === 1) {
- // No splitting needed for <= IE9
- fs.writeFileSync(filename, content);
+ // No splitting needed, small enough for <= IE9 already
+ if (!fs.existsSync(filename)) {
+ fs.writeFileSync(filename, content);
+ }
return;
}
var master = '';
|
Multiprocess fix: fixed a race condition that caused broken asset files. Do not create asset files that apostrophe:generation has already created. Also, the LESS master filenames now contain the generation ID.
|
apostrophecms_apostrophe
|
train
|
ee1814f7bc5e8922c101bd9759835db1d7c7ce49
|
diff --git a/packages/material-ui/src/FormLabel/FormLabel.js b/packages/material-ui/src/FormLabel/FormLabel.js
index <HASH>..<HASH> 100644
--- a/packages/material-ui/src/FormLabel/FormLabel.js
+++ b/packages/material-ui/src/FormLabel/FormLabel.js
@@ -28,6 +28,10 @@ export const styles = (theme) => ({
'&$focused': {
color: theme.palette.secondary.main,
},
+ '&$error': {
+ // To remove once we migrate to emotion
+ color: theme.palette.error.main,
+ },
},
/* Pseudo-class applied to the root element if `focused={true}`. */
focused: {},
diff --git a/packages/material-ui/src/FormLabel/FormLabel.test.js b/packages/material-ui/src/FormLabel/FormLabel.test.js
index <HASH>..<HASH> 100644
--- a/packages/material-ui/src/FormLabel/FormLabel.test.js
+++ b/packages/material-ui/src/FormLabel/FormLabel.test.js
@@ -154,4 +154,28 @@ describe('<FormLabel />', () => {
});
});
});
+
+ describe('prop: color', () => {
+ it('should have color secondary class', () => {
+ const { container } = render(<FormLabel color="secondary" />);
+ expect(container.querySelectorAll(`.${classes.colorSecondary}`)).to.have.lengthOf(1);
+ expect(container.querySelector(`.${classes.root}`)).to.have.class(classes.colorSecondary);
+ });
+
+ it('should have the focused class and style', () => {
+ const { container, getByTestId } = render(
+ <FormLabel data-testid="FormLabel" color="secondary" focused />,
+ );
+ expect(container.querySelector(`.${classes.colorSecondary}`)).to.have.class(classes.focused);
+ expect(getByTestId('FormLabel')).toHaveComputedStyle({ color: 'rgb(245, 0, 87)' });
+ });
+
+ it('should have the error class and style, even when focused', () => {
+ const { container, getByTestId } = render(
+ <FormLabel data-testid="FormLabel" color="secondary" focused error />,
+ );
+ expect(container.querySelector(`.${classes.colorSecondary}`)).to.have.class(classes.error);
+ expect(getByTestId('FormLabel')).toHaveComputedStyle({ color: 'rgb(244, 67, 54)' });
+ });
+ });
});
diff --git a/packages/material-ui/src/OutlinedInput/OutlinedInput.js b/packages/material-ui/src/OutlinedInput/OutlinedInput.js
index <HASH>..<HASH> 100644
--- a/packages/material-ui/src/OutlinedInput/OutlinedInput.js
+++ b/packages/material-ui/src/OutlinedInput/OutlinedInput.js
@@ -40,6 +40,10 @@ export const styles = (theme) => {
'&$focused $notchedOutline': {
borderColor: theme.palette.secondary.main,
},
+ '&$error $notchedOutline': {
+ // To remove once we migrate to emotion
+ borderColor: theme.palette.error.main,
+ },
},
/* Styles applied to the root element if the component is focused. */
focused: {},
|
[Textfield] Add error color for form input with secondary color (#<I>)
|
mui-org_material-ui
|
train
|
3d28ed11b7f2968c33fedd59de55e7862580fd8d
|
diff --git a/.ci/ansible_tests.py b/.ci/ansible_tests.py
index <HASH>..<HASH> 100755
--- a/.ci/ansible_tests.py
+++ b/.ci/ansible_tests.py
@@ -66,7 +66,6 @@ with ci_lib.Fold('job_setup'):
ci_lib.dump_file(inventory_path)
if not ci_lib.exists_in_path('sshpass'):
- run("sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 78BD65473CB3BD13")
run("sudo apt-get update")
run("sudo apt-get install -y sshpass")
|
added wrong key for debian 9 test container
|
dw_mitogen
|
train
|
33b5c8ce394045a04c13c363a371638b6f00a411
|
diff --git a/src/livestreamer/plugins/ownedtv.py b/src/livestreamer/plugins/ownedtv.py
index <HASH>..<HASH> 100644
--- a/src/livestreamer/plugins/ownedtv.py
+++ b/src/livestreamer/plugins/ownedtv.py
@@ -37,7 +37,7 @@ class OwnedTV(Plugin):
data = fd.read()
fd.close()
- match = re.search(b"own3d.tv\/livestreamfb\/(\d+)", data)
+ match = re.search(b"document.location.hash='/live/(\d+)'", data)
if match:
return int(match.group(1))
|
alter regular expression in ownedtv plugin to again find the channel id
|
streamlink_streamlink
|
train
|
c9695103a3437a305a008b85bc0dcb76ca3a3755
|
diff --git a/lib/dm-core/associations/relationship.rb b/lib/dm-core/associations/relationship.rb
index <HASH>..<HASH> 100644
--- a/lib/dm-core/associations/relationship.rb
+++ b/lib/dm-core/associations/relationship.rb
@@ -139,11 +139,11 @@ module DataMapper
children = child_identity_map.values
- bind_values = *children.map { |c| child_key.get(c) }.uniq
- query_values = bind_values.reject { |k| parent_identity_map[[k]] }
+ bind_values = children.map { |c| child_key.get(c) }.uniq
+ query_values = bind_values.reject { |k| parent_identity_map[k] }
bind_values = query_values unless query_values.empty?
- query = parent_key.map { |k| [ k, bind_values ] }.to_hash
+ query = parent_key.zip(bind_values.transpose).to_hash
association_accessor = "#{self.name}_association"
|
fixed problem with composite keys and SEL. Temporary solution (composite keys still don't work properly until we support OR)
|
datamapper_dm-core
|
train
|
e57027f14add99bcce19beb363c282d8585bac55
|
diff --git a/lib/main.js b/lib/main.js
index <HASH>..<HASH> 100644
--- a/lib/main.js
+++ b/lib/main.js
@@ -136,6 +136,9 @@ const registerWdioCommands = function() {
}
}
+if (!isLocal && process.env.WDIO_INIT_TESTABLEUTILS !== 'true')
+ registerWdioCommands();
+
const testSteps = new TestSteps();
module.exports.isLocal = isLocal;
|
init wdio custom commands if agent is not updated
|
testable_node-script-utils
|
train
|
1fba49d8115ea23ea00dd2f0cae7f4a380ecaec3
|
diff --git a/demos/lucene-directory-demo/src/main/java/org/infinispan/lucenedemo/DemoActions.java b/demos/lucene-directory-demo/src/main/java/org/infinispan/lucenedemo/DemoActions.java
index <HASH>..<HASH> 100644
--- a/demos/lucene-directory-demo/src/main/java/org/infinispan/lucenedemo/DemoActions.java
+++ b/demos/lucene-directory-demo/src/main/java/org/infinispan/lucenedemo/DemoActions.java
@@ -62,12 +62,12 @@ public class DemoActions {
private static final String MAIN_FIELD = "myField";
/** The Analyzer used in all methods **/
- private static final Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_29);
+ private static final Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_33);
private InfinispanDirectory index;
private final Cache cache;
-
+
public DemoActions(InfinispanDirectory index, Cache cache) {
this.index = index;
this.cache = cache;
@@ -94,7 +94,7 @@ public class DemoActions {
return emptyList();
}
}
-
+
/**
* Returns a list of the values of all stored fields
* @throws IOException
@@ -129,7 +129,7 @@ public class DemoActions {
* @throws ParseException
*/
public Query parseQuery(String queryLine) throws ParseException {
- QueryParser parser = new QueryParser(Version.LUCENE_29, MAIN_FIELD, analyzer);
+ QueryParser parser = new QueryParser(Version.LUCENE_33, MAIN_FIELD, analyzer);
return parser.parse(queryLine);
}
@@ -137,7 +137,7 @@ public class DemoActions {
* Returns a list of Addresses of all members in the cluster
*/
public List<Address> listAllMembers() {
- EmbeddedCacheManager cacheManager = (EmbeddedCacheManager) cache.getCacheManager();
+ EmbeddedCacheManager cacheManager = cache.getCacheManager();
return cacheManager.getMembers();
}
diff --git a/demos/lucene-directory-demo/src/main/java/org/infinispan/lucenedemo/DemoDriver.java b/demos/lucene-directory-demo/src/main/java/org/infinispan/lucenedemo/DemoDriver.java
index <HASH>..<HASH> 100644
--- a/demos/lucene-directory-demo/src/main/java/org/infinispan/lucenedemo/DemoDriver.java
+++ b/demos/lucene-directory-demo/src/main/java/org/infinispan/lucenedemo/DemoDriver.java
@@ -38,11 +38,15 @@ import org.infinispan.remoting.transport.Address;
* clustering capabilities.
* This class parses the user input and drives the actions implemented in DemoActions.
*
+ * As always when running JGroups to run a demo cluster of multiple applications running
+ * on the same host, set these JVM options:
+ * -Djava.net.preferIPv4Stack=true -Djgroups.bind_addr=127.0.0.1
+ *
* @author Sanne Grinovero
* @since 4.0
*/
public class DemoDriver implements Runnable {
-
+
private final DemoActions actions;
public DemoDriver(InfinispanDirectory infinispanDirectory, Cache cache) {
|
Small polish in lucene-directory-demo
|
infinispan_infinispan
|
train
|
846979049d755107a7d8341c71e8b2dc955dd4f4
|
diff --git a/scarlet/assets/models.py b/scarlet/assets/models.py
index <HASH>..<HASH> 100644
--- a/scarlet/assets/models.py
+++ b/scarlet/assets/models.py
@@ -55,7 +55,6 @@ class Asset(models.Model):
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
- @property
def url(self):
"""
This is a wrapper of file.url
|
Converted Asset model url property to a function
|
ff0000_scarlet
|
train
|
1ef95d2d0d99edca521bd3e907570775e3d42b93
|
diff --git a/spec/functional/audit_entries_spec.rb b/spec/functional/audit_entries_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/functional/audit_entries_spec.rb
+++ b/spec/functional/audit_entries_spec.rb
@@ -16,14 +16,17 @@ describe RightApi::Client, :functional=>true do
puts "=" * 80
end
- server_arrays =
- @client.server_arrays.index
+ unless @client.nil?
+ server_arrays =
+ @client.server_arrays.index
- @server_array =
- server_arrays.find { |sa| sa.name.match(/test/i) } ||
- server_arrays.first
+ @server_array =
+ server_arrays.find { |sa| sa.name.match(/test/i) } ||
+ server_arrays.first
+ end
- raise "sorry, can't test, no server arrays in your RS" unless @server_array
+ # Commented while we dont execute credential tests
+ # raise "sorry, can't test, no server arrays in your RS" unless @server_array
end
describe '#audit_entries' do
@@ -31,6 +34,7 @@ describe RightApi::Client, :functional=>true do
describe '#create' do
it 'creates audit entries' do
+ pending("Not running tests based on credentials")
ae = @client.audit_entries.create(:audit_entry => {
'auditee_href' => @server_array.href,
@@ -49,6 +53,7 @@ describe RightApi::Client, :functional=>true do
describe '#detail' do
it 'returns the detail plain text' do
+ pending("Not running tests based on credentials")
ae = @client.audit_entries.create(:audit_entry => {
'auditee_href' => @server_array.href,
diff --git a/spec/functional/client_spec.rb b/spec/functional/client_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/functional/client_spec.rb
+++ b/spec/functional/client_spec.rb
@@ -4,6 +4,7 @@ describe RightApi::Client, :functional=>true do
context "given a valid set of credentials in the config/login.yml file" do
+
before(:all) do
@creds = '../../../config/login.yml'
begin
@@ -15,15 +16,18 @@ describe RightApi::Client, :functional=>true do
end
# Don't bother to run tests if the client didn't initialize
- @client.should_not be_nil
+ # Commented while credentials tests are pending
+ # @client.should_not be_nil
end
it "logs in" do
+ pending("Not running tests based on credentials")
@client.send(:headers)[:cookies].should_not be_nil
@client.session.index.message.should == 'You have successfully logged into the RightScale API.'
end
it "returns valid cookies" do
+ pending("Not running tests based on credentials")
@client.cookies.class.should == Hash
@client.cookies['_session_id'].should_not be_nil
@client.cookies['domain'].should match /rightscale.com$/
@@ -31,6 +35,7 @@ describe RightApi::Client, :functional=>true do
end
it "accepts a cookie argument when creating a new client" do
+ pending("Not running tests based on credentials")
my_hash = YAML.load_file(File.expand_path(@creds, __FILE__))
my_hash.delete(:email)
my_hash.delete(:password)
@@ -41,6 +46,7 @@ describe RightApi::Client, :functional=>true do
end
it "accepts an access_token argument when creating a new client" do
+ pending("Not running tests based on credentials")
my_hash = YAML.load_file(File.expand_path(@creds, __FILE__))
my_hash.delete(:email)
my_hash.delete(:password)
@@ -58,11 +64,13 @@ describe RightApi::Client, :functional=>true do
end
it "timestamps cookies" do
+ pending("Not running tests based on credentials")
@client.cookies.timestamp.should_not == nil
end
it "keeps track of the cookies all the time" do
+ pending("Not running tests based on credentials")
t0 = @client.cookies.timestamp
@@ -73,11 +81,13 @@ describe RightApi::Client, :functional=>true do
end
it "accepts a YAML argument when creating a new client" do
+ pending("Not running tests based on credentials")
client2 = RightApi::Client.new(YAML.load_file(File.expand_path(@creds, __FILE__)))
client2.cookies.should_not == @client.cookies
end
it "sends post/get/put/delete requests to the server correctly" do
+ pending("Not running tests based on credentials")
deployment_name = "right_api_client functional test #{Time.now.to_s}"
# create a new deployment
new_deployment = @client.deployments.create(:deployment => {:name => deployment_name})
@@ -100,6 +110,7 @@ describe RightApi::Client, :functional=>true do
# Tags are a bit special as they use POST and return content type so they need specific tests
it "adds tag to deployment" do
+ pending("Not running tests based on credentials")
deployment_name = "right_api_client functional test #{Time.now.to_s}"
# create a new deployment
@@ -120,6 +131,7 @@ describe RightApi::Client, :functional=>true do
end
it "singularizes resource_types correctly" do
+ pending("Not running tests based on credentials")
@client.get_singular('servers').should == 'server'
@client.get_singular('deployments').should == 'deployment'
@client.get_singular('audit_entries').should == 'audit_entry'
@@ -128,6 +140,7 @@ describe RightApi::Client, :functional=>true do
end
it "returns the resource when calling #resource(href)" do
+ pending("Not running tests based on credentials")
d0 = @client.deployments.index.first
@@ -137,6 +150,7 @@ describe RightApi::Client, :functional=>true do
end
it "raises meaningful errors" do
+ pending("Not running tests based on credentials")
err = begin
@client.resource('/api/nada')
@@ -152,6 +166,7 @@ describe RightApi::Client, :functional=>true do
end
it "wraps errors with _details" do
+ pending("Not running tests based on credentials")
err = begin
@client.deployments(:id => 'nada').show
|
Put all tests that use credentials in pending state
|
rightscale_right_api_client
|
train
|
f4bfea11b3bd7a315f7c00734bdaf71bfa808ba0
|
diff --git a/scs_core/osio/data/user_metadata.py b/scs_core/osio/data/user_metadata.py
index <HASH>..<HASH> 100644
--- a/scs_core/osio/data/user_metadata.py
+++ b/scs_core/osio/data/user_metadata.py
@@ -66,9 +66,7 @@ class UserMetadata(User):
# UserMetadata...
gravatar_hash = jdict.get('gravatar-hash')
- topics = [UserTopic.construct_from_jdict(dt_jdict) for dt_jdict in jdict.get('topics')]
-
- # TODO: sort topics by path
+ topics = [UserTopic.construct_from_jdict(topic_jdict) for topic_jdict in jdict.get('topics')]
return UserMetadata(id, name, None, None, start, gravatar_hash, topics)
diff --git a/scs_core/osio/manager/topic_manager.py b/scs_core/osio/manager/topic_manager.py
index <HASH>..<HASH> 100644
--- a/scs_core/osio/manager/topic_manager.py
+++ b/scs_core/osio/manager/topic_manager.py
@@ -13,6 +13,7 @@ from scs_core.osio.client.rest_client import RESTClient
from scs_core.osio.data.device_topic import DeviceTopic
from scs_core.osio.data.topic_summary import TopicSummary
from scs_core.osio.data.topic_metadata import TopicMetadata
+from scs_core.osio.data.user_topic import UserTopic
from scs_core.osio.manager.message_manager import NextMessageQuery
@@ -76,16 +77,13 @@ class TopicManager(object):
def find_for_user(self, user_id):
request_path = '/v1/users/' + user_id + '/topics'
- topics = {}
-
# request...
self.__rest_client.connect()
try:
jdict = self.__rest_client.get(request_path)
- for topic in jdict:
- print(topic)
+ topics = [UserTopic.construct_from_jdict(topic_jdict) for topic_jdict in jdict]
finally:
self.__rest_client.close()
|
Added user_topics top-level script.
|
south-coast-science_scs_core
|
train
|
a77fd16869997fa36eb548c7976004a60063b7bb
|
diff --git a/salt/beacons/log.py b/salt/beacons/log.py
index <HASH>..<HASH> 100644
--- a/salt/beacons/log.py
+++ b/salt/beacons/log.py
@@ -69,6 +69,12 @@ def beacon(config):
file: <path>
<tag>:
regex: <pattern>
+
+ .. note::
+
+ regex matching is based on the `re`_ module
+
+ .. _re: https://docs.python.org/3.6/library/re.html#regular-expression-syntax
'''
ret = []
|
Update salt.beacons.log to reflect that re module is used for matching.
As more non-python users are starting to use Salt, it would be useful to
point out which regular expression engines are in use so that users can
adapt accordingly.
|
saltstack_salt
|
train
|
aab36b62cd1219e9c61e5cff6cecb544e04c8184
|
diff --git a/cassandra/__init__.py b/cassandra/__init__.py
index <HASH>..<HASH> 100644
--- a/cassandra/__init__.py
+++ b/cassandra/__init__.py
@@ -6,7 +6,7 @@ class NullHandler(logging.Handler):
def emit(self, record):
pass
-# logging.getLogger('cassandra').addHandler(NullHandler())
+logging.getLogger('cassandra').addHandler(NullHandler())
__version_info__ = (1, 0, '0b7', 'post')
|
Actually add NullHandler in logging to cassandra
For some reason this was commented out during the initial implementation
|
datastax_python-driver
|
train
|
9e6b813422e695f54171f530c5d6cce12c1ff0e7
|
diff --git a/kie-server-parent/kie-server-controller/kie-server-controller-rest/src/main/java/org/kie/server/controller/rest/docs/ParameterSamples.java b/kie-server-parent/kie-server-controller/kie-server-controller-rest/src/main/java/org/kie/server/controller/rest/docs/ParameterSamples.java
index <HASH>..<HASH> 100644
--- a/kie-server-parent/kie-server-controller/kie-server-controller-rest/src/main/java/org/kie/server/controller/rest/docs/ParameterSamples.java
+++ b/kie-server-parent/kie-server-controller/kie-server-controller-rest/src/main/java/org/kie/server/controller/rest/docs/ParameterSamples.java
@@ -242,7 +242,7 @@ public class ParameterSamples {
" \"container-id\" : \"evaluation_1.0.0-SNAPSHOT\",\n" +
" \"container-name\" : \"evaluation\",\n" +
" \"server-template-key\" : null,\n" +
- " \"release-id\" : {\"\n" +
+ " \"release-id\" : {\n" +
" \"group-id\" : \"evaluation\",\n" +
" \"artifact-id\" : \"evaluation\",\n" +
" \"version\" : \"1.0.0-SNAPSHOT\"\n" +
|
Controller REST - Remove breaking " in JSON example body (#<I>)
|
kiegroup_droolsjbpm-integration
|
train
|
f5edc69320b1e561d06b4c09b2ee396bdcead56d
|
diff --git a/lib/Doctrine/ODM/PHPCR/Mapping/ClassMetadata.php b/lib/Doctrine/ODM/PHPCR/Mapping/ClassMetadata.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/ODM/PHPCR/Mapping/ClassMetadata.php
+++ b/lib/Doctrine/ODM/PHPCR/Mapping/ClassMetadata.php
@@ -19,6 +19,8 @@
namespace Doctrine\ODM\PHPCR\Mapping;
+use Doctrine\Common\Persistence\Mapping\ClassMetadata as ClassMetadataInterface;
+
/**
* Metadata class
*
@@ -30,7 +32,7 @@ namespace Doctrine\ODM\PHPCR\Mapping;
* @author Jonathan H. Wage <jonwage@gmail.com>
* @author Roman Borschel <roman@code-factory.org>
*/
-class ClassMetadata extends ClassMetadataInfo
+class ClassMetadata extends ClassMetadataInfo implements ClassMetadataInterface
{
/**
* The ReflectionProperty instances of the mapped class.
diff --git a/lib/Doctrine/ODM/PHPCR/Mapping/ClassMetadataInfo.php b/lib/Doctrine/ODM/PHPCR/Mapping/ClassMetadataInfo.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/ODM/PHPCR/Mapping/ClassMetadataInfo.php
+++ b/lib/Doctrine/ODM/PHPCR/Mapping/ClassMetadataInfo.php
@@ -20,7 +20,6 @@
namespace Doctrine\ODM\PHPCR\Mapping;
use ReflectionClass;
-use Doctrine\Common\Persistence\Mapping\ClassMetadata;
/**
* A <tt>ClassMetadata</tt> instance holds all the object-document mapping metadata
@@ -44,7 +43,7 @@ use Doctrine\Common\Persistence\Mapping\ClassMetadata;
* @author Benjamin Eberlei <kontakt@beberlei.de>
* @author Lukas Kahwe Smith <smith@pooteeweet.org>
*/
-class ClassMetadataInfo implements ClassMetadata
+class ClassMetadataInfo
{
const TO_ONE = 5;
const TO_MANY = 10;
@@ -679,6 +678,19 @@ class ClassMetadataInfo implements ClassMetadata
}
/**
+ * Get identifier field names of this class.
+ *
+ * Since PHPCR only allows exactly one identifier field this is a proxy
+ * to {@see getIdentifier()} and returns an array.
+ *
+ * @return array
+ */
+ public function getIdentifierFieldNames()
+ {
+ return array($this->identifier);
+ }
+
+ /**
* {@inheritDoc}
*/
public function getReflectionClass()
|
Updated the ClassMetadata classes to the latest Common interface
|
doctrine_phpcr-odm
|
train
|
970beaac4af4dc7ed9d412e18381f115b74c399c
|
diff --git a/gcloud/storage/blob.py b/gcloud/storage/blob.py
index <HASH>..<HASH> 100644
--- a/gcloud/storage/blob.py
+++ b/gcloud/storage/blob.py
@@ -263,15 +263,20 @@ class Blob(_PropertyMixin):
"""
return self.bucket.delete_blob(self.name)
- def download_to_file(self, file_obj):
+ def download_to_file(self, file_obj, connection=None):
"""Download the contents of this blob into a file-like object.
:type file_obj: file
:param file_obj: A file handle to which to write the blob's data.
+ :type connection: :class:`gcloud.storage.connection.Connection` or
+ ``NoneType``
+ :param connection: Optional. The connection to use when sending
+ requests. If not provided, falls back to default.
+
:raises: :class:`gcloud.exceptions.NotFound`
"""
-
+ connection = _require_connection(connection)
download_url = self.media_link
# Use apitools 'Download' facility.
@@ -282,7 +287,7 @@ class Blob(_PropertyMixin):
headers['Range'] = 'bytes=0-%d' % (self.chunk_size - 1,)
request = http_wrapper.Request(download_url, 'GET', headers)
- download.InitializeDownload(request, self.connection.http)
+ download.InitializeDownload(request, connection.http)
# Should we be passing callbacks through from caller? We can't
# pass them as None, because apitools wants to print to the console
@@ -290,29 +295,39 @@ class Blob(_PropertyMixin):
download.StreamInChunks(callback=lambda *args: None,
finish_callback=lambda *args: None)
- def download_to_filename(self, filename):
+ def download_to_filename(self, filename, connection=None):
"""Download the contents of this blob into a named file.
:type filename: string
:param filename: A filename to be passed to ``open``.
+ :type connection: :class:`gcloud.storage.connection.Connection` or
+ ``NoneType``
+ :param connection: Optional. The connection to use when sending
+ requests. If not provided, falls back to default.
+
:raises: :class:`gcloud.exceptions.NotFound`
"""
with open(filename, 'wb') as file_obj:
- self.download_to_file(file_obj)
+ self.download_to_file(file_obj, connection=connection)
mtime = time.mktime(self.updated.timetuple())
os.utime(file_obj.name, (mtime, mtime))
- def download_as_string(self):
+ def download_as_string(self, connection=None):
"""Download the contents of this blob as a string.
+ :type connection: :class:`gcloud.storage.connection.Connection` or
+ ``NoneType``
+ :param connection: Optional. The connection to use when sending
+ requests. If not provided, falls back to default.
+
:rtype: bytes
:returns: The data stored in this blob.
:raises: :class:`gcloud.exceptions.NotFound`
"""
string_buffer = BytesIO()
- self.download_to_file(string_buffer)
+ self.download_to_file(string_buffer, connection=connection)
return string_buffer.getvalue()
def upload_from_file(self, file_obj, rewind=False, size=None,
diff --git a/gcloud/storage/test_blob.py b/gcloud/storage/test_blob.py
index <HASH>..<HASH> 100644
--- a/gcloud/storage/test_blob.py
+++ b/gcloud/storage/test_blob.py
@@ -318,7 +318,7 @@ class Test_Blob(unittest2.TestCase):
(chunk1_response, b'abc'),
(chunk2_response, b'def'),
)
- bucket = _Bucket(connection)
+ bucket = _Bucket(None)
MEDIA_LINK = 'http://example.com/media/'
properties = {'mediaLink': MEDIA_LINK}
blob = self._makeOne(BLOB_NAME, bucket=bucket, properties=properties)
@@ -326,7 +326,7 @@ class Test_Blob(unittest2.TestCase):
blob._CHUNK_SIZE_MULTIPLE = 1
blob.chunk_size = chunk_size
fh = BytesIO()
- blob.download_to_file(fh)
+ blob.download_to_file(fh, connection=connection)
self.assertEqual(fh.getvalue(), b'abcdef')
def test_download_to_file_default(self):
@@ -350,7 +350,7 @@ class Test_Blob(unittest2.TestCase):
(chunk1_response, b'abc'),
(chunk2_response, b'def'),
)
- bucket = _Bucket(connection)
+ bucket = _Bucket(None)
MEDIA_LINK = 'http://example.com/media/'
properties = {'mediaLink': MEDIA_LINK,
'updated': '2014-12-06T13:13:50.690Z'}
@@ -358,7 +358,7 @@ class Test_Blob(unittest2.TestCase):
blob._CHUNK_SIZE_MULTIPLE = 1
blob.chunk_size = 3
with NamedTemporaryFile() as f:
- blob.download_to_filename(f.name)
+ blob.download_to_filename(f.name, connection=connection)
f.flush()
with open(f.name, 'rb') as g:
wrote = g.read()
@@ -379,13 +379,13 @@ class Test_Blob(unittest2.TestCase):
(chunk1_response, b'abc'),
(chunk2_response, b'def'),
)
- bucket = _Bucket(connection)
+ bucket = _Bucket(None)
MEDIA_LINK = 'http://example.com/media/'
properties = {'mediaLink': MEDIA_LINK}
blob = self._makeOne(BLOB_NAME, bucket=bucket, properties=properties)
blob._CHUNK_SIZE_MULTIPLE = 1
blob.chunk_size = 3
- fetched = blob.download_as_string()
+ fetched = blob.download_as_string(connection=connection)
self.assertEqual(fetched, b'abcdef')
def _upload_from_file_simple_test_helper(self, properties=None,
|
Removing using of Blob.connection in all Blob.download* methods.
Allowing the default connection to be used as fallback or takes
an explicit connection argument.
|
googleapis_google-cloud-python
|
train
|
0de43fd112806a86a6d12e70dd7c8f20df959a0b
|
diff --git a/samcli/local/docker/lambda_image.py b/samcli/local/docker/lambda_image.py
index <HASH>..<HASH> 100644
--- a/samcli/local/docker/lambda_image.py
+++ b/samcli/local/docker/lambda_image.py
@@ -8,7 +8,6 @@ from enum import Enum
from pathlib import Path
import sys
-import re
import platform
import docker
@@ -328,6 +327,8 @@ class LambdaImage:
: return bool: True, if the image name ends with rapid-$SAM_CLI_VERSION. False, otherwise
"""
- if not re.search(fr":{RAPID_IMAGE_TAG_PREFIX}-\d+\.\d+.\d+$", image_name):
+ try:
+ return image_name.split(":")[1].startswith(f"{RAPID_IMAGE_TAG_PREFIX}-")
+ except (IndexError, AttributeError):
+ # split() returned 1 or less items or image_name is None
return False
- return True
diff --git a/tests/unit/local/docker/test_lambda_image.py b/tests/unit/local/docker/test_lambda_image.py
index <HASH>..<HASH> 100644
--- a/tests/unit/local/docker/test_lambda_image.py
+++ b/tests/unit/local/docker/test_lambda_image.py
@@ -449,3 +449,13 @@ class TestLambdaImage(TestCase):
)
docker_client_mock.images.remove.assert_not_called()
+
+ def test_is_rapid_image(self):
+ self.assertFalse(LambdaImage.is_rapid_image(None))
+ self.assertFalse(LambdaImage.is_rapid_image(""))
+ self.assertFalse(LambdaImage.is_rapid_image("my_repo"))
+ self.assertFalse(LambdaImage.is_rapid_image("my_repo:tag"))
+ self.assertTrue(LambdaImage.is_rapid_image("my_repo:rapid-1.29beta"))
+ self.assertFalse(LambdaImage.is_rapid_image(f"public.ecr.aws/lambda/python:3.9"))
+ self.assertFalse(LambdaImage.is_rapid_image(f"public.ecr.aws/sam/emulation-python3.6:latest"))
+ self.assertTrue(LambdaImage.is_rapid_image(f"public.ecr.aws/sam/emulation-python3.6:rapid-1.29.0"))
|
fix: rapid regrex does not fit extended version number (#<I>)
* fix: rapid regrex does not fit extended version number in nightly/beta releases
* update the regex
* Add comment on rapid regex matching cases
* black reformat
* check if the tag start with :rapid-
* black reformat
|
awslabs_aws-sam-cli
|
train
|
a41dd5f4867e717d12bee5ab113d23d3e3abb389
|
diff --git a/ask-sdk-core/src/com/amazon/ask/builder/SkillConfiguration.java b/ask-sdk-core/src/com/amazon/ask/builder/SkillConfiguration.java
index <HASH>..<HASH> 100644
--- a/ask-sdk-core/src/com/amazon/ask/builder/SkillConfiguration.java
+++ b/ask-sdk-core/src/com/amazon/ask/builder/SkillConfiguration.java
@@ -202,6 +202,11 @@ public class SkillConfiguration {
return this;
}
+ public Builder appendCustomUserAgent(String userAgent) {
+ customUserAgent = customUserAgent == null ? userAgent : customUserAgent + " " + userAgent;
+ return this;
+ }
+
public String getCustomUserAgent() {
return customUserAgent;
}
diff --git a/ask-sdk-core/src/com/amazon/ask/module/SdkModuleContext.java b/ask-sdk-core/src/com/amazon/ask/module/SdkModuleContext.java
index <HASH>..<HASH> 100644
--- a/ask-sdk-core/src/com/amazon/ask/module/SdkModuleContext.java
+++ b/ask-sdk-core/src/com/amazon/ask/module/SdkModuleContext.java
@@ -113,4 +113,9 @@ public class SdkModuleContext {
return this;
}
+ public SdkModuleContext appendCustomUserAgent(String customUserAgent) {
+ skillConfigBuilder.appendCustomUserAgent(customUserAgent);
+ return this;
+ }
+
}
diff --git a/ask-sdk-core/tst/com/amazon/ask/builder/SkillConfigurationTest.java b/ask-sdk-core/tst/com/amazon/ask/builder/SkillConfigurationTest.java
index <HASH>..<HASH> 100644
--- a/ask-sdk-core/tst/com/amazon/ask/builder/SkillConfigurationTest.java
+++ b/ask-sdk-core/tst/com/amazon/ask/builder/SkillConfigurationTest.java
@@ -93,4 +93,26 @@ public class SkillConfigurationTest {
assertEquals(skillId, config.getSkillId());
}
+ @Test
+ public void set_get_custom_user_agent() {
+ String customUserAgent = "foo";
+ SkillConfiguration config = SkillConfiguration.builder().withCustomUserAgent(customUserAgent).build();
+ assertEquals(customUserAgent, config.getCustomUserAgent());
+ }
+
+ @Test
+ public void append_single_user_agent() {
+ String customUserAgent = "foo";
+ SkillConfiguration config = SkillConfiguration.builder().appendCustomUserAgent(customUserAgent).build();
+ assertEquals(customUserAgent, config.getCustomUserAgent());
+ }
+
+ @Test
+ public void append_multiple_user_agents() {
+ String customUserAgent1 = "foo";
+ String customUserAgent2 = "bar";
+ SkillConfiguration config = SkillConfiguration.builder().appendCustomUserAgent(customUserAgent1).appendCustomUserAgent(customUserAgent2).build();
+ assertEquals("foo bar", config.getCustomUserAgent());
+ }
+
}
|
Allow SDK modules to append custom user agent (#<I>)
|
alexa_alexa-skills-kit-sdk-for-java
|
train
|
e354a073126e65f86cabde13d433dc5a15f27602
|
diff --git a/src/Provider/Pimple/ViewHelperContainerServiceProvider.php b/src/Provider/Pimple/ViewHelperContainerServiceProvider.php
index <HASH>..<HASH> 100644
--- a/src/Provider/Pimple/ViewHelperContainerServiceProvider.php
+++ b/src/Provider/Pimple/ViewHelperContainerServiceProvider.php
@@ -61,7 +61,7 @@ class ViewHelperContainerServiceProvider implements ServiceProviderInterface
$helpers = new Container();
$helpers['activeMenu'] = function () use ($c) {
- return new ActiveMenu($c['request.mvc'], $c['view.helper.activeMenu.class']);
+ return new ActiveMenu($c['request.mvc'], $c['request.http'], $c['view.helper.activeMenu.class']);
};
$c['assetVersions'] = function ($c) {
diff --git a/src/View/Helper/ActiveMenu/ActiveMenu.php b/src/View/Helper/ActiveMenu/ActiveMenu.php
index <HASH>..<HASH> 100644
--- a/src/View/Helper/ActiveMenu/ActiveMenu.php
+++ b/src/View/Helper/ActiveMenu/ActiveMenu.php
@@ -13,6 +13,7 @@ namespace Mendo\Mvc\View\Helper\ActiveMenu;
use Mendo\Mvc\View\Helper\ViewHelperInterface;
use Mendo\Mvc\Request\MvcRequest;
+use Mendo\Http\Request\HttpRequest;
/**
* @author Mathieu Decaffmeyer <mdecaffmeyer@gmail.com>
@@ -20,11 +21,13 @@ use Mendo\Mvc\Request\MvcRequest;
class ActiveMenu implements ViewHelperInterface
{
private $mvcRequest;
+ private $httpRequest;
private $class;
- public function __construct(MvcRequest $mvcRequest, $class = 'active')
+ public function __construct(MvcRequest $mvcRequest, HttpRequest $httpRequest, $class = 'active')
{
$this->mvcRequest = $mvcRequest;
+ $this->httpRequest = $httpRequest;
$this->class = $class;
}
@@ -47,27 +50,41 @@ class ActiveMenu implements ViewHelperInterface
public function activeMenu($route)
{
+ $queryData = [];
+ if (strpos($route, '?') !== false) {
+ list($route, $queryString) = explode('?', $route);
+ $parts = parse_url('?'.$queryString);
+ if (array_key_exists('query', $parts)) {
+ parse_str($parts['query'], $queryData);
+ }
+ }
+
$route = explode('/', $route);
+
+ $module = null;
+ $controller = null;
+ $action = null;
+ $params = [];
+
switch (count($route)) {
+ default:
+ $params = array_slice($route, 3);
+ $params = $this->makeKeyValuePairs($params);
case 3:
- $module = $route[0];
- $controller = $route[1];
$action = $route[2];
- break;
case 2:
- $module = $route[0];
$controller = $route[1];
- $action = null;
- break;
case 1:
$module = $route[0];
- $controller = null;
- $action = null;
break;
- default:
+ case 0:
throw new \InvalidArgumentException('$route invalid');
}
+ if ($queryData && array_diff($queryData, $this->httpRequest->getQuery())) {
+ return '';
+ }
+
if ($module !== $this->mvcRequest->getModule()) {
return '';
}
@@ -88,6 +105,14 @@ class ActiveMenu implements ViewHelperInterface
return '';
}
+ if (!$params) {
+ return $this->class;
+ }
+
+ if (array_diff($params, $this->mvcRequest->getParams())) {
+ return '';
+ }
+
return $this->class;
}
@@ -95,4 +120,18 @@ class ActiveMenu implements ViewHelperInterface
{
return $needle === '' || strpos($haystack, $needle) === 0;
}
+
+ private function makeKeyValuePairs(array $array)
+ {
+ $pairs = [];
+ $nb = count($array);
+ for ($i = 0; $i < $nb - 1; $i += 2) {
+ $pairs[$array[$i]] = $array[$i+1];
+ }
+ if ($i < $nb) {
+ $pairs[$array[$i]] = '';
+ }
+
+ return $pairs;
+ }
}
|
ActiveMenu view helper: allow to check if menu is active based on route params and query string
|
gobline_application
|
train
|
5cc767ec2244d9460eb3a66de834d51d3617d07c
|
diff --git a/bayes_opt/bayesian_optimization.py b/bayes_opt/bayesian_optimization.py
index <HASH>..<HASH> 100644
--- a/bayes_opt/bayesian_optimization.py
+++ b/bayes_opt/bayesian_optimization.py
@@ -38,16 +38,17 @@ def acq_max(ac, gp, y_max, restarts, bounds):
:return: x_max, The arg max of the acquisition function.
"""
+ # Start with the lower bound as the argmax
x_max = bounds[:, 0]
ei_max = 0
- for i in range(restarts):
- # Sample some points at random.
- x_try = np.asarray([np.random.uniform(x[0], x[1], size=1) for x in bounds]).T
+ x_tries = np.random.uniform(bounds[:, 0], bounds[:, 1],
+ size=(restarts, bounds.shape[0]))
+ for x_try in x_tries:
# Find the minimum of minus the acquisition function
res = minimize(lambda x: -ac(x.reshape(1, -1), gp=gp, y_max=y_max),
- x_try,
+ x_try.reshape(1, -1),
bounds=bounds,
method="L-BFGS-B")
|
Makes the optimization of the utility function a bit more elegant
|
fmfn_BayesianOptimization
|
train
|
22e9fc92b8427a843ea0196107fd10a8c5383af2
|
diff --git a/lib/OpenLayers/Layer/HTTPRequest.js b/lib/OpenLayers/Layer/HTTPRequest.js
index <HASH>..<HASH> 100644
--- a/lib/OpenLayers/Layer/HTTPRequest.js
+++ b/lib/OpenLayers/Layer/HTTPRequest.js
@@ -143,9 +143,10 @@ OpenLayers.Layer.HTTPRequest = OpenLayers.Class(OpenLayers.Layer, {
*/
redraw: function(force) {
if (force) {
- this.params["_olSalt"] = Math.random();
+ this.mergeNewParams({"_olSalt": Math.random()});
+ } else {
+ return OpenLayers.Layer.prototype.redraw.call(this);
}
- return OpenLayers.Layer.prototype.redraw.call(this);
},
/**
|
Don't change existing behavior for redraw(true)
|
openlayers_openlayers
|
train
|
a57c0808a623380b8d5f9c764247312e11c22648
|
diff --git a/Swat/SwatYesNoFlydown.php b/Swat/SwatYesNoFlydown.php
index <HASH>..<HASH> 100644
--- a/Swat/SwatYesNoFlydown.php
+++ b/Swat/SwatYesNoFlydown.php
@@ -1,6 +1,6 @@
<?php
-require_once('Swat/SwatFlydown.php');
+require_once 'Swat/SwatFlydown.php';
/**
* A flydown (aka combo-box) selection widget for a Yes/No option.
@@ -9,11 +9,16 @@ require_once('Swat/SwatFlydown.php');
* @license http://opensource.org/licenses/gpl-license.php GNU Public License
* @copyright silverorange 2005
*/
-class SwatYesNoFlydown extends SwatFlydown {
-
+class SwatYesNoFlydown extends SwatFlydown
+{
const NO = 1;
const YES = 2;
+ /**
+ * Initializes this yes/no flydown
+ *
+ * Sets the options of this flydown to be yes and no.
+ */
public function init() {
$this->options = array(self::NO => _S("No"),
self::YES => _S("Yes"));
@@ -21,19 +26,24 @@ class SwatYesNoFlydown extends SwatFlydown {
parent::init();
}
- public function getValueAsBoolean() {
+ /**
+ * Gets the value of this yes/no flywodn as a boolean
+ *
+ * If the value is not set, this methods returns null.
+ *
+ * @return boolean the value of this yes/no flywdown.
+ */
+ public function getValueAsBoolean()
+ {
switch ($this->value) {
- case self::NO:
- return false;
-
- case self::YES:
- return true;
-
- default:
- return null;
+ case self::NO:
+ return false;
+ case self::YES:
+ return true;
+ default:
+ return null;
}
}
-
}
?>
|
Updated documentation for init() method
svn commit r<I>
|
silverorange_swat
|
train
|
4f8a371a3068434733380bed41e1069977d3bd51
|
diff --git a/rules/end-event-required.js b/rules/end-event-required.js
index <HASH>..<HASH> 100644
--- a/rules/end-event-required.js
+++ b/rules/end-event-required.js
@@ -15,7 +15,7 @@ module.exports = utils => {
}
function check(node, reporter) {
- if (is(node, 'Process')) {
+ if (is(node, 'bpmn:Process')) {
if (!hasEndEvent(node)) {
reporter.report(node.id, ERROR);
}
|
chore(rules): assert for full name
|
bpmn-io_bpmnlint
|
train
|
0353b8b69b2f26f667a8ddcf29da3c80b73dff5c
|
diff --git a/lib/a11y-check.js b/lib/a11y-check.js
index <HASH>..<HASH> 100644
--- a/lib/a11y-check.js
+++ b/lib/a11y-check.js
@@ -35,7 +35,7 @@ function failureSummary(result, nodeData) {
if (!hasPassingPass && failingPasses.length > 0) {
var p = dqreConfiguration.data.failureSummaries[dqre.constants.type.PASS].failureMessage(failingPasses);
if (failMessage) {
- failMessage += "\n";
+ failMessage += '\n';
}
failMessage += p;
}
diff --git a/lib/export.js b/lib/export.js
index <HASH>..<HASH> 100644
--- a/lib/export.js
+++ b/lib/export.js
@@ -1,3 +1,4 @@
/*global dqre, global */
dqre.version = 'dev';
global.dqre = dqre;
+global.dqreConfiguration = dqreConfiguration;
diff --git a/test/unit/a11y-check.js b/test/unit/a11y-check.js
index <HASH>..<HASH> 100644
--- a/test/unit/a11y-check.js
+++ b/test/unit/a11y-check.js
@@ -67,13 +67,13 @@ describe('dqre.a11yCheck', function () {
dqreConfiguration.data.failureSummaries = {
FAIL: {
failureMessage: function anonymous(it) {
- var out = "Fix any of the following: \n";
+ var out = 'Fix any of the following: \n';
var arr1 = it;
if (arr1) {
var value, i1 = -1, l1 = arr1.length - 1;
while (i1 < l1) {
value = arr1[i1 += 1];
- out += " " + value + "\n";
+ out += ' ' + value + '\n';
}
}
return out;
@@ -81,13 +81,13 @@ describe('dqre.a11yCheck', function () {
},
PASS: {
failureMessage: function anonymous(it) {
- var out = "Fix all of the following: \n";
+ var out = 'Fix all of the following: \n';
var arr1 = it;
if (arr1) {
var value, i1 = -1, l1 = arr1.length - 1;
while (i1 < l1) {
value = arr1[i1 += 1];
- out += " " + value + "\n";
+ out += ' ' + value + '\n';
}
}
return out;
|
KSD-<I> fixing lint issues
|
dequelabs_axe-core
|
train
|
64daccac4f2c99d0ae5f9fbc79c704e96e941847
|
diff --git a/client/requests.go b/client/requests.go
index <HASH>..<HASH> 100644
--- a/client/requests.go
+++ b/client/requests.go
@@ -46,7 +46,7 @@ func NewPutCaRequest(config config.Config, caIdentifier, caType, cert, priv stri
}
caBody := models.CaBody{
ContentType: caType,
- Value: &ca,
+ Value: &ca,
}
return newCaRequest("PUT", config, caIdentifier, caBody)
diff --git a/commands/login.go b/commands/login.go
index <HASH>..<HASH> 100644
--- a/commands/login.go
+++ b/commands/login.go
@@ -33,6 +33,9 @@ func (cmd LoginCommand) Execute([]string) error {
token, err := actions.NewAuthToken(client.NewHttpClient(cfg.AuthURL), cfg).GetAuthToken(cmd.Username, cmd.Password)
if err != nil {
+ cfg.AccessToken = ""
+ cfg.RefreshToken = ""
+ config.WriteConfig(cfg)
return err
}
diff --git a/commands/login_test.go b/commands/login_test.go
index <HASH>..<HASH> 100644
--- a/commands/login_test.go
+++ b/commands/login_test.go
@@ -193,6 +193,7 @@ var _ = Describe("Login", func() {
Context("when credentials are invalid", func() {
var (
badUaaServer *Server
+ session *Session
)
BeforeEach(func() {
@@ -207,17 +208,28 @@ var _ = Describe("Login", func() {
}`),
),
)
- })
- It("fails to login", func() {
- setConfigAuthUrl(badUaaServer.URL())
+ cfg, _ := config.ReadConfig()
+ cfg.AuthURL = badUaaServer.URL()
+ cfg.AccessToken = "fake_token"
+ cfg.RefreshToken = "fake_refresh_token"
+ config.WriteConfig(cfg)
- session := runCommand("login", "-u", "user", "-p", "pass")
+ session = runCommand("login", "-u", "user", "-p", "pass")
+ })
+ It("fails to login", func() {
Eventually(session).Should(Exit(1))
Eventually(session.Err).Should(Say("The provided username and password combination are incorrect. Please validate your input and retry your request."))
Expect(badUaaServer.ReceivedRequests()).Should(HaveLen(1))
})
+
+ It("removes any existing tokens", func() {
+ Eventually(session).Should(Exit(1))
+ cfg, _ := config.ReadConfig()
+ Expect(cfg.AccessToken).To(BeEmpty())
+ Expect(cfg.RefreshToken).To(BeEmpty())
+ })
})
})
|
Remove the auth tokens upon failed login
[#<I>] Any existing user login session should be discarded if user attempts to log in again
|
cloudfoundry-incubator_credhub-cli
|
train
|
11dbd7ff0d107bec89bc2f76f17ca3c5de575688
|
diff --git a/src-modules/org/opencms/workplace/tools/sites/CmsSitesGlobalSettings.java b/src-modules/org/opencms/workplace/tools/sites/CmsSitesGlobalSettings.java
index <HASH>..<HASH> 100644
--- a/src-modules/org/opencms/workplace/tools/sites/CmsSitesGlobalSettings.java
+++ b/src-modules/org/opencms/workplace/tools/sites/CmsSitesGlobalSettings.java
@@ -36,6 +36,7 @@ import org.opencms.jsp.CmsJspActionElement;
import org.opencms.main.CmsException;
import org.opencms.main.OpenCms;
import org.opencms.site.CmsSite;
+import org.opencms.util.CmsFileUtil;
import org.opencms.widgets.CmsSelectWidget;
import org.opencms.widgets.CmsSelectWidgetOption;
import org.opencms.widgets.CmsVfsFileWidget;
@@ -210,17 +211,14 @@ public class CmsSitesGlobalSettings extends CmsWidgetDialog {
for (CmsSite site : sites) {
if (!((site.getSiteRoot() == null) || site.getSiteRoot().equals("") || site.getSiteRoot().equals("/"))) {
// is not null and not the root site => potential option
- if (site.getSiteRoot().startsWith(OpenCms.getSiteManager().getDefaultUri())) {
+ if (site.getSiteRoot().startsWith(
+ CmsFileUtil.removeTrailingSeparator(OpenCms.getSiteManager().getDefaultUri()))) {
// is the current default site use as default option
CmsSelectWidgetOption option = new CmsSelectWidgetOption(
site.getSiteRoot() + "/",
true,
site.getTitle());
defaultUriOptions.add(option);
- } else if (site.getSiteRoot().equals(OpenCms.getSiteManager().getWorkplaceServer())) {
- // is the current wp server use as default option
- CmsSelectWidgetOption option = new CmsSelectWidgetOption(site.getUrl() + "/", true, site.getTitle());
- wpServerOptions.add(option);
} else {
// no default, create a option
CmsSelectWidgetOption option = new CmsSelectWidgetOption(
@@ -228,8 +226,16 @@ public class CmsSitesGlobalSettings extends CmsWidgetDialog {
false,
site.getTitle());
defaultUriOptions.add(option);
- option = new CmsSelectWidgetOption(site.getUrl() + "/", false, site.getTitle());
+ }
+ if (site.getUrl().equals(OpenCms.getSiteManager().getWorkplaceServer())) {
+ // is the current wp server use as default option
+ CmsSelectWidgetOption option = new CmsSelectWidgetOption(site.getUrl(), true, site.getTitle());
wpServerOptions.add(option);
+ } else {
+ // no default, create a option
+ CmsSelectWidgetOption option = new CmsSelectWidgetOption(site.getUrl(), false, site.getTitle());
+ wpServerOptions.add(option);
+
}
}
}
|
Fixing issue where creating options for global settings were incorrect.
|
alkacon_opencms-core
|
train
|
dada7d66a48fed53e1ebb733b075276bbb68cde4
|
diff --git a/blocks/dock.js b/blocks/dock.js
index <HASH>..<HASH> 100644
--- a/blocks/dock.js
+++ b/blocks/dock.js
@@ -367,7 +367,7 @@ M.blocks.dock = {
var spacewidth = this.resize_block_space(placeholder);
var blocktitle = Y.Node.getDOMNode(this.cachedcontentnode.one('.title h2')).cloneNode(true);
- blocktitle.innerHTML = blocktitle.innerHTML.replace(/([a-zA-Z0-9])/g, "$1<br />");
+ blocktitle.innerHTML = blocktitle.innerHTML.replace(/(.)/g, "$1<br />");
var commands = this.cachedcontentnode.all('.title .commands');
var blockcommands = Y.Node.create('<div class="commands"></div>');
|
navigation MDL-<I> Changed regex to replace all characters not just a-zA-Z0-9 thanks for picking this up David
|
moodle_moodle
|
train
|
fd347d556b67eff6b8cf20e61d27372230d156db
|
diff --git a/tests/fixtures/imports_on_separate_lines/imports_on_separate_lines1_in.py b/tests/fixtures/imports_on_separate_lines/imports_on_separate_lines1_in.py
index <HASH>..<HASH> 100644
--- a/tests/fixtures/imports_on_separate_lines/imports_on_separate_lines1_in.py
+++ b/tests/fixtures/imports_on_separate_lines/imports_on_separate_lines1_in.py
@@ -1,6 +1,4 @@
-import math
-import sys
-import os
+import math, sys, os
from subprocess import Popen, PIPE
from myclas import MyClass
@@ -17,6 +15,4 @@ class the_class():
import math
def other_func():
- import os
- import sys
- import math
+ import os, sys, math
|
Fix fixture "imports_on_separate_lines1".
The fix had nothing to do and the input file was the same as the
output file.
|
spulec_pep8ify
|
train
|
76832720c483c155a8e03a14400836de4a886903
|
diff --git a/tests/ImageFileTest.php b/tests/ImageFileTest.php
index <HASH>..<HASH> 100644
--- a/tests/ImageFileTest.php
+++ b/tests/ImageFileTest.php
@@ -23,6 +23,25 @@ class ImageFileTest extends \PHPUnit_Framework_TestCase
$png->save();
}
+ public function testAddChunkMaxKey()
+ {
+ $key = str_repeat('x', Image::MAX_KEYWORD_BYTES);
+
+ $png = new ImageFile(__DIR__ . self::TEST_IMAGE_PATH);
+ $png->addITXtChunk($key, 'en', '');
+ }
+
+ /**
+ * @expectedException Exception
+ */
+ public function testAddChunkOverMaxKey()
+ {
+ $key = str_repeat('x', Image::MAX_KEYWORD_BYTES + 1);
+
+ $png = new ImageFile(__DIR__ . self::TEST_IMAGE_PATH);
+ $png->addITXtChunk($key, 'en', '');
+ }
+
public function testGetITXtChunkFromKeyMatch()
{
$png = new ImageFile(__DIR__ . self::TEST_ITXT_IMAGE_PATH);
|
Tests for edge cases of key length
|
UoMCS_php-png
|
train
|
5d7e00e12200c3147bf6663a8aa9503cb4c4d846
|
diff --git a/CHERRYPYTEAM.txt b/CHERRYPYTEAM.txt
index <HASH>..<HASH> 100644
--- a/CHERRYPYTEAM.txt
+++ b/CHERRYPYTEAM.txt
@@ -1 +1 @@
-See http://trac.cherrypy.org/cgi-bin/trac.cgi/wiki/CherryPyTeam
+See http://trac.cherrypy.org/wiki/CherryPyTeam
diff --git a/cherrypy/_cpconfig.py b/cherrypy/_cpconfig.py
index <HASH>..<HASH> 100644
--- a/cherrypy/_cpconfig.py
+++ b/cherrypy/_cpconfig.py
@@ -152,7 +152,7 @@ def _load(configFile = None):
if section not in configMap:
configMap[section] = {}
for option in configParser.options(section):
- value = _cputil.unrepr(configParser.get(section, option))
+ value = _cputil.unreprWrapper(configParser.get(section, option))
configMap[section][option] = value
def outputConfigMap():
diff --git a/cherrypy/_cputil.py b/cherrypy/_cputil.py
index <HASH>..<HASH> 100644
--- a/cherrypy/_cputil.py
+++ b/cherrypy/_cputil.py
@@ -137,3 +137,15 @@ class Builder:
def unrepr(s):
return Builder().build(getObj(s))
+def unreprWrapper(someString):
+ """unrepr raises an exception if the string is not a recognized type this function returns the
+ source string in that case
+
+ '123abc' would cause unrepr to raise an exception
+ """
+ if not someString:
+ return someString
+ try:
+ return unrepr(someString)
+ except:
+ retur
|
replaced unrepr with smarter unreprWrapper, updated wiki link in CHERRYPYTEAM.txt
|
cherrypy_cheroot
|
train
|
6cfe1ec76f1acf20186e60ac6b0ccc3ff3db4a97
|
diff --git a/hazelcast-client/src/main/java/com/hazelcast/client/impl/HazelcastClientInstanceImpl.java b/hazelcast-client/src/main/java/com/hazelcast/client/impl/HazelcastClientInstanceImpl.java
index <HASH>..<HASH> 100644
--- a/hazelcast-client/src/main/java/com/hazelcast/client/impl/HazelcastClientInstanceImpl.java
+++ b/hazelcast-client/src/main/java/com/hazelcast/client/impl/HazelcastClientInstanceImpl.java
@@ -733,10 +733,10 @@ public class HazelcastClientInstanceImpl implements HazelcastInstance, Serializa
proxyManager.destroy();
connectionManager.shutdown();
clusterService.shutdown();
- executionService.shutdown();
partitionService.stop();
transactionManager.shutdown();
invocationService.shutdown();
+ executionService.shutdown();
listenerService.shutdown();
nearCacheManager.destroyAllNearCaches();
if (discoveryService != null) {
diff --git a/hazelcast-client/src/main/java/com/hazelcast/client/proxy/ClientReliableTopicProxy.java b/hazelcast-client/src/main/java/com/hazelcast/client/proxy/ClientReliableTopicProxy.java
index <HASH>..<HASH> 100644
--- a/hazelcast-client/src/main/java/com/hazelcast/client/proxy/ClientReliableTopicProxy.java
+++ b/hazelcast-client/src/main/java/com/hazelcast/client/proxy/ClientReliableTopicProxy.java
@@ -16,6 +16,7 @@
package com.hazelcast.client.proxy;
+import com.hazelcast.client.HazelcastClientNotActiveException;
import com.hazelcast.client.config.ClientReliableTopicConfig;
import com.hazelcast.client.impl.HazelcastClientInstanceImpl;
import com.hazelcast.client.spi.ClientProxy;
@@ -40,13 +41,14 @@ import com.hazelcast.topic.TopicOverloadException;
import com.hazelcast.topic.TopicOverloadPolicy;
import com.hazelcast.topic.impl.reliable.ReliableMessageListenerAdapter;
import com.hazelcast.topic.impl.reliable.ReliableTopicMessage;
+import com.hazelcast.util.ExceptionUtil;
import com.hazelcast.util.UuidUtil;
import com.hazelcast.version.MemberVersion;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
-import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
+import java.util.concurrent.RejectedExecutionException;
import static com.hazelcast.ringbuffer.impl.RingbufferService.TOPIC_RB_PREFIX;
import static com.hazelcast.topic.impl.reliable.ReliableTopicService.SERVICE_NAME;
@@ -272,7 +274,9 @@ public class ClientReliableTopicProxy<E> extends ClientProxy implements ITopic<E
return;
}
- if (t instanceof ExecutionException && t.getCause() instanceof StaleSequenceException) {
+ t = ExceptionUtil.peel(t);
+
+ if (t instanceof StaleSequenceException) {
// StaleSequenceException.getHeadSeq() is not available on the client-side, see #7317
long remoteHeadSeq = ringbuffer.headSequence();
@@ -300,6 +304,11 @@ public class ClientReliableTopicProxy<E> extends ClientProxy implements ITopic<E
logger.finest("Terminating MessageListener " + listener + " on topic: " + name + ". "
+ "Reason: Topic is destroyed");
}
+ } else if (t instanceof HazelcastClientNotActiveException || t instanceof RejectedExecutionException) {
+ if (logger.isFinestEnabled()) {
+ logger.finest("Terminating MessageListener " + listener + " on topic: " + name + ". "
+ + "Reason: HazelcastClient is shutting down");
+ }
} else {
logger.warning("Terminating MessageListener " + listener + " on topic: " + name + ". "
+ "Reason: Unhandled exception, message: " + t.getMessage(), t);
@@ -337,5 +346,4 @@ public class ClientReliableTopicProxy<E> extends ClientProxy implements ITopic<E
}
}
}
-
}
|
Handle HazelcastClientNotActiveException in ClientReliableTopicProxy
Because of shutdown order of events, invocation was getting
RejectedExecutionException from EventService rather than
HazelcastClientNotActiveException.
Both the shutdown order fix and handling the Exception is added to pr.
fixes #<I>
|
hazelcast_hazelcast
|
train
|
4591691ded83ae0ab217f98d73c7f66e4fd19a88
|
diff --git a/src/Application.js b/src/Application.js
index <HASH>..<HASH> 100644
--- a/src/Application.js
+++ b/src/Application.js
@@ -2,7 +2,7 @@
* @Author: mjreich
* @Date: 2015-05-18 17:03:15
* @Last Modified 2016-02-12
-* @Last Modified time: 2016-02-12 11:57:01
+* @Last Modified time: 2016-02-12 12:08:06
*/
import _ from 'underscore'
@@ -271,6 +271,7 @@ export default class Application extends Dispatcher {
plugin = plugin.default
plugin = new plugin(this);
} catch(e) {
+ this.log.warn('Error booting module', (plugin._packageJson ? plugin._packageJson.name : plugin.name))
this.log.warn(e.stack)
}
return Promise.resolve(plugin)
diff --git a/src/PluginManager.js b/src/PluginManager.js
index <HASH>..<HASH> 100644
--- a/src/PluginManager.js
+++ b/src/PluginManager.js
@@ -2,7 +2,7 @@
* @Author: mike
* @Date: 2015-05-18 17:05:09
* @Last Modified 2016-02-12
-* @Last Modified time: 2016-02-12 12:00:55
+* @Last Modified time: 2016-02-12 12:19:23
*/
'use strict';
@@ -24,7 +24,7 @@ class PluginManager {
constructor(app, options) {
this.app = app
- options = options || {}
+ this.options = options = options || {}
this._loaded = []
@@ -91,7 +91,8 @@ class PluginManager {
*/
loadPackage(name, directory, packages) {
if(_.contains(this._loaded, name)) return
- if(name.indexOf("@nxus/") < 0 && name.indexOf("nxus-") < 0) return
+ if(name.indexOf("@nxus/") == -1 && name.indexOf("nxus-") == -1) return
+ if(name.indexOf("@nxus/core") > -1) return
this.app.log.debug('Loading node module ' + name)
var pkg
this._loaded.push(name)
@@ -101,11 +102,12 @@ class PluginManager {
if(!pkg) return
var peerDeps = (pkg._packageJson && pkg._packageJson.peerDependencies) || {}
for(let dep in peerDeps) {
- this.loadPackage(dep, fs.realpathSync(directory+"/../..")+"/"+dep, packages)
+ this.loadPackage(dep, this.options.appDir+"/node_modules/"+dep, packages)
}
var getPackages = (packages, targets, directory) => {
targets.forEach((t) => {
if(t.indexOf("@nxus/") < 0 && t.indexOf("nxus-") < 0) return
+ if(t.indexOf("@nxus/core") > -1) return
var innerDir = path.join(directory, 'node_modules') + '/' + t
var innerPkg = this.accumulatePackage(packages, innerDir)
if(!innerPkg) return
|
Improved logging and fixed issue with peerDep modules not booting
|
nxus_core
|
train
|
025d3ed2fe6bf086f2e716b95f1446397de0b8b7
|
diff --git a/logdissect/__init__.py b/logdissect/__init__.py
index <HASH>..<HASH> 100644
--- a/logdissect/__init__.py
+++ b/logdissect/__init__.py
@@ -1,4 +1,4 @@
-__version__ = '1.4-dev'
+__version__ = '1.7-dev'
__author__ = 'Dan Persons <dpersonsdev@gmail.com>'
__license__ = 'MIT License'
__github__ = 'https://github.com/dogoncouch/logdissect'
|
Update version: <I>-dev (dev branch for 2.x)
|
dogoncouch_logdissect
|
train
|
839b5bc30b96c2e30a0c54811192ad4638fe6c20
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -23,6 +23,7 @@ class Stopwatch {
this.total = 0;
this.paused = false;
this.started = null;
+ this.parallels = 0;
return this;
}
add(time) {
@@ -31,6 +32,7 @@ class Stopwatch {
return this;
}
start() {
+ this.parallels++;
if (this.started === null) {
this.paused ? (this.paused = false) : (this.lap = 0);
this.started = process.hrtime();
@@ -61,6 +63,7 @@ class Stopwatch {
}
pause() {
if (this.started !== null) {
+ if (--this.parallels) return this;
this.lap += this.time();
this.paused = true;
this.started = null;
@@ -68,6 +71,7 @@ class Stopwatch {
return this;
}
stop() {
+ if (--this.parallels) return this;
if (this.started || this.paused) {
let time = this.lap += this.time();
this.laps += 1;
|
feat: parallel laps
The `start` method can be called many times before `stop` is called.
Every `start` call must be balanced with a `stop` call before the lap is considered finished.
|
aleclarson_elaps
|
train
|
1a8e761b43ed9ca05a5b052c599a2fe318151251
|
diff --git a/tests/Unit/App/Http/Middleware/Transformers/Api/V1/ArticlesTransformerTest.php b/tests/Unit/App/Http/Middleware/Transformers/Api/V1/ArticlesTransformerTest.php
index <HASH>..<HASH> 100644
--- a/tests/Unit/App/Http/Middleware/Transformers/Api/V1/ArticlesTransformerTest.php
+++ b/tests/Unit/App/Http/Middleware/Transformers/Api/V1/ArticlesTransformerTest.php
@@ -99,7 +99,7 @@ class ArticlesTransformerTest extends TestCase
* Сброс статуса Записи, если не указаны категории.
* @return void
*/
- public function testResetStateArticleWithoutCategoriesToUnpublished(): void
+ public function testResetStateArticleWithoutCategoriesToDraft(): void
{
$request = $this->createRequestWithCustomData([
'title' => 'Some title',
@@ -111,7 +111,7 @@ class ArticlesTransformerTest extends TestCase
$transformer = $this->createTransformer($request);
$transformed = $transformer->default();
- $this->assertSame(1, $transformed['state']);
+ $this->assertSame(0, $transformed['state']);
}
/**
@@ -121,7 +121,7 @@ class ArticlesTransformerTest extends TestCase
* Сброс статуса Записи, если статус не указан.
* @return void
*/
- public function testResetStateArticleWithoutStateToUnpublished(): void
+ public function testResetStateArticleWithoutStateToDraft(): void
{
$request = $this->createRequestWithCustomData([
'title' => 'Some title',
@@ -131,7 +131,7 @@ class ArticlesTransformerTest extends TestCase
$transformer = $this->createTransformer($request);
$transformed = $transformer->default();
- $this->assertSame(1, $transformed['state']);
+ $this->assertSame(0, $transformed['state']);
}
/**
|
Update ArticlesTransformerTest.php
|
russsiq_bixbite
|
train
|
1d6b8d6fc4a623865ba315a9fb444d04c51aadb9
|
diff --git a/src/main/java/net/bootsfaces/component/dataTable/DataTable.java b/src/main/java/net/bootsfaces/component/dataTable/DataTable.java
index <HASH>..<HASH> 100644
--- a/src/main/java/net/bootsfaces/component/dataTable/DataTable.java
+++ b/src/main/java/net/bootsfaces/component/dataTable/DataTable.java
@@ -571,7 +571,8 @@ dataTableProperties
}
/**
- * Set the map cntaining the DataTable properties for this instance.
+ * Set the map containing the DataTable properties for this instance. <p>
+ * EXPERIMENTAL: Does not support multiple DataTables on the same page.
* @param _dataTableProperties The map
*/
public void setDataTableProperties(Map<DataTablePropertyType, Object> _dataTableProperties){
diff --git a/src/main/java/net/bootsfaces/component/dataTable/DataTableRenderer.java b/src/main/java/net/bootsfaces/component/dataTable/DataTableRenderer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/net/bootsfaces/component/dataTable/DataTableRenderer.java
+++ b/src/main/java/net/bootsfaces/component/dataTable/DataTableRenderer.java
@@ -203,7 +203,9 @@ public class DataTableRenderer extends CoreRenderer {
//# Get instance of wrapper, and replace it with the unwrapped table.
"var wrapper = $('#" + clientId + "_wrapper');" +
"wrapper.replaceWith(element);" +
- "var table = element.DataTable();", null);
+ "var table = element.DataTable();" +
+ "var workInProgressErrorMessage = 'Multiple DataTables on the same page are not yet supported when using " +
+ "dataTableProperties attribute; Could not save state';", null);
//# Use DataTable API to set initial state of the table display
rw.writeText("table.page("+page+");" +
"table.search("+searchTerm+");" +
@@ -212,18 +214,24 @@ public class DataTableRenderer extends CoreRenderer {
//# Event setup: http://datatables.net/reference/event/page
rw.writeText("element.on('page.dt', function(){" +
"var info = table.page.info();" +
+ "try {" +
"BsF.ajax.callAjax(this, event, null, null, null, " +
"'" + DataTablePropertyType.currentPage + ":'+info.page);" +
+ "} catch(e) { console.warn(workInProgressErrorMessage, e); }" +
"});", null);
//# Event setup: https://datatables.net/reference/event/length
rw.writeText("element.on('length.dt', function(e, settings, len) {" +
+ "try {" +
"BsF.ajax.callAjax(this, event, null, null, null, " +
"'" + DataTablePropertyType.pageLength + ":'+len);" +
+ "} catch(e) { console.warn(workInProgressErrorMessage, e); }" +
"});", null);
//# Event setup: https://datatables.net/reference/event/search
rw.writeText("element.on('search.dt', function() {" +
+ "try {" +
"BsF.ajax.callAjax(this, event, null, null, null, " +
"'" + DataTablePropertyType.searchTerm + ":'+table.search());" +
+ "} catch(e) { console.warn(workInProgressErrorMessage, e); }" +
"});", null);
}
//# End enclosure
|
#<I> - add some javascript error handling and logging. update the setter to explain this is still an experimental feature and yet lacks the ability to work when multiple data tables are on the same page.
|
TheCoder4eu_BootsFaces-OSP
|
train
|
fd18f6f093db2fd360968f1b624ad83938f86ef1
|
diff --git a/openquake/calculators/event_based.py b/openquake/calculators/event_based.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/event_based.py
+++ b/openquake/calculators/event_based.py
@@ -287,6 +287,9 @@ class EventBasedCalculator(base.HazardCalculator):
store_rlzs_by_grp(self.datastore)
self.init_logic_tree(self.csm.info)
self._store_ruptures(srcs_by_grp)
+ nr = len(self.datastore['ruptures'])
+ ne = len(self.datastore['events'])
+ logging.info('Stored {:,d} ruptures and {:,d} events'.format(nr, ne))
def genargs():
ruptures = []
|
More logging [skip CI]
|
gem_oq-engine
|
train
|
048176ee981b322c1eaaac1bb18baf072346eafc
|
diff --git a/spec/puppet-lint/lexer/string_slurper_spec.rb b/spec/puppet-lint/lexer/string_slurper_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/puppet-lint/lexer/string_slurper_spec.rb
+++ b/spec/puppet-lint/lexer/string_slurper_spec.rb
@@ -229,6 +229,20 @@ describe PuppetLint::Lexer::StringSlurper do
])
end
end
+
+ context 'an interpolation with a complex function chain' do
+ let(:string) { '${key} ${flatten([$value]).join("\nkey ")}"' }
+
+ it 'keeps the whole function chain in a single interpolation segment' do
+ expect(segments).to eq([
+ [:STRING, ''],
+ [:INTERP, 'key'],
+ [:STRING, ' '],
+ [:INTERP, 'flatten([$value]).join("\nkey ")'],
+ [:STRING, ''],
+ ])
+ end
+ end
end
end
end
diff --git a/spec/puppet-lint/lexer_spec.rb b/spec/puppet-lint/lexer_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/puppet-lint/lexer_spec.rb
+++ b/spec/puppet-lint/lexer_spec.rb
@@ -715,6 +715,117 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
expect(manifest).to eq('"foo$bar$"')
end
end
+
+ context 'an interpolation with a complex function chain' do
+ let(:segments) do
+ [
+ [:STRING, ''],
+ [:INTERP, 'key'],
+ [:STRING, ' '],
+ [:INTERP, 'flatten([$value]).join("\nkey ")'],
+ [:STRING, ''],
+ ]
+ end
+
+ it 'creates a tokenised string' do
+ expect(tokens).to have(15).tokens
+
+ expect(tokens[0]).to have_attributes(
+ :type => :DQPRE,
+ :value => '',
+ :line => 1,
+ :column => 1
+ )
+ expect(tokens[1]).to have_attributes(
+ :type => :VARIABLE,
+ :value => 'key',
+ :line => 1,
+ :column => 4
+ )
+ expect(tokens[2]).to have_attributes(
+ :type => :DQMID,
+ :value => ' ',
+ :line => 1,
+ :column => 7
+ )
+ expect(tokens[3]).to have_attributes(
+ :type => :FUNCTION_NAME,
+ :value => 'flatten',
+ :line => 1,
+ :column => 11
+ )
+ expect(tokens[4]).to have_attributes(
+ :type => :LPAREN,
+ :value => '(',
+ :line => 1,
+ :column => 18
+ )
+ expect(tokens[5]).to have_attributes(
+ :type => :LBRACK,
+ :value => '[',
+ :line => 1,
+ :column => 19
+ )
+ expect(tokens[6]).to have_attributes(
+ :type => :VARIABLE,
+ :value => 'value',
+ :line => 1,
+ :column => 20
+ )
+ expect(tokens[7]).to have_attributes(
+ :type => :RBRACK,
+ :value => ']',
+ :line => 1,
+ :column => 26
+ )
+ expect(tokens[8]).to have_attributes(
+ :type => :RPAREN,
+ :value => ')',
+ :line => 1,
+ :column => 27
+ )
+ expect(tokens[9]).to have_attributes(
+ :type => :DOT,
+ :value => '.',
+ :line => 1,
+ :column => 28
+ )
+ expect(tokens[10]).to have_attributes(
+ :type => :FUNCTION_NAME,
+ :value => 'join',
+ :line => 1,
+ :column => 29
+ )
+ expect(tokens[11]).to have_attributes(
+ :type => :LPAREN,
+ :value => '(',
+ :line => 1,
+ :column => 33
+ )
+ expect(tokens[12]).to have_attributes(
+ :type => :STRING,
+ :value => '\nkey ',
+ :line => 1,
+ :column => 34
+ )
+ expect(tokens[13]).to have_attributes(
+ :type => :RPAREN,
+ :value => ')',
+ :line => 1,
+ :column => 42
+ )
+ expect(tokens[14]).to have_attributes(
+ :type => :DQPOST,
+ :value => '',
+ :line => 1,
+ :column => 43
+ )
+ end
+
+ it 'can render the result back into a manifest' do
+ expect(manifest).to eq('"${key} ${flatten([$value]).join("\nkey ")}"')
+ end
+ end
end
context ':STRING / :DQ' do
|
Add specs to cover #<I>
|
rodjek_puppet-lint
|
train
|
126c4409909266fd6f8ba5603b71f27458409b82
|
diff --git a/source/rafcon/utils/hashable.py b/source/rafcon/utils/hashable.py
index <HASH>..<HASH> 100644
--- a/source/rafcon/utils/hashable.py
+++ b/source/rafcon/utils/hashable.py
@@ -14,25 +14,25 @@ import hashlib
class Hashable(object):
@staticmethod
- def update_hash_from_dict(obj_hash, object):
+ def update_hash_from_dict(obj_hash, object_):
"""Updates an existing hash object with another Hashable, list, set, tuple, dict or stringifyable object
:param obj_hash: The hash object (see Python hashlib documentation)
- :param object: The value that should be added to the hash (can be another Hashable or a dictionary)
+ :param object_: The value that should be added to the hash (can be another Hashable or a dictionary)
"""
- if isinstance(object, Hashable):
- object.update_hash(obj_hash)
- elif isinstance(object, (list, set, tuple)):
- if isinstance(object, set): # A set is not ordered
- object = sorted(object)
- for element in object:
+ if isinstance(object_, Hashable):
+ object_.update_hash(obj_hash)
+ elif isinstance(object_, (list, set, tuple)):
+ if isinstance(object_, set): # A set is not ordered
+ object_ = sorted(object_)
+ for element in object_:
Hashable.update_hash_from_dict(obj_hash, element)
- elif isinstance(object, dict):
- for key in sorted(object.keys()): # A dict is not ordered
+ elif isinstance(object_, dict):
+ for key in sorted(object_.keys()): # A dict is not ordered
Hashable.update_hash_from_dict(obj_hash, key)
- Hashable.update_hash_from_dict(obj_hash, object[key])
+ Hashable.update_hash_from_dict(obj_hash, object_[key])
else:
- obj_hash.update(str(object))
+ obj_hash.update(str(object_))
def update_hash(self, obj_hash):
"""Should be implemented by derived classes to update the hash with their data fields
|
fix(Hashable): Do not shadow builtin name
Use "object_" instead of "object"
|
DLR-RM_RAFCON
|
train
|
6a5cfabbf71e7bc49e67331ad2ac53f3a20ced08
|
diff --git a/app/controllers/sail/settings_controller.rb b/app/controllers/sail/settings_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/sail/settings_controller.rb
+++ b/app/controllers/sail/settings_controller.rb
@@ -30,14 +30,15 @@ module Sail
def switcher
respond_to do |format|
format.json do
- render json: {
- value: Sail::Setting.switcher(positive: s_params[:positive],
- negative: s_params[:negative],
- throttled_by: s_params[:throttled_by])
- }
-
- rescue Sail::Setting::UnexpectedCastType
- head(:bad_request)
+ begin
+ render json: {
+ value: Sail::Setting.switcher(positive: s_params[:positive],
+ negative: s_params[:negative],
+ throttled_by: s_params[:throttled_by])
+ }
+ rescue Sail::Setting::UnexpectedCastType
+ head(:bad_request)
+ end
end
end
end
|
Make rescue block syntax compliant with Ruby < <I>
|
vinistock_sail
|
train
|
7e71c4607f213770b95579cd29292002687a740b
|
diff --git a/util/common_task.py b/util/common_task.py
index <HASH>..<HASH> 100644
--- a/util/common_task.py
+++ b/util/common_task.py
@@ -33,6 +33,7 @@ def deactivate_services(user):
if authinfo.forum_username != "":
ForumManager.disable_user(authinfo.forum_username)
AuthServicesInfoManager.update_user_forum_info("", "", user)
+ AuthServicesInfoManager.update_main_char_Id("", user)
def generate_corp_group_name(corpname):
|
Added removal of main api when deactivated services
|
allianceauth_allianceauth
|
train
|
3f70195e1cdf1a2f2bbf570efa22a75b515f1e88
|
diff --git a/django_airavata/wagtailapps/base/blocks.py b/django_airavata/wagtailapps/base/blocks.py
index <HASH>..<HASH> 100644
--- a/django_airavata/wagtailapps/base/blocks.py
+++ b/django_airavata/wagtailapps/base/blocks.py
@@ -150,8 +150,8 @@ class CustomEmbedBlock(StructBlock):
class Meta:
icon = "fa-link"
template = "blocks/embed_block.html"
- help_text = ("Insert an embed URL e.g "
- "https://www.youtube.com/embed/SGJFWirQ3ks")
+ help_text = ("Insert a youtube URL e.g "
+ "https://www.youtube.com/watch?v=SGJFWirQ3ks")
class CssCommentBlock(StructBlock):
|
Update comment, youtube embed urls don't work
|
apache_airavata-django-portal
|
train
|
962b9c2937b10b50f9231006ef58a16b6887a815
|
diff --git a/lib/logstasher/log_subscriber.rb b/lib/logstasher/log_subscriber.rb
index <HASH>..<HASH> 100644
--- a/lib/logstasher/log_subscriber.rb
+++ b/lib/logstasher/log_subscriber.rb
@@ -13,7 +13,8 @@ module LogStasher
data.merge! extract_exception(payload)
data.merge! extract_custom_fields(payload)
- event = LogStash::Event.new('@fields' => data, '@tags' => ['request'])
+ tag = payload[:exception] ? 'exception' : 'request'
+ event = LogStash::Event.new('@fields' => data, '@tags' => [tag])
event.tags << 'exception' if payload[:exception]
LogStasher.logger << event.to_json + "\n"
end
|
Prevent error when seeing an exception thrown by Rails.
|
shadabahmed_logstasher
|
train
|
586c346059f263aa72242a61f863396c56181c8f
|
diff --git a/actionpack/lib/action_dispatch/journey/router.rb b/actionpack/lib/action_dispatch/journey/router.rb
index <HASH>..<HASH> 100644
--- a/actionpack/lib/action_dispatch/journey/router.rb
+++ b/actionpack/lib/action_dispatch/journey/router.rb
@@ -115,7 +115,7 @@ module ActionDispatch
def get_routes_as_head(routes)
precedence = (routes.map(&:precedence).max || 0) + 1
- routes = routes.select { |r|
+ routes.select { |r|
r.verb === "GET" && !(r.verb === "HEAD")
}.map! { |r|
Route.new(r.name,
@@ -126,8 +126,6 @@ module ActionDispatch
route.precedence = r.precedence + precedence
end
}
- routes.flatten!
- routes
end
end
end
|
Remove unnecessary flatten! method call.
|
rails_rails
|
train
|
b390f6da558e14dc050c5858b10d889e96484fa5
|
diff --git a/pyroSAR/sqlite_util.py b/pyroSAR/sqlite_util.py
index <HASH>..<HASH> 100644
--- a/pyroSAR/sqlite_util.py
+++ b/pyroSAR/sqlite_util.py
@@ -30,11 +30,19 @@ def sqlite_setup(driver=':memory:', extensions=None):
class __Handler(object):
- def __init__(self, driver, extensions):
+ def __init__(self, driver=':memory:', extensions=None):
self.conn = sqlite3.connect(driver)
self.conn.enable_load_extension(True)
- for ext in extensions:
- self.load_extension(ext)
+ self.extensions = []
+ if isinstance(extensions, list):
+ for ext in extensions:
+ self.load_extension(ext)
+
+ @property
+ def version(self):
+ spatialite_cursor = self.conn.execute('''SELECT spatialite_version()''')
+ spatialite_version = spatialite_cursor.fetchall()[0][0].encode('ascii')
+ return {'sqlite': sqlite3.version, 'spatialite': spatialite_version}
def get_tablenames(self):
cursor = self.conn.execute('''SELECT * FROM sqlite_master WHERE type="table"''')
@@ -48,6 +56,9 @@ class __Handler(object):
try:
self.conn.load_extension(option)
select = option
+ self.extensions.append(option)
+ print('loading extension {0} as {1}'.format(extension, option))
+ break
except sqlite3.OperationalError:
continue
if select is None:
@@ -62,3 +73,4 @@ class __Handler(object):
raise RuntimeError('no library found for extension {}'.format(extension))
print('loading extension {0} as {1}'.format(extension, ext_mod))
self.conn.load_extension(ext_mod)
+ self.extensions.append(ext_mod)
|
handler class improvements and fixes:
- fixed error in looping through spatialite library options
- introduced new attributes extensions and version
- added parameter defaults to __init__
|
johntruckenbrodt_spatialist
|
train
|
ff96d69c8c00e41f5794eeffd0af4a69802e7192
|
diff --git a/lib/brainstem/concerns/controller_dsl.rb b/lib/brainstem/concerns/controller_dsl.rb
index <HASH>..<HASH> 100644
--- a/lib/brainstem/concerns/controller_dsl.rb
+++ b/lib/brainstem/concerns/controller_dsl.rb
@@ -65,8 +65,8 @@ module Brainstem
# whereas setting it within an action context will force that action to
# be undocumented.
#
- def nodoc!(_description = nil)
- configuration[brainstem_params_context][:nodoc] = true
+ def nodoc!(description = true)
+ configuration[brainstem_params_context][:nodoc] = description
end
#
@@ -75,8 +75,8 @@ module Brainstem
# will force the controller to be undocumented, whereas setting it
# within an action context will force that action to be undocumented.
#
- def internal!(_description = nil)
- configuration[brainstem_params_context][:internal] = true
+ def internal!(description = true)
+ configuration[brainstem_params_context][:internal] = description
end
#
diff --git a/lib/brainstem/concerns/presenter_dsl.rb b/lib/brainstem/concerns/presenter_dsl.rb
index <HASH>..<HASH> 100644
--- a/lib/brainstem/concerns/presenter_dsl.rb
+++ b/lib/brainstem/concerns/presenter_dsl.rb
@@ -47,12 +47,12 @@ module Brainstem
configuration[:description] = options.merge(info: str)
end
- def nodoc!(_description = nil)
- configuration[:nodoc] = true
+ def nodoc!(description = true)
+ configuration[:nodoc] = description
end
- def internal!(_description = nil)
- configuration[:internal] = true
+ def internal!(description = true)
+ configuration[:internal] = description
end
#
diff --git a/spec/brainstem/concerns/controller_dsl_spec.rb b/spec/brainstem/concerns/controller_dsl_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/brainstem/concerns/controller_dsl_spec.rb
+++ b/spec/brainstem/concerns/controller_dsl_spec.rb
@@ -13,21 +13,37 @@ module Brainstem
end
describe ".nodoc!" do
- it "sets the config nodoc to true" do
+ it "sets the config nodoc to passed in description" do
subject.brainstem_params do
nodoc! "Description for why these are nodoc"
end
+ expect(subject.configuration[:_default][:nodoc]).to eq "Description for why these are nodoc"
+ end
+
+ it "sets the config nodoc to default value (true)" do
+ subject.brainstem_params do
+ nodoc!
+ end
+
expect(subject.configuration[:_default][:nodoc]).to eq true
end
end
describe ".internal!" do
- it "sets the config internal to true" do
+ it "sets the config internal to passed in description" do
subject.brainstem_params do
internal! "Description for why these are internal docs"
end
+ expect(subject.configuration[:_default][:internal]).to eq "Description for why these are internal docs"
+ end
+
+ it "sets the config internal to default value (true)" do
+ subject.brainstem_params do
+ internal!
+ end
+
expect(subject.configuration[:_default][:internal]).to eq true
end
end
diff --git a/spec/brainstem/concerns/presenter_dsl_spec.rb b/spec/brainstem/concerns/presenter_dsl_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/brainstem/concerns/presenter_dsl_spec.rb
+++ b/spec/brainstem/concerns/presenter_dsl_spec.rb
@@ -125,21 +125,25 @@ describe Brainstem::Concerns::PresenterDSL do
end
describe 'the nodoc! method' do
- before do
+ it "is stored in the configuration" do
presenter_class.nodoc! "Description for why these are nodoc"
+ expect(presenter_class.configuration[:nodoc]).to eq "Description for why these are nodoc"
end
- it "is stored in the configuration" do
+ it "is defaults to true" do
+ presenter_class.nodoc!
expect(presenter_class.configuration[:nodoc]).to be true
end
end
describe 'the internal! method' do
- before do
+ it "is stored in the configuration" do
presenter_class.internal! "Description for why these are internal docs"
+ expect(presenter_class.configuration[:internal]).to eq "Description for why these are internal docs"
end
- it "is stored in the configuration" do
+ it "is defaults to true" do
+ presenter_class.internal!
expect(presenter_class.configuration[:internal]).to be true
end
end
|
Save description in the configuration for nodoc! and internal!
|
mavenlink_brainstem
|
train
|
36fdbb25c7885585f7b953bc9475a33483bb4dbf
|
diff --git a/app/controllers/api/v1/owners_controller.rb b/app/controllers/api/v1/owners_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/api/v1/owners_controller.rb
+++ b/app/controllers/api/v1/owners_controller.rb
@@ -1,3 +1,5 @@
+require 'yaml'
+
class Api::V1::OwnersController < Api::BaseController
skip_before_filter :verify_authenticity_token, :only => [:create, :destroy]
diff --git a/app/controllers/api/v1/rubygems_controller.rb b/app/controllers/api/v1/rubygems_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/api/v1/rubygems_controller.rb
+++ b/app/controllers/api/v1/rubygems_controller.rb
@@ -1,3 +1,5 @@
+require 'yaml'
+
class Api::V1::RubygemsController < Api::BaseController
skip_before_filter :verify_authenticity_token, :only => [:create, :yank, :unyank]
diff --git a/app/controllers/api/v1/searches_controller.rb b/app/controllers/api/v1/searches_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/api/v1/searches_controller.rb
+++ b/app/controllers/api/v1/searches_controller.rb
@@ -1,3 +1,5 @@
+require 'yaml'
+
class Api::V1::SearchesController < Api::BaseController
skip_before_filter :verify_authenticity_token
diff --git a/app/controllers/api/v1/web_hooks_controller.rb b/app/controllers/api/v1/web_hooks_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/api/v1/web_hooks_controller.rb
+++ b/app/controllers/api/v1/web_hooks_controller.rb
@@ -1,3 +1,5 @@
+require 'yaml'
+
class Api::V1::WebHooksController < Api::BaseController
skip_before_filter :verify_authenticity_token
diff --git a/test/functional/api/v1/rubygems_controller_test.rb b/test/functional/api/v1/rubygems_controller_test.rb
index <HASH>..<HASH> 100644
--- a/test/functional/api/v1/rubygems_controller_test.rb
+++ b/test/functional/api/v1/rubygems_controller_test.rb
@@ -1,4 +1,5 @@
-require File.join(File.dirname(__FILE__), '..', '..', '..', 'test_helper')
+require 'test_helper'
+require 'yaml'
class Api::V1::RubygemsControllerTest < ActionController::TestCase
should "route old paths to new controller" do
diff --git a/test/functional/api/v1/searches_controller_test.rb b/test/functional/api/v1/searches_controller_test.rb
index <HASH>..<HASH> 100644
--- a/test/functional/api/v1/searches_controller_test.rb
+++ b/test/functional/api/v1/searches_controller_test.rb
@@ -1,4 +1,5 @@
require 'test_helper'
+require 'yaml'
class Api::V1::SearchesControllerTest < ActionController::TestCase
context "with some gems" do
diff --git a/test/functional/api/v1/web_hooks_controller_test.rb b/test/functional/api/v1/web_hooks_controller_test.rb
index <HASH>..<HASH> 100644
--- a/test/functional/api/v1/web_hooks_controller_test.rb
+++ b/test/functional/api/v1/web_hooks_controller_test.rb
@@ -1,4 +1,5 @@
require 'test_helper'
+require 'yaml'
class Api::V1::WebHooksControllerTest < ActionController::TestCase
def self.should_not_find_it
|
Don't depend on Rails to require YAML
|
rubygems_rubygems.org
|
train
|
1182a8a87ed7b2b135939b39a6d7a46aad36da96
|
diff --git a/lxd/storage/drivers/volume.go b/lxd/storage/drivers/volume.go
index <HASH>..<HASH> 100644
--- a/lxd/storage/drivers/volume.go
+++ b/lxd/storage/drivers/volume.go
@@ -20,7 +20,7 @@ const tmpVolSuffix = ".lxdtmp"
const defaultBlockSize = "10GB"
// vmBlockFilesystemSize is the size of a VM block volume's associated filesystem volume.
-const vmBlockFilesystemSize = "50MB"
+const vmBlockFilesystemSize = "100MB"
// DefaultFilesystem filesytem to use for block devices by default.
const DefaultFilesystem = "ext4"
|
lxd/storage/drivers: Bump VM fs size to <I>MB
The UEFI nvram on aarch<I> is <I>MB so wasn't fitting in our previous
value when stored on a block based storage driver.
|
lxc_lxd
|
train
|
2321c102a42614588b6c26f99b48b7af2af45e0f
|
diff --git a/client/extensions/woocommerce/app/products/product-variation-types-form.js b/client/extensions/woocommerce/app/products/product-variation-types-form.js
index <HASH>..<HASH> 100644
--- a/client/extensions/woocommerce/app/products/product-variation-types-form.js
+++ b/client/extensions/woocommerce/app/products/product-variation-types-form.js
@@ -3,7 +3,7 @@
*/
import React, { Component, PropTypes } from 'react';
import i18n from 'i18n-calypso';
-import { find } from 'lodash';
+import { find, debounce } from 'lodash';
/**
* Internal dependencies
@@ -15,6 +15,10 @@ import TokenField from 'components/token-field';
export default class ProductVariationTypesForm extends Component {
+ state = {
+ attributeNames: {},
+ };
+
static propTypes = {
product: PropTypes.shape( {
id: PropTypes.isRequired,
@@ -31,14 +35,8 @@ export default class ProductVariationTypesForm extends Component {
if ( ! product.attributes ) {
this.addType();
}
- }
- constructor( props ) {
- super( props );
-
- this.addType = this.addType.bind( this );
- this.updateName = this.updateName.bind( this );
- this.updateValues = this.updateValues.bind( this );
+ this.debouncedUpdateName = debounce( this.updateName, 300 );
}
getNewFields() {
@@ -49,34 +47,43 @@ export default class ProductVariationTypesForm extends Component {
};
}
- addType() {
+ addType = () => {
const { product, editProductAttribute } = this.props;
editProductAttribute( product, null, this.getNewFields() );
}
- updateName( e ) {
+ updateNameHandler = ( e ) => {
+ const attributeNames = { ...this.state.attributeNames };
+ attributeNames[ e.target.id ] = e.target.value;
+ this.setState( { attributeNames } );
+ this.debouncedUpdateName( e.target.id, e.target.value );
+ }
+
+ updateName( attributeId, name ) {
const { product, editProductAttribute } = this.props;
const attribute = product.attributes && find( product.attributes, function( a ) {
- return a.uid === e.target.id;
+ return a.uid === attributeId;
} );
- editProductAttribute( product, attribute, { name: e.target.value } );
+ editProductAttribute( product, attribute, { name } );
}
- updateValues( values, attribute ) {
+ updateValues = ( values, attribute ) => {
const { product, editProductAttribute } = this.props;
editProductAttribute( product, attribute, { options: values } );
}
renderInputs( attribute ) {
+ const { attributeNames } = this.state;
+ const attributeName = attributeNames && attributeNames[ attribute.uid ] || attribute.name;
return (
<div key={ attribute.uid } className="products__variation-types-form-fieldset">
<FormTextInput
placeholder={ i18n.translate( 'Color' ) }
- value={ attribute.name }
+ value={ attributeName }
id={ attribute.uid }
name="type"
className="products__variation-types-form-field"
- onChange={ this.updateName }
+ onChange={ this.updateNameHandler }
/>
<TokenField
placeholder={ i18n.translate( 'Comma separate these' ) }
|
Add a debounce to the variation type name field so typing in the input box doesn't become slow with a huge number of variations. (#<I>)
|
Automattic_wp-calypso
|
train
|
c268d9184667a02d0b6c34d57c3c846abc72572c
|
diff --git a/meshio/xdmf/main.py b/meshio/xdmf/main.py
index <HASH>..<HASH> 100644
--- a/meshio/xdmf/main.py
+++ b/meshio/xdmf/main.py
@@ -311,12 +311,7 @@ class XdmfReader:
class XdmfWriter:
def __init__(
- self,
- filename,
- mesh,
- data_format="HDF",
- compression=None,
- compression_opts=None,
+ self, filename, mesh, data_format="HDF", compression=None, compression_opts=None
):
if data_format not in ["XML", "Binary", "HDF"]:
raise WriteError(
@@ -438,14 +433,16 @@ class XdmfWriter:
NumberOfElements=str(total_num_cells),
)
total_num_cell_items = sum(numpy.prod(c.data.shape) for c in cells)
- num_lines = sum(c.data.shape[0] for c in cells if c.type == "line")
- dim = str(total_num_cell_items + total_num_cells + num_lines)
+ num_vertices_and_lines = sum(
+ c.data.shape[0] for c in cells if c.type in {"vertex", "line"}
+ )
+ dim = str(total_num_cell_items + total_num_cells + num_vertices_and_lines)
cd = numpy.concatenate(
[
numpy.hstack(
[
numpy.full(
- (value.shape[0], 2 if key == "line" else 1),
+ (value.shape[0], 2 if key in {"vertex", "line"} else 1),
meshio_type_to_xdmf_index[key],
),
value,
|
Polyvertex needs number of cells (1) in XDMF mixed topology
|
nschloe_meshio
|
train
|
a4239641f685c0f06258309620e28ea4c2911fd0
|
diff --git a/test/classes/phing/tasks/ext/PhpLintTaskTest.php b/test/classes/phing/tasks/ext/PhpLintTaskTest.php
index <HASH>..<HASH> 100755
--- a/test/classes/phing/tasks/ext/PhpLintTaskTest.php
+++ b/test/classes/phing/tasks/ext/PhpLintTaskTest.php
@@ -61,6 +61,10 @@ class PhpLintTaskTest extends BuildFileTest
*/
public function testDeprecated()
{
+ if (defined('HHVM_VERSION')) {
+ $this->markTestSkipped("HHVM lint does not support testing for deprecated statements");
+ }
+
file_put_contents(
PHING_TEST_BASE . '/tmp/phplint_file.php',
'<?php class TestClass {}; $t = & new TestClass();'
|
Refs #<I> - no support for deprecated lint in hhvm
|
phingofficial_phing
|
train
|
1a3bb60986d90e32c04575111b1ccb8eab24a3e5
|
diff --git a/client.go b/client.go
index <HASH>..<HASH> 100644
--- a/client.go
+++ b/client.go
@@ -627,8 +627,6 @@ func (c *Client) execute(req *Request) (*Response, error) {
c.transport.Proxy = http.ProxyURL(req.proxyURL)
} else if c.proxyURL != nil {
c.transport.Proxy = http.ProxyURL(c.proxyURL)
- } else {
- c.transport.Proxy = nil
}
req.Time = time.Now()
|
#<I> don't set transport proxy to nil by default
|
go-resty_resty
|
train
|
b358d4d73004f7821d3ddde87d189717fc4e2042
|
diff --git a/salesforce/backend/query.py b/salesforce/backend/query.py
index <HASH>..<HASH> 100644
--- a/salesforce/backend/query.py
+++ b/salesforce/backend/query.py
@@ -188,7 +188,7 @@ def prep_for_deserialize(model, record, using, init_list=None):
if len(record.keys()) == 1 and model._meta.db_table in record:
# this is for objects with ManyToManyField and OneToOneField
- while len(record) == 1:
+ while len(record) == 1 and list(record.values())[0]:
record = list(record.values())[0]
if record is None:
return None
diff --git a/salesforce/fields.py b/salesforce/fields.py
index <HASH>..<HASH> 100644
--- a/salesforce/fields.py
+++ b/salesforce/fields.py
@@ -180,7 +180,7 @@ class TimeField(SfField, models.TimeField):
class ForeignKey(SfField, models.ForeignKey):
- """ForeignKey with sf_read_only attribute for Salesforce."""
+ """ForeignKey with sf_read_only attribute and acceptable by Salesforce."""
def __init__(self, *args, **kwargs):
# Checks parameters before call to ancestor.
if DJANGO_19_PLUS and args[1:2]:
@@ -214,4 +214,9 @@ class ForeignKey(SfField, models.ForeignKey):
return attname, column
+class OneToOneField(ForeignKey, models.OneToOneField):
+ """OneToOneField with sf_read_only attribute and acceptable by Salesforce."""
+ pass
+
+
AutoField = SalesforceAutoField
diff --git a/salesforce/testrunner/example/models.py b/salesforce/testrunner/example/models.py
index <HASH>..<HASH> 100644
--- a/salesforce/testrunner/example/models.py
+++ b/salesforce/testrunner/example/models.py
@@ -384,3 +384,16 @@ class Task(models.Model):
who = models.ForeignKey(Lead, on_delete=models.DO_NOTHING, blank=True, null=True)
# Refer
what = models.ForeignKey(Account, related_name='task_what_set', on_delete=models.DO_NOTHING, blank=True, null=True)
+
+
+# OneToOneField
+
+class ApexEmailNotification(models.Model):
+ """Stores Salesforce users and external email addresses to be notified when unhandled Apex exceptions occur.
+
+ Available in API version 35.0 and later.
+ """
+ # A semicolon-delimited list of email addresses to notify when unhandled Apex exceptions occur.
+ user = models.OneToOneField('User', related_name='apex_email_notification', on_delete=models.DO_NOTHING, blank=True, null=True)
+ # Users of your org to notify when unhandled Apex exceptions occur.
+ email = models.CharField(unique=True, max_length=255, verbose_name='email', blank=True)
diff --git a/salesforce/tests/test_integration.py b/salesforce/tests/test_integration.py
index <HASH>..<HASH> 100644
--- a/salesforce/tests/test_integration.py
+++ b/salesforce/tests/test_integration.py
@@ -19,7 +19,7 @@ from django.test import TestCase
from django.utils import timezone
from salesforce.testrunner.example.models import (Account, Contact, Lead, User,
- BusinessHours, ChargentOrder, CronTrigger,
+ ApexEmailNotification, BusinessHours, ChargentOrder, CronTrigger,
Opportunity, OpportunityContactRole,
Product, Pricebook, PricebookEntry, Note, Task,
Organization, models_template,
@@ -126,6 +126,52 @@ class BasicSOQLRoTest(TestCase):
test_contact.delete()
test_account.delete()
+ @skipUnless(default_is_sf, "Default database should be any Salesforce.")
+ def test_one_to_one_field(self):
+ # test 1a is unique field
+ self.assertEqual(ApexEmailNotification._meta.get_field('user').unique, True)
+
+ current_sf_user = User.objects.get(Username=current_user)
+ orig_objects = list(ApexEmailNotification.objects.filter(
+ Q(user=current_sf_user) | Q(email='apex.bugs@example.com')))
+ try:
+ notifier_u = current_sf_user.apex_email_notification
+ new_u = None
+ except ApexEmailNotification.DoesNotExist:
+ notifier_u = new_u = ApexEmailNotification(user=current_sf_user)
+ notifier_u.save()
+ try:
+ notifier_e = ApexEmailNotification.objects.get(email='apex.bugs@example.com')
+ new_e = None
+ except ApexEmailNotification.DoesNotExist:
+ notifier_e = new_e = ApexEmailNotification(email='apex.bugs@example.com')
+ notifier_e.save()
+
+ try:
+ # test 1b is unique value
+ duplicate = ApexEmailNotification(user=current_sf_user)
+ # the method self.assertRaise was too verbose about exception
+ try:
+ duplicate.save()
+ except salesforce.backend.base.SalesforceError as exc:
+ self.assertEqual(exc.data['errorCode'], 'DUPLICATE_VALUE')
+ else:
+ self.assertRaises(salesforce.backend.base.SalesforceError, duplicate.save)
+
+ # test 2: the reverse relation is a value, not a set
+ result = User.objects.exclude(apex_email_notification__user=None)
+ self.assertIn(current_user, [x.Username for x in result])
+
+ # test 3: relation to the parent
+ result = ApexEmailNotification.objects.filter(user__Username=notifier_u.user.Username)
+ self.assertEqual(len(result), 1)
+ self.assertEqual(result[0].user_id, notifier_u.user_id)
+ finally:
+ if new_u:
+ new_u.delete()
+ if new_e:
+ new_e.delete()
+
def test_update_date(self):
"""Test updating a date.
"""
|
Implemented OneToOneField relationship, written test
|
django-salesforce_django-salesforce
|
train
|
b4fd4c2e766899f892ab794645dae8225d42ddba
|
diff --git a/openstack_dashboard/dashboards/project/instances/tabs.py b/openstack_dashboard/dashboards/project/instances/tabs.py
index <HASH>..<HASH> 100644
--- a/openstack_dashboard/dashboards/project/instances/tabs.py
+++ b/openstack_dashboard/dashboards/project/instances/tabs.py
@@ -25,6 +25,7 @@ from openstack_dashboard.dashboards.project.instances \
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.instances import console
from openstack_dashboard.dashboards.project.instances import interfaces_tables
+from openstack_dashboard import policy
from openstack_dashboard.utils import settings as settings_utils
@@ -49,12 +50,13 @@ class OverviewTab(tabs.Tab):
return {"instance": instance}
-class InterfacesTab(tabs.TableTab):
+class InterfacesTab(policy.PolicyTargetMixin, tabs.TableTab):
name = _("Interfaces")
slug = "interfaces"
table_classes = (interfaces_tables.InterfacesTable, )
template_name = "horizon/common/_detail_table.html"
preload = False
+ policy_rules = (("compute", "os_compute_api:os-attach-interfaces"),)
def get_interfaces_data(self):
instance = self.tab_group.kwargs['instance']
@@ -75,11 +77,12 @@ class InterfacesTab(tabs.TableTab):
return ports
-class LogTab(tabs.Tab):
+class LogTab(policy.PolicyTargetMixin, tabs.Tab):
name = _("Log")
slug = "log"
template_name = "project/instances/_detail_log.html"
preload = False
+ policy_rules = (("compute", "os_compute_api:os-console-output"),)
def get_context_data(self, request):
instance = self.tab_group.kwargs['instance']
@@ -96,11 +99,12 @@ class LogTab(tabs.Tab):
"log_length": log_length}
-class ConsoleTab(tabs.Tab):
+class ConsoleTab(policy.PolicyTargetMixin, tabs.Tab):
name = _("Console")
slug = "console"
template_name = "project/instances/_detail_console.html"
preload = False
+ policy_rules = (("compute", "os_compute_api:os-consoles:show"),)
def get_context_data(self, request):
instance = self.tab_group.kwargs['instance']
@@ -126,12 +130,13 @@ class ConsoleTab(tabs.Tab):
return bool(settings.CONSOLE_TYPE)
-class AuditTab(tabs.TableTab):
+class AuditTab(policy.PolicyTargetMixin, tabs.TableTab):
name = _("Action Log")
slug = "audit"
table_classes = (a_tables.AuditTable,)
template_name = "project/instances/_detail_audit.html"
preload = False
+ policy_rules = (("compute", "os_compute_api:os-instance-usage-audit-log"),)
def get_audit_data(self):
actions = []
@@ -145,7 +150,8 @@ class AuditTab(tabs.TableTab):
return sorted(actions, reverse=True, key=lambda y: y.start_time)
-class InstanceDetailTabs(tabs.DetailTabsGroup):
+class InstanceDetailTabs(policy.PolicyTargetMixin, tabs.DetailTabsGroup):
slug = "instance_details"
tabs = (OverviewTab, InterfacesTab, LogTab, ConsoleTab, AuditTab)
sticky = True
+ policy_rules = (("compute", "os_compute_api:os-consoles:show"),)
|
Server tabs: add policy enforcement for tab display
We have nova policies for all of these so should observe them. Otherwise
if a user e.g. doesn't have rights to view the console log of a VM they
will get an error every time they click on an instance name.
Change-Id: I<I>bb<I>c4c0aea0a<I>bf<I>e9fe<I>a<I>ce<I>
|
openstack_horizon
|
train
|
53f3a3c69f673d5525fee4c0d8a2c87f1a5f11b4
|
diff --git a/gpiozero/input_devices.py b/gpiozero/input_devices.py
index <HASH>..<HASH> 100644
--- a/gpiozero/input_devices.py
+++ b/gpiozero/input_devices.py
@@ -404,9 +404,14 @@ class MotionSensor(SmoothedInputDevice):
and GND. VCC should be connected to the Pi's +5V pin, GND to one of the
Pi's ground pins, and finally OUT to the GPIO specified as the value of the
`pin` parameter in the constructor.
+
+ This class defaults `queue_len` to 1, effectively removing the averaging
+ of the internal queue. If your PIR sensor has a short fall time and is
+ particularly "jittery" you may wish to set this to a higher value (e.g. 5)
+ to mitigate this.
"""
def __init__(
- self, pin=None, queue_len=5, sample_rate=10, threshold=0.5,
+ self, pin=None, queue_len=1, sample_rate=10, threshold=0.5,
partial=False):
super(MotionSensor, self).__init__(
pin, pull_up=False, threshold=threshold,
|
Fix #<I>
Make MotionSensor more responsive by effectively removing the averaging
queue. Also add note on how to smooth out "jittery" PIR sensors by
increasing the queue length back up to 5.
|
RPi-Distro_python-gpiozero
|
train
|
e72dd7089bb4730b89557bab67086363ff5e067e
|
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index <HASH>..<HASH> 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 0.114.0
+current_version = 0.115.0
tag = True
commit = True
message = Change version: {current_version} -> {new_version} [ci skip]
diff --git a/instabot/api/api.py b/instabot/api/api.py
index <HASH>..<HASH> 100644
--- a/instabot/api/api.py
+++ b/instabot/api/api.py
@@ -55,7 +55,7 @@ is_py3 = version_info[0] == 3
is_py37 = version_info[:2] == (3, 7)
-version = "0.114.0"
+version = "0.115.0"
current_path = os.path.abspath(os.getcwd())
diff --git a/instabot/bot/bot.py b/instabot/bot/bot.py
index <HASH>..<HASH> 100644
--- a/instabot/bot/bot.py
+++ b/instabot/bot/bot.py
@@ -1,4 +1,4 @@
-version = "0.114.0"
+version = "0.115.0"
import atexit
import datetime
import logging
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -11,7 +11,7 @@ with open(path.join(here, "README.md"), encoding="utf-8") as f:
setup(
name="instabot",
- version="0.114.0",
+ version="0.115.0",
description="Instagram bot scripts for promotion and API python wrapper.",
long_description=long_description,
author="Daniil Okhlopkov, Evgeny Kemerov",
|
Change version: <I> -> <I> [ci skip]
|
instagrambot_instabot
|
train
|
22466f0c81113479c78d185a100f356fc1623b43
|
diff --git a/core/src/main/java/com/graphhopper/routing/Path.java b/core/src/main/java/com/graphhopper/routing/Path.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/graphhopper/routing/Path.java
+++ b/core/src/main/java/com/graphhopper/routing/Path.java
@@ -223,6 +223,9 @@ public class Path
if (Double.isInfinite(speed) || Double.isNaN(speed) || speed < 0)
throw new IllegalStateException("Invalid speed stored in edge! " + speed);
+ if (speed == 0)
+ throw new IllegalStateException("Speed cannot be 0 for unblocked edge, use access properties to mark edge blocked! Should only occur for shortest path calculation. See #242.");
+
return (long) (distance * 3600 / speed);
}
diff --git a/core/src/test/java/com/graphhopper/routing/AbstractRoutingAlgorithmTester.java b/core/src/test/java/com/graphhopper/routing/AbstractRoutingAlgorithmTester.java
index <HASH>..<HASH> 100644
--- a/core/src/test/java/com/graphhopper/routing/AbstractRoutingAlgorithmTester.java
+++ b/core/src/test/java/com/graphhopper/routing/AbstractRoutingAlgorithmTester.java
@@ -87,7 +87,7 @@ public abstract class AbstractRoutingAlgorithmTester
Graph graphFastest = createGraph(false);
initDirectedAndDiffSpeed(graphFastest);
- Path p2 = createAlgo(graphFastest,
+ Path p2 = createAlgo(graphFastest,
AlgorithmOptions.start().flagEncoder(carEncoder).weighting(new FastestWeighting(carEncoder)).build()).
calcPath(0, 3);
assertEquals(Helper.createTList(0, 4, 6, 7, 5, 3), p2.calcNodes());
@@ -666,6 +666,26 @@ public abstract class AbstractRoutingAlgorithmTester
}
@Test
+ public void test0SpeedButUnblocked_Issue242()
+ {
+ Graph graph = createGraph(false);
+ long flags = carEncoder.setAccess(carEncoder.setSpeed(0, 0), true, true);
+
+ graph.edge(0, 1).setFlags(flags).setDistance(10);
+ graph.edge(1, 2).setFlags(flags).setDistance(10);
+
+ RoutingAlgorithm algo = createAlgo(graph);
+ try
+ {
+ Path p = algo.calcPath(0, 2);
+ assertTrue(false);
+ } catch (Exception ex)
+ {
+ assertTrue(ex.getMessage(), ex.getMessage().startsWith("Speed cannot be 0"));
+ }
+ }
+
+ @Test
public void testTwoWeightsPerEdge2()
{
// other direction should be different!
|
fixed #<I>, Exception if speed is 0 but valid access flags and using shortest path algo.
|
graphhopper_graphhopper
|
train
|
17de52fc78dfdd772942a0ecc458e589e26d6bf8
|
diff --git a/spec/netsuite/records/basic_record_spec.rb b/spec/netsuite/records/basic_record_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/netsuite/records/basic_record_spec.rb
+++ b/spec/netsuite/records/basic_record_spec.rb
@@ -4,7 +4,9 @@ describe 'basic records' do
let(:basic_record_list) {
[
NetSuite::Records::Currency,
- NetSuite::Records::Location
+ NetSuite::Records::Location,
+ NetSuite::Records::JobStatus,
+ NetSuite::Records::TimeBill
]
}
@@ -23,12 +25,16 @@ describe 'basic records' do
expect(record_instance).to respond_to(:external_id=)
end
- # ensure that all fields can be set
standard_fields = (record_class.fields - record_class.record_refs).to_a
+ custom_object_fields = standard_fields.select { |f| !record_instance.send(f).nil? }
+ standard_fields -= custom_object_fields
+ # ensure that all fields can be set
standard_fields.each { |f| expect(record_instance).to have_field(f) }
record_class.record_refs.each { |f| expect(record_instance).to have_record_ref(f) }
+ # TODO handle custom object fields
+
6.times do
record_instance.send(:"#{standard_fields.sample}=", "Test Value")
end
|
Handle records with custom field objects in general record spec
|
NetSweet_netsuite
|
train
|
4e80009891fa8a81b84e35dac6cba4ca96d1f1e1
|
diff --git a/plugin/blog/Resources/modules/blog/blog.controller.js b/plugin/blog/Resources/modules/blog/blog.controller.js
index <HASH>..<HASH> 100644
--- a/plugin/blog/Resources/modules/blog/blog.controller.js
+++ b/plugin/blog/Resources/modules/blog/blog.controller.js
@@ -73,8 +73,6 @@ export default class BlogController {
this._setMessage('info', 'post_filtered_by_search', {searchTerms: decodeURI(_$routeParams.get(this).terms)}, true, 'icap_blog', true)
}
)
-
- this.blog.search(this.searchTerms)
break
case 'author':
@@ -215,4 +213,4 @@ BlogController.$inject = [
'$scope',
'$routeParams',
'tinyMceConfig'
-]
\ No newline at end of file
+]
|
[BlogBundle] Fix blog search (#<I>)
Unnecessary line of code prevented search from being fullfilled.
|
claroline_Distribution
|
train
|
2815c1510ac44f819a7fab24c232b0b8cfa6a3cd
|
diff --git a/server/standalone.js b/server/standalone.js
index <HASH>..<HASH> 100644
--- a/server/standalone.js
+++ b/server/standalone.js
@@ -352,6 +352,12 @@ define(['logManager',
}
});
});
+ __app.get(/^\/pluginoutput\/.*/,ensureAuthenticated,function(req,res){
+ var filepath = req.path.replace('/pluginoutput',CONFIG.intoutdir);
+ res.sendfile(filepath,function(err){
+ res.send(404);
+ });
+ });
__logger.info("creating basic static content related routing rules");
//static contents
diff --git a/worker/simpleworker.js b/worker/simpleworker.js
index <HASH>..<HASH> 100644
--- a/worker/simpleworker.js
+++ b/worker/simpleworker.js
@@ -46,6 +46,13 @@ function(CONSTANT,Core,Storage,GUID,DUMP,logManager,FS,PATH,PluginFSServer,Plugi
pluginBasePaths = parameters.pluginBasePaths;
serverPort = parameters.serverPort || 80;
interpreteroutputdirectory = parameters.interpreteroutputdirectory || "";
+ if(interpreteroutputdirectory){
+ try{
+ FS.mkdirSync(PATH.resolve(interpreteroutputdirectory));
+ } catch(e){
+ console.log('output directory cannot be created');
+ }
+ }
storage = new Storage({'host':parameters.ip,'port':parameters.port,'database':parameters.db,'log':logManager.create('SERVER-WORKER-'+process.pid)});
storage.openDatabase(function(err){
if(err){
|
some improvements on artifact downloading - plugin framework -
Former-commit-id: b<I>bc4a0a<I>ae<I>de1aa<I>d<I>fe9cee<I>fd
|
webgme_webgme-engine
|
train
|
6a5e19f2df0f02585b1f6d3f2cb3f85bcab0cee9
|
diff --git a/lib/Cisco.php b/lib/Cisco.php
index <HASH>..<HASH> 100755
--- a/lib/Cisco.php
+++ b/lib/Cisco.php
@@ -173,7 +173,7 @@ class Cisco
$invlines = explode("\r\n", $show_inventory);
foreach ($invlines as $line) {
// LEGACY PERL CODE: $x =~ /^\s*PID:\s(\S+).*SN:\s+(\S+)\s*$/;
- if (preg_match('/.*PID:\s(\S+)\s.*/', $line, $reg)) {
+ if (preg_match('/.*PID:\s(\S+?)(,|\s).*/', $line, $reg)) {
$model = $reg[1];
return $model;
|
Update Cisco.php
Modified inventory-to-model detection to remove commas at end
|
metaclassing_utility
|
train
|
2f3d6f8dff7d50fb1e60b90cbb3245dfec2fd4f6
|
diff --git a/test/parsing.js b/test/parsing.js
index <HASH>..<HASH> 100644
--- a/test/parsing.js
+++ b/test/parsing.js
@@ -31,5 +31,11 @@ describe('Book parsing', function () {
it('should correctly parse the languages', function() {
assert.equal(book2.books.length, 2);
+
+ assert.equal(book2.books[0].options.lang, "en");
+ assert.equal(book2.books[0].options.title, "English Book");
+
+ assert.equal(book2.books[1].options.lang, "fr");
+ assert.equal(book2.books[1].options.title, "French Book");
});
});
|
Improve test for parsing multilanguages books
|
GitbookIO_gitbook
|
train
|
fdf18029689b548d0c14039f2cedf5501667661f
|
diff --git a/scripts/benchmark.js b/scripts/benchmark.js
index <HASH>..<HASH> 100755
--- a/scripts/benchmark.js
+++ b/scripts/benchmark.js
@@ -1,4 +1,5 @@
#!/usr/bin/env node
+Error.stackTraceLimit = Infinity;
const uglify = require('uglify-js');
const Table = require('cli-table');
@@ -10,7 +11,9 @@ const zlib = require('zlib');
const fs = require('fs');
const path = require('path');
-babel.register();
+
+require('babel-jest/node_modules/babel-core').register();
+
const filename = process.argv[2];
if (!filename) {
console.error('Error: No filename specified');
@@ -74,12 +77,6 @@ function test(name, callback) {
test('babel', function (code, callback) {
return babel.transform(code, {
- experimental: true,
- whitelist: [],
- optional: [
- 'minification.memberExpressionLiterals',
- 'minification.propertyLiterals',
- ],
plugins: [
// 'constant-folding',
require('../src/mangle-names-plugin'),
|
update to run in babel 6
|
babel_minify
|
train
|
2d0799ac7f507c5592c8dc8a7715b2064989c9d1
|
diff --git a/Auth/OpenID/MySQLStore.php b/Auth/OpenID/MySQLStore.php
index <HASH>..<HASH> 100644
--- a/Auth/OpenID/MySQLStore.php
+++ b/Auth/OpenID/MySQLStore.php
@@ -28,7 +28,7 @@ class Auth_OpenID_MySQLStore extends Auth_OpenID_SQLStore {
" timestamp INTEGER,\n".
" salt CHAR(40),\n".
" UNIQUE (server_url(255), timestamp, salt)\n".
- ") TYPE=InnoDB";
+ ") ENGINE=InnoDB";
$this->sql['assoc_table'] =
"CREATE TABLE %s (\n".
@@ -39,7 +39,7 @@ class Auth_OpenID_MySQLStore extends Auth_OpenID_SQLStore {
" lifetime INTEGER,\n".
" assoc_type VARCHAR(64),\n".
" PRIMARY KEY (server_url(255), handle)\n".
- ") TYPE=InnoDB";
+ ") ENGINE=InnoDB";
$this->sql['set_assoc'] =
"REPLACE INTO %s VALUES (?, ?, !, ?, ?, ?)";
|
[project @ Use ENGINE=InnoDB instead of TYPE=InnoDB, which is deprecated in current versions of MySQL]
|
openid_php-openid
|
train
|
fcb56d03d8e21419c84b88f112d24103685b907e
|
diff --git a/commands/review/post/command.go b/commands/review/post/command.go
index <HASH>..<HASH> 100644
--- a/commands/review/post/command.go
+++ b/commands/review/post/command.go
@@ -31,10 +31,10 @@ import (
var Command = &gocli.Command{
UsageLine: `
- post [-update=RRID] [-fixes=RRID] [-open] [REVISION]
+ post [-update=RRID] [-fixes=RRID] [-reviewer=REVIEWER] [-open] [REVISION]
post [-fixes=RRID] [-no_fetch] [-no_rebase] [-ask_once]
- [-pick] [-open] [-no_dialog] -parent=BRANCH`,
+ [-pick] [-reviewer=REVIEWER] [-open] [-no_dialog] -parent=BRANCH`,
Short: "post code review requests",
Long: `
Post a code review request for each commit specified.
@@ -65,6 +65,7 @@ var (
flagOpen bool
flagParent string
flagPick bool
+ flagReviewer string
flagUpdate uint
)
@@ -86,6 +87,8 @@ func init() {
"branch to be used in computing the revision range")
Command.Flags.BoolVar(&flagPick, "pick", flagPick,
"pick only some of the selected commits for review")
+ Command.Flags.StringVar(&flagReviewer, "reviewer", flagReviewer,
+ "reviewer to assign to the newly created review requests")
Command.Flags.UintVar(&flagUpdate, "update", flagUpdate,
"update an existing review request with REVISION")
@@ -687,6 +690,9 @@ func sendReviewRequests(ctxs []*common.ReviewContext) error {
if flagUpdate != 0 {
postOpts["update"] = flagUpdate
}
+ if flagReviewer != "" {
+ postOpts["reviewer"] = flagReviewer
+ }
if flagOpen {
postOpts["open"] = true
}
diff --git a/modules/github/codereview/code_review_tool.go b/modules/github/codereview/code_review_tool.go
index <HASH>..<HASH> 100644
--- a/modules/github/codereview/code_review_tool.go
+++ b/modules/github/codereview/code_review_tool.go
@@ -281,7 +281,9 @@ func createAssignedReviewRequest(
// Create a new review issue.
issueResource, err := createIssue(
- task, config, owner, repo, issue.FormatTitle(), issue.FormatBody(), milestone)
+ task, config, owner, repo,
+ issue.FormatTitle(), issue.FormatBody(),
+ optValueString(opts["reviewer"]), milestone)
if err != nil {
return nil, errs.NewError(task, err)
}
@@ -374,7 +376,9 @@ func createUnassignedReviewRequest(
// Create a new review issue.
issueResource, err := createIssue(
- task, config, owner, repo, issue.FormatTitle(), issue.FormatBody(), milestone)
+ task, config, owner, repo,
+ issue.FormatTitle(), issue.FormatBody(),
+ optValueString(opts["reviewer"]), milestone)
if err != nil {
return nil, errs.NewError(task, err)
}
@@ -544,6 +548,7 @@ func createIssue(
repo string,
issueTitle string,
issueBody string,
+ assignee string,
milestone *github.Milestone,
) (issue *github.Issue, err error) {
@@ -554,6 +559,7 @@ func createIssue(
Title: github.String(issueTitle),
Body: github.String(issueBody),
Labels: &labels,
+ Assignee: github.String(assignee),
Milestone: milestone.Number,
})
if err != nil {
@@ -658,3 +664,10 @@ func getOrCreateMilestoneForCommit(
func milestoneTitle(v *version.Version) string {
return fmt.Sprintf("%v-review", v.BaseString())
}
+
+func optValueString(value interface{}) string {
+ if value == nil {
+ return ""
+ }
+ return value.(string)
+}
diff --git a/modules/reviewboard/code_review_tool.go b/modules/reviewboard/code_review_tool.go
index <HASH>..<HASH> 100644
--- a/modules/reviewboard/code_review_tool.go
+++ b/modules/reviewboard/code_review_tool.go
@@ -117,9 +117,10 @@ func postReviewRequestForCommit(
// Parse the options.
var (
- fixes = formatOptInteger(opts["fixes"])
- update = formatOptInteger(opts["update"])
- open bool
+ fixes = formatOptInteger(opts["fixes"])
+ update = formatOptInteger(opts["update"])
+ reviewer = formatOptString(opts["reviewer"])
+ open bool
)
if _, ok := opts["open"]; ok {
open = true
@@ -140,6 +141,9 @@ func postReviewRequestForCommit(
if update != "" {
args = append(args, "--review-request-id", update)
}
+ if reviewer != "" {
+ args = append(args, "--target-people", reviewer)
+ }
if open {
args = append(args, "--open")
}
@@ -191,6 +195,13 @@ func formatOptInteger(value interface{}) string {
return fmt.Sprintf("%v", value)
}
+func formatOptString(value interface{}) string {
+ if value == nil {
+ return ""
+ }
+ return value.(string)
+}
+
func ensureRbtVersion() error {
hint := `
You need to install RBTools version 0.7. Please run
|
review post: Add -reviewer flag
This flag can be used to specify the reviewer in case a new review
request is being created. The behaviour obviously depends on the code
review module being used.
Change-Id: <I>fd3f<I>e4
Story-Id: SF-<I>
|
salsaflow_salsaflow
|
train
|
43fb39fe518347e2103d761810cb0bf859f7ac06
|
diff --git a/src/Screen/Contracts/Groupable.php b/src/Screen/Contracts/Groupable.php
index <HASH>..<HASH> 100644
--- a/src/Screen/Contracts/Groupable.php
+++ b/src/Screen/Contracts/Groupable.php
@@ -19,4 +19,11 @@ interface Groupable extends Fieldable
* @return Groupable
*/
public function setGroup(array $group = []): self;
+
+ /**
+ * @param string $name
+ *
+ * @return $this
+ */
+ public function form(string $name): self ;
}
diff --git a/src/Screen/Fields/Group.php b/src/Screen/Fields/Group.php
index <HASH>..<HASH> 100644
--- a/src/Screen/Fields/Group.php
+++ b/src/Screen/Fields/Group.php
@@ -86,4 +86,18 @@ class Group extends Field implements Groupable
{
return $this->set('class', 'col');
}
+
+ /**
+ * @param string $name
+ *
+ * @return $this
+ */
+ public function form(string $name): self
+ {
+ $group = array_map(function ($field) use ($name) {
+ return $field->form($name);
+ }, $this->getGroup());
+
+ return $this->setGroup($group);
+ }
}
|
refs #<I> Fixed usage group in filter
|
orchidsoftware_platform
|
train
|
7519ceba9eba143e4a268c955317566910643b2f
|
diff --git a/cmd/juju/cloud/remove.go b/cmd/juju/cloud/remove.go
index <HASH>..<HASH> 100644
--- a/cmd/juju/cloud/remove.go
+++ b/cmd/juju/cloud/remove.go
@@ -129,7 +129,7 @@ func (c *removeCloudCommand) removeLocalCloud(ctxt *cmd.Context) error {
if err := cloud.WritePersonalCloudMetadata(personalClouds); err != nil {
return errors.Trace(err)
}
- ctxt.Infof("Removed details of cloud %q from the client", c.Cloud)
+ ctxt.Infof("Removed details of cloud %q from this client", c.Cloud)
return nil
}
diff --git a/cmd/juju/cloud/remove_test.go b/cmd/juju/cloud/remove_test.go
index <HASH>..<HASH> 100644
--- a/cmd/juju/cloud/remove_test.go
+++ b/cmd/juju/cloud/remove_test.go
@@ -85,7 +85,7 @@ func (s *removeSuite) TestRemoveCloudLocal(c *gc.C) {
assertPersonalClouds(c, "homestack", "homestack2")
ctx, err := cmdtesting.RunCommand(c, command, "homestack", "--client")
c.Assert(err, jc.ErrorIsNil)
- c.Assert(cmdtesting.Stderr(ctx), gc.Equals, "Removed details of cloud \"homestack\" from the client\n")
+ c.Assert(cmdtesting.Stderr(ctx), gc.Equals, "Removed details of cloud \"homestack\" from this client\n")
assertPersonalClouds(c, "homestack2")
}
@@ -103,7 +103,7 @@ func (s *removeSuite) TestRemoveCloudNoControllers(c *gc.C) {
c.Assert(err, jc.ErrorIsNil)
assertPersonalClouds(c, "homestack2")
c.Assert(cmdtesting.Stdout(ctx), gc.Equals, ``)
- c.Assert(cmdtesting.Stderr(ctx), gc.Matches, "Removed details of cloud \"homestack\" from the client\n")
+ c.Assert(cmdtesting.Stderr(ctx), gc.Matches, "Removed details of cloud \"homestack\" from this client\n")
}
func (s *removeSuite) TestRemoveCloudControllerControllerOnly(c *gc.C) {
@@ -134,7 +134,7 @@ func (s *removeSuite) TestRemoveCloudBoth(c *gc.C) {
c.Assert(command.ControllerName, gc.Equals, "mycontroller")
s.api.CheckCallNames(c, "RemoveCloud", "Close")
c.Assert(cmdtesting.Stderr(ctx), gc.Equals,
- "Removed details of cloud \"homestack\" from the client\n"+
+ "Removed details of cloud \"homestack\" from this client\n"+
"Removed details of cloud \"homestack\" from controller \"mycontroller\"\n")
}
|
Normalize output for remove cloud-command
This commit normalizes the output of cloud-command by replacing "from
the client" with "from this client". This is congruent with the command's
help text and makes it clear that it is the local client that gets
affected.
|
juju_juju
|
train
|
a5ad6207267252def7ae1388bb325a721cc9e4a1
|
diff --git a/ipyrad/core/assembly.py b/ipyrad/core/assembly.py
index <HASH>..<HASH> 100644
--- a/ipyrad/core/assembly.py
+++ b/ipyrad/core/assembly.py
@@ -1278,10 +1278,8 @@ def expander(namepath):
if "~" in namepath:
namepath = namepath.replace("~", os.path.expanduser("~"))
if "../" in namepath:
- _, post = namepath.split("../")
namepath = os.path.abspath(
- os.path.join(
- os.path.dirname(""), '..', post))
+ glob.glob(namepath)[0])
elif "./" in namepath:
_, post = namepath.split("./")
|
fixed expander to handle more than one ../ in paths.
|
dereneaton_ipyrad
|
train
|
adb231646dab7c5fe254b4f24c287582c8a35cd3
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -31,14 +31,31 @@ function algoliasearchHelper( client, index, opts ) {
return new AlgoliaSearchHelper( client, index, opts );
}
+/**
+ * The version currently used
+ * @member module:algoliasearchHelper.version
+ */
+algoliasearchHelper.version = "2.0.4";
+
+/**
+ * Constructor for the Helper.
+ * @member module:algoliasearchHelper.AlgoliaSearchHelper
+ * @see AlgoliaSearchHelper
+ */
algoliasearchHelper.AlgoliaSearchHelper = AlgoliaSearchHelper;
+
+/**
+ * Constructor for the object containing all the parameters of the search.
+ * @member module:algoliasearchHelper.SearchParameters
+ * @see SearchParameters
+ */
algoliasearchHelper.SearchParameters = SearchParameters;
-algoliasearchHelper.SearchResults = SearchResults;
/**
- * The version currently used
- * @member module:"algoliasearch-helper".version
+ * Constructor for the object containing the results of the search.
+ * @member module:algoliasearchHelper.SearchResults
+ * @see SearchResults
*/
-algoliasearchHelper.version = "2.0.4";
+algoliasearchHelper.SearchResults = SearchResults;
module.exports = algoliasearchHelper;
|
Fix doc (wrong namepath + add missing doc of constructors)
|
algolia_algoliasearch-helper-js
|
train
|
1116c9d1c1cda135d79f6466318a00417c3b9335
|
diff --git a/src/phpFastCache/CacheManager.php b/src/phpFastCache/CacheManager.php
index <HASH>..<HASH> 100644
--- a/src/phpFastCache/CacheManager.php
+++ b/src/phpFastCache/CacheManager.php
@@ -52,11 +52,11 @@ class CacheManager
* @var array
*/
protected static $config = [
- 'default_chmod' => 0777, // 0777 recommended
- 'fallback' => false, //Fall back when old driver is not support
'securityKey' => 'auto',// The securityKey that will be used to create sub-directory
'htaccess' => true,// Auto-generate .htaccess if tit is missing
+ 'default_chmod' => 0777, // 0777 recommended
'path' => '',// if not set will be the value of sys_get_temp_dir()
+ 'fallback' => false, //Fall back when old driver is not support
"limited_memory_each_object" => 4096, // maximum size (bytes) of object store in memory
"compress_data" => false, // compress stored data, if the backend supports it
];
|
Re-ordered option in cache manager
|
PHPSocialNetwork_phpfastcache
|
train
|
18d7f0829ba3935538471dc40933da56ec883601
|
diff --git a/src/test/moment/is_valid.js b/src/test/moment/is_valid.js
index <HASH>..<HASH> 100644
--- a/src/test/moment/is_valid.js
+++ b/src/test/moment/is_valid.js
@@ -76,6 +76,7 @@ test('string with bad month name', function (assert) {
});
test('string with spaceless format', function (assert) {
+ assert.equal(moment('10Sep2001', 'DDMMMYYYY').isValid(), true, 'Parsing 10Sep2001 should result in a valid date');
assert.equal(moment('10Sept2001', 'DDMMMYYYY').isValid(), true, 'Parsing 10Sept2001 should result in a valid date');
});
|
Added Sep/Sept parsing test
|
moment_moment
|
train
|
e6c53bc18ba557adb5aef811a0a68d166a989569
|
diff --git a/PPI/App.php b/PPI/App.php
index <HASH>..<HASH> 100755
--- a/PPI/App.php
+++ b/PPI/App.php
@@ -302,8 +302,7 @@ class App {
));
$templateLocator = new TemplateLocator($fileLocator);
-
- $assetsHelper = new \Symfony\Component\Templating\Helper\AssetsHelper($this->_request->getRequestUri());
+ $assetsHelper = new \Symfony\Component\Templating\Helper\AssetsHelper($this->_request->getBasePath());
switch($this->getOption('templatingEngine')) {
|
[Templating] Fixing the base path passed to the AssetsHelper
|
ppi_framework
|
train
|
8f0206ace5475d312049ff4e0ead61dcea161ca8
|
diff --git a/seed_message_sender/settings.py b/seed_message_sender/settings.py
index <HASH>..<HASH> 100644
--- a/seed_message_sender/settings.py
+++ b/seed_message_sender/settings.py
@@ -67,6 +67,7 @@ MIDDLEWARE_CLASSES = (
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
+ 'seed_papertrail.middleware.RequestTimingMiddleware',
)
ROOT_URLCONF = 'seed_message_sender.urls'
@@ -128,6 +129,16 @@ TEMPLATES = [
},
]
+PAPERTRAIL = os.environ.get('PAPERTRAIL')
+if PAPERTRAIL:
+ import seed_papertrail # noqa
+ PAPERTRAIL_HOST, _, PAPERTRAIL_PORT = PAPERTRAIL.partition(':')
+ LOGGING = seed_papertrail.auto_configure(
+ host=PAPERTRAIL_HOST,
+ port=int(PAPERTRAIL_PORT),
+ system=os.environ.get('MARATHON_APP_DOCKER_IMAGE', 'seed'),
+ program=os.environ.get('MESOS_TASK_ID', 'message_sender'))
+
# Sentry configuration
RAVEN_CONFIG = {
# DevOps will supply you with this.
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -51,6 +51,7 @@ setup(
'go_http==0.3.0',
'drfdocs==0.0.11',
'django-redis-cache==1.7.1',
+ 'seed-papertrail>=1.3.0',
],
classifiers=[
'Development Status :: 4 - Beta',
|
add papertrail middleware timing logging
|
praekeltfoundation_seed-message-sender
|
train
|
6db474bad64af03dadda8efe20fb040a9ea61dde
|
diff --git a/src/sap.ui.fl/src/sap/ui/fl/write/_internal/Versions.js b/src/sap.ui.fl/src/sap/ui/fl/write/_internal/Versions.js
index <HASH>..<HASH> 100644
--- a/src/sap.ui.fl/src/sap/ui/fl/write/_internal/Versions.js
+++ b/src/sap.ui.fl/src/sap/ui/fl/write/_internal/Versions.js
@@ -261,6 +261,7 @@ sap.ui.define([
* @param {string} mPropertyBag.layer - Layer for which the versions should be retrieved
* @param {string} mPropertyBag.title - Title of the to be activated version
* @param {string} mPropertyBag.appComponent - Application Component
+ * @param {string} mPropertyBag.displayedVersion - Id of the displayed version
* @returns {Promise<sap.ui.fl.Version>} Promise resolving with the updated list of versions for the application
* when the version was activated;
* rejects if an error occurs, the layer does not support draft handling, there is unsaved content, there is no draft to activate or
@@ -270,12 +271,11 @@ sap.ui.define([
var oModel = Versions.getVersionsModel(mPropertyBag);
var aVersions = oModel.getProperty("/versions");
var bDraftExists = _doesDraftExistInVersions(aVersions);
- var sDisplayedVersion = oModel.getProperty("/displayedVersion");
var sActiveVersion = oModel.getProperty("/activeVersion");
- if (sDisplayedVersion === sActiveVersion) {
+ if (mPropertyBag.displayedVersion === sActiveVersion) {
return Promise.reject("Version is already active");
}
- mPropertyBag.version = sDisplayedVersion;
+ mPropertyBag.version = mPropertyBag.displayedVersion;
var oDirtyChangeInfo = _getDirtyChangesInfo(mPropertyBag);
var aChangePersistences = oDirtyChangeInfo.changePersistences;
diff --git a/src/sap.ui.fl/src/sap/ui/fl/write/api/VersionsAPI.js b/src/sap.ui.fl/src/sap/ui/fl/write/api/VersionsAPI.js
index <HASH>..<HASH> 100644
--- a/src/sap.ui.fl/src/sap/ui/fl/write/api/VersionsAPI.js
+++ b/src/sap.ui.fl/src/sap/ui/fl/write/api/VersionsAPI.js
@@ -179,6 +179,7 @@ sap.ui.define([
* @param {sap.ui.core.Control} mPropertyBag.control - Control for which the request is done
* @param {string} mPropertyBag.layer - Layer for which the versions should be retrieved
* @param {string} mPropertyBag.title - Title of the to be activated version
+ * @param {string} mPropertyBag.displayedVersion - Id of the displayed version
*
* @ui5-restricted sap.ui.rta
*
@@ -205,7 +206,8 @@ sap.ui.define([
reference: Utils.normalizeReference(sReference),
layer: mPropertyBag.layer,
title: mPropertyBag.title,
- appComponent: Utils.getAppComponentForControl(mPropertyBag.control)
+ appComponent: Utils.getAppComponentForControl(mPropertyBag.control),
+ displayedVersion: mPropertyBag.displayedVersion
});
};
diff --git a/src/sap.ui.fl/test/sap/ui/fl/qunit/write/_internal/Versions.qunit.js b/src/sap.ui.fl/test/sap/ui/fl/qunit/write/_internal/Versions.qunit.js
index <HASH>..<HASH> 100644
--- a/src/sap.ui.fl/test/sap/ui/fl/qunit/write/_internal/Versions.qunit.js
+++ b/src/sap.ui.fl/test/sap/ui/fl/qunit/write/_internal/Versions.qunit.js
@@ -526,7 +526,8 @@ sap.ui.define([
layer: Layer.CUSTOMER,
reference: sReference,
nonNormalizedReference: sReference,
- appComponent: this.oAppComponent
+ appComponent: this.oAppComponent,
+ displayedVersion: "1"
};
var oFirstVersion = {
diff --git a/src/sap.ui.rta/src/sap/ui/rta/RuntimeAuthoring.js b/src/sap.ui.rta/src/sap/ui/rta/RuntimeAuthoring.js
index <HASH>..<HASH> 100644
--- a/src/sap.ui.rta/src/sap/ui/rta/RuntimeAuthoring.js
+++ b/src/sap.ui.rta/src/sap/ui/rta/RuntimeAuthoring.js
@@ -941,12 +941,14 @@ sap.ui.define([
RuntimeAuthoring.prototype._activate = function(sVersionTitle) {
var sLayer = this.getLayer();
var oSelector = this.getRootControlInstance();
+ var sDisplayedVersion = this._oVersionsModel.getProperty("/displayedVersion");
return this._serializeAndSave()
.then(function () {
return VersionsAPI.activate({
layer: sLayer,
control: oSelector,
- title: sVersionTitle
+ title: sVersionTitle,
+ displayedVersion: sDisplayedVersion
});
}).then(function () {
this._showMessageToast("MSG_DRAFT_ACTIVATION_SUCCESS");
|
[INTERNAL][FIX] keep the display version to activate the right version
This fixes activating only dirty changes
Change-Id: Iae8b<I>b<I>e8d<I>a<I>ebf<I>b<I>b<I>b1bf<I>
BCP: <I>
|
SAP_openui5
|
train
|
249cf051fd665caee9470b2809ee65e037e92ccb
|
diff --git a/promised-node-http.js b/promised-node-http.js
index <HASH>..<HASH> 100644
--- a/promised-node-http.js
+++ b/promised-node-http.js
@@ -89,7 +89,7 @@ define(function (require)
if(!def.rejected)
def.reject(e);
}
- return deep.promise(def);
+ return deep.when(def);
}
return requester;
});
\ No newline at end of file
diff --git a/role-controller.js b/role-controller.js
index <HASH>..<HASH> 100644
--- a/role-controller.js
+++ b/role-controller.js
@@ -195,7 +195,7 @@ define(function RoleControllerDefine(require)
response.body = resolved;
def.resolve(response.body);
});
- return deep.promise(def);
+ return deep.when(def);
}
}
diff --git a/stores/files.js b/stores/files.js
index <HASH>..<HASH> 100644
--- a/stores/files.js
+++ b/stores/files.js
@@ -86,7 +86,7 @@ FSStore.prototype = {
else
def.resolve(files);
})
- return deep.promise(def);
+ return deep.when(def);
},
"delete": function(id, options){
// console.log("Remote delete : ", id);
diff --git a/stores/filesystem.js b/stores/filesystem.js
index <HASH>..<HASH> 100644
--- a/stores/filesystem.js
+++ b/stores/filesystem.js
@@ -86,7 +86,7 @@ FSStore.prototype = {
else
def.resolve(files);
})
- return deep.promise(def);
+ return deep.when(def);
},
"delete": function(id, options){
// console.log("Remote delete : ", id);
@@ -197,7 +197,7 @@ var FileSystem = function(options){
});
});
//console.log("before proise return of FSStore get")
- return deep.promise(def);
+ return deep.when(def);
},
post: function(object, directives){
//console.log("fs : post : ", object, directives)
diff --git a/stores/remote-json.js b/stores/remote-json.js
index <HASH>..<HASH> 100644
--- a/stores/remote-json.js
+++ b/stores/remote-json.js
@@ -234,7 +234,7 @@ define(function (require)
else
def.reject(new Error("deep.store.remotejson.bulk failed : "+uri+" - details : "+JSON.stringify(arguments)));
});
- return deep(deep.promise(def))
+ return deep(deep.when(def))
.store(this)
.done(function (success) {
this.range = deep.Chain.range;
diff --git a/uploader-facet.js b/uploader-facet.js
index <HASH>..<HASH> 100644
--- a/uploader-facet.js
+++ b/uploader-facet.js
@@ -470,7 +470,7 @@ define(function (require){
deferred.resolve(JSON.stringify(result))
}
};
- infos.promise = deep.promise(deferred);
+ infos.promise = deep.when(deferred);
}*/
var UploadFacet = {
diff --git a/utils.js b/utils.js
index <HASH>..<HASH> 100755
--- a/utils.js
+++ b/utils.js
@@ -37,7 +37,7 @@ define(function (require)
request.body = null;
def.reject(error);
});
- return deep.promise(def);
+ return deep.when(def);
}();
},
parseAcceptHeader:function (headers)
|
refactor deep.promise in deep.when
|
deepjs_autobahn
|
train
|
1ce51f429fe1d5229ffbd416599218686626485c
|
diff --git a/tests/TestCase/ORM/DomainRulesIntegrationTest.php b/tests/TestCase/ORM/DomainRulesIntegrationTest.php
index <HASH>..<HASH> 100644
--- a/tests/TestCase/ORM/DomainRulesIntegrationTest.php
+++ b/tests/TestCase/ORM/DomainRulesIntegrationTest.php
@@ -62,9 +62,8 @@ class DomainRulesIntegrationTest extends TestCase {
->domainRules()
->add(function (Entity $author, array $options) use ($table) {
$this->assertSame($options['repository'], $table->association('authors')->target());
- $author->errors('name', ['This is an error']);
return false;
- });
+ }, ['errorField' => 'name', 'message' => 'This is an error']);
$this->assertFalse($table->save($entity));
$this->assertTrue($entity->isNew());
@@ -95,9 +94,8 @@ class DomainRulesIntegrationTest extends TestCase {
->target()
->domainRules()
->add(function (Entity $entity) {
- $entity->errors('title', ['Some error']);
return false;
- });
+ }, ['errorField' => 'title', 'message' => 'This is an error']);
$this->assertFalse($table->save($entity));
$this->assertTrue($entity->isNew());
@@ -136,12 +134,8 @@ class DomainRulesIntegrationTest extends TestCase {
->target()
->domainRules()
->add(function (Entity $entity) {
- if ($entity->title !== '1') {
- $entity->errors('title', ['an error']);
- return false;
- }
- return true;
- });
+ return $entity->title === '1';
+ }, ['errorField' => 'title', 'message' => 'This is an error']);
$this->assertFalse($table->save($entity));
$this->assertTrue($entity->isNew());
@@ -183,12 +177,8 @@ class DomainRulesIntegrationTest extends TestCase {
->target()
->domainRules()
->add(function (Entity $article) {
- if (!is_numeric($article->title)) {
- $article->errors('title', ['an error']);
- return false;
- }
- return true;
- });
+ return is_numeric($article->title);
+ }, ['errorField' => 'title', 'message' => 'This is an error']);
$result = $table->save($entity, ['atomic' => false]);
$this->assertSame($entity, $result);
|
Using the error setter for domain rules in the tests
|
cakephp_cakephp
|
train
|
fb3c4a6dcf419155678892f13724eb331180124c
|
diff --git a/tests/test_blackrock.py b/tests/test_blackrock.py
index <HASH>..<HASH> 100644
--- a/tests/test_blackrock.py
+++ b/tests/test_blackrock.py
@@ -5,6 +5,7 @@ def test_blackwing_corruptor():
game = prepare_game()
game.player1.discard_hand()
blackwing1 = game.player1.give("BRM_034")
+ assert not blackwing1.powered_up
blackwing1.play()
assert blackwing1.health == 4
assert game.player1.hero.health == 30
@@ -15,6 +16,7 @@ def test_blackwing_corruptor():
game.player2.discard_hand()
game.player2.give(WHELP)
blackwing2 = game.player2.give("BRM_034")
+ assert blackwing2.powered_up
blackwing2.play(target=game.player1.hero)
assert game.player1.hero.health == 27
diff --git a/tests/test_classic.py b/tests/test_classic.py
index <HASH>..<HASH> 100755
--- a/tests/test_classic.py
+++ b/tests/test_classic.py
@@ -1596,11 +1596,13 @@ def test_imp_master():
def test_kill_command():
game = prepare_game(HUNTER, HUNTER)
kc = game.player1.give("EX1_539")
+ assert not kc.powered_up
kc.play(target=game.player1.opponent.hero)
assert game.player2.hero.health == 30 - 3
game.player1.give(CHICKEN).play()
kc = game.player1.give("EX1_539")
+ assert kc.powered_up
kc.play(target=game.player1.hero)
assert game.player1.hero.health == 30 - 5
diff --git a/tests/test_gvg.py b/tests/test_gvg.py
index <HASH>..<HASH> 100644
--- a/tests/test_gvg.py
+++ b/tests/test_gvg.py
@@ -964,6 +964,7 @@ def test_tinkertown_technician():
game.player1.discard_hand()
game.player1.give(WISP).play()
tech = game.player1.give("GVG_102")
+ assert not tech.powered_up
tech.play()
assert tech.atk == tech.health == 3
assert len(game.player1.hand) == 0
@@ -971,6 +972,7 @@ def test_tinkertown_technician():
dummy = game.player1.give(TARGET_DUMMY)
dummy.play()
tech2 = game.player1.give("GVG_102")
+ assert tech2.powered_up
tech2.play()
assert tech2.atk == tech2.health == 4
assert len(game.player1.hand) == 1
|
Add powered_up assertions to various tests
|
jleclanche_fireplace
|
train
|
4c1bb9dc76f715c4106c21c3803ac7db43fd5f33
|
diff --git a/openquake/hazardlib/gsim/douglas_stochastic_2013.py b/openquake/hazardlib/gsim/douglas_stochastic_2013.py
index <HASH>..<HASH> 100644
--- a/openquake/hazardlib/gsim/douglas_stochastic_2013.py
+++ b/openquake/hazardlib/gsim/douglas_stochastic_2013.py
@@ -17,7 +17,7 @@
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
-Module exports
+Module exports
:class:`DouglasEtAl2013StochasticSD001Q200K005`
:class:`DouglasEtAl2013StochasticSD001Q200K020`
:class:`DouglasEtAl2013StochasticSD001Q200K040`
@@ -141,6 +141,9 @@ class DouglasEtAl2013StochasticSD001Q200K005(GMPE):
#: The required distance parameter is hypocentral distance
REQUIRES_DISTANCES = {'rhypo'}
+ #: Definined for a reference velocity of 1100 m/s (Table 4)
+ DEFINED_FOR_REFERENCE_VELOCITY = 1100.0
+
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
|
Adds reference velocity to Douglas (<I>) GMPEs
|
gem_oq-engine
|
train
|
33851d07626e5800d18e2c213145b86664515884
|
diff --git a/Controller/CRUDController.php b/Controller/CRUDController.php
index <HASH>..<HASH> 100644
--- a/Controller/CRUDController.php
+++ b/Controller/CRUDController.php
@@ -104,7 +104,7 @@ class CRUDController extends Controller
$rootAdmin->setRequest($request);
if ($request->get('uniqid')) {
- $rootAdmin->setUniqid($request->get('uniqid'));
+ $this->admin->setUniqid($request->get('uniqid'));
}
}
|
Fix regression with uniqid
|
sonata-project_SonataAdminBundle
|
train
|
37ca9bb4eb9454b5633ca5ed08ace1a0ef38ecc5
|
diff --git a/gxa/src/main/java/uk/ac/ebi/atlas/experimentpage/baseline/download/ProteomicsBaselineExperimentDownloadController.java b/gxa/src/main/java/uk/ac/ebi/atlas/experimentpage/baseline/download/ProteomicsBaselineExperimentDownloadController.java
index <HASH>..<HASH> 100644
--- a/gxa/src/main/java/uk/ac/ebi/atlas/experimentpage/baseline/download/ProteomicsBaselineExperimentDownloadController.java
+++ b/gxa/src/main/java/uk/ac/ebi/atlas/experimentpage/baseline/download/ProteomicsBaselineExperimentDownloadController.java
@@ -1,5 +1,6 @@
package uk.ac.ebi.atlas.experimentpage.baseline.download;
+import uk.ac.ebi.atlas.profiles.baseline.BaselineProfileInputStreamFactory;
import uk.ac.ebi.atlas.trader.ExperimentTrader;
import uk.ac.ebi.atlas.web.ProteomicsBaselineRequestPreferences;
import org.springframework.context.annotation.Scope;
@@ -9,7 +10,6 @@ import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import uk.ac.ebi.atlas.experimentpage.baseline.BaselineExperimentPageController;
-import uk.ac.ebi.atlas.profiles.baseline.ProteomicsBaselineProfileInputStreamFactory;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
@@ -26,11 +26,12 @@ public class ProteomicsBaselineExperimentDownloadController extends BaselineExpe
private final BaselineExperimentDownloadService<ProteomicsBaselineRequestPreferences> baselineExperimentDownloadService;
@Inject
- public ProteomicsBaselineExperimentDownloadController(ProteomicsBaselineProfileInputStreamFactory inputStreamFactory,
- BaselineProfilesWriterServiceFactory
- baselineProfilesWriterServiceFactory,ExperimentTrader experimentTrader) {
- this.baselineExperimentDownloadService = new BaselineExperimentDownloadService<>(inputStreamFactory,
- baselineProfilesWriterServiceFactory,experimentTrader);
+ public ProteomicsBaselineExperimentDownloadController(
+ BaselineProfileInputStreamFactory baselineProfileInputStreamFactory,
+ BaselineProfilesWriterServiceFactory baselineProfilesWriterServiceFactory,
+ ExperimentTrader experimentTrader) {
+ this.baselineExperimentDownloadService = new BaselineExperimentDownloadService<>(
+ baselineProfileInputStreamFactory, baselineProfilesWriterServiceFactory,experimentTrader);
}
@RequestMapping(value = "/experiments/{experimentAccession}.tsv", params = PARAMS_TYPE_PROTEOMICS_BASELINE)
|
Forgot to include this in 3b<I>d<I>da<I>de<I>d<I>f<I>ab4ba<I>b
(cherry picked from commit f2ccd<I>)
|
ebi-gene-expression-group_atlas
|
train
|
8e675b1c6e500f6154343a18edc408b13c6d0585
|
diff --git a/build/webpack.prod.conf.js b/build/webpack.prod.conf.js
index <HASH>..<HASH> 100644
--- a/build/webpack.prod.conf.js
+++ b/build/webpack.prod.conf.js
@@ -79,25 +79,25 @@ var webpackConfig = merge(baseWebpackConfig, {
chunksSortMode: 'dependency'
}),
// split vendor js into its own file
- new webpack.optimize.CommonsChunkPlugin({
- name: 'vendor',
- minChunks: function (module, count) {
- // any required modules inside node_modules are extracted to vendor
- return (
- module.resource &&
- /\.js$/.test(module.resource) &&
- module.resource.indexOf(
- path.join(__dirname, '../node_modules')
- ) === 0
- )
- }
- }),
+ // new webpack.optimize.CommonsChunkPlugin({
+ // name: 'vendor',
+ // minChunks: function (module, count) {
+ // // any required modules inside node_modules are extracted to vendor
+ // return (
+ // module.resource &&
+ // /\.js$/.test(module.resource) &&
+ // module.resource.indexOf(
+ // path.join(__dirname, '../node_modules')
+ // ) === 0
+ // )
+ // }
+ // }),
// extract webpack runtime and module manifest to its own file in order to
// prevent vendor hash from being updated whenever app bundle is updated
- new webpack.optimize.CommonsChunkPlugin({
- name: 'manifest',
- chunks: ['vendor']
- }),
+ // new webpack.optimize.CommonsChunkPlugin({
+ // name: 'manifest',
+ // chunks: ['vendor']
+ // }),
// copy custom static assets
new CopyWebpackPlugin([
{
diff --git a/styleguide.config.js b/styleguide.config.js
index <HASH>..<HASH> 100644
--- a/styleguide.config.js
+++ b/styleguide.config.js
@@ -46,9 +46,9 @@ if (process.env.NODE_ENV === 'development') {
module.exports = {
webpackConfig: webpConfig,
- require: [
- './src/css/main.postcss',
- ],
+ // require: [
+ // './src/css/main.postcss',
+ // ],
ignore: [
'**/examples/**',
'**/testing/**', // ignore e2e testing example
@@ -90,9 +90,14 @@ module.exports = {
components: 'src/compositions/**/*.vue',
},
],
- // dangerouslyUpdateWebpackConfig(webpackConfig) {
- // // WARNING: inspect Vue Styleguidist Webpack config before modifying it,
- // // otherwise you may break Styleguidist
- // console.log(webpackConfig);
- // },
+ dangerouslyUpdateWebpackConfig(webpackConfig) {
+ // WARNING: inspect Vue Styleguidist Webpack config before modifying it,
+ // otherwise you may break Styleguidist
+ console.log(webpackConfig);
+ const newConfig = webpackConfig;
+ newConfig.output.filename = '[name].bundle.js';
+ newConfig.output.chunkFilename = 'chunk.[chunkhash:6].js';
+ newConfig.output.publicPath = '/js/chunk/';
+ return newConfig;
+ },
};
|
Disable code splitting for docs
|
rei_rei-cedar
|
train
|
fa3163a973bb495dae4ef4ed551654ba693436f2
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -6,6 +6,18 @@ from setuptools import find_packages
import weka
+CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))
+
+def get_reqs(*fns):
+ lst = []
+ for fn in fns:
+ for package in open(os.path.join(CURRENT_DIR, fn)).readlines():
+ package = package.strip()
+ if not package:
+ continue
+ lst.append(package.strip())
+ return lst
+
setup(name='weka',
version=weka.__version__,
description='A Python wrapper for the Weka data mining library.',
@@ -28,4 +40,5 @@ setup(name='weka',
"Topic :: Text Processing :: General",
],
platforms=['OS Independent'],
+ install_requires=get_reqs('pip-requirements.txt'),
)
diff --git a/weka/__init__.py b/weka/__init__.py
index <HASH>..<HASH> 100644
--- a/weka/__init__.py
+++ b/weka/__init__.py
@@ -1,2 +1,2 @@
-VERSION = (1, 0, 0)
+VERSION = (1, 0, 1)
__version__ = '.'.join(map(str, VERSION))
|
Fixed setup.py to include required dependencies.
|
chrisspen_weka
|
train
|
e12b4923e6ffe1ef1c5d759943344199dd88dd90
|
diff --git a/rules/es6.js b/rules/es6.js
index <HASH>..<HASH> 100644
--- a/rules/es6.js
+++ b/rules/es6.js
@@ -34,7 +34,7 @@ module.exports = {
ignoreReadBeforeAssign: true,
} ],
'prefer-destructuring': [ 'error', {
- array: true,
+ array: false,
object: true,
}, {
enforceForRenamedProperties: false,
|
turn off prefer-destructuring for arrays
|
riophae_eslint-config-riophae
|
train
|
d9f1df0ecfd0b6fcc4a0c890422769e7e837a8c4
|
diff --git a/cmd/influxd/run/command.go b/cmd/influxd/run/command.go
index <HASH>..<HASH> 100644
--- a/cmd/influxd/run/command.go
+++ b/cmd/influxd/run/command.go
@@ -14,7 +14,6 @@ import (
"time"
"github.com/BurntSushi/toml"
- "github.com/influxdata/influxdb"
)
const logo = `
@@ -95,13 +94,8 @@ func (cmd *Command) Run(args ...string) error {
return fmt.Errorf("apply env config: %v", err)
}
- // If we have a node ID, ignore the join argument
- // We are not using the reference to this node var, just checking
- // to see if we have a node ID on disk
- if node, _ := influxdb.LoadNode(config.Meta.Dir, []string{config.Meta.HTTPBindAddress}); node == nil || node.ID == 0 {
- if options.Join != "" {
- config.Meta.JoinPeers = strings.Split(options.Join, ",")
- }
+ if options.Join != "" {
+ config.Meta.JoinPeers = strings.Split(options.Join, ",")
}
// Validate the configuration.
diff --git a/services/meta/client.go b/services/meta/client.go
index <HASH>..<HASH> 100644
--- a/services/meta/client.go
+++ b/services/meta/client.go
@@ -843,7 +843,6 @@ func (c *Client) JoinMetaServer(httpAddr, tcpAddr string) error {
// Something failed, try the next node
currentServer++
}
- return nil
}
func (c *Client) CreateMetaNode(httpAddr, tcpAddr string) (*NodeInfo, error) {
diff --git a/services/meta/store.go b/services/meta/store.go
index <HASH>..<HASH> 100644
--- a/services/meta/store.go
+++ b/services/meta/store.go
@@ -11,7 +11,6 @@ import (
"sync"
"time"
- "github.com/davecgh/go-spew/spew"
"github.com/influxdata/influxdb/services/meta/internal"
"github.com/gogo/protobuf/proto"
@@ -86,7 +85,6 @@ func (s *store) open(raftln net.Listener) error {
c := NewClient(joinPeers, s.config.HTTPSEnabled)
for {
peers := c.peers()
- spew.Dump(peers)
if len(s.config.JoinPeers)-len(peers) == 0 {
initializePeers = peers
break
@@ -96,31 +94,19 @@ func (s *store) open(raftln net.Listener) error {
time.Sleep(time.Second)
}
}
- initializePeers = append(initializePeers, s.raftAddr)
- if err := func() error {
- s.mu.Lock()
- defer s.mu.Unlock()
-
- // Check if store has already been opened.
- if s.opened {
- return ErrStoreOpen
- }
- s.opened = true
-
- // Create the root directory if it doesn't already exist.
- if err := os.MkdirAll(s.path, 0777); err != nil {
- return fmt.Errorf("mkdir all: %s", err)
- }
+ if err := s.setOpen(); err != nil {
+ return err
+ }
- // Open the raft store.
- if err := s.openRaft(initializePeers, raftln); err != nil {
- return fmt.Errorf("raft: %s", err)
- }
+ // Create the root directory if it doesn't already exist.
+ if err := os.MkdirAll(s.path, 0777); err != nil {
+ return fmt.Errorf("mkdir all: %s", err)
+ }
- return nil
- }(); err != nil {
- return err
+ // Open the raft store.
+ if err := s.openRaft(initializePeers, raftln); err != nil {
+ return fmt.Errorf("raft: %s", err)
}
// Wait for a leader to be elected so we know the raft log is loaded
@@ -162,6 +148,17 @@ func (s *store) open(raftln net.Listener) error {
return nil
}
+func (s *store) setOpen() error {
+ s.mu.Lock()
+ defer s.mu.Unlock()
+ // Check if store has already been opened.
+ if s.opened {
+ return ErrStoreOpen
+ }
+ s.opened = true
+ return nil
+}
+
// peers returns the raft peers known to this store
func (s *store) peers() []string {
if s.raftState == nil {
@@ -207,6 +204,8 @@ func (s *store) filterAddr(addrs []string, filter string) ([]string, error) {
}
func (s *store) openRaft(initializePeers []string, raftln net.Listener) error {
+ s.mu.Lock()
+ defer s.mu.Unlock()
rs := newRaftState(s.config, s.raftAddr)
rs.logger = s.logger
rs.path = s.path
|
sane cluster starting with join args
|
influxdata_influxdb
|
train
|
f9289cd7ac17b35f28184f25fdc9204189fd480a
|
diff --git a/packages/ember-runtime/lib/compare.js b/packages/ember-runtime/lib/compare.js
index <HASH>..<HASH> 100644
--- a/packages/ember-runtime/lib/compare.js
+++ b/packages/ember-runtime/lib/compare.js
@@ -109,7 +109,18 @@ export default function compare(v, w) {
return spaceship(v, w);
case 'string':
- return spaceship(v.localeCompare(w), 0);
+ // We are comparing Strings using operators instead of `String#localeCompare`
+ // because of unexpected behavior for certain edge cases.
+ // For example `'Z'.localeCompare('a')` returns `1`.
+ //
+ // See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/localeCompare#Description
+ if (v < w) {
+ return -1;
+ } else if (v === w) {
+ return 0;
+ }
+
+ return 1;
case 'array':
var vLen = v.length;
diff --git a/packages/ember-runtime/tests/core/compare_test.js b/packages/ember-runtime/tests/core/compare_test.js
index <HASH>..<HASH> 100644
--- a/packages/ember-runtime/tests/core/compare_test.js
+++ b/packages/ember-runtime/tests/core/compare_test.js
@@ -74,4 +74,8 @@ QUnit.test('comparables should return values in the range of -1, 0, 1', function
equal(compare('a', negOne), 1, 'Second item comparable - returns -1 (negated)');
equal(compare('b', zero), 0, 'Second item comparable - returns 0 (negated)');
equal(compare('c', one), -1, 'Second item comparable - returns 1 (negated)');
+
+ equal(compare('A', 'Z'), -1, `'A' < 'Z' returns -1`);
+ equal(compare('Z', 'a'), -1, `'Z' < 'a' returns -1`);
+ equal(compare('a', 'z'), -1, `'a' < 'z' returns -1`);
});
|
[BUGFIX canary] Update Ember.compare to use operators
instead of String#localeCompare
There are edge cases where String#localeCompare behaves unexpectedly,
so, we'll use relational operators instead to compare Strings.
Closes #<I>
|
emberjs_ember.js
|
train
|
ce473897a30e05ab20284b9c4fd3dc1d8664d1fc
|
diff --git a/brozzler/__init__.py b/brozzler/__init__.py
index <HASH>..<HASH> 100644
--- a/brozzler/__init__.py
+++ b/brozzler/__init__.py
@@ -258,7 +258,7 @@ def jinja2_environment(behaviors_dir=None):
'js-templates'))
else:
_loader=jinja2.PackageLoader('brozzler', 'js-templates')
- _jinja2_env = jinja2.Environment(loader=_loader)
+ _jinja2_env = jinja2.Environment(loader=_loader, auto_reload=False)
_jinja2_env.filters['json'] = json.dumps
return _jinja2_env
|
Disable Jinja2 template auto_reload for higher performance
Every time we run a JS behavior, we load a Jinja2 template.
By default, Jinja2 has option `auto_reload=True`. This mean that
every time a template is requested the loader checks if the source file changed
and if yes, it will reload the template. For higher performance it’s possible
to disable that.
Also note that Jinja caches <I> templates by default.
Ref: <URL>
|
internetarchive_brozzler
|
train
|
b7078069c591a4e727d8735b0b262e20eb00a7a0
|
diff --git a/packages/jss/.size-snapshot.json b/packages/jss/.size-snapshot.json
index <HASH>..<HASH> 100644
--- a/packages/jss/.size-snapshot.json
+++ b/packages/jss/.size-snapshot.json
@@ -1,30 +1,30 @@
{
"dist/jss.js": {
- "bundled": 61836,
- "minified": 22817,
- "gzipped": 6879
+ "bundled": 61869,
+ "minified": 22827,
+ "gzipped": 6889
},
"dist/jss.min.js": {
- "bundled": 60459,
- "minified": 22048,
- "gzipped": 6519
+ "bundled": 60492,
+ "minified": 22059,
+ "gzipped": 6533
},
"dist/jss.cjs.js": {
- "bundled": 56577,
- "minified": 24717,
- "gzipped": 6872
+ "bundled": 56608,
+ "minified": 24744,
+ "gzipped": 6885
},
"dist/jss.esm.js": {
- "bundled": 56045,
- "minified": 24282,
- "gzipped": 6782,
+ "bundled": 56076,
+ "minified": 24309,
+ "gzipped": 6795,
"treeshaked": {
"rollup": {
- "code": 20045,
+ "code": 20054,
"import_statements": 352
},
"webpack": {
- "code": 21512
+ "code": 21521
}
}
}
diff --git a/packages/jss/src/plugins/fontFaceRule.js b/packages/jss/src/plugins/fontFaceRule.js
index <HASH>..<HASH> 100644
--- a/packages/jss/src/plugins/fontFaceRule.js
+++ b/packages/jss/src/plugins/fontFaceRule.js
@@ -30,18 +30,20 @@ export class FontFaceRule implements BaseRule {
if (Array.isArray(this.style)) {
let str = ''
for (let index = 0; index < this.style.length; index++) {
- str += toCss(this.key, this.style[index])
+ str += toCss(this.at, this.style[index])
if (this.style[index + 1]) str += '\n'
}
return str
}
- return toCss(this.key, this.style, options)
+ return toCss(this.at, this.style, options)
}
}
+const keyRegExp = /@font-face/
+
export default {
onCreateRule(key: string, style: JssStyle, options: RuleOptions): FontFaceRule | null {
- return key === '@font-face' ? new FontFaceRule(key, style, options) : null
+ return keyRegExp.test(key) ? new FontFaceRule(key, style, options) : null
}
}
diff --git a/packages/jss/tests/integration/rules.js b/packages/jss/tests/integration/rules.js
index <HASH>..<HASH> 100644
--- a/packages/jss/tests/integration/rules.js
+++ b/packages/jss/tests/integration/rules.js
@@ -259,7 +259,7 @@ describe('Integration: rules', () => {
})
describe('@font-face rule', () => {
- function checkSingle() {
+ it('should return CSS', () => {
const rule = jss.createRule('@font-face', {
'font-family': 'MyHelvetica',
src: 'local("Helvetica")'
@@ -272,23 +272,19 @@ describe('Integration: rules', () => {
src: local("Helvetica");
}
`)
- }
+ })
- function checkMulti(options) {
- const rule = jss.createRule(
- '@font-face',
- [
- {
- 'font-family': 'MyHelvetica',
- src: 'local("Helvetica")'
- },
- {
- 'font-family': 'MyComicSans',
- src: 'local("ComicSans")'
- }
- ],
- options
- )
+ it('should handle when @font-face is an array', () => {
+ const rule = jss.createRule('@font-face', [
+ {
+ 'font-family': 'MyHelvetica',
+ src: 'local("Helvetica")'
+ },
+ {
+ 'font-family': 'MyComicSans',
+ src: 'local("ComicSans")'
+ }
+ ])
expect(rule.type).to.be('font-face')
expect(rule.key).to.be('@font-face')
expect(rule.toString()).to.be(stripIndent`
@@ -301,14 +297,28 @@ describe('Integration: rules', () => {
src: local("ComicSans");
}
`)
- }
-
- it('should return CSS', () => {
- checkSingle()
})
- it('should handle multiple font-faces', () => {
- checkMulti()
+ it('should handle multiple @font-face', () => {
+ const sheet = jss.createStyleSheet()
+ sheet.addRule('@font-face', {
+ 'font-family': 'MyHelvetica',
+ src: 'local("Helvetica")'
+ })
+ sheet.addRule('@font-face', {
+ 'font-family': 'MyComicSans',
+ src: 'local("ComicSans")'
+ })
+ expect(sheet.toString()).to.be(stripIndent`
+ @font-face {
+ font-family: MyHelvetica;
+ src: local("Helvetica");
+ }
+ @font-face {
+ font-family: MyComicSans;
+ src: local("ComicSans");
+ }
+ `)
})
})
|
Fix multiple .addRule calls with font-face (#<I>)
* Fix multiple @font-face
* keep key as a uniq identifier, use this.at for serialization
* no need for extraction into separate functions
|
cssinjs_jss
|
train
|
6c2830fca01a6c95c6bba6fae3db0416f45d247e
|
diff --git a/Swat/SwatTreeFlydown.php b/Swat/SwatTreeFlydown.php
index <HASH>..<HASH> 100644
--- a/Swat/SwatTreeFlydown.php
+++ b/Swat/SwatTreeFlydown.php
@@ -51,10 +51,12 @@ class SwatTreeFlydown extends SwatFlydown
if (!$this->visible)
return;
- // temporarily encode the path into the value for parent::display()
+ // temporarily set the value to the path for parent::display()
$actual_value = $this->value;
- $this->value = implode('/', $this->path);
+ $this->value = $this->path;
+
parent::display();
+
$this->value = $actual_value;
}
@@ -84,27 +86,27 @@ class SwatTreeFlydown extends SwatFlydown
/**
* Flattens this flydown's tree into an array of flydown options
*
- * The tree is represented by placing spaces in front of nodes on different
- * levels.
+ * The tree is represented by placing spaces in front of option titles for
+ * different levels. The values of the options are set to an array
+ * representing the tree nodes's paths in the tree.
*
* @param array $options a reference to an array to add the flattened tree
* nodes to.
* @param SwatTreeFlydownNode $node the tree node to flatten.
* @param integer $level the current level of recursion.
- * @param string $path the current path represented as a string of tree
- * node option values separated by forward slashes.
+ * @param array $path the current path represented as an array of tree
+ * node option values.
*/
private function flattenTree(&$options, SwatTreeFlydownNode $node,
- $level = 0, $path = '')
+ $level = 0, $path = array())
{
- $tree_option = clone $node->getFlydownOption();
+ $tree_option = clone $node->getOption();
+
$pad = str_repeat(' ', $level * 3);
- $tree_option->title = $pad.$tree_option->title;
+ $path[] = $tree_option->value;
- if (strlen($path) > 0)
- $path.= '/'.$tree_option->value;
- else
- $path = $tree_option->value;
+ $tree_option->title = $pad.$tree_option->title;
+ $tree_option->value = $path;
$options[] = $tree_option;
@@ -142,7 +144,7 @@ class SwatTreeFlydown extends SwatFlydown
if ($this->value === null) {
$this->path = array();
} else {
- $this->path = explode('/', $this->value);
+ $this->path = $this->value;
$this->value = end($this->path);
}
}
|
- Fix option generation in flatten nodes to use the path
- Don't attempt to flatten the path to a string. SwatFlydown will properly
encode array types so use them everywhere.
- Updated docs.
svn commit r<I>
|
silverorange_swat
|
train
|
099b608dac9f53db15b3bec13433cbaea5926711
|
diff --git a/src/base.js b/src/base.js
index <HASH>..<HASH> 100644
--- a/src/base.js
+++ b/src/base.js
@@ -23,9 +23,9 @@ define(
"type": "any",
"required": false,
"repeating": true
- }],
- "implementation": superConstructor
- })
+ }]
+ }),
+ superConstructor
);
Base.method(
@@ -37,9 +37,9 @@ define(
"type": "arguments|array",
"name": "args",
"required": false
- }],
- "implementation": superConstructorApply
- })
+ }]
+ }),
+ superConstructorApply
);
Base.method(
@@ -55,9 +55,9 @@ define(
"required": false,
"repeating": true
}],
- "implementation": superInvoke,
"returns": "any"
- })
+ }),
+ superInvoke
);
Base.method(
@@ -69,9 +69,9 @@ define(
"name": "args",
"required": false
}],
- "implementation": superApply,
"returns": "any"
- })
+ }),
+ superApply
);
Base.method(
@@ -86,9 +86,9 @@ define(
"required": false,
"repeating": true
}],
- "implementation": invokeFunction,
"returns": "any"
- })
+ }),
+ invokeFunction
);
Base.method(
@@ -103,9 +103,9 @@ define(
"required": false,
"repeating": true
}],
- "implementation": invokeMethod,
"returns": "any"
- })
+ }),
+ invokeMethod
);
Base.method(
@@ -121,9 +121,9 @@ define(
"required": false,
"repeating": true
}],
- "implementation": proxyFunction,
"returns": "function"
- })
+ }),
+ proxyFunction
);
Base.method(
@@ -139,9 +139,9 @@ define(
"required": false,
"repeating": true
}],
- "implementation": proxyMethod,
"returns": "function"
- })
+ }),
+ proxyMethod
);
Base.prototype._super = {};
diff --git a/src/singleton.js b/src/singleton.js
index <HASH>..<HASH> 100644
--- a/src/singleton.js
+++ b/src/singleton.js
@@ -9,24 +9,26 @@ define(
var Singleton = Class(
meta({
"name": "Singleton",
- "extends": Base,
- "init": function () {
- this.invoke(addInstance);
- this.invoke(attachStaticGetInstance);
- }
- })
+ "extends": Base
+ }),
+ constructor
);
- //Singleton.extends(Base);
+ Singleton.method(
+ meta({
+ "entity": "method",
+ "for": "Singleton",
+ "static": true,
+ "name": "getInstance",
+ "description": "Useful for getting the singleton instance from class constructor."
+ }),
+ getInstance
+ );
- Singleton.method(meta({
- "entity": "method",
- "for": "Singleton",
- "static": true,
- "name": "getInstance",
- "description": "Useful for getting the singleton instance from class constructor.",
- "implementation": getInstance
- }));
+ function constructor () {
+ this.invoke(addInstance);
+ this.invoke(attachStaticGetInstance);
+ }
function addInstance () {
var constructor = this.constructor;
|
update method signatures to new method api signature (options, implementation)
|
bob-gray_solv
|
train
|
8a65d4b6ff118febebfe2c2fedf034935d3786bf
|
diff --git a/lib/suby.rb b/lib/suby.rb
index <HASH>..<HASH> 100644
--- a/lib/suby.rb
+++ b/lib/suby.rb
@@ -1,10 +1,10 @@
+gem 'rubyzip2'
+require 'zip'
+
require_relative 'suby/filename_parser'
require_relative 'suby/downloader'
require_relative 'suby/interface'
-gem 'rubyzip2'
-require 'zip'
-
module Suby
NotFoundError = Class.new StandardError
DownloaderError = Class.new StandardError
|
reorder require: external before internal
|
eregon_suby
|
train
|
64f3eb130d83a579680d90fd67652d2b39ffbc4e
|
diff --git a/Cake/Test/TestCase/Database/QueryTest.php b/Cake/Test/TestCase/Database/QueryTest.php
index <HASH>..<HASH> 100644
--- a/Cake/Test/TestCase/Database/QueryTest.php
+++ b/Cake/Test/TestCase/Database/QueryTest.php
@@ -1953,10 +1953,29 @@ class QueryTest extends TestCase {
$this->assertContains('SELECT', $sql);
$this->assertContains('FROM', $sql);
$this->assertContains('WHERE', $sql);
- $this->assertEquals('FOR UPDATE', substr($sql, -10));
+ $this->assertEquals(' FOR UPDATE', substr($sql, -11));
}
/**
+ * Test that append() will actually append a string to a select query
+ *
+ * @return void
+ */
+ public function testAppendInsert() {
+ $query = new Query($this->connection);
+ $sql = $query
+ ->insert('articles', ['id', 'title'])
+ ->values([1, 'a title'])
+ ->append('RETURNING id')
+ ->sql();
+ $this->assertContains('INSERT', $sql);
+ $this->assertContains('INTO', $sql);
+ $this->assertContains('VALUES', $sql);
+ $this->assertEquals(' RETURNING id', substr($sql, -13));
+ }
+
+
+/**
* Assertion for comparing a table's contents with what is in it.
*
* @param string $table
|
Unit testing append when combined with insert
|
cakephp_cakephp
|
train
|
47db62bf8dc380d31f1c106a2cb39b18b7cbf36f
|
diff --git a/ci/ci_build.rb b/ci/ci_build.rb
index <HASH>..<HASH> 100755
--- a/ci/ci_build.rb
+++ b/ci/ci_build.rb
@@ -119,9 +119,8 @@ puts "[CruiseControl] #{`mysql --version`}"
puts "[CruiseControl] #{`pg_config --version`}"
puts "[CruiseControl] SQLite3: #{`sqlite3 -version`}"
`gem env`.each_line {|line| print "[CruiseControl] #{line}"}
-# Commented until bundler supports --list again
-# puts "[CruiseControl] Bundled gems:"
-# `gem bundle --list`.each_line {|line| print "[CruiseControl] #{line}"}
+puts "[CruiseControl] Bundled gems:"
+`bundle show`.each_line {|line| print "[CruiseControl] #{line}"}
puts "[CruiseControl] Local gems:"
`gem list`.each_line {|line| print "[CruiseControl] #{line}"}
|
CI: show bundled gems
|
rails_rails
|
train
|
2fa62cb419ac2c95a6ca88c9304d03bd13080ac6
|
diff --git a/core/src/main/java/com/redhat/lightblue/client/Query.java b/core/src/main/java/com/redhat/lightblue/client/Query.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/redhat/lightblue/client/Query.java
+++ b/core/src/main/java/com/redhat/lightblue/client/Query.java
@@ -186,8 +186,8 @@ public class Query extends Expression
* { field: <field>, regex: <pattern>, caseInsensitive: <caseInsensitive>, ... }
* </pre>
*/
- public static Query withValue(String field, Literal value, boolean caseInsensitive) {
- return regex(field, escape(value.node.asText()), caseInsensitive, false, false, false);
+ public static Query withValue(String field, String value, boolean caseInsensitive) {
+ return regex(field, escapeRegExPattern(value), caseInsensitive, false, false, false);
}
/**
@@ -203,15 +203,6 @@ public class Query extends Expression
/**
* <pre>
- * { field: <field>, regex: <pattern>, caseInsensitive: <caseInsensitive>, ... }
- * </pre>
- */
- public static Query withValue(String field, Object value, boolean caseInsensitive) {
- return withValue(field,Literal.value(value), caseInsensitive);
- }
-
- /**
- * <pre>
* { field: <field>, op: <op>, rvalue: <value> }
* </pre>
*/
@@ -397,19 +388,6 @@ public class Query extends Expression
/**
* <pre>
- * { field: <field>, op: <in/nin>, values: [ values ] }
- * </pre>
- */
- public static Query withValues(String field, boolean caseInsensitive, Literal...values) {
- List<String> sValues = new ArrayList<String>();
- for (Literal value : values){
- sValues.add(escape(value.node.asText()));
- }
- return regex(field, StringUtils.join(sValues, "|"), caseInsensitive, false, false, false);
- }
-
- /**
- * <pre>
* { field: <field>, op: <in/nin>, rfield: <rfield> }
* </pre>
*/
@@ -527,7 +505,7 @@ public class Query extends Expression
private static final String ESCAPECHARS=".^$*+?()[{\\|";
- public static String escape(String s) {
+ public static String escapeRegExPattern(String s) {
StringBuilder bld = new StringBuilder();
int n = s.length();
for (int i = 0; i < n; i++) {
diff --git a/lightblue-client-integration-test/src/test/java/com/redhat/lightblue/client/integration/test/CountryDAOTest.java b/lightblue-client-integration-test/src/test/java/com/redhat/lightblue/client/integration/test/CountryDAOTest.java
index <HASH>..<HASH> 100644
--- a/lightblue-client-integration-test/src/test/java/com/redhat/lightblue/client/integration/test/CountryDAOTest.java
+++ b/lightblue-client-integration-test/src/test/java/com/redhat/lightblue/client/integration/test/CountryDAOTest.java
@@ -131,65 +131,6 @@ public class CountryDAOTest extends AbstractLightblueClientCRUDController {
countries = data.parseProcessed(Country[].class);
assertEquals(0, countries.length);
-
- // ---
-
- request = new DataFindRequest(Country.objectType, Country.objectVersion);
- request.select(Projection.includeField("*"));
- request.where(Query.withValues("iso2Code", true, Literal.values("pl", "PL")));
-
- data = client.data(request);
- countries = data.parseProcessed(Country[].class);
-
- assertEquals(1, countries.length);
-
- // ---
-
- request = new DataFindRequest(Country.objectType, Country.objectVersion);
- request.select(Projection.includeField("*"));
- request.where(Query.withValues("iso2Code", false, Literal.values("pl", "PL")));
-
- data = client.data(request);
- countries = data.parseProcessed(Country[].class);
-
- assertEquals(1, countries.length);
-
- // ---
-
- Country c = new Country();
- c.setName("Russia");
- c.setIso2Code("RS");
- c.setIso3Code("RUS");
-
- DataInsertRequest insertRequest = new DataInsertRequest(Country.objectType, Country.objectVersion);
-
- insertRequest.create(c);
-
- client.data(insertRequest);
-
- request = new DataFindRequest(Country.objectType, Country.objectVersion);
- request.select(Projection.includeField("*"));
- request.where(Query.withValues("iso2Code", false, Literal.values("rs", "PL")));
-
- data = client.data(request);
- countries = data.parseProcessed(Country[].class);
-
- assertEquals(1, countries.length);
- assertEquals("PL", countries[0].getIso2Code());
-
- // ---
-
- request = new DataFindRequest(Country.objectType, Country.objectVersion);
- request.select(Projection.includeField("*"));
- request.where(Query.withValues("iso2Code", true, Literal.values("rs", "PL")));
-
- data = client.data(request);
- countries = data.parseProcessed(Country[].class);
-
- assertEquals(2, countries.length);
- assertEquals("PL", countries[0].getIso2Code());
- assertEquals("RS", countries[1].getIso2Code());
-
}
@Test
|
Cleaned up case insensitive APIs
|
lightblue-platform_lightblue-client
|
train
|
6d30a524c9f016df28a0897cf9e00c0c2cc08cfc
|
diff --git a/push/src/main/java/org/jboss/aerogear/unifiedpush/message/sender/GCMPushNotificationSender.java b/push/src/main/java/org/jboss/aerogear/unifiedpush/message/sender/GCMPushNotificationSender.java
index <HASH>..<HASH> 100644
--- a/push/src/main/java/org/jboss/aerogear/unifiedpush/message/sender/GCMPushNotificationSender.java
+++ b/push/src/main/java/org/jboss/aerogear/unifiedpush/message/sender/GCMPushNotificationSender.java
@@ -95,7 +95,7 @@ public class GCMPushNotificationSender {
cleanupInvalidRegistrationIDsForVariant(androidVariant.getVariantID(), multicastResult, registrationIDs);
} catch (IllegalArgumentException e) {
- logger.warning("Error connection to your GCM project. Double check your Google API Key");
+ logger.log(Level.WARNING, "Error connection to your GCM project. Double check your Google API Key");
} catch (Exception e) {
// general GCM exceptions:
logger.log(Level.SEVERE, "Error sending messages to GCM server", e);
|
small logging improvements as suggested by dbevenius
|
aerogear_aerogear-unifiedpush-server
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.