hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
152fa4755a359a5d047a3b611d0bf95f677cc790
|
diff --git a/code/VersionedDataObjectDetailsForm.php b/code/VersionedDataObjectDetailsForm.php
index <HASH>..<HASH> 100644
--- a/code/VersionedDataObjectDetailsForm.php
+++ b/code/VersionedDataObjectDetailsForm.php
@@ -83,6 +83,8 @@ class VersionedDataObjectDetailsForm_ItemRequest extends GridFieldDetailForm_Ite
}
}
+ $this->extend("updateItemEditForm", $form);
+
VersionedReadingMode::restoreOriginalReadingMode();
return $form;
|
Added ability to manipulate the edit form
Update VersionedDataObjectDetailsForm.php
Adjusting to use updateItemEditForm to be inline with parent classes api
|
heyday_silverstripe-versioneddataobjects
|
train
|
820e2c4e957376c8c1c06d5a7cc1a34b241dc451
|
diff --git a/src/test/java/com/restfb/FacebookClientTest.java b/src/test/java/com/restfb/FacebookClientTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/restfb/FacebookClientTest.java
+++ b/src/test/java/com/restfb/FacebookClientTest.java
@@ -33,8 +33,11 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
+import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
import com.restfb.WebRequestor.Response;
import com.restfb.exception.FacebookJsonMappingException;
@@ -321,23 +324,15 @@ class FacebookClientTest {
});
}
- @Test
- void deleteObjectReturnsJson() {
- FacebookClient facebookClient = facebookClientWithResponse(new Response(200, "{\"success\":true}"));
+ @ParameterizedTest
+ @MethodSource("responseProvider")
+ void deleteObjectReturns(String responseBody) {
+ FacebookClient facebookClient = facebookClientWithResponse(new Response(200, responseBody));
assertThat(facebookClient.deleteObject("12345")).isTrue();
}
- @Test
- void deleteObjectReturnsJsonGreetingMessengerPlatform() {
- FacebookClient facebookClient =
- facebookClientWithResponse(new Response(200, "{\"result\":\"Successfully deleted greeting\"}"));
- assertThat(facebookClient.deleteObject("12345")).isTrue();
- }
-
- @Test
- void deleteObjectReturnsText() {
- FacebookClient facebookClient = facebookClientWithResponse(new Response(200, "true"));
- assertThat(facebookClient.deleteObject("12345")).isTrue();
+ private static Stream<String> responseProvider() {
+ return Stream.of("{\"success\":true}", "{\"result\":\"Successfully deleted greeting\"}", "true");
}
@Test
diff --git a/src/test/java/com/restfb/scope/ScopeBuilderTest.java b/src/test/java/com/restfb/scope/ScopeBuilderTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/restfb/scope/ScopeBuilderTest.java
+++ b/src/test/java/com/restfb/scope/ScopeBuilderTest.java
@@ -30,20 +30,20 @@ class ScopeBuilderTest {
@Test
void noPermission() {
ScopeBuilder s = new ScopeBuilder();
- assertThat(s.toString()).isEqualTo("public_profile");
+ assertThat(s).hasToString("public_profile");
}
@Test
void noPublicProfilePermission() {
ScopeBuilder s = new ScopeBuilder(true);
- assertThat(s.toString()).isEqualTo("");
+ assertThat(s.toString()).isEmpty();
}
@Test
void singlePermission() {
ScopeBuilder s = new ScopeBuilder();
s.addPermission(FacebookPermissions.USER_GENDER);
- assertThat(s.toString()).isEqualTo("public_profile,user_gender");
+ assertThat(s).hasToString("public_profile,user_gender");
}
@Test
@@ -51,7 +51,7 @@ class ScopeBuilderTest {
ScopeBuilder s = new ScopeBuilder();
s.addPermission(FacebookPermissions.USER_GENDER);
s.addPermission(FacebookPermissions.USER_AGE_RANGE);
- assertThat(s.toString()).isEqualTo("public_profile,user_gender,user_age_range");
+ assertThat(s).hasToString("public_profile,user_gender,user_age_range");
}
}
diff --git a/src/test/java/com/restfb/types/UrlPayloadTest.java b/src/test/java/com/restfb/types/UrlPayloadTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/restfb/types/UrlPayloadTest.java
+++ b/src/test/java/com/restfb/types/UrlPayloadTest.java
@@ -33,7 +33,7 @@ class UrlPayloadTest {
void checkToString() {
MediaAttachment attachment = new MediaAttachment(MediaAttachment.Type.IMAGE, "exampleUrl");
String toStringAttachment = "MediaAttachment[payload=UrlPayload[isReusable=null url=exampleUrl] type=image]";
- assertThat(attachment.toString()).isEqualTo(toStringAttachment);
+ assertThat(attachment).hasToString(toStringAttachment);
}
@Test
|
NoIssue: minor cleanups
|
restfb_restfb
|
train
|
bee4cb38c299872cd3a8c7d250721cc2218fd2a9
|
diff --git a/src/main/java/com/buschmais/jqassistant/plugin/rdbms/impl/scanner/SchemaScannerPlugin.java b/src/main/java/com/buschmais/jqassistant/plugin/rdbms/impl/scanner/SchemaScannerPlugin.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/buschmais/jqassistant/plugin/rdbms/impl/scanner/SchemaScannerPlugin.java
+++ b/src/main/java/com/buschmais/jqassistant/plugin/rdbms/impl/scanner/SchemaScannerPlugin.java
@@ -50,9 +50,9 @@ import schemacrawler.utility.SchemaCrawlerUtility;
import com.buschmais.jqassistant.core.scanner.api.Scanner;
import com.buschmais.jqassistant.core.scanner.api.Scope;
import com.buschmais.jqassistant.core.store.api.Store;
+import com.buschmais.jqassistant.plugin.common.api.model.PropertyDescriptor;
import com.buschmais.jqassistant.plugin.common.api.scanner.AbstractScannerPlugin;
import com.buschmais.jqassistant.plugin.common.api.scanner.filesystem.FileResource;
-import com.buschmais.jqassistant.plugin.java.api.model.PropertyDescriptor;
import com.buschmais.jqassistant.plugin.java.api.model.PropertyFileDescriptor;
import com.buschmais.jqassistant.plugin.java.impl.scanner.PropertyFileScannerPlugin;
import com.buschmais.jqassistant.plugin.rdbms.api.model.BaseColumnDescriptor;
|
Added profile activation. Added asciidoc. Completed PomIT. #<I>
|
buschmais_jqa-rdbms-plugin
|
train
|
a6b7deb4a6467a8179d68408bc16a490c0d2fde7
|
diff --git a/internetarchive/cli/ia_upload.py b/internetarchive/cli/ia_upload.py
index <HASH>..<HASH> 100644
--- a/internetarchive/cli/ia_upload.py
+++ b/internetarchive/cli/ia_upload.py
@@ -153,6 +153,8 @@ def main(argv, session):
# Make sure the collection being uploaded to exists.
collection_id = args['--metadata'].get('collection')
if collection_id and not args['--no-collection-check'] and not args['--status-check']:
+ if isinstance(collection_id, list):
+ collection_id = collection_id[0]
collection = session.get_item(collection_id)
if not collection.exists:
sys.stderr.write(
|
Fixed bug in ``ia upload`` where all commands would fail if multiple
collections were specified.
|
jjjake_internetarchive
|
train
|
f75856d400862084d0ef1e47d8799adf8c048b65
|
diff --git a/inspire_matcher/core.py b/inspire_matcher/core.py
index <HASH>..<HASH> 100644
--- a/inspire_matcher/core.py
+++ b/inspire_matcher/core.py
@@ -37,6 +37,9 @@ def compile(query, record, collections=None, match_deleted=False):
def _compile_filters(query, collections, match_deleted):
+ if not query:
+ return None
+
if match_deleted and not collections:
return query
diff --git a/tests/test_core.py b/tests/test_core.py
index <HASH>..<HASH> 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -518,3 +518,14 @@ def test_compile_with_collections():
}
assert expected == result
+
+
+def test_compile_returns_none_if_empty_inner():
+ query = {
+ 'type': 'exact',
+ 'path': 'dummy.path',
+ 'search_path': 'dummy.search.path',
+ }
+ record = {}
+
+ assert compile(query, record) is None
|
core: correctly handle empty queries
Previously, if the inner query was empty and filters needed to be added,
an exception was triggered. This commit fixes it by checking whether the
inner query is non-empty before adding the filters.
|
inspirehep_inspire-matcher
|
train
|
fa0d6f8126dae4e3858681b28856054de2ad3478
|
diff --git a/src/main/java/org/whitesource/agent/dependency/resolver/ViaMultiModuleAnalyzer.java b/src/main/java/org/whitesource/agent/dependency/resolver/ViaMultiModuleAnalyzer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/whitesource/agent/dependency/resolver/ViaMultiModuleAnalyzer.java
+++ b/src/main/java/org/whitesource/agent/dependency/resolver/ViaMultiModuleAnalyzer.java
@@ -19,6 +19,8 @@ public class ViaMultiModuleAnalyzer {
private static final String APP_PATH = "AppPath";
private static final String DEPENDENCY_MANAGER_PATH = "DependencyManagerFilePath";
private static final String PROJECT_FOLDER_PATH = "ProjectFolderPath";
+ private static final String DEFAULT_NAME = "defaultName";
+ private static final String ALT_NAME = "altName";
/* --- Members --- */
@@ -84,15 +86,15 @@ public class ViaMultiModuleAnalyzer {
}
bufferedWriter.write(replaceAllSlashes(appPathProperty));
bufferedWriter.write(System.lineSeparator());
- bufferedWriter.write(replaceAllSlashes("defaultName" + Constants.EQUALS + parentFileName));
+ bufferedWriter.write(replaceAllSlashes(DEFAULT_NAME + counter + Constants.EQUALS + parentFileName));
bufferedWriter.write(System.lineSeparator());
if (folderNameCounter.get(parentFileName) == null){
folderNameCounter.put(parentFileName,0);
- bufferedWriter.write(replaceAllSlashes("altName" + Constants.EQUALS + parentFileName));
+ bufferedWriter.write(replaceAllSlashes(ALT_NAME + counter + Constants.EQUALS + parentFileName));
} else {
int i = folderNameCounter.get(parentFileName) + 1;
folderNameCounter.put(parentFileName,i);
- bufferedWriter.write(replaceAllSlashes("altName" + Constants.EQUALS + parentFileName + i));
+ bufferedWriter.write(replaceAllSlashes(ALT_NAME + counter + Constants.EQUALS + parentFileName + i));
}
bufferedWriter.write(System.lineSeparator());
counter++;
|
WSE-<I> - improved output of FSA
|
whitesource_fs-agent
|
train
|
920abf958e2098a3c3195feaf0ba1c1615b07444
|
diff --git a/lib/neovim/event_loop.rb b/lib/neovim/event_loop.rb
index <HASH>..<HASH> 100644
--- a/lib/neovim/event_loop.rb
+++ b/lib/neovim/event_loop.rb
@@ -34,7 +34,7 @@ module Neovim
@connection = connection
@serializer = Serializer.new
@message_builder = MessageBuilder.new
- @message_writers = []
+ @write_queue = []
end
def stop
@@ -47,15 +47,15 @@ module Neovim
end
def request(method, *args, &response_handler)
- enqueue_rpc_writer(:request, method, args, response_handler)
+ @write_queue.push([:request, method, args, response_handler])
end
def respond(request_id, return_value, error)
- enqueue_rpc_writer(:response, request_id, return_value, error)
+ @write_queue.push([:response, request_id, return_value, error])
end
def notify(method, *args)
- enqueue_rpc_writer(:notification, method, args)
+ @write_queue.push([:notification, method, args])
end
def run(&callback)
@@ -65,8 +65,12 @@ module Neovim
break if !@running
break if @shutdown
- while writer = @message_writers.shift
- writer.call
+ while write_args = @write_queue.shift
+ @message_builder.write(*write_args) do |arr|
+ @serializer.write(arr) do |bytes|
+ @connection.write(bytes)
+ end
+ end
end
@connection.read do |bytes|
@@ -96,19 +100,5 @@ module Neovim
end
end
end
-
- private
-
- def enqueue_rpc_writer(type, *args)
- @message_writers << Proc.new do
- debug("writing rpc #{type} #{args}")
-
- @message_builder.write(type, *args) do |arr|
- @serializer.write(arr) do |bytes|
- @connection.write(bytes)
- end
- end
- end
- end
end
end
|
Inline async writing, don't use procs
|
neovim_neovim-ruby
|
train
|
597917e35e5c7458e7d521e97ff2ca27bef13b0b
|
diff --git a/aiohttp/client.py b/aiohttp/client.py
index <HASH>..<HASH> 100644
--- a/aiohttp/client.py
+++ b/aiohttp/client.py
@@ -399,10 +399,7 @@ class ClientSession:
if timeout is sentinel:
real_timeout = self._timeout # type: ClientTimeout
else:
- if not isinstance(timeout, ClientTimeout):
- real_timeout = ClientTimeout(total=timeout) # type: ignore[arg-type]
- else:
- real_timeout = timeout
+ real_timeout = timeout # type: ignore[assignment]
# timeout is cumulative for all request operations
# (request, redirects, responses, data consuming)
tm = TimeoutHandle(self._loop, real_timeout.total)
|
Drop dead code, float timeout support was removed (#<I>)
|
aio-libs_aiohttp
|
train
|
f26c68f4302d21cffab999c9346733382340c927
|
diff --git a/django_payzen/tools.py b/django_payzen/tools.py
index <HASH>..<HASH> 100644
--- a/django_payzen/tools.py
+++ b/django_payzen/tools.py
@@ -81,6 +81,6 @@ def is_signature_valid(post_args):
vads_args = [arg for arg in post_args if arg.startswith("vads_")]
signature_str = ""
for key in sorted(vads_args):
- signature_str += post_args[key][0] + "+"
+ signature_str += post_args[key] + "+"
signature_str += app_settings.VADS_CERTIFICATE
- return hashlib.sha1(signature_str).hexdigest() == post_args["signature"][0]
+ return hashlib.sha1(signature_str).hexdigest() == post_args["signature"]
diff --git a/django_payzen/views.py b/django_payzen/views.py
index <HASH>..<HASH> 100644
--- a/django_payzen/views.py
+++ b/django_payzen/views.py
@@ -23,14 +23,7 @@ class ResponseView(generic.View):
"Django-Payzen : Response signature detected as invalid")
return http.HttpResponse()
# Payzen data is checked and valid
- filtered_data = {}
- for key, value in request.POST.items():
- if value:
- if (isinstance(value, list) and len(value)):
- filtered_data.update({key: value[0]})
- else:
- filtered_data.update({key: value})
- form = forms.PaymentResponseForm(filtered_data)
+ form = forms.PaymentResponseForm(request.POST)
if form.is_valid():
response = form.save()
logger.info("Django-Payzen : Transaction {} response received !"
|
Bugfix in checking integrity data in payzen response.
|
bsvetchine_django-payzen
|
train
|
79774f2181e5bc5c287c238b34ede0b732abf91a
|
diff --git a/admin/views/accounts/edit/inc-meta.php b/admin/views/accounts/edit/inc-meta.php
index <HASH>..<HASH> 100644
--- a/admin/views/accounts/edit/inc-meta.php
+++ b/admin/views/accounts/edit/inc-meta.php
@@ -92,7 +92,7 @@
$field['error'] = implode(' ', ${'upload_error_' . $field['key']});
}
- echo form_field_mm($field);
+ echo form_field_cdn_object_picker($field);
break;
case 'string':
|
Using `form_field_cdn_object_picker()` instead of `form_field_mm()`
|
nails_module-auth
|
train
|
81e1c170189103ef33d276e6c99e92ada360ed62
|
diff --git a/squad/http.py b/squad/http.py
index <HASH>..<HASH> 100644
--- a/squad/http.py
+++ b/squad/http.py
@@ -26,16 +26,23 @@ def auth(func, mode=AuthMode.READ):
def auth_wrapper(*args, **kwargs):
request = args[0]
group_slug = args[1]
- project_slug = args[2]
-
group = get_object_or_404(models.Group, slug=group_slug)
- project = get_object_or_404(group.projects, slug=project_slug)
-
request.group = group
+
+ user = request.user
+
+ if len(args) < 3:
+ # no project, authenticate against group only
+ if mode == AuthMode.READ or group.writable_by(user):
+ return func(*args, **kwargs)
+ else:
+ raise PermissionDenied()
+
+ project_slug = args[2]
+ project = get_object_or_404(group.projects, slug=project_slug)
request.project = project
tokenkey = request.META.get('HTTP_AUTH_TOKEN', None)
- user = request.user
token = None
if tokenkey:
try:
|
squad.http: short circuit group-only authentication
This makes it possible to require authentication for locations where
there is no project, e.g. a page for a specific group, but not for any
of its projects.
|
Linaro_squad
|
train
|
1129226ba8b1a02bc5c2a11e48a97db151f0d0ff
|
diff --git a/src/security.js b/src/security.js
index <HASH>..<HASH> 100644
--- a/src/security.js
+++ b/src/security.js
@@ -22,7 +22,6 @@ module.exports = (skin) => {
if (!secret || secret.length < 15) {
secret = createNewSecret()
- skin.logger.warn('current secret wasn\'t safe enough, created a new one')
}
const adminPassword = process.env.SKIN_ADMIN_PASSWORD ||
|
removed warning when secret doesn't exist
|
botpress_botpress
|
train
|
4cccae1d2c3f1e3c3b191c827e8049f5a036a293
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,7 @@
* [#7439](https://github.com/rubocop-hq/rubocop/issues/7439): Make `Style/FormatStringToken` ignore percent escapes (`%%`). ([@buehmann][])
* [#7438](https://github.com/rubocop-hq/rubocop/issues/7438): Fix assignment edge-cases in `Layout/MultilineAssignmentLayout`. ([@gsamokovarov][])
+* [#7449](https://github.com/rubocop-hq/rubocop/pull/7449): Make `Style/IfUnlessModifier` respect `rubocop:disable` comments for `Metrics/LineLength`. ([@jonas054][])
## 0.75.1 (2019-10-14)
diff --git a/lib/rubocop/cop/style/if_unless_modifier.rb b/lib/rubocop/cop/style/if_unless_modifier.rb
index <HASH>..<HASH> 100644
--- a/lib/rubocop/cop/style/if_unless_modifier.rb
+++ b/lib/rubocop/cop/style/if_unless_modifier.rb
@@ -70,8 +70,15 @@ module RuboCop
return false unless max_line_length
range = node.source_range
- range.first_line == range.last_line &&
- range.last_column > max_line_length
+ return false unless range.first_line == range.last_line
+ return false unless line_length_enabled_at_line?(range.first_line)
+
+ range.last_column > max_line_length
+ end
+
+ def line_length_enabled_at_line?(line)
+ processed_source.comment_config
+ .cop_enabled_at_line?('Metrics/LineLength', line)
end
def named_capture_in_condition?(node)
diff --git a/spec/rubocop/cop/style/if_unless_modifier_spec.rb b/spec/rubocop/cop/style/if_unless_modifier_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/rubocop/cop/style/if_unless_modifier_spec.rb
+++ b/spec/rubocop/cop/style/if_unless_modifier_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe RuboCop::Cop::Style::IfUnlessModifier do
end
end
- context 'when Metrics/LineLength is disabled' do
+ context 'when Metrics/LineLength is disabled in configuration' do
let(:line_length_config) { { 'Enabled' => false, 'Max' => 80 } }
it 'accepts' do
@@ -47,6 +47,18 @@ RSpec.describe RuboCop::Cop::Style::IfUnlessModifier do
RUBY
end
end
+
+ context 'when Metrics/LineLength is disabled with a comment' do
+ it 'accepts' do
+ expect_no_offenses(<<~RUBY)
+ def f
+ # rubocop:disable Metrics/LineLength
+ #{source}
+ # rubocop:enable Metrics/LineLength
+ end
+ RUBY
+ end
+ end
end
context 'multiline if that fits on one line' do
|
Make IfUnlessModifier respect rubocop:disable comments
When Style/IfUnlessModifier decides that a line on modifier form is too long
and that it should be written on normal form, it must respect comments that
disable the Metrics/LineLength cop for the line in question. If that cop is
disabled, then there can be no offense.
|
rubocop-hq_rubocop
|
train
|
843f5ccd3afa326f47d6edfa409580a1b9df60da
|
diff --git a/includes/functions/functions_edit.php b/includes/functions/functions_edit.php
index <HASH>..<HASH> 100644
--- a/includes/functions/functions_edit.php
+++ b/includes/functions/functions_edit.php
@@ -94,7 +94,7 @@ function select_edit_control_inline($name, $values, $empty, $selected, $extra=''
(array_key_exists($selected, $values) ? htmlspecialchars($values[$selected]) : '').
'</span>' .
WT_JS_START .
- 'jQuery("#' . $name . '").editable("' . WT_SERVER_NAME . WT_SCRIPT_PATH . 'save.php", {type:"select", data:' . json_encode($values) . ', submit:"' . i18n::translate('OK') . '", style:"inherit", callback:function(value, settings) {$(this).innerHTML=settings.data[value];} })' .
+ 'jQuery("#' . $name . '").editable("' . WT_SERVER_NAME . WT_SCRIPT_PATH . 'save.php", {type:"select", data:' . json_encode($values) . ', submit:"' . i18n::translate('OK') . '", style:"inherit", callback:function(value, settings) {jQuery(this).innerHTML=settings.data[value];} })' .
WT_JS_END;
}
|
Fix: function $() clashes in different libraries.
|
fisharebest_webtrees
|
train
|
839f872faa4b422f853d9af259524c4537c1cded
|
diff --git a/test/katex-spec.js b/test/katex-spec.js
index <HASH>..<HASH> 100644
--- a/test/katex-spec.js
+++ b/test/katex-spec.js
@@ -2415,7 +2415,7 @@ describe("An aligned environment", function() {
});
it("should not eat the last row when its first cell is empty", function() {
- const ae = getParsed("\\begin{aligned}&E_1 & (1)\\\\&E_2 & (2)\\\\E_3 & (3)\\end{aligned}")[0];
+ const ae = getParsed("\\begin{aligned}&E_1 & (1)\\\\&E_2 & (2)\\\\&E_3 & (3)\\end{aligned}")[0];
expect(ae.value.body.length).toBe(3);
});
});
|
Fix linefeed test (#<I>)
I must have been asleep when I wrote the tests for PR#<I>.
|
KaTeX_KaTeX
|
train
|
7d252fb215cf7afb9153ab5109d4a565ee4a7c50
|
diff --git a/rectangle/rectangle.py b/rectangle/rectangle.py
index <HASH>..<HASH> 100644
--- a/rectangle/rectangle.py
+++ b/rectangle/rectangle.py
@@ -3,6 +3,9 @@ import itertools as it
import numpy as np
+__all__ = ['Rect']
+
+
class Rect:
"""
|
Added missing "all" line.
|
NeilGirdhar_rectangle
|
train
|
ebbdcb3c7ee9bc1ab7a2bff93eb89609c3ecfcf9
|
diff --git a/src/pyokit/datastruct/genomicInterval.py b/src/pyokit/datastruct/genomicInterval.py
index <HASH>..<HASH> 100755
--- a/src/pyokit/datastruct/genomicInterval.py
+++ b/src/pyokit/datastruct/genomicInterval.py
@@ -284,7 +284,8 @@ def bucketIterator(elements, buckets):
((previous.chrom > current.chrom) or
((previous.chrom == current.chrom) and
(previous.start > current.start))):
- raise GenomicIntervalError("not sorted")
+ raise GenomicIntervalError("elements not sorted. Saw " +
+ str(previous) + " before " + str(current))
def updateOpen(openHeap, elementIterator, bucketChrom,
bucketStart, bucketEnd):
@@ -346,7 +347,8 @@ def bucketIterator(elements, buckets):
if prevBucket is not None and ((bucket.chrom < prevBucket.chrom) or
(bucket.chrom == prevBucket.chrom and
bucket.start < prevBucket.start)):
- raise GenomicIntervalError("not sorted")
+ raise GenomicIntervalError("regions-of-interest are not sorted. Saw " +
+ str(prevBucket) + " before " + str(bucket))
updateOpen(openElems, elementIter, bucket.chrom, bucket. start, bucket.end)
# be careful here not to leak a reference to the heap; if the caller
|
improved exception messages in bucket iterator
|
pjuren_pyokit
|
train
|
1f00968385f7f1a51f3ccb1b7dd631d58f298522
|
diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -37,7 +37,7 @@
"homepage": "https://github.com/ikr/translator-couch",
"dependencies": {
"blueimp-md5": "~1.1.0",
- "messageformat": "git+https://github.com/SlexAxton/messageformat.js.git#56c548efb12fd8420ec13300bdcb1ebdcaba86eb",
+ "messageformat": "~0.1.8",
"couch-js-devkit": "~1.0.0",
"cradle": "~0.6.6",
"async": "~0.2.10",
diff --git a/src/compileJs.js b/src/compileJs.js
index <HASH>..<HASH> 100644
--- a/src/compileJs.js
+++ b/src/compileJs.js
@@ -3,7 +3,7 @@
(function () {
'use strict';
- module.exports = function (getRow, MessageFormat, language, pluralFunc) {
+ module.exports = function (getRow, MessageFormat, language, pluralFunc, messageformatIncludeJs) {
var js = '',
mf = new MessageFormat(language, pluralFunc),
row,
@@ -54,6 +54,7 @@
'(function (g) {',
'var MessageFormat = {locale: {}};',
'MessageFormat.locale.' + language + ' = ' + pluralFunc.toString() + ';',
+ messageformatIncludeJs,
'g.i18n = {};',
js,
'})(window);'
diff --git a/src/schema.js b/src/schema.js
index <HASH>..<HASH> 100644
--- a/src/schema.js
+++ b/src/schema.js
@@ -17,6 +17,11 @@
'utf8'
),
+ messageformatIncludeJs = fs.readFileSync(
+ __dirname + '/../node_modules/messageformat/lib/messageformat.include.js',
+ 'utf8'
+ ),
+
langToPluralFuncMap = fs.readdirSync(
__dirname + '/../node_modules/messageformat/locale'
).map(function (langFileName) {
@@ -49,6 +54,7 @@
locale: 'module.exports = ' + JSON.stringify(locale) + ';',
language: 'module.exports = ' + JSON.stringify(language(locale)) + ';',
messageformat: messageformatJs,
+ messageformatIncludeJs: 'module.exports = ' + JSON.stringify(messageformatIncludeJs) + ';',
pluralFunc: langToPluralFuncMap[language(locale)],
compileJs: devkit.couchModuleText(compileJs),
compilePo: devkit.couchModuleText(compilePo)
@@ -104,9 +110,16 @@
var MessageFormat = require('lib/messageformat'),
language = require('lib/language'),
pluralFunc = require('lib/pluralFunc'),
+ messageformatIncludeJs = require('lib/messageformatIncludeJs'),
compileJs = require('lib/compileJs');
- return compileJs(getRow, MessageFormat, language, pluralFunc);
+ return compileJs(
+ getRow,
+ MessageFormat,
+ language,
+ pluralFunc,
+ messageformatIncludeJs
+ );
});
},
diff --git a/tests/compileJs.test.js b/tests/compileJs.test.js
index <HASH>..<HASH> 100644
--- a/tests/compileJs.test.js
+++ b/tests/compileJs.test.js
@@ -1,7 +1,7 @@
/* jshint nomen:false */
/* jshint evil:true */
-describe('compileJs()', function () {
+describe('compileJs', function () {
'use strict';
var fs = require('fs'),
@@ -21,6 +21,11 @@ describe('compileJs()', function () {
return MessageFormat.locale[language];
},
+ messageformatIncludeJs = fs.readFileSync(
+ __dirname + '/../node_modules/messageformat/lib/messageformat.include.js',
+ 'utf8'
+ ),
+
compileJs = require('../src/compileJs'),
stubGetRow = function () {
@@ -55,7 +60,15 @@ describe('compileJs()', function () {
beforeEach(function () {
var window = {};
- eval(compileJs(stubGetRow(), MessageFormat, 'de', pluralFunc('de')));
+
+ eval(compileJs(
+ stubGetRow(),
+ MessageFormat,
+ 'de',
+ pluralFunc('de'),
+ messageformatIncludeJs
+ ));
+
delete MessageFormat.locale.de;
i18n = window.i18n;
});
|
Upgrade and adapt to the new MessageFormat version
|
ikr_translator-couch
|
train
|
c10f574d7e426fb821e0d3b9886dc35b75d33c45
|
diff --git a/uaa/src/test/java/org/cloudfoundry/identity/uaa/mock/token/JwtBearerGrantMockMvcTests.java b/uaa/src/test/java/org/cloudfoundry/identity/uaa/mock/token/JwtBearerGrantMockMvcTests.java
index <HASH>..<HASH> 100644
--- a/uaa/src/test/java/org/cloudfoundry/identity/uaa/mock/token/JwtBearerGrantMockMvcTests.java
+++ b/uaa/src/test/java/org/cloudfoundry/identity/uaa/mock/token/JwtBearerGrantMockMvcTests.java
@@ -118,21 +118,21 @@ public class JwtBearerGrantMockMvcTests extends AbstractTokenMockMvcTests {
Map<String, Object> originUserClaims = JwtTokenUtils.getClaimsForToken(accessTokenForOriginZoneUser);
//Verify values for new shadow user set
- ScimUser originShadowUser = getScimUser(originUser.getEmails().get(0).getValue(), originZoneOriginKey, targetZone.getId());
- assertEquals(originShadowUser.getUserName(), originUserClaims.get("user_name"));
- assertEquals(originShadowUser.getExternalId(), originUser.getId());
+ ScimUser shadowUser = getScimUser(originUser.getEmails().get(0).getValue(), originZoneOriginKey, targetZone.getId());
+ assertEquals(shadowUser.getUserName(), originUserClaims.get("user_name"));
+ assertEquals(shadowUser.getExternalId(), originUser.getId());
//JWT Bearer with token from target Zone and external User
performJWTBearerGrantForJWT(targetZone, accessTokenForOriginZoneUser);
//Verify username and External ID not changed after this internal grant
- ScimUser originShadowUserAfterExchange = getScimUser(originUser.getEmails().get(0).getValue(), originZoneOriginKey, targetZone.getId());
- assertEquals(originShadowUser.getUserName(), originShadowUserAfterExchange.getUserName());
- assertEquals(originShadowUser.getExternalId(), originShadowUserAfterExchange.getExternalId());
+ ScimUser shadowUserAfterExchange = getScimUser(originUser.getEmails().get(0).getValue(), originZoneOriginKey, targetZone.getId());
+ assertEquals(shadowUser.getUserName(), shadowUserAfterExchange.getUserName());
+ assertEquals(shadowUser.getExternalId(), shadowUserAfterExchange.getExternalId());
}
@Test
- void default_zone_jwt_grant_user_update_same_zone_with_registration() throws Exception {
+ void non_default_zone_jwt_grant_user_update_same_zone_with_registration() throws Exception {
BaseClientDetails targetZoneClient = new BaseClientDetails(generator.generate(), "", "openid", "password",
null);
targetZoneClient.setClientSecret(SECRET);
|
Rename variable and test case for clarity
|
cloudfoundry_uaa
|
train
|
859c834e111b28f0fd68686c826606bdb43e778c
|
diff --git a/python2/pyinotify.py b/python2/pyinotify.py
index <HASH>..<HASH> 100755
--- a/python2/pyinotify.py
+++ b/python2/pyinotify.py
@@ -1632,6 +1632,10 @@ class AsyncioNotifier(Notifier):
threshold, timeout)
loop.add_reader(self._fd, self.handle_read)
+ def stop(self):
+ self.loop.remove_reader(self._fd)
+ Notifier.stop(self)
+
def handle_read(self, *args, **kwargs):
self.read_events()
self.process_events()
diff --git a/python3/pyinotify.py b/python3/pyinotify.py
index <HASH>..<HASH> 100755
--- a/python3/pyinotify.py
+++ b/python3/pyinotify.py
@@ -1622,6 +1622,10 @@ class AsyncioNotifier(Notifier):
threshold, timeout)
loop.add_reader(self._fd, self.handle_read)
+ def stop(self):
+ self.loop.remove_reader(self._fd)
+ Notifier.stop(self)
+
def handle_read(self, *args, **kwargs):
self.read_events()
self.process_events()
|
Implement stop method in AsyncioNotifier
|
seb-m_pyinotify
|
train
|
29791eb4258b74027b9eec500c3393ad12755d4b
|
diff --git a/src/main/java/org/apache/groovy/util/concurrentlinkedhashmap/ConcurrentLinkedHashMap.java b/src/main/java/org/apache/groovy/util/concurrentlinkedhashmap/ConcurrentLinkedHashMap.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/apache/groovy/util/concurrentlinkedhashmap/ConcurrentLinkedHashMap.java
+++ b/src/main/java/org/apache/groovy/util/concurrentlinkedhashmap/ConcurrentLinkedHashMap.java
@@ -779,8 +779,8 @@ public final class ConcurrentLinkedHashMap<K, V> extends AbstractMap<K, V>
Node<K, V> node = objectHolder.getObject();
if (null == node) {
- V value = null;
- final int weight = 1; // weigher.weightOf(key, value);
+ V value = prior.getValue();
+ final int weight = weigher.weightOf(key, value);
final WeightedValue<V> weightedValue = new WeightedValue<V>(value, weight);
node = new Node<K, V>(key, weightedValue);
} else {
|
Refine `ConcurrentLinkedHashMap` to create node accurately
|
apache_groovy
|
train
|
ea08f38cf280ab1ca9d37ee770d6f6101411173e
|
diff --git a/Controller/TemplateController.php b/Controller/TemplateController.php
index <HASH>..<HASH> 100755
--- a/Controller/TemplateController.php
+++ b/Controller/TemplateController.php
@@ -19,6 +19,7 @@ namespace CampaignChain\Campaign\TemplateBundle\Controller;
use CampaignChain\Campaign\TemplateBundle\Validator\TemplateValidator;
use CampaignChain\CoreBundle\EntityService\CampaignService;
+use CampaignChain\CoreBundle\EntityService\HookService;
use Symfony\Bundle\FrameworkBundle\Controller\Controller;
use CampaignChain\CoreBundle\Entity\Campaign;
use Symfony\Component\HttpFoundation\JsonResponse;
@@ -84,9 +85,11 @@ class TemplateController extends Controller
// We need the campaign ID for storing the hooks. Hence we must flush here.
$em->flush();
+ /** @var HookService $hookService */
$hookService = $this->get('campaignchain.core.hook');
- $campaign = $hookService->processHooks(static::BUNDLE_NAME, static::MODULE_IDENTIFIER, $campaign, $form,
+ $hookService->processHooks(static::BUNDLE_NAME, static::MODULE_IDENTIFIER, $campaign, $form,
true);
+ $campaign = $hookService->getEntity();
$hookService = $this->get('campaignchain.core.hook');
$campaign->setTriggerHook(
@@ -159,8 +162,10 @@ class TemplateController extends Controller
'id' => $campaign->getId(),
));
} else {
+ /** @var HookService $hookService */
$hookService = $this->get('campaignchain.core.hook');
- $campaign = $hookService->processHooks(static::BUNDLE_NAME, static::MODULE_IDENTIFIER, $campaign, $form);
+ $hookService->processHooks(static::BUNDLE_NAME, static::MODULE_IDENTIFIER, $campaign, $form);
+ $campaign = $hookService->getEntity();
$em->persist($campaign);
$em->flush();
@@ -236,9 +241,10 @@ class TemplateController extends Controller
$em->persist($campaign);
+ /** @var HookService $hookService */
$hookService = $this->get('campaignchain.core.hook');
$hookService->processHooks(static::BUNDLE_NAME, static::MODULE_IDENTIFIER, $campaign, $data);
-
+ $campaign = $hookService->getEntity();
$em->flush();
$responseData['start_date'] = $campaign->getStartDate()->format(\DateTime::ISO8601);
|
CampaignChain/campaignchain#<I> Don't allow to change campaign start or end date beyond first action
|
CampaignChain_campaign-template
|
train
|
62a59ffe6d397fcfc65aa0f1f7d50b530ff88098
|
diff --git a/lib/transforms/bundleRelations.js b/lib/transforms/bundleRelations.js
index <HASH>..<HASH> 100644
--- a/lib/transforms/bundleRelations.js
+++ b/lib/transforms/bundleRelations.js
@@ -6,7 +6,7 @@ var _ = require('underscore'),
module.exports = function (queryObj, options) {
options = options || {};
- var bundleStrategyName = options.bundleStrategyName || 'oneBundlePerIncludingAsset';
+ var bundleStrategyName = options.strategyName || 'oneBundlePerIncludingAsset';
return function bundleRelations(assetGraph) {
function getDiscriminatorForRelation(relation) {
@@ -20,7 +20,7 @@ module.exports = function (queryObj, options) {
return discriminatorFragments.join(':');
}
- // Internal helper function. Reuses the parse trees of existing assets, so be careful!
+ // Reuses the parse trees of existing assets, so be careful!
function makeBundle(assetsToBundle) {
if (assetsToBundle.length === 0) {
throw new Error('makeBundle: Bundle must contain at least one asset');
|
bundleRelations: Fixed sharedBundles strategy.
|
assetgraph_assetgraph
|
train
|
3f5750faa0884145b4c99842d24cc05fe9bf530b
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,7 @@ Changelog
=========
## 0.2.7 (2013-xx-xx)
+* Fix: Polish oauth error detection to cover cases from i.e. Facebook resource owner
* Fix: Changed authorization url for Vkontakte resource owner
## 0.2.6 (2013-06-24)
diff --git a/Security/Http/Firewall/OAuthListener.php b/Security/Http/Firewall/OAuthListener.php
index <HASH>..<HASH> 100644
--- a/Security/Http/Firewall/OAuthListener.php
+++ b/Security/Http/Firewall/OAuthListener.php
@@ -96,6 +96,9 @@ class OAuthListener extends AbstractAuthenticationListener
}
/**
+ * Detects errors returned by resource owners and transform them into
+ * human readable messages
+ *
* @param Request $request
*
* @throws AuthenticationException
@@ -105,7 +108,11 @@ class OAuthListener extends AbstractAuthenticationListener
$error = null;
// Try to parse content if error was not in request query
- if ($request->query->has('error')) {
+ if ($request->query->has('error') || $request->query->has('error_code')) {
+ if ($request->query->has('error_message')) {
+ throw new AuthenticationException(rawurldecode($request->query->get('error_message')));
+ }
+
$content = json_decode($request->getContent(), true);
if (JSON_ERROR_NONE === json_last_error() && isset($content['error'])) {
if (isset($content['error']['message'])) {
@@ -141,45 +148,33 @@ class OAuthListener extends AbstractAuthenticationListener
// "translate" error to human readable format
switch ($errorCode) {
case 'access_denied':
- $error = 'You have refused access for this site.';
- break;
+ return 'You have refused access for this site.';
case 'authorization_expired':
- $error = 'Authorization expired.';
- break;
+ return 'Authorization expired.';
case 'bad_verification_code':
- $error = 'Bad verification code.';
- break;
+ return 'Bad verification code.';
case 'consumer_key_rejected':
- $error = 'You have refused access for this site.';
- break;
+ return 'You have refused access for this site.';
case 'incorrect_client_credentials':
- $error = 'Incorrect client credentials.';
- break;
+ return 'Incorrect client credentials.';
case 'invalid_assertion':
- $error = 'Invalid assertion.';
- break;
+ return 'Invalid assertion.';
case 'redirect_uri_mismatch':
- $error = 'Redirect URI mismatches configured one.';
- break;
+ return 'Redirect URI mismatches configured one.';
case 'unauthorized_client':
- $error = 'Unauthorized client.';
- break;
+ return 'Unauthorized client.';
case 'unknown_format':
- $error = 'Unknown format.';
- break;
-
- default:
- $error = sprintf('Unknown OAuth error: "%s".', $errorCode);
+ return 'Unknown format.';
}
- return $error;
+ return sprintf('Unknown OAuth error: "%s".', $errorCode);
}
}
diff --git a/Tests/OAuth/ResourceOwner/FacebookResourceOwnerTest.php b/Tests/OAuth/ResourceOwner/FacebookResourceOwnerTest.php
index <HASH>..<HASH> 100644
--- a/Tests/OAuth/ResourceOwner/FacebookResourceOwnerTest.php
+++ b/Tests/OAuth/ResourceOwner/FacebookResourceOwnerTest.php
@@ -40,6 +40,21 @@ json;
$this->resourceOwner->getAccessToken($request, 'http://redirect.to/');
}
+ /**
+ * @expectedException \Symfony\Component\Security\Core\Exception\AuthenticationException
+ */
+ public function testGetAccessTokenErrorResponse()
+ {
+ $this->mockBuzz();
+
+ $request = new Request(array(
+ 'error_code' => 901,
+ 'error_message' => 'This app is in sandbox mode. Edit the app configuration at http://developers.facebook.com/apps to make the app publicly visible.'
+ ));
+
+ $this->resourceOwner->getAccessToken($request, 'http://redirect.to/');
+ }
+
protected function setUpResourceOwner($name, $httpUtils, array $options)
{
$options = array_merge(
diff --git a/Tests/OAuth/ResourceOwner/GenericOAuth2ResourceOwnerTest.php b/Tests/OAuth/ResourceOwner/GenericOAuth2ResourceOwnerTest.php
index <HASH>..<HASH> 100644
--- a/Tests/OAuth/ResourceOwner/GenericOAuth2ResourceOwnerTest.php
+++ b/Tests/OAuth/ResourceOwner/GenericOAuth2ResourceOwnerTest.php
@@ -11,6 +11,8 @@
namespace HWI\Bundle\OAuthBundle\Tests\OAuth\ResourceOwner;
+use Buzz\Message\MessageInterface;
+use Buzz\Message\RequestInterface;
use HWI\Bundle\OAuthBundle\OAuth\ResourceOwner\GenericOAuth2ResourceOwner;
use Symfony\Component\HttpFoundation\Request;
@@ -175,6 +177,10 @@ json;
$this->assertEquals('access_token', $userResponse->getOAuthToken());
}
+ /**
+ * @param RequestInterface $request
+ * @param MessageInterface $response
+ */
public function buzzSendMock($request, $response)
{
$response->setContent($this->buzzResponse);
|
Add better detection of oauth errors returned from resource owners
|
hwi_HWIOAuthBundle
|
train
|
56653cbfd5455eacf85d2e40c978065754679473
|
diff --git a/salt/fileserver/gitfs.py b/salt/fileserver/gitfs.py
index <HASH>..<HASH> 100644
--- a/salt/fileserver/gitfs.py
+++ b/salt/fileserver/gitfs.py
@@ -928,6 +928,9 @@ def wait_for_write_lock(filename):
if salt.utils.is_fcntl_available(check_sunos=True):
fcntl.flock(fhandle.fileno(), fcntl.LOCK_UN)
+ fhandle.close()
+ os.remove(filename)
+
def update():
'''
|
Added closing of filehandle and removal of gitfs update.lk
|
saltstack_salt
|
train
|
67b4d8cc73339157537e00eed76dfa56e7a6a829
|
diff --git a/node/WritableStream.js b/node/WritableStream.js
index <HASH>..<HASH> 100644
--- a/node/WritableStream.js
+++ b/node/WritableStream.js
@@ -30,11 +30,13 @@ Object.getOwnPropertyNames(Readability.prototype).forEach(function(name){
});
WritableStream.prototype.onend = function(){
- for(var candidate, skipLevel = 1;
- (candidate = this._getCandidateNode()).info.textLength < 250 &&
- skipLevel < 4;
- skipLevel++){
- this.reset();
+ for(
+ var candidate, skipLevel = 1;
+ (candidate = this._getCandidateNode()).info.textLength < 250 &&
+ skipLevel < 4;
+ skipLevel++
+ ){
+ this.onreset();
this.setSkipLevel(skipLevel);
for(var i = 0; i < this._ws_queue.length; i+=2){
|
[minor] semantic changes in WritableStream.js
|
fb55_readabilitySAX
|
train
|
a5860e0d3916010c6b099ba103da80cc414f3d1f
|
diff --git a/fluent/fluent.go b/fluent/fluent.go
index <HASH>..<HASH> 100644
--- a/fluent/fluent.go
+++ b/fluent/fluent.go
@@ -8,12 +8,15 @@ import (
"net"
"reflect"
"strconv"
+ "strings"
"sync"
"time"
)
const (
defaultHost = "127.0.0.1"
+ defaultNetwork = "tcp"
+ defaultSocketPath = ""
defaultPort = 24224
defaultTimeout = 3 * time.Second
defaultBufferLimit = 8 * 1024 * 1024
@@ -23,13 +26,15 @@ const (
)
type Config struct {
- FluentPort int
- FluentHost string
- Timeout time.Duration
- BufferLimit int
- RetryWait int
- MaxRetry int
- TagPrefix string
+ FluentPort int
+ FluentHost string
+ FluentNetwork string
+ FluentSocketPath string
+ Timeout time.Duration
+ BufferLimit int
+ RetryWait int
+ MaxRetry int
+ TagPrefix string
}
type Fluent struct {
@@ -42,12 +47,18 @@ type Fluent struct {
// New creates a new Logger.
func New(config Config) (f *Fluent, err error) {
+ if config.FluentNetwork == "" {
+ config.FluentNetwork = defaultNetwork
+ }
if config.FluentHost == "" {
config.FluentHost = defaultHost
}
if config.FluentPort == 0 {
config.FluentPort = defaultPort
}
+ if config.FluentSocketPath == "" {
+ config.FluentSocketPath = defaultSocketPath
+ }
if config.Timeout == 0 {
config.Timeout = defaultTimeout
}
@@ -194,7 +205,11 @@ func (f *Fluent) close() (err error) {
// connect establishes a new connection using the specified transport.
func (f *Fluent) connect() (err error) {
- f.conn, err = net.DialTimeout("tcp", f.Config.FluentHost+":"+strconv.Itoa(f.Config.FluentPort), f.Config.Timeout)
+ if strings.HasPrefix(f.Config.FluentNetwork, "unix") {
+ f.conn, err = net.DialTimeout(f.Config.FluentNetwork, f.Config.FluentSocketPath, f.Config.Timeout)
+ return
+ }
+ f.conn, err = net.DialTimeout(f.Config.FluentNetwork, f.Config.FluentHost+":"+strconv.Itoa(f.Config.FluentPort), f.Config.Timeout)
return
}
|
Add support for Unix domain socket.
|
fluent_fluent-logger-golang
|
train
|
f85a12d089b8366e3bbb6c79c09fa40045c45aab
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@ os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-indonesia-regions',
- version='1.0.0-rc.1',
+ version='1.0.1',
packages=find_packages(),
include_package_data=True,
license='MIT License',
|
feature: finalize version <I>
|
Keda87_django-indonesia-regions
|
train
|
4eb7c3bdf3fb108b89beac5743c48e2ea64abfd2
|
diff --git a/server/src/main/java/com/orientechnologies/orient/server/distributed/ODistributedStorage.java b/server/src/main/java/com/orientechnologies/orient/server/distributed/ODistributedStorage.java
index <HASH>..<HASH> 100755
--- a/server/src/main/java/com/orientechnologies/orient/server/distributed/ODistributedStorage.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/distributed/ODistributedStorage.java
@@ -999,65 +999,63 @@ public class ODistributedStorage implements OStorage, OFreezableStorage, OAutosh
final List<ORecordOperation> tmpEntries = new ArrayList<ORecordOperation>();
- while (iTx.getCurrentRecordEntries().iterator().hasNext()) {
- for (ORecordOperation txEntry : iTx.getCurrentRecordEntries())
- tmpEntries.add(txEntry);
+ for (ORecordOperation txEntry : iTx.getCurrentRecordEntries())
+ tmpEntries.add(txEntry);
- iTx.clearRecordEntries();
+ iTx.clearRecordEntries();
- for (ORecordOperation op : tmpEntries) {
- final OAbstractRecordReplicatedTask task;
-
- final ORecord record = op.getRecord();
+ for (ORecordOperation op : tmpEntries) {
+ final OAbstractRecordReplicatedTask task;
- final ORecordId rid = (ORecordId) record.getIdentity();
+ final ORecord record = op.getRecord();
- switch (op.type) {
- case ORecordOperation.CREATED: {
- if (rid.isNew()) {
- // CREATE THE TASK PASSING THE RECORD OR A COPY BASED ON EXECUTION TYPE: IF ASYNCHRONOUS THE COPY PREVENT TO EARLY
- // ASSIGN CLUSTER IDS
- final ORecord rec = executionModeSynch ? record : record.copy();
- task = new OCreateRecordTask(rec);
- if (record instanceof ODocument)
- ((ODocument) record).validate();
- break;
- }
- // ELSE TREAT IT AS UPDATE: GO DOWN
- }
+ final ORecordId rid = (ORecordId) record.getIdentity();
- case ORecordOperation.UPDATED: {
+ switch (op.type) {
+ case ORecordOperation.CREATED: {
+ if (rid.isNew()) {
+ // CREATE THE TASK PASSING THE RECORD OR A COPY BASED ON EXECUTION TYPE: IF ASYNCHRONOUS THE COPY PREVENT TO EARLY
+ // ASSIGN CLUSTER IDS
+ final ORecord rec = executionModeSynch ? record : record.copy();
+ task = new OCreateRecordTask(rec);
if (record instanceof ODocument)
((ODocument) record).validate();
+ break;
+ }
+ // ELSE TREAT IT AS UPDATE: GO DOWN
+ }
- // LOAD PREVIOUS CONTENT TO BE USED IN CASE OF UNDO
- final OStorageOperationResult<ORawBuffer> previousContent = wrapped.readRecord(rid, null, false, null);
+ case ORecordOperation.UPDATED: {
+ if (record instanceof ODocument)
+ ((ODocument) record).validate();
- if (previousContent.getResult() == null)
- // DELETED
- throw new ORecordNotFoundException("Cannot update record '" + rid + "' because has been deleted");
+ // LOAD PREVIOUS CONTENT TO BE USED IN CASE OF UNDO
+ final OStorageOperationResult<ORawBuffer> previousContent = wrapped.readRecord(rid, null, false, null);
- final int v = executionModeSynch ? record.getVersion() : record.getVersion();
+ if (previousContent.getResult() == null)
+ // DELETED
+ throw new ORecordNotFoundException("Cannot update record '" + rid + "' because has been deleted");
- task = new OUpdateRecordTask(rid, previousContent.getResult().getBuffer(), previousContent.getResult().version,
- record.toStream(), v, ORecordInternal.getRecordType(record));
+ final int v = executionModeSynch ? record.getVersion() : record.getVersion();
- break;
- }
+ task = new OUpdateRecordTask(rid, previousContent.getResult().getBuffer(), previousContent.getResult().version,
+ record.toStream(), v, ORecordInternal.getRecordType(record));
- case ORecordOperation.DELETED: {
- final int v = executionModeSynch ? record.getVersion() : record.getVersion();
- task = new ODeleteRecordTask(rid, v);
- break;
- }
+ break;
+ }
- default:
- continue;
- }
+ case ORecordOperation.DELETED: {
+ final int v = executionModeSynch ? record.getVersion() : record.getVersion();
+ task = new ODeleteRecordTask(rid, v);
+ break;
+ }
- involvedClusters.add(getClusterNameByRID(rid));
- txTask.add(task);
+ default:
+ continue;
}
+
+ involvedClusters.add(getClusterNameByRID(rid));
+ txTask.add(task);
}
OTransactionInternal.setStatus((OTransactionAbstract) iTx, OTransaction.TXSTATUS.COMMITTING);
|
removed no needed loop in distributed commit.
|
orientechnologies_orientdb
|
train
|
8b3fcb4beba9cccfe42fa5ba8e7a60bb4bb63f9d
|
diff --git a/src/main/java/org/rnorth/testcontainers/containers/DatabaseContainer.java b/src/main/java/org/rnorth/testcontainers/containers/DatabaseContainer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/rnorth/testcontainers/containers/DatabaseContainer.java
+++ b/src/main/java/org/rnorth/testcontainers/containers/DatabaseContainer.java
@@ -1,9 +1,11 @@
package org.rnorth.testcontainers.containers;
+import org.rnorth.testcontainers.containers.traits.LinkableContainer;
+
/**
* @author richardnorth
*/
-public interface DatabaseContainer {
+public interface DatabaseContainer extends LinkableContainer {
String getName();
diff --git a/src/main/java/org/rnorth/testcontainers/containers/MySQLContainer.java b/src/main/java/org/rnorth/testcontainers/containers/MySQLContainer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/rnorth/testcontainers/containers/MySQLContainer.java
+++ b/src/main/java/org/rnorth/testcontainers/containers/MySQLContainer.java
@@ -6,7 +6,7 @@ import com.spotify.docker.client.messages.ContainerInfo;
/**
* @author richardnorth
*/
-public class MySQLContainer extends AbstractContainer implements DatabaseContainer {
+public class MySQLContainer extends AbstractContainer implements DatabaseContainer {
private static final String IMAGE = "mysql";
private String mySqlPort;
@@ -50,4 +50,9 @@ public class MySQLContainer extends AbstractContainer implements DatabaseContain
public String getJdbcUrl() {
return "jdbc:mysql://" + dockerHostIpAddress + ":" + mySqlPort + "/test";
}
+
+ @Override
+ public String getContainerId() {
+ return containerId;
+ }
}
diff --git a/src/main/java/org/rnorth/testcontainers/containers/NginxContainer.java b/src/main/java/org/rnorth/testcontainers/containers/NginxContainer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/rnorth/testcontainers/containers/NginxContainer.java
+++ b/src/main/java/org/rnorth/testcontainers/containers/NginxContainer.java
@@ -4,7 +4,7 @@ import com.spotify.docker.client.messages.ContainerConfig;
import com.spotify.docker.client.messages.ContainerInfo;
import com.spotify.docker.client.messages.HostConfig;
import com.spotify.docker.client.messages.PortBinding;
-import org.rnorth.testcontainers.containers.AbstractContainer;
+import org.rnorth.testcontainers.containers.traits.LinkableContainer;
import java.net.MalformedURLException;
import java.net.URL;
@@ -15,7 +15,7 @@ import java.util.Map;
/**
* @author richardnorth
*/
-public class NginxContainer extends AbstractContainer {
+public class NginxContainer extends AbstractContainer implements LinkableContainer {
private String nginxPort;
private String htmlContentPath;
private Map<String, List<PortBinding>> ports;
@@ -58,4 +58,9 @@ public class NginxContainer extends AbstractContainer {
public void setCustomConfig(String htmlContentPath) {
binds.add(htmlContentPath + ":/usr/share/nginx/html:ro");
}
+
+ @Override
+ public String getContainerId() {
+ return containerId;
+ }
}
diff --git a/src/main/java/org/rnorth/testcontainers/containers/PostgreSQLContainer.java b/src/main/java/org/rnorth/testcontainers/containers/PostgreSQLContainer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/rnorth/testcontainers/containers/PostgreSQLContainer.java
+++ b/src/main/java/org/rnorth/testcontainers/containers/PostgreSQLContainer.java
@@ -51,4 +51,8 @@ public class PostgreSQLContainer extends AbstractContainer implements DatabaseCo
return "jdbc:postgresql://" + dockerHostIpAddress + ":" + postgresPort + "/test";
}
+ @Override
+ public String getContainerId() {
+ return containerId;
+ }
}
|
Implement LinkableContainer interface on concrete container classes
|
testcontainers_testcontainers-java
|
train
|
e1195c4a0d760d24392498cb7b3de925e1c1e2f3
|
diff --git a/src/Plugin/Presto/OptionalDependency/InstallPrestoTheme.php b/src/Plugin/Presto/OptionalDependency/InstallPrestoTheme.php
index <HASH>..<HASH> 100644
--- a/src/Plugin/Presto/OptionalDependency/InstallPrestoTheme.php
+++ b/src/Plugin/Presto/OptionalDependency/InstallPrestoTheme.php
@@ -28,7 +28,7 @@ class InstallPrestoTheme extends AbstractOptionalDependency{
public function defaultConfiguration()
{
return [
- 'presto_theme' => '',
+ static::THEME_NAME => '',
];
}
@@ -46,9 +46,7 @@ class InstallPrestoTheme extends AbstractOptionalDependency{
if($this->configuration[static::THEME_NAME] === 1) {
return TRUE;
}
- else {
return FALSE;
- }
}
/**
|
#<I>: fix codacy issue
|
Sitback_presto
|
train
|
15568865cba8b432a05ae46a13c77ea2b3760f39
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -31,7 +31,7 @@ EJSE.prototype = {
// Start intercepting requests, looking for '.ejs' files.
listen: function() {
- if (!protocol) return
+ if (!protocol) return this
var self = this
protocol.interceptStringProtocol('file', function(request, callback) {
@@ -65,7 +65,7 @@ EJSE.prototype = {
// Stop intercepting requests, restoring the original `file://` protocol handler.
stopListening: function() {
- if (!protocol) return
+ if (!protocol) return this
protocol.uninterceptProtocol('file')
return this
|
Consistently return "this". Resolves #2
|
bowheart_ejs-electron
|
train
|
400149478476a48779f47de5bfc790f35f9e7fe0
|
diff --git a/eureka-client/src/main/java/com/netflix/discovery/shared/resolver/aws/ConfigClusterResolver.java b/eureka-client/src/main/java/com/netflix/discovery/shared/resolver/aws/ConfigClusterResolver.java
index <HASH>..<HASH> 100644
--- a/eureka-client/src/main/java/com/netflix/discovery/shared/resolver/aws/ConfigClusterResolver.java
+++ b/eureka-client/src/main/java/com/netflix/discovery/shared/resolver/aws/ConfigClusterResolver.java
@@ -76,7 +76,7 @@ public class ConfigClusterResolver implements ClusterResolver<AwsEndpoint> {
endpoints.add(new AwsEndpoint(
serviceURI.getHost(),
serviceURI.getPort(),
- "https".equalsIgnoreCase(serviceURI.getSchemeSpecificPart()),
+ "https".equalsIgnoreCase(serviceURI.getScheme()),
serviceURI.getPath(),
getRegion(),
zone
diff --git a/eureka-client/src/test/java/com/netflix/discovery/shared/resolver/aws/ConfigClusterResolverTest.java b/eureka-client/src/test/java/com/netflix/discovery/shared/resolver/aws/ConfigClusterResolverTest.java
index <HASH>..<HASH> 100644
--- a/eureka-client/src/test/java/com/netflix/discovery/shared/resolver/aws/ConfigClusterResolverTest.java
+++ b/eureka-client/src/test/java/com/netflix/discovery/shared/resolver/aws/ConfigClusterResolverTest.java
@@ -32,7 +32,7 @@ public class ConfigClusterResolverTest {
"http://1.1.2.2:8000/eureka/v2/"
);
private final List<String> endpointsE = Arrays.asList(
- "http://1.1.3.1:8000/eureka/v2/"
+ "https://1.1.3.1:8000/eureka/v2/"
);
private ConfigClusterResolver resolver;
@@ -56,5 +56,11 @@ public class ConfigClusterResolverTest {
public void testReadFromConfig() {
List<AwsEndpoint> endpoints = resolver.getClusterEndpoints();
assertThat(endpoints.size(), equalTo(6));
+
+ for (AwsEndpoint endpoint : endpoints) {
+ if (endpoint.getZone().equals("us-east-1e")) {
+ assertThat(endpoint.isSecure(), equalTo(true));
+ }
+ }
}
}
|
Use getScheme() to compare against https.
fixes gh-<I>
|
Netflix_eureka
|
train
|
ae5c8aee2361f502a05190787bdff3bd3a0aae95
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100755
--- a/index.js
+++ b/index.js
@@ -73,23 +73,27 @@ function FSWatcher(_opts) {
this.closed = false;
this._throttled = Object.create(null);
+ function undef(key) {
+ return opts[key] === undefined;
+ }
+
// Set up default options.
- if (!('persistent' in opts)) opts.persistent = false;
- if (!('ignoreInitial' in opts)) opts.ignoreInitial = false;
- if (!('ignorePermissionErrors' in opts)) opts.ignorePermissionErrors = false;
- if (!('interval' in opts)) opts.interval = 100;
- if (!('binaryInterval' in opts)) opts.binaryInterval = 300;
+ if (undef('persistent')) opts.persistent = false;
+ if (undef('ignoreInitial')) opts.ignoreInitial = false;
+ if (undef('ignorePermissionErrors')) opts.ignorePermissionErrors = false;
+ if (undef('interval')) opts.interval = 100;
+ if (undef('binaryInterval')) opts.binaryInterval = 300;
this.enableBinaryInterval = opts.binaryInterval !== opts.interval;
// Enable fsevents on OS X when polling is disabled.
// Which is basically super fast watcher.
- if (!('useFsEvents' in opts)) opts.useFsEvents = !opts.usePolling;
+ if (undef('useFsEvents')) opts.useFsEvents = !opts.usePolling;
// If we can't use fs events, disable it in any case.
if (!canUseFsEvents) opts.useFsEvents = false;
// Use polling by default on Linux and Mac (if not using fsevents).
// Disable polling on Windows.
- if (!('usePolling' in opts) && !opts.useFsEvents) opts.usePolling = !isWin32;
+ if (undef('usePolling') && !opts.useFsEvents) opts.usePolling = !isWin32;
this._isntIgnored = function(entry) {
return !this._isIgnored(entry.path, entry.stat);
|
Fix issues with options defined as `undefined`
brunch/brunch#<I>
|
paulmillr_chokidar
|
train
|
77abaaa3f04ec4696aa2087b85cca93104c27553
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,7 @@
- add type hints
- rename `McsArgs.repr` to `McsArgs.qualname`
- add `McsArgs.is_abstract` property
+- make compatible with Python 3.5
## 0.3.0 (2018/09/30)
diff --git a/py_meta_utils.py b/py_meta_utils.py
index <HASH>..<HASH> 100644
--- a/py_meta_utils.py
+++ b/py_meta_utils.py
@@ -28,7 +28,7 @@ class McsArgs:
@property
def qualname(self) -> str:
if self.module:
- return f'{self.module}.{self.name}'
+ return self.module + '.' + self.name
return self.name
@property
@@ -48,7 +48,7 @@ class McsArgs:
return iter([self.mcs, self.name, self.bases, self.clsdict])
def __repr__(self):
- return f'<McsArgs class={self.qualname}>'
+ return '<McsArgs class={qualname!r}>'.format(qualname=self.qualname)
class MetaOption:
@@ -79,8 +79,11 @@ class MetaOption:
pass
def __repr__(self):
- return f'<{self.__class__.__name__} name={self.name!r}, ' \
- f'default={self.default!r}, inherit={self.inherit}>'
+ return '{cls}(name={name!r}, default={default!r}, inherit={inherit})'.format(
+ cls=self.__class__.__name__,
+ name=self.name,
+ default=self.default,
+ inherit=self.inherit)
class AbstractMetaOption(MetaOption):
@@ -130,7 +133,7 @@ class MetaOptionsFactory:
for option in self._get_meta_options():
assert not hasattr(self, option.name), \
- f"Can't override field {option.name}."
+ "Can't override field {name}.".format(name=option.name)
value = option.get_value(Meta, base_classes_meta, mcs_args)
option.check_value(value, mcs_args)
meta_attrs.pop(option.name, None)
@@ -138,10 +141,11 @@ class MetaOptionsFactory:
setattr(self, option.name, value)
if meta_attrs:
- # Some attributes in the Meta aren't allowed here
+ # Only allow attributes on the Meta that have a respective MetaOption
raise TypeError(
- f"'class Meta' for {self._mcs_args.name!r} got unknown "
- f"attribute(s) {','.join(sorted(meta_attrs.keys()))}")
+ '`class Meta` for {cls} got unknown attribute(s) {attrs}'.format(
+ cls=mcs_args.name,
+ attrs=', '.join(sorted(meta_attrs.keys()))))
def __repr__(self):
return '<{cls} meta_options={attrs!r}>'.format(
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -14,15 +14,6 @@ setup(
author='Brian Cappello',
license='MIT',
- # https://pypi.python.org/pypi?%3Aaction=list_classifiers
- classifiers=[
- 'Development Status :: 4 - Beta',
- 'Intended Audience :: Developers',
- 'License :: OSI Approved :: MIT License',
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.5',
- 'Programming Language :: Python :: 3.6',
- ],
py_modules=['py_meta_utils'],
install_requires=[],
extras_require={
@@ -36,4 +27,14 @@ setup(
python_requires='>=3.5',
include_package_data=True,
zip_safe=False,
+
+ # https://pypi.python.org/pypi?%3Aaction=list_classifiers
+ classifiers=[
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ ],
)
|
make compatible with Python <I>
|
briancappello_py-meta-utils
|
train
|
9f134f7b0e8236bf09637f779b8c3361658ee0a7
|
diff --git a/docs/conf.py b/docs/conf.py
index <HASH>..<HASH> 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -51,9 +51,9 @@ copyright = u'2011, Marc Brinkmann'
# built documents.
#
# The short X.Y version.
-version = '0.7'
+version = '0.6.3'
# The full version, including alpha/beta/rc tags.
-release = '0.7.dev1'
+release = '0.6.3.dev1'
# The language for content autogenerated by Sphinx. Refer to documentation for
# a list of supported languages.
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,7 @@ def read(fname):
setup(
name='Flask-KVSession',
- version='0.7.dev1',
+ version='0.6.3.dev1',
url='https://github.com/mbr/flask-kvsession',
license='MIT',
author='Marc Brinkmann',
|
Start developing version <I>.dev1 (after release of <I>)
|
mbr_flask-kvsession
|
train
|
77e605ffe8e02425e1843efba078538ef5b3edc2
|
diff --git a/nodeconductor/structure/serializers.py b/nodeconductor/structure/serializers.py
index <HASH>..<HASH> 100644
--- a/nodeconductor/structure/serializers.py
+++ b/nodeconductor/structure/serializers.py
@@ -141,5 +141,5 @@ class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta(object):
model = User
- fields = ('uuid', 'username', 'first_name', 'last_name', 'projects')
+ fields = ('url', 'uuid', 'username', 'first_name', 'last_name', 'projects')
lookup_field = 'uuid'
|
Include url in user resourcerepresentation
NC-<I>
|
opennode_waldur-core
|
train
|
8212030793aadb5a12827c0447bb07d901feec6a
|
diff --git a/headless/headless-commerce/headless-commerce-admin-inventory-impl/src/main/java/com/liferay/headless/commerce/admin/inventory/internal/resource/v1_0/WarehouseResourceImpl.java b/headless/headless-commerce/headless-commerce-admin-inventory-impl/src/main/java/com/liferay/headless/commerce/admin/inventory/internal/resource/v1_0/WarehouseResourceImpl.java
index <HASH>..<HASH> 100644
--- a/headless/headless-commerce/headless-commerce-admin-inventory-impl/src/main/java/com/liferay/headless/commerce/admin/inventory/internal/resource/v1_0/WarehouseResourceImpl.java
+++ b/headless/headless-commerce/headless-commerce-admin-inventory-impl/src/main/java/com/liferay/headless/commerce/admin/inventory/internal/resource/v1_0/WarehouseResourceImpl.java
@@ -168,7 +168,7 @@ public class WarehouseResourceImpl extends BaseWarehouseResourceImpl {
id),
warehouse);
- Response.ResponseBuilder responseBuilder = Response.ok();
+ Response.ResponseBuilder responseBuilder = Response.noContent();
return responseBuilder.build();
}
|
EMP-<I> headless-commerce-admin-inventory - fix http response status code to <I> (no content)
|
liferay_com-liferay-commerce
|
train
|
6347f73b0eb87e72e33be715e583484797652568
|
diff --git a/src/descriptions/Sms.php b/src/descriptions/Sms.php
index <HASH>..<HASH> 100644
--- a/src/descriptions/Sms.php
+++ b/src/descriptions/Sms.php
@@ -87,6 +87,11 @@
'type' => 'string',
'location' => 'json',
],
+ 'callback' => [
+ 'required' => false,
+ 'type' => 'string',
+ 'location' => 'json'
+ ],
]
],
'SearchMessage' => [
|
Add callback param to Sms description
|
fillup_nexmo
|
train
|
9c97bf5510d390bf2aa9a862ce86884b347e4c40
|
diff --git a/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb b/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb
@@ -980,7 +980,6 @@ module ActiveRecord
when 3; 'mediumint'
when nil, 4; 'int'
when 5..8; 'bigint'
- when 11; 'int(11)' # backward compatibility with Rails 2.0
else raise(ActiveRecordError, "No integer type has byte size #{limit}")
end
end
|
Remove `limit: <I>` as backward-compatibility with Rails <I>
Integer limit as a byte size was introduced from Rails <I>.
`limit: <I>` is not a byte size, but take care for backward-compatibility
with Rails <I> (a<I>af6). Integer limit out of range should be allowed
to raise by #<I>. I think we should remove this backward-compatibility.
|
rails_rails
|
train
|
7c9430af78945359127c3f0ae0286db296f7ac8b
|
diff --git a/src/Illuminate/Database/Schema/Builder.php b/src/Illuminate/Database/Schema/Builder.php
index <HASH>..<HASH> 100755
--- a/src/Illuminate/Database/Schema/Builder.php
+++ b/src/Illuminate/Database/Schema/Builder.php
@@ -34,7 +34,7 @@ class Builder
/**
* The default string length for migrations.
*
- * @var int
+ * @var int|null
*/
public static $defaultStringLength = 255;
diff --git a/src/Illuminate/Database/Schema/Grammars/PostgresGrammar.php b/src/Illuminate/Database/Schema/Grammars/PostgresGrammar.php
index <HASH>..<HASH> 100755
--- a/src/Illuminate/Database/Schema/Grammars/PostgresGrammar.php
+++ b/src/Illuminate/Database/Schema/Grammars/PostgresGrammar.php
@@ -509,7 +509,11 @@ class PostgresGrammar extends Grammar
*/
protected function typeChar(Fluent $column)
{
- return "char({$column->length})";
+ if ($column->length) {
+ return "char({$column->length})";
+ }
+
+ return 'char';
}
/**
@@ -520,7 +524,11 @@ class PostgresGrammar extends Grammar
*/
protected function typeString(Fluent $column)
{
- return "varchar({$column->length})";
+ if ($column->length) {
+ return "varchar({$column->length})";
+ }
+
+ return 'varchar';
}
/**
diff --git a/tests/Database/DatabasePostgresSchemaGrammarTest.php b/tests/Database/DatabasePostgresSchemaGrammarTest.php
index <HASH>..<HASH> 100755
--- a/tests/Database/DatabasePostgresSchemaGrammarTest.php
+++ b/tests/Database/DatabasePostgresSchemaGrammarTest.php
@@ -4,6 +4,7 @@ namespace Illuminate\Tests\Database;
use Illuminate\Database\Connection;
use Illuminate\Database\Schema\Blueprint;
+use Illuminate\Database\Schema\Builder;
use Illuminate\Database\Schema\ForeignIdColumnDefinition;
use Illuminate\Database\Schema\Grammars\PostgresGrammar;
use Mockery as m;
@@ -434,6 +435,52 @@ class DatabasePostgresSchemaGrammarTest extends TestCase
$this->assertSame('alter table "users" add column "foo" varchar(100) null default \'bar\'', $statements[0]);
}
+ public function testAddingStringWithoutLengthLimit()
+ {
+ $blueprint = new Blueprint('users');
+ $blueprint->string('foo');
+ $statements = $blueprint->toSql($this->getConnection(), $this->getGrammar());
+
+ $this->assertCount(1, $statements);
+ $this->assertSame('alter table "users" add column "foo" varchar(255) not null', $statements[0]);
+
+ Builder::$defaultStringLength = null;
+
+ $blueprint = new Blueprint('users');
+ $blueprint->string('foo');
+ $statements = $blueprint->toSql($this->getConnection(), $this->getGrammar());
+
+ try {
+ $this->assertCount(1, $statements);
+ $this->assertSame('alter table "users" add column "foo" varchar not null', $statements[0]);
+ } finally {
+ Builder::$defaultStringLength = 255;
+ }
+ }
+
+ public function testAddingCharWithoutLengthLimit()
+ {
+ $blueprint = new Blueprint('users');
+ $blueprint->char('foo');
+ $statements = $blueprint->toSql($this->getConnection(), $this->getGrammar());
+
+ $this->assertCount(1, $statements);
+ $this->assertSame('alter table "users" add column "foo" char(255) not null', $statements[0]);
+
+ Builder::$defaultStringLength = null;
+
+ $blueprint = new Blueprint('users');
+ $blueprint->char('foo');
+ $statements = $blueprint->toSql($this->getConnection(), $this->getGrammar());
+
+ try {
+ $this->assertCount(1, $statements);
+ $this->assertSame('alter table "users" add column "foo" char not null', $statements[0]);
+ } finally {
+ Builder::$defaultStringLength = 255;
+ }
+ }
+
public function testAddingText()
{
$blueprint = new Blueprint('users');
|
feat: allow non length limited strings and char for postgres (#<I>)
|
laravel_framework
|
train
|
68347a9318586aa2e131c1a251271b6f72f21366
|
diff --git a/lib/active_admin_role/active_admin/dsl.rb b/lib/active_admin_role/active_admin/dsl.rb
index <HASH>..<HASH> 100644
--- a/lib/active_admin_role/active_admin/dsl.rb
+++ b/lib/active_admin_role/active_admin/dsl.rb
@@ -8,7 +8,7 @@ module ActiveAdminRole
controller.resource_class.roles.each_key do |role|
batch_action "assign as #{role}" do |ids|
- formatted_ids = ids - [current_admin_user.id.to_s]
+ formatted_ids = ids - [active_admin_role_current_user.try!(:id).to_s]
resource_class.where(id: formatted_ids).update_all(role: resource_class.roles[role])
if Rails::VERSION::MAJOR >= 5
diff --git a/lib/active_admin_role/active_admin/resource_controller.rb b/lib/active_admin_role/active_admin/resource_controller.rb
index <HASH>..<HASH> 100644
--- a/lib/active_admin_role/active_admin/resource_controller.rb
+++ b/lib/active_admin_role/active_admin/resource_controller.rb
@@ -16,6 +16,14 @@ module ActiveAdminRole
def authorize_access_resource!
authorize_resource!(active_admin_config.resource_class)
end
+
+ def active_admin_role_current_user
+ send(active_admin_role_current_user_method_name)
+ end
+
+ def active_admin_role_current_user_method_name
+ ActiveAdminRole.config.current_user_method_name
+ end
end
end
end
diff --git a/lib/active_admin_role/config.rb b/lib/active_admin_role/config.rb
index <HASH>..<HASH> 100644
--- a/lib/active_admin_role/config.rb
+++ b/lib/active_admin_role/config.rb
@@ -1,6 +1,6 @@
module ActiveAdminRole
class Config
- attr_accessor :roles, :super_user_roles, :guest_user_roles, :user_class_name, :default_state
+ attr_accessor :roles, :super_user_roles, :guest_user_roles, :user_class_name, :default_state, :current_user_method_name
def initialize
@roles = { guest: 0, support: 1, staff: 2, manager: 3, admin: 99 }
@@ -8,6 +8,7 @@ module ActiveAdminRole
@super_user_roles = [:admin]
@user_class_name = "AdminUser"
@default_state = :cannot
+ @current_user_method_name = "current_admin_user"
end
def default_state=(value)
diff --git a/lib/generators/active_admin_role/templates/initializer.rb b/lib/generators/active_admin_role/templates/initializer.rb
index <HASH>..<HASH> 100644
--- a/lib/generators/active_admin_role/templates/initializer.rb
+++ b/lib/generators/active_admin_role/templates/initializer.rb
@@ -12,6 +12,10 @@ ActiveAdminRole.configure do |config|
# == User class name | default: 'AdminUser'
config.user_class_name = "<%= model_class_name %>"
+ # [Optional:String]
+ # == method name of #current_user in Controller
+ config.current_user_method_name = "current_<%= model_class_name.underscore.tr('/', '_') %>"
+
# [Optional:Symbol]
# == Default permission | default: :cannot
config.default_state = :cannot
|
Configurable current_user method on ResourceController
|
yhirano55_active_admin_role
|
train
|
d09ba42a893c3f37d8188abe55e72f0a59e54fdf
|
diff --git a/src/HalRenderer.php b/src/HalRenderer.php
index <HASH>..<HASH> 100644
--- a/src/HalRenderer.php
+++ b/src/HalRenderer.php
@@ -42,6 +42,31 @@ class HalRenderer implements RenderInterface
return $ro->view;
}
+ private function valuateElements(ResourceObject &$ro)
+ {
+ foreach ($ro->body as $key => &$embeded) {
+ if ($embeded instanceof AbstractRequest) {
+ $isDefferentSchema = $this->isDifferentSchema($ro, $embeded->resourceObject);
+ if ($isDefferentSchema === true) {
+ $ro->body['_embedded'][$key] = $embeded()->body;
+ unset($ro->body[$key]);
+ continue;
+ }
+ unset($ro->body[$key]);
+ $view = $this->render($embeded());
+ $ro->body['_embedded'][$key] = json_decode($view);
+ }
+ }
+ }
+
+ /**
+ * Return "is different schema" (page <-> app)
+ */
+ private function isDifferentSchema(ResourceObject $parentRo, ResourceObject $childRo) : bool
+ {
+ return $parentRo->uri->scheme . $parentRo->uri->host !== $childRo->uri->scheme . $childRo->uri->host;
+ }
+
private function getReverseMatchedLink(string $uri) : string
{
return $uri;
@@ -60,10 +85,19 @@ class HalRenderer implements RenderInterface
private function valuate(ResourceObject $ro) : array
{
+ // evaluate all request in body.
+ if (is_array($ro->body)) {
+ $this->valuateElements($ro);
+ }
// HAL
$body = $ro->body ?: [];
+ if (is_scalar($body)) {
+ $body = ['value' => $body];
+
+ return [$ro, $body];
+ }
- return [$ro, $body];
+ return[$ro, (array) $body];
}
private function addLinks(array $body, array $annotations, Hal $hal)
|
render embedded resources in HAL renderer
|
bearsunday_BEAR.Resource
|
train
|
77211a8f6af68a1030bd4f8787f0983f2024197f
|
diff --git a/rootpy/plotting/views.py b/rootpy/plotting/views.py
index <HASH>..<HASH> 100755
--- a/rootpy/plotting/views.py
+++ b/rootpy/plotting/views.py
@@ -201,7 +201,7 @@ the same view, use a SubdirectoryView.
>>> subdir1 = io.Directory('subdir1', 'subdir directory in 1')
>>> _ = subdir1.cd()
>>> hist = ROOT.TH1F("mutau_mass", "Mu-Tau mass", 100, 0, 100)
->>> hist.FillRandom('gaus', 5000)
+>>> hist.FillRandom('gaus', 2000)
>>> keep.append(hist)
>>> _ = basedir.cd()
>>> subdir2 = io.Directory('subdir2', 'subdir directory 2')
@@ -214,17 +214,24 @@ The directory structure is now::
base/subdir1/hist
base/subdir2/hist
->>> subdir1view = SubdirectoryView(basedir, 'subdir1')
->>> subdir2view = SubdirectoryView(basedir, 'subdir2')
+Subdirectory views work on top of other views.
+
+>>> baseview = ScaleView(basedir, 0.1)
+>>> subdir1view = SubdirectoryView(baseview, 'subdir1')
+>>> subdir2view = SubdirectoryView(baseview, 'subdir2')
>>> histo1 = subdir1view.Get('mutau_mass')
>>> histo2 = subdir2view.Get('mutau_mass')
->>> exp_histo1 = basedir.Get("subdir1/mutau_mass")
->>> exp_histo2 = basedir.Get("subdir2/mutau_mass")
->>> exp_histo1 == histo1
+>>> exp_histo1 = baseview.Get("subdir1/mutau_mass")
+>>> exp_histo2 = baseview.Get("subdir2/mutau_mass")
+>>> def equivalent(h1, h2):
+... return (abs(h1.GetMean() - h2.GetMean()) < 1e-4 and
+... abs(h1.GetRMS() - h2.GetRMS()) < 1e-4 and
+... abs(h1.Integral() - h2.Integral()) < 1e-4)
+>>> equivalent(exp_histo1, histo1)
True
->>> exp_histo2 == histo2
+>>> equivalent(exp_histo2, histo2)
True
->>> histo1 == histo2
+>>> equivalent(histo1, histo2)
False
'''
|
Add doctest to show that subdir views work on top of other views as expected
|
rootpy_rootpy
|
train
|
6bc589c7419932f713c6cb0045482ff33c91f9f8
|
diff --git a/src/Extensions/ContentReviewCMSExtension.php b/src/Extensions/ContentReviewCMSExtension.php
index <HASH>..<HASH> 100644
--- a/src/Extensions/ContentReviewCMSExtension.php
+++ b/src/Extensions/ContentReviewCMSExtension.php
@@ -159,7 +159,7 @@ class ContentReviewCMSExtension extends LeftAndMainExtension
$data = array_merge($data, $extraData);
}
- $response = HTTPResponse::create(Convert::raw2json($data));
+ $response = HTTPResponse::create(json_encode($data));
$response->addHeader('Content-Type', 'application/json');
return $response;
|
FIX Replace Convert JSON methods with json_* methods, deprecated from SilverStripe <I>
|
silverstripe_silverstripe-contentreview
|
train
|
31950c3bfc18b64ce56e89179e90ea955032bc1f
|
diff --git a/platform-mbean/src/test/java/org/jboss/as/platform/mbean/PlatformMBeanResourceUnitTestCase.java b/platform-mbean/src/test/java/org/jboss/as/platform/mbean/PlatformMBeanResourceUnitTestCase.java
index <HASH>..<HASH> 100644
--- a/platform-mbean/src/test/java/org/jboss/as/platform/mbean/PlatformMBeanResourceUnitTestCase.java
+++ b/platform-mbean/src/test/java/org/jboss/as/platform/mbean/PlatformMBeanResourceUnitTestCase.java
@@ -367,10 +367,13 @@ public class PlatformMBeanResourceUnitTestCase {
Assert.assertEquals(mbean.isThreadContentionMonitoringEnabled(), threadContentionEnabled);
boolean threadCPUSupported = describedResource.resource.get(PlatformMBeanConstants.THREAD_CPU_TIME_SUPPORTED).asBoolean();
Assert.assertEquals(mbean.isThreadCpuTimeSupported(), threadCPUSupported);
- boolean threadCPUEnabled = describedResource.resource.get(PlatformMBeanConstants.THREAD_CPU_TIME_ENABLED).asBoolean();
- Assert.assertEquals(mbean.isThreadCpuTimeSupported(), threadCPUEnabled);
boolean currentThreadPUSupported = describedResource.resource.get(PlatformMBeanConstants.CURRENT_THREAD_CPU_TIME_SUPPORTED).asBoolean();
Assert.assertEquals(mbean.isCurrentThreadCpuTimeSupported(), currentThreadPUSupported);
+ boolean threadCPUEnabled = describedResource.resource.get(PlatformMBeanConstants.THREAD_CPU_TIME_ENABLED).asBoolean();
+ if (threadCPUSupported || currentThreadPUSupported) {
+ threadCPUEnabled = describedResource.resource.get(PlatformMBeanConstants.THREAD_CPU_TIME_ENABLED).asBoolean();
+ Assert.assertEquals(mbean.isThreadCpuTimeSupported(), threadCPUEnabled);
+ }
ModelNode op = getOperation("reset-peak-thread-count", "threading", null);
Assert.assertFalse(executeOp(op, false).isDefined());
|
According to the OpenJDK sources (both 6 & 7):
<URL>
the method (isThreadCpuTimeEnabled) mustn't be called if both isThreadCpuTimeSupported and isCurrentThreadCpuTimeSupported returned false.
This commit makes the test safe on platforms which do not support the thread CPU time feature.
|
wildfly_wildfly
|
train
|
d5294ef91b061064785f077db0fd88717495ef16
|
diff --git a/src/Composer/Repository/Vcs/GitHubDriver.php b/src/Composer/Repository/Vcs/GitHubDriver.php
index <HASH>..<HASH> 100644
--- a/src/Composer/Repository/Vcs/GitHubDriver.php
+++ b/src/Composer/Repository/Vcs/GitHubDriver.php
@@ -46,7 +46,7 @@ class GitHubDriver extends VcsDriver
*/
public function initialize()
{
- preg_match('#^(?:(?:https?|git)://([^/]+)/|git@([^:]+):)([^/]+)/(.+?)(?:\.git)?$#', $this->url, $match);
+ preg_match('#^(?:(?:https?|git)://([^/]+)/|git@([^:]+):)([^/]+)/(.+?)(?:\.git|/)?$#', $this->url, $match);
$this->owner = $match[3];
$this->repository = $match[4];
$this->originUrl = !empty($match[1]) ? $match[1] : $match[2];
@@ -61,6 +61,11 @@ class GitHubDriver extends VcsDriver
$this->fetchRootIdentifier();
}
+ public function getRepositoryUrl()
+ {
+ return 'https://'.$this->originUrl.'/'.$this->owner.'/'.$this->repository;
+ }
+
/**
* {@inheritDoc}
*/
@@ -250,7 +255,7 @@ class GitHubDriver extends VcsDriver
*/
public static function supports(IOInterface $io, Config $config, $url, $deep = false)
{
- if (!preg_match('#^((?:https?|git)://([^/]+)/|git@([^:]+):)([^/]+)/(.+?)(?:\.git)?$#', $url, $matches)) {
+ if (!preg_match('#^((?:https?|git)://([^/]+)/|git@([^:]+):)([^/]+)/(.+?)(?:\.git|/)?$#', $url, $matches)) {
return false;
}
@@ -401,6 +406,9 @@ class GitHubDriver extends VcsDriver
return;
}
+ $this->owner = $repoData['owner']['login'];
+ $this->repository = $repoData['name'];
+
$this->isPrivate = !empty($repoData['private']);
if (isset($repoData['default_branch'])) {
$this->rootIdentifier = $repoData['default_branch'];
diff --git a/tests/Composer/Test/Repository/Vcs/GitHubDriverTest.php b/tests/Composer/Test/Repository/Vcs/GitHubDriverTest.php
index <HASH>..<HASH> 100644
--- a/tests/Composer/Test/Repository/Vcs/GitHubDriverTest.php
+++ b/tests/Composer/Test/Repository/Vcs/GitHubDriverTest.php
@@ -91,7 +91,7 @@ class GitHubDriverTest extends \PHPUnit_Framework_TestCase
$remoteFilesystem->expects($this->at(3))
->method('getContents')
->with($this->equalTo('github.com'), $this->equalTo($repoApiUrl), $this->equalTo(false))
- ->will($this->returnValue('{"master_branch": "test_master", "private": true}'));
+ ->will($this->returnValue('{"master_branch": "test_master", "private": true, "owner": {"login": "composer"}, "name": "packagist"}'));
$configSource = $this->getMock('Composer\Config\ConfigSourceInterface');
$authConfigSource = $this->getMock('Composer\Config\ConfigSourceInterface');
@@ -138,7 +138,7 @@ class GitHubDriverTest extends \PHPUnit_Framework_TestCase
$remoteFilesystem->expects($this->at(0))
->method('getContents')
->with($this->equalTo('github.com'), $this->equalTo($repoApiUrl), $this->equalTo(false))
- ->will($this->returnValue('{"master_branch": "test_master"}'));
+ ->will($this->returnValue('{"master_branch": "test_master", "owner": {"login": "composer"}, "name": "packagist"}'));
$repoConfig = array(
'url' => $repoUrl,
@@ -181,7 +181,7 @@ class GitHubDriverTest extends \PHPUnit_Framework_TestCase
$remoteFilesystem->expects($this->at(0))
->method('getContents')
->with($this->equalTo('github.com'), $this->equalTo($repoApiUrl), $this->equalTo(false))
- ->will($this->returnValue('{"master_branch": "test_master"}'));
+ ->will($this->returnValue('{"master_branch": "test_master", "owner": {"login": "composer"}, "name": "packagist"}'));
$remoteFilesystem->expects($this->at(1))
->method('getContents')
|
Update repo url from github and handle trailing slashes, refs composer/packagist#<I> refs composer/packagist#<I>
|
mothership-ec_composer
|
train
|
fc3feba953b01976d251cff328769040ebc4ae53
|
diff --git a/lxd/api_1.0.go b/lxd/api_1.0.go
index <HASH>..<HASH> 100644
--- a/lxd/api_1.0.go
+++ b/lxd/api_1.0.go
@@ -334,9 +334,14 @@ func doApi10Update(d *Daemon, req api.ServerPut, patch bool) Response {
}
}
+ clustered, err := cluster.Enabled(d.db)
+ if err != nil {
+ return InternalError(errors.Wrap(err, "Failed to check for cluster state"))
+ }
+
nodeChanged := map[string]string{}
var newNodeConfig *node.Config
- err := d.db.Transaction(func(tx *db.NodeTx) error {
+ err = d.db.Transaction(func(tx *db.NodeTx) error {
var err error
newNodeConfig, err = node.ConfigLoad(tx)
if err != nil {
@@ -348,7 +353,7 @@ func doApi10Update(d *Daemon, req api.ServerPut, patch bool) Response {
curClusterAddress := newNodeConfig.ClusterAddress()
newClusterAddress, ok := nodeValues["cluster.https_address"]
- if ok && curClusterAddress != "" && !util.IsAddressCovered(newClusterAddress.(string), curClusterAddress) {
+ if clustered && ok && curClusterAddress != "" && !util.IsAddressCovered(newClusterAddress.(string), curClusterAddress) {
return fmt.Errorf("Changing cluster.https_address is currently not supported")
}
|
lxd/config: Allow modifying cluster.https_address
This allows modifying cluster.https_address when LXD isn't clustered yet.
Closes #<I>
|
lxc_lxd
|
train
|
540cb85bb8fca758d58c921b7441c903611371b8
|
diff --git a/src/javascript/core/utils/Basic.js b/src/javascript/core/utils/Basic.js
index <HASH>..<HASH> 100644
--- a/src/javascript/core/utils/Basic.js
+++ b/src/javascript/core/utils/Basic.js
@@ -288,10 +288,10 @@ define('moxie/core/utils/Basic', [], function() {
/**
- Generates an unique ID. This is 99.99% unique since it takes the current time and 5 random numbers.
- The only way a user would be able to get the same ID is if the two persons at the same exact milisecond manages
- to get 5 the same random numbers between 0-65535 it also uses a counter so each call will be guaranteed to be page unique.
- It's more probable for the earth to be hit with an ansteriod. Y
+ Generates an unique ID. The only way a user would be able to get the same ID is if the two persons
+ at the same exact millisecond manage to get the same 5 random numbers between 0-65535; it also uses
+ a counter so each ID is guaranteed to be unique for the given page. It is more probable for the earth
+ to be hit with an asteroid.
@method guid
@static
|
Basic, Utils: Correct typos in guid() comments.
|
moxiecode_moxie
|
train
|
49aabdf13d2ee74380ec2d21f57ffde494a9bf9d
|
diff --git a/scripts/serverless.js b/scripts/serverless.js
index <HASH>..<HASH> 100755
--- a/scripts/serverless.js
+++ b/scripts/serverless.js
@@ -38,12 +38,11 @@ const processSpanPromise = (async () => {
}
const Serverless = require('../lib/Serverless');
-
serverless = new Serverless();
- serverless.onExitPromise = processSpanPromise;
- serverless.invocationId = invocationId;
try {
+ serverless.onExitPromise = processSpanPromise;
+ serverless.invocationId = invocationId;
await serverless.init();
if (serverless.invokedInstance) serverless = serverless.invokedInstance;
await serverless.run();
|
refactor: Improve error handling scope
|
serverless_serverless
|
train
|
474c73f4b8bb75a0d2e4f17ab884d6e368123b02
|
diff --git a/doitlive.py b/doitlive.py
index <HASH>..<HASH> 100644
--- a/doitlive.py
+++ b/doitlive.py
@@ -61,7 +61,7 @@ THEMES = OrderedDict([
('osx', '{hostname}:{dir} {user}$'),
('osx_color', '{hostname.blue}:{dir.green} {user.cyan}$'),
- ('robbyrussell', '{r_arrow.red} {dir.cyan} {git_branch.red.paren.git}')
+ ('robbyrussell', '{r_arrow.red} {dir.cyan} {git_branch.red.paren.git}'),
])
@@ -139,7 +139,7 @@ def get_current_git_branch():
command = ['git', 'symbolic-ref', '--short', '-q', 'HEAD']
try:
proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = proc.communicate()
+ out, _ = proc.communicate()
return out.strip()
except subprocess.CalledProcessError:
pass
@@ -391,6 +391,7 @@ HEADER_TEMPLATE = """# Recorded with the doitlive recorder
"""
+STOP_COMMAND = 'stop'
def run_recorder(shell, prompt):
commands = []
@@ -398,7 +399,7 @@ def run_recorder(shell, prompt):
while True:
formatted_prompt = prefix + format_prompt(THEMES[prompt]) + ' '
command = click.prompt(formatted_prompt, prompt_suffix='')
- if command == 'finish':
+ if command == STOP_COMMAND:
break
commands.append(command)
output = run_command(command, shell=shell, test_mode=TESTING)
@@ -412,10 +413,10 @@ def run_recorder(shell, prompt):
type=click.Path(dir_okay=False, writable=True))
@cli.command()
def record(session_file, shell, prompt):
- """Record a session file. If no path is passed, commands are written to
+ """Record a session file. If no argument is passed, commands are written to
./session.sh.
- When you are finished recording, run the "finish" command.
+ When you are finished recording, run the "stop" command.
"""
if os.path.exists(session_file):
click.confirm(
@@ -427,7 +428,7 @@ def record(session_file, shell, prompt):
secho('RECORDING SESSION: {}'.format(filename),
fg='yellow', bold=True)
- echo('Type ' + style('"finish"', bold=True, fg='green') +
+ echo('Type ' + style('"{}"'.format(STOP_COMMAND), bold=True, fg='green') +
' when you are done recording.')
click.pause()
@@ -444,7 +445,9 @@ def record(session_file, shell, prompt):
fp.write(HEADER_TEMPLATE.format(shell=shell, prompt=prompt))
fp.write('\n\n'.join(commands))
fp.write('\n')
- echo('Done. Run "doitlive play {0}" to play back your session.'.format(filename))
+
+ play_cmd = style('doitlive play {}'.format(filename), bold=True)
+ echo('Done. Run {} to play back your session.'.format(play_cmd))
if __name__ == '__main__':
cli()
diff --git a/tests/test_doitlive.py b/tests/test_doitlive.py
index <HASH>..<HASH> 100644
--- a/tests/test_doitlive.py
+++ b/tests/test_doitlive.py
@@ -182,7 +182,7 @@ def recording_session(runner, commands=None, args=None):
with runner.isolated_filesystem():
command_input = '\n'.join(commands)
- user_input = ''.join(['\n', command_input, '\nfinish\n'])
+ user_input = ''.join(['\n', command_input, '\nstop\n'])
runner.invoke(cli, ['record'] + args, input=user_input)
yield
@@ -190,7 +190,7 @@ class TestRecorder:
def test_record_creates_session_file(self, runner):
with runner.isolated_filesystem():
- result = runner.invoke(cli, ['record'], input='\necho "Hello"\nfinish\n')
+ result = runner.invoke(cli, ['record'], input='\necho "Hello"\nstop\n')
assert result.exit_code == 0, result.output
assert os.path.exists('session.sh')
@@ -231,7 +231,7 @@ class TestRecorder:
cd_to = os.path.join(initial_dir, 'mydir')
os.mkdir(cd_to)
user_input = ''.join([
- '\n', 'cd mydir', '\n', 'pwd', '\n', '\nfinish\n'
+ '\n', 'cd mydir', '\n', 'pwd', '\n', '\nstop\n'
])
result = runner.invoke(cli, ['record'], input=user_input)
assert result.exit_code == 0
|
'stop' is the command to finish recording
|
sloria_doitlive
|
train
|
56c268ddd0aa55fde69480330d10fe6883a95e38
|
diff --git a/web/src/main/java/uk/ac/ebi/atlas/search/diffanalytics/BioentitiesSearchDifferentialDownloadController.java b/web/src/main/java/uk/ac/ebi/atlas/search/diffanalytics/BioentitiesSearchDifferentialDownloadController.java
index <HASH>..<HASH> 100644
--- a/web/src/main/java/uk/ac/ebi/atlas/search/diffanalytics/BioentitiesSearchDifferentialDownloadController.java
+++ b/web/src/main/java/uk/ac/ebi/atlas/search/diffanalytics/BioentitiesSearchDifferentialDownloadController.java
@@ -27,6 +27,7 @@ import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
+import uk.ac.ebi.atlas.search.EFO.ConditionSearchEFOExpander;
import uk.ac.ebi.atlas.utils.VisitorException;
import uk.ac.ebi.atlas.web.GeneQuerySearchRequestParameters;
@@ -43,11 +44,13 @@ public class BioentitiesSearchDifferentialDownloadController {
private DiffAnalyticsSearchService diffAnalyticsSearchService;
private DiffAnalyticsTSVWriter tsvWriter;
+ private ConditionSearchEFOExpander efoExpander;
@Inject
- public BioentitiesSearchDifferentialDownloadController(DiffAnalyticsSearchService diffAnalyticsSearchService, DiffAnalyticsTSVWriter tsvWriter) {
+ public BioentitiesSearchDifferentialDownloadController(DiffAnalyticsSearchService diffAnalyticsSearchService, DiffAnalyticsTSVWriter tsvWriter, ConditionSearchEFOExpander efoExpander) {
this.diffAnalyticsSearchService = diffAnalyticsSearchService;
this.tsvWriter = tsvWriter;
+ this.efoExpander = efoExpander;
}
@RequestMapping(value = "/query.tsv")
@@ -78,7 +81,10 @@ public class BioentitiesSearchDifferentialDownloadController {
writer.setResponseWriter(response.getWriter());
writer.writeHeader(requestParameters);
- int count = diffAnalyticsSearchService.visitEachExpression(requestParameters.getGeneQuery(), requestParameters.getCondition(), requestParameters.getOrganism(), requestParameters.isExactMatch(), writer);
+
+ String condition = efoExpander.fetchExpandedTermWithEFOChildren(requestParameters.getCondition());
+
+ int count = diffAnalyticsSearchService.visitEachExpression(requestParameters.getGeneQuery(), condition, requestParameters.getOrganism(), requestParameters.isExactMatch(), writer);
LOGGER.info("downloadGeneQueryResults streamed " + count + " differential gene expressions");
} catch (VisitorException e) {
LOGGER.warn("downloadGeneQueryResults aborted, connection may have been lost with the client:" + e.getMessage());
diff --git a/web/src/test/java/uk/ac/ebi/atlas/search/diffanalytics/BioentitiesSearchDifferentialDownloadControllerSIT.java b/web/src/test/java/uk/ac/ebi/atlas/search/diffanalytics/BioentitiesSearchDifferentialDownloadControllerSIT.java
index <HASH>..<HASH> 100644
--- a/web/src/test/java/uk/ac/ebi/atlas/search/diffanalytics/BioentitiesSearchDifferentialDownloadControllerSIT.java
+++ b/web/src/test/java/uk/ac/ebi/atlas/search/diffanalytics/BioentitiesSearchDifferentialDownloadControllerSIT.java
@@ -9,6 +9,7 @@ import uk.ac.ebi.atlas.acceptance.rest.fixtures.RestAssuredFixture;
import java.util.List;
+import static com.jayway.restassured.RestAssured.get;
import static com.jayway.restassured.RestAssured.given;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
@@ -19,6 +20,16 @@ public class BioentitiesSearchDifferentialDownloadControllerSIT extends RestAssu
private EndPoint subject = new EndPoint("/gxa/query.tsv?geneQuery=Cyba&exactMatch=true&_exactMatch=on&organism=Any&condition=");
@Test
+ public void shouldFindResultsForChildEfoTerms() {
+ Response response = get("query.tsv?geneQuery=&exactMatch=true&_exactMatch=on&organism=Any&condition=sex");
+
+ ResponseBody body = response.getBody();
+
+ String[] lines = body.asString().split("\n");
+ assertThat(lines.length, is(1102));
+ }
+
+ @Test
public void verifyLengthOfDocument() {
ResponseBody body = subject.getResponseBody();
|
fix - Include EFO children in diff search results download
|
ebi-gene-expression-group_atlas
|
train
|
b90301db8644a52ccfd0f5fa0d190e74ee617ed7
|
diff --git a/src/IMAP/Message.php b/src/IMAP/Message.php
index <HASH>..<HASH> 100644
--- a/src/IMAP/Message.php
+++ b/src/IMAP/Message.php
@@ -720,7 +720,7 @@ class Message {
}
if (function_exists('iconv') && $from != 'UTF-7' && $to != 'UTF-7') {
- return iconv($from, $to.'//IGNORE', $str);
+ return @iconv($from, $to.'//IGNORE', $str);
} else {
if (!$from) {
return mb_convert_encoding($str, $to);
|
iconv(): error suppressor for //IGNORE added #<I>
|
Webklex_laravel-imap
|
train
|
233c38cb4ddc79aa4de33f75f2e5c9bced29a7be
|
diff --git a/bin/staticka.php b/bin/staticka.php
index <HASH>..<HASH> 100644
--- a/bin/staticka.php
+++ b/bin/staticka.php
@@ -2,6 +2,6 @@
require 'vendor/autoload.php';
-$app = new Rougin\Staticka\Console\Application;
+$app = new Staticka\Console\Application;
$app->run();
diff --git a/src/Console/Commands/BuildCommand.php b/src/Console/Commands/BuildCommand.php
index <HASH>..<HASH> 100644
--- a/src/Console/Commands/BuildCommand.php
+++ b/src/Console/Commands/BuildCommand.php
@@ -55,8 +55,8 @@ class BuildCommand extends Command
{
$this->setName('build')->setDescription('Build a site from source');
- $this->addOption('source', null, 4, 'Source of the site', getcwd());
- $this->addOption('path', null, 4, 'Path of the site to be built', getcwd() . '/build');
+ $this->addOption('source', null, 4, 'Source of the site');
+ $this->addOption('path', null, 4, 'Path of the site to be built');
}
/**
@@ -189,9 +189,11 @@ class BuildCommand extends Command
*/
protected function settings()
{
- $site = realpath($this->input->getOption('source'));
+ $site = getcwd() . '/' . $this->input->getOption('source');
- $build = realpath($this->input->getOption('path')) ?: $site . '/build';
+ $path = $this->input->getOption('path');
+
+ $build = $path ? getcwd() . '/' . $path : $site . '/build';
$settings = new Settings;
diff --git a/src/Console/Commands/WatchCommand.php b/src/Console/Commands/WatchCommand.php
index <HASH>..<HASH> 100644
--- a/src/Console/Commands/WatchCommand.php
+++ b/src/Console/Commands/WatchCommand.php
@@ -43,10 +43,10 @@ class WatchCommand extends Command
*/
public function configure()
{
- $this->setName('watch')->setDescription('Watch changes from source');
+ $this->setName('watch')->setDescription('Watch changes from the source directory');
- $this->addOption('source', null, 4, 'Source of the site', getcwd());
- $this->addOption('path', null, 4, 'Path of the site to be built', getcwd() . '/build');
+ $this->addOption('source', null, 4, 'Source of the site');
+ $this->addOption('path', null, 4, 'Path of the site to be built');
$this->addOption('test', null, 1, 'Option to be use for unit testing');
}
@@ -78,14 +78,13 @@ class WatchCommand extends Command
{
list($this->input, $this->output) = array($input, $output);
- list($counter, $source) = array(1, realpath($input->getOption('source')));
+ $source = getcwd() . '/' . $input->getOption('source');
- $settings = new Settings;
+ list($counter, $settings) = array(1, new Settings);
$settings = $settings->load($source . '/staticka.php');
- $output->writeln('<info>Watching ' . $source . ' for changes...</info>');
- $output->writeln('');
+ $output->writeln('<info>Watching ' . $source . ' for changes...' . PHP_EOL . '</info>');
$files = $this->files($settings);
|
Update bin/staticka.php and console commands
|
staticka_staticka
|
train
|
f366f2bd562df050cd3e107c1693f9c929539c97
|
diff --git a/Form/Type/DateRangeType.php b/Form/Type/DateRangeType.php
index <HASH>..<HASH> 100644
--- a/Form/Type/DateRangeType.php
+++ b/Form/Type/DateRangeType.php
@@ -28,8 +28,13 @@ class DateRangeType extends AbstractType
public function buildForm(FormBuilder $builder, array $options)
{
- $builder->add('start', 'date', array('required' => false));
- $builder->add('end', 'date', array('required' => false));
+ $builder->add('start', 'date', array_merge(array('required' => false), $options['field_options']));
+ $builder->add('end', 'date', array_merge(array('required' => false), $options['field_options']));
+ }
+
+ public function getDefaultOptions(array $options)
+ {
+ return $options;
}
public function getName()
diff --git a/Form/Type/Filter/DateRangeType.php b/Form/Type/Filter/DateRangeType.php
index <HASH>..<HASH> 100644
--- a/Form/Type/Filter/DateRangeType.php
+++ b/Form/Type/Filter/DateRangeType.php
@@ -47,10 +47,10 @@ class DateRangeType extends AbstractType
$choices = array(
self::TYPE_RANGE => $this->translator->trans('label_date_type_range', array(), 'SonataAdminBundle'),
);
-
+
$builder
->add('type', 'choice', array('choices' => $choices, 'required' => false))
- ->add('value', 'sonata_type_date_range', array('required' => false))
+ ->add('value', 'sonata_type_date_range', array('field_options' => array_merge(array('format' => 'yyyy-MM-dd'), $options['field_options'])))
;
}
diff --git a/Form/Type/Filter/DateType.php b/Form/Type/Filter/DateType.php
index <HASH>..<HASH> 100644
--- a/Form/Type/Filter/DateType.php
+++ b/Form/Type/Filter/DateType.php
@@ -69,7 +69,7 @@ class DateType extends AbstractType
$builder
->add('type', 'choice', array('choices' => $choices, 'required' => false))
- ->add('value', 'date', array('required' => false))
+ ->add('value', 'date', array_merge(array('required' => false, 'format' => 'yyyy-MM-dd'), $options['field_options']))
;
}
|
added widget option for date and dateRange filters
|
sonata-project_SonataAdminBundle
|
train
|
ccc54747312569996b623e644042ed4d7fe530cf
|
diff --git a/test/unit/Geometry/models/ThroatSeedTest.py b/test/unit/Geometry/models/ThroatSeedTest.py
index <HASH>..<HASH> 100644
--- a/test/unit/Geometry/models/ThroatSeedTest.py
+++ b/test/unit/Geometry/models/ThroatSeedTest.py
@@ -8,6 +8,7 @@ class ThroatSeedTest:
self.geo = OpenPNM.Geometry.GenericGeometry(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
+ self.geo['pore.seed'] = sp.rand(self.net.Np)
def test_random(self):
self.geo.models.add(propname='throat.seed',
@@ -17,7 +18,7 @@ class ThroatSeedTest:
assert sp.amax(self.geo['throat.seed']) > 1.9
assert sp.amin(self.geo['throat.seed']) > 0.1
- def test_neighbor_min(self):
+ def test_neighbor(self):
self.geo.models.add(propname='throat.seed_max',
model=OpenPNM.Geometry.models.throat_seed.neighbor,
mode='max')
|
Forgot to set pore.seed on the test geom
|
PMEAL_OpenPNM
|
train
|
fec537bb127a1ebb9f1e100fa2648bb9f6566bff
|
diff --git a/tests/functional/legacy_api/test_removed.py b/tests/functional/legacy_api/test_removed.py
index <HASH>..<HASH> 100644
--- a/tests/functional/legacy_api/test_removed.py
+++ b/tests/functional/legacy_api/test_removed.py
@@ -18,3 +18,11 @@ def test_removed_upload_apis(webtest, action):
resp = webtest.post("/pypi?:action={}".format(action), status=410)
assert resp.status == \
"410 This API is no longer supported, instead simply upload the file."
+
+
+def test_remove_doc_upload(webtest):
+ resp = webtest.post("/pypi?:action=doc_upload", status=410)
+ assert resp.status == (
+ "410 Uploading documentation is no longer supported, we recommend "
+ "using https://readthedocs.org/."
+ )
diff --git a/tests/unit/legacy/api/test_pypi.py b/tests/unit/legacy/api/test_pypi.py
index <HASH>..<HASH> 100644
--- a/tests/unit/legacy/api/test_pypi.py
+++ b/tests/unit/legacy/api/test_pypi.py
@@ -1151,6 +1151,16 @@ def test_submit(pyramid_request):
"410 This API is no longer supported, instead simply upload the file."
+def test_doc_upload(pyramid_request):
+ resp = pypi.doc_upload(pyramid_request)
+
+ assert resp.status_code == 410
+ assert resp.status == (
+ "410 Uploading documentation is no longer supported, we recommend "
+ "using https://readthedocs.org/."
+ )
+
+
def test_forbidden_legacy():
exc, request = pretend.stub(), pretend.stub()
resp = pypi.forbidden_legacy(exc, request)
diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py
index <HASH>..<HASH> 100644
--- a/tests/unit/test_routes.py
+++ b/tests/unit/test_routes.py
@@ -95,4 +95,5 @@ def test_routes():
pretend.call("legacy.api.pypi.file_upload", "file_upload"),
pretend.call("legacy.api.pypi.submit", "submit"),
pretend.call("legacy.api.pypi.submit_pkg_info", "submit_pkg_info"),
+ pretend.call("legacy.api.pypi.doc_upload", "doc_upload"),
]
diff --git a/warehouse/legacy/api/pypi.py b/warehouse/legacy/api/pypi.py
index <HASH>..<HASH> 100644
--- a/warehouse/legacy/api/pypi.py
+++ b/warehouse/legacy/api/pypi.py
@@ -696,6 +696,18 @@ def submit(request):
)
+@view_config(
+ route_name="legacy.api.pypi.doc_upload",
+ decorator=[require_POST, csrf_exempt],
+)
+def doc_upload(request):
+ return _exc_with_message(
+ HTTPGone,
+ "Uploading documentation is no longer supported, we recommend using "
+ "https://readthedocs.org/.",
+ )
+
+
@forbidden_view_config(request_param=":action")
def forbidden_legacy(exc, request):
# We're not going to do anything amazing here, this just exists to override
diff --git a/warehouse/routes.py b/warehouse/routes.py
index <HASH>..<HASH> 100644
--- a/warehouse/routes.py
+++ b/warehouse/routes.py
@@ -67,6 +67,7 @@ def includeme(config):
"legacy.api.pypi.submit_pkg_info",
"submit_pkg_info",
)
+ config.add_pypi_action_route("legacy.api.pypi.doc_upload", "doc_upload")
# Legacy Documentation
config.add_route("legacy.docs", config.registry.settings["docs.url"])
|
Disable uploading documentation, direct users to ReadTheDocs.
|
pypa_warehouse
|
train
|
9b8b480c8cddf7e9ddfb723701f71132eee32d7b
|
diff --git a/system/src/Grav/Common/Data/BlueprintSchema.php b/system/src/Grav/Common/Data/BlueprintSchema.php
index <HASH>..<HASH> 100644
--- a/system/src/Grav/Common/Data/BlueprintSchema.php
+++ b/system/src/Grav/Common/Data/BlueprintSchema.php
@@ -244,8 +244,8 @@ class BlueprintSchema extends BlueprintSchemaBase implements ExportInterface
|| !empty($field['disabled'])
// Field validation is set to be ignored
|| !empty($field['validate']['ignore'])
- // Field is toggleable and the toggle is turned off
- || (!empty($field['toggleable']) && empty($toggles[$key]))
+ // Field is overridable and the toggle is turned off
+ || (!empty($field['overridable']) && empty($toggles[$key]))
) {
continue;
}
@@ -279,9 +279,9 @@ class BlueprintSchema extends BlueprintSchemaBase implements ExportInterface
continue;
}
- // Skip toggleable fields without value.
- // TODO: We need better toggleable support, which is not just ignoring required values but also looking if defaults are good.
- if (!empty($field['toggleable']) && !isset($data[$name])) {
+ // Skip overridable fields without value.
+ // TODO: We need better overridable support, which is not just ignoring required values but also looking if defaults are good.
+ if (!empty($field['overridable']) && !isset($data[$name])) {
continue;
}
|
Added basic overridable support for the fields
|
getgrav_grav
|
train
|
8e4b7498fcfd77a9d7c7558ab435ef6c31d8f2ae
|
diff --git a/lib/fluent/command/plugin_generator.rb b/lib/fluent/command/plugin_generator.rb
index <HASH>..<HASH> 100644
--- a/lib/fluent/command/plugin_generator.rb
+++ b/lib/fluent/command/plugin_generator.rb
@@ -236,14 +236,17 @@ HELP
end
class NoLicense
- attr_reader :name, :full_name, :preamble, :text
+ attr_reader :name, :full_name, :text
def initialize
@name = ""
@full_name = ""
- @preamble = ""
@text = ""
end
+
+ def preamble(usename)
+ ""
+ end
end
class ApacheLicense
|
Define NoLicense#preamble same as ApacheLicense#preamble
|
fluent_fluentd
|
train
|
d1bf928c550ec1e21df9b45c83318cc0bc6d0061
|
diff --git a/grimoire_elk/arthur.py b/grimoire_elk/arthur.py
index <HASH>..<HASH> 100755
--- a/grimoire_elk/arthur.py
+++ b/grimoire_elk/arthur.py
@@ -254,7 +254,13 @@ def load_identities(ocean_backend, enrich_backend):
items_count = 0
new_identities = []
- for item in ocean_backend.fetch():
+ # Support that ocean_backend is a list of items (old API)
+ if isinstance(ocean_backend, list):
+ items = ocean_backend
+ else:
+ items = ocean_backend.fetch()
+
+ for item in items:
items_count += 1
# Get identities from new items to be added to SortingHat
identities = enrich_backend.get_identities(item)
|
[enrich] Support old API when load identities from raw items (needed by track_items.py library)
|
chaoss_grimoirelab-elk
|
train
|
d7fe34c440b5b52edd0ebabfa942d560e35224f3
|
diff --git a/src/fx/fx.js b/src/fx/fx.js
index <HASH>..<HASH> 100644
--- a/src/fx/fx.js
+++ b/src/fx/fx.js
@@ -295,13 +295,13 @@ jQuery.fn.extend({
* @example $("p").animate({
* opacity: 'show'
* }, "slow", "easein");
- * @desc An example of using an 'easing' function to provide a different style of animation. This will only work if you have a plugin that provides this easing function (Only 'linear' is provided by default, with jQuery).
+ * @desc An example of using an 'easing' function to provide a different style of animation. This will only work if you have a plugin that provides this easing function (Only "swing" and "linear" are provided by default, with jQuery).
*
* @name animate
* @type jQuery
* @param Hash params A set of style attributes that you wish to animate, and to what end.
* @param String|Number speed (optional) A string representing one of the three predefined speeds ("slow", "normal", or "fast") or the number of milliseconds to run the animation (e.g. 1000).
- * @param String easing (optional) The name of the easing effect that you want to use (e.g. swing or linear). Defaults to "swing".
+ * @param String easing (optional) The name of the easing effect that you want to use (e.g. "swing" or "linear"). Defaults to "swing".
* @param Function callback (optional) A function to be executed whenever the animation completes.
* @cat Effects
*/
|
improve documentation of animate() function by mentioning both the 'swing' and 'linear' easings which are provided by default
|
jquery_jquery
|
train
|
7e3eb9770b1b5cc4b6b86f8d2fb8056f14dadbf7
|
diff --git a/news-bundle/src/Resources/contao/dca/tl_news.php b/news-bundle/src/Resources/contao/dca/tl_news.php
index <HASH>..<HASH> 100644
--- a/news-bundle/src/Resources/contao/dca/tl_news.php
+++ b/news-bundle/src/Resources/contao/dca/tl_news.php
@@ -282,7 +282,7 @@ $GLOBALS['TL_DCA']['tl_news'] = array
'inputType' => 'imageSize',
'options' => $GLOBALS['TL_CROP'],
'reference' => &$GLOBALS['TL_LANG']['MSC'],
- 'eval' => array('rgxp'=>'digit', 'nospace'=>true, 'helpwizard'=>true, 'tl_class'=>'w50'),
+ 'eval' => array('rgxp'=>'natural', 'nospace'=>true, 'helpwizard'=>true, 'tl_class'=>'w50'),
'sql' => "varchar(64) NOT NULL default ''"
),
'imagemargin' => array
diff --git a/news-bundle/src/Resources/contao/dca/tl_news_archive.php b/news-bundle/src/Resources/contao/dca/tl_news_archive.php
index <HASH>..<HASH> 100644
--- a/news-bundle/src/Resources/contao/dca/tl_news_archive.php
+++ b/news-bundle/src/Resources/contao/dca/tl_news_archive.php
@@ -212,7 +212,7 @@ $GLOBALS['TL_DCA']['tl_news_archive'] = array
'label' => &$GLOBALS['TL_LANG']['tl_news_archive']['perPage'],
'exclude' => true,
'inputType' => 'text',
- 'eval' => array('rgxp'=>'digit', 'tl_class'=>'w50'),
+ 'eval' => array('rgxp'=>'natural', 'tl_class'=>'w50'),
'sql' => "smallint(5) unsigned NOT NULL default '0'"
),
'moderate' => array
diff --git a/news-bundle/src/Resources/contao/dca/tl_news_feed.php b/news-bundle/src/Resources/contao/dca/tl_news_feed.php
index <HASH>..<HASH> 100644
--- a/news-bundle/src/Resources/contao/dca/tl_news_feed.php
+++ b/news-bundle/src/Resources/contao/dca/tl_news_feed.php
@@ -184,7 +184,7 @@ $GLOBALS['TL_DCA']['tl_news_feed'] = array
'default' => 25,
'exclude' => true,
'inputType' => 'text',
- 'eval' => array('mandatory'=>true, 'rgxp'=>'digit', 'tl_class'=>'w50'),
+ 'eval' => array('mandatory'=>true, 'rgxp'=>'natural', 'tl_class'=>'w50'),
'sql' => "smallint(5) unsigned NOT NULL default '0'"
),
'feedBase' => array
|
[News] Add a regex to check for nonnegative natural numbers (see #<I>)
|
contao_contao
|
train
|
f5f4dc730c1da556862dcfb7d8823cf966c89b5f
|
diff --git a/tests/POTests/QueryBuilder/Statements/SelectTest.php b/tests/POTests/QueryBuilder/Statements/SelectTest.php
index <HASH>..<HASH> 100644
--- a/tests/POTests/QueryBuilder/Statements/SelectTest.php
+++ b/tests/POTests/QueryBuilder/Statements/SelectTest.php
@@ -151,8 +151,8 @@ class SelectTest extends PHPUnit_Framework_TestCase
$this->o->limit(10);
$this->assertEquals($sql, $this->o->toSql());
- $sql .= ', 2';
- $this->o->limit(10, 2);
+ $sql .= ', 3';
+ $this->o->limit(10, 3);
$this->assertEquals($sql, $this->o->toSql());
}
@@ -194,8 +194,8 @@ class SelectTest extends PHPUnit_Framework_TestCase
*/
public function itAddsLeftJoin()
{
- $this->o->from('table')->leftJoin('t1')->leftJoin('t2', 't1.id = t2.t1_id');
- $sql = 'LEFT JOIN t1 LEFT JOIN t2 ON t1.id = t2.t1_id';
+ $this->o->from('table')->leftJoin('t1')->leftJoin('t3', 't1.id = t3.t1_id');
+ $sql = 'LEFT JOIN t1 LEFT JOIN t3 ON t1.id = t3.t1_id';
$this->assertEquals($sql, $this->o->getJoins()->toSql());
}
@@ -216,7 +216,7 @@ class SelectTest extends PHPUnit_Framework_TestCase
->where('a', 'b')
->where('a', 'x', '!=')
->where(array(
- 'foo' => 'bar',
+ 'bar' => 'foo',
'foobar' => 'foo'
));
@@ -224,7 +224,7 @@ class SelectTest extends PHPUnit_Framework_TestCase
'a = 1',
'a = "b"',
'a != "x"',
- 'foo = "bar"',
+ 'bar = "foo"',
'foobar = "foo"',
);
|
Deceived phpcpd
|
mjacobus_php-query-builder
|
train
|
f96623cb74023231eff065d236460c323ac94019
|
diff --git a/course/lib.php b/course/lib.php
index <HASH>..<HASH> 100644
--- a/course/lib.php
+++ b/course/lib.php
@@ -2410,6 +2410,11 @@ function create_course($data, $editoroptions = NULL) {
}
}
+ if (empty($CFG->enablecourserelativedates)) {
+ // Make sure we're not setting the relative dates mode when the setting is disabled.
+ unset($data->relativedatesmode);
+ }
+
if ($errorcode = course_validate_dates((array)$data)) {
throw new moodle_exception($errorcode);
}
@@ -2522,6 +2527,9 @@ function update_course($data, $editoroptions = NULL) {
$oldcourse = course_get_format($data->id)->get_course();
$context = context_course::instance($oldcourse->id);
+ // Make sure we're not changing whatever the course's relativedatesmode setting is.
+ unset($data->relativedatesmode);
+
// Capture the updated fields for the log data.
$updatedfields = [];
foreach (get_object_vars($oldcourse) as $field => $value) {
diff --git a/course/tests/courselib_test.php b/course/tests/courselib_test.php
index <HASH>..<HASH> 100644
--- a/course/tests/courselib_test.php
+++ b/course/tests/courselib_test.php
@@ -745,6 +745,63 @@ class core_course_courselib_testcase extends advanced_testcase {
$this->assertGreaterThan($oldtimemodified, $newtimemodified);
}
+ /**
+ * Relative dates mode settings provider for course creation.
+ */
+ public function create_course_relative_dates_provider() {
+ return [
+ [0, 0, 0],
+ [0, 1, 0],
+ [1, 0, 0],
+ [1, 1, 1],
+ ];
+ }
+
+ /**
+ * Test create_course by attempting to change the relative dates mode.
+ *
+ * @dataProvider create_course_relative_dates_provider
+ * @param int $setting The value for the 'enablecourserelativedates' admin setting.
+ * @param int $mode The value for the course's 'relativedatesmode' field.
+ * @param int $expectedvalue The expected value of the 'relativedatesmode' field after course creation.
+ */
+ public function test_relative_dates_mode_for_course_creation($setting, $mode, $expectedvalue) {
+ global $DB;
+
+ $this->resetAfterTest();
+
+ set_config('enablecourserelativedates', $setting);
+
+ // Generate a course with relative dates mode set to 1.
+ $course = $this->getDataGenerator()->create_course(['relativedatesmode' => $mode]);
+
+ // Verify that the relative dates match what's expected.
+ $relativedatesmode = $DB->get_field('course', 'relativedatesmode', ['id' => $course->id]);
+ $this->assertEquals($expectedvalue, $relativedatesmode);
+ }
+
+ /**
+ * Test update_course by attempting to change the relative dates mode.
+ */
+ public function test_relative_dates_mode_for_course_update() {
+ global $DB;
+
+ $this->resetAfterTest();
+
+ set_config('enablecourserelativedates', 1);
+
+ // Generate a course with relative dates mode set to 1.
+ $course = $this->getDataGenerator()->create_course(['relativedatesmode' => 1]);
+
+ // Attempt to update the course with a changed relativedatesmode.
+ $course->relativedatesmode = 0;
+ update_course($course);
+
+ // Verify that the relative dates mode has not changed.
+ $relativedatesmode = $DB->get_field('course', 'relativedatesmode', ['id' => $course->id]);
+ $this->assertEquals(1, $relativedatesmode);
+ }
+
public function test_course_add_cm_to_section() {
global $DB;
$this->resetAfterTest(true);
|
MDL-<I> course: Relative dates mode handling
* On course creation, make sure that we can enable relative dates mode
only when the site setting is enabled.
* On course update, make sure that the relative dates mode setting
cannot be changed.
|
moodle_moodle
|
train
|
4bf1910bb1ea9c353ac7cd0de16f91a39444a9c3
|
diff --git a/estnltk/taggers/morph/morf.py b/estnltk/taggers/morph/morf.py
index <HASH>..<HASH> 100644
--- a/estnltk/taggers/morph/morf.py
+++ b/estnltk/taggers/morph/morf.py
@@ -31,7 +31,7 @@ class VabamorfTagger(Tagger):
def __init__(self,
layer_name='morph_analysis',
- postanalysis_tagger=PostMorphAnalysisTagger(),
+ postanalysis_tagger=None,
**kwargs):
"""Initialize VabamorfTagger class.
@@ -52,6 +52,14 @@ class VabamorfTagger(Tagger):
logical analyses for disambiguation (if required) and fills in
values of extra attributes in morph_analysis Spans.
"""
+ # Check if the user has provided a custom postanalysis_tagger
+ postanalysis_tagger_given = False
+ if postanalysis_tagger:
+ postanalysis_tagger_given = True
+ else:
+ # Initialize default postanalysis_tagger
+ postanalysis_tagger=PostMorphAnalysisTagger(layer_name=layer_name)
+
self.kwargs = kwargs
self.layer_name = layer_name
@@ -75,12 +83,6 @@ class VabamorfTagger(Tagger):
layer_name=layer_name )
self.vabamorf_disambiguator = VabamorfDisambiguator( vm_instance=vm_instance, \
layer_name=layer_name )
- # If a custom layer name is used, rewrite postanalysis_tagger's layer name, if required
- if postanalysis_tagger and postanalysis_tagger.layer_name != layer_name:
- # Note: if user has already created a PostMorphAnalysisTagger and passed
- # to us, we assume that she also has fixed layer_name in a way that
- # it matches the input layer_name;
- postanalysis_tagger=PostMorphAnalysisTagger(layer_name=layer_name)
self.configuration = {'postanalysis_tagger':self.postanalysis_tagger.__class__.__name__, }
# 'vabamorf_analyser':self.vabamorf_analyser.__class__.__name__,
diff --git a/estnltk/taggers/morph/postanalysis_tagger.py b/estnltk/taggers/morph/postanalysis_tagger.py
index <HASH>..<HASH> 100644
--- a/estnltk/taggers/morph/postanalysis_tagger.py
+++ b/estnltk/taggers/morph/postanalysis_tagger.py
@@ -204,7 +204,7 @@ class PostMorphAnalysisTagger(Tagger):
Text object to which ignore-markings will be added.
'''
comp_token_id = 0
- for spanlist in text.morph_analysis.spans:
+ for spanlist in text[self.layer_name].spans:
if comp_token_id < len(text['compound_tokens'].spans):
comp_token = text['compound_tokens'].spans[comp_token_id]
if (comp_token.start == spanlist.start and \
@@ -242,7 +242,7 @@ class PostMorphAnalysisTagger(Tagger):
'''
comp_token_id = 0
has_normalized = 'normalized' in text['compound_tokens'].attributes
- for spanlist in text.morph_analysis.spans:
+ for spanlist in text[self.layer_name].spans:
if comp_token_id < len(text['compound_tokens'].spans):
comp_token = text['compound_tokens'].spans[comp_token_id]
if (comp_token.start == spanlist.start and \
@@ -293,7 +293,7 @@ class PostMorphAnalysisTagger(Tagger):
'email' in comp_token.type:
for span in spanlist:
# Set partofspeech to H
- setattr(span, 'partofspeech', 'H')
+ setattr(span, 'partofspeech', 'H')
comp_token_id += 1
# 5) Fix abbreviations, such as 'toim.', 'Tlk.'
if self.configuration['fix_abbreviations'] and \
diff --git a/estnltk/tests/test_morph/test_vabamorftagger.py b/estnltk/tests/test_morph/test_vabamorftagger.py
index <HASH>..<HASH> 100644
--- a/estnltk/tests/test_morph/test_vabamorftagger.py
+++ b/estnltk/tests/test_morph/test_vabamorftagger.py
@@ -24,7 +24,7 @@ def test_analyse_segmentation_and_morphology():
text = Text('Tere, maailm!')
text.analyse('segmentation')
text.analyse('morphology')
- assert True
+ assert 'morph_analysis' in text.layers
def test_default_morph_analysis():
@@ -234,3 +234,14 @@ def test_default_morph_analysis_on_empty_input():
# Check results
assert [] == text['morph_analysis'].to_records()
+
+def test_default_morph_analysis_with_different_layer_name():
+ # Should be able to use a different layer name
+ # without running into errors
+ morph_analyser = VabamorfTagger( layer_name='my_morph' )
+ text = Text('Tere, maailm!')
+ text.tag_layer(['words', 'sentences'])
+ morph_analyser.tag(text)
+ # Check results
+ assert 'my_morph' in text.layers
+
|
Fix in VabamorfTagger: layer_name can now be changed without running into errors
|
estnltk_estnltk
|
train
|
34f1396d5263ba02c81349a07ad08b5f84e8a3e0
|
diff --git a/graphql_jwt/mixins.py b/graphql_jwt/mixins.py
index <HASH>..<HASH> 100644
--- a/graphql_jwt/mixins.py
+++ b/graphql_jwt/mixins.py
@@ -6,7 +6,6 @@ from graphene.types.generic import GenericScalar
from . import exceptions
from .refresh_token.mixins import RefreshTokenMixin
from .settings import jwt_settings
-from .shortcuts import get_token
from .utils import get_payload, get_user_by_payload
@@ -57,7 +56,11 @@ class KeepAliveRefreshMixin(object):
if jwt_settings.JWT_REFRESH_EXPIRED_HANDLER(orig_iat, info.context):
raise exceptions.JSONWebTokenError(_('Refresh has expired'))
- token = get_token(user, info.context, origIat=orig_iat)
+ payload = jwt_settings.JWT_PAYLOAD_HANDLER(user, info.context)
+ payload['origIat'] = orig_iat
+
+ token = jwt_settings.JWT_ENCODE_HANDLER(payload, info.context)
+
return cls(token=token, payload=payload)
diff --git a/graphql_jwt/refresh_token/mixins.py b/graphql_jwt/refresh_token/mixins.py
index <HASH>..<HASH> 100644
--- a/graphql_jwt/refresh_token/mixins.py
+++ b/graphql_jwt/refresh_token/mixins.py
@@ -5,8 +5,7 @@ from django.utils.translation import ugettext as _
import graphene
from .. import exceptions
-from ..shortcuts import get_token
-from ..utils import get_payload
+from ..settings import jwt_settings
from .shortcuts import get_refresh_token
@@ -22,8 +21,11 @@ class RefreshTokenMixin(object):
if refresh_token.is_expired(info.context):
raise exceptions.JSONWebTokenError(_('Refresh token is expired'))
- token = get_token(refresh_token.user, info.context)
- payload = get_payload(token, info.context)
+ payload = jwt_settings.JWT_PAYLOAD_HANDLER(
+ refresh_token.user,
+ info.context)
+
+ token = jwt_settings.JWT_ENCODE_HANDLER(payload, info.context)
refreshed_token = refresh_token.rotate().token
return cls(token=token, payload=payload, refresh_token=refreshed_token)
|
Improved refresh mixins payloads
|
flavors_django-graphql-jwt
|
train
|
5d862c390beef4c880ef2ae624dfa5e8945a4b56
|
diff --git a/packages/cozy-client/src/associations/helpers.js b/packages/cozy-client/src/associations/helpers.js
index <HASH>..<HASH> 100644
--- a/packages/cozy-client/src/associations/helpers.js
+++ b/packages/cozy-client/src/associations/helpers.js
@@ -22,16 +22,12 @@ export const responseToRelationship = response =>
})
const attachRelationship = (doc, relationships) => {
- if (
- doc.relationships &&
- relationships &&
- isEqual(Object.keys(doc.relationships), Object.keys(relationships))
- ) {
- return doc
- }
return {
...doc,
- relationships: relationships || {}
+ relationships: {
+ ...doc.relationships,
+ ...relationships
+ }
}
}
|
fix: Combine relationships when attaching them to responses
|
cozy_cozy-client
|
train
|
cec6cdddf342d0b1e630d6ffa648d860a42f196a
|
diff --git a/integration/install_test.go b/integration/install_test.go
index <HASH>..<HASH> 100644
--- a/integration/install_test.go
+++ b/integration/install_test.go
@@ -390,7 +390,7 @@ func poolAdd() ExecFlow {
}
return true
})
- c.Assert(ok, check.Equals, true, check.Commentf("nodes not ready after 2 minutes: %v", res))
+ c.Assert(ok, check.Equals, true, check.Commentf("nodes not ready after 2 minutes: %v - all nodes: %v", res, T("node-list").Run(env)))
for _, ip := range nodeIPs {
res = T("node-update", ip, "pool="+poolName).Run(env)
c.Assert(res, ResultOk)
|
integration: list all nodes on failure waiting cluster
|
tsuru_tsuru
|
train
|
210ee5eb8afb15a1fe205df3fcee583b0bc21f9e
|
diff --git a/src/components/button/button.spec.js b/src/components/button/button.spec.js
index <HASH>..<HASH> 100644
--- a/src/components/button/button.spec.js
+++ b/src/components/button/button.spec.js
@@ -39,4 +39,11 @@ describe('md-button', function() {
expect($log.warn).not.toHaveBeenCalled();
}));
+ it('should convert attributes on an md-button to attributes on the generated button', inject(function($compile, $rootScope) {
+ var button = $compile('<md-button hide hide-sm></md-button>')($rootScope);
+ $rootScope.$apply();
+ expect(button[0].hasAttribute('hide')).toBe(true);
+ expect(button[0].hasAttribute('hide-sm')).toBe(true);
+ }));
+
});
|
test(button): adds test to confirm issue button attributes are working properly
This confirms that #<I> is resolved.
|
angular_material
|
train
|
06e724332160858d36de9cb382b36c286f66e341
|
diff --git a/src/Sylius/Component/Core/Uploader/ImageUploader.php b/src/Sylius/Component/Core/Uploader/ImageUploader.php
index <HASH>..<HASH> 100644
--- a/src/Sylius/Component/Core/Uploader/ImageUploader.php
+++ b/src/Sylius/Component/Core/Uploader/ImageUploader.php
@@ -38,7 +38,7 @@ class ImageUploader implements ImageUploaderInterface
return;
}
- if (null !== $image->getPath()) {
+ if (null !== $image->getPath() && $this->has($image->getPath())) {
$this->remove($image->getPath());
}
@@ -77,4 +77,14 @@ class ImageUploader implements ImageUploaderInterface
substr($path, 4)
);
}
+
+ /**
+ * @param string $path
+ *
+ * @return bool
+ */
+ private function has($path)
+ {
+ return $this->filesystem->has($path);
+ }
}
|
Check if image exists before trying to delete it
Fixes an issue when source image was deleted on existing image in the database and uploading a new one would result in error because there is nothing to delete
|
Sylius_Sylius
|
train
|
3a424238b10d912d5d467a8ee827813d1933cdfc
|
diff --git a/dftimewolf/lib/processors/localplaso.py b/dftimewolf/lib/processors/localplaso.py
index <HASH>..<HASH> 100755
--- a/dftimewolf/lib/processors/localplaso.py
+++ b/dftimewolf/lib/processors/localplaso.py
@@ -49,6 +49,9 @@ class LocalPlasoProcessor(BaseModule):
if self._timezone:
cmd.extend(['-z', self._timezone])
+ # Analyze all available partitions.
+ cmd.extend(['--partition', 'all'])
+
# Setup logging.
cmd.extend(['--logfile', log_file_path])
|
Make localplaso process all partitions (fixes log2timeline#<I>) (#<I>)
|
log2timeline_dftimewolf
|
train
|
c0b99ff36ff1d1ecba9ff87f66646bb889a9005c
|
diff --git a/lib/sugarcrm/associations/association.rb b/lib/sugarcrm/associations/association.rb
index <HASH>..<HASH> 100644
--- a/lib/sugarcrm/associations/association.rb
+++ b/lib/sugarcrm/associations/association.rb
@@ -55,7 +55,7 @@ module SugarCRM
def to_s
"#<SugarCRM::Association @proxy_methods=[#{@proxy_methods.join(", ")}], " +
"@link_field=\"#{@link_field}\", @target=#{@target}, @owner=#{@owner.class}, " +
- "@cardinality=#{@cardinality}>"
+ "@cardinality=:#{@cardinality}>"
end
protected
|
fix Association#to_s to display cardinality symbol properly
|
chicks_sugarcrm
|
train
|
9c37a711e252eb7598095bf4753e7d3a89028427
|
diff --git a/adafruit_bme280.py b/adafruit_bme280.py
index <HASH>..<HASH> 100644
--- a/adafruit_bme280.py
+++ b/adafruit_bme280.py
@@ -198,8 +198,6 @@ class Adafruit_BME280:
def mode(self, value):
if not value in _BME280_MODES:
raise ValueError('Mode \'%s\' not supported' % (value))
- if self._mode == value:
- return
self._mode = value
self._write_ctrl_meas()
|
Always write the new mode to the sensor
In FORCE mode, the sensor changes back to SLEEP after completeing a single
measurement, but we are not updating the mode internally. It's better to
always write the mode to the sensor, and trust the caller not to change it
needlessly.
|
adafruit_Adafruit_CircuitPython_BME280
|
train
|
416d0c64c1bff97278b4b807183c042ed5f85e2c
|
diff --git a/README b/README
index <HASH>..<HASH> 100644
--- a/README
+++ b/README
@@ -1,6 +1,6 @@
The S2 Geometry Library is a spherical geometry library, useful for manipulating
regions on the sphere (commonly on Earth) and indexing geographic data.
-See http://godoc.org/code.google.com/p/gos2/s2 for documentation.
+See http://godoc.org/github.com/timehop/gos2/s2 for documentation.
For an analogous library in C++, see http://code.google.com/p/s2-geometry-library/.
diff --git a/r3/vector.go b/r3/vector.go
index <HASH>..<HASH> 100644
--- a/r3/vector.go
+++ b/r3/vector.go
@@ -4,7 +4,7 @@ import (
"fmt"
"math"
- "code.google.com/p/gos2/s1"
+ "github.com/timehop/gos2/s1"
)
// Vector represents a point in ℝ³.
diff --git a/s2/cellid_test.go b/s2/cellid_test.go
index <HASH>..<HASH> 100644
--- a/s2/cellid_test.go
+++ b/s2/cellid_test.go
@@ -3,8 +3,8 @@ package s2
import (
"testing"
- "code.google.com/p/gos2/r2"
- "code.google.com/p/gos2/s1"
+ "github.com/timehop/gos2/r2"
+ "github.com/timehop/gos2/s1"
)
func TestCellIDFromFace(t *testing.T) {
diff --git a/s2/latlng.go b/s2/latlng.go
index <HASH>..<HASH> 100644
--- a/s2/latlng.go
+++ b/s2/latlng.go
@@ -4,7 +4,7 @@ import (
"fmt"
"math"
- "code.google.com/p/gos2/s1"
+ "github.com/timehop/gos2/s1"
)
// LatLng represents a point on the unit sphere as a pair of angles.
diff --git a/s2/point.go b/s2/point.go
index <HASH>..<HASH> 100644
--- a/s2/point.go
+++ b/s2/point.go
@@ -3,8 +3,8 @@ package s2
import (
"math"
- "code.google.com/p/gos2/r3"
- "code.google.com/p/gos2/s1"
+ "github.com/timehop/gos2/r3"
+ "github.com/timehop/gos2/s1"
)
// Direction is an indication of the ordering of a set of points
diff --git a/s2/rect.go b/s2/rect.go
index <HASH>..<HASH> 100644
--- a/s2/rect.go
+++ b/s2/rect.go
@@ -4,8 +4,8 @@ import (
"fmt"
"math"
- "code.google.com/p/gos2/r1"
- "code.google.com/p/gos2/s1"
+ "github.com/timehop/gos2/r1"
+ "github.com/timehop/gos2/s1"
)
// Rect represents a closed latitude-longitude rectangle.
diff --git a/s2/rect_test.go b/s2/rect_test.go
index <HASH>..<HASH> 100644
--- a/s2/rect_test.go
+++ b/s2/rect_test.go
@@ -4,8 +4,8 @@ import (
"math"
"testing"
- "code.google.com/p/gos2/r1"
- "code.google.com/p/gos2/s1"
+ "github.com/timehop/gos2/r1"
+ "github.com/timehop/gos2/s1"
)
func TestEmptyAndFullRects(t *testing.T) {
diff --git a/s2/stuv.go b/s2/stuv.go
index <HASH>..<HASH> 100644
--- a/s2/stuv.go
+++ b/s2/stuv.go
@@ -3,7 +3,7 @@ package s2
import (
"math"
- "code.google.com/p/gos2/r3"
+ "github.com/timehop/gos2/r3"
)
const (
|
Change references from original repo to timehop repo
|
timehop_gos2
|
train
|
94cbfdd83245c01b63d5a036f9d315b0b05d462c
|
diff --git a/fluent_comments/moderation.py b/fluent_comments/moderation.py
index <HASH>..<HASH> 100644
--- a/fluent_comments/moderation.py
+++ b/fluent_comments/moderation.py
@@ -1,4 +1,5 @@
import warnings
+from akismet import Akismet
from django.core.exceptions import ImproperlyConfigured
from django.utils.encoding import smart_str
from fluent_comments import appsettings
@@ -21,14 +22,6 @@ try:
except ImportError:
from urlparse import urljoin # Python 2
-# Optional dependency (for lacking Python 3 support)
-try:
- from akismet import Akismet
-except ImportError:
- Akismet = None
- if appsettings.FLUENT_CONTENTS_USE_AKISMET:
- warnings.warn("No `akismet` package has been installed, disabling Akismet checks for django-fluent-comments.", RuntimeWarning)
-
# Akismet code originally based on django-comments-spamfighter.
@@ -52,7 +45,7 @@ class FluentCommentsModerator(CommentModerator):
close_after = appsettings.FLUENT_COMMENTS_CLOSE_AFTER_DAYS
moderate_after = appsettings.FLUENT_COMMENTS_MODERATE_AFTER_DAYS
email_notification = False # Using signals instead
- akismet_check = appsettings.FLUENT_CONTENTS_USE_AKISMET and Akismet is not None
+ akismet_check = appsettings.FLUENT_CONTENTS_USE_AKISMET
akismet_check_action = appsettings.FLUENT_COMMENTS_AKISMET_ACTION
def allow(self, comment, content_object, request):
|
Akismet is no longer optional, remove soft dependency handling
|
django-fluent_django-fluent-comments
|
train
|
13288282a92bb081184dee5f8be0330c7ab1e9b9
|
diff --git a/src/dependencies/contrib/_rest_framework.py b/src/dependencies/contrib/_rest_framework.py
index <HASH>..<HASH> 100644
--- a/src/dependencies/contrib/_rest_framework.py
+++ b/src/dependencies/contrib/_rest_framework.py
@@ -91,7 +91,8 @@ def apply_generic_api_view_methods(handler, injector):
"lookup_field",
"lookup_url_kwarg",
"filter_backends",
- "filter_class",
+ "filterset_class",
+ "filter_class", # Legacy name for django-filter 1.x
"pagination_class",
]:
if attribute in injector:
diff --git a/tests/helpers/polls/api/filtersets.py b/tests/helpers/polls/api/filtersets.py
index <HASH>..<HASH> 100644
--- a/tests/helpers/polls/api/filtersets.py
+++ b/tests/helpers/polls/api/filtersets.py
@@ -1,7 +1,11 @@
from django.contrib.auth.models import User
+from django_filters import VERSION
from django_filters.rest_framework import FilterSet
+use_filterset_name = VERSION >= (2, 0)
+
+
class UserFilter(FilterSet):
class Meta(object):
diff --git a/tests/helpers/polls/api/views.py b/tests/helpers/polls/api/views.py
index <HASH>..<HASH> 100644
--- a/tests/helpers/polls/api/views.py
+++ b/tests/helpers/polls/api/views.py
@@ -18,7 +18,7 @@ from .commands import (
UserOperations,
UserUpdateOperations,
)
-from .filtersets import UserFilter
+from .filtersets import UserFilter, use_filterset_name
from .metadata import DenyMetadata
from .negotiation import DenyNegotiation
from .serializers import UserSerializer
@@ -112,7 +112,10 @@ class UserListView(Injector):
queryset = User.objects.all()
serializer_class = UserSerializer
filter_backends = (DjangoFilterBackend,)
- filter_class = UserFilter
+ if use_filterset_name:
+ filterset_class = UserFilter
+ else:
+ filter_class = UserFilter
pagination_class = LimitOffsetPagination
|
Support modern filterset_class attribute in the rest framework contrib.
|
dry-python_dependencies
|
train
|
9f3859f648e9840e1725539194714fe55b903e42
|
diff --git a/src/main/org/openscience/cdk/smiles/SmilesParser.java b/src/main/org/openscience/cdk/smiles/SmilesParser.java
index <HASH>..<HASH> 100644
--- a/src/main/org/openscience/cdk/smiles/SmilesParser.java
+++ b/src/main/org/openscience/cdk/smiles/SmilesParser.java
@@ -65,10 +65,19 @@ import java.util.StringTokenizer;
* @cdk.bug 1579244
*/
@TestClass("org.openscience.cdk.smiles.SmilesParserTest")
-public class SmilesParser {
+public final class SmilesParser {
private static ILoggingTool logger = LoggingToolFactory.createLoggingTool(SmilesParser.class);
- protected IChemObjectBuilder builder;
+
+ /**
+ * The builder determines which CDK domain objects to create.
+ */
+ private final IChemObjectBuilder builder;
+
+ /**
+ * Direct converter from Beam to CDK.
+ */
+ private final BeamToCDK beamToCDK;
/*
* Boolean to preserve aromaticity as provided in the Smiles itself (through lowecase letters (c1cccc1) or colons).
@@ -78,12 +87,14 @@ public class SmilesParser {
private boolean preservingAromaticity = false;
/**
- * Constructor for the SmilesParser object.
+ * Create a new SMILES parser which will create {@link IAtomContainer}s with
+ * the specified builder.
*
- * @param builder IChemObjectBuilder used to create the IMolecules from
+ * @param builder used to create the CDK domain objects
*/
- public SmilesParser(IChemObjectBuilder builder) {
- this.builder = builder;
+ public SmilesParser(final IChemObjectBuilder builder) {
+ this.builder = builder;
+ this.beamToCDK = new BeamToCDK(builder);
}
/**
|
Encapsulating parser and documenting fields and constructor.
|
cdk_cdk
|
train
|
4d090f18e8aae007e0c7d7e4525a5e2292b553f5
|
diff --git a/src/toil/provisioners/aws/__init__.py b/src/toil/provisioners/aws/__init__.py
index <HASH>..<HASH> 100644
--- a/src/toil/provisioners/aws/__init__.py
+++ b/src/toil/provisioners/aws/__init__.py
@@ -77,31 +77,33 @@ def choose_spot_zone(zones, bid, spot_history):
:return: the name of the selected zone
>>> from collections import namedtuple
- >>> FauxHistory = namedtuple( 'FauxHistory', [ 'price', 'availability_zone' ] )
- >>> ZoneTuple = namedtuple( 'ZoneTuple', [ 'name' ] )
-
- >>> zones = [ZoneTuple( 'us-west-2a' ), ZoneTuple( 'us-west-2b' )]
- >>> spot_history = [ FauxHistory( 0.1, 'us-west-2a' ), \
- FauxHistory( 0.2,'us-west-2a'), \
- FauxHistory( 0.3,'us-west-2b'), \
- FauxHistory( 0.6,'us-west-2b')]
- >>> # noinspection PyProtectedMember
- >>> choose_spot_zone(zones, 0.15, spot_history )
+ >>> FauxHistory = namedtuple('FauxHistory', ['price', 'availability_zone'])
+ >>> ZoneTuple = namedtuple('ZoneTuple', ['name'])
+ >>> zones = [ZoneTuple('us-west-2a'), ZoneTuple('us-west-2b')]
+ >>> spot_history = [FauxHistory(0.1, 'us-west-2a'), \
+ FauxHistory(0.2, 'us-west-2a'), \
+ FauxHistory(0.3, 'us-west-2b'), \
+ FauxHistory(0.6, 'us-west-2b')]
+ >>> choose_spot_zone(zones, 0.15, spot_history)
'us-west-2a'
- >>> spot_history={ FauxHistory( 0.1, 'us-west-2a' ), \
- FauxHistory( 0.7, 'us-west-2a' ), \
- FauxHistory( 0.1, "us-west-2b" ), \
- FauxHistory( 0.6, 'us-west-2b' ) }
- >>> # noinspection PyProtectedMember
+ >>> spot_history=[FauxHistory(0.3, 'us-west-2a'), \
+ FauxHistory(0.2, 'us-west-2a'), \
+ FauxHistory(0.1, 'us-west-2b'), \
+ FauxHistory(0.6, 'us-west-2b')]
>>> choose_spot_zone(zones, 0.15, spot_history)
'us-west-2b'
- """
- # Create two lists of tuples of form: [ (zone.name, std_deviation), ... ] one for zones
+ >>> spot_history=[FauxHistory(0.1, 'us-west-2a'), \
+ FauxHistory(0.7, 'us-west-2a'), \
+ FauxHistory(0.1, 'us-west-2b'), \
+ FauxHistory(0.6, 'us-west-2b')]
+ >>> choose_spot_zone(zones, 0.15, spot_history)
+ 'us-west-2b'
+ """
+ # Create two lists of tuples of form: [(zone.name, std_deviation), ...] one for zones
# over the bid price and one for zones under bid price. Each are sorted by increasing
# standard deviation values.
- #
markets_under_bid, markets_over_bid = [], []
for zone in zones:
zone_histories = [zone_history for zone_history in spot_history if zone_history.availability_zone == zone.name]
@@ -113,8 +115,7 @@ def choose_spot_zone(zones, bid, spot_history):
zone_tuple = ZoneTuple(name=zone.name, price_deviation=price_deviation)
(markets_over_bid, markets_under_bid)[recent_price < bid].append(zone_tuple)
- return min(markets_under_bid or markets_over_bid,
- key=attrgetter('price_deviation')).name
+ return min(markets_under_bid or markets_over_bid, key=attrgetter('price_deviation')).name
def optimize_spot_bid(ctx, instance_type, spot_bid):
@@ -162,6 +163,7 @@ def _check_spot_bid(spot_bid, spot_history):
logger.warn("Your bid $ %f is more than double this instance type's average "
"spot price ($ %f) over the last week", spot_bid, average)
+
def _get_spot_history(ctx, instance_type):
"""
Returns list of 1,000 most recent spot market data points represented as SpotPriceHistory
@@ -189,6 +191,7 @@ sdbFullPolicy = dict(Version="2012-10-17", Statement=[
iamFullPolicy = dict(Version="2012-10-17", Statement=[
dict(Effect="Allow", Resource="*", Action="iam:*")])
+
def checkValidNodeTypes(provisioner, nodeTypes):
"""
Raises if an invalid nodeType is specified for aws, azure, or gce.
|
Spot bid doctest py2/3 compatibility.
|
DataBiosphere_toil
|
train
|
ba6c0321808b5b8da0b795c7a66ee303ef636c94
|
diff --git a/transport/src/main/java/io/netty/channel/ChannelInboundMessageHandlerAdapter.java b/transport/src/main/java/io/netty/channel/ChannelInboundMessageHandlerAdapter.java
index <HASH>..<HASH> 100644
--- a/transport/src/main/java/io/netty/channel/ChannelInboundMessageHandlerAdapter.java
+++ b/transport/src/main/java/io/netty/channel/ChannelInboundMessageHandlerAdapter.java
@@ -28,6 +28,8 @@ public abstract class ChannelInboundMessageHandlerAdapter<I>
@Override
public final void inboundBufferUpdated(ChannelHandlerContext ctx) throws Exception {
+ firstMessageReceived(ctx);
+
MessageBuf<I> in = ctx.inboundMessageBuffer();
for (;;) {
I msg = in.poll();
@@ -40,7 +42,11 @@ public abstract class ChannelInboundMessageHandlerAdapter<I>
ctx.fireExceptionCaught(t);
}
}
+
+ lastMessageReceived(ctx);
}
+ public void firstMessageReceived(ChannelHandlerContext ctx) throws Exception { }
public abstract void messageReceived(ChannelHandlerContext ctx, I msg) throws Exception;
+ public void lastMessageReceived(ChannelHandlerContext ctx) throws Exception { }
}
|
[#<I>] Provide pre/post handler methods to ChannelInboundMessageHandler.inboundBufferUpdated()
- Add pre- and post- handler methods
|
netty_netty
|
train
|
c1c4d9a315539f5ea4c5f42686b909944647a4cf
|
diff --git a/blockstore/lib/config.py b/blockstore/lib/config.py
index <HASH>..<HASH> 100644
--- a/blockstore/lib/config.py
+++ b/blockstore/lib/config.py
@@ -46,7 +46,7 @@ BLOCKSTORED_CONFIG_FILE = 'blockstore.ini'
try:
BLOCKSTORED_SERVER = os.environ['BLOCKSTORED_SERVER']
- BLOCKSTORED_PORT = os.environ['BLOCKSTORED_PORT']
+ BLOCKSTORED_PORT = int(os.environ['BLOCKSTORED_PORT'])
except KeyError:
BLOCKSTORED_SERVER = 'localhost'
BLOCKSTORED_PORT = DEFAULT_BLOCKSTORED_PORT
|
fixed bug where port read from ENV should be int
|
blockstack_blockstack-core
|
train
|
2a2c1d596019020809c196352c1cd7bd08af5802
|
diff --git a/src/com/google/javascript/jscomp/CheckAccessControls.java b/src/com/google/javascript/jscomp/CheckAccessControls.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/jscomp/CheckAccessControls.java
+++ b/src/com/google/javascript/jscomp/CheckAccessControls.java
@@ -30,6 +30,7 @@ import com.google.javascript.rhino.jstype.FunctionType;
import com.google.javascript.rhino.jstype.JSType;
import com.google.javascript.rhino.jstype.JSTypeNative;
import com.google.javascript.rhino.jstype.ObjectType;
+import com.google.javascript.rhino.jstype.Property;
import java.util.ArrayDeque;
@@ -670,9 +671,9 @@ class CheckAccessControls implements ScopedCallback, HotSwapCompilerPass {
}
if (objectType != null) {
- JSDocInfo docInfo = objectType.getOwnPropertyJSDocInfo(propertyName);
- definingSource = docInfo.getStaticSourceFile();
- isClassType = docInfo.isConstructor();
+ Property p = objectType.getOwnSlot(propertyName);
+ definingSource = p.getNode().getStaticSourceFile();
+ isClassType = p.getJSDocInfo().isConstructor();
} else if (isPrivateByConvention) {
// We can only check visibility references if we know what file
// it was defined in.
|
When looking for the source file associated with a property definition use the
node rather than the node associated with the JSDocInfo object.
This paves the way for removing the "associated node" in the JSDocInfo object.
-------------
Created by MOE: <URL>
|
google_closure-compiler
|
train
|
0fd2e9968671f7f23b946f68d047cd087d751183
|
diff --git a/openquake/commands/info.py b/openquake/commands/info.py
index <HASH>..<HASH> 100644
--- a/openquake/commands/info.py
+++ b/openquake/commands/info.py
@@ -71,8 +71,8 @@ def print_csm_info(fname):
oqparam = readinput.get_oqparam(fname)
csm = readinput.get_composite_source_model(oqparam, in_memory=False)
print(csm.info)
- print('See https://github.com/gem/oq-risklib/blob/master/doc/'
- 'effective-realizations.rst for an explanation')
+ print('See http://docs.openquake.org/oq-engine/stable/'
+ 'effective-realizations.html for an explanation')
rlzs_assoc = csm.info.get_rlzs_assoc()
print(rlzs_assoc)
tot, pairs = get_pickled_sizes(rlzs_assoc)
diff --git a/openquake/commands/tests/commands_test.py b/openquake/commands/tests/commands_test.py
index <HASH>..<HASH> 100644
--- a/openquake/commands/tests/commands_test.py
+++ b/openquake/commands/tests/commands_test.py
@@ -67,7 +67,7 @@ class Print(object):
class InfoTestCase(unittest.TestCase):
EXPECTED = '''<CompositionInfo
b1, x15.xml, grp=[0], weight=1.00: 1 realization(s)>
-See https://github.com/gem/oq-risklib/blob/master/doc/effective-realizations.rst for an explanation
+See http://docs.openquake.org/oq-engine/stable/effective-realizations.html for an explanation
<RlzsAssoc(size=1, rlzs=1)
0,AkkarBommer2010(): ['<0,b1~@_AkkarBommer2010_@_@_@_@_@,w=1.0>']>
=============== ======
|
Fix link pointing to effective-realizations docu
|
gem_oq-engine
|
train
|
55bc3bfdb9f5057d99b4194dc981e7ccd7602ba9
|
diff --git a/doc/config.rst b/doc/config.rst
index <HASH>..<HASH> 100644
--- a/doc/config.rst
+++ b/doc/config.rst
@@ -115,8 +115,10 @@ Glossary of Configuration Values
"service key". It is not always consistent in :mod:`fedmsg.core`.
relay_inbound
- ``str`` - A string set to a special zeromq endpoint where the inbound,
- passive zmq SUB socket for ``fedmsg-relay`` is listening.
+ ``str`` - A list of special zeromq endpoints where the inbound,
+ passive zmq SUB sockets for for instances of ``fedmsg-relay`` are
+ listening.
+
Commands like ``fedmsg-logger`` actively connect here and publish their
messages.
diff --git a/extras/mediawiki/fedmsg-emit.php b/extras/mediawiki/fedmsg-emit.php
index <HASH>..<HASH> 100644
--- a/extras/mediawiki/fedmsg-emit.php
+++ b/extras/mediawiki/fedmsg-emit.php
@@ -77,7 +77,15 @@ function initialize() {
$context = new ZMQContext(1, true);
$queue = $context->getSocket(ZMQ::SOCKET_PUB, "pub-a-dub-dub");
- $queue->connect($config['relay_inbound']);
+ if (is_array($config['relay_inbound'])) {
+ // API for fedmsg >= 0.5.2
+ // TODO - be more robust here and if connecting to the first one fails, try
+ // the next, and the next, and etc...
+ $queue->connect($config['relay_inbound'][0]);
+ } else {
+ // API for fedmsg <= 0.5.1
+ $queue->connect($config['relay_inbound']);
+ }
return true;
}
diff --git a/fedmsg.d/base.py b/fedmsg.d/base.py
index <HASH>..<HASH> 100644
--- a/fedmsg.d/base.py
+++ b/fedmsg.d/base.py
@@ -36,4 +36,10 @@ config = dict(
# hub to not be strict when comparing messages topics to subscription
# topics.
zmq_strict=False,
+
+ # TODO - consider removing the whole concept of post_init_sleep in the
+ # future.
+ # This used to be set to 1 for safety, but it turns out it was
+ # unnecessary.
+ post_init_sleep=0,
)
diff --git a/fedmsg.d/endpoints.py b/fedmsg.d/endpoints.py
index <HASH>..<HASH> 100644
--- a/fedmsg.d/endpoints.py
+++ b/fedmsg.d/endpoints.py
@@ -34,7 +34,9 @@ config = dict(
# This is the output side of the relay to which all other
# services can listen.
- "relay_outbound": ["tcp://*:4001"],
+ "relay_outbound": [
+ "tcp://*:4001",
+ ],
# For other, more 'normal' services, fedmsg will try to guess the
# name of it's calling module to determine which endpoint definition
@@ -54,5 +56,7 @@ config = dict(
# It is also used by the git-hook, for the same reason.
# It is also used by the mediawiki php plugin which, due to the oddities of
# php, can't maintain a single passive-bind endpoint of it's own.
- relay_inbound="tcp://127.0.0.1:2003",
+ relay_inbound=[
+ "tcp://127.0.0.1:2003",
+ ],
)
diff --git a/fedmsg/commands/relay.py b/fedmsg/commands/relay.py
index <HASH>..<HASH> 100644
--- a/fedmsg/commands/relay.py
+++ b/fedmsg/commands/relay.py
@@ -21,6 +21,7 @@
"""
import fedmsg
+from fedmsg.core import _listify
from fedmsg.commands import command
from fedmsg.consumers.relay import RelayConsumer
@@ -34,8 +35,8 @@ def relay(**kw):
``fedmsg-relay`` is a service which binds to two ports, listens for
messages on one and emits them on the other. ``fedmsg-logger``
requires that an instance of ``fedmsg-relay`` be running *somewhere*
- and that it's inbound address be listed in the config as
- :term:`relay_inbound`.
+ and that it's inbound address be listed in the config as one of the entries
+ in :term:`relay_inbound`.
``fedmsg-relay`` becomes a necessity for integration points that cannot
bind consistently to and serve from a port. See :doc:`topology` for the
@@ -43,11 +44,16 @@ def relay(**kw):
SUB.bind()->PUB.bind() relay.
"""
+ # TODO - Works is needed here in order for multiple failover fedmsg-relays
+ # to work. If three endpoints are declared for relay_outbound, which one
+ # should *this* relay use? Same goes for relay_inbound.
+ # The config format underspecifies.
+
# Do just like in fedmsg.commands.hub and mangle fedmsg-config.py to work
# with moksha's expected configuration.
moksha_options = dict(
zmq_publish_endpoints=",".join(kw['endpoints']["relay_outbound"]),
- zmq_subscribe_endpoints=kw['relay_inbound'],
+ zmq_subscribe_endpoints=",".join(_listify(kw['relay_inbound'])),
zmq_subscribe_method="bind",
)
kw.update(moksha_options)
diff --git a/fedmsg/tests/fedmsg-test-config.py b/fedmsg/tests/fedmsg-test-config.py
index <HASH>..<HASH> 100644
--- a/fedmsg/tests/fedmsg-test-config.py
+++ b/fedmsg/tests/fedmsg-test-config.py
@@ -58,7 +58,7 @@ config = dict(
"tcp://www.flugle.horn:88",
],
},
- relay_inbound="tcp://127.0.0.1:%i" % (port - 1),
+ relay_inbound=["tcp://127.0.0.1:%i" % (port - 1)],
environment="dev",
high_water_mark=0,
io_threads=1,
|
Laid some backwards-compatible groundwork for fedmsg-relay failover.
- Identified a problem for future work.
- Relates to #<I>.
|
fedora-infra_fedmsg
|
train
|
699589872edfc737254086b0282858b38965c5c2
|
diff --git a/qiime_tools/util.py b/qiime_tools/util.py
index <HASH>..<HASH> 100644
--- a/qiime_tools/util.py
+++ b/qiime_tools/util.py
@@ -131,6 +131,12 @@ def parse_taxonomy_table(idtaxFNH):
def split_phylogeny(p, level='s'):
+ """
+ Return the phylogenetic taxonomy of bacteria in string 'p' to level specified.
+ The different level of identification are kingdom (k), phylum (p), class (c),
+ order (o), family (f), genus (g) and species (s). If level is not provided,
+ the default level of identification is species.
+ """
level = level+'__'
result = p.split(level)
return result[0]+level+result[1].split(';')[0]
|
Added docstring for split_phylogeny()
|
smdabdoub_phylotoast
|
train
|
02e5fd2abaa4878b084af3f12a9ecdf0a1b5de88
|
diff --git a/toc.js b/toc.js
index <HASH>..<HASH> 100644
--- a/toc.js
+++ b/toc.js
@@ -5,14 +5,41 @@ var through = require('through2');
var toc = require('markdown-toc');
module.exports = function(verb) {
+ verb.postLayout(/\.md/, function(file, next) {
+ var opts = extend({}, verb.options, file.options);
+ if (opts.toc !== true) {
+ next();
+ return;
+ }
+ file.toc = toc(file.content, opts);
+ next();
+ });
+
+ verb.preWrite(/\.md/, function(file, next) {
+ var opts = extend({}, verb.options, file.options);
+ var str = file.contents.toString();
+
+ var tocString = (file.toc && file.toc.content) || '';
+ if (tocString === '' || opts.toc !== true) {
+ str = str.replace(/^#+ TOC/gm, '');
+ tocString = '';
+ } else {
+ tocString += '\n\n_(TOC generated by [verb](https://github.com/verbose/verb))_';
+ }
+
+ str = str.split('<!-- toc -->').join(tocString);
+ str = str.replace(/\n{2,}/g, '\n\n');
+ file.contents = new Buffer(str);
+ next();
+ });
+
verb.plugin('toc.create', function(options) {
return through.obj(function(file, enc, next) {
var opts = extend({}, verb.options, options, file.options);
- if (opts.toc === false) {
+ if (opts.toc !== true) {
return next(null, file);
}
- var str = file.contents.toString();
- file.toc = toc(str, opts);
+ file.toc = toc(file.content, opts);
next(null, file);
});
});
@@ -23,13 +50,13 @@ module.exports = function(verb) {
var str = file.contents.toString();
var tocString = (file.toc && file.toc.content) || '';
- if (tocString === '' || opts.toc === false) {
+ if (tocString === '' || opts.toc !== true) {
str = str.replace(/^#+ TOC/gm, '');
tocString = '';
+ } else {
+ tocString += '\n\n_(TOC generated by [verb](https://github.com/verbose/verb))_';
}
- tocString += '\n\n_(Table of contents generated by [verb](https://github.com/verbose/verb))_';
-
str = str.split('<!-- toc -->').join(tocString);
str = str.replace(/\n{2,}/g, '\n\n');
file.contents = new Buffer(str);
|
adds toc middleware. this is temporary, we already have libs for this
we'll update those libs with this code
|
verbose_verb-generate-readme
|
train
|
fba0e6af7f3f0adfae575a331933bb2221c709af
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -34,14 +34,18 @@ module.exports = function (Pouch) {
return name.replace(/^_pouch_/, ''); // TODO: remove when fixed in Pouch
}
+ function canIgnore(dbName) {
+ return (dbName === ALL_DBS_NAME) ||
+ // TODO: get rid of this when we have a real 'onDependentDbRegistered'
+ // event (pouchdb/pouchdb#2438)
+ (dbName.indexOf('-mrview-') !== -1) ||
+ // TODO: might be a better way to detect remote DBs
+ (/^https?:\/\//.test(dbName));
+ }
+
Pouch.on('created', function (dbName) {
dbName = normalize(dbName);
-
- if (dbName === ALL_DBS_NAME) {
- return;
- } else if (dbName.indexOf('-mrview-') !== -1) {
- // HACK: get rid of this when we have a real 'onDependentDbRegistered'
- // event (pouchdb/pouchdb#2438)
+ if (canIgnore(dbName)) {
return;
}
init();
@@ -61,7 +65,7 @@ module.exports = function (Pouch) {
Pouch.on('destroyed', function (dbName) {
dbName = normalize(dbName);
- if (dbName === ALL_DBS_NAME) {
+ if (canIgnore(dbName)) {
return;
}
init();
|
(#4) - filter http databases
|
pouchdb-community_pouchdb-all-dbs
|
train
|
134f250d657c4d13f6d3143d70c76815e7be0db9
|
diff --git a/russound/russound.py b/russound/russound.py
index <HASH>..<HASH> 100755
--- a/russound/russound.py
+++ b/russound/russound.py
@@ -69,6 +69,7 @@ class Russound:
send_msg = self.create_send_message("F0 @cc 00 7F 00 00 @kk 05 02 02 00 00 F1 23 00 @pr 00 @zz 00 01",
controller, zone, power)
+ _LOGGER.debug("Sending message %s", send_msg)
self.send_data(send_msg)
self.receive_data() # Clear response buffer
_LOGGER.info("Russound on controller %s and zone %s power set to %s.", controller, zone, power)
@@ -82,6 +83,7 @@ class Russound:
_LOGGER.info("Russound volume on controller %s and zone %s set to level %s.", controller, zone, volume)
send_msg = self.create_send_message("F0 @cc 00 7F 00 00 @kk 05 02 02 00 00 F1 21 00 @pr 00 @zz 00 01",
controller, zone, volume//2)
+ _LOGGER.debug("Sending message %s", send_msg)
self.send_data(send_msg)
self.receive_data() # Clear response buffer
@@ -90,6 +92,7 @@ class Russound:
send_msg = self.create_send_message("F0 @cc 00 7F 00 @zz @kk 05 02 00 00 00 F1 3E 00 00 00 @pr 00 01",
controller, zone, source)
+ _LOGGER.debug("Sending message %s", send_msg)
self.send_data(send_msg)
self.receive_data() # Clear response buffer in case there is any response data (ensures correct results on future reads)
@@ -125,7 +128,7 @@ class Russound:
"F0 00 00 70 00 00 7F 00 00 04 02 00 @zz 06 00 00 01 00 01 00 FF FF F7", zone)
send_msg = self.create_send_message("F0 @cc 00 7F 00 00 @kk 01 04 02 00 @zz 06 00 00", controller, zone)
-
+ _LOGGER.debug("Sending message %s", send_msg)
self.send_data(send_msg)
response_stream = self.receive_data() # Expected response is as per pg 23 of cav6.6_rnet_protocol_v1.01.00.pdf
matching_message = self.find_matching_message(response_stream, resp_msg_signature)
@@ -150,6 +153,7 @@ class Russound:
"F0 00 00 70 00 00 7F 00 00 04 02 00 @zz 01 00 00 01 00 01 00 FF FF F7", zone)
send_msg = self.create_send_message("F0 @cc 00 7F 00 00 @kk 01 04 02 00 @zz 01 00 00", controller, zone)
+ _LOGGER.debug("Sending message %s", send_msg)
self.send_data(send_msg)
response_stream = self.receive_data()
matching_message = self.find_matching_message(response_stream, resp_msg_signature)
@@ -172,6 +176,7 @@ class Russound:
"F0 00 00 70 00 00 7F 00 00 04 02 00 @zz 02 00 00 01 00 01 00 FF FF F7", zone)
send_msg = self.create_send_message("F0 @cc 00 7F 00 00 @kk 01 04 02 00 @zz 02 00 00", controller, zone)
data = self.calc_checksum(send_msg)
+ _LOGGER.debug("Sending message %s", send_msg)
self.send_data(data)
response_stream = self.receive_data()
matching_message = self.find_matching_message(response_stream, resp_msg_signature)
|
Added some more debug logging.
|
laf_russound
|
train
|
e75c59c1f3f021a116a0f3015e70e43b0be76240
|
diff --git a/pyecore/ecore.py b/pyecore/ecore.py
index <HASH>..<HASH> 100644
--- a/pyecore/ecore.py
+++ b/pyecore/ecore.py
@@ -101,7 +101,7 @@ class Core(object):
elif inspect.isfunction(feature):
if k == '__init__':
continue
- argspect = inspect.getargspec(feature)
+ argspect = inspect.getfullargspec(feature)
args = argspect.args
if len(args) < 1 or args[0] != 'self':
continue
|
Change 'getargspec' by 'getfullargspec'
It seems that 'getargspec' is deprecated since Python <I>, it's
replacement is 'getfullargspec'.
|
pyecore_pyecore
|
train
|
9d59078489dc834d2afe4242bef30c88b739f14d
|
diff --git a/immutables/__init__.py b/immutables/__init__.py
index <HASH>..<HASH> 100644
--- a/immutables/__init__.py
+++ b/immutables/__init__.py
@@ -2,3 +2,4 @@ from ._map import Map
__all__ = 'Map',
+__version__ = '0.1'
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,15 +1,26 @@
+import os.path
import platform
import setuptools
-VERSION = '0.1'
-
CFLAGS = ['-O2']
if platform.uname().system != 'Windows':
CFLAGS.extend(['-std=c99', '-fsigned-char', '-Wall',
'-Wsign-compare', '-Wconversion'])
+with open(os.path.join(
+ os.path.dirname(__file__), 'immutables', '__init__.py')) as f:
+ for line in f:
+ if line.startswith('__version__ ='):
+ _, _, version = line.partition('=')
+ VERSION = version.strip(" \n'\"")
+ break
+ else:
+ raise RuntimeError(
+ 'unable to read the version from immutables/__init__.py')
+
+
setuptools.setup(
name='immutables',
version=VERSION,
|
Read version from __init__
|
MagicStack_immutables
|
train
|
811d763e161458bb893920e5903c3646ede60943
|
diff --git a/packages/@uppy/core/src/Plugin.js b/packages/@uppy/core/src/Plugin.js
index <HASH>..<HASH> 100644
--- a/packages/@uppy/core/src/Plugin.js
+++ b/packages/@uppy/core/src/Plugin.js
@@ -158,9 +158,20 @@ module.exports = class Plugin {
}
this.uppy.log(`Not installing ${callerPluginName}`)
- throw new Error(`Invalid target option given to ${callerPluginName}. Please make sure that the element
- exists on the page, or that the plugin you are targeting has been installed. Check that the <script> tag initializing Uppy
- comes at the bottom of the page, before the closing </body> tag (see https://github.com/transloadit/uppy/issues/1042).`)
+
+ let message = `Invalid target option given to ${callerPluginName}.`
+ if (typeof target === 'function') {
+ message += ' The given target is not a Plugin class. ' +
+ 'Please check that you\'re not specifying a React Component instead of a plugin. ' +
+ 'If you are using @uppy/* packages directly, make sure you have only 1 version of @uppy/core installed: ' +
+ 'run `npm ls @uppy/core` on the command line and verify that all the versions match and are deduped correctly.'
+ } else {
+ message += 'If you meant to target an HTML element, please make sure that the element exists. ' +
+ 'Check that the <script> tag initializing Uppy is right before the closing </body> tag at the end of the page. ' +
+ '(see https://github.com/transloadit/uppy/issues/1042)\n\n' +
+ 'If you meant to target a plugin, please confirm that your `import` statements or `require` calls are correct.'
+ }
+ throw new Error(message)
}
render (state) {
|
core: add some more suggestions to the incorrect `target` warning (#<I>)
|
transloadit_uppy
|
train
|
9d3580b23ac6e5a55013339c3b13cde5c6a6072b
|
diff --git a/acorn.js b/acorn.js
index <HASH>..<HASH> 100644
--- a/acorn.js
+++ b/acorn.js
@@ -2482,8 +2482,8 @@
node.source = null;
semicolon();
} else {
- // export * from '...'
- // export { x, y as z } [from '...']
+ // export * from '...';
+ // export { x, y as z } [from '...'];
var isBatch = tokType === _star;
node.declaration = null;
node['default'] = false;
@@ -2495,6 +2495,7 @@
if (isBatch) unexpected();
node.source = null;
}
+ semicolon();
}
return finishNode(node, "ExportDeclaration");
}
@@ -2549,6 +2550,7 @@
// (it doesn't support mixed default + named yet)
node.kind = node.specifiers[0]['default'] ? "default" : "named";
}
+ semicolon();
return finishNode(node, "ImportDeclaration");
}
|
make ImportDeclaration and ExportDeclaration semicolons more spec-compliant
|
babel_babylon
|
train
|
b829f50895cfc2e35bc37a9996c01bfddf3ce968
|
diff --git a/drivers/router/vue-router.2.x.js b/drivers/router/vue-router.2.x.js
index <HASH>..<HASH> 100644
--- a/drivers/router/vue-router.2.x.js
+++ b/drivers/router/vue-router.2.x.js
@@ -1,5 +1,11 @@
module.exports = {
+ _init: function () {
+ if ( ! this.options.Vue.router) {
+ return 'vue-router.2.x.js : Vue.router must be set.';
+ }
+ },
+
_bindData: function (data, ctx) {
var error, success;
diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -14,7 +14,7 @@
"main": "src/index.js",
- "version": "2.2.1-beta",
+ "version": "2.3.0-beta",
"repository": {
"type": "git",
@@ -24,7 +24,7 @@
"licenses": "MIT, GPL",
"dependencies": {
- "@websanova/vue-auth": "2.2.1-beta"
+ "@websanova/vue-auth": "2.3.0-beta"
},
"devDependencies": {
diff --git a/src/auth.js b/src/auth.js
index <HASH>..<HASH> 100644
--- a/src/auth.js
+++ b/src/auth.js
@@ -403,6 +403,8 @@ module.exports = function () {
};
function Auth(Vue, options) {
+ var i, ii, msg, drivers = ['auth', 'http', 'router'];
+
this.currentToken = null;
this.options = __utils.extend(defaultOptions, [options || {}]);
@@ -418,6 +420,22 @@ module.exports = function () {
}
});
+ for (i = 0, ii = drivers.length; i < ii; i++) {
+ if ( ! this.options[drivers[i]]) {
+ console.error('Error (@websanova/vue-auth): "' + drivers[i] + '" driver must be set.');
+ return;
+ }
+
+ if (this.options[drivers[i]]._init) {
+ msg = this.options[drivers[i]]._init.call(this);
+
+ if (msg) {
+ console.error('Error (@websanova/vue-auth): ' + msg);
+ return;
+ }
+ }
+ }
+
// Init interceptors.
this.options.router._beforeEach.call(this, this.options.routerBeforeEach, this.options.transitionEach);
this.options.http._interceptor.call(this, this.options.requestIntercept, this.options.responseIntercept);
|
Add some error detection (and output) for drivers.
|
websanova_vue-auth
|
train
|
183a0cb9af892666698c51f656a30daa3bfd50ca
|
diff --git a/tests/test_xopen.py b/tests/test_xopen.py
index <HASH>..<HASH> 100644
--- a/tests/test_xopen.py
+++ b/tests/test_xopen.py
@@ -542,6 +542,14 @@ def test_xopen_falls_back_to_gzip_open(lacking_pigz_permissions):
assert f.readline() == CONTENT_LINES[0].encode("utf-8")
+def test_xopen_falls_back_to_gzip_open_no_isal(lacking_pigz_permissions,
+ monkeypatch):
+ import xopen # xopen local overrides xopen global variable
+ monkeypatch.setattr(xopen, "igzip", None)
+ with xopen.xopen("tests/file.txt.gz", "rb") as f:
+ assert f.readline() == CONTENT_LINES[0].encode("utf-8")
+
+
def test_open_many_gzip_writers(tmp_path):
files = []
for i in range(1, 61):
|
Add test for lacking both isal and pigz
|
marcelm_xopen
|
train
|
5e39632e9144071afd14bf93da513e45088d5621
|
diff --git a/lib/did_you_mean/experimental.rb b/lib/did_you_mean/experimental.rb
index <HASH>..<HASH> 100644
--- a/lib/did_you_mean/experimental.rb
+++ b/lib/did_you_mean/experimental.rb
@@ -1,3 +1,2 @@
-require 'did_you_mean'
require 'did_you_mean/experimental/initializer_name_correction'
require 'did_you_mean/experimental/ivar_name_correction'
diff --git a/lib/did_you_mean/experimental/ivar_name_correction.rb b/lib/did_you_mean/experimental/ivar_name_correction.rb
index <HASH>..<HASH> 100644
--- a/lib/did_you_mean/experimental/ivar_name_correction.rb
+++ b/lib/did_you_mean/experimental/ivar_name_correction.rb
@@ -1,6 +1,22 @@
+# frozen-string-literal: true
+
+require 'did_you_mean'
+
module DidYouMean
- module Experimental
- module IvarNameCorrection
+ module Experimental #:nodoc:
+ class IvarNameCheckerBuilder #:nodoc:
+ attr_reader :original_checker
+
+ def initialize(original_checker) #:nodoc:
+ @original_checker = original_checker
+ end
+
+ def new(no_method_error) #:nodoc:
+ IvarNameChecker.new(no_method_error, original_checker: @original_checker)
+ end
+ end
+
+ class IvarNameChecker #:nodoc:
REPLS = {
"(irb)" => -> { Readline::HISTORY.to_a.last }
}
@@ -13,17 +29,17 @@ module DidYouMean
end
end
- NameError.send(:attr, :frame_binding)
+ attr_reader :original_checker
- def initialize(no_method_error)
- super
+ def initialize(no_method_error, original_checker: )
+ @original_checker = original_checker.new(no_method_error)
@location = no_method_error.backtrace_locations.first
@ivar_names = no_method_error.frame_binding.receiver.instance_variables
end
def corrections
- super + ivar_name_corrections
+ original_checker.corrections + ivar_name_corrections
end
def ivar_name_corrections
@@ -33,12 +49,12 @@ module DidYouMean
private
def receiver_name
- return unless receiver.nil?
+ return unless @original_checker.receiver.nil?
abs_path = @location.absolute_path
lineno = @location.lineno
- /@(\w+)*\.#{method_name}/ =~ line(abs_path, lineno).to_s && $1
+ /@(\w+)*\.#{@original_checker.method_name}/ =~ line(abs_path, lineno).to_s && $1
end
def line(abs_path, lineno)
@@ -51,7 +67,8 @@ module DidYouMean
end
end
end
-
- SPELL_CHECKERS['NoMethodError'].prepend(IvarNameCorrection)
end
+
+ NameError.send(:attr, :frame_binding)
+ SPELL_CHECKERS['NoMethodError'] = Experimental::IvarNameCheckerBuilder.new(SPELL_CHECKERS['NoMethodError'])
end
|
Refactor experimental/ivar_name_correction
|
yuki24_did_you_mean
|
train
|
e7c34ecff698509c7547c919149860f857acdd25
|
diff --git a/lib/swf.rb b/lib/swf.rb
index <HASH>..<HASH> 100644
--- a/lib/swf.rb
+++ b/lib/swf.rb
@@ -25,10 +25,26 @@ module SWF
@domain_name = d
end
+ SLOT_TIME = 1
+
+ def domains
+ collision = 0
+ begin
+ swf.domains
+ rescue => e
+ collision += 1 if collision < 10
+ puts "Collision #{collision}"
+ max_slot_delay = 2**collision - 1
+ sleep(SLOT_TIME * rand(0 .. max_slot_delay))
+ retry
+ end
+ end
+
def domain
# if we need a new domain, make it in the aws console
- raise UnknownSWFDomain, "#{domain_name} is not a valid SWF domain" unless swf.domains[domain_name].exists?
- swf.domains[domain_name]
+
+ raise UnknownSWFDomain, "#{domain_name} is not a valid SWF domain" unless domains[domain_name].exists?
+ domains[domain_name]
end
def task_list= tl
|
exponential backoff to querying for swf domains
|
change_aws-swf
|
train
|
edbed47dd7ae5e1731542d6b0aac523da53985ef
|
diff --git a/test.js b/test.js
index <HASH>..<HASH> 100644
--- a/test.js
+++ b/test.js
@@ -1,5 +1,5 @@
#!/usr/bin/env node
-var irc = require(__dirname + '/lib/ircnode.js'),
+var irc = require('ircnode'),
net = require('net'),
assert = require('assert');
@@ -31,8 +31,8 @@ var splitTests = {
for (var u in splitTests) {
var actual = irc.splitcmd(u);
var expected = splitTests[u];
+
assert.deepEqual(actual, expected);
}
process.exit(0);
-
|
minor adjustments to the test as suggested by totallymike
|
totallymike_ircnode
|
train
|
29657d72d51d4a42f4c63a76f2e796cf340037a4
|
diff --git a/lib/ChannelRegistry.js b/lib/ChannelRegistry.js
index <HASH>..<HASH> 100644
--- a/lib/ChannelRegistry.js
+++ b/lib/ChannelRegistry.js
@@ -91,7 +91,7 @@ ChannelRegistry.prototype.generateClientId = function() {
*
* @return {string} The ID of the client.
*
- * @see Player
+ * @see node.Player
*/
ChannelRegistry.prototype.addClient = function(playerObj) {
var clientId, errStr, res;
diff --git a/lib/GameServer.js b/lib/GameServer.js
index <HASH>..<HASH> 100644
--- a/lib/GameServer.js
+++ b/lib/GameServer.js
@@ -206,6 +206,47 @@ GameServer.prototype.attachListeners = function() {
GameServer.prototype.attachCustomListeners = function() {};
+
+/**
+ * ### GameServer.onConnect
+ *
+ * Send a HI msg to the client, and log its arrival
+ *
+ * @param {string} client The id of the freshly connected client
+ */
+GameServer.prototype.onConnect = function(connectionId, socketObj) {
+ var res, clientObj, clientId;
+
+ clientObj = {
+ sid: connectionId
+ };
+
+ // this.decorateClient(handshakeData);
+
+ //if (this.authorizeClient(clientObj)) {
+ // this.sysLogger.log('Unauthorized connection ' + connectionId);
+ // return;
+ //}
+
+ // TODO: pass only the connectionID ?
+ clientId = this.registry.addClient(clientObj);
+ if (!clientId) {
+ return false;
+ }
+
+ // TODO: we need to match the connectionId with the FROM / TO msg
+ // ? registerAlias
+
+ //this.socket.registerClient(clientId, socketObj);
+ //this.welcomeClient(clientId);
+ // was with connectionId
+ this.socket.registerClient(connectionId, socketObj);
+ this.welcomeClient(connectionId);
+
+ return true;
+};
+
+
/**
* ### GameServer.welcomeClient
*
diff --git a/lib/ServerChannel.js b/lib/ServerChannel.js
index <HASH>..<HASH> 100644
--- a/lib/ServerChannel.js
+++ b/lib/ServerChannel.js
@@ -73,20 +73,18 @@ ServerChannel.prototype.createServers = function() {
var adminOptions, playerOptions;
adminOptions = {
- channel: this,
- name: '[ADMIN_SERVER]',
+ channel: this,
+ name: '[ADMIN_SERVER]',
endpoint: this.options.admin,
- user_options: this.options
};
this.admin = new AdminServer(adminOptions);
playerOptions = {
- channel: this,
- name: '[PLAYER_SERVER]',
+ channel: this,
+ name: '[PLAYER_SERVER]',
endpoint: this.options.player,
- user_options: this.options
};
this.player = new PlayerServer(playerOptions);
diff --git a/lib/sockets/SocketDirect.js b/lib/sockets/SocketDirect.js
index <HASH>..<HASH> 100644
--- a/lib/sockets/SocketDirect.js
+++ b/lib/sockets/SocketDirect.js
@@ -35,19 +35,21 @@ function SocketDirect(game_server) {
SocketDirect.prototype.generateID = function() {
return J.uniqueKey(this.clients);
- //return '' + Math.random() * 1000000000000000000;
};
//## METHODS
-SocketDirect.prototype.connect = function(client) {
-
- var id = this.generateID();
+SocketDirect.prototype.connect = function(client) {
+ var id, res;
+ debugger;
+ id = this.generateID();
+ // add it temporarily
this.clients[id] = client;
-
- this.game_server.socket.registerClient(id, this);
- this.game_server.welcomeClient(id);
+ res = this.game_server.onConnect(id, this);
+ if (!res) {
+ delete this.clients[id];
+ }
};
SocketDirect.prototype.message = function(msg) {
diff --git a/lib/sockets/SocketIo.js b/lib/sockets/SocketIo.js
index <HASH>..<HASH> 100644
--- a/lib/sockets/SocketIo.js
+++ b/lib/sockets/SocketIo.js
@@ -40,20 +40,19 @@ SocketIo.prototype.attachListeners = function() {
this.channel = this.sio.of(this.game_server.endpoint).on('connection',
function(socket) {
- // Register the socket as a class variable
-
- that.game_server.socket.registerClient(socket.id, that);
-
- // Send Welcome Msg and notify others
- that.game_server.welcomeClient(socket.id);
-
- socket.on('message', function(msg) {
- that.game_server.onMessage(msg);
+ var res;
+
+ res = that.game_server.onConnect(socket.id, that);
+
+ if (res) {
+ socket.on('message', function(msg) {
+ that.game_server.onMessage(msg);
});
-
+
socket.on('disconnect', function() {
- that.game_server.onDisconnect(socket.id, socket);
+ that.game_server.onDisconnect(socket.id, socket);
});
+ }
});
this.sio.sockets.on("shutdown", that.game_server.onShutdown);
|
Connection broken. Need to map socketId with client id / aliases
|
nodeGame_nodegame-server
|
train
|
82953cdbb5fc297feb9a447c71b4fc8b6dda29e4
|
diff --git a/numina/array/wavecalib/__main__.py b/numina/array/wavecalib/__main__.py
index <HASH>..<HASH> 100644
--- a/numina/array/wavecalib/__main__.py
+++ b/numina/array/wavecalib/__main__.py
@@ -115,7 +115,8 @@ def wvcal_spectrum(filename, ns1, ns2,
# fit and subtract background
if nwin_background > 0:
- background = ndimage.filters.median_filter(sp_mean, size=81)
+ background = ndimage.filters.median_filter(sp_mean,
+ size=nwin_background)
sp_mean -= background
# save spectrum before wavelength calibration in external
|
Fix error when computing background.
The size of the median filter was set to <I> instead to nwin_background.
|
guaix-ucm_numina
|
train
|
9f09bcde3b0782fabb3af93594be12565fab6587
|
diff --git a/src/serialize/GameObject.js b/src/serialize/GameObject.js
index <HASH>..<HASH> 100644
--- a/src/serialize/GameObject.js
+++ b/src/serialize/GameObject.js
@@ -125,6 +125,14 @@ class GameObject extends Serializable {
get bendingVelocityMultiple() { return null; }
/**
+ * The angle bending multiple is a getter, which returns the
+ * amount of angle bending.
+ * @memberof GameObject
+ * @member {Number} bendingAngleMultiple
+ */
+ get bendingAngleMultiple() { return null; }
+
+ /**
* synchronize this object to the state of an other object
* @param {GameObject} other the other object to synchronize to
*/
diff --git a/src/serialize/PhysicalObject2D.js b/src/serialize/PhysicalObject2D.js
index <HASH>..<HASH> 100644
--- a/src/serialize/PhysicalObject2D.js
+++ b/src/serialize/PhysicalObject2D.js
@@ -124,10 +124,16 @@ class PhysicalObject2D extends GameObject {
// angle bending factor
let angleBending = bending;
+ let velocityBending = bending;
+ let angularVelocityBending = bending;
if (typeof this.bendingAngleMultiple === 'number')
angleBending = this.bendingAngleMultiple;
if (isLocal && (typeof this.bendingAngleLocalMultiple === 'number'))
angleBending = this.bendingAngleLocalMultiple;
+ if (typeof this.bendingVelocityMultiple === 'number')
+ velocityBending = this.bendingVelocityMultiple;
+ if (typeof this.bendingAngularVelocityMultiple === 'number')
+ angulerVelocityBending = this.bendingAngularVelocityMultiple;
// get the incremental delta position
this.incrementScale = bending / bendingIncrements;
@@ -136,10 +142,10 @@ class PhysicalObject2D extends GameObject {
this.bendingPositionDelta.multiplyScalar(this.incrementScale);
this.bendingVelocityDelta = this.velocity.clone();
this.bendingVelocityDelta.subtract(original.velocity);
- this.bendingVelocityDelta.multiplyScalar(this.incrementScale);
+ this.bendingVelocityDelta.multiplyScalar(this.incrementScale * velocityBending);
// get the incremental angular-velocity
- this.bendingAVDelta = (this.angularVelocity - original.angularVelocity) * this.incrementScale;
+ this.bendingAVDelta = (this.angularVelocity - original.angularVelocity) * this.incrementScale * angularVelocityBending;
// get the incremental angle correction
this.bendingAngleDelta = MathUtils.interpolateDeltaWithWrapping(original.angle, this.angle, angleBending, 0, 2 * Math.PI) / bendingIncrements;
|
allow override of velocity and AV bending in 2D physics
|
lance-gg_lance
|
train
|
1d8a3d59055b7f21bc4d13d7d40eed914b716ebc
|
diff --git a/tests/Integration/Database/EloquentMorphManyTest.php b/tests/Integration/Database/EloquentMorphManyTest.php
index <HASH>..<HASH> 100644
--- a/tests/Integration/Database/EloquentMorphManyTest.php
+++ b/tests/Integration/Database/EloquentMorphManyTest.php
@@ -72,7 +72,6 @@ class Post extends Model
}
}
-
class Comment extends Model
{
public $table = 'comments';
|
Apply fixes from StyleCI (#<I>)
|
laravel_framework
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.