hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
174f0fb925334693574d60cbb822c95d4efd737f
|
diff --git a/any_imagefield/models/backends/filebrowser.py b/any_imagefield/models/backends/filebrowser.py
index <HASH>..<HASH> 100644
--- a/any_imagefield/models/backends/filebrowser.py
+++ b/any_imagefield/models/backends/filebrowser.py
@@ -1,8 +1,29 @@
from __future__ import absolute_import
-from filebrowser.fields import FileBrowseField
+from filebrowser.fields import FileBrowseField, FileObject
-class AnyFileField(FileBrowseField):
+class PatchedFileObject(FileObject):
+ @property
+ def url(self):
+ # The filebrowser returns a FileObject which differs from Django's FieldFile / ImageFieldFile object
+ # Assign 'url' attribute for compatibility with
+ return self.url_full
+
+
+class CompatibleFileBrowseField(FileBrowseField):
+ """
+ Internal class to fix compatibility between Django's native FileField
+ and django-filebrowser's FileBrowseField
+ """
+
+ def to_python(self, value):
+ value = super(CompatibleFileBrowseField, self).to_python(value)
+ if value.__class__ is FileObject:
+ value.__class__ = PatchedFileObject
+ return value
+
+
+class AnyFileField(CompatibleFileBrowseField):
"""
The file browse field based on django-filebrowser.
"""
@@ -17,7 +38,7 @@ class AnyFileField(FileBrowseField):
super(AnyFileField, self).__init__(*args, **defaults)
-class AnyImageField(FileBrowseField):
+class AnyImageField(CompatibleFileBrowseField):
"""
The image browse field based on django-filebrowser.
"""
diff --git a/any_imagefield/models/fields.py b/any_imagefield/models/fields.py
index <HASH>..<HASH> 100644
--- a/any_imagefield/models/fields.py
+++ b/any_imagefield/models/fields.py
@@ -36,6 +36,16 @@ class AnyImageField(active_backend.AnyImageField):
When *django-filebrowser* is not installed, it will display the
standard :class:`~django.db.models.ImageField` with a preview attached to it.
"""
+ def __init__(self, *args, **kwargs):
+ # django-filebrowser has no concept of a 'width_field',
+ # only Django's ImageField has this feature.
+ if 'width_field' in kwargs:
+ raise NotImplementedError("Unable to use 'width_field' in AnyImageField, not all backends support this feature.")
+ if 'height_field' in kwargs:
+ raise NotImplementedError("Unable to use 'height_field' in AnyImageField, not all backends support this feature.")
+
+ super(AnyImageField, self).__init__(*args, **kwargs)
+
# Tell South how to create custom fields
|
Fixed compatibility issues between the standard ImageField and the FileBrowseField
|
edoburu_django-any-imagefield
|
train
|
c9552bd8f7e929aee0fdc415216126dc4cfab744
|
diff --git a/rtv/downloader/vodtvp.py b/rtv/downloader/vodtvp.py
index <HASH>..<HASH> 100644
--- a/rtv/downloader/vodtvp.py
+++ b/rtv/downloader/vodtvp.py
@@ -1,38 +1,91 @@
import datetime
+import json
import re
+from bs4 import BeautifulSoup
+
from rtv.downloader.common import Downloader
class VodTVPDL(Downloader):
- _VALID_URL = r'https?://(?:www\.)?vod.tvp\.pl/.*/(?P<show_name>[\w-]+),(?:[\w-]+-)?(?P<date>\d{8}).*'
+ _VALID_URL = r'https?://(?:www\.)?vod.tvp\.pl/' \
+ r'.*?' \
+ r'(?:,?(?P<date>[\d\-]+)?)' \
+ r',' \
+ r'(?P<object_id>\d+)'
def get_podcast_date(self):
match = re.match(self._VALID_URL, self.url)
+ if not match:
+ return None
+
+ date_str = match.group('date')
+ date_formats = [
+ '%d%m%Y',
+ '%d%m%Y-%H%M'
+ ]
+ for d in date_formats:
+ try:
+ return datetime.datetime.strptime(date_str, d)
+ except (ValueError, AttributeError):
+ pass
+
+ def get_podcast_show_name(self):
+ """
+ Get podcast show name from the podcast site. It's located in the div with 'data-hover'
+ attribute under the 'title' key.
+ Returns:
+ str: Podcast show name.
- if match:
- date_str = match.group('date')
- return datetime.datetime.strptime(date_str, '%d%m%Y')
+ """
+ soup = BeautifulSoup(self.html, 'html.parser')
+ div = soup.find('div', attrs={'data-hover': True})
+ data = json.loads(div['data-hover'])
+ show_name = data.get('title')
- def get_show_name(self):
- show_name_raw = super().get_info().get('title')
- match = re.match(r'(?P<show_name>.*),(?:.*,)?\s*\d{2}.\d{2}.\d{4}', show_name_raw)
+ return show_name
- if match:
- return match.group('show_name')
+ def get_podcast_title(self):
+ """
+ Get podcast title from the podcast site. It's located in the div with 'data-hover'
+ attribute under the 'episodeCount' key.
+ Returns:
+ str: Podcast title.
- # TODO: FIX invoking super().get_info() two times, add it as instance attribute and set in init?
- def get_title(self):
- return self.get_show_name() # These shows have no title, only show_name and description
+ """
+ # considered as a worse solution since most of the podcasts have only date in the title
+ # soup = BeautifulSoup(self.html, 'html.parser')
+ # div = soup.find('div', attrs={'data-hover': True})
+ # data = json.loads(div['data-hover'])
+ # title = data.get('episodeCount')
- # TODO: Scrape title from url and capitalize letters (or from webpage)
+ soup = BeautifulSoup(self.html, 'html.parser')
+ title = soup.find('meta', {'property': 'og:title'})['content']
+ return title
+
+ def get_podcast_description(self):
+ """
+ Get podcast description from the podcast site. It's located in the meta tag
+ with 'og:description' attribute under 'content' attribute.
+ Returns:
+ str: Podcast description.
+
+ """
+ soup = BeautifulSoup(self.html, 'html.parser')
+ description = soup.find('meta', {'property': 'og:description'})['content']
+ return description
def get_info(self):
- entry = {
- 'title': self.get_title(),
- 'show_name': self.get_show_name(),
- 'date': self.get_podcast_date(),
- 'url': self.url,
- 'ext': 'mp4'
+ self.get_html()
+
+ podcast_info = {
+ 'entries': [{
+ 'title': self.get_podcast_title(),
+ 'show_name': self.get_podcast_show_name(),
+ 'description': self.get_podcast_description(),
+ 'date': self.get_podcast_date(),
+ 'url': self.url,
+ 'ext': 'mp4'
+ }]
}
- return {'entries': [entry]}
+ return podcast_info
|
Fix vodtvp, scraping description, title, show name and date now works
|
radzak_rtv-downloader
|
train
|
f5fe8caf2e2a95ce02805c4ab11598b5a6fb1ce8
|
diff --git a/textx/lang.py b/textx/lang.py
index <HASH>..<HASH> 100644
--- a/textx/lang.py
+++ b/textx/lang.py
@@ -89,7 +89,7 @@ def comment_block(): return _(r'/\*(.|\n)*?\*/')
ID = _(r'[^\d\W]\w*\b', rule_name='ID', root=True)
BOOL = _(r'(True|true|False|false|0|1)\b', rule_name='BOOL', root=True)
INT = _(r'[-+]?[0-9]+\b', rule_name='INT', root=True)
-FLOAT = _(r'[+-]?(\d+(\.\d*)?|\.\b\d+)([eE][+-]?\d+)?(?<=[\w\.])(?![\w\.])',
+FLOAT = _(r'[+-]?(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?(?<=[\w\.])(?![\w\.])',
'FLOAT', root=True)
STRING = _(r'("(\\"|[^"])*")|(\'(\\\'|[^\'])*\')', 'STRING', root=True)
NUMBER = OrderedChoice(nodes=[FLOAT, INT], rule_name='NUMBER', root=True)
|
removing stray \b
|
textX_textX
|
train
|
69f4a69af8823863b31480ce09628e549d794a5d
|
diff --git a/lib/jsduck/ast.rb b/lib/jsduck/ast.rb
index <HASH>..<HASH> 100644
--- a/lib/jsduck/ast.rb
+++ b/lib/jsduck/ast.rb
@@ -169,9 +169,9 @@ module JsDuck
if ast.ext_define?
detect_ext_define(cls, ast)
elsif ast.ext_extend?
- detect_ext_something(:extends, cls, ast.raw)
+ detect_ext_something(:extends, cls, ast)
elsif ast.ext_override?
- detect_ext_something(:override, cls, ast.raw)
+ detect_ext_something(:override, cls, ast)
elsif ast.object_expression?
detect_class_members_from_object(cls, ast.raw)
elsif ast.array_expression?
@@ -186,9 +186,9 @@ module JsDuck
# The type parameter must be correspondingly either :extend or :override.
def detect_ext_something(type, cls, ast)
args = ast["arguments"]
- cls[type] = to_s(args[0])
- if args.length == 2 && object?(args[1])
- detect_class_members_from_object(cls, args[1])
+ cls[type] = args[0].to_s
+ if args.length == 2 && args[1].object_expression?
+ detect_class_members_from_object(cls, args[1].raw)
end
end
|
Refactor Ast#detect_ext_something.
Making use of AstNodeArray class.
|
senchalabs_jsduck
|
train
|
e9edf7b60c4ff22cf8e30c0f7e4eed44fcfc643f
|
diff --git a/emma2/msm/estimation/sparse/transition_matrix.py b/emma2/msm/estimation/sparse/transition_matrix.py
index <HASH>..<HASH> 100644
--- a/emma2/msm/estimation/sparse/transition_matrix.py
+++ b/emma2/msm/estimation/sparse/transition_matrix.py
@@ -30,7 +30,7 @@ def tmatrix_cov(C, row=None):
def error_perturbation(C, sensitivity):
error = 0.0;
- n = len(C)
+ n = C.shape[0]
for k in range(0,n):
cov = tmatrix_cov(C, k)
|
[msm/estimation/tmatrix] use shape instead of len, since len is not available for some sparse datatypes
|
markovmodel_PyEMMA
|
train
|
0e30cb436d081c1475f3197d858f0b4fcffe7dee
|
diff --git a/apio/commands/drivers.py b/apio/commands/drivers.py
index <HASH>..<HASH> 100644
--- a/apio/commands/drivers.py
+++ b/apio/commands/drivers.py
@@ -11,9 +11,9 @@ from apio.managers.drivers import Drivers
@click.command('drivers')
@click.pass_context
-@click.option('-e', '--ftdi-enable', is_flag=True,
+@click.option('--ftdi-enable', is_flag=True,
help='Enable FPGA drivers.')
-@click.option('-d', '--ftdi-disable', is_flag=True,
+@click.option('--ftdi-disable', is_flag=True,
help='Disable FPGA drivers.')
def cli(ctx, ftdi_enable, ftdi_disable):
"""Manage FPGA drivers."""
|
Remove -e, -d options in drivers command
|
FPGAwars_apio
|
train
|
58505144a05c63848048eb0c8518827c12ddcd0c
|
diff --git a/lib/ProMotion/cocoatouch/view_controller.rb b/lib/ProMotion/cocoatouch/view_controller.rb
index <HASH>..<HASH> 100644
--- a/lib/ProMotion/cocoatouch/view_controller.rb
+++ b/lib/ProMotion/cocoatouch/view_controller.rb
@@ -1,11 +1,11 @@
module ProMotion
class ViewController < UIViewController
def self.new(args = {})
- s = self.alloc.initWithNibName(nil, bundle:nil)
+ s = self.alloc.initWithNibName(args[:nib_name] || nil, bundle:args[:bundle] || nil)
s.on_create(args) if s.respond_to?(:on_create)
s
end
-
+
def loadView
super
self.send(:on_load) if self.respond_to?(:on_load)
|
Allos users to specify their own nib and bundle names.
|
infinitered_ProMotion
|
train
|
7d4416fc65971da021e2272daadac4a6bb080087
|
diff --git a/danceschool/core/classreg.py b/danceschool/core/classreg.py
index <HASH>..<HASH> 100644
--- a/danceschool/core/classreg.py
+++ b/danceschool/core/classreg.py
@@ -61,6 +61,11 @@ class ClassRegistrationView(FinancialContextMixin, EventOrderMixin, SiteHistoryM
form_class = ClassChoiceForm
template_name = 'core/registration/event_registration.html'
+ # The list of event registrations is kept as an attribute of the view so
+ # that it may be used in subclassed versions of methods like
+ # get_success_url() (see e.g. the nightlydoor app).
+ event_registrations = []
+
def dispatch(self, request, *args, **kwargs):
'''
Check that registration is online before proceeding. If this is a POST
@@ -88,11 +93,11 @@ class ClassRegistrationView(FinancialContextMixin, EventOrderMixin, SiteHistoryM
# the registration page. set_return_page() is in SiteHistoryMixin.
self.set_return_page('registration',_('Registration'))
- return super(ClassRegistrationView,self).get_context_data(**context)
+ return super().get_context_data(**context)
def get_form_kwargs(self, **kwargs):
''' Tell the form which fields to render '''
- kwargs = super(ClassRegistrationView, self).get_form_kwargs(**kwargs)
+ kwargs = super().get_form_kwargs(**kwargs)
kwargs['user'] = self.request.user if hasattr(self.request,'user') else None
listing = self.get_listing()
@@ -136,7 +141,7 @@ class ClassRegistrationView(FinancialContextMixin, EventOrderMixin, SiteHistoryM
non_event_listing = {key: value for key,value in form.cleaned_data.items() if 'event' not in key}
except (ValueError, TypeError) as e:
form.add_error(None,ValidationError(_('Invalid event information passed.'),code='invalid'))
- return super(ClassRegistrationView,self).form_invalid(form)
+ return self.form_invalid(form)
associated_events = Event.objects.filter(id__in=[k for k in event_listing.keys()])
@@ -147,8 +152,8 @@ class ClassRegistrationView(FinancialContextMixin, EventOrderMixin, SiteHistoryM
submissionUser = None
reg = TemporaryRegistration(
- submissionUser=submissionUser,dateTime=timezone.now(),
- payAtDoor=non_event_listing.pop('payAtDoor',False),
+ submissionUser=submissionUser, dateTime=timezone.now(),
+ payAtDoor=non_event_listing.pop('payAtDoor', False),
expirationDate=expiry,
)
@@ -159,10 +164,12 @@ class ClassRegistrationView(FinancialContextMixin, EventOrderMixin, SiteHistoryM
if regSession.get('marketing_id'):
reg.data.update({'marketing_id': regSession.pop('marketing_id',None)})
- eventRegs = []
+ # Reset the list of event registrations (if it's not empty) and build it
+ # from the form submission data.
+ self.event_registrations = []
grossPrice = 0
- for key,value in event_listing.items():
+ for key, value in event_listing.items():
this_event = associated_events.get(id=key)
# Check if registration is still feasible based on both completed registrations
@@ -182,7 +189,7 @@ class ClassRegistrationView(FinancialContextMixin, EventOrderMixin, SiteHistoryM
# For users without permissions, don't allow registration for sold out things
# at all.
form.add_error(None, ValidationError(_('Registration for "%s" is tentatively sold out while others complete their registration. Please try again later.' % this_event.name),code='invalid'))
- return super(ClassRegistrationView,self).form_invalid(form)
+ return self.form_invalid(form)
dropInList = [int(k.split("_")[-1]) for k,v in value.items() if k.startswith('dropin_') and v is True]
@@ -201,13 +208,13 @@ class ClassRegistrationView(FinancialContextMixin, EventOrderMixin, SiteHistoryM
)
# If it's possible to store additional data and such data exist, then store them.
tr.data = {k: v for k,v in value.items() if k in permitted_keys and k != 'role'}
- eventRegs.append(tr)
+ self.event_registrations.append(tr)
grossPrice += tr.price
# If we got this far with no issues, then save
reg.priceWithDiscount = grossPrice
reg.save()
- for er in eventRegs:
+ for er in self.event_registrations:
er.registration = reg
er.save()
|
Made event_registrations a property of ClassRegistrationView to permit more sophisticated form logic in subclasses.
|
django-danceschool_django-danceschool
|
train
|
897e8e454fbc15a4b74f48e89778a2a658aa5e25
|
diff --git a/src/Support/Database/QueryFilter.php b/src/Support/Database/QueryFilter.php
index <HASH>..<HASH> 100644
--- a/src/Support/Database/QueryFilter.php
+++ b/src/Support/Database/QueryFilter.php
@@ -66,7 +66,7 @@ class QueryFilter
'resolveArgs' => array_get($filter, 'resolveArgs', []),
]);
- $resolve($query, $key, $resolveArgs);
+ $query = $resolve($query, $key, $resolveArgs);
}
return $query;
|
Feat: Added support for filter queries to return a different query.
|
nuwave_lighthouse
|
train
|
053e43af2615b73fb9568050eb725ea1eb302a97
|
diff --git a/.scrutinizer.yml b/.scrutinizer.yml
index <HASH>..<HASH> 100755
--- a/.scrutinizer.yml
+++ b/.scrutinizer.yml
@@ -8,6 +8,8 @@ tools:
checks:
php:
+ code_rating: true
+ duplication: true
remove_extra_empty_lines: true
remove_php_closing_tag: true
remove_trailing_whitespace: true
diff --git a/migrations/2016_03_17_000000_create_lern_tables.php b/migrations/2016_03_17_000000_create_lern_tables.php
index <HASH>..<HASH> 100755
--- a/migrations/2016_03_17_000000_create_lern_tables.php
+++ b/migrations/2016_03_17_000000_create_lern_tables.php
@@ -1,5 +1,7 @@
<?php
+namespace Tylercd100\LERN\Migrations;
+
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
@@ -13,7 +15,7 @@ class CreateLERNTables extends Migration {
public function up()
{
Schema::create(config('lern.record.table'), function(Blueprint $table) {
- $table->increments('id', true)->unsigned();
+ $table->increments('id')->unsigned();
$table->string('class');
$table->string('file');
$table->integer('code');
diff --git a/src/Notifications/MonologHandlerFactory.php b/src/Notifications/MonologHandlerFactory.php
index <HASH>..<HASH> 100755
--- a/src/Notifications/MonologHandlerFactory.php
+++ b/src/Notifications/MonologHandlerFactory.php
@@ -12,7 +12,7 @@ class MonologHandlerFactory {
/**
* Creates a handler for a specified driver
* @param string $driver Lowercase driver string that is also in the config/lern.php file
- * @param array $subject Title or Subject line for the notification
+ * @param string $subject Title or Subject line for the notification
* @return \Monolog\Handler\HandlerInterface A handler to use with a Monolog\Logger instance
*/
public function create($driver, $subject = null)
@@ -64,7 +64,7 @@ class MonologHandlerFactory {
/**
* Creates Pushover Monolog Handler
- * @param array $subject Title or Subject line for the notification
+ * @param string $subject Title or Subject line for the notification
* @return \Monolog\Handler\PushoverHandler A handler to use with a Monolog\Logger instance
*/
protected function pushover($subject)
@@ -79,7 +79,7 @@ class MonologHandlerFactory {
/**
* Creates Mail Monolog Handler
- * @param array $subject Title or Subject line for the notification
+ * @param string $subject Title or Subject line for the notification
* @return \Monolog\Handler\NativeMailerHandler A handler to use with a Monolog\Logger instance
*/
protected function mail($subject)
@@ -107,8 +107,8 @@ class MonologHandlerFactory {
/**
* Validates that the subject is an unempty string
- * @param mixed $subject [description]
- * @return [type] [description]
+ * @param mixed $subject The value to check
+ * @return void
*/
private function checkSubject($subject) {
if (empty($subject)) {
diff --git a/src/Notifications/Notifier.php b/src/Notifications/Notifier.php
index <HASH>..<HASH> 100755
--- a/src/Notifications/Notifier.php
+++ b/src/Notifications/Notifier.php
@@ -27,17 +27,25 @@ class Notifier {
}
/**
+ * Transforms a value into a closure that returns itself when called
+ * @param callable|string $cb The value that you want to wrap in a closure
+ * @return callable
+ */
+ private function wrapValueInClosure($cb){
+ if (is_callable($cb)) {
+ return $cb;
+ } else {
+ return function() use ($cb) { return $cb; };
+ }
+ }
+
+ /**
* Set a string or a closure to be called that will generate the message body for the notification
- * @param function|string $cb This closure function will be passed an Exception and must return a string
+ * @param callable|string $cb A closure or string that will be set for the message
*/
public function setMessage($cb)
{
- if (is_string($cb)) {
- $this->messageCb = function() use ($cb) { return $cb; };
- } else if (is_callable($cb)) {
- $this->messageCb = $cb;
- }
-
+ $this->messageCb = $this->wrapValueInClosure($cb);
return $this;
}
@@ -60,16 +68,11 @@ class Notifier {
/**
* Set a string or a closure to be called that will generate the subject line for the notification
- * @param function|string $cb This closure function will be passed an Exception and must return a string
+ * @param callable|string $cb A closure or string that will be set for the subject line
*/
public function setSubject($cb)
{
- if (is_string($cb)) {
- $this->subjectCb = function() use ($cb) { return $cb; };
- } else if (is_callable($cb)) {
- $this->subjectCb = $cb;
- }
-
+ $this->subjectCb = $this->wrapValueInClosure($cb);
return $this;
}
|
Fixed Duplication. Fixed Docs.
|
tylercd100_lern
|
train
|
910651f2d2623f062a398a84d71373d4d67dde0a
|
diff --git a/test/test_generic.rb b/test/test_generic.rb
index <HASH>..<HASH> 100644
--- a/test/test_generic.rb
+++ b/test/test_generic.rb
@@ -1,8 +1,6 @@
require 'test/unit'
require 'rdl'
-RDL::MasterSwitch.turn_off
-
class GenericTest < Test::Unit::TestCase
include RDL::Type
@@ -87,8 +85,6 @@ class GenericTest < Test::Unit::TestCase
end
def test_array_methods
- RDL::MasterSwitch.turn_on
-
x = [1,2,3,"123"].rdl_inst({:t => "Fixnum or String or TrueClass"})
y = x.push(true)
assert_equal [1,2,3,"123",true], y
@@ -103,8 +99,6 @@ class GenericTest < Test::Unit::TestCase
y = x.+([true, false])
assert_equal [1,"a",true, false], y
-
- RDL::MasterSwitch.turn_off
end
def test_type_params
diff --git a/test/test_intersection.rb b/test/test_intersection.rb
index <HASH>..<HASH> 100644
--- a/test/test_intersection.rb
+++ b/test/test_intersection.rb
@@ -1,8 +1,6 @@
require 'test/unit'
require 'rdl'
-RDL::MasterSwitch.turn_off
-
class IntersectionTest < Test::Unit::TestCase
include RDL::Type
@@ -42,9 +40,6 @@ class IntersectionTest < Test::Unit::TestCase
def test_array_types
arr = [1,2,3,4,5]
-
- RDL::MasterSwitch.turn_on
-
x = arr[1]
assert_equal(2, x)
@@ -65,8 +60,6 @@ class IntersectionTest < Test::Unit::TestCase
assert_raise(RDL::TypesigException) {
arr[1, true]
}
-
- RDL::MasterSwitch.turn_off
end
end
diff --git a/test/test_parser.rb b/test/test_parser.rb
index <HASH>..<HASH> 100644
--- a/test/test_parser.rb
+++ b/test/test_parser.rb
@@ -1,8 +1,6 @@
require 'test/unit'
require 'rdl'
-RDL::MasterSwitch.turn_off
-
class TypeTest < Test::Unit::TestCase
include RDL::Type
diff --git a/test/test_types.rb b/test/test_types.rb
index <HASH>..<HASH> 100644
--- a/test/test_types.rb
+++ b/test/test_types.rb
@@ -1,8 +1,6 @@
require 'test/unit'
require 'rdl'
-RDL::MasterSwitch.turn_off
-
class TypeTest < Test::Unit::TestCase
include RDL::Type
|
no longer need to have MasterSwitch turned off at these places
|
plum-umd_rdl
|
train
|
488d01796a5e64e9b62aecf58699daf1c3858f7c
|
diff --git a/smack-tcp/src/main/java/org/jivesoftware/smack/tcp/XmppTcpTransportModule.java b/smack-tcp/src/main/java/org/jivesoftware/smack/tcp/XmppTcpTransportModule.java
index <HASH>..<HASH> 100644
--- a/smack-tcp/src/main/java/org/jivesoftware/smack/tcp/XmppTcpTransportModule.java
+++ b/smack-tcp/src/main/java/org/jivesoftware/smack/tcp/XmppTcpTransportModule.java
@@ -1066,18 +1066,25 @@ public class XmppTcpTransportModule extends ModularXmppClientToServerConnectionM
SSLEngineResult.HandshakeStatus handshakeStatus = handleHandshakeStatus(result);
switch (handshakeStatus) {
case NEED_TASK:
- // A delegated task is asynchronously running. Signal that there is pending input data and
- // cycle again through the smack reactor.
+ // A delegated task is asynchronously running. Take care of the remaining accumulatedData.
addAsPendingInputData(accumulatedData);
- break;
+ // Return here, as the async task created by handleHandshakeStatus will continue calling the
+ // cannelSelectedCallback.
+ return null;
case NEED_UNWRAP:
continue;
case NEED_WRAP:
// NEED_WRAP means that the SSLEngine needs to send data, probably without consuming data.
// We exploit here the fact that the channelSelectedCallback is single threaded and that the
// input processing is after the output processing.
+ addAsPendingInputData(accumulatedData);
+ // Note that it is ok that we the provided argument for pending input filter data to channel
+ // selected callback is false, as setPendingInputFilterData() will have set the internal state
+ // boolean accordingly.
connectionInternal.asyncGo(() -> callChannelSelectedCallback(false, true));
- break;
+ // Do not break here, but instead return and let the asynchronously invoked
+ // callChannelSelectedCallback() do its work.
+ return null;
default:
break;
}
@@ -1109,8 +1116,13 @@ public class XmppTcpTransportModule extends ModularXmppClientToServerConnectionM
}
private void addAsPendingInputData(ByteBuffer byteBuffer) {
+ // TODO: Why doeesn't simply
+ // pendingInputData = byteBuffer;
+ // work?
pendingInputData = ByteBuffer.allocate(byteBuffer.remaining());
pendingInputData.put(byteBuffer).flip();
+
+ pendingInputFilterData = pendingInputData.hasRemaining();
}
private SSLEngineResult.HandshakeStatus handleHandshakeStatus(SSLEngineResult sslEngineResult) {
|
[tcp] Fix TlsState by aborting the channel selected callback
Instead of breaking in case the SSLEngine signals NEED_WRAP, which
leads to an endless loop while holding the
channelSelectedCallbackLock, we have to return, so that the
asynchronously invoked callback can aquire it, and do its work.
|
igniterealtime_Smack
|
train
|
792d20988f82b0bf2e26565defa69355d51d7502
|
diff --git a/src/processors/SQLChunkProcessor.php b/src/processors/SQLChunkProcessor.php
index <HASH>..<HASH> 100644
--- a/src/processors/SQLChunkProcessor.php
+++ b/src/processors/SQLChunkProcessor.php
@@ -158,7 +158,7 @@ class SQLChunkProcessor extends AbstractProcessor {
}
if (!empty($out['HAVING'])) {
$processor = new HavingProcessor();
- $out['HAVING'] = $processor->process($out['HAVING']);
+ $out['HAVING'] = $processor->process($out['HAVING'], isset($out['SELECT']) ? $out['SELECT'] : array());
}
if (!empty($out['SET'])) {
$processor = new SetProcessor();
|
CHG: the new HavingProcessor needs the already parsed SELECT clause to get the aliases from there.
git-svn-id: <URL>
|
greenlion_PHP-SQL-Parser
|
train
|
67c2b23d7437627579ba93f25e0dd136ec0d0aeb
|
diff --git a/gubernator/view_build.py b/gubernator/view_build.py
index <HASH>..<HASH> 100644
--- a/gubernator/view_build.py
+++ b/gubernator/view_build.py
@@ -34,6 +34,17 @@ class JUnitParser(object):
self.passed = []
self.failed = []
+ def handle_suite(self, tree, filename):
+ for subelement in tree:
+ if subelement.tag == 'testsuite':
+ self.handle_suite(subelement, filename)
+ elif subelement.tag == 'testcase':
+ if 'name' in tree.attrib:
+ name_prefix = tree.attrib['name'] + ' '
+ else:
+ name_prefix = ''
+ self.handle_test(subelement, filename, name_prefix)
+
def handle_test(self, child, filename, name_prefix=''):
name = name_prefix + child.attrib['name']
if child.find('skipped') is not None:
@@ -64,13 +75,10 @@ class JUnitParser(object):
('Gubernator Internal Fatal XML Parse Error', 0.0, str(e), filename, ''))
return
if tree.tag == 'testsuite':
- for child in tree:
- self.handle_test(child, filename)
+ self.handle_suite(tree, filename)
elif tree.tag == 'testsuites':
for testsuite in tree:
- name_prefix = testsuite.attrib['name'] + ' '
- for child in testsuite.findall('testcase'):
- self.handle_test(child, filename, name_prefix)
+ self.handle_suite(testsuite, filename)
else:
logging.error('unable to find failures, unexpected tag %s', tree.tag)
diff --git a/gubernator/view_build_test.py b/gubernator/view_build_test.py
index <HASH>..<HASH> 100644
--- a/gubernator/view_build_test.py
+++ b/gubernator/view_build_test.py
@@ -66,6 +66,28 @@ class ParseJunitTest(unittest.TestCase):
"out: first line\nout: second line\nerr: first line",
)])
+ def test_nested_testsuites(self):
+ results = self.parse('''
+ <testsuites>
+ <testsuite name="k8s.io/suite">
+ <testsuite name="k8s.io/suite/sub">
+ <properties>
+ <property name="go.version" value="go1.6"/>
+ </properties>
+ <testcase name="TestBad" time="0.1">
+ <failure>something bad</failure>
+ <system-out>out: first line</system-out>
+ <system-err>err: first line</system-err>
+ <system-out>out: second line</system-out>
+ </testcase>
+ </testsuite>
+ </testsuite>
+ </testsuites>''')
+ self.assertEqual(results['failed'], [(
+ 'k8s.io/suite/sub TestBad', 0.1, 'something bad', "fp",
+ "out: first line\nout: second line\nerr: first line",
+ )])
+
def test_bad_xml(self):
self.assertEqual(self.parse('''<body />''')['failed'], [])
|
Supported nested jUnit test suites in Gubernator
The jUnit XML spec allows for infinitely nested test suites in a test
report, so Gubernator should support this by parsing those files
correctly.
|
kubernetes_test-infra
|
train
|
803b5933d7749f5bc3778f91f2552f8b3b9c52a4
|
diff --git a/Rakefile b/Rakefile
index <HASH>..<HASH> 100644
--- a/Rakefile
+++ b/Rakefile
@@ -9,10 +9,8 @@ Rake::TestTask.new(:test) do |test|
test.pattern = "test/**/*_test.rb"
test.verbose = false
- # Set interpreter warning level to 1 (medium). Level 2 produces hundreds of warnings
- # about uninitialized instance variables.
- # TODO: Find a good way to deal with the level 2 warnings.
- test.ruby_opts += ["-W1"]
+ # Set interpreter warning level to 2 (verbose)
+ test.ruby_opts += ["-W2"]
end
require "rubocop/rake_task"
diff --git a/authlogic.gemspec b/authlogic.gemspec
index <HASH>..<HASH> 100644
--- a/authlogic.gemspec
+++ b/authlogic.gemspec
@@ -27,6 +27,7 @@ require "authlogic/version"
s.add_dependency "scrypt", ">= 1.2", "< 4.0"
s.add_development_dependency "bcrypt", "~> 3.1"
s.add_development_dependency "byebug", "~> 10.0"
+ s.add_development_dependency "minitest-reporters", "~> 1.2"
s.add_development_dependency "rubocop", "~> 0.54.0"
s.add_development_dependency "timecop", "~> 0.7"
diff --git a/lib/authlogic/acts_as_authentic/password.rb b/lib/authlogic/acts_as_authentic/password.rb
index <HASH>..<HASH> 100644
--- a/lib/authlogic/acts_as_authentic/password.rb
+++ b/lib/authlogic/acts_as_authentic/password.rb
@@ -303,6 +303,7 @@ module Authlogic
module InstanceMethods
# The password
def password
+ return nil unless defined?(@password)
@password
end
@@ -472,7 +473,7 @@ module Authlogic
end
def password_changed?
- @password_changed == true
+ defined?(@password_changed) && @password_changed == true
end
def reset_password_changed
diff --git a/lib/authlogic/session/id.rb b/lib/authlogic/session/id.rb
index <HASH>..<HASH> 100644
--- a/lib/authlogic/session/id.rb
+++ b/lib/authlogic/session/id.rb
@@ -3,6 +3,11 @@ module Authlogic
# Allows you to separate sessions with an id, ultimately letting you create
# multiple sessions for the same user.
module Id
+ def initialize(*args)
+ @id = nil
+ super
+ end
+
def self.included(klass)
klass.class_eval do
attr_writer :id
diff --git a/lib/authlogic/session/password.rb b/lib/authlogic/session/password.rb
index <HASH>..<HASH> 100644
--- a/lib/authlogic/session/password.rb
+++ b/lib/authlogic/session/password.rb
@@ -134,6 +134,7 @@ module Authlogic
configure_password_methods
self.class.configured_password_methods = true
end
+ instance_variable_set("@#{password_field}", nil)
super
end
diff --git a/lib/authlogic/test_case.rb b/lib/authlogic/test_case.rb
index <HASH>..<HASH> 100644
--- a/lib/authlogic/test_case.rb
+++ b/lib/authlogic/test_case.rb
@@ -175,6 +175,11 @@ module Authlogic
# assert_logged_in
# end
module TestCase
+ def initialize(*args)
+ @request = nil
+ super
+ end
+
# Activates authlogic so that you can use it in your tests. You should call
# this method in your test's setup. Ex:
#
diff --git a/test/test_helper.rb b/test/test_helper.rb
index <HASH>..<HASH> 100644
--- a/test/test_helper.rb
+++ b/test/test_helper.rb
@@ -5,6 +5,9 @@ require "active_record"
require "active_record/fixtures"
require "timecop"
require "i18n"
+require "minitest/reporters"
+
+Minitest::Reporters.use!(Minitest::Reporters::SpecReporter.new)
I18n.load_path << File.dirname(__FILE__) + "/i18n/lol.yml"
|
Fix interpreter warning: undefined ivar (#<I>)
* Fix interpreter warning: undefined ivar
* Tests: increase interpreter warning level to verbose
* Install minitest-reporters, which makes it possible to see which tests produce which warnings
|
binarylogic_authlogic
|
train
|
2bfc2c90ea7dcccd28682f06af0eaec5ceaf9622
|
diff --git a/com/checkout/packages/CheckoutApi/Api.php b/com/checkout/packages/CheckoutApi/Api.php
index <HASH>..<HASH> 100644
--- a/com/checkout/packages/CheckoutApi/Api.php
+++ b/com/checkout/packages/CheckoutApi/Api.php
@@ -48,6 +48,11 @@ final class CheckoutApi_Api
if($_apiClass) {
self::setApiClass($_apiClass);
}
+
+ //Initialise the exception library
+ $exceptionState = CheckoutApi_Lib_Factory::getSingletonInstance('CheckoutApi_Lib_ExceptionState');
+ $exceptionState->setErrorState(false);
+
return CheckoutApi_Lib_Factory::getSingletonInstance(self::getApiClass(),$arguments);
}
diff --git a/com/checkout/packages/CheckoutApi/Lib/ExceptionState.php b/com/checkout/packages/CheckoutApi/Lib/ExceptionState.php
index <HASH>..<HASH> 100644
--- a/com/checkout/packages/CheckoutApi/Lib/ExceptionState.php
+++ b/com/checkout/packages/CheckoutApi/Lib/ExceptionState.php
@@ -38,11 +38,11 @@
* @param boolean $state state of the current error
*
*/
- private function setErrorState($state)
+ public function setErrorState($state)
{
- if(!$this->_errorState){
+ //if(!$this->_errorState){
$this->_errorState = $state;
- }
+ //}
}
|
Bugfix: the ExceptionState class was not being initialised after an exception is raised, thus, any other request would make it raise the exception again.
|
checkout_checkout-php-library
|
train
|
a108935748890e01012a749242c05acf9ae787d7
|
diff --git a/webpack.config.js b/webpack.config.js
index <HASH>..<HASH> 100644
--- a/webpack.config.js
+++ b/webpack.config.js
@@ -11,14 +11,14 @@ var fs = require('fs');
var banner = [
"surveyjs - Survey JavaScript library v" + packageJson.version,
- "Copyright (c) 2015-2017 Devsoft Baltic O� - http://surveyjs.org/",
+ "Copyright (c) 2015-2017 Devsoft Baltic OÜ - http://surveyjs.org/",
"License: MIT (http://www.opensource.org/licenses/mit-license.php)",
].join("\n");
// TODO add to dts_bundler
var dts_banner = ["Type definitions for Survey JavaScript library v" + packageJson.version,
"Project: http://surveyjs.org/",
- "Definitions by: Devsoft Baltic O� <https://github.com/surveyjs/>",
+ "Definitions by: Devsoft Baltic OÜ <https://github.com/surveyjs/>",
""].join("\n");
var platformOptions = {
@@ -62,12 +62,12 @@ var platformOptions = {
}
},
keywords: ['jquery', 'jquery-plugin'],
- dependencies: { 'jquery': '>=1.12.4', '@types/react': '0.0.0' }
+ dependencies: { 'jquery': '>=1.12.4', '@types/react': '15.0.21' }
},
'angular': {
externals: {},
keywords: ['angular', 'angular-component'],
- dependencies: { '@types/react': '0.0.0' }
+ dependencies: { '@types/react': '15.0.21' }
},
'vue': {
externals: {
|
updated "@types/react" dependency for angular and jquery
fixed copyright symbols
|
surveyjs_survey-library
|
train
|
edc594e00659d94de8f3fe90935c79388d760646
|
diff --git a/commands/command_migrate_info.go b/commands/command_migrate_info.go
index <HASH>..<HASH> 100644
--- a/commands/command_migrate_info.go
+++ b/commands/command_migrate_info.go
@@ -160,7 +160,13 @@ func (e EntriesBySize) Len() int { return len(e) }
// Less returns the whether or not the MigrateInfoEntry given at `i` takes up
// less total size than the MigrateInfoEntry given at `j`.
-func (e EntriesBySize) Less(i, j int) bool { return e[i].BytesAbove < e[j].BytesAbove }
+func (e EntriesBySize) Less(i, j int) bool {
+ if e[i].BytesAbove == e[j].BytesAbove {
+ return e[i].Qualifier > e[j].Qualifier
+ } else {
+ return e[i].BytesAbove < e[j].BytesAbove
+ }
+}
// Swap swaps the entries given at i, j.
func (e EntriesBySize) Swap(i, j int) { e[i], e[j] = e[j], e[i] }
|
commands: make migrate info sort deterministically
At present the summary of file types and sizes output by the
"migrate info" command is sorted only by the byte count totals.
When two entries have the same counts, though, the order in
which they are output is not defined, and may vary. This will
pose a problem for some tests we expect to add in subsequent
commits.
Therefore we handle the case where two entries have identical
byte counts by further comparing the file types and returning
whichever sorts first lexicographically, according to Go's
internal string sort order.
|
git-lfs_git-lfs
|
train
|
b8c5b7865fa919280a78ce3553dbca95ff2fcf8d
|
diff --git a/inginious/frontend/task_factory.py b/inginious/frontend/task_factory.py
index <HASH>..<HASH> 100644
--- a/inginious/frontend/task_factory.py
+++ b/inginious/frontend/task_factory.py
@@ -220,6 +220,23 @@ class TaskFactory(object):
def _get_last_updates(self, course, taskid, task_fs, need_content=False):
descriptor_name, descriptor_reader = self._get_task_descriptor_info(course.get_id(), taskid)
last_update = {descriptor_name: task_fs.get_last_modification_time(descriptor_name)}
+ translations_fs = task_fs.from_subfolder("$i18n")
+
+ if not translations_fs.exists():
+ translations_fs = task_fs.from_subfolder("student").from_subfolder("$i18n")
+ if not translations_fs.exists():
+ translations_fs = course.get_fs().from_subfolder("$common").from_subfolder("$i18n")
+ if not translations_fs.exists():
+ translations_fs = course.get_fs().from_subfolder("$common").from_subfolder("student").from_subfolder(
+ "$i18n")
+ if not translations_fs.exists():
+ translations_fs = course.get_fs().from_subfolder("$i18n")
+
+ if translations_fs.exists():
+ for f in translations_fs.list(folders=False, files=True, recursive=False):
+ lang = f[0:len(f) - 3]
+ if translations_fs.exists(lang + ".mo"):
+ last_update["$i18n/" + lang + ".mo"] = translations_fs.get_last_modification_time(lang + ".mo")
if need_content:
try:
|
[frontend/task_factory] Add translation files back to cache date watcher
Fix #<I>.
Regression from course and task factories refactor.
Taken from 9be<I>
|
UCL-INGI_INGInious
|
train
|
053ee3ccf473a635aed1cf7acdc2f7a51d7b54bf
|
diff --git a/src/svg/path/parser.py b/src/svg/path/parser.py
index <HASH>..<HASH> 100644
--- a/src/svg/path/parser.py
+++ b/src/svg/path/parser.py
@@ -156,7 +156,7 @@ def parse_path(pathdef, current_pos=0j):
# The control point is assumed to be the reflection of
# the control point on the previous command relative
# to the current point.
- control = current_pos + current_pos - segments[-1].control2
+ control = current_pos + current_pos - segments[-1].control
end = float(elements.pop()) + float(elements.pop()) * 1j
diff --git a/src/svg/path/path.py b/src/svg/path/path.py
index <HASH>..<HASH> 100644
--- a/src/svg/path/path.py
+++ b/src/svg/path/path.py
@@ -1,5 +1,5 @@
from __future__ import division
-from math import sqrt, cos, sin, acos, degrees, radians
+from math import sqrt, cos, sin, acos, degrees, radians, log
from collections import MutableSequence
# This file contains classes for the different types of SVG path segments as
@@ -83,21 +83,47 @@ class CubicBezier(object):
return lenght
-class QuadraticBezier(CubicBezier):
- # For Quadratic Bezier we simply subclass the Cubic. This is less efficient
- # and gives more complex calculations, but reuse means less bugs.
- # It is possible to calculate the length of a quadratic bezier so a TODO is to
- # replace the geometric approximation here.
-
+class QuadraticBezier(object):
def __init__(self, start, control, end):
self.start = start
- self.control1 = self.control2 = control
self.end = end
+ self.control = control
def __repr__(self):
return '<QuadradicBezier start=%s control=%s end=%s>' % (
- self.start, self.control1, self.end)
+ self.start, self.control, self.end)
+
+ def __eq__(self, other):
+ if not isinstance(other, QuadraticBezier):
+ return NotImplemented
+ return self.start == other.start and self.end == other.end and \
+ self.control == other.control
+
+ def __ne__(self, other):
+ if not isinstance(other, QuadraticBezier):
+ return NotImplemented
+ return not self == other
+ def point(self, pos):
+ return (1-pos)**2*self.start + 2*(1-pos)*pos*self.control + \
+ pos**2*self.end
+
+ def length(self):
+ # http://fontforge.github.io/bezier.html
+ a = self.start - 2*self.control + self.end
+ b = 2*(self.control - self.start)
+
+ A = 4*(a.real**2 + a.imag**2)
+ B = 4*(a.real*b.real + a.imag*b.imag)
+ C = b.real**2 + b.imag**2
+
+ Sabc = 2*sqrt(A+B+C)
+ A2 = sqrt(A)
+ A32 = 2*A*A2
+ C2 = 2*sqrt(C)
+ BA = B/A2
+
+ return (A32*Sabc + A2*B*(Sabc-C2) + (4*C*A-B**2)*log((2*A2+BA+Sabc)/(BA+C2)))/(4*A32)
class Arc(object):
diff --git a/src/svg/path/tests/test_paths.py b/src/svg/path/tests/test_paths.py
index <HASH>..<HASH> 100644
--- a/src/svg/path/tests/test_paths.py
+++ b/src/svg/path/tests/test_paths.py
@@ -263,20 +263,24 @@ class QuadraticBezierTest(unittest.TestCase):
# M200,300 Q400,50 600,300 T1000,300
path1 = QuadraticBezier(200+300j, 400+50j, 600+300j)
self.assertAlmostEqual(path1.point(0), (200+300j))
- self.assertAlmostEqual(path1.point(0.3), (336.8+142.5j))
- self.assertAlmostEqual(path1.point(0.5), (400+112.5j))
- self.assertAlmostEqual(path1.point(0.9), (545.6+232.5j))
+ self.assertAlmostEqual(path1.point(0.3), (320+195j))
+ self.assertAlmostEqual(path1.point(0.5), (400+175j))
+ self.assertAlmostEqual(path1.point(0.9), (560+255j))
self.assertAlmostEqual(path1.point(1), (600+300j))
# T1000, 300
inversion = (600+300j) + (600+300j) - (400+50j)
path2 = QuadraticBezier(600+300j, inversion, 1000+300j)
self.assertAlmostEqual(path2.point(0), (600+300j))
- self.assertAlmostEqual(path2.point(0.3), (736.8+457.5j))
- self.assertAlmostEqual(path2.point(0.5), (800+487.5j))
- self.assertAlmostEqual(path2.point(0.9), (945.6+367.5j))
+ self.assertAlmostEqual(path2.point(0.3), (720+405j))
+ self.assertAlmostEqual(path2.point(0.5), (800+425j))
+ self.assertAlmostEqual(path2.point(0.9), (960+345j))
self.assertAlmostEqual(path2.point(1), (1000+300j))
-
+
+ def test_length(self):
+ # calculated with the cubic bezier length estimation
+ path1 = QuadraticBezier(200+300j, 400+50j, 600+300j)
+ self.assertAlmostEqual(path1.length(), 487.7710938890204)
class ArcTest(unittest.TestCase):
@@ -422,4 +426,4 @@ class TestPath(unittest.TestCase):
if __name__ == "__main__":
- unittest.main()
\ No newline at end of file
+ unittest.main()
|
Fixed the Quadratic Bezier curve. Current code implemented it as a Cubic Bezier with the second and third control points equal to the second Quadratic control point. This is incorrect (see: <URL>). Changed to mathematically find the curve length, instead of approximating it.
* Updated tests for QuadraticBezier.point(). Previous points were calculated using a CubicBezier with the incorrect control points.
* Added test for QuadraticBezier.length(). Tests against the length found from approximation using an equivalent cubic bezier curve.
|
regebro_svg.path
|
train
|
3b75cb49eac536d07cc4302194dd64a34d880991
|
diff --git a/test/com/inet/lib/less/ErrorTest.java b/test/com/inet/lib/less/ErrorTest.java
index <HASH>..<HASH> 100644
--- a/test/com/inet/lib/less/ErrorTest.java
+++ b/test/com/inet/lib/less/ErrorTest.java
@@ -6,38 +6,89 @@ import static org.junit.Assert.*;
public class ErrorTest {
- @Test
- public void parenthesisWithComma() {
- Less.compile( null, ".a { a: (red); }", false );
+ private void assertLessException( String less, String expectedErrorMessage ) {
try {
- Less.compile( null, ".a { a: (red,green); }", false );
+ Less.compile( null, less, false );
fail( "LessException expected" );
} catch( LessException lex ) {
String message = lex.getMessage();
- assertEquals( "Unrecognised input", message.substring( 0, message.indexOf( '\n' ) ) );
+ assertEquals( expectedErrorMessage, message.substring( 0, message.indexOf( '\n' ) ) );
}
}
+ @Test
+ public void parenthesisWithComma() {
+ Less.compile( null, ".a { a: (red); }", false );
+ assertLessException( ".a { a: (red,green); }", "Unrecognized input" );
+ }
@Test
public void maxDiffTypes() {
- try {
- Less.compile( null, ".a { a: max( 1px, 1% ); }", false );
- fail( "LessException expected" );
- } catch( LessException lex ) {
- String message = lex.getMessage();
- assertEquals( "Incompatible types", message.substring( 0, message.indexOf( '\n' ) ) );
- }
+ assertLessException( ".a { a: max( 1px, 1% ); }", "Incompatible types" );
}
@Test
public void minDiffTypes() {
- try {
- Less.compile( null, ".a { a: min( 1px, 1% ); }", false );
- fail( "LessException expected" );
- } catch( LessException lex ) {
- String message = lex.getMessage();
- assertEquals( "Incompatible types", message.substring( 0, message.indexOf( '\n' ) ) );
- }
+ assertLessException( ".a { a: min( 1px, 1% ); }", "Incompatible types" );
+ }
+
+ @Test
+ public void unrecognizedInput1() {
+ assertLessException( "a: > 5;", "Unrecognized input: '>'" );
+ }
+
+ @Test
+ public void unrecognizedInput2() {
+ assertLessException( "a: 1 <> 5;", "Unrecognized input: '>'" );
+ }
+
+ @Test
+ public void unrecognizedInput3() {
+ assertLessException( "a:}", "Unrecognized input: 'a:}'" );
+ }
+
+ @Test
+ public void unrecognizedInput4() {
+ assertLessException( "a:);", "Unrecognized input: 'a:)'" );
+ }
+
+ @Test
+ public void unrecognizedInput5() {
+ assertLessException( "/*comment", "Unrecognized input: '/*comment'" );
+ }
+
+ @Test
+ public void unrecognizedInput6() {
+ assertLessException( "@{a;", "Unrecognized input: '@{a;'" );
+ }
+
+ @Test
+ public void unrecognizedInput7() {
+ assertLessException( ".a()xyz{}", "Unrecognized input: 'xyz{'" );
+ }
+
+ @Test
+ public void unknownImportKeyword() {
+ assertLessException( "@import (xyz) 'c.data';", "Unknown @import keyword: xyz" );
+ }
+
+ @Test
+ public void undefineVariableInSelectorInput() {
+ assertLessException( "a@{b}c{a:1;}", "Undefine Variable: @b in a@{b}c" );
+ }
+
+ @Test
+ public void unexpectedEOF1() {
+ assertLessException( "@a:'a", "Unexpected end of Less data" );
+ }
+
+ @Test
+ public void unexpectedEOF2() {
+ assertLessException( "a{b", "Unexpected end of Less data" );
+ }
+
+ @Test
+ public void propsInRoot() {
+ assertLessException( "a: 5;", "Properties must be inside selector blocks, they cannot be in the root." );
}
/**
|
More error test case to improve the coverage.
|
i-net-software_jlessc
|
train
|
e63bb2459986b0980126310557c2c3b00b0cab7b
|
diff --git a/src/base/Dialog.js b/src/base/Dialog.js
index <HASH>..<HASH> 100644
--- a/src/base/Dialog.js
+++ b/src/base/Dialog.js
@@ -37,12 +37,16 @@ class Dialog extends Base {
/** @type {any} */ const cast = this;
cast[FocusCaptureMixin.wrap](frame);
+ // We'd prefer to use inline-grid instead of inline-flex, but for grid
+ // styling we'd want the row to be 1fr tall. Unfortunately, as of
+ // 2021-02-11, Safari doesn't seem to handle that basic need correctly.
+ // May be caused by https://bugs.webkit.org/show_bug.cgi?id=202051.
result.content.append(
fragmentFrom.html`
<style>
:host {
- display: grid;
- grid-template: minmax(0, 1fr) / minmax(0, 1fr);
+ display: inline-flex;
+ flex-direction: column;
height: 100%;
left: 0;
pointer-events: initial;
diff --git a/src/base/Drawer.js b/src/base/Drawer.js
index <HASH>..<HASH> 100644
--- a/src/base/Drawer.js
+++ b/src/base/Drawer.js
@@ -435,13 +435,14 @@ class Drawer extends Base {
cast[FocusCaptureMixin.wrap](frameContent);
}
+ // We'd prefer to use inline-grid instead of inline-flex; see Dialog.js
result.content.append(
fragmentFrom.html`
<style>
:host {
align-items: stretch;
- display: grid;
- grid-template: minmax(0, 1fr) / minmax(0, 1fr);
+ display: inline-flex;
+ flex-direction: column;
-webkit-overflow-scrolling: touch; /* for momentum scrolling */
}
|
Revert to use of inline-flex instead of inline-grid to work around problems (bug?) in WebKit.
|
elix_elix
|
train
|
43b2c9a1d0a49362f7c1e4bdcfd90f8264783cbe
|
diff --git a/state/state.go b/state/state.go
index <HASH>..<HASH> 100644
--- a/state/state.go
+++ b/state/state.go
@@ -3,7 +3,6 @@ package state
import (
"context"
"fmt"
- "math/rand"
"os"
"os/user"
"time"
@@ -14,12 +13,6 @@ import (
"github.com/hashicorp/terraform/version"
)
-var rngSource *rand.Rand
-
-func init() {
- rngSource = rand.New(rand.NewSource(time.Now().UnixNano()))
-}
-
// State is a deprecated alias for statemgr.Full
type State = statemgr.Full
@@ -82,13 +75,7 @@ func LockWithContext(ctx context.Context, s State, info *LockInfo) (string, erro
// Generate a LockInfo structure, populating the required fields.
func NewLockInfo() *LockInfo {
- // this doesn't need to be cryptographically secure, just unique.
- // Using math/rand alleviates the need to check handle the read error.
- // Use a uuid format to match other IDs used throughout Terraform.
- buf := make([]byte, 16)
- rngSource.Read(buf)
-
- id, err := uuid.FormatUUID(buf)
+ id, err := uuid.GenerateUUID()
if err != nil {
// this of course shouldn't happen
panic(err)
|
remove single rand source to prevent races
This shouldn't really be an issue in normal usage, but some of the
backend tests will trigger a race here.
|
hashicorp_terraform
|
train
|
f94fb7728216e8edaa7c44c0a0525f9b1f5107b8
|
diff --git a/lib/offsite_payments.rb b/lib/offsite_payments.rb
index <HASH>..<HASH> 100644
--- a/lib/offsite_payments.rb
+++ b/lib/offsite_payments.rb
@@ -3,8 +3,6 @@ require 'cgi'
require "timeout"
require "socket"
-require 'active_support/core_ext/class/delegating_attributes'
-
require 'active_utils'
require "offsite_payments/helper"
|
Remove unneeded require
superclass_delegating_accessor is not being used in the codebase
anymore.
|
activemerchant_offsite_payments
|
train
|
b7ccf916650f81e4d66e2a6577b4bb3455761865
|
diff --git a/Classes/ConnectionManager.php b/Classes/ConnectionManager.php
index <HASH>..<HASH> 100644
--- a/Classes/ConnectionManager.php
+++ b/Classes/ConnectionManager.php
@@ -442,7 +442,8 @@ class ConnectionManager implements SingletonInterface, ClearCacheActionsHookInte
$tmpl->generateConfig();
$GLOBALS['TSFE']->tmpl->setup = $tmpl->setup;
- $configuration = Util::getSolrConfiguration();
+ $configuration = Util::getSolrConfigurationFromPageId($rootPage['uid'], false, $languageId);
+
$solrIsEnabledAndConfigured = $configuration->getEnabled() && $configuration->getSolrHasConnectionConfiguration();
if (!$solrIsEnabledAndConfigured) {
return $connection;
|
[BUGFIX] Take language into account when configuration is fetched
Fixes: #<I>
|
TYPO3-Solr_ext-solr
|
train
|
e0550a5eabc9fc86bdd89385672a3098f5e16391
|
diff --git a/rtv/content.py b/rtv/content.py
index <HASH>..<HASH> 100644
--- a/rtv/content.py
+++ b/rtv/content.py
@@ -439,9 +439,9 @@ class SubredditContent(Content):
raise IndexError
else:
data = self.strip_praw_submission(submission)
- data['index'] = index
+ data['index'] = len(self._submission_data) + 1
# Add the post number to the beginning of the title
- data['title'] = '{0}. {1}'.format(index+1, data['title'])
+ data['title'] = '{0}. {1}'.format(data['index'], data['title'])
self._submission_data.append(data)
# Modifies the original dict, faster than copying
diff --git a/tests/test_content.py b/tests/test_content.py
index <HASH>..<HASH> 100644
--- a/tests/test_content.py
+++ b/tests/test_content.py
@@ -216,13 +216,16 @@ def test_content_subreddit_load_more(reddit, terminal):
assert content.get(50)['type'] == 'Submission'
assert len(content._submission_data) == 51
- for data in islice(content.iterate(0, 1), 0, 50):
+ for i, data in enumerate(islice(content.iterate(0, 1), 0, 50)):
assert all(k in data for k in ('object', 'n_rows', 'offset', 'type',
'index', 'title', 'split_title'))
# All text should be converted to unicode by this point
for val in data.values():
assert not isinstance(val, six.binary_type)
+ # Index be appended to each title, starting at "1." and incrementing
+ assert data['index'] == i + 1
+ assert data['title'].startswith(six.text_type(i + 1))
def test_content_subreddit_from_name(reddit, terminal):
|
Fix bug where submission indices were duplicated when paging. #<I>
|
michael-lazar_rtv
|
train
|
447f1f18760630e74404e212afa2cd7e4183813e
|
diff --git a/lib/bumbleworks/ruote/exp/wait_for_event_expression.rb b/lib/bumbleworks/ruote/exp/wait_for_event_expression.rb
index <HASH>..<HASH> 100644
--- a/lib/bumbleworks/ruote/exp/wait_for_event_expression.rb
+++ b/lib/bumbleworks/ruote/exp/wait_for_event_expression.rb
@@ -5,6 +5,11 @@ module Ruote::Exp
class WaitForEventExpression < AwaitExpression
names :wait_for_event
+ # This does the same as the base AwaitExpression#apply, except that this
+ # will always be a global listener, listening for a 'left_tag' event, and
+ # the event's workitem will be discarded after the reply is complete. The
+ # event's workitem is only used for comparisons in the where clause (see
+ # #reply).
def apply
update_tree
h.updated_tree[1]['global'] = true
@@ -13,14 +18,29 @@ module Ruote::Exp
super
end
+ # On apply, the workitem for this FlowExpression was replaced by the workitem
+ # from the event. So when we refer to "f:" in this #reply method, we're
+ # looking at the event's workitem, which will be discarded at the end of this
+ # reply (and replaced with the applied workitem). In order to compare the
+ # event's workitem with the applied workitem (so we can determine whether or
+ # not the event was intended for us), we assign the applied_workitem's fields
+ # to a hash on the event's workitem fields, available at "f:receiver.*".
def reply(workitem)
update_tree
+ # If we have a where clause at all...
if translated_where = attribute(:where, nil, :escape => true)
if translated_where.to_s == 'entities_match'
+ # Check to see that the event's entity is equal to the current workitem's
+ # entity. If so, this message is intended for us.
translated_where = '${f:entity_id} == ${f:receiver.entity_id} && ${f:entity_type} == ${f:receiver.entity_type}'
else
- translated_where.gsub!('${event:', '${f:')
- translated_where.gsub!('${this:', '${f:receiver.')
+ # This just gives us a shortcut so the process definition reads more
+ # clearly. You could always use "${f:" and "${f:receiver." in your
+ # where clauses, but you have to remember that the former refers to the
+ # incoming event's workitem, and the latter is the workitem of the
+ # listening process.
+ translated_where.gsub!('${event:', '${f:') # event workitem
+ translated_where.gsub!('${this:', '${f:receiver.') # listening workitem
end
h.updated_tree[1]['where'] = translated_where
end
|
Add documentation (comments) to wait_for_event exp
|
bumbleworks_bumbleworks
|
train
|
c4fec8c7998113902af4152d716c42dada6eb465
|
diff --git a/src/state.js b/src/state.js
index <HASH>..<HASH> 100644
--- a/src/state.js
+++ b/src/state.js
@@ -41,7 +41,7 @@ function $StateProvider( $urlRouterProvider, $urlMatcherFactory) {
// inherit 'data' from parent and override by own values (if any)
data: function(state) {
if (state.parent && state.parent.data) {
- state.data = state.self.data = extend({}, state.parent.data, state.data);
+ state.data = state.self.data = inherit(state.parent.data, state.data);
}
return state.data;
},
diff --git a/test/stateSpec.js b/test/stateSpec.js
index <HASH>..<HASH> 100644
--- a/test/stateSpec.js
+++ b/test/stateSpec.js
@@ -1338,6 +1338,7 @@ describe('state', function () {
expect($state.current.name).toEqual('HHH');
expect($state.current.data.propA).toEqual(HHH.data.propA);
expect($state.current.data.propB).toEqual(H.data.propB);
+ expect($state.current.data.hasOwnProperty('propB')).toBe(false);
expect($state.current.data.propB).toEqual(HH.data.propB);
expect($state.current.data.propC).toEqual(HHH.data.propC);
}));
|
feat($state): make state data inheritance prototypical
Modify state data inheritance to use prototypical inheritance rather than Angular's extend method to enable more flexibility in handling state data.
|
angular-ui_ui-router
|
train
|
6e34488fa5752cf274f32363d93af1e7081ad8db
|
diff --git a/framework/yii/base/Behavior.php b/framework/yii/base/Behavior.php
index <HASH>..<HASH> 100644
--- a/framework/yii/base/Behavior.php
+++ b/framework/yii/base/Behavior.php
@@ -46,8 +46,8 @@ class Behavior extends \yii\base\Object
*
* ~~~
* [
- * 'beforeValidate' => 'myBeforeValidate',
- * 'afterValidate' => 'myAfterValidate',
+ * Model::EVENT_BEFORE_VALIDATE => 'myBeforeValidate',
+ * Model::EVENT_AFTER_VALIDATE => 'myAfterValidate',
* ]
* ~~~
*
|
Using constants instead of literals in Behavior
|
yiisoft_yii2-bootstrap4
|
train
|
7c782ecd66634e15116ef83ca6f458fab8df0ab4
|
diff --git a/reddit/data.go b/reddit/data.go
index <HASH>..<HASH> 100644
--- a/reddit/data.go
+++ b/reddit/data.go
@@ -37,8 +37,8 @@ type Comment struct {
Distinguished string `mapstructure:"distinguished"`
}
-// IsRoot is true when the comment is a top level comment.
-func (c *Comment) IsRoot() bool {
+// IsTopLevel is true when the comment is a top level comment.
+func (c *Comment) IsTopLevel() bool {
parentType := strings.Split(c.ParentID, "_")[0]
return parentType == postKind
}
|
rename IsRoot to IsTopLevel
|
turnage_graw
|
train
|
72dc83b4676f90f84e42a6c0b1dd1d1aa32777f0
|
diff --git a/config/config.default.php b/config/config.default.php
index <HASH>..<HASH> 100644
--- a/config/config.default.php
+++ b/config/config.default.php
@@ -44,10 +44,7 @@ $config = array(
* @var Imbo\Database\DatabaseInterface|Closure
*/
'database' => function() {
- return new Database\MongoDB(array(
- 'databaseName' => 'imbo',
- 'collectionName' => 'images',
- ));
+ return new Database\MongoDB();
},
/**
@@ -60,9 +57,7 @@ $config = array(
* @var Imbo\Storage\StorageInterface|Closure
*/
'storage' => function() {
- return new Storage\GridFS(array(
- 'databaseName' => 'imbo_storage',
- ));
+ return new Storage\GridFS();
},
/**
diff --git a/library/Imbo/Database/MongoDB.php b/library/Imbo/Database/MongoDB.php
index <HASH>..<HASH> 100755
--- a/library/Imbo/Database/MongoDB.php
+++ b/library/Imbo/Database/MongoDB.php
@@ -28,8 +28,8 @@ use Imbo\Model\Image,
*
* - (string) databaseName Name of the database. Defaults to 'imbo'
* - (array) collectionNames Name of the collections to store data in. Defaults to
- * array('image' => 'image, 'shortUrl' => 'shortUrl'). Change the values
- * in the array to change the default names.
+ * array('image' => 'images', 'shortUrl' => 'shortUrls'). Change the
+ * values in the array to change the default names.
* - (string) server The server string to use when connecting to MongoDB. Defaults to
* 'mongodb://localhost:27017'
* - (array) options Options to use when creating the MongoClient instance. Defaults to
@@ -65,8 +65,8 @@ class MongoDB implements DatabaseInterface {
// Database and collection names
'databaseName' => 'imbo',
'collectionNames' => array(
- 'image' => 'image',
- 'shortUrl' => 'shortUrl',
+ 'image' => 'images',
+ 'shortUrl' => 'shortUrls',
),
// Server string and ctor options
|
Make sure the defaults are the same as before
|
imbo_imbo
|
train
|
4f1a1ca104a6a660493f92f88894286d662af513
|
diff --git a/autofit/tools/edenise/file.py b/autofit/tools/edenise/file.py
index <HASH>..<HASH> 100644
--- a/autofit/tools/edenise/file.py
+++ b/autofit/tools/edenise/file.py
@@ -61,6 +61,7 @@ class File(DirectoryItem):
import_.alias
for import_
in self.aliased_imports
+ if import_.is_in_project
]
def attributes_for_alias(
@@ -90,6 +91,7 @@ class File(DirectoryItem):
"""
All attributes in this file
"""
+
def get_attributes(
obj
):
@@ -158,7 +160,7 @@ class File(DirectoryItem):
for line in self.lines():
if isinstance(
line, Import
- ) and line.is_aliased:
+ ) and line.is_in_project and line.is_aliased:
line = line.as_from_import(
self.attributes_for_alias(
line.alias
diff --git a/autofit/tools/edenise/package.py b/autofit/tools/edenise/package.py
index <HASH>..<HASH> 100644
--- a/autofit/tools/edenise/package.py
+++ b/autofit/tools/edenise/package.py
@@ -70,6 +70,13 @@ class Package(DirectoryItem):
"""
return self.path.name
+ def is_name_of_eden_dependency(self, name):
+ return name == self.name or name in [
+ dependency.name
+ for dependency
+ in self.eden_dependencies
+ ]
+
def _item_for_path(
self,
path: List[str]
diff --git a/test_autofit/tools/edenise/test_examples/test_unpack_imports.py b/test_autofit/tools/edenise/test_examples/test_unpack_imports.py
index <HASH>..<HASH> 100644
--- a/test_autofit/tools/edenise/test_examples/test_unpack_imports.py
+++ b/test_autofit/tools/edenise/test_examples/test_unpack_imports.py
@@ -55,8 +55,8 @@ def test_convert_if_numpy(
prefix=""
)
assert file.target_string == """
-from numpy import isnan
-if isnan(1):
+import numpy as np
+if np.isnan(1):
assert False
"""
|
don't dealias numpy
|
rhayes777_PyAutoFit
|
train
|
48571df870664c5c3706600c2b1833e19bdd15ae
|
diff --git a/gtabview/viewer.py b/gtabview/viewer.py
index <HASH>..<HASH> 100644
--- a/gtabview/viewer.py
+++ b/gtabview/viewer.py
@@ -1,8 +1,12 @@
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, absolute_import, generators
from .compat import *
+from . import models
import math
+AUTOSIZE_LIMIT = models.DEFAULT_CHUNK_SIZE
+
+
# Support PyQt4/PySide with either Python 2/3
try:
from PyQt4 import QtCore, QtGui
@@ -339,10 +343,12 @@ class ExtTableView(QtGui.QWidget):
QtGui.QItemSelectionModel.ClearAndSelect)
def _sizeHintForColumn(self, table, col, limit):
- if limit is None or limit >= table.model().rowCount():
- return table.sizeHintForColumn(col)
+ # TODO: use current chunk boundaries, do not start from the beginning
+ max_row = table.model().rowCount()
+ if limit is not None:
+ max_row = min(max_row, limit)
max_width = 0
- for row in range(min(table.model().rowCount(), limit)):
+ for row in range(max_row):
v = table.sizeHintForIndex(table.model().index(row, col))
max_width = max(max_width, v.width())
return max_width
@@ -428,7 +434,7 @@ class Viewer(QtGui.QMainWindow):
# resizing materializes the contents and might actually take longer
# than loading all the data itself, so do it only on a single chunk
- self.table.resizeColumnsToContents(model.chunk_size())
+ self.table.resizeColumnsToContents(min(AUTOSIZE_LIMIT, model.chunk_size()))
self.table.setFocus()
if start_pos:
|
Always autosize using the requested row contents
sizeHintForColumn() only sizes based on the visible content.
Re-introduce an autosize limit for in-memory structures, causing overlong
column size estimation times.
|
TabViewer_gtabview
|
train
|
0090b9fc79028712396adae438eb5086b8cafc4e
|
diff --git a/aws/resource_aws_codebuild_project.go b/aws/resource_aws_codebuild_project.go
index <HASH>..<HASH> 100644
--- a/aws/resource_aws_codebuild_project.go
+++ b/aws/resource_aws_codebuild_project.go
@@ -60,7 +60,7 @@ func resourceAwsCodeBuildProject() *schema.Resource {
Set: resourceAwsCodeBuildProjectArtifactsHash,
},
"cache": {
- Type: schema.TypeSet,
+ Type: schema.TypeList,
Optional: true,
MaxItems: 1,
Elem: &schema.Resource{
@@ -76,7 +76,6 @@ func resourceAwsCodeBuildProject() *schema.Resource {
},
},
},
- Set: resourceAwsCodeBuildProjectCacheHash,
},
"description": {
Type: schema.TypeString,
@@ -246,7 +245,7 @@ func resourceAwsCodeBuildProjectCreate(d *schema.ResourceData, meta interface{})
}
if v, ok := d.GetOk("cache"); ok {
- params.Cache = expandProjectCache(v.(*schema.Set))
+ params.Cache = expandProjectCache(v.([]interface{}))
}
if v, ok := d.GetOk("description"); ok {
@@ -335,10 +334,10 @@ func expandProjectArtifacts(d *schema.ResourceData) codebuild.ProjectArtifacts {
return projectArtifacts
}
-func expandProjectCache(s *schema.Set) *codebuild.ProjectCache {
+func expandProjectCache(s []interface{}) *codebuild.ProjectCache {
var projectCache *codebuild.ProjectCache
- data := s.List()[0].(map[string]interface{})
+ data := s[0].(map[string]interface{})
projectCache = &codebuild.ProjectCache{
Type: aws.String(data["type"].(string)),
@@ -474,7 +473,7 @@ func resourceAwsCodeBuildProjectRead(d *schema.ResourceData, meta interface{}) e
return err
}
- if err := d.Set("cache", schema.NewSet(resourceAwsCodeBuildProjectCacheHash, flattenAwsCodebuildProjectCache(project.Cache))); err != nil {
+ if err := d.Set("cache", flattenAwsCodebuildProjectCache(project.Cache)); err != nil {
return err
}
@@ -527,7 +526,7 @@ func resourceAwsCodeBuildProjectUpdate(d *schema.ResourceData, meta interface{})
if d.HasChange("cache") {
if v, ok := d.GetOk("cache"); ok {
- params.Cache = expandProjectCache(v.(*schema.Set))
+ params.Cache = expandProjectCache(v.([]interface{}))
} else {
params.Cache = &codebuild.ProjectCache{
Type: aws.String("NO_CACHE"),
@@ -698,21 +697,6 @@ func resourceAwsCodeBuildProjectArtifactsHash(v interface{}) int {
return hashcode.String(buf.String())
}
-func resourceAwsCodeBuildProjectCacheHash(v interface{}) int {
- var buf bytes.Buffer
- m := v.(map[string]interface{})
-
- if m["type"] != nil {
- buf.WriteString(fmt.Sprintf("%s-", m["type"].(string)))
- }
-
- if m["location"] != nil {
- buf.WriteString(fmt.Sprintf("%s-", m["location"].(string)))
- }
-
- return hashcode.String(buf.String())
-}
-
func resourceAwsCodeBuildProjectEnvironmentHash(v interface{}) int {
var buf bytes.Buffer
m := v.(map[string]interface{})
diff --git a/aws/validators.go b/aws/validators.go
index <HASH>..<HASH> 100644
--- a/aws/validators.go
+++ b/aws/validators.go
@@ -15,13 +15,13 @@ import (
"github.com/aws/aws-sdk-go/service/cognitoidentityprovider"
"github.com/aws/aws-sdk-go/service/configservice"
"github.com/aws/aws-sdk-go/service/ec2"
+ "github.com/aws/aws-sdk-go/service/gamelift"
+ "github.com/aws/aws-sdk-go/service/guardduty"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/waf"
"github.com/hashicorp/terraform/helper/schema"
"github.com/hashicorp/terraform/helper/structure"
"github.com/hashicorp/terraform/helper/validation"
- "github.com/aws/aws-sdk-go/service/gamelift"
- "github.com/aws/aws-sdk-go/service/guardduty"
)
// When released, replace all usage with upstream validation function:
|
r/aws_codebuild_project: Make cache option into a *schema.List instead of *schema.Set
|
terraform-providers_terraform-provider-aws
|
train
|
ab81c91dfd1325930061b64d82a519d9436e2825
|
diff --git a/katcp/resource.py b/katcp/resource.py
index <HASH>..<HASH> 100644
--- a/katcp/resource.py
+++ b/katcp/resource.py
@@ -595,12 +595,12 @@ class KATCPSensor(object):
@property
def name(self):
- """Name of this KATCP resource"""
+ """Name of this KATCPSensor"""
return self._name
@property
def python_id(self):
- """Python identifer name of this KATCP resource"""
+ """Python identifer name of this KATCPSensor"""
return escape_name(self._name)
@property
|
Update resource.py
Fixing docstrings
|
ska-sa_katcp-python
|
train
|
9cbdb2a8a43ea69314298edaea04d85267f8bbcd
|
diff --git a/src/com/caverock/androidsvg/SVGAndroidRenderer.java b/src/com/caverock/androidsvg/SVGAndroidRenderer.java
index <HASH>..<HASH> 100644
--- a/src/com/caverock/androidsvg/SVGAndroidRenderer.java
+++ b/src/com/caverock/androidsvg/SVGAndroidRenderer.java
@@ -55,6 +55,7 @@ import com.caverock.androidsvg.SVG.Pattern;
import com.caverock.androidsvg.SVG.Rect;
import com.caverock.androidsvg.SVG.Stop;
import com.caverock.androidsvg.SVG.Style;
+import com.caverock.androidsvg.SVG.Style.FontStyle;
import com.caverock.androidsvg.SVG.Style.TextDecoration;
import com.caverock.androidsvg.SVG.SvgContainer;
import com.caverock.androidsvg.SVG.SvgElement;
@@ -105,6 +106,8 @@ public class SVGAndroidRenderer
private static final int LUMINANCE_TO_ALPHA_GREEN = (int)(0.7154f * (1 << LUMINANCE_FACTOR_SHIFT));
private static final int LUMINANCE_TO_ALPHA_BLUE = (int)(0.0721f * (1 << LUMINANCE_FACTOR_SHIFT));
+ private static final String DEFAULT_FONT_FAMILY = "sans-serif";
+
private class RendererState implements Cloneable
{
@@ -1948,20 +1951,18 @@ public class SVGAndroidRenderer
if (state.style.fontFamily != null && document != null) {
fileResolver = document.getFileResolver();
- if (fileResolver != null) {
- for (String fontName: state.style.fontFamily) {
+ for (String fontName: state.style.fontFamily) {
+ font = checkGenericFont(fontName, state.style.fontWeight, state.style.fontStyle);
+ if (font == null && fileResolver != null) {
font = fileResolver.resolveFont(fontName, state.style.fontWeight, String.valueOf(state.style.fontStyle));
- if (font != null)
- break;
}
+ if (font != null)
+ break;
}
}
if (font == null) {
// Fall back to default font
- if (state.style.fontWeight <= 500)
- font = Typeface.create(Typeface.DEFAULT, getTypefaceStyle(style));
- else
- font = Typeface.create(Typeface.DEFAULT_BOLD, getTypefaceStyle(style));
+ font = checkGenericFont(DEFAULT_FONT_FAMILY, state.style.fontWeight, state.style.fontStyle);
}
state.fillPaint.setTypeface(font);
state.strokePaint.setTypeface(font);
@@ -2057,13 +2058,27 @@ public class SVGAndroidRenderer
}
- private int getTypefaceStyle(Style style)
+ private Typeface checkGenericFont(String fontName, Integer fontWeight, FontStyle fontStyle)
{
- boolean italic = (style.fontStyle == Style.FontStyle.Italic);
- if ("bold".equals(style.fontWeight)) {
- return italic ? Typeface.BOLD_ITALIC : Typeface.BOLD;
+ Typeface font = null;
+ int typefaceStyle;
+
+ boolean italic = (fontStyle == Style.FontStyle.Italic);
+ typefaceStyle = (fontWeight > 500) ? (italic ? Typeface.BOLD_ITALIC : Typeface.BOLD)
+ : (italic ? Typeface.ITALIC : Typeface.NORMAL);
+
+ if (fontName.equals("serif")) {
+ font = Typeface.create(Typeface.SERIF, typefaceStyle);
+ } else if (fontName.equals("sans-serif")) {
+ font = Typeface.create(Typeface.SANS_SERIF, typefaceStyle);
+ } else if (fontName.equals("monospace")) {
+ font = Typeface.create(Typeface.MONOSPACE, typefaceStyle);
+ } else if (fontName.equals("cursive")) {
+ font = Typeface.create(Typeface.SANS_SERIF, typefaceStyle);
+ } else if (fontName.equals("fantasy")) {
+ font = Typeface.create(Typeface.SANS_SERIF, typefaceStyle);
}
- return italic ? Typeface.ITALIC : Typeface.NORMAL;
+ return font;
}
|
Issue 1: Added support for generic font family names.
|
BigBadaboom_androidsvg
|
train
|
79170736d2794645703de7bfe08e65d153e12b4a
|
diff --git a/jdl/jhipster/default-application-options.js b/jdl/jhipster/default-application-options.js
index <HASH>..<HASH> 100644
--- a/jdl/jhipster/default-application-options.js
+++ b/jdl/jhipster/default-application-options.js
@@ -166,7 +166,7 @@ function getConfigForUAAApplication(customOptions = {}) {
[AUTHENTICATION_TYPE]: OptionValues[AUTHENTICATION_TYPE].uaa,
[CACHE_PROVIDER]: OptionValues[CACHE_PROVIDER].hazelcast,
[SERVER_PORT]: DEFAULT_SERVER_PORT,
- [SERVICE_DISCOVERY_TYPE]: false,
+ [SERVICE_DISCOVERY_TYPE]: OptionValues[SERVICE_DISCOVERY_TYPE].eureka,
...customOptions,
};
delete options[CLIENT_FRAMEWORK];
diff --git a/test/jdl/jhipster/default-application-options.spec.js b/test/jdl/jhipster/default-application-options.spec.js
index <HASH>..<HASH> 100644
--- a/test/jdl/jhipster/default-application-options.spec.js
+++ b/test/jdl/jhipster/default-application-options.spec.js
@@ -398,6 +398,9 @@ describe('DefaultApplicationOptions', () => {
it('should unset the server skipping option', () => {
expect(options.skipServer).to.be.undefined;
});
+ it('should set the service discovery type to eureka', () => {
+ expect(options.serviceDiscoveryType).to.equal('eureka');
+ });
});
context('when passing custom options', () => {
let options;
|
Set the default for service discovery type to eureka for uaa apps
There was an inconsistency, this should fix it.
|
jhipster_generator-jhipster
|
train
|
60b8ba6b865ba02428d7926998af72cdaa17ea5d
|
diff --git a/src/structures/GuildEmoji.js b/src/structures/GuildEmoji.js
index <HASH>..<HASH> 100644
--- a/src/structures/GuildEmoji.js
+++ b/src/structures/GuildEmoji.js
@@ -31,6 +31,8 @@ class GuildEmoji extends BaseGuildEmoji {
* @private
*/
Object.defineProperty(this, '_roles', { value: [], writable: true });
+
+ this._patch(data);
}
/**
|
fix(GuildEmoji): Cache restricted roles and author data (#<I>)
|
discordjs_discord.js
|
train
|
1f1b7da6f42e083148565da7112770b403197e04
|
diff --git a/thefuck/output_readers/read_log.py b/thefuck/output_readers/read_log.py
index <HASH>..<HASH> 100644
--- a/thefuck/output_readers/read_log.py
+++ b/thefuck/output_readers/read_log.py
@@ -69,6 +69,11 @@ def get_output(script):
warn("Output log isn't specified")
return None, None
+ if const.USER_COMMAND_MARK not in os.environ.get('PS1', ''):
+ warn("PS1 doesn't contain user command mark, please ensure "
+ "that PS1 is not changed after The Fuck alias initialization")
+ return None, None
+
try:
with open(os.environ['THEFUCK_OUTPUT_LOG'], 'rb') as log_file:
lines = _get_output_lines(script, log_file)
|
#<I>: Warn if PS1 changed after thefuck initialization
|
nvbn_thefuck
|
train
|
8d4e0f29dcd7ea53ee1e46ebc9abd3e644bef2f0
|
diff --git a/calendar/classes/local/event/data_access/event_vault.php b/calendar/classes/local/event/data_access/event_vault.php
index <HASH>..<HASH> 100644
--- a/calendar/classes/local/event/data_access/event_vault.php
+++ b/calendar/classes/local/event/data_access/event_vault.php
@@ -207,6 +207,8 @@ class event_vault implements event_vault_interface {
[$user->id],
null,
null,
+ true,
+ true,
function ($event) {
return $event instanceof action_event_interface;
}
@@ -234,6 +236,8 @@ class event_vault implements event_vault_interface {
[$user->id],
null,
[$course->id],
+ true,
+ true,
function ($event) use ($course) {
return $event instanceof action_event_interface && $event->get_course()->get_id() == $course->id;
}
|
MDL-<I> core_calendar: Correctly filter in event vault
The filter parameter was being passed in as the wrong argument which
was allowing non action events to be returned.
Part of MDL-<I> epic.
|
moodle_moodle
|
train
|
f5f52e5f221f7f3d44b721cf72a8eb2e327eca47
|
diff --git a/nolds/test_measures.py b/nolds/test_measures.py
index <HASH>..<HASH> 100644
--- a/nolds/test_measures.py
+++ b/nolds/test_measures.py
@@ -175,8 +175,9 @@ class TestNoldsSampEn(unittest.TestCase):
Tests for sampen
"""
def test_sampen_base(self):
- # TODO implement
- pass
+ data = [0,1,5,4,1,0,1,5,3]
+ se = nolds.sampen(data)
+ self.assertAlmostEqual(se, np.log(2), delta=0.01)
if __name__ == "__main__":
unittest.main()
|
adds small test case for sample entropy
|
CSchoel_nolds
|
train
|
7a743018c9b92a4c8ee826bfdc03ebf56f513dd2
|
diff --git a/allaccess/__init__.py b/allaccess/__init__.py
index <HASH>..<HASH> 100644
--- a/allaccess/__init__.py
+++ b/allaccess/__init__.py
@@ -3,7 +3,7 @@ django-all-access is a reusable application for user registration and authentica
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
-__version__ = '0.3.0'
+__version__ = '0.4.0dev'
import logging
diff --git a/docs/conf.py b/docs/conf.py
index <HASH>..<HASH> 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -48,9 +48,9 @@ copyright = u'2012, Mark Lavin'
# built documents.
#
# The short X.Y version.
-version = '0.3'
+version = '0.4'
# The full version, including alpha/beta/rc tags.
-release = '0.3.0'
+release = '0.4.0dev'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
|
Version bump to reflect current dev status.
|
mlavin_django-all-access
|
train
|
93e46b9ffda885b86f0d0a49c2e9d8c03d884f4c
|
diff --git a/block_test.go b/block_test.go
index <HASH>..<HASH> 100644
--- a/block_test.go
+++ b/block_test.go
@@ -61,7 +61,7 @@ func TestBlock(t *testing.T) {
"e9a66845e05d5abc0ad04ec80f774a7e585c6e8db975962d069a522137b80c1d",
}
- // Request sha for all transactions one at a time.
+ // Request sha for all transactions one at a time via TxSha.
for i, txSha := range wantTxShas {
wantSha, err := btcwire.NewShaHashFromStr(txSha)
if err != nil {
@@ -75,6 +75,7 @@ func TestBlock(t *testing.T) {
t.Errorf("TxSha: %v", err)
continue
}
+
if !sha.IsEqual(wantSha) {
t.Errorf("TxSha #%d mismatched sha - got %v, "+
"want %v", j, sha, wantSha)
@@ -86,6 +87,33 @@ func TestBlock(t *testing.T) {
// Create a new block to nuke all cached data.
b = btcutil.NewBlock(&Block100000)
+ // Request sha for all transactions one at a time via Tx.
+ for i, txSha := range wantTxShas {
+ wantSha, err := btcwire.NewShaHashFromStr(txSha)
+ if err != nil {
+ t.Errorf("NewShaHashFromStr: %v", err)
+ }
+
+ // Request the sha multiple times to test generation and caching.
+ for j := 0; j < 2; j++ {
+ tx, err := b.Tx(i)
+ if err != nil {
+ t.Errorf("Tx #%d: %v", i, err)
+ continue
+ }
+
+ sha := tx.Sha()
+ if !sha.IsEqual(wantSha) {
+ t.Errorf("Sha #%d mismatched sha - got %v, "+
+ "want %v", j, sha, wantSha)
+ continue
+ }
+ }
+ }
+
+ // Create a new block to nuke all cached data.
+ b = btcutil.NewBlock(&Block100000)
+
// Request slice of all transaction shas multiple times to test
// generation and caching.
for i := 0; i < 2; i++ {
@@ -120,6 +148,38 @@ func TestBlock(t *testing.T) {
}
}
+ // Create a new block to nuke all cached data.
+ b = btcutil.NewBlock(&Block100000)
+
+ // Request slice of all transactions multiple times to test generation
+ // and caching.
+ for i := 0; i < 2; i++ {
+ transactions := b.Transactions()
+
+ // Ensure we get the expected number of transactions.
+ if len(transactions) != len(wantTxShas) {
+ t.Errorf("Transactions #%d mismatched number of "+
+ "transactions - got %d, want %d", i,
+ len(transactions), len(wantTxShas))
+ continue
+ }
+
+ // Ensure all of the shas match.
+ for j, tx := range transactions {
+ wantSha, err := btcwire.NewShaHashFromStr(wantTxShas[j])
+ if err != nil {
+ t.Errorf("NewShaHashFromStr: %v", err)
+ }
+
+ sha := tx.Sha()
+ if !sha.IsEqual(wantSha) {
+ t.Errorf("Transactions #%d mismatched shas - "+
+ "got %v, want %v", j, sha, wantSha)
+ continue
+ }
+ }
+ }
+
// Serialize the test block.
var block100000Buf bytes.Buffer
err = Block100000.Serialize(&block100000Buf)
@@ -277,6 +337,18 @@ func TestBlockErrors(t *testing.T) {
"want: <%T>", err, err, btcutil.OutOfRangeError(""))
}
+ // Ensure Tx returns expected error on invalid indices.
+ _, err = b.Tx(-1)
+ if _, ok := err.(btcutil.OutOfRangeError); !ok {
+ t.Errorf("Tx: wrong error - got: %v <%T>, "+
+ "want: <%T>", err, err, btcutil.OutOfRangeError(""))
+ }
+ _, err = b.Tx(len(Block100000.Transactions) + 1)
+ if _, ok := err.(btcutil.OutOfRangeError); !ok {
+ t.Errorf("Tx: wrong error - got: %v <%T>, "+
+ "want: <%T>", err, err, btcutil.OutOfRangeError(""))
+ }
+
// Ensure TxLoc returns expected error with short byte buffer.
// This makes use of the test package only function, SetBlockBytes, to
// inject a short byte buffer.
|
Add tests for new Block API functions.
This commit adds both positive and negative tests for the new Tx and
Transactions Block API functions.
This is part of the ongoing transaction hash optimization effort noted in
conformal/btcd#<I>.
|
btcsuite_btcutil
|
train
|
140b2f34f894735d7a62a4cf4c780e7cca06c16b
|
diff --git a/v1/brokers/redis/redis.go b/v1/brokers/redis/redis.go
index <HASH>..<HASH> 100644
--- a/v1/brokers/redis/redis.go
+++ b/v1/brokers/redis/redis.go
@@ -95,6 +95,7 @@ func (b *Broker) StartConsuming(consumerTag string, concurrency int, taskProcess
select {
// A way to stop this goroutine from b.StopConsuming
case <-b.stopReceivingChan:
+ close(deliveries)
return
case <-pool:
task, _ := b.nextTask(getQueue(b.GetConfig(), taskProcessor))
@@ -246,7 +247,10 @@ func (b *Broker) consume(deliveries <-chan []byte, concurrency int, taskProcesso
select {
case err := <-errorsChan:
return err
- case d := <-deliveries:
+ case d, open := <-deliveries:
+ if !open {
+ return nil
+ }
if concurrency > 0 {
// get execution slot from pool (blocks until one is available)
<-pool
@@ -268,8 +272,6 @@ func (b *Broker) consume(deliveries <-chan []byte, concurrency int, taskProcesso
pool <- struct{}{}
}
}()
- case <-b.Broker.GetStopChan():
- return nil
}
}
}
|
for Redis broker: solving waitgroup panic on Worker.Quit()
|
RichardKnop_machinery
|
train
|
401ee08bdde3eec80c3804c0d4ecb76ecdfb03a9
|
diff --git a/lib/clusters.py b/lib/clusters.py
index <HASH>..<HASH> 100644
--- a/lib/clusters.py
+++ b/lib/clusters.py
@@ -31,52 +31,109 @@
... print cluster
"""
-__all__ = ["ClusterFactory"]
+
+__all__ = ["Cluster", "RuleCluster", "ClusterFactory"]
+
+
+class Cluster(object):
+ """A set of related items
+
+ This is the most elementary implementation of a cluster. In practice
+ on is often interested in extending the functionality of a cluster.
+ """
+ def __init__(self, items):
+ """Initialize a Cluster object"""
+ self.items = set(items)
+
+ def add_item(self, item):
+ """Add an item to a cluster"""
+ self.items.add(item)
+
+ def update(self, other):
+ """Merge another cluster into this cluster"""
+ self.items |= other.items
+
+
+class RuleCluster(Cluster):
+ """Clusters based on rules
+
+ This is a typical derived Cluster class where the relation between the
+ items is one or more rules, which one would like to know at the end of
+ the clustering algorithm.
+
+ An example application is the shake algorithm where it is beneficial
+ to group constraints that share certain degrees of freedom into a cluster
+ of equations.
+ """
+ def __init__(self, items, rules=None):
+ """Initialize a RuleCluster object"""
+ Cluster.__init__(self, items)
+ if rules is None:
+ self.rules = []
+ else:
+ self.rules = rules
+
+ def update(self, other):
+ """Extend the current cluster with data from another cluster"""
+ Cluster.update(self, other)
+ self.rules.extend(other.rules)
class ClusterFactory(object):
"""A very basic cluster algorithm"""
- def __init__(self):
+ def __init__(self, cls=Cluster):
"""Initialize a ClusterFactory"""
+ self.cls = cls
# mapping: item -> cluster. Each cluster is a tuple of related items.
self.lookup = {}
- def add_related(self, *group):
+ def add_related(self, *objects):
"""Add related items
+ The arguments can be individual items or cluster objects containing
+ several items.
+
When two groups of related items share one or more common members,
they will be merged into one cluster.
"""
- master = None # this will become the common cluster of all related group
+ master = None # this will become the common cluster of all related items
slaves = set([]) # set of clusters that are going to be merged in the master
solitaire = set([]) # set of new items that are not yet part of a cluster
- for new in group:
- cluster = self.lookup.get(new)
- if cluster is None:
- #print "solitaire", new
- solitaire.add(new)
- elif master is None:
- #print "starting master", new
- master = cluster
- elif master != cluster:
- #print "in slave", new
- slaves.add(cluster)
- #else:
- ##nothing to do
- #print "new in master", new
+ for new in objects:
+ if isinstance(new, self.cls):
+ if master is None:
+ master = new
+ else:
+ slaves.add(new)
+ for item in new.items:
+ existing = self.lookup.get(item)
+ if existing is not None:
+ slaves.add(existing)
+ else:
+ cluster = self.lookup.get(new)
+ if cluster is None:
+ #print "solitaire", new
+ solitaire.add(new)
+ elif master is None:
+ #print "starting master", new
+ master = cluster
+ elif master != cluster:
+ #print "in slave", new
+ slaves.add(cluster)
+ #else:
+ ##nothing to do
+ #print "new in master", new
if master is None:
- master = []
- else:
- master = list(master)
+ master = self.cls([])
for slave in slaves:
- master.extend(slave)
- master.extend(solitaire)
- master = tuple(master)
+ master.update(slave)
+ for item in solitaire:
+ master.add_item(item)
- for item in master:
+ for item in master.items:
self.lookup[item] = master
def get_clusters(self):
diff --git a/test/clusters.py b/test/clusters.py
index <HASH>..<HASH> 100644
--- a/test/clusters.py
+++ b/test/clusters.py
@@ -19,7 +19,7 @@
# --
-from molmod.clusters import ClusterFactory
+from molmod.clusters import *
import numpy, unittest
@@ -40,13 +40,25 @@ class ClusterTestCase(unittest.TestCase):
clusters = cf.get_clusters()
complete = set([])
for cluster in clusters:
- tmp = numpy.array(cluster) % 2
+ tmp = numpy.array(list(cluster.items)) % 2
self.assert_((tmp == 0).all() or (tmp == 1).all())
- counter += len(cluster)
- complete |= set(cluster)
+ counter += len(cluster.items)
+ complete |= cluster.items
self.assertEqual(counter, len(complete))
-
-
+ def test_rule_cluster(self):
+ cf = ClusterFactory(RuleCluster)
+ cf.add_related(RuleCluster(["x", "y"], ["x+y=1"]))
+ cf.add_related(RuleCluster(["x", "z"], ["x*z=2"]))
+ cf.add_related(RuleCluster(["u", "v"], ["u=v"]))
+ clusters = list(cf.get_clusters())
+ self.assertEqual(len(clusters), 2)
+ clusters.sort(lambda x,y: cmp(len(x.items),len(y.items)))
+ for cluster in clusters:
+ cluster.rules.sort()
+ self.assertEqual(clusters[0].items, set(["u", "v"]))
+ self.assertEqual(clusters[0].rules, ["u=v"])
+ self.assertEqual(clusters[1].items, set(["x", "y", "z"]))
+ self.assertEqual(clusters[1].rules, ["x*z=2", "x+y=1"])
|
Support for custom clusters in clusters.py
|
molmod_molmod
|
train
|
bffcd41ba9944b7bc16de3c95d6572ab105e6b74
|
diff --git a/lwr/managers/base.py b/lwr/managers/base.py
index <HASH>..<HASH> 100644
--- a/lwr/managers/base.py
+++ b/lwr/managers/base.py
@@ -152,7 +152,7 @@ class DirectoryBaseManager(BaseManager):
return self._job_directory(job_id).read_file(name, **kwds)
def _write_job_file(self, job_id, name, contents):
- self._job_directory(job_id).write_file(name, contents)
+ return self._job_directory(job_id).write_file(name, contents)
def _write_return_code(self, job_id, return_code):
self._write_job_file(job_id, JOB_FILE_RETURN_CODE, str(return_code))
diff --git a/lwr/managers/queued_condor.py b/lwr/managers/queued_condor.py
index <HASH>..<HASH> 100644
--- a/lwr/managers/queued_condor.py
+++ b/lwr/managers/queued_condor.py
@@ -52,6 +52,8 @@ class CondorQueueManager(ExternalBaseManager):
s_out = 'Failed to find job id from condor_submit'
else:
external_id = match.group(1)
+ else:
+ raise Exception("condor_submit failed - %s" % s_out)
self._register_external_id(job_id, external_id)
def __condor_user_log(self, job_id):
@@ -64,8 +66,10 @@ class CondorQueueManager(ExternalBaseManager):
pass
def get_status(self, job_id):
- external_id = self._get_external_id(job_id)
- log_path = self.__condor_user_log(id)
+ external_id = self._external_id(job_id)
+ if not external_id:
+ raise Exception("Failed to obtain external_id for job_id %s, cannot determine status." % job_id)
+ log_path = self.__condor_user_log(job_id)
if not exists(log_path):
return 'complete'
if external_id not in self.user_log_sizes:
diff --git a/lwr/util.py b/lwr/util.py
index <HASH>..<HASH> 100644
--- a/lwr/util.py
+++ b/lwr/util.py
@@ -174,6 +174,7 @@ class JobDirectory(object):
job_file.write(contents)
finally:
job_file.close()
+ return path
def remove_file(self, name):
"""
diff --git a/test/integration_test.py b/test/integration_test.py
index <HASH>..<HASH> 100644
--- a/test/integration_test.py
+++ b/test/integration_test.py
@@ -27,7 +27,10 @@ class IntegrationTest(TempDirectoryTestCase):
self.__run(app_conf={"private_key": "testtoken"}, private_token="testtoken", transport="curl", cache=False, test_errors=True)
def test_integration_drmaa(self):
- self.__run(job_conf_props={'type': 'queued_drmaa'}, private_token=None, transport=None, cache=False, test_errors=False)
+ self.__run(app_conf={}, job_conf_props={'type': 'queued_drmaa'}, private_token=None, transport=None, cache=False, test_errors=False)
+
+ def test_integration_condor(self):
+ self.__run(app_conf={}, job_conf_props={'type': 'queued_condor'}, private_token=None, transport=None, cache=False, test_errors=False)
def __run(self, app_conf={}, job_conf_props={}, **kwds):
kwds["suppress_output"] = True
|
Condor bug fixes. Added condor manager test to integration suite. Condor job manager now seem to work properly!
|
galaxyproject_pulsar
|
train
|
950374e19b5bf0a1f6b80be23f572aa9b1ac6f14
|
diff --git a/benchmarks/src/test/java/jetbrains/exodus/benchmark/env/LogBenchmarks.java b/benchmarks/src/test/java/jetbrains/exodus/benchmark/env/LogBenchmarks.java
index <HASH>..<HASH> 100644
--- a/benchmarks/src/test/java/jetbrains/exodus/benchmark/env/LogBenchmarks.java
+++ b/benchmarks/src/test/java/jetbrains/exodus/benchmark/env/LogBenchmarks.java
@@ -16,6 +16,7 @@
package jetbrains.exodus.benchmark.env;
import jetbrains.exodus.core.dataStructures.LongArrayList;
+import jetbrains.exodus.core.dataStructures.ObjectCacheBase;
import jetbrains.exodus.log.LogTestsBase;
import jetbrains.exodus.log.LoggableToWrite;
import jetbrains.exodus.util.Random;
@@ -103,15 +104,10 @@ public class LogBenchmarks extends LogTestsBase {
}
long readTime = System.currentTimeMillis() - start;
- System.out.println(percent + "% of memory for cache: random read took " + readTime + ". Cache hit rate: " + hitRateString(getLog().getCacheHitRate()));
+ System.out.println(percent + "% of memory for cache: random read took " + readTime + ". Cache hit rate: " + ObjectCacheBase.formatHitRate(getLog().getCacheHitRate()));
if (myMessenger != null) {
myMessenger.putValue(valueName + "_randomRead", readTime);
}
System.out.println();
}
-
- private String hitRateString(double hr) {
- int result = (int) (hr * 1000);
- return String.valueOf((result / 10)) + '.' + (result % 10) + '%';
- }
}
diff --git a/environment/src/main/java/jetbrains/exodus/env/EnvironmentImpl.java b/environment/src/main/java/jetbrains/exodus/env/EnvironmentImpl.java
index <HASH>..<HASH> 100644
--- a/environment/src/main/java/jetbrains/exodus/env/EnvironmentImpl.java
+++ b/environment/src/main/java/jetbrains/exodus/env/EnvironmentImpl.java
@@ -17,6 +17,7 @@ package jetbrains.exodus.env;
import jetbrains.exodus.BackupStrategy;
import jetbrains.exodus.ExodusException;
+import jetbrains.exodus.core.dataStructures.ObjectCacheBase;
import jetbrains.exodus.core.dataStructures.Pair;
import jetbrains.exodus.gc.GarbageCollector;
import jetbrains.exodus.log.Log;
@@ -258,21 +259,24 @@ public class EnvironmentImpl implements Environment {
// in order to avoid deadlock, do not finish gc inside lock
// it is safe to invoke gc.finish() several times
gc.finish();
- final double hitRate;
+ final double logCacheHitRate;
+ final double storeGetCacheHitRate;
synchronized (commitLock) {
if (!isOpen()) {
throw new IllegalStateException("Already closed, see cause for previous close stack trace", throwableOnClose);
}
checkInactive(ec.getEnvCloseForcedly());
gc.saveUtilizationProfile();
- hitRate = log.getCacheHitRate() * 100;
+ logCacheHitRate = log.getCacheHitRate();
log.close();
+ storeGetCacheHitRate = storeGetCache.hitRate();
throwableOnClose = new Throwable();
throwableOnCommit = EnvironmentClosedException.INSTANCE;
}
runAllTransactionSafeTasks();
if (logging.isInfoEnabled()) {
- logging.info("Exodus log cache hit rate: " + hitRate + '%');
+ logging.info("Exodus log cache hit rate: " + ObjectCacheBase.formatHitRate(logCacheHitRate));
+ logging.info("Store get cache hit rate: " + ObjectCacheBase.formatHitRate(storeGetCacheHitRate));
}
}
diff --git a/environment/src/main/java/jetbrains/exodus/env/StoreGetCache.java b/environment/src/main/java/jetbrains/exodus/env/StoreGetCache.java
index <HASH>..<HASH> 100644
--- a/environment/src/main/java/jetbrains/exodus/env/StoreGetCache.java
+++ b/environment/src/main/java/jetbrains/exodus/env/StoreGetCache.java
@@ -42,6 +42,10 @@ class StoreGetCache {
cache.cacheObject(new KeyEntry(treeRootAddress, key), value);
}
+ double hitRate() {
+ return cache.hitRate();
+ }
+
private static class KeyEntry {
private final long treeRootAddress;
diff --git a/utils/src/main/java/jetbrains/exodus/core/dataStructures/ObjectCacheBase.java b/utils/src/main/java/jetbrains/exodus/core/dataStructures/ObjectCacheBase.java
index <HASH>..<HASH> 100644
--- a/utils/src/main/java/jetbrains/exodus/core/dataStructures/ObjectCacheBase.java
+++ b/utils/src/main/java/jetbrains/exodus/core/dataStructures/ObjectCacheBase.java
@@ -101,4 +101,14 @@ public abstract class ObjectCacheBase<K, V> {
public abstract V getObject(@NotNull final K key);
public abstract int count();
+
+ /**
+ * Formats hit rate in percent with one decimal place.
+ *
+ * @param hitRate hit rate value in the interval [0..1]
+ */
+ public static String formatHitRate(final double hitRate) {
+ final int result = (int) (hitRate * 1000);
+ return String.valueOf((result / 10)) + '.' + (result % 10) + '%';
+ }
}
|
on closing Environment, logging StoreGetCache hit rate
|
JetBrains_xodus
|
train
|
ea6a062fec4b15b42e1276dc2711d72df8bb2e8c
|
diff --git a/gnupg/_meta.py b/gnupg/_meta.py
index <HASH>..<HASH> 100644
--- a/gnupg/_meta.py
+++ b/gnupg/_meta.py
@@ -931,7 +931,7 @@ class GPGBase(object):
>>> encrypted = str(gpg.encrypt(message, key.fingerprint))
>>> assert encrypted != message
>>> assert not encrypted.isspace()
- >>> decrypted = str(gpg.decrypt(encrypted))
+ >>> decrypted = str(gpg.decrypt(encrypted, passphrase='foo'))
>>> assert not decrypted.isspace()
>>> decrypted
'The crow flies at midnight.'
|
similar doc update
I suspect this example needs the similar fix
|
isislovecruft_python-gnupg
|
train
|
de17527022bdd7d557c5413ceae98886e3ee757d
|
diff --git a/blocks/dock.js b/blocks/dock.js
index <HASH>..<HASH> 100644
--- a/blocks/dock.js
+++ b/blocks/dock.js
@@ -836,7 +836,7 @@ M.core_dock.genericblock.prototype = {
}
// Must set the image src seperatly of we get an error with XML strict headers
- var moveto = Y.Node.create('<input type="image" class="moveto customcommand requiresjs" title="'+M.str.block.addtodock+'" alt="'+
+ var moveto = Y.Node.create('<input type="image" class="moveto customcommand requiresjs" alt="'+M.str.block.addtodock+'" title="'+
Y.Escape.html(M.util.get_string('dockblock', 'block', node.one('.header .title h2').getHTML())) +'" />');
var icon = 't/block_to_dock';
if (right_to_left()) {
|
MDL-<I>: Be consistent with use of alt/title.
Re: Block dock/undock links.
|
moodle_moodle
|
train
|
e5e8bd54fa0e3da101f9f1dcbf1b8c9c8b43b86e
|
diff --git a/functions.php b/functions.php
index <HASH>..<HASH> 100644
--- a/functions.php
+++ b/functions.php
@@ -1007,14 +1007,28 @@ function pods_shortcode ( $tags, $content = null ) {
'thank_you' => null
);
- $tags = array_merge( $defaults, $tags );
+ if ( !empty( $tags ) )
+ $tags = array_merge( $defaults, $tags );
+ else
+ $tags = $defaults;
+
$tags = apply_filters( 'pods_shortcode', $tags );
if ( empty( $content ) )
$content = null;
if ( empty( $tags[ 'name' ] ) ) {
- return '<p>Please provide a Pod name</p>';
+ if ( in_the_loop() || is_singular() ) {
+ $pod = pods( get_post_type(), get_the_ID(), false );
+
+ if ( !empty( $pod ) ) {
+ $tags[ 'name' ] = get_post_type();
+ $tags[ 'id' ] = get_the_ID();
+ }
+ }
+
+ if ( empty( $tags[ 'name' ] ) )
+ return '<p>Please provide a Pod name</p>';
}
if ( !empty( $tags[ 'col' ] ) ) {
@@ -1043,7 +1057,8 @@ function pods_shortcode ( $tags, $content = null ) {
$id = absint( $id );
}
- $pod = pods( $tags[ 'name' ], $id );
+ if ( !isset( $pod ) )
+ $pod = pods( $tags[ 'name' ], $id );
$found = 0;
|
Oh hey, shortcodes [pods]content[/pods] in Pods post types content
|
pods-framework_pods
|
train
|
096911376b24db4c144e91731e1b41bc97fdd951
|
diff --git a/lib/date.js b/lib/date.js
index <HASH>..<HASH> 100644
--- a/lib/date.js
+++ b/lib/date.js
@@ -39,10 +39,7 @@ const addMonthsWithoutValidation = (months, date) => {
date.getMilliseconds()
)
const daysInMonth = monthDays(finalDate.getMonth(), finalDate.getFullYear())
- finalDate.setMonth(
- finalDate.getMonth(),
- Math.min(daysInMonth, date.getDate())
- )
+ finalDate.setDate(Math.min(daysInMonth, date.getDate()))
return finalDate
}
|
:sparkles: Change a little addMOnths implementation
|
gugutz_vanilla-commons
|
train
|
49adacd9f53c8a8e7fb17946127a17f675f44337
|
diff --git a/lib/plugins/aws/deploy/lib/checkForChanges.js b/lib/plugins/aws/deploy/lib/checkForChanges.js
index <HASH>..<HASH> 100644
--- a/lib/plugins/aws/deploy/lib/checkForChanges.js
+++ b/lib/plugins/aws/deploy/lib/checkForChanges.js
@@ -151,7 +151,7 @@ module.exports = {
const region = this.provider.getRegion();
const cloudWatchLogsSdk = new this.provider.sdk.CloudWatchLogs({ region });
- return this.provider.getAccountId().then(accountId =>
+ return this.provider.getAccountInfo().then(account =>
Promise.all(
this.serverless.service.getAllFunctions().map(functionName => {
const functionObj = this.serverless.service.getFunction(functionName);
@@ -177,6 +177,9 @@ module.exports = {
logGroupName = event.cloudwatchLog.replace(/\r?\n/g, '');
}
+ const accountId = account.accountId;
+ const partition = account.partition;
+
/*
return a new promise that will check the resource limit exceeded and will fix it if
the option is enabled
@@ -187,6 +190,7 @@ module.exports = {
logGroupName,
functionObj,
region,
+ partition,
});
});
@@ -202,6 +206,7 @@ module.exports = {
const logGroupName = params.logGroupName;
const functionObj = params.functionObj;
const region = params.region;
+ const partition = params.partition;
return (
cloudWatchLogsSdk
@@ -217,7 +222,7 @@ module.exports = {
const oldDestinationArn = subscriptionFilter.destinationArn;
const filterName = subscriptionFilter.filterName;
- const newDestinationArn = `arn:aws:lambda:${region}:${accountId}:function:${functionObj.name}`;
+ const newDestinationArn = `arn:${partition}:lambda:${region}:${accountId}:function:${functionObj.name}`;
// everything is fine, just return
if (oldDestinationArn === newDestinationArn) {
diff --git a/lib/plugins/aws/deploy/lib/checkForChanges.test.js b/lib/plugins/aws/deploy/lib/checkForChanges.test.js
index <HASH>..<HASH> 100644
--- a/lib/plugins/aws/deploy/lib/checkForChanges.test.js
+++ b/lib/plugins/aws/deploy/lib/checkForChanges.test.js
@@ -514,7 +514,10 @@ describe('checkForChanges', () => {
provider.sdk.CloudWatchLogs = CloudWatchLogsStub;
- sandbox.stub(provider, 'getAccountId').returns(BbPromise.resolve(accountId));
+ sandbox.stub(provider, 'getAccountInfo').returns(BbPromise.resolve({
+ accountId,
+ partition: 'aws',
+ }));
sandbox.stub(awsDeploy.serverless.service, 'getServiceName').returns(serviceName);
@@ -658,6 +661,35 @@ describe('checkForChanges', () => {
.then(() => expect(deleteSubscriptionFilterStub).to.not.have.been.called);
});
+ it('should not call delete if there is a subFilter and the ARNs are the same with custom function name accounting for non-standard partitions', () => {
+ provider.getAccountInfo.restore()
+ sandbox.stub(provider, 'getAccountInfo').returns(BbPromise.resolve({
+ accountId,
+ partition: 'aws-us-gov',
+ }));
+ awsDeploy.serverless.service.functions = {
+ first: {
+ name: 'my-test-function',
+ events: [{ cloudwatchLog: '/aws/lambda/hello1' }],
+ },
+ };
+
+ awsDeploy.serverless.service.setFunctionNames();
+
+ describeSubscriptionFiltersResponse = {
+ subscriptionFilters: [
+ {
+ destinationArn: `arn:aws-us-gov:lambda:${region}:${accountId}:function:my-test-function`,
+ filterName: 'dummy-filter',
+ },
+ ],
+ };
+
+ return awsDeploy
+ .checkForChanges()
+ .then(() => expect(deleteSubscriptionFilterStub).to.not.have.been.called);
+ });
+
it('should call delete if there is a subFilter but the ARNs are not the same with custom function name', () => {
awsDeploy.serverless.service.functions = {
first: {
|
Remove hard coded partition when validating cloudwatch subscription filters
|
serverless_serverless
|
train
|
b3ecb4caccab6f40098f52a3c89d50ba6998a057
|
diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -2,13 +2,5 @@
Is a mongoose plugin that automatically keeps track of when the document has been created, updated and optionally when some fields has been changed
# TODO
-* some fields should be declared trackable to obtain something like
- ```
- updates: [
- {
- field: "status",
- changedTo: "ready",
- at: ISODate("2013-12-15T16:33:25.056Z")
- }
- ]
- ```
+* prevent to track field `__updates`
+* skiptToTrackDocumentUpdates vs skipToTrackFieldsUpdates
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -4,20 +4,40 @@ module.exports = function(schema, options) {
options = _.extend(
{ createdAt: 'createdAt',
updatedAt: 'updatedAt',
- skipToTrackUpdates: false
+ skipToTrackUpdates: false,
+ fieldsToTrack: []
},
options || {}
)
+ options.fieldsToTrack = [].concat(options.fieldsToTrack)
+
schema.add((function(fields) {
fields[options.createdAt] = {type: Date, default: function() {return new Date()}}
fields[options.updatedAt] = {type: Date, default: function() {return new Date()}}
+ if (options.fieldsToTrack.length > 0) {
+ fields['__updates'] = {type: Array}
+ }
return fields
})({}))
- schema.pre('save', function (next) {
+ schema.pre('save', function(next) {
+ var doc = this, now = new Date()
+
if (!options.skipToTrackUpdates) {
- this[options.updatedAt] = new Date();
+ doc.set(options.updatedAt, now)
+
+ if (doc.isModified()) {
+ options.fieldsToTrack.forEach(function(field) {
+ if (doc.isNew || doc.isModified(field)) {
+ doc.get('__updates').push({
+ field: field,
+ changedTo: doc[field],
+ at: now
+ })
+ }
+ })
+ }
}
next();
});
diff --git a/spec/acceptance.js b/spec/acceptance.js
index <HASH>..<HASH> 100644
--- a/spec/acceptance.js
+++ b/spec/acceptance.js
@@ -1,7 +1,8 @@
var chai = require('chai').use(require('chai-datetime')),
expect = chai.expect,
mongoose = require('mongoose'),
- trackable = require('./../')
+ trackable = require('./../'),
+ _ = require('lodash')
describe('mongoose-trackable', function() {
before(function(done) {
@@ -17,8 +18,8 @@ describe('mongoose-trackable', function() {
describe('plugged into a mongoose.Schema', function() {
before(function() {
- this.modelWithTrackablePlugin = function(name, options) {
- return mongoose.model(name, new mongoose.Schema().plugin(trackable, options))
+ this.modelWithTrackablePlugin = function(name, options, schema) {
+ return mongoose.model(name, new mongoose.Schema(schema || {}).plugin(trackable, options))
}
})
@@ -85,5 +86,47 @@ describe('mongoose-trackable', function() {
done()
})
})
+
+ it('could track changes of a field', function(done) {
+ this.modelWithTrackablePlugin('TrackableWithTrackedField', {fieldsToTrack: 'status'}, {status: 'string'})
+ .create({status: 'started'}, function(err, doc) {
+ expect(doc).to.have.property('status', 'started')
+ expect(doc).to.have.property('__updates')
+ expect(_.pluck(doc.__updates, 'changedTo')).to.be.eql(['started'])
+
+ doc.set('status', 'closed')
+ doc.save(function(err, doc) {
+ expect(_.pluck(doc.__updates, 'changedTo')).to.be.eql(['started', 'closed'])
+ done()
+ })
+ })
+ })
+
+ it('could track changes of multiple fields', function(done) {
+ this.modelWithTrackablePlugin(
+ 'TrackableWithTrackedFields',
+ {fieldsToTrack: ['status', 'location']},
+ {status: 'string', location: 'string'}
+ ).create({status: 'shipped', location: 'Chicago'}, function(err, doc) {
+ expect(doc).to.have.property('status', 'shipped')
+ expect(doc).to.have.property('location', 'Chicago')
+
+ var changesToStatusField = _.chain(doc.__updates).filter(function(u) {return u.field === 'status'}).pluck('changedTo').valueOf()
+ var changesToLocationField = _.chain(doc.__updates).filter(function(u) {return u.field === 'location'}).pluck('changedTo').valueOf()
+
+ expect(changesToStatusField).to.eql(['shipped'])
+ expect(changesToLocationField).to.eql(['Chicago'])
+ done()
+ })
+ })
+
+ it('doesn\'t track fields that are not to track', function(done) {
+ this.modelWithTrackablePlugin('TrackableWithNotTrackedField', {}, {status: 'string'})
+ .create({status: 'started'}, function(err, doc) {
+ expect(doc).to.have.property('status')
+ expect(doc).to.not.have.property('__updates')
+ done()
+ })
+ })
})
})
|
Track change of fields
with the option `fieldsToTrack` you could track changes to some fields,
for each field a record in `__updates` field is recorded, something like
```json
{ "__updates": [
{ "field": "status",
"changedTo": "started",
"at": "Fri Jan <I> <I> <I>:<I>:<I> GMT<I>"
}
}
```
|
gabrielelana_mongoose-trackable
|
train
|
70ba7ae695d474a271fdd9b6130b575028119609
|
diff --git a/flake8_coding.py b/flake8_coding.py
index <HASH>..<HASH> 100644
--- a/flake8_coding.py
+++ b/flake8_coding.py
@@ -18,12 +18,14 @@ class CodingChecker(object):
'--accept-encodings', default='latin-1, utf-8', action='store',
help="Acceptable source code encodings for `coding:` magic comment"
)
- parser.config_options.append('accept-encodings')
parser.add_option(
'--no-accept-encodings', action='store_true',
help="Warn for files containing a `coding:` magic comment"
)
- parser.config_options.append('no-accept-encodings')
+
+ if hasattr(parser, 'config_options'): # for flake8 < 3.0
+ parser.config_options.append('accept-encodings')
+ parser.config_options.append('no-accept-encodings')
@classmethod
def parse_options(cls, options):
@@ -33,7 +35,12 @@ class CodingChecker(object):
cls.encodings = [e.strip().lower() for e in options.accept_encodings.split(',')]
def read_headers(self):
- import pep8
+ try:
+ # flake8 >= v3.0
+ import pycodestyle as pep8
+ except ImportError:
+ import pep8
+
if self.filename in ('stdin', '-', None):
return pep8.stdin_get_value().splitlines(True)[:2]
else:
diff --git a/run_tests.py b/run_tests.py
index <HASH>..<HASH> 100644
--- a/run_tests.py
+++ b/run_tests.py
@@ -1,10 +1,12 @@
# -*- coding: utf-8 -*-
-import sys
import unittest
from mock import patch
+from collections import namedtuple
from flake8_coding import CodingChecker
-from flake8.engine import get_style_guide
+
+
+Options = namedtuple('Options', 'no_accept_encodings, accept_encodings')
class TestFlake8Coding(unittest.TestCase):
@@ -71,42 +73,49 @@ class TestFlake8Coding(unittest.TestCase):
self.assertEqual(ret[0][1], 0)
self.assertTrue(ret[0][2].startswith('C101 '))
- @patch.object(sys, 'argv', [])
def test_default_encoding(self):
try:
- get_style_guide(parse_argv=True) # parse arguments
+ options = Options(False, 'latin-1, utf-8')
+ CodingChecker.parse_options(options)
self.assertEqual(CodingChecker.encodings, ['latin-1', 'utf-8'])
finally:
if hasattr(CodingChecker, 'encodings'):
del CodingChecker.encodings
- @patch.object(sys, 'argv', ['', '--accept-encodings=utf-8,utf-16'])
def test_change_encoding(self):
try:
- get_style_guide(parse_argv=True) # parse arguments
+ options = Options(False, 'utf-8,utf-16')
+ CodingChecker.parse_options(options)
self.assertEqual(CodingChecker.encodings, ['utf-8', 'utf-16'])
finally:
if hasattr(CodingChecker, 'encodings'):
del CodingChecker.encodings
- @patch('pep8.stdin_get_value')
- def test_stdin(self, stdin_get_value):
- with open('testsuite/nocodings.py') as fp:
- stdin_get_value.return_value = fp.read()
-
- for input in ['stdin', '-', None]:
- checker = CodingChecker(None, input)
- checker.encodings = ['latin-1', 'utf-8']
- ret = list(checker.run())
- self.assertEqual(len(ret), 1)
- self.assertEqual(ret[0][0], 0)
- self.assertEqual(ret[0][1], 0)
- self.assertTrue(ret[0][2].startswith('C101 '))
-
- @patch.object(sys, 'argv', ['', '--no-accept-encodings'])
+ def test_stdin(self):
+ try:
+ import pycodestyle as pep8 # noqa
+ target = 'pycodestyle.stdin_get_value'
+ except ImportError:
+ import pep8 # noqa
+ target = 'pep8.stdin_get_value'
+
+ with patch(target) as stdin_get_value:
+ with open('testsuite/nocodings.py') as fp:
+ stdin_get_value.return_value = fp.read()
+
+ for input in ['stdin', '-', None]:
+ checker = CodingChecker(None, input)
+ checker.encodings = ['latin-1', 'utf-8']
+ ret = list(checker.run())
+ self.assertEqual(len(ret), 1)
+ self.assertEqual(ret[0][0], 0)
+ self.assertEqual(ret[0][1], 0)
+ self.assertTrue(ret[0][2].startswith('C101 '))
+
def test_no_accept_encodings_sets_encodings_none(self):
try:
- get_style_guide(parse_argv=True) # parse arguments
+ options = Options(True, 'latin-1,utf-8')
+ CodingChecker.parse_options(options)
self.assertTrue(CodingChecker.encodings is None)
finally:
if hasattr(CodingChecker, 'encodings'):
|
Fix does not work with flake8-<I>
|
tk0miya_flake8-coding
|
train
|
984d8a5c578d35628522be2e86f419f4ed8aff37
|
diff --git a/gulpfile.js b/gulpfile.js
index <HASH>..<HASH> 100644
--- a/gulpfile.js
+++ b/gulpfile.js
@@ -19,13 +19,23 @@ const paths = {
build: 'build'
};
+function logError(err) {
+ if (err instanceof SyntaxError) {
+ console.error('Syntax Error:');
+ console.error(err.message);
+ console.error(err.codeFrame);
+ } else {
+ console.error(err.message);
+ }
+}
+
function handleError(err) {
- console.error(err);
+ logError(err);
this.emit('end');
}
function handleErrorTimeout(err) {
- console.error(err);
+ logError(err);
setTimeout(() => {
// set delay for full mocha error message
this.emit('end');
|
gulpfile error logging change.
|
unchartedsoftware_lumo
|
train
|
c65df4fba17101e60e8c31f378f6001b514e5a42
|
diff --git a/scripts/install.js b/scripts/install.js
index <HASH>..<HASH> 100755
--- a/scripts/install.js
+++ b/scripts/install.js
@@ -47,9 +47,10 @@ function shouldRenderProgressBar() {
const silentFlag = process.argv.some((v) => v === '--silent');
const silentConfig = process.env.npm_config_loglevel === 'silent';
const silentEnv = process.env.SENTRYCLI_NO_PROGRESS_BAR;
- const ciEnv = process.env.CI === 'true';
+ const ciEnv = process.env.CI === 'true' || process.env.CI === '1';
+ const notTTY = !process.stdout.isTTY;
// If any of possible options is set, skip rendering of progress bar
- return !(silentFlag || silentConfig || silentEnv || ciEnv);
+ return !(silentFlag || silentConfig || silentEnv || ciEnv || notTTY);
}
function getDownloadUrl(platform, arch) {
|
ref: Dont print install progressbar for nonTTY and CI=1 (#<I>)
|
getsentry_sentry-cli
|
train
|
9c4e46a6bd6f0ad98bf497ba5152d69cb4181f77
|
diff --git a/polyaxon/polyaxon/config_settings/celery_settings.py b/polyaxon/polyaxon/config_settings/celery_settings.py
index <HASH>..<HASH> 100644
--- a/polyaxon/polyaxon/config_settings/celery_settings.py
+++ b/polyaxon/polyaxon/config_settings/celery_settings.py
@@ -11,6 +11,8 @@ RABBITMQ_USER = config.get_string('POLYAXON_RABBITMQ_USER', is_optional=True)
RABBITMQ_PASSWORD = config.get_string('POLYAXON_RABBITMQ_PASSWORD',
is_secret=True,
is_optional=True)
+BROKER_POOL_LIMIT = None
+
if RABBITMQ_USER and RABBITMQ_PASSWORD:
CELERY_BROKER_URL = 'amqp://{user}:{password}@{url}'.format(
user=RABBITMQ_USER,
diff --git a/polyaxon/publisher/service.py b/polyaxon/publisher/service.py
index <HASH>..<HASH> 100644
--- a/polyaxon/publisher/service.py
+++ b/polyaxon/publisher/service.py
@@ -60,6 +60,7 @@ class PublisherService(Service):
'task_type': task_type,
'task_idx': task_idx
},
+ retry=True,
routing_key='{}.{}.{}'.format(RoutingKeys.LOGS_SIDECARS_EXPERIMENTS,
experiment_uuid,
job_uuid),
@@ -83,6 +84,7 @@ class PublisherService(Service):
'job_uuid': job_uuid,
'log_lines': log_lines,
},
+ retry=True,
routing_key='{}.{}'.format(routing_key, job_uuid),
exchange=settings.INTERNAL_EXCHANGE,
)
|
Fix issue connection reset by peer related to celery/celery#<I>
|
polyaxon_polyaxon
|
train
|
91438198b7295f0460c9e701d0677fe8dddf950c
|
diff --git a/lib/octopress-ink/assets/asset.rb b/lib/octopress-ink/assets/asset.rb
index <HASH>..<HASH> 100644
--- a/lib/octopress-ink/assets/asset.rb
+++ b/lib/octopress-ink/assets/asset.rb
@@ -33,10 +33,6 @@ module Octopress
@plugin.disabled?(@base, filename)
end
- def disable
- @disabled = true
- end
-
def path
if @found_file and !@no_cache
@found_file
|
Removed unused disable method from asset class
|
octopress_ink
|
train
|
f512caac5b6f30fb6c226b73350d0180bdd41d38
|
diff --git a/shared/version/flex.go b/shared/version/flex.go
index <HASH>..<HASH> 100644
--- a/shared/version/flex.go
+++ b/shared/version/flex.go
@@ -1,10 +1,33 @@
package version
+import (
+ "fmt"
+ "runtime"
+ "strings"
+
+ "github.com/lxc/lxd/shared/osarch"
+)
+
// Version contains the LXD version number
var Version = "2.18"
// UserAgent contains a string suitable as a user-agent
-var UserAgent = "LXD " + Version
+var UserAgent = getUserAgent()
// APIVersion contains the API base version. Only bumped for backward incompatible changes.
var APIVersion = "1.0"
+
+func getUserAgent() string {
+ archID, err := osarch.ArchitectureId(runtime.GOARCH)
+ if err != nil {
+ panic(err)
+ }
+ arch, err := osarch.ArchitectureName(archID)
+ if err != nil {
+ panic(err)
+ }
+
+ tokens := []string{strings.Title(runtime.GOOS), arch}
+ tokens = append(tokens, getPlatformVersionStrings()...)
+ return fmt.Sprintf("LXD %s (%s)", Version, strings.Join(tokens, "; "))
+}
|
shared/version: include OS, architecture and possibly kernel and distro info in User-Agent
|
lxc_lxd
|
train
|
d369640a29766c11365851a164a83fa619cd2b0b
|
diff --git a/src/main/java/eu/fusepool/p3/transformer/client/TransformerClientImpl.java b/src/main/java/eu/fusepool/p3/transformer/client/TransformerClientImpl.java
index <HASH>..<HASH> 100644
--- a/src/main/java/eu/fusepool/p3/transformer/client/TransformerClientImpl.java
+++ b/src/main/java/eu/fusepool/p3/transformer/client/TransformerClientImpl.java
@@ -215,6 +215,7 @@ public class TransformerClientImpl implements Transformer {
private Entity getAsyncResponseEntity(URL url, String acceptHeaderValue) {
//recursive function would be nicer, but this saves memory
+ int counter = 0;
while (true) {
HttpURLConnection connection = null;
try {
@@ -239,6 +240,13 @@ public class TransformerClientImpl implements Transformer {
connection.disconnect();
}
}
+ //Check every half second for the first 5 seconds, Evrey 10 second for the following 10 minutes, every two minutes afterwards
+ final int interval = counter < 10 ? 500 : counter < 70? 10000 : 120000;
+ try {
+ Thread.sleep(interval);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ }
}
}
|
Closes #2: added interval on checking for async results
|
fusepoolP3_p3-transformer-client-library
|
train
|
45e5aae50bb3fb8880031dfa7ecf3049d9ee100c
|
diff --git a/src/reducers/sources.js b/src/reducers/sources.js
index <HASH>..<HASH> 100644
--- a/src/reducers/sources.js
+++ b/src/reducers/sources.js
@@ -68,19 +68,16 @@ function update(
switch (action.type) {
case "UPDATE_SOURCE": {
const source = action.source;
- return updateSource(state, source);
+ return updateSources(state, [source]);
}
case "ADD_SOURCE": {
const source = action.source;
- return updateSource(state, source);
+ return updateSources(state, [source]);
}
case "ADD_SOURCES": {
- return action.sources.reduce(
- (newState, source) => updateSource(newState, source),
- state
- );
+ return updateSources(state, action.sources);
}
case "SET_SELECTED_LOCATION":
@@ -127,7 +124,7 @@ function update(
const { id, url } = action.source;
const { isBlackBoxed } = ((action: any): DonePromiseAction).value;
updateBlackBoxList(url, isBlackBoxed);
- return updateSource(state, { id, isBlackBoxed });
+ return updateSources(state, [{ id, isBlackBoxed }]);
}
break;
@@ -161,8 +158,8 @@ function getTextPropsFromAction(action) {
}
return {
- text: action.value.text,
id: sourceId,
+ text: action.value.text,
contentType: action.value.contentType,
loadedState: "loaded"
};
@@ -173,8 +170,22 @@ function getTextPropsFromAction(action) {
// "start" and "error" states but we don't type it like that. We need
// to rethink how we type async actions.
function setSourceTextProps(state, action: LoadSourceAction): SourcesState {
- const text = getTextPropsFromAction(action);
- return updateSource(state, text);
+ const source = getTextPropsFromAction(action);
+ return updateSources(state, [source]);
+}
+
+function updateSources(state, sources) {
+ state = {
+ ...state,
+ sources: { ...state.sources },
+ relativeSources: { ...state.relativeSources },
+ urls: { ...state.urls }
+ };
+
+ return sources.reduce(
+ (newState, source) => updateSource(newState, source),
+ state
+ );
}
function updateSource(state: SourcesState, source: Object) {
@@ -187,19 +198,20 @@ function updateSource(state: SourcesState, source: Object) {
? { ...existingSource, ...source }
: createSource(source);
+ state.sources[source.id] = updatedSource;
+
const existingUrls = state.urls[source.url];
- const urls = existingUrls ? [...existingUrls, source.id] : [source.id];
+ state.urls[source.url] = existingUrls
+ ? [...existingUrls, source.id]
+ : [source.id];
+
+ updateRelativeSource(
+ state.relativeSources,
+ updatedSource,
+ state.projectDirectoryRoot
+ );
- return {
- ...state,
- relativeSources: updateRelativeSource(
- { ...state.relativeSources },
- updatedSource,
- state.projectDirectoryRoot
- ),
- sources: { ...state.sources, [source.id]: updatedSource },
- urls: { ...state.urls, [source.url]: urls }
- };
+ return state;
}
function updateRelativeSource(
|
[sources] improve updateSource performance (#<I>)
|
firefox-devtools_debugger
|
train
|
29f7eeb17df8636432b804ebf87293afd6bc1a07
|
diff --git a/src/Fracture/Http/Response.php b/src/Fracture/Http/Response.php
index <HASH>..<HASH> 100755
--- a/src/Fracture/Http/Response.php
+++ b/src/Fracture/Http/Response.php
@@ -43,7 +43,7 @@ class Response
}
- public function deleteCookie($name)
+ public function removeCookie($name)
{
unset($this->cookies[$name]);
}
diff --git a/tests/unit/Fracture/Http/ResponseTest.php b/tests/unit/Fracture/Http/ResponseTest.php
index <HASH>..<HASH> 100755
--- a/tests/unit/Fracture/Http/ResponseTest.php
+++ b/tests/unit/Fracture/Http/ResponseTest.php
@@ -25,6 +25,32 @@ class ResponseTest extends PHPUnit_Framework_TestCase
/**
+ * @covers Fracture\Http\Response::getBody
+ * @covers Fracture\Http\Response::setStatusCode
+ * @covers Fracture\Http\Response::getStatusCode
+ */
+ public function testProperStatusCode()
+ {
+ $instance = new Response;
+ $instance->setStatusCode(404);
+ $this->assertSame(404, $instance->getStatusCode());
+ }
+
+
+ /**
+ * @expectedException InvalidArgumentException
+ *
+ * @covers Fracture\Http\Response::getBody
+ * @covers Fracture\Http\Response::setStatusCode
+ */
+ public function testBadStatusCode()
+ {
+ $instance = new Response;
+ $instance->setStatusCode(9999);
+ }
+
+
+ /**
* @covers Fracture\Http\Response::setBody
* @covers Fracture\Http\Response::appendBody
* @covers Fracture\Http\Response::prependBody
@@ -48,6 +74,10 @@ class ResponseTest extends PHPUnit_Framework_TestCase
}
+ /**
+ * @covers Fracture\Http\Response::addHeader
+ * @covers Fracture\Http\Response::getHeaders
+ */
public function testSimpleHeader()
{
$header = $this->getMock('Fracture\Http\Headers\ContentType', ['getName', 'getValue']);
@@ -69,11 +99,12 @@ class ResponseTest extends PHPUnit_Framework_TestCase
}
+ /**
+ * @covers Fracture\Http\Response::addHeader
+ * @covers Fracture\Http\Response::getHeaders
+ */
public function testReplacingHeaderInstance()
{
- $instance = new Response;
-
-
$original = $this->getMock('Fracture\Http\Headers\ContentType', ['getName', 'getValue']);
$original->expects($this->any())
->method('getName')
@@ -83,10 +114,6 @@ class ResponseTest extends PHPUnit_Framework_TestCase
->method('getValue')
->will($this->returnValue('beta'));
-
- $instance->addHeader($original);
-
-
$replacement = $this->getMock('Fracture\Http\Headers\ContentType', ['getName', 'getValue']);
$replacement->expects($this->any())
->method('getName')
@@ -97,6 +124,8 @@ class ResponseTest extends PHPUnit_Framework_TestCase
->will($this->returnValue('gamma'));
+ $instance = new Response;
+ $instance->addHeader($original);
$instance->addHeader($replacement);
@@ -106,6 +135,10 @@ class ResponseTest extends PHPUnit_Framework_TestCase
}
+ /**
+ * @covers Fracture\Http\Response::addCookie
+ * @covers Fracture\Http\Response::getHeaders
+ */
public function testCookieAsHeader()
{
$cookie = $this->getMock('Fracture\Http\Cookie', ['getName', 'getHeaderValue'], [], '', false);
@@ -123,4 +156,25 @@ class ResponseTest extends PHPUnit_Framework_TestCase
'Set-Cookie: alpha=omega; HttpOnly',
], $instance->getHeaders());
}
+
+
+ /**
+ * @covers Fracture\Http\Response::addCookie
+ * @covers Fracture\Http\Response::removeCookie
+ * @covers Fracture\Http\Response::getHeaders
+ */
+ public function testHeaderAfterRemovingCookie()
+ {
+ $cookie = $this->getMock('Fracture\Http\Cookie', ['getName'], [], '', false);
+ $cookie->expects($this->any())
+ ->method('getName')
+ ->will($this->returnValue('alpha'));
+
+ $instance = new Response;
+ $instance->addCookie($cookie);
+
+ $instance->removeCookie('alpha');
+
+ $this->assertEquals([], $instance->getHeaders());
+ }
}
|
response instance should be complete and testes
|
fracture_http
|
train
|
cacccb43318e2902875fc34478e5a46aff6b850f
|
diff --git a/lib/searchkick.rb b/lib/searchkick.rb
index <HASH>..<HASH> 100644
--- a/lib/searchkick.rb
+++ b/lib/searchkick.rb
@@ -10,7 +10,7 @@ require "searchkick/logger" if defined?(Rails)
module Searchkick
- def self.reindex
+ def self.reindex_all
(Searchkick::Reindex.instance_variable_get(:@descendents) || []).each do |model|
model.reindex
end
|
Renamed to Searchkick.reindex_all to avoid confusion
|
ankane_searchkick
|
train
|
68e9f4618bc6432023f8327cfce775618806fa9e
|
diff --git a/spec/integration/knife/chef_fs_data_store_spec.rb b/spec/integration/knife/chef_fs_data_store_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/integration/knife/chef_fs_data_store_spec.rb
+++ b/spec/integration/knife/chef_fs_data_store_spec.rb
@@ -28,6 +28,7 @@ describe "ChefFSDataStore tests", :workstation do
let(:cookbook_x_100_metadata_rb) { cb_metadata("x", "1.0.0") }
let(:cookbook_z_100_metadata_rb) { cb_metadata("z", "1.0.0") }
+ let(:cookbook_y_102_metadata_rb) { cb_metadata("z", "1.0.2") }
describe "with repo mode 'hosted_everything' (default)" do
before do
@@ -39,6 +40,8 @@ describe "ChefFSDataStore tests", :workstation do
file "clients/x.json", {}
file "cookbook_artifacts/x-111/metadata.rb", cookbook_x_100_metadata_rb
file "cookbooks/x/metadata.rb", cookbook_x_100_metadata_rb
+ file "cookbooks/y/metadata.rb", cookbook_y_102_metadata_rb
+ file "cookbooks/z/metadata.rb", cookbook_z_100_metadata_rb
file "data_bags/x/y.json", {}
file "environments/x.json", {}
file "nodes/x.json", {}
@@ -64,6 +67,7 @@ describe "ChefFSDataStore tests", :workstation do
/acls/cookbook_artifacts/x.json
/acls/cookbooks/
/acls/cookbooks/x.json
+/acls/cookbooks/z.json
/acls/data_bags/
/acls/data_bags/x.json
/acls/environments/
@@ -84,11 +88,13 @@ describe "ChefFSDataStore tests", :workstation do
/containers/
/containers/x.json
/cookbook_artifacts/
-/cookbook_artifacts/x-111/
-/cookbook_artifacts/x-111/metadata.rb
+/cookbook_artifacts/x-1.0.0/
+/cookbook_artifacts/x-1.0.0/metadata.rb
/cookbooks/
/cookbooks/x/
/cookbooks/x/metadata.rb
+/cookbooks/z/
+/cookbooks/z/metadata.rb
/data_bags/
/data_bags/x/
/data_bags/x/y.json
@@ -111,6 +117,12 @@ EOM
end
end
+ context "LIST /TYPE/NAME" do
+ it "knife cookbook show -z z" do
+ knife("cookbook show -z z").should_succeed "z 1.0.2 1.0.0\n"
+ end
+ end
+
context "DELETE /TYPE/NAME" do
it "knife delete -z /clients/x.json works" do
knife("delete -z /clients/x.json").should_succeed "Deleted /clients/x.json\n"
@@ -119,7 +131,7 @@ EOM
it "knife delete -z -r /cookbooks/x works" do
knife("delete -z -r /cookbooks/x").should_succeed "Deleted /cookbooks/x\n"
- knife("list -z -Rfp /cookbooks").should_succeed ""
+ knife("list -z -Rfp /cookbooks").should_succeed "/cookbooks/z/\n/cookbooks/z/metadata.rb\n"
end
it "knife delete -z -r /data_bags/x works" do
@@ -194,7 +206,14 @@ EOM
Uploading x [1.0.0]
Uploaded 1 cookbook.
EOM
- knife("list --local -Rfp /cookbooks").should_succeed "/cookbooks/x/\n/cookbooks/x/metadata.rb\n"
+ knife("list --local -Rfp /cookbooks").should_succeed <<EOM
+/cookbooks/x/
+/cookbooks/x/metadata.rb
+/cookbooks/y/
+/cookbooks/y/metadata.rb
+/cookbooks/z/
+/cookbooks/z/metadata.rb
+EOM
end
it "knife raw -z -i empty.json -m PUT /data/x/y" do
|
Updated specs for chef_fs_data_store after switching to use metadata
|
chef_chef
|
train
|
7ed3397b4e35a703fee0164b637412b0f1c63216
|
diff --git a/AlphaTwirl/AlphaTwirl.py b/AlphaTwirl/AlphaTwirl.py
index <HASH>..<HASH> 100644
--- a/AlphaTwirl/AlphaTwirl.py
+++ b/AlphaTwirl/AlphaTwirl.py
@@ -2,7 +2,6 @@
import argparse
import sys
import os
-import itertools
from Configure import TableConfigCompleter
from Configure import EventReaderCollectorAssociatorBuilder
@@ -38,21 +37,21 @@ class ArgumentParser(argparse.ArgumentParser):
return args
##__________________________________________________________________||
-def createTreeReader(progressMonitor, communicationChannel, outDir, force, nevents, analyzerName, fileName, treeName, tableConfigs, eventSelection):
+def buildTableCreators(tableConfigs, outDir, force, progressMonitor):
tableConfigCompleter = TableConfigCompleter(defaultCountsClass = Counts, defaultOutDir = outDir)
tableConfigs = [tableConfigCompleter.complete(c) for c in tableConfigs]
if not force: tableConfigs = [c for c in tableConfigs if c['outFile'] and not os.path.exists(c['outFilePath'])]
-
tableCreatorBuilder = EventReaderCollectorAssociatorBuilder()
tableCreators = EventReaderCollectorAssociatorComposite(progressMonitor.createReporter())
for tblcfg in tableConfigs:
tableCreators.add(tableCreatorBuilder.build(tblcfg))
+ return tableCreators
+##__________________________________________________________________||
+def createTreeReader(analyzerName, fileName, treeName, eventSelection, tableCreators, nevents, communicationChannel):
eventLoopRunner = MPEventLoopRunner(communicationChannel)
-
eventBuilder = EventBuilder(analyzerName, fileName, treeName, nevents)
eventReaderBundle = EventReaderBundle(eventBuilder, eventLoopRunner, tableCreators, eventSelection = eventSelection)
-
return eventReaderBundle
##__________________________________________________________________||
@@ -78,15 +77,6 @@ class AlphaTwirl(object):
parser.add_argument("--force", action = "store_true", default = False, dest="force", help = "recreate all output files")
return parser
- def _create_CommunicationChannel_and_ProgressMonitor(self):
- self.progressBar = None if self.args.quiet else ProgressBar()
- if self.args.processes is None or self.args.processes == 0:
- self.progressMonitor = NullProgressMonitor() if self.args.quiet else ProgressMonitor(presentation = self.progressBar)
- self.communicationChannel = CommunicationChannel0(self.progressMonitor)
- else:
- self.progressMonitor = NullProgressMonitor() if self.args.quiet else BProgressMonitor(presentation = self.progressBar)
- self.communicationChannel = CommunicationChannel(self.args.processes, self.progressMonitor)
-
def addComponentReader(self, reader):
self.componentReaders.add(reader)
@@ -102,25 +92,44 @@ class AlphaTwirl(object):
self.treeReaderConfigs.append(cfg)
- def run(self):
- if self.args is None: self.ArgumentParser().parse_args()
- self._create_CommunicationChannel_and_ProgressMonitor()
+ def _build(self):
+
+ self.progressBar = None if self.args.quiet else ProgressBar()
+ if self.args.processes is None or self.args.processes == 0:
+ self.progressMonitor = NullProgressMonitor() if self.args.quiet else ProgressMonitor(presentation = self.progressBar)
+ self.communicationChannel = CommunicationChannel0(self.progressMonitor)
+ else:
+ self.progressMonitor = NullProgressMonitor() if self.args.quiet else BProgressMonitor(presentation = self.progressBar)
+ self.communicationChannel = CommunicationChannel(self.args.processes, self.progressMonitor)
+
for cfg in self.treeReaderConfigs:
+ tableCreators = buildTableCreators(
+ tableConfigs = cfg['tableConfigs'],
+ outDir = self.args.outDir,
+ force = self.args.force,
+ progressMonitor = self.progressMonitor,
+ )
treeReader = createTreeReader(
- self.progressMonitor,
- self.communicationChannel,
- self.args.outDir,
- self.args.force,
- self.args.nevents,
- **cfg)
+ analyzerName = cfg['analyzerName'],
+ fileName = cfg['fileName'],
+ treeName = cfg['treeName'],
+ eventSelection = cfg['eventSelection'],
+ tableCreators = tableCreators,
+ nevents = self.args.nevents,
+ communicationChannel = self.communicationChannel,
+ )
self.addComponentReader(treeReader)
- if self.progressMonitor is not None: self.progressMonitor.begin()
+
+ def run(self):
+ if self.args is None: self.ArgumentParser().parse_args()
+ self._build()
+ self.progressMonitor.begin()
self.communicationChannel.begin()
componentLoop = ComponentLoop(self.componentReaders)
if self.args.components == ['all']: self.args.components = None
heppyResult = HeppyResult(path = self.args.heppydir, componentNames = self.args.components)
componentLoop(heppyResult.components())
self.communicationChannel.end()
- if self.progressMonitor is not None: self.progressMonitor.end()
+ self.progressMonitor.end()
##__________________________________________________________________||
|
clean up code in AlphaTwirl.py
|
alphatwirl_alphatwirl
|
train
|
f76b8b1ab352f08515255204faddfc69b43e2bb6
|
diff --git a/Composite/Form.php b/Composite/Form.php
index <HASH>..<HASH> 100644
--- a/Composite/Form.php
+++ b/Composite/Form.php
@@ -13,8 +13,10 @@ namespace DesignPatterns\Composite;
* subsequently runs trough all its child elements and calls render() on them
* - Zend_Config: a tree of configuration options, each one is a Zend_Config object
*
+ * The composite node MUST extend the component contract. This is mandatory for building
+ * a tree of component.
*/
-class Form
+class Form extends FormElement
{
protected $_elements;
|
you MUST inherit from the component contract. If not, this is no longer a composite pattern
|
domnikl_DesignPatternsPHP
|
train
|
a1836f529cd0b5b8dc0dcd9dcd3b165b70ed506d
|
diff --git a/polyaxon/scheduler/job_scheduler.py b/polyaxon/scheduler/job_scheduler.py
index <HASH>..<HASH> 100644
--- a/polyaxon/scheduler/job_scheduler.py
+++ b/polyaxon/scheduler/job_scheduler.py
@@ -22,7 +22,8 @@ def start_job(job):
image_name, image_tag = get_image_info(build_job=job.build_job)
except (ValueError, AttributeError):
_logger.error('Could not start the job.', exc_info=True)
- job.set_status(JobLifeCycle.FAILED, message='External git repo was note found.')
+ job.set_status(JobLifeCycle.FAILED,
+ message='Image info was not found.')
return
job_docker_image = '{}:{}'.format(image_name, image_tag)
_logger.info('Start job with built image `%s`', job_docker_image)
diff --git a/polyaxon/scheduler/notebook_scheduler.py b/polyaxon/scheduler/notebook_scheduler.py
index <HASH>..<HASH> 100644
--- a/polyaxon/scheduler/notebook_scheduler.py
+++ b/polyaxon/scheduler/notebook_scheduler.py
@@ -21,7 +21,8 @@ def start_notebook(notebook):
image_name, image_tag = get_image_info(build_job=notebook.build_job)
except (ValueError, AttributeError):
_logger.error('Could not start the notebook.', exc_info=True)
- notebook.set_status(JobLifeCycle.FAILED, message='External git repo was note found.')
+ notebook.set_status(JobLifeCycle.FAILED,
+ message='Image info was not found.')
return
job_docker_image = '{}:{}'.format(image_name, image_tag)
_logger.info('Start notebook with built image `%s`', job_docker_image)
diff --git a/tests/test_experiments/test_models.py b/tests/test_experiments/test_models.py
index <HASH>..<HASH> 100644
--- a/tests/test_experiments/test_models.py
+++ b/tests/test_experiments/test_models.py
@@ -239,13 +239,14 @@ class TestExperimentModel(BaseTest):
build_experiment(experiment_id=experiment.id)
assert mock_build.call_count == 1
- assert ExperimentStatus.objects.filter(experiment=experiment).count() == 3
+ assert ExperimentStatus.objects.filter(experiment=experiment).count() == 4
assert list(ExperimentStatus.objects.filter(experiment=experiment).values_list(
'status', flat=True)) == [ExperimentLifeCycle.CREATED,
ExperimentLifeCycle.BUILDING,
- ExperimentLifeCycle.SCHEDULED]
+ ExperimentLifeCycle.SCHEDULED,
+ ExperimentLifeCycle.FAILED]
experiment.refresh_from_db()
- assert experiment.last_status == ExperimentLifeCycle.SCHEDULED
+ assert experiment.last_status == ExperimentLifeCycle.FAILED
@mock.patch('scheduler.experiment_scheduler.ExperimentSpawner')
def test_create_experiment_with_valid_spec(self, spawner_mock):
|
Update error message when build is not found
* Update related test
|
polyaxon_polyaxon
|
train
|
cdf9e170cae65c68fde5c86a27c9ff28a1a35000
|
diff --git a/core/src/main/java/pro/zackpollard/telegrambot/api/internal/chat/message/content/DocumentContentImpl.java b/core/src/main/java/pro/zackpollard/telegrambot/api/internal/chat/message/content/DocumentContentImpl.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/pro/zackpollard/telegrambot/api/internal/chat/message/content/DocumentContentImpl.java
+++ b/core/src/main/java/pro/zackpollard/telegrambot/api/internal/chat/message/content/DocumentContentImpl.java
@@ -28,4 +28,9 @@ public class DocumentContentImpl implements DocumentContent {
public Document getContent() {
return content;
}
+
+ @Override
+ public String getCaption() {
+ return caption;
+ }
}
|
Add in missing getCaption method to DocumentContentImpl object, fixes #<I>
|
zackpollard_JavaTelegramBot-API
|
train
|
611f3e2a8d1719c58cc3ba227b825e352dc321a9
|
diff --git a/machina/__init__.py b/machina/__init__.py
index <HASH>..<HASH> 100644
--- a/machina/__init__.py
+++ b/machina/__init__.py
@@ -13,7 +13,7 @@ from __future__ import unicode_literals
import os
-__version__ = '0.7.0'
+__version__ = '0.8.0.dev0'
MACHINA_VANILLA_APPS = [
|
Bumped version to <I>.dev0
|
ellmetha_django-machina
|
train
|
248993e9c91750ee6671c300f58ff40bd5b94130
|
diff --git a/uproot/rootio.py b/uproot/rootio.py
index <HASH>..<HASH> 100644
--- a/uproot/rootio.py
+++ b/uproot/rootio.py
@@ -200,7 +200,6 @@ class ROOTDirectory(object):
keys = [TKey.read(source, subcursor, context, None) for i in range(nkeys)]
out = ROOTDirectory(mykey._fName, context, keys)
- out._headerkey = headerkey
out._fVersion, out._fDatimeC, out._fDatimeM, out._fNbytesKeys, out._fNbytesName, out._fSeekDir, out._fSeekParent, out._fSeekKeys = fVersion, fDatimeC, fDatimeM, fNbytesKeys, fNbytesName, fSeekDir, fSeekParent, fSeekKeys
out.source = source
diff --git a/uproot/write/TDirectory.py b/uproot/write/TDirectory.py
index <HASH>..<HASH> 100644
--- a/uproot/write/TDirectory.py
+++ b/uproot/write/TDirectory.py
@@ -35,11 +35,11 @@ import uproot.write.sink.cursor
import uproot.write.TKey
class TDirectory(object):
- def __init__(self, tfile, fName, fNbytesName, fNbytesKeys=0, fSeekDir=100, fSeekParent=0, fSeekKeys=0, allocationbytes=1024, growfactor=10):
+ def __init__(self, tfile, fName, fNbytesName, fSeekDir=100, fSeekParent=0, fSeekKeys=0, allocationbytes=1024, growfactor=10):
self.tfile = tfile
self.fName = fName
self.fNbytesName = fNbytesName
- self.fNbytesKeys = fNbytesKeys
+ self.fNbytesKeys = self._format2.size
self.fSeekDir = fSeekDir
self.fSeekParent = fSeekParent
self.fSeekKeys = fSeekKeys
@@ -50,9 +50,7 @@ class TDirectory(object):
self.headkey = uproot.write.TKey.TKey(fClassName = b"TFile",
fName = self.fName,
fObjlen = self._format2.size,
- fSeekKey = self.fSeekKeys,
- fNbytes = self.fNbytesKeys)
- self.nkeys = 0
+ fSeekKey = self.fSeekKeys)
self.keys = collections.OrderedDict()
def update(self):
@@ -74,16 +72,19 @@ class TDirectory(object):
_format1 = struct.Struct(">hIIiiqqq")
_format2 = struct.Struct(">i")
+ def _nbyteskeys(self):
+ return self.headkey.fKeylen + self._format2.size + sum(x.fKeylen for x in self.keys.values())
+
def writekeys(self, cursor):
self.fSeekKeys = cursor.index
- self.fNbytesKeys = self.headkey.fObjlen + self._format2.size + sum(x.fObjlen for x in self.keys.values())
+ self.fNbytesKeys = self._nbyteskeys()
self.tfile._expandfile(uproot.write.sink.cursor.Cursor(self.fSeekKeys + self.allocationbytes))
self.keycursor = uproot.write.sink.cursor.Cursor(self.fSeekKeys)
self.headkey.write(self.keycursor, self.sink)
self.nkeycursor = uproot.write.sink.cursor.Cursor(self.keycursor.index)
- self.keycursor.write_fields(self.sink, self._format2, self.nkeys)
+ self.keycursor.write_fields(self.sink, self._format2, len(self.keys))
self.update()
@@ -91,14 +92,13 @@ class TDirectory(object):
newcursor = None
if newkey.fName in self.keys:
+ self.headkey.fObjlen -= self.keys[newkey.fName].fKeylen
newcursor = uproot.write.sink.cursor.Cursor(self.fSeekKeys)
self.headkey.fObjlen += newkey.fKeylen
- self.headkey.fNbytes += newkey.fKeylen
- self.nkeys += 1
self.keys[newkey.fName] = newkey
- self.fNbytesKeys = self.headkey.fObjlen + self._format2.size + sum(x.fObjlen for x in self.keys.values())
+ self.fNbytesKeys = self._nbyteskeys()
while self.fNbytesKeys > self.allocationbytes:
self.allocationbytes *= self.growfactor
newcursor = uproot.write.sink.cursor.Cursor(self.tfile.fSeekFree)
@@ -108,15 +108,13 @@ class TDirectory(object):
else:
newkey.write(self.keycursor, self.sink)
self.headkey.update()
- self.nkeycursor.update_fields(self.sink, self._format2, self.nkeys)
+ self.nkeycursor.update_fields(self.sink, self._format2, len(self.keys))
self.update()
def delkey(self, name):
oldkey = self.keys[name]
self.headkey.fObjlen -= oldkey
- self.headkey.fNbytes -= oldkey
- self.nkeys -= 1
del self.keys[name]
- self.fNbytesKeys = self.headkey.fObjlen + self._format2.size + sum(x.fObjlen for x in self.keys.values())
+ self.fNbytesKeys = self._nbyteskeys()
self.writekeys(uproot.write.sink.cursor.Cursor(self.fSeekKeys))
|
[skip ci] can be read in ROOT
|
scikit-hep_uproot
|
train
|
efa7f62513bd65da46b1edc2a1eb762168c40e8a
|
diff --git a/openhtf/io/output/mfg_inspector.py b/openhtf/io/output/mfg_inspector.py
index <HASH>..<HASH> 100644
--- a/openhtf/io/output/mfg_inspector.py
+++ b/openhtf/io/output/mfg_inspector.py
@@ -151,6 +151,8 @@ def _ExtractAttachments(phase, testrun, used_parameter_names):
name = _EnsureUniqueParameterName(name, used_parameter_names)
testrun_param = testrun.info_parameters.add()
testrun_param.name = name
+ if isinstance(data, unicode):
+ data = data.encode('utf8')
testrun_param.value_binary = data
if mimetype in MIMETYPE_MAP:
testrun_param.type = MIMETYPE_MAP[mimetype]
|
Encode unicode before shoving into a bytes field
protos expect `value_binary` to be Python `bytes` or `str` objects. If we pass in `unicode` for any reason, it's understandably expected to be encoded. UTF-8 is as good an encoding as any.
|
google_openhtf
|
train
|
c2320a4f95f976b076b47048721e9994c7f71c7b
|
diff --git a/lib/webcat/driver/rack_test_driver.rb b/lib/webcat/driver/rack_test_driver.rb
index <HASH>..<HASH> 100644
--- a/lib/webcat/driver/rack_test_driver.rb
+++ b/lib/webcat/driver/rack_test_driver.rb
@@ -13,7 +13,7 @@ class Webcat::Driver::RackTest
end
def set(value)
- if tag_name == 'input' and %w(text password).include?(type)
+ if tag_name == 'input' and %w(text password hidden).include?(type)
node['value'] = value.to_s
elsif tag_name == 'input' and type == 'radio'
session.html.xpath("//input[@name='#{self[:name]}']").each { |node| node.remove_attribute("checked") }
diff --git a/lib/webcat/session.rb b/lib/webcat/session.rb
index <HASH>..<HASH> 100644
--- a/lib/webcat/session.rb
+++ b/lib/webcat/session.rb
@@ -36,6 +36,10 @@ class Webcat::Session
def choose(locator)
find_field(locator, :radio).set(true)
end
+
+ def set_hidden_field(locator, options={})
+ find_field(locator, :hidden_field).set(options[:to])
+ end
def body
driver.body
@@ -59,7 +63,8 @@ private
:text_field => proc { |id| "//input[@type='text'][@id='#{id}']" },
:text_area => proc { |id| "//textarea[@id='#{id}']" },
:password_field => proc { |id| "//input[@type='password'][@id='#{id}']" },
- :radio => proc { |id| "//input[@type='radio'][@id='#{id}']" }
+ :radio => proc { |id| "//input[@type='radio'][@id='#{id}']" },
+ :hidden_field => proc { |id| "//input[@type='hidden'][@id='#{id}']" }
}
def find_field_by_id(locator, *kinds)
diff --git a/spec/session_spec.rb b/spec/session_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/session_spec.rb
+++ b/spec/session_spec.rb
@@ -193,7 +193,16 @@ shared_examples_for "session" do
end
describe "#set_hidden_field" do
-
+ before do
+ @session.visit('/form')
+ end
+
+ it "should set a hidden field by id" do
+ pending "Culerity doesn't like hidden fields for some reason" if @session.mode == :culerity
+ @session.set_hidden_field("form_token", :to => 'test567')
+ @session.click_button('awesome')
+ YAML.load(@session.body)['token'].should == 'test567'
+ end
end
describe "#check" do
|
Hidden fields
Culerity has problems with this.
|
teamcapybara_capybara
|
train
|
6f435f09d60d6c67fadabd2d003d34d4dd857adb
|
diff --git a/lib/optparse_plus/version.rb b/lib/optparse_plus/version.rb
index <HASH>..<HASH> 100644
--- a/lib/optparse_plus/version.rb
+++ b/lib/optparse_plus/version.rb
@@ -1,3 +1,3 @@
module OptparsePlus
- VERSION = "0.0.2"
+ VERSION = "0.0.3"
end
|
bump the version number to <I>: a bug fix on Windows
|
nico-hn_optparse_plus
|
train
|
982c58cf8d3033836edcf458f3d13422574bc5ea
|
diff --git a/pyres/job.py b/pyres/job.py
index <HASH>..<HASH> 100644
--- a/pyres/job.py
+++ b/pyres/job.py
@@ -1,3 +1,4 @@
+import logging
import time
from datetime import timedelta
from pyres import ResQ, safe_str_to_class
@@ -88,6 +89,7 @@ class Job(object):
raise
else:
metadata["retried"] = True
+ logging.exception("Retry scheduled after error in %s", job)
finally:
after_perform = getattr(payload_class, "after_perform", None)
if after_perform and check_after:
diff --git a/pyres/worker.py b/pyres/worker.py
index <HASH>..<HASH> 100644
--- a/pyres/worker.py
+++ b/pyres/worker.py
@@ -22,9 +22,9 @@ class Worker(object):
>>> Worker.run([queue1, queue2], server="localhost:6379")
"""
-
+
job_class = Job
-
+
def __init__(self, queues=(), server="localhost:6379", password=None, timeout=None):
self.queues = queues
self.validate_queues()
@@ -209,10 +209,10 @@ class Worker(object):
logger.debug('done waiting')
else:
self._setproctitle("Processing %s since %s" %
- (job._queue,
+ (job,
datetime.datetime.now()))
logger.info('Processing %s since %s' %
- (job._queue, datetime.datetime.now()))
+ (job, datetime.datetime.now()))
self.after_fork(job)
# re-seed the Python PRNG after forking, otherwise
@@ -263,7 +263,7 @@ class Worker(object):
logger.info('completed job')
logger.debug('job details: %s' % job)
finally:
- self.done_working()
+ self.done_working(job)
def _handle_job_exception(self, job):
exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
@@ -290,8 +290,8 @@ class Worker(object):
logger.debug("worker:%s" % str(self))
logger.debug(self.resq.redis["resque:worker:%s" % str(self)])
- def done_working(self):
- logger.info('done working')
+ def done_working(self, job):
+ logger.info('done working on %s', job)
self.processed()
self.resq.redis.delete("resque:worker:%s" % str(self))
|
improve some logging, particularly errors that get retried
|
binarydud_pyres
|
train
|
1ba70b0aed9251495566308561a8f7e14889c555
|
diff --git a/bin.js b/bin.js
index <HASH>..<HASH> 100755
--- a/bin.js
+++ b/bin.js
@@ -1,7 +1,6 @@
#!/usr/bin/env node
var minimist = require('minimist')
-var abi = require('node-abi')
var prebuildify = require('./index')
var argv = minimist(process.argv.slice(2), {
@@ -13,34 +12,10 @@ var argv = minimist(process.argv.slice(2), {
stripBin: 'strip-bin',
nodeGyp: 'node-gyp'
},
- boolean: ['quiet', 'strip', 'napi', 'debug']
+ boolean: ['quiet', 'strip', 'napi', 'debug', 'all']
})
-var targets = [].concat(argv.target || []).map(function (v) {
- if (v.indexOf('@') === -1) v = 'node@' + v
-
- return {
- runtime: v.split('@')[0],
- target: v.split('@')[1].replace(/^v/, '')
- }
-})
-
-// TODO: also support --lts and get versions from travis
-if (argv.all) {
- targets = abi.supportedTargets.slice(0)
-}
-
-// Should be the default once napi is stable
-if (argv.napi && targets.length === 0) {
- targets = [
- abi.supportedTargets.filter(onlyNode).pop(),
- abi.supportedTargets.filter(onlyElectron).pop()
- ]
-
- if (targets[0].target === '9.0.0') targets[0].target = '9.6.1'
-}
-
-argv.targets = targets
+argv.targets = [].concat(argv.target || [])
argv.cwd = argv.cwd || argv._[0] || '.'
prebuildify(argv, function (err) {
@@ -49,11 +24,3 @@ prebuildify(argv, function (err) {
process.exit(1)
}
})
-
-function onlyNode (t) {
- return t.runtime === 'node'
-}
-
-function onlyElectron (t) {
- return t.runtime === 'electron'
-}
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -23,12 +23,14 @@ function prebuildify (opts, cb) {
targets: []
}, opts)
- if (!opts.targets.length) {
- return cb(new Error('You must specify at least one target using --target=runtime@version'))
+ var targets = resolveTargets(opts.targets, opts.all, opts.napi)
+
+ if (!targets.length) {
+ return process.nextTick(cb, new Error('You must specify at least one target'))
}
opts = xtend(opts, {
- targets: opts.targets.slice(),
+ targets: targets,
env: xtend(process.env, {
PREBUILD_ARCH: opts.arch,
PREBUILD_PLATFORM: opts.platform,
@@ -221,3 +223,40 @@ function npmbin (name) {
function shell () {
return os.platform() === 'android' ? 'sh' : undefined
}
+
+function resolveTargets (targets, all, napi) {
+ targets = targets.map(function (v) {
+ if (typeof v === 'object' && v !== null) return v
+ if (v.indexOf('@') === -1) v = 'node@' + v
+
+ return {
+ runtime: v.split('@')[0],
+ target: v.split('@')[1].replace(/^v/, '')
+ }
+ })
+
+ // TODO: also support --lts and get versions from travis
+ if (all) {
+ targets = abi.supportedTargets.slice(0)
+ }
+
+ // Should be the default once napi is stable
+ if (napi && targets.length === 0) {
+ targets = [
+ abi.supportedTargets.filter(onlyNode).pop(),
+ abi.supportedTargets.filter(onlyElectron).pop()
+ ]
+
+ if (targets[0].target === '9.0.0') targets[0].target = '9.6.1'
+ }
+
+ return targets
+}
+
+function onlyNode (t) {
+ return t.runtime === 'node'
+}
+
+function onlyElectron (t) {
+ return t.runtime === 'electron'
+}
|
Move target resolving from cli to api
|
prebuild_prebuildify
|
train
|
af599197cc740690e57443d3110b6bf11dee4799
|
diff --git a/lib/logstasher/rails_ext/action_controller/metal/instrumentation.rb b/lib/logstasher/rails_ext/action_controller/metal/instrumentation.rb
index <HASH>..<HASH> 100644
--- a/lib/logstasher/rails_ext/action_controller/metal/instrumentation.rb
+++ b/lib/logstasher/rails_ext/action_controller/metal/instrumentation.rb
@@ -28,7 +28,7 @@ module ActionController
logtasher_add_custom_fields_to_payload(raw_payload)
after_keys = raw_payload.keys
# Store all extra keys added to payload hash in payload itself. This is a thread safe way
- LogStasher.custom_fields += after_keys - before_keys
+ LogStasher::CustomFields.add(*(after_keys - before_keys))
end
result = super
|
Fixes a regression found in instrumentation that still uses the old version of tempering with custom_fields.
|
shadabahmed_logstasher
|
train
|
e6d11c1302ad8dda46286884a650963329c376bf
|
diff --git a/app/controllers/api/templates_controller.rb b/app/controllers/api/templates_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/api/templates_controller.rb
+++ b/app/controllers/api/templates_controller.rb
@@ -86,7 +86,7 @@ class Api::TemplatesController < Api::ApiController
def export
respond_to do |format|
format.tdl { render(:text => @template.export_as_tdl, :content_type => Mime::TDL) and return }
- format.json { render :json => @template.export_as_json }
+ format.json { render :text => @template.export_as_json }
end
end
diff --git a/app/models/system_template.rb b/app/models/system_template.rb
index <HASH>..<HASH> 100644
--- a/app/models/system_template.rb
+++ b/app/models/system_template.rb
@@ -88,7 +88,7 @@ class SystemTemplate < ActiveRecord::Base
json["parameters"].each_pair {|k,v| self.parameters[k] = v } if json["parameters"]
end
- def export_as_json
+ def export_as_hash
tpl = {
:name => self.name,
:revision => self.revision,
@@ -103,8 +103,8 @@ class SystemTemplate < ActiveRecord::Base
tpl
end
- def string_export
- self.export_as_json.to_json
+ def export_as_json
+ self.export_as_hash.to_json
end
@@ -386,7 +386,7 @@ class SystemTemplate < ActiveRecord::Base
end
def get_content_state
- content = self.export_as_json
+ content = self.export_as_hash
content.delete(:name)
content.delete(:description)
content.delete(:revision)
diff --git a/spec/models/system_template_spec.rb b/spec/models/system_template_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/models/system_template_spec.rb
+++ b/spec/models/system_template_spec.rb
@@ -274,7 +274,7 @@ describe SystemTemplate do
@export_tpl.stub(:package_groups).and_return [SystemTemplatePackGroup.new({:name => 'xxx'})]
@export_tpl.stub(:pg_categories).and_return [SystemTemplatePgCategory.new({:name => 'xxx'})]
- str = @export_tpl.export_as_json.to_json
+ str = @export_tpl.export_as_json
json = ActiveSupport::JSON.decode(str)
json['products'].size.should == 2
json['packages'].size.should == 1
|
templates - fix for cloning to an environment
|
Katello_katello
|
train
|
ae84379f86a0216685ce1886cd36ec6a550ab231
|
diff --git a/tests/test_using.py b/tests/test_using.py
index <HASH>..<HASH> 100644
--- a/tests/test_using.py
+++ b/tests/test_using.py
@@ -1580,6 +1580,45 @@ class PostGenerationTestCase(unittest.TestCase):
self.assertEqual(4, related.two)
+class RelatedFactoryExtractionTestCase(unittest.TestCase):
+ def setUp(self):
+ self.relateds = []
+
+ class TestRelatedObject(object):
+ def __init__(subself, obj):
+ self.relateds.append(subself)
+ subself.obj = obj
+ obj.related = subself
+
+ class TestRelatedObjectFactory(factory.Factory):
+ FACTORY_FOR = TestRelatedObject
+
+ class TestObjectFactory(factory.Factory):
+ FACTORY_FOR = TestObject
+ one = factory.RelatedFactory(TestRelatedObjectFactory, 'obj')
+
+ self.TestRelatedObject = TestRelatedObject
+ self.TestRelatedObjectFactory = TestRelatedObjectFactory
+ self.TestObjectFactory = TestObjectFactory
+
+ def test_no_extraction(self):
+ o = self.TestObjectFactory()
+ self.assertEqual(1, len(self.relateds))
+ rel = self.relateds[0]
+ self.assertEqual(o, rel.obj)
+ self.assertEqual(rel, o.related)
+
+ def test_passed_value(self):
+ o = self.TestObjectFactory(one=42)
+ self.assertEqual([], self.relateds)
+ self.assertFalse(hasattr(o, 'related'))
+
+ def test_passed_none(self):
+ o = self.TestObjectFactory(one=None)
+ self.assertEqual([], self.relateds)
+ self.assertFalse(hasattr(o, 'related'))
+
+
class CircularTestCase(unittest.TestCase):
def test_example(self):
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
|
Add tests for RelatedFactory extraction.
|
FactoryBoy_factory_boy
|
train
|
b6ca16e8ab1dc0466532011b3c05e93d277c6315
|
diff --git a/src/pytesseract.py b/src/pytesseract.py
index <HASH>..<HASH> 100755
--- a/src/pytesseract.py
+++ b/src/pytesseract.py
@@ -62,6 +62,16 @@ class TSVNotSupported(Exception):
)
+def run_once(func):
+ def wrapper(*args, **kwargs):
+ if wrapper._result is wrapper:
+ wrapper._result = func(*args, **kwargs)
+ return wrapper._result
+
+ wrapper._result = wrapper
+ return wrapper
+
+
def get_errors(error_string):
return u' '.join(
line for line in error_string.decode('utf-8').splitlines()
@@ -238,17 +248,21 @@ def osd_to_dict(osd):
}
+@run_once
def get_tesseract_version():
'''
Returns a string containing the Tesseract version.
'''
try:
- return subprocess.check_output(
- [tesseract_cmd, '--version'], stderr=subprocess.STDOUT
- ).decode('utf-8').split()[1]
+ return LooseVersion(
+ subprocess.check_output(
+ [tesseract_cmd, '--version'], stderr=subprocess.STDOUT
+ ).decode('utf-8').split()[1]
+ )
except OSError:
raise TesseractNotFoundError()
+
def image_to_string(image,
lang=None,
config='',
@@ -302,8 +316,11 @@ def image_to_data(image,
Returns string containing box boundaries, confidences,
and other information. Requires Tesseract 3.05+
'''
- if LooseVersion(get_tesseract_version()) < '3.05':
+
+ # TODO: we can use decoration for this check
+ if get_tesseract_version() < '3.05':
raise TSVNotSupported()
+
if output_type == Output.DICT:
return file_to_dict(
run_and_get_output(image, 'tsv', lang, config, nice), '\t', -1)
|
Cache the tesseract version reporting
Avoid calling the external tesseract process over and over for every image_to_data call.
|
madmaze_pytesseract
|
train
|
33efa1c618ff01a9bfa5974bad02bea050b97515
|
diff --git a/graylog2-server/src/main/java/org/graylog2/shared/security/ShiroAuthorizationFilter.java b/graylog2-server/src/main/java/org/graylog2/shared/security/ShiroAuthorizationFilter.java
index <HASH>..<HASH> 100644
--- a/graylog2-server/src/main/java/org/graylog2/shared/security/ShiroAuthorizationFilter.java
+++ b/graylog2-server/src/main/java/org/graylog2/shared/security/ShiroAuthorizationFilter.java
@@ -50,7 +50,7 @@ public class ShiroAuthorizationFilter implements ContainerRequestFilter {
LOG.debug("Checking authorization for user [{}], needs permissions: {}", userName, annotation.value());
annotationHandler.assertAuthorized(annotation);
} catch (AuthorizationException e) {
- LOG.info("User [" + userName + "] not authorized.", e);
+ LOG.info("User [" + userName + "] not authorized.");
throw new NotAuthorizedException(e, "Basic realm=\"Graylog Server\"");
}
} else {
|
Do not log a stack trace when a user is not authorized
|
Graylog2_graylog2-server
|
train
|
0e1ed0f171cb4b190193e2e94f00f94a6a4948bc
|
diff --git a/src/patch/java/com/android/tools/perflib/heap/HprofParser.java b/src/patch/java/com/android/tools/perflib/heap/HprofParser.java
index <HASH>..<HASH> 100644
--- a/src/patch/java/com/android/tools/perflib/heap/HprofParser.java
+++ b/src/patch/java/com/android/tools/perflib/heap/HprofParser.java
@@ -208,7 +208,7 @@ class HprofParser {
// TODO: enable this after the dominators computation is also optimized.
// mSnapshot.computeRetainedSizes();
} catch (Exception e) {
- e.printStackTrace();
+ throw new RuntimeException("Could not parse heap dump", e);
}
mClassNamesById.clear();
|
HprofParser should not fail silently
|
square_haha
|
train
|
9413df009883ff23687e1c00c5bc1376ec23a6a6
|
diff --git a/librosa/display.py b/librosa/display.py
index <HASH>..<HASH> 100644
--- a/librosa/display.py
+++ b/librosa/display.py
@@ -1121,13 +1121,13 @@ def __coord_fft_hz(n, sr=22050, **_kwargs):
return basis
-def __coord_mel_hz(n, fmin=0, fmax=11025.0, **_kwargs):
+def __coord_mel_hz(n, fmin=0, fmax=None, sr=22050, **_kwargs):
"""Get the frequencies for Mel bins"""
if fmin is None:
fmin = 0
if fmax is None:
- fmax = 11025.0
+ fmax = 0.5 * sr
basis = core.mel_frequencies(n, fmin=fmin, fmax=fmax)
basis[1:] -= 0.5 * np.diff(basis)
|
fixed #<I>, mel axis scale inference (#<I>)
|
librosa_librosa
|
train
|
815487625e6984444e2d8191b9785a6465e0f8e7
|
diff --git a/h2o-core/src/main/java/water/api/FramesHandler.java b/h2o-core/src/main/java/water/api/FramesHandler.java
index <HASH>..<HASH> 100644
--- a/h2o-core/src/main/java/water/api/FramesHandler.java
+++ b/h2o-core/src/main/java/water/api/FramesHandler.java
@@ -266,8 +266,14 @@ public class FramesHandler<I extends FramesHandler.Frames, S extends SchemaV3<I,
if( null != frame) {
Futures fs = new Futures();
- for( Vec v : frame.vecs() )
- v.startRollupStats(fs, Vec.DO_HISTOGRAMS);
+ int i = 0;
+ for( Vec v : frame.vecs() ) {
+ if (null == DKV.get(v._key))
+ Log.warn("For Frame: " + frame._key + ", Vec number: " + i + " (" + frame.name(i)+ ") is missing; not returning it.");
+ else
+ v.startRollupStats(fs, Vec.DO_HISTOGRAMS);
+ i++;
+ }
fs.blockForPending();
}
diff --git a/h2o-core/src/main/java/water/api/schemas3/FrameV3.java b/h2o-core/src/main/java/water/api/schemas3/FrameV3.java
index <HASH>..<HASH> 100644
--- a/h2o-core/src/main/java/water/api/schemas3/FrameV3.java
+++ b/h2o-core/src/main/java/water/api/schemas3/FrameV3.java
@@ -1,14 +1,21 @@
package water.api.schemas3;
+import water.DKV;
import water.Futures;
import water.Key;
import water.MemoryManager;
-import water.api.*;
+import water.api.API;
import water.api.schemas3.KeyV3.FrameKeyV3;
-import water.fvec.*;
+import water.fvec.ByteVec;
+import water.fvec.Chunk;
+import water.fvec.Frame;
import water.fvec.Frame.VecSpecifier;
+import water.fvec.Vec;
import water.parser.BufferedString;
-import water.util.*;
+import water.util.ChunkSummary;
+import water.util.FrameUtils;
+import water.util.Log;
+import water.util.PrettyPrint;
/**
* All the details on a Frame. Note that inside ColV3 there are fields which won't be
@@ -235,11 +242,19 @@ public class FrameV3 extends FrameBaseV3<Frame, FrameV3> {
Vec[] vecs = f.vecs();
Futures fs = new Futures();
// Compute rollups in parallel as needed, by starting all of them and using
- // them when filling in the ColV3 Schemas
+ // them when filling in the ColV3 Schemas.
+ // NOTE: SKIP deleted Vecs! The columns entry will be null for deleted Vecs.
for( int i = 0; i < column_count; i++ )
- vecs[column_offset + i].startRollupStats(fs);
+ if (null == DKV.get(vecs[column_offset + i]._key))
+ Log.warn("For Frame: " + f._key + ", Vec number: " + (column_offset + i) + " (" + f.name((column_offset + i))+ ") is missing; not returning it.");
+ else
+ vecs[column_offset + i].startRollupStats(fs);
for( int i = 0; i < column_count; i++ )
- columns[i] = new ColV3(f._names[column_offset + i], vecs[column_offset + i], this.row_offset, this.row_count);
+ if (null == DKV.get(vecs[column_offset + i]._key))
+ Log.warn("For Frame: " + f._key + ", Vec number: " + (column_offset + i) + " (" + f.name((column_offset + i))+ ") is missing; not returning it.");
+ else
+ columns[i] = new ColV3(f._names[column_offset + i], vecs[column_offset + i], this.row_offset, this.row_count);
+
fs.blockForPending();
this.is_text = f.numCols()==1 && vecs[0] instanceof ByteVec;
this.default_percentiles = Vec.PERCENTILES;
@@ -258,7 +273,8 @@ public class FrameV3 extends FrameBaseV3<Frame, FrameV3> {
public void clearBinsField() {
for (ColV3 col: columns)
- col.clearBinsField();
+ if (col != null)
+ col.clearBinsField();
}
private abstract static class ColOp { abstract String op(ColV3 v); }
diff --git a/h2o-core/src/main/java/water/util/PojoUtils.java b/h2o-core/src/main/java/water/util/PojoUtils.java
index <HASH>..<HASH> 100644
--- a/h2o-core/src/main/java/water/util/PojoUtils.java
+++ b/h2o-core/src/main/java/water/util/PojoUtils.java
@@ -339,6 +339,9 @@ public class PojoUtils {
* <b>NOTE: modifies the scheme tree in place.</b>
*/
public static void filterFields(Object o, String includes, String excludes) {
+ if (null == o)
+ return;
+
if (null == excludes || "".equals(excludes))
return;
|
HEXDEV-<I>: don't return JSON exceptions when REST API clients ask about Frames with deleted Vecs. This will allow the clients to keep working with functional Frames.
|
h2oai_h2o-3
|
train
|
bf4a2247068ab9fb6f3644c96c4fdf3af51d4f09
|
diff --git a/src/classes/task.php b/src/classes/task.php
index <HASH>..<HASH> 100644
--- a/src/classes/task.php
+++ b/src/classes/task.php
@@ -85,11 +85,12 @@ class periodicTask
$this->logger = $logger;
// Configure task
- if ( $this->configuration->config )
+ foreach ( $this->properties as $key => $default )
{
- foreach ( $this->configuration->config as $key => $value )
+ if ( $this->configuration->config &&
+ $this->configuration->config->$key )
{
- $this->$key = $value;
+ $this->properties[$key] = (int) (string) $this->configuration->config->$key;
}
}
}
@@ -160,5 +161,38 @@ class periodicTask
return periodicExecutor::SUCCESS;
}
+
+ /**
+ * Interceptor for task options
+ *
+ * @param string $property
+ * @return mixed
+ */
+ public function __get( $property )
+ {
+ if ( !array_key_exists( $property, $this->properties ) )
+ {
+ throw new periodicAttributeException( periodicAttributeException::NON_EXISTANT, $property );
+ }
+
+ return $this->properties[$property];
+ }
+
+ /**
+ * Interceptor for task options
+ *
+ * @param string $property
+ * @param mixed $value
+ * @return void
+ */
+ public function __set( $property, $value )
+ {
+ if ( !array_key_exists( $property, $this->properties ) )
+ {
+ throw new periodicAttributeException( periodicAttributeException::NON_EXISTANT, $property );
+ }
+
+ throw new periodicAttributeException( periodicAttributeException::WRITE, $property );
+ }
}
diff --git a/tests/data/tasks/reschedule.xml b/tests/data/tasks/reschedule.xml
index <HASH>..<HASH> 100644
--- a/tests/data/tasks/reschedule.xml
+++ b/tests/data/tasks/reschedule.xml
@@ -1,5 +1,9 @@
<?xml version="1.0"?>
<task>
+ <config>
+ <reScheduleTime>30</reScheduleTime>
+ <timeout>1200</timeout>
+ </config>
<command type="test.dummy"/>
<command type="test.reschedule"/>
<command type="test.dummy"/>
diff --git a/tests/executor/task.php b/tests/executor/task.php
index <HASH>..<HASH> 100644
--- a/tests/executor/task.php
+++ b/tests/executor/task.php
@@ -40,6 +40,95 @@ class periodicTaskTests extends PHPUnit_Framework_TestCase
periodicCommandRegistry::registerCommand( 'test.errorneous', 'periodicTestErrorneousCommand' );
}
+ public function testTaskConfigurationDefaultValues()
+ {
+ $task = new periodicTask(
+ 'test', 0,
+ arbitXml::loadFile( __DIR__ . "/../data/tasks/dummy.xml" ),
+ $logger = new periodicTestLogger()
+ );
+
+ $this->assertSame(
+ 300,
+ $task->reScheduleTime
+ );
+
+ $this->assertSame(
+ 3600,
+ $task->timeout
+ );
+ }
+
+ public function testTaskConfigurationReadUnknownValue()
+ {
+ $task = new periodicTask(
+ 'test', 0,
+ arbitXml::loadFile( __DIR__ . "/../data/tasks/dummy.xml" ),
+ $logger = new periodicTestLogger()
+ );
+
+ try
+ {
+ $task->unknown;
+ $this->fail( 'Expected periodicAttributeException.' );
+ }
+ catch ( periodicAttributeException $e )
+ { /* Expected */ }
+ }
+
+ public function testTaskConfigurationWriteUnknownValue()
+ {
+ $task = new periodicTask(
+ 'test', 0,
+ arbitXml::loadFile( __DIR__ . "/../data/tasks/dummy.xml" ),
+ $logger = new periodicTestLogger()
+ );
+
+ try
+ {
+ $task->unknown = 42;
+ $this->fail( 'Expected periodicAttributeException.' );
+ }
+ catch ( periodicAttributeException $e )
+ { /* Expected */ }
+ }
+
+ public function testTaskConfigurationWriteValue()
+ {
+ $task = new periodicTask(
+ 'test', 0,
+ arbitXml::loadFile( __DIR__ . "/../data/tasks/dummy.xml" ),
+ $logger = new periodicTestLogger()
+ );
+
+ try
+ {
+ $task->timeout = 42;
+ $this->fail( 'Expected periodicAttributeException.' );
+ }
+ catch ( periodicAttributeException $e )
+ { /* Expected */ }
+ }
+
+ public function testTaskConfigurationReconfiguredValues()
+ {
+ $task = new periodicTask(
+ 'test', 0,
+ arbitXml::loadFile( __DIR__ . "/../data/tasks/reschedule.xml" ),
+ $logger = new periodicTestLogger()
+ );
+
+ $this->assertSame(
+ 30,
+ $task->reScheduleTime
+ );
+
+ $this->assertSame(
+ 1200,
+ $task->timeout
+ );
+ }
+
public static function getTaskHandlingLogs()
{
return array(
|
- Tested: Configuration of task
|
Arbitracker_Periodic
|
train
|
8d7f4e549a2f83e93de9d440a7aa979b73cfba38
|
diff --git a/examples/my_test_suite.py b/examples/my_test_suite.py
index <HASH>..<HASH> 100755
--- a/examples/my_test_suite.py
+++ b/examples/my_test_suite.py
@@ -14,6 +14,7 @@ class MyTestSuite(BaseCase):
def test_2(self):
# This test should FAIL
+ print "\n(This test fails on purpose)"
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
@@ -25,5 +26,6 @@ class MyTestSuite(BaseCase):
def test_4(self):
# This test should FAIL
+ print "\n(This test fails on purpose)"
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
Make it clear that a few example tests fail on purpose
|
seleniumbase_SeleniumBase
|
train
|
9883a3955c8b8f2983fb468311a2f8f8a190d1ec
|
diff --git a/runtime/component.js b/runtime/component.js
index <HASH>..<HASH> 100644
--- a/runtime/component.js
+++ b/runtime/component.js
@@ -127,7 +127,7 @@ export class Runtime extends Component {
Type={app.types[panel.type]}
zIndex={router.routes.items.length - i}
navigate={navigate}
- key={context}
+ key={context.replace(/[()]/g, '')}
present={present}
router={router}
toggleExpand={toggleExpand}
|
fix: panel key shouldn't be dependent on sliced panels
|
UXtemple_panels
|
train
|
f604c83c3032da131004730a1365e911f5e167d1
|
diff --git a/core-ui/src/components/ApiRules/gql/useApiRulesQuery.js b/core-ui/src/components/ApiRules/gql/useApiRulesQuery.js
index <HASH>..<HASH> 100644
--- a/core-ui/src/components/ApiRules/gql/useApiRulesQuery.js
+++ b/core-ui/src/components/ApiRules/gql/useApiRulesQuery.js
@@ -22,6 +22,10 @@ export const useApiRulesQuery = ({ namespace, serviceName = undefined }) => {
const [apiRules, setApiRules] = useStateWithCallback([]);
const apolloClient = useApolloClient();
+ useEffect(() => {
+ setLoadedData(false);
+ }, [namespace]);
+
function processQueue(event, done) {
const newApiRules = handleSubscriptionEvent(
{
|
reset loadedData once namespace is changed (#<I>)
|
kyma-project_console
|
train
|
85d21c694b294d03e37fc1646937a21bc0782f30
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100755
--- a/index.js
+++ b/index.js
@@ -45,6 +45,11 @@ module.exports = postcss.plugin('postcss-js-mixins', (options = {}) => {
*/
function createDeclarations(data, node) {
return data.map(decl => {
+
+ // TODO: temporary fix
+ if (decl.prop === 'rule') {
+ return postcss.parse(decl.value)
+ }
return createDeclaration(decl, node);
});
}
|
Add rule replacement in createDeclarations function
|
nathanhood_postcss-js-mixins
|
train
|
1c2c5527032f7a79aff53d29e9640e27957f1b49
|
diff --git a/activemodel/test/cases/errors_test.rb b/activemodel/test/cases/errors_test.rb
index <HASH>..<HASH> 100644
--- a/activemodel/test/cases/errors_test.rb
+++ b/activemodel/test/cases/errors_test.rb
@@ -54,6 +54,11 @@ class ErrorsTest < ActiveModel::TestCase
assert errors.has_key?(:foo), 'errors should have key :foo'
end
+ def test_has_no_key
+ errors = ActiveModel::Errors.new(self)
+ assert_equal false, errors.has_key?(:name), 'errors should not have key :name'
+ end
+
test "clear errors" do
person = Person.new
person.validate!
|
Add failing test for ActiveModel::Errors#has_key? method
From the doc, this method should return false and not nil if there is no errors for this key
|
rails_rails
|
train
|
615ea171da58731c54d966f513285d80e880296e
|
diff --git a/app/Module/SlideShowModule.php b/app/Module/SlideShowModule.php
index <HASH>..<HASH> 100644
--- a/app/Module/SlideShowModule.php
+++ b/app/Module/SlideShowModule.php
@@ -123,7 +123,7 @@ class SlideShowModule extends AbstractModule implements ModuleBlockInterface
$media = Media::getInstance($row->m_id, $tree, $row->m_gedcom);
assert($media instanceof Media);
- if (!$media->canShow()) {
+ if (!$media->canShow() || $media->firstImageFile() === null) {
return false;
}
|
Fix: #<I> - non-images labelled as images can break the slideshow
|
fisharebest_webtrees
|
train
|
0e1f20790028aa7f919587f9f248d9cab0e2c0b5
|
diff --git a/src/GetOptionKit/OptionResult.php b/src/GetOptionKit/OptionResult.php
index <HASH>..<HASH> 100644
--- a/src/GetOptionKit/OptionResult.php
+++ b/src/GetOptionKit/OptionResult.php
@@ -50,6 +50,11 @@ class OptionResult
public function __get($key)
{
+ //verifying if we got a camelCased key: http://stackoverflow.com/a/7599674/102960
+ $parts = preg_split('/(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=[A-Z][a-z])/', $key);
+ if( sizeof($parts) > 1 )
+ $key = join('-', array_map('strtolower', $parts));
+
if( isset($this->keys[ $key ]) )
return @$this->keys[ $key ]->value;
}
|
Enabling translation of CamelCased properties to hyphenated option names
|
c9s_GetOptionKit
|
train
|
7324f4f07bb7b0ec315e1c11ec51f29bec8cd41c
|
diff --git a/report/src/main/java/com/buschmais/jqassistant/core/report/api/LanguageHelper.java b/report/src/main/java/com/buschmais/jqassistant/core/report/api/LanguageHelper.java
index <HASH>..<HASH> 100644
--- a/report/src/main/java/com/buschmais/jqassistant/core/report/api/LanguageHelper.java
+++ b/report/src/main/java/com/buschmais/jqassistant/core/report/api/LanguageHelper.java
@@ -3,7 +3,6 @@ package com.buschmais.jqassistant.core.report.api;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
-import com.buschmais.jqassistant.core.analysis.api.AnalysisListenerException;
import com.buschmais.jqassistant.core.store.api.model.Descriptor;
import com.buschmais.xo.spi.reflection.AnnotatedType;
@@ -22,7 +21,7 @@ public final class LanguageHelper {
*
* @throws com.buschmais.jqassistant.core.analysis.api.AnalysisListenerException
*/
- public static LanguageElement getLanguageElement(Descriptor descriptor) throws AnalysisListenerException {
+ public static LanguageElement getLanguageElement(Descriptor descriptor) {
for (Class<?> descriptorType : descriptor.getClass().getInterfaces()) {
AnnotatedType annotatedType = new AnnotatedType(descriptorType);
Annotation languageAnnotation = annotatedType.getByMetaAnnotation(Language.class);
@@ -45,22 +44,20 @@ public final class LanguageHelper {
* @param <T>
* The expected type.
* @return The value.
- * @throws com.buschmais.jqassistant.core.analysis.api.AnalysisListenerException
- * If the value cannot be determined from the annotation.
*/
- private static <T> T getAnnotationValue(Annotation annotation, String value, Class<T> expectedType) throws AnalysisListenerException {
+ private static <T> T getAnnotationValue(Annotation annotation, String value, Class<T> expectedType) {
Class<? extends Annotation> annotationType = annotation.annotationType();
Method valueMethod;
try {
valueMethod = annotationType.getDeclaredMethod(value);
} catch (NoSuchMethodException e) {
- throw new AnalysisListenerException("Cannot resolve required method '" + value + "()' for '" + annotationType + "'.");
+ throw new IllegalStateException("Cannot resolve required method '" + value + "()' for '" + annotationType + "'.");
}
Object elementValue;
try {
elementValue = valueMethod.invoke(annotation);
} catch (ReflectiveOperationException e) {
- throw new AnalysisListenerException("Cannot invoke method value() for " + annotationType);
+ throw new IllegalStateException("Cannot invoke method value() for " + annotationType);
}
return elementValue != null ? expectedType.cast(elementValue) : null;
}
|
#<I> added test for analysis severity
|
buschmais_jqa-core-framework
|
train
|
967d575a202d6903b2ab19b0c054393137a3fc18
|
diff --git a/mqlight.js b/mqlight.js
index <HASH>..<HASH> 100644
--- a/mqlight.js
+++ b/mqlight.js
@@ -1195,6 +1195,13 @@ Client.prototype.disconnect = function(callback) {
'stopProcessing.queuedSendCallback', client.id, null);
});
}
+ // clear the active subscriptions list as we were asked to disconnect
+ logger.log('data', client.id, 'client.subscriptions:',
+ client.subscriptions);
+ while (client.subscriptions.length > 0) {
+ client.subscriptions.pop();
+ }
+
// Indicate that we've disconnected
client.state = 'disconnected';
process.nextTick(function() {
diff --git a/test/teststop.js b/test/teststop.js
index <HASH>..<HASH> 100644
--- a/test/teststop.js
+++ b/test/teststop.js
@@ -139,3 +139,28 @@ module.exports.test_disconnect_too_many_arguments = function(test) {
test.done();
}, 'spurious');
};
+
+
+/**
+ * Test that the client.subscriptions list is cleared upon a user-requested
+ * client.disconnect(...) call.
+ *
+ * @param {object} test the unittest interface
+ */
+module.exports.test_disconnect_cleared_subscriptions = function(test) {
+ var client = mqlight.createClient({service: 'amqp://host'});
+ client.on('connected', function() {
+ client.on('disconnected', function() {
+ test.deepEqual(client.subscriptions, [], 'client.subscriptions was not ' +
+ 'cleared during client.disconnect() call');
+ test.done();
+ });
+ client.subscribe('/foo', function(err) {
+ test.ifError(err);
+ test.deepEqual(client.subscriptions.length, 1, 'client.subscriptions ' +
+ 'was not appended to');
+ client.disconnect();
+ });
+ });
+ client.connect();
+};
|
clear client.subscriptions when a client.disconnect() is explictly called
|
mqlight_nodejs-mqlight
|
train
|
1bf1913d041e7e8f7d8180b1895b65cb39691b08
|
diff --git a/NavigationReactNative/src/android/app/src/main/java/com/navigation/reactnative/TabBarView.java b/NavigationReactNative/src/android/app/src/main/java/com/navigation/reactnative/TabBarView.java
index <HASH>..<HASH> 100644
--- a/NavigationReactNative/src/android/app/src/main/java/com/navigation/reactnative/TabBarView.java
+++ b/NavigationReactNative/src/android/app/src/main/java/com/navigation/reactnative/TabBarView.java
@@ -108,9 +108,11 @@ public class TabBarView extends ViewPager {
private class Adapter extends FragmentPagerAdapter {
private List<TabFragment> tabFragments = new ArrayList<>();
+ FragmentManager fragmentManager;
Adapter(FragmentManager fragmentManager) {
super(fragmentManager);
+ this.fragmentManager = fragmentManager;
}
void addTab(TabBarItemView tab, int index) {
@@ -152,6 +154,11 @@ public class TabBarView extends ViewPager {
return !tabs.contains(object) ? POSITION_NONE : tabs.indexOf(object);
}
+ @Override
+ public long getItemId(int position) {
+ return System.identityHashCode(tabFragments.get(position));
+ }
+
@NonNull
@Override
public Object instantiateItem(@NonNull ViewGroup container, int position) {
@@ -162,9 +169,9 @@ public class TabBarView extends ViewPager {
@Override
public void destroyItem(@NonNull ViewGroup container, int position, @NonNull Object object) {
if (!tabFragments.contains(object)) {
- FragmentTransaction transaction = ((FragmentActivity) ((ReactContext) getContext()).getCurrentActivity()).getSupportFragmentManager().beginTransaction();
+ FragmentTransaction transaction = fragmentManager.beginTransaction();
transaction.remove((Fragment) object);
- transaction.commit();
+ transaction.commitAllowingStateLoss();
}
}
}
|
Fixed removal with getItemId
With three tabs, removing the middel one is basically reordering. The getItemId came from this stackoverflow about how to reorder <URL>
|
grahammendick_navigation
|
train
|
10438a19770953a3d2cf719839f1a17ecb0021fe
|
diff --git a/packages/material-ui/src/Popover/Popover.js b/packages/material-ui/src/Popover/Popover.js
index <HASH>..<HASH> 100644
--- a/packages/material-ui/src/Popover/Popover.js
+++ b/packages/material-ui/src/Popover/Popover.js
@@ -178,7 +178,7 @@ class Popover extends React.Component {
}
warning(
- elemRect.height < heightThreshold || !elemRect.height || !heightThreshold,
+ elemRect.height <= heightThreshold || !elemRect.height || !heightThreshold,
[
'Material-UI: the popover component is too tall.',
`Some part of it can not be seen on the screen (${elemRect.height - heightThreshold}px).`,
|
[Popover] Correct warning for tall component (#<I>)
|
mui-org_material-ui
|
train
|
529033108ba9d7459874af2582a6c619ca7565cf
|
diff --git a/closure/goog/locale/locale.js b/closure/goog/locale/locale.js
index <HASH>..<HASH> 100644
--- a/closure/goog/locale/locale.js
+++ b/closure/goog/locale/locale.js
@@ -41,6 +41,7 @@ goog.locale.setLocale = function(localeName) {
/**
* Retrieve the currnet locale
* @return {string} Current locale name string.
+ * @deprecated Use goog.LOCALE instead.
*/
goog.locale.getLocale = function() {
if (!goog.locale.activeLocale_) {
|
goog.locale.getLocale method is deprecated - use goog.LOCALE instead.
R=pupius
DELTA=1 (1 added, 0 deleted, 0 changed)
Revision created by MOE tool push_codebase.
MOE_MIGRATION=<I>
git-svn-id: <URL>
|
google_closure-library
|
train
|
30949397e90a5685fd83248d75df7c640f43c06d
|
diff --git a/src/Helper/DateTime.php b/src/Helper/DateTime.php
index <HASH>..<HASH> 100644
--- a/src/Helper/DateTime.php
+++ b/src/Helper/DateTime.php
@@ -29,7 +29,7 @@ class DateTime
*/
public static function parse(string $dateTimeString): Carbon
{
- Self::emptyDateTimeString($dateTimeString);
+ Self::isEmptyDateTimeString($dateTimeString);
try {
return Carbon::parse($dateTimeString);
@@ -47,7 +47,7 @@ class DateTime
*
* @return bool
*/
- private static function emptyDateTimeString(string $dateTimeString): bool
+ private static function isEmptyDateTimeString(string $dateTimeString): bool
{
if (!empty($dateTimeString)) {
return true;
diff --git a/src/TokenBuilder.php b/src/TokenBuilder.php
index <HASH>..<HASH> 100644
--- a/src/TokenBuilder.php
+++ b/src/TokenBuilder.php
@@ -6,11 +6,13 @@ use ReallySimpleJWT\Helper\TokenEncodeDecode;
use ReallySimpleJWT\Helper\DateTime;
use Carbon\Carbon;
use ReallySimpleJWT\Helper\Secret;
+use ReallySimpleJWT\TokenObject;
/**
* Class that generates a JSON Web Token, uses HS256 to generate the signature
*
* @author Rob Waller <rdwaller1984@gmail.com>
+ * @todo Conceptually this class is wrong it needs to abstracted to a proper builder pattern
*/
class TokenBuilder extends TokenAbstract
{
@@ -289,9 +291,13 @@ class TokenBuilder extends TokenAbstract
*/
public function build(): string
{
- return $this->encodeHeader() . "." .
+ $jwt = $this->encodeHeader() . "." .
$this->encodePayload() . "." .
$this->getSignature()->get();
+
+ $this->tearDown();
+
+ return $jwt;
}
/**
@@ -303,4 +309,19 @@ class TokenBuilder extends TokenAbstract
{
return DateTime::olderThan(DateTime::now(), DateTime::parse($this->expiration));
}
+
+ /**
+ * This method is a fix to allow the creation of multiple tokens at the same
+ * time. It is essentially a flawed but working approach. This class needs
+ * to be rebuilt in the 2.0.0 release.
+ */
+ private function tearDown()
+ {
+ $this->payload = [];
+ $this->secret = null;
+ $this->expiration = null;
+ $this->issuer = null;
+ $this->subject = null;
+ $this->audience = null;
+ }
}
diff --git a/tests/TokenBuilderTest.php b/tests/TokenBuilderTest.php
index <HASH>..<HASH> 100644
--- a/tests/TokenBuilderTest.php
+++ b/tests/TokenBuilderTest.php
@@ -274,4 +274,55 @@ class TokenBuilderTest extends TestCase
$this->assertEquals('', $builder->getAudience());
}
+
+
+ public function testAddDuplicatePayloadKey()
+ {
+ $builder = new TokenBuilder();
+
+ $builder->setIssuer('127.0.0.1')
+ ->setExpiration(Carbon::now()->addMinutes(10)->toDateTimeString())
+ ->addPayload(['key' => 'id', 'value' => 'hello'])
+ ->addPayload(['key' => 'id', 'value' => 'world']);
+
+ $this->assertEquals('world', json_decode($builder->getPayload())->id);
+ }
+
+ public function testCreateMultipleTokens()
+ {
+ $builder = new TokenBuilder();
+
+ $jwt1 = $builder->setIssuer('127.0.0.1')
+ ->setSecret('123ABC*$def456')
+ ->setExpiration(Carbon::now()->addMinutes(10)->toDateTimeString())
+ ->addPayload(['key' => 'id', 'value' => 'hello'])
+ ->build();
+
+ $jwt2 = $builder->setIssuer('127.0.0.1')
+ ->setSecret('123ABC*$def456')
+ ->setExpiration(Carbon::now()->addMinutes(20)->toDateTimeString())
+ ->addPayload(['key' => 'id', 'value' => 'hello'])
+ ->build();
+
+ $this->assertNotEquals($jwt1, $jwt2);
+ }
+
+ public function testCreateMultipleTokensTwo()
+ {
+ $builder = new TokenBuilder();
+
+ $jwt1 = $builder->setIssuer('127.0.0.1')
+ ->setSecret('123ABC*$def456')
+ ->setExpiration(Carbon::now()->addMinutes(10)->toDateTimeString())
+ ->addPayload(['key' => 'id', 'value' => 'hello'])
+ ->build();
+
+ $jwt2 = $builder->setIssuer('localhost')
+ ->setSecret('123ABC*$def456')
+ ->setExpiration(Carbon::now()->addMinutes(10)->toDateTimeString())
+ ->addPayload(['key' => 'id', 'value' => 'world'])
+ ->build();
+
+ $this->assertNotEquals($jwt1, $jwt2);
+ }
}
|
Fixed core issue that multiple tokens cannot be created at the same time via a tear down method, this isn't great, but seems to work.
|
RobDWaller_ReallySimpleJWT
|
train
|
76d556b872c9b5031913b4b8c2769bf24d1dad5a
|
diff --git a/lib/solargraph/shell.rb b/lib/solargraph/shell.rb
index <HASH>..<HASH> 100755
--- a/lib/solargraph/shell.rb
+++ b/lib/solargraph/shell.rb
@@ -96,8 +96,8 @@ module Solargraph
puts "Deleting the cached documentation"
Solargraph::YardMap::CoreDocs.clear
end
- map 'clear-cache'.to_sym => :clear
- map 'clear-cores'.to_sym => :clear
+ map 'clear-cache' => :clear
+ map 'clear-cores' => :clear
desc 'uncache GEM [...GEM]', "Delete cached gem documentation"
def uncache *gems
@@ -179,6 +179,7 @@ module Solargraph
desc 'bundle', 'Generate documentation for bundled gems'
option :directory, type: :string, aliases: :d, desc: 'The workspace directory', default: '.'
+ option :rebuild, type: :boolean, aliases: :r, desc: 'Rebuild existing documentation', default: false
def bundle
Documentor.new(options[:directory]).document
end
|
Rebuild option for bundle subcommand.
|
castwide_solargraph
|
train
|
401e333e9080366ee58e21a9dc2a16ecdefb5d93
|
diff --git a/src/core.php b/src/core.php
index <HASH>..<HASH> 100644
--- a/src/core.php
+++ b/src/core.php
@@ -320,12 +320,12 @@ function formatException($e) {
function log_report($report, $file, $action='', $message='') {
if( !is_scalar($report) ) {
if( $report instanceof Exception ) {
- $exception = $report;
+ $exception = $report;
}
$report = 'NON-SCALAR::'.stringify($report);//."\n".print_r($report, 1);
}
- $Error = array('date' => date('c'), 'report' => $report, 'action' => $action);
- $logFilePath = ((defined("LOGSPATH") && is_dir(LOGSPATH)) ? LOGSPATH : '').$file;
+ $Error = array('id'=>uniqid('OL', true), 'date' => date('c'), 'report' => $report, 'action' => $action);
+ $logFilePath = ((defined("LOGSPATH") && is_dir(LOGSPATH)) ? LOGSPATH : '').$file;
try {
file_put_contents($logFilePath, json_encode($Error)."\n", FILE_APPEND);
} catch( Exception $e ) {
|
Add an unique id to log reports
|
Sowapps_orpheus-core
|
train
|
369d6cd6798fe386ba3cc7e687e149e1b87770e4
|
diff --git a/src/main/java/de/beyondjava/jsf/ajax/differentialContextWriter/differenceEngine/DiffenceEngine.java b/src/main/java/de/beyondjava/jsf/ajax/differentialContextWriter/differenceEngine/DiffenceEngine.java
index <HASH>..<HASH> 100644
--- a/src/main/java/de/beyondjava/jsf/ajax/differentialContextWriter/differenceEngine/DiffenceEngine.java
+++ b/src/main/java/de/beyondjava/jsf/ajax/differentialContextWriter/differenceEngine/DiffenceEngine.java
@@ -94,7 +94,7 @@ public class DiffenceEngine {
for (Node d : differences) {
System.out.println("Difference: " + d);
}
- generateJUnitTest(newHTML, lastKnownCorrespondingNode, differences);
+ // generateJUnitTest(newHTML, lastKnownCorrespondingNode, differences);
return differences;
}
|
deactivate automatic generation of JUnit tests
|
stephanrauh_AngularFaces
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.