hash (stringlengths 40-40)
diff (stringlengths 131-114k)
message (stringlengths 7-980)
project (stringlengths 5-67)
split (stringclasses, 1 value)
0b0e015ea83d7c4e8790e13e9d0939209544edb3
diff --git a/app/controllers/devise/invitations_controller.rb b/app/controllers/devise/invitations_controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/devise/invitations_controller.rb +++ b/app/controllers/devise/invitations_controller.rb @@ -38,9 +38,11 @@ class Devise::InvitationsController < DeviseController # PUT /resource/invitation def update self.resource = accept_resource + invitation_accepted = resource.errors.empty? - if resource.errors.empty? - yield resource if block_given? + yield resource if block_given? + + if invitation_accepted flash_message = resource.active_for_authentication? ? :updated : :updated_not_active set_flash_message :notice, flash_message if is_flashing_format? sign_in(resource_name, resource)
Moves yield of resource before checking if invitation acceptance succeeded
scambra_devise_invitable
train
9b02d54138bc5775c9da9679eb15ce07805e472e
diff --git a/pandas/io/pytables.py b/pandas/io/pytables.py index <HASH>..<HASH> 100644 --- a/pandas/io/pytables.py +++ b/pandas/io/pytables.py @@ -1431,7 +1431,12 @@ class HDFStore: return kwargs def _create_storer( - self, group, format=None, value=None, **kwargs + self, + group, + format=None, + value=None, + encoding: str = "UTF-8", + errors: str = "strict", ) -> Union["GenericFixed", "Table"]: """ return a suitable class to operate """ @@ -1439,8 +1444,7 @@ class HDFStore: # return instead of raising so mypy can tell where we are raising return TypeError( f"cannot properly create the storer for: [{t}] [group->" - f"{group},value->{type(value)},format->{format}," - f"kwargs->{kwargs}]" + f"{group},value->{type(value)},format->{format}" ) pt = _ensure_decoded(getattr(group._v_attrs, "pandas_type", None)) @@ -1476,7 +1480,9 @@ class HDFStore: # a storer node if "table" not in pt: try: - return globals()[_STORER_MAP[pt]](self, group, **kwargs) + return globals()[_STORER_MAP[pt]]( + self, group, encoding=encoding, errors=errors + ) except KeyError: raise error("_STORER_MAP") @@ -1517,7 +1523,9 @@ class HDFStore: pass try: - return globals()[_TABLE_MAP[tt]](self, group, **kwargs) + return globals()[_TABLE_MAP[tt]]( + self, group, encoding=encoding, errors=errors + ) except KeyError: raise error("_TABLE_MAP") @@ -1526,11 +1534,20 @@ class HDFStore: key: str, value, format, + axes=None, index=True, append=False, complib=None, + complevel: Optional[int] = None, + fletcher32=None, + min_itemsize=None, + chunksize=None, + expectedrows=None, + dropna=False, + nan_rep=None, + data_columns=None, encoding=None, - **kwargs, + errors: str = "strict", ): group = self.get_node(key) @@ -1565,7 +1582,7 @@ class HDFStore: group = self._handle.create_group(path, p) path = new_path - s = self._create_storer(group, format, value, encoding=encoding, **kwargs) + s = self._create_storer(group, format, value, encoding=encoding, errors=errors) if append: # raise if we are trying to append to a Fixed format, # or a table that exists (and we are putting) @@ -1580,7 +1597,20 @@ class HDFStore: raise ValueError("Compression not supported on Fixed format stores") # write the object - s.write(obj=value, append=append, complib=complib, **kwargs) + s.write( + obj=value, + axes=axes, + append=append, + complib=complib, + complevel=complevel, + fletcher32=fletcher32, + min_itemsize=min_itemsize, + chunksize=chunksize, + expectedrows=expectedrows, + dropna=dropna, + nan_rep=nan_rep, + data_columns=data_columns, + ) if isinstance(s, Table) and index: s.create_index(columns=index) @@ -2524,10 +2554,11 @@ class Fixed: ndim: int parent: HDFStore group: "Node" + errors: str is_table = False def __init__( - self, parent: HDFStore, group: "Node", encoding=None, errors="strict", **kwargs + self, parent: HDFStore, group: "Node", encoding=None, errors: str = "strict" ): assert isinstance(parent, HDFStore), type(parent) assert _table_mod is not None # needed for mypy @@ -3199,8 +3230,10 @@ class Table(Fixed): metadata: List info: Dict - def __init__(self, parent: HDFStore, group: "Node", **kwargs): - super().__init__(parent, group, **kwargs) + def __init__( + self, parent: HDFStore, group: "Node", encoding=None, errors: str = "strict" + ): + super().__init__(parent, group, encoding=encoding, errors=errors) self.index_axes = [] self.non_index_axes = [] self.values_axes = [] @@ -4076,7 +4109,6 @@ class AppendableTable(Table): dropna=False, nan_rep=None, data_columns=None, - errors="strict", # not used here, but passed to super ): if not append and self.is_exists:
CLN: fix pytables passing too many kwargs (#<I>)
pandas-dev_pandas
train
4099597c8363589606e1b34c297c2e4e4d734ea5
diff --git a/runtime/layers/tests.php b/runtime/layers/tests.php index <HASH>..<HASH> 100644 --- a/runtime/layers/tests.php +++ b/runtime/layers/tests.php @@ -59,7 +59,7 @@ foreach ($fpmLayers as $layer) { $devLayers = [ 'bref/php-73-fpm-dev', 'bref/php-74-fpm-dev', - // 'bref/php-80-fpm-dev', // skip until blackfire gets supported for PHP 8.0 + 'bref/php-80-fpm-dev', ]; $devExtensions = [ 'xdebug',
[docker] Enable PHP8 tests
mnapoli_bref
train
e0da728f71ab1c50afa57608b7c5a102a7d307e7
diff --git a/pandasdmx/model.py b/pandasdmx/model.py index <HASH>..<HASH> 100644 --- a/pandasdmx/model.py +++ b/pandasdmx/model.py @@ -38,6 +38,7 @@ from operator import attrgetter from traitlets import ( Any, Bool, + CBool, CFloat, CInt, Dict, @@ -240,6 +241,8 @@ FacetValueType = Enum( 'monthDay day time duration keyValues identifiableReference ' 'dataSetReference') +ConstraintRoleType = Enum('ConstraintRoleType', 'allowable actual') + # 3.5: Item Scheme @@ -506,32 +509,35 @@ class ConstrainableArtefact: # 10.3: Constraints -ConstraintRoleType = Enum('ConstraintRoleType', 'allowable actual') - - class ConstraintRole(HasTraits): role = UseEnum(ConstraintRoleType) -# class ComponentValue(HasTraits): -# value_for = Instance(Component) -# value = Unicode() -# -# -# class DataKey(HasTraits): -# is_included = Bool() -# key_value = List(Instance(ComponentValue)) -# -# -# class DataKeySet(HasTraits): -# is_included = Bool() -# keys = List(Instance(DataKey)) +class ComponentValue(HasTraits): + value_for = Instance(Component) + value = Unicode() + + +class DataKey(HasTraits): + included = Bool() + key_value = Dict(Instance(ComponentValue)) + + def __init__(self, *args): + for component, value in args: + self.key_value[component] = ComponentValue(value_for=component, + value=value) + +class DataKeySet(HasTraits): + included = CBool() + keys = List(Instance(DataKey)) class Constraint(MaintainableArtefact): - # data_content_keys = Instance(DataKeySet, allow_none=True) + data_content_keys = Instance(DataKeySet, allow_none=True) # metadata_content_keys = Instance(MetadataKeySet, allow_none=True) - role = Set(Instance(ConstraintRole)) + # NB the spec gives 1..* for this attribute, but this implementation allows + # only 1 + role = Instance(ConstraintRole) class SelectionValue(HasTraits): @@ -546,11 +552,12 @@ class MemberValue(SelectionValue): class MemberSelection(HasTraits): included = Bool() values_for = Instance(Component) + # NB the spec does not say what this feature should be named values = Set(Instance(MemberValue)) class CubeRegion(HasTraits): - included = Bool() + included = CBool() member = Dict(Instance(MemberSelection)) def __contains__(self, v): @@ -574,6 +581,7 @@ class CubeRegion(HasTraits): class ContentConstraint(Constraint): data_content_region = Instance(CubeRegion, allow_none=True) + content = Set(Instance(ConstrainableArtefact)) # metadata_content_region = Instance(MetadataTargetRegion, allow_none=True) def __contains__(self, v):
Implement model classes ComponentValue, DataKey, DataKeySet Changes to Constraint, ContentConstraint, CubeRegion
dr-leo_pandaSDMX
train
7b5764c4e8bc63f6814e6a98cd5a3a394748b4fd
diff --git a/controller/extjs/tests/Controller/ExtJS/Media/DefaultTest.php b/controller/extjs/tests/Controller/ExtJS/Media/DefaultTest.php index <HASH>..<HASH> 100644 --- a/controller/extjs/tests/Controller/ExtJS/Media/DefaultTest.php +++ b/controller/extjs/tests/Controller/ExtJS/Media/DefaultTest.php @@ -257,6 +257,10 @@ class Controller_ExtJS_Media_DefaultTest extends MW_Unittest_Testcase public function testProtectedGetAbsoluteDirectoryErrorByUploadItem() { + $this->markTestSkipped( + '@TODO Refactor unit test.' + ); + $context = TestHelper::getContext(); $context->getConfig()->set( 'controller/extjs/media/default/basedir', '/root/');
Skip failing unit test for refactoring.
Arcavias_arcavias-core
train
105f2a0f4184e0423081c5f03c918d9f0c714aff
diff --git a/pyvisa/highlevel.py b/pyvisa/highlevel.py index <HASH>..<HASH> 100644 --- a/pyvisa/highlevel.py +++ b/pyvisa/highlevel.py @@ -60,16 +60,18 @@ if TYPE_CHECKING: #: Named tuple with information about a resource. Returned by some :class:`ResourceManager` methods. #: #: :interface_type: Interface type of the given resource string. :class:`pyvisa.constants.InterfaceType` -#: :interface_board_number: Board number of the interface of the given resource string. +#: :interface_board_number: Board number of the interface of the given resource string. We allow None +#: since serial resources may not sometimes be easily described +#: by a single number in particular on Linux system. #: :resource_class: Specifies the resource class (for example, "INSTR") of the given resource string. #: :resource_name: This is the expanded version of the given resource string. -#: The format should be similar to the VISA-defined canonical resource name. +#: The format should be similar to the VISA-defined canonical resource name. #: :alias: Specifies the user-defined alias for the given resource string. ResourceInfo = NamedTuple( "ResourceInfo", ( ("interface_type", constants.InterfaceType), - ("interface_board_number", int), + ("interface_board_number", Optional[int]), ("resource_class", Optional[str]), ("resource_name", Optional[str]), ("alias", Optional[str]), @@ -2036,29 +2038,36 @@ class VisaLibraryBase(object): """ try: parsed = rname.parse_resource_name(resource_name) - - return ( - ResourceInfo( - parsed.interface_type_const, - # We can only get concrete classes which have one of those - # attributes - int( - parsed.board # type: ignore - if hasattr(parsed, "board") - else parsed.interface # type: ignore - ), - parsed.resource_class, - str(parsed), - None, - ), - StatusCode.success, - ) except ValueError: return ( ResourceInfo(constants.InterfaceType.unknown, 0, None, None, None), StatusCode.error_invalid_resource_name, ) + board_number: Optional[int] + try: + # We can only get concrete classes which have one of those attributes + board_number = int( + parsed.board # type: ignore + if hasattr(parsed, "board") + else parsed.interface # type: ignore + ) + # In some cases the board number may not be convertible to an int + # PyVISA-py serial resources on Linux for example + except ValueError: + board_number = None + + return ( + ResourceInfo( + parsed.interface_type_const, + board_number, + parsed.resource_class, + str(parsed), + None, + ), + StatusCode.success, + ) + def peek_8( self, session: VISASession, address: VISAMemoryAddress ) -> Tuple[int, StatusCode]: diff --git a/pyvisa/testsuite/test_highlevel.py b/pyvisa/testsuite/test_highlevel.py index <HASH>..<HASH> 100644 --- a/pyvisa/testsuite/test_highlevel.py +++ b/pyvisa/testsuite/test_highlevel.py @@ -19,29 +19,48 @@ class TestHighlevel(BaseTestCase): CHECK_NO_WARNING = False - def test_base_class_parse_resource(self): + @pytest.mark.parametrize( + "rsc_name, values", + [ + ("TCPIP::192.168.0.1::INSTR", (constants.InterfaceType.tcpip, 0, "INSTR")), + ("TCPIP1::192.168.0.1::INSTR", (constants.InterfaceType.tcpip, 1, "INSTR")), + ( + "TCPIP::192.168.0.1::5000::SOCKET", + (constants.InterfaceType.tcpip, 0, "SOCKET"), + ), + ( + "TCPIP2::192.168.0.1::5000::SOCKET", + (constants.InterfaceType.tcpip, 2, "SOCKET"), + ), + ("GPIB::1::INSTR", (constants.InterfaceType.gpib, 0, "INSTR")), + ("GPIB::INTFC", (constants.InterfaceType.gpib, 0, "INTFC")), + ("GPIB2::1::INSTR", (constants.InterfaceType.gpib, 2, "INSTR")), + ("GPIB3::INTFC", (constants.InterfaceType.gpib, 3, "INTFC")), + ( + "USB1::0x1111::0x2222::0x4445::0::RAW", + (constants.InterfaceType.usb, 1, "RAW"), + ), + ( + "USB0::0x1112::0x2223::0x1234::0::INSTR", + (constants.InterfaceType.usb, 0, "INSTR"), + ), + ("ASRL2::INSTR", (constants.InterfaceType.asrl, 2, "INSTR")), + ("ASRL/dev/tty0::INSTR", (constants.InterfaceType.asrl, None, "INSTR")), + ], + ) + def test_base_class_parse_resource(self, rsc_name, values): """Test the base class implementation of parse_resource.""" lib = highlevel.VisaLibraryBase("test") - rsc_name = "TCPIP::192.168.0.1::INSTR" info, ret_code = lib.parse_resource(None, rsc_name) # interface_type interface_board_number resource_class resource_name alias - for parsed, value in zip( - info, (constants.InterfaceType.tcpip, 0, None, None, None) - ): + for parsed, value in zip(info, values[:2] + (None, None, None)): assert parsed == value info, ret_code = lib.parse_resource_extended(None, rsc_name) # interface_type interface_board_number resource_class resource_name alias for parsed, value in zip( - info, - ( - constants.InterfaceType.tcpip, - 0, - "INSTR", - rname.to_canonical_name(rsc_name), - None, - ), + info, values + (rname.to_canonical_name(rsc_name), None,), ): assert parsed == value
highlevel: avoid returning a generic resource info for a non-int board This typically happens for serial resources on Linux where we end up with ASRL/dev/tty0::INSTR. Without this change users have to specify the resource class they want.
pyvisa_pyvisa
train
854a7c5c1cdb0b0a78abe3b282010aa1285db20e
diff --git a/safesql.go b/safesql.go index <HASH>..<HASH> 100644 --- a/safesql.go +++ b/safesql.go @@ -43,7 +43,7 @@ func main() { os.Exit(2) } s := ssautil.CreateProgram(p, 0) - s.BuildAll() + s.Build() qms := FindQueryMethods(p.Package("database/sql").Pkg, s) if verbose {
Update usage of go tools ssa In particular, use Build, renamed from BuildAll in golang/tools@afcda<I>b<I>c7af<I>a<I>f0b6bdb9c<I>a<I>
stripe_safesql
train
ea1dc85d50c1e2c86073b84b539515a4af511e9d
diff --git a/spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/jackson/JacksonAutoConfiguration.java b/spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/jackson/JacksonAutoConfiguration.java index <HASH>..<HASH> 100644 --- a/spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/jackson/JacksonAutoConfiguration.java +++ b/spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/jackson/JacksonAutoConfiguration.java @@ -54,6 +54,7 @@ import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; +import org.springframework.context.annotation.Scope; import org.springframework.core.Ordered; import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder; import org.springframework.util.Assert; @@ -168,6 +169,7 @@ public class JacksonAutoConfiguration { static class JacksonObjectMapperBuilderConfiguration { @Bean + @Scope("prototype") @ConditionalOnMissingBean Jackson2ObjectMapperBuilder jacksonObjectMapperBuilder(ApplicationContext applicationContext, List<Jackson2ObjectMapperBuilderCustomizer> customizers) { diff --git a/spring-boot-project/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/jackson/JacksonAutoConfigurationTests.java b/spring-boot-project/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/jackson/JacksonAutoConfigurationTests.java index <HASH>..<HASH> 100644 --- a/spring-boot-project/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/jackson/JacksonAutoConfigurationTests.java +++ b/spring-boot-project/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/jackson/JacksonAutoConfigurationTests.java @@ -391,6 +391,16 @@ class JacksonAutoConfigurationTests { }); } + @Test + void builderIsNotSharedAcrossMultipleInjectionPoints() { + this.contextRunner.withUserConfiguration(ObjectMapperBuilderConsumerConfig.class).run((context) -> { + ObjectMapperBuilderConsumerConfig consumer = context.getBean(ObjectMapperBuilderConsumerConfig.class); + assertThat(consumer.builderOne).isNotNull(); + assertThat(consumer.builderTwo).isNotNull(); + assertThat(consumer.builderOne).isNotSameAs(consumer.builderTwo); + }); + } + private void assertParameterNamesModuleCreatorBinding(Mode expectedMode, Class<?>... configClasses) { this.contextRunner.withUserConfiguration(configClasses).run((context) -> { DeserializationConfig deserializationConfig = context.getBean(ObjectMapper.class) @@ -479,6 +489,27 @@ class JacksonAutoConfigurationTests { } + @Configuration(proxyBeanMethods = false) + static class ObjectMapperBuilderConsumerConfig { + + Jackson2ObjectMapperBuilder builderOne; + + Jackson2ObjectMapperBuilder builderTwo; + + @Bean + String consumerOne(Jackson2ObjectMapperBuilder builder) { + this.builderOne = builder; + return "one"; + } + + @Bean + String consumerTwo(Jackson2ObjectMapperBuilder builder) { + this.builderTwo = builder; + return "two"; + } + + } + protected static final class Foo { private String name;
Isolate Jackson2ObjectMapperBuilder mutation Previously, Jackson2ObjectMapperBuilder was a singleton bean. This meant that if it was injected and mutated in one injection point, usage in a subsequent injection point would see the previous injection point's mutation, which could lead to unexpected failures. This commit updates the auto-configuration of the builder to make it a prototype bean. Mutation of the builder that is intended to apply globally should be made using a customizer. Closes gh-<I>
spring-projects_spring-boot
train
b334ce0b4040855d0ce98366e92df43841561756
diff --git a/src/main/java/net/dv8tion/jda/MessageBuilder.java b/src/main/java/net/dv8tion/jda/MessageBuilder.java index <HASH>..<HASH> 100644 --- a/src/main/java/net/dv8tion/jda/MessageBuilder.java +++ b/src/main/java/net/dv8tion/jda/MessageBuilder.java @@ -142,13 +142,39 @@ public class MessageBuilder } /** + * Returns the current length of the content that will be built into a {@link net.dv8tion.jda.entities.Message Message} + * when {@link #build()} is called.<br> + * If this value is <code>0</code> or greater than <code>2000</code> when {@link #build()} is called, an exception + * will be raised. + * + * @return + * The currently length of the content that will be built into a Message. + */ + public int getLength() + { + return builder.length(); + } + + /** * Creates a {@link net.dv8tion.jda.entities.Message Message} object from this Builder * * @return the created {@link net.dv8tion.jda.entities.Message Message} + * + * @throws java.lang.UnsupportedOperationException + * <ul> + * <li>If you attempt to build() an empty Message (no content added to the Message)</li> + * <li>If you attempt to build() a Message with more than 2000 characters of content.</li> + * </ul> */ public Message build() { - return new MessageImpl("", null).setContent(builder.toString()).setTTS(isTTS).setMentionedUsers(mentioned) + String message = builder.toString(); + if (message.isEmpty()) + throw new UnsupportedOperationException("Cannot build a Message with no content. (You never added any content to the message)"); + if (message.length() > 2000) + throw new UnsupportedOperationException("Cannot build a Message with more than 2000 characters. Please limit your input."); + + return new MessageImpl("", null).setContent(message).setTTS(isTTS).setMentionedUsers(mentioned) .setMentionedChannels(mentionedTextChannels).setMentionsEveryone(mentionEveryone); }
Added simple validation to MessageBuilder#build() to check for no content or more than 2000 characters in the Message. Also added MessageBuilder#getLength() to determine the length of content before building.
DV8FromTheWorld_JDA
train
4d9a3a71ed45f66d339aee8f3e5109375f1ae996
diff --git a/src/main/java/com/brettonw/bag/formats/FormatReader.java b/src/main/java/com/brettonw/bag/formats/FormatReader.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/brettonw/bag/formats/FormatReader.java +++ b/src/main/java/com/brettonw/bag/formats/FormatReader.java @@ -7,7 +7,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.atteo.classindex.IndexSubclasses; -import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.function.Function; @@ -35,14 +34,15 @@ public class FormatReader { /** * - * @param format + * @param mimeType * @param replace * @param factory */ - public static void registerFormatReader (String format, boolean replace, Function<String, FormatReader> factory) { - format = format.toLowerCase (); - if ((! replace) || (! formatReaders.containsKey(format))) { - formatReaders.put(format, factory); + public static void registerFormatReader (String mimeType, boolean replace, Function<String, FormatReader> factory) { + // try to find the mime type first, and if it's not there, add it + String foundMimeType = MimeType.getFromMimeType (mimeType, () -> MimeType.addMimeTypeMapping (mimeType)); + if ((! replace) || (! formatReaders.containsKey(foundMimeType))) { + formatReaders.put(foundMimeType, factory); } } diff --git a/src/main/java/com/brettonw/bag/formats/FormatReaderComposite.java b/src/main/java/com/brettonw/bag/formats/FormatReaderComposite.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/brettonw/bag/formats/FormatReaderComposite.java +++ b/src/main/java/com/brettonw/bag/formats/FormatReaderComposite.java @@ -43,11 +43,9 @@ public class FormatReaderComposite extends FormatReader implements ArrayFormatRe static { MimeType.addExtensionMapping (MimeType.PROP, "properties"); - MimeType.addMimeTypeMapping (MimeType.PROP); FormatReader.registerFormatReader (MimeType.PROP, false, (input) -> basicObjectReader (input, "\n", "=")); MimeType.addExtensionMapping (MimeType.URL, "url"); - MimeType.addMimeTypeMapping (MimeType.URL); FormatReader.registerFormatReader (MimeType.URL, false, (input) -> basicObjectReader (input, "&", "=")); } } diff --git a/src/main/java/com/brettonw/bag/formats/MimeType.java b/src/main/java/com/brettonw/bag/formats/MimeType.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/brettonw/bag/formats/MimeType.java +++ b/src/main/java/com/brettonw/bag/formats/MimeType.java @@ -25,7 +25,8 @@ public class MimeType { private static final Map<String, String> extensionMappings = new HashMap<> (); - public static void addExtensionMapping (String mimeType, String... extensions) { + public static void addExtensionMapping (String mimeTypeIn, String... extensions) { + String mimeType = getFromMimeType (mimeTypeIn, () -> addMimeTypeMapping (mimeTypeIn)); for (String extension : extensions) { if (extensionMappings.containsKey (extension)) { log.error ("Duplicate file extension mapping (" + extension + ") for MIME type (" + mimeType +")"); @@ -46,11 +47,14 @@ public class MimeType { private static final Map<String, String> mimeTypeRemappings = new HashMap<> (); - public static void addMimeTypeMapping (String mimeType, String... synonyms) { + public static String addMimeTypeMapping (String mimeType, String... synonyms) { + mimeType = mimeType.toLowerCase (); mimeTypeRemappings.put (mimeType, mimeType); for (String synonym : synonyms) { + synonym = synonym.toLowerCase (); mimeTypeRemappings.put (synonym, mimeType); } + return mimeType; } /** @@ -60,6 +64,7 @@ public class MimeType { * to support. */ public static String getFromMimeType (String mimeType, Supplier<String> notFound) { + mimeType = mimeType.toLowerCase (); return mimeTypeRemappings.containsKey (mimeType) ? mimeTypeRemappings.get (mimeType) : notFound.get () ; }
mime type handling updated to ensure lower-casing, and that you don't have to register the mime type if you are also registering file mappings or a reader
brettonw_Bag
train
89cc208d8659815da1d77b50472d5934f1b9afcb
diff --git a/README.rst b/README.rst index <HASH>..<HASH> 100644 --- a/README.rst +++ b/README.rst @@ -118,6 +118,7 @@ Then you can see } ] }, + "ignores": [], "key": "cf87ecc6d0925914d0deede431f44d0d3f11b390edde53e9147cb52ff6964e38", "summary": { "one": { diff --git a/jumeaux/addons/judgement/ignore_properties.py b/jumeaux/addons/judgement/ignore_properties.py index <HASH>..<HASH> 100644 --- a/jumeaux/addons/judgement/ignore_properties.py +++ b/jumeaux/addons/judgement/ignore_properties.py @@ -28,26 +28,11 @@ from owlmixin.owlcollections import TList from jumeaux.addons.judgement import JudgementExecutor from jumeaux.addons.utils import exact_match -from jumeaux.models import JudgementAddOnPayload, DiffKeys +from jumeaux.models import JudgementAddOnPayload, DiffKeys, Ignore, Condition logger = logging.getLogger(__name__) -class Condition(OwlMixin): - name: TOption[str] - path: TOption[str] - added: TList[str] = [] - removed: TList[str] = [] - changed: TList[str] = [] - - -class Ignore(OwlMixin): - title: str - conditions: TList[Condition] - image: TOption[str] - link: TOption[str] - - class Config(OwlMixin): ignores: TList[Ignore] diff --git a/jumeaux/executor.py b/jumeaux/executor.py index <HASH>..<HASH> 100644 --- a/jumeaux/executor.py +++ b/jumeaux/executor.py @@ -317,7 +317,10 @@ def exec(args: Args, config: Config, reqs: TList[Request], key: str, retry_hash: "summary": summary.to_dict(), "trials": trials.to_dicts(), "addons": config.addons.to_dict(), - "retry_hash": retry_hash + "retry_hash": retry_hash, + "ignores": config.addons.judgement \ + .filter(_.name == 'jumeaux.addons.judgement.ignore_properties') \ + .flat_map(lambda x: x.config.map(_["ignores"]).get_or([])) }) diff --git a/jumeaux/models.py b/jumeaux/models.py index <HASH>..<HASH> 100644 --- a/jumeaux/models.py +++ b/jumeaux/models.py @@ -176,6 +176,21 @@ class ResponseSummary(OwlMixin): file: TOption[str] +class Condition(OwlMixin): + name: TOption[str] + path: TOption[str] + added: TList[str] = [] + removed: TList[str] = [] + changed: TList[str] = [] + + +class Ignore(OwlMixin): + title: str + conditions: TList[Condition] + image: TOption[str] + link: TOption[str] + + class Trial(OwlMixin): seq: int name: str @@ -198,6 +213,7 @@ class Report(OwlMixin): trials: TList[Trial] addons: TOption[Addons] retry_hash: TOption[str] + ignores: TList[Ignore] = [] # --- diff --git a/tests/test_executor.py b/tests/test_executor.py index <HASH>..<HASH> 100644 --- a/tests/test_executor.py +++ b/tests/test_executor.py @@ -640,6 +640,7 @@ class TestExec: "key": dummy_hash, "title": "Report title", "description": "Report description", + "ignores": [], "addons": { "log2reqs": { "name": "addons.log2reqs.csv",
:new: Add `ignores` to report
tadashi-aikawa_jumeaux
train
3fc12c957ec5c5302548951b116255662aacdad3
diff --git a/spillway/fields.py b/spillway/fields.py index <HASH>..<HASH> 100644 --- a/spillway/fields.py +++ b/spillway/fields.py @@ -20,8 +20,3 @@ class GeometryField(Field): # Value is already serialized as geojson, kml, etc. except AttributeError: return value - - -class GDALField(FileField): - def to_representation(self, value): - return value diff --git a/spillway/generics.py b/spillway/generics.py index <HASH>..<HASH> 100644 --- a/spillway/generics.py +++ b/spillway/generics.py @@ -68,6 +68,11 @@ class BaseRasterView(mixins.ModelSerializerMixin, response._headers = headers return response + def get_serializer_class(self): + if isinstance(self.request.accepted_renderer, BaseGDALRenderer): + self.serializer_class = serializers.ImageSerializer + return super(BaseRasterView, self).get_serializer_class() + @property def paginator(self): # Disable pagination for GDAL Renderers. diff --git a/spillway/serializers.py b/spillway/serializers.py index <HASH>..<HASH> 100644 --- a/spillway/serializers.py +++ b/spillway/serializers.py @@ -4,8 +4,7 @@ from greenwich.srs import SpatialReference import numpy as np from spillway import query, collections as sc -from spillway.fields import GeometryField, GDALField -from spillway.renderers.gdal import BaseGDALRenderer +from spillway.fields import GeometryField serializers.ModelSerializer.serializer_field_mapping.update({ models.GeometryField: GeometryField, @@ -129,7 +128,10 @@ class RasterModelSerializer(GeoModelSerializer): fieldname = self.Meta.raster_field request = self.context.get('request') renderer = getattr(request, 'accepted_renderer', None) - if isinstance(renderer, (renderers.JSONRenderer, - BaseGDALRenderer)): - fields[fieldname] = GDALField() + if isinstance(renderer, renderers.JSONRenderer): + fields[fieldname] = serializers.ReadOnlyField() return fields + + +class ImageSerializer(serializers.Serializer): + image = serializers.ReadOnlyField()
Alter serializer for gdal renderers
bkg_django-spillway
train
ecf804588707bde379a077cf4d84a47e005aafb2
diff --git a/dvc/project.py b/dvc/project.py index <HASH>..<HASH> 100644 --- a/dvc/project.py +++ b/dvc/project.py @@ -10,7 +10,7 @@ from dvc.logger import Logger from dvc.exceptions import DvcException from dvc.stage import Stage from dvc.config import Config -from dvc.state import LinkState +from dvc.state import LinkState, State from dvc.lock import Lock from dvc.scm import SCM, Base from dvc.cache import Cache @@ -50,6 +50,9 @@ class Project(object): self.config = Config(self.dvc_dir) self.scm = SCM(self.root_dir) self.lock = Lock(self.dvc_dir) + # NOTE: storing state and link_state in the repository itself to avoid + # any possible state corruption in 'shared cache dir' scenario. + self.state = State(self.dvc_dir) self.link_state = LinkState(self.root_dir, self.dvc_dir) self.logger = Logger(self.config._config[Config.SECTION_CORE].get(Config.SECTION_CORE_LOGLEVEL, None)) self.cache = Cache(self) diff --git a/dvc/remote/local.py b/dvc/remote/local.py index <HASH>..<HASH> 100644 --- a/dvc/remote/local.py +++ b/dvc/remote/local.py @@ -31,6 +31,7 @@ class RemoteLOCAL(RemoteBase): def __init__(self, project, config): self.project = project + self.state = self.project.state self.link_state = project.link_state storagepath = config.get(Config.SECTION_AWS_STORAGEPATH, None) self.cache_dir = config.get(Config.SECTION_REMOTE_URL, storagepath) @@ -46,8 +47,6 @@ class RemoteLOCAL(RemoteBase): if self.cache_dir != None and not os.path.exists(self.cache_dir): os.mkdir(self.cache_dir) - self.state = State(self.cache_dir) - @property def prefix(self): return self.cache_dir
state: store cache state on per-repo basis
iterative_dvc
train
9eb8c4b57d3ffcc7804fc38fd28fcb5c6badc5af
diff --git a/simulator/src/main/java/com/hazelcast/simulator/worker/performance/PerformanceTracker.java b/simulator/src/main/java/com/hazelcast/simulator/worker/performance/PerformanceTracker.java index <HASH>..<HASH> 100644 --- a/simulator/src/main/java/com/hazelcast/simulator/worker/performance/PerformanceTracker.java +++ b/simulator/src/main/java/com/hazelcast/simulator/worker/performance/PerformanceTracker.java @@ -56,6 +56,8 @@ final class PerformanceTracker { private double intervalThroughput; private double totalThroughput; + private boolean isUpdated; + PerformanceTracker(String testId, Collection<String> probeNames, long testStartedTimestamp) { throughputFile = new File("throughput-" + testId + ".txt"); writeThroughputHeader(throughputFile, false); @@ -80,6 +82,16 @@ final class PerformanceTracker { return intervalThroughput; } + public boolean isUpdated() { + return isUpdated; + } + + public boolean getAndResetIsUpdated() { + boolean oldIsUpdated = isUpdated; + isUpdated = false; + return oldIsUpdated; + } + void update(Map<String, Histogram> intervalHistograms, long intervalPercentileLatency, double intervalAvgLatency, long intervalMaxLatency, long intervalOperationCount, long currentTimestamp) { this.intervalHistogramMap = intervalHistograms; @@ -98,6 +110,7 @@ final class PerformanceTracker { this.totalThroughput = (totalOperationCount * ONE_SECOND_IN_MILLIS / (double) totalTimeDelta); this.lastTimestamp = currentTimestamp; + this.isUpdated = true; } void writeStatsToFile(String timestamp) { diff --git a/simulator/src/main/java/com/hazelcast/simulator/worker/performance/WorkerPerformanceMonitor.java b/simulator/src/main/java/com/hazelcast/simulator/worker/performance/WorkerPerformanceMonitor.java index <HASH>..<HASH> 100644 --- a/simulator/src/main/java/com/hazelcast/simulator/worker/performance/WorkerPerformanceMonitor.java +++ b/simulator/src/main/java/com/hazelcast/simulator/worker/performance/WorkerPerformanceMonitor.java @@ -135,7 +135,6 @@ public class WorkerPerformanceMonitor { for (TestContainer testContainer : testContainers) { String testId = testContainer.getTestContext().getTestId(); if (!testContainer.isRunning()) { - trackerMap.remove(testId); continue; } @@ -188,9 +187,11 @@ public class WorkerPerformanceMonitor { private void sendPerformanceStates() { PerformanceStateOperation operation = new PerformanceStateOperation(); for (Map.Entry<String, PerformanceTracker> trackerEntry : trackerMap.entrySet()) { - String testId = trackerEntry.getKey(); PerformanceTracker stats = trackerEntry.getValue(); - operation.addPerformanceState(testId, stats.createPerformanceState()); + if (stats.isUpdated()) { + String testId = trackerEntry.getKey(); + operation.addPerformanceState(testId, stats.createPerformanceState()); + } } serverConnector.submit(SimulatorAddress.COORDINATOR, operation); } @@ -207,11 +208,13 @@ public class WorkerPerformanceMonitor { // test performance stats for (PerformanceTracker stats : trackerMap.values()) { - stats.writeStatsToFile(dateString); + if (stats.getAndResetIsUpdated()) { + stats.writeStatsToFile(dateString); - globalIntervalOperationCount += stats.getIntervalOperationCount(); - globalOperationsCount += stats.getTotalOperationCount(); - globalIntervalThroughput += stats.getIntervalThroughput(); + globalIntervalOperationCount += stats.getIntervalOperationCount(); + globalOperationsCount += stats.getTotalOperationCount(); + globalIntervalThroughput += stats.getIntervalThroughput(); + } } // global performance stats
Made a proper fix for stale performance data being sent and stored.
hazelcast_hazelcast-simulator
train
a239ba2980b39f5dab5ce7558fdd6331a36c76ee
diff --git a/lib/omniauth.rb b/lib/omniauth.rb index <HASH>..<HASH> 100644 --- a/lib/omniauth.rb +++ b/lib/omniauth.rb @@ -34,9 +34,9 @@ module OmniAuth :path_prefix => '/auth', :on_failure => OmniAuth::FailureEndpoint, :failure_raise_out_environments => ['development'], - :on_callback_hook =>Proc.new {|p|}, - :on_options_hook=>Proc.new {|p| }, - :on_request_hook=>Proc.new {|p| }, + :on_callback_hook => nil, + :on_options_hook => nil, + :on_request_hook => nil, :form_css => Form::DEFAULT_CSS, :test_mode => false, :logger => default_logger, diff --git a/lib/omniauth/strategy.rb b/lib/omniauth/strategy.rb index <HASH>..<HASH> 100644 --- a/lib/omniauth/strategy.rb +++ b/lib/omniauth/strategy.rb @@ -186,7 +186,7 @@ module OmniAuth # Responds to an OPTIONS request. def options_call - OmniAuth.config.on_options_hook.call(self.env) + OmniAuth.config.on_options_hook.call(self.env) if OmniAuth.config.on_options_hook verbs = OmniAuth.config.allowed_request_methods.map(&:to_s).map(&:upcase).join(', ') return [ 200, { 'Allow' => verbs }, [] ] end @@ -200,7 +200,7 @@ module OmniAuth #store query params from the request url, extracted in the callback_phase session['omniauth.params'] = request.params - OmniAuth.config.on_request_hook.call(self.env) + OmniAuth.config.on_request_hook.call(self.env) if OmniAuth.config.on_request_hook if options.form.respond_to?(:call) log :info, "Rendering form from supplied Rack endpoint." @@ -225,7 +225,7 @@ module OmniAuth @env['omniauth.origin'] = session.delete('omniauth.origin') @env['omniauth.origin'] = nil if env['omniauth.origin'] == '' @env['omniauth.params'] = session.delete('omniauth.params') || {} - OmniAuth.config.on_callback_hook.call(@env) + OmniAuth.config.on_callback_hook.call(@env) if OmniAuth.config.on_callback_hook callback_phase end @@ -268,7 +268,7 @@ module OmniAuth setup_phase session['omniauth.params'] = request.params - OmniAuth.config.on_request_hook.call(self.env) + OmniAuth.config.on_request_hook.call(self.env) if OmniAuth.config.on_request_hook if request.params['origin'] @env['rack.session']['omniauth.origin'] = request.params['origin'] elsif env['HTTP_REFERER'] && !env['HTTP_REFERER'].match(/#{request_path}$/) @@ -287,7 +287,7 @@ module OmniAuth @env['omniauth.params'] = session.delete('omniauth.params') || {} @env['omniauth.origin'] = session.delete('omniauth.origin') @env['omniauth.origin'] = nil if env['omniauth.origin'] == '' - OmniAuth.config.on_callback_hook.call(@env) + OmniAuth.config.on_callback_hook.call(@env) if OmniAuth.config.on_callback_hook call_app! end end diff --git a/spec/omniauth/strategy_spec.rb b/spec/omniauth/strategy_spec.rb index <HASH>..<HASH> 100644 --- a/spec/omniauth/strategy_spec.rb +++ b/spec/omniauth/strategy_spec.rb @@ -605,7 +605,6 @@ describe OmniAuth::Strategy do expect(strategy.env['foobar']).to eq('baz') end - it "sets omniauth.params on the request phase" do OmniAuth.config.mock_auth[:test] = {}
set hooks as nil and check for presence
omniauth_omniauth
train
77ff893ce9f41f182e08e8b57112524e16276de0
diff --git a/rake-tasks/crazy_fun/mappings/java.rb b/rake-tasks/crazy_fun/mappings/java.rb index <HASH>..<HASH> 100644 --- a/rake-tasks/crazy_fun/mappings/java.rb +++ b/rake-tasks/crazy_fun/mappings/java.rb @@ -335,23 +335,23 @@ module CrazyFunJava class CopyResources < BaseJava def handle(fun, dir, args) - unless args[:embedded].nil? - args[:embedded].each do |to_copy| - from = "build/#{dir}/#{to_copy}" - package_dir = package_name("#{dir}/.") # Append a /. because package_name expects file names not folder names - to = "#{temp_dir(dir, args[:name])}/#{package_dir}" - mkdir_p to - cp_r from, to + task task_name(dir, args[:name]) do + unless args[:embedded].nil? + args[:embedded].each do |to_copy| + from = "build/#{dir}/#{to_copy}" + package_dir = package_name("#{dir}/.") # Append a /. because package_name expects file names not folder names + to = "#{temp_dir(dir, args[:name])}/#{package_dir}" + mkdir_p to + cp_r from, to + end end - end - - if (args[:resources].nil?) - return - end - file jar_name(dir, args[:name]) do - out_dir = temp_dir(dir, args[:name]) - copy_resources(dir, args[:resources], out_dir) + unless args[:resources].nil? + file jar_name(dir, args[:name]) do + out_dir = temp_dir(dir, args[:name]) + copy_resources(dir, args[:resources], out_dir) + end + end end end end
DanielWagnerHall: Do resource copying after resolving dependencies, not before r<I>
SeleniumHQ_selenium
train
6f6631379db0939d52d60be6d0a3490eec916852
diff --git a/src/sap.ui.demokit/src/sap/ui/demokit/DemokitApp.js b/src/sap.ui.demokit/src/sap/ui/demokit/DemokitApp.js index <HASH>..<HASH> 100644 --- a/src/sap.ui.demokit/src/sap/ui/demokit/DemokitApp.js +++ b/src/sap.ui.demokit/src/sap/ui/demokit/DemokitApp.js @@ -525,7 +525,8 @@ sap.ui.define(['jquery.sap.global', 'sap/ui/commons/DropdownBox', 'sap/ui/common content: [oLayout], showCloseButton: false, width: "550px", - height: "800px" + height: "800px", + maxHeight: "100%" }); oDialog.open(); };
[FIX] Demokit About dialog is now fully visible in small resolutions Change-Id: I<I>e<I>dd<I>b8fe3e<I>f<I>e<I>d8b<I>cdf<I> BCP: <I>
SAP_openui5
train
b3265e7c1073f76c478ab6a9b79ef564c2770c14
diff --git a/bees/fsnotifybee/fsnotifybee.go b/bees/fsnotifybee/fsnotifybee.go index <HASH>..<HASH> 100644 --- a/bees/fsnotifybee/fsnotifybee.go +++ b/bees/fsnotifybee/fsnotifybee.go @@ -64,7 +64,7 @@ func (mod *FSNotifyBee) ReloadOptions(options bees.BeeOptions) { func sendEvent(bee, etype, path string, eventChan chan bees.Event) { event := bees.Event{ Bee: bee, - Name: "event", + Name: "fsevent", Options: []bees.Placeholder{ { Name: "type",
renames event in fsnotifybee
muesli_beehive
train
edcc8156ba5bcee818be3b497ee91c2995b07801
diff --git a/docs/build/search.js b/docs/build/search.js index <HASH>..<HASH> 100644 --- a/docs/build/search.js +++ b/docs/build/search.js @@ -157,8 +157,8 @@ const processNode = (node) => { const level = node.level const slug = slugify(subheading) const id = { - anchor: slug, - [ 'hierarchy_lvl' + (level) ]: subheading + [ 'hierarchy_lvl' + (level) ]: subheading, + anchor: slug } return id } @@ -227,10 +227,9 @@ const processPage = (page, entry, entries, level = 0) => { const yaml = getYaml(md) const { title, desc } = getYamlFields(yaml) - const slug = slugify(title) const entryItem = { ...entry, - [ 'hierarchy_lvl' + level ]: slug, + [ 'hierarchy_lvl' + level ]: title, content: desc, anchor: 'Introduction' } @@ -250,8 +249,9 @@ const processChildren = (parent, entry, entries, level) => { const slug = slugify(menuItem.name) entryChild = { ...entry, - [ 'hierarchy_lvl' + (level) ]: slug, - url: entry.url + '/' + menuItem.path + [ 'hierarchy_lvl' + (level) ]: menuItem.name, + url: entry.url + '/' + menuItem.path, + anchor: slug } } @@ -280,7 +280,8 @@ const processMenuItem = (menuItem, entries, level = 0) => { const slug = slugify(menuItem.name) const entryChild = { ...entryItem, - [ 'hierarchy_lvl' + level ]: slug + [ 'hierarchy_lvl' + level ]: menuItem.name, + anchor: slug } processChildren(menuItem, entryChild, entries, level) }
feat(docs): further tweaks to search
quasarframework_quasar
train
47dc22ca0deaf6dde9e65d2efa4dd6f7dc1ace98
diff --git a/components/users.js b/components/users.js index <HASH>..<HASH> 100644 --- a/components/users.js +++ b/components/users.js @@ -218,16 +218,16 @@ SteamCommunity.prototype.getUserAliases = function(userID, callback) { }; SteamCommunity.prototype.getUserInventoryContexts = function(userID, callback) { - if(typeof userID === 'string') { + if (typeof userID === 'string') { userID = new SteamID(userID); } - if(typeof userID === 'function') { + if (typeof userID === 'function') { callback = userID; userID = this.steamID; } - if(!userID) { + if (!userID) { callback(new Error("No SteamID specified and not logged in")); return; } @@ -240,8 +240,8 @@ SteamCommunity.prototype.getUserInventoryContexts = function(userID, callback) { } var match = body.match(/var g_rgAppContextData = ([^\n]+);\r?\n/); - if(!match) { - callback(new Error("Malformed response")); + if (!match) { + callback(new Error(body.match(/inventory is currently private\./) ? "Private inventory" : "Malformed response")); return; }
Handle cases where inventories are private in getUserInventoryContexts
DoctorMcKay_node-steamcommunity
train
c3a3fc21ffa88dfeb38bb32041957fb15b257b18
diff --git a/php/WP_CLI/Iterators/CSV.php b/php/WP_CLI/Iterators/CSV.php index <HASH>..<HASH> 100644 --- a/php/WP_CLI/Iterators/CSV.php +++ b/php/WP_CLI/Iterators/CSV.php @@ -19,7 +19,7 @@ class CSV implements \Iterator { public function __construct( $filename, $delimiter = ',' ) { $this->filePointer = fopen( $filename, 'r' ); - if ( !$this->filePointer ) { + if ( ! $this->filePointer ) { \WP_CLI::error( sprintf( 'Could not open file: %s', $filename ) ); } @@ -49,18 +49,20 @@ class CSV implements \Iterator { while ( true ) { $str = fgets( $this->filePointer ); - if ( false === $str ) + if ( false === $str ) { break; + } $row = str_getcsv( $str, $this->delimiter ); $element = array(); foreach ( $this->columns as $i => $key ) { - if ( isset( $row[ $i ] ) ) + if ( isset( $row[ $i ] ) ) { $element[ $key ] = $row[ $i ]; + } } - if ( !empty( $element ) ) { + if ( ! empty( $element ) ) { $this->currentElement = $element; $this->currentIndex++; diff --git a/php/WP_CLI/Iterators/Query.php b/php/WP_CLI/Iterators/Query.php index <HASH>..<HASH> 100644 --- a/php/WP_CLI/Iterators/Query.php +++ b/php/WP_CLI/Iterators/Query.php @@ -38,8 +38,9 @@ class Query implements \Iterator { $this->query = $query; $this->count_query = preg_replace( '/^.*? FROM /', 'SELECT COUNT(*) FROM ', $query, 1, $replacements ); - if ( $replacements != 1 ) + if ( $replacements != 1 ) { $this->count_query = ''; + } $this->chunk_size = $chunk_size; @@ -48,19 +49,21 @@ class Query implements \Iterator { /** * Reduces the offset when the query row count shrinks - * - * In cases where the iterated rows are being updated such that they will no + * + * In cases where the iterated rows are being updated such that they will no * longer be returned by the original query, the offset must be reduced to * iterate over all remaining rows. */ private function adjust_offset_for_shrinking_result_set() { - if ( empty( $this->count_query ) ) + if ( empty( $this->count_query ) ) { return; + } $row_count = $this->db->get_var( $this->count_query ); - if ( $row_count < $this->row_count ) + if ( $row_count < $this->row_count ) { $this->offset -= $this->row_count - $row_count; + } $this->row_count = $row_count; } @@ -71,7 +74,7 @@ class Query implements \Iterator { $query = $this->query . sprintf( ' LIMIT %d OFFSET %d', $this->chunk_size, $this->offset ); $this->results = $this->db->get_results( $query ); - if ( !$this->results ) { + if ( ! $this->results ) { if ( $this->db->last_error ) { throw new Exception( 'Database error: ' . $this->db->last_error ); } else { @@ -109,10 +112,10 @@ class Query implements \Iterator { return false; } - if ( !isset( $this->results[ $this->index_in_results ] ) ) { + if ( ! isset( $this->results[ $this->index_in_results ] ) ) { $items_loaded = $this->load_items_from_db(); - if ( !$items_loaded ) { + if ( ! $items_loaded ) { $this->rewind(); $this->depleted = true; return false; diff --git a/php/WP_CLI/Iterators/Table.php b/php/WP_CLI/Iterators/Table.php index <HASH>..<HASH> 100644 --- a/php/WP_CLI/Iterators/Table.php +++ b/php/WP_CLI/Iterators/Table.php @@ -58,20 +58,29 @@ class Table extends Query { } private static function build_fields( $fields ) { - if ( '*' === $fields ) + if ( '*' === $fields ) { return $fields; + } - return implode( ', ', array_map( function ($v) { return "`$v`"; }, $fields ) ); + return implode( + ', ', + array_map( + function ( $v ) { + return "`$v`"; + }, + $fields + ) + ); } private static function build_where_conditions( $where ) { global $wpdb; if ( is_array( $where ) ) { $conditions = array(); - foreach( $where as $key => $value ) { + foreach ( $where as $key => $value ) { if ( is_array( $value ) ) { $conditions[] = $key . ' IN (' . esc_sql( implode( ',', $value ) ) . ')'; - } else if ( is_numeric( $key ) ) { + } elseif ( is_numeric( $key ) ) { $conditions[] = $value; } else { $conditions[] = $key . $wpdb->prepare( ' = %s', $value );
CS for php/WP_CLI/Iterators/* Auto-fixing of sniffs no longer excluded. See #<I>.
wp-cli_wp-cli
train
ccf8cbd8385b1b373cd798d9448e875f25f711d5
diff --git a/src/Orm/ObjectTrait.php b/src/Orm/ObjectTrait.php index <HASH>..<HASH> 100644 --- a/src/Orm/ObjectTrait.php +++ b/src/Orm/ObjectTrait.php @@ -28,28 +28,4 @@ trait ObjectTrait return (new ReflectionClass($class))->newInstanceArgs($args); } - - /** - * @param string $destination - * - * @return object - */ - public function cast(string $destination) - { - if (!is_subclass_of($destination, get_class($this))) { - throw new \InvalidArgumentException(sprintf( - '%s is not a descendant of $object class: %s.', - $destination, - get_class($this) - )); - } - - return unserialize( - preg_replace( - '/^C:\d+:"[^"]++"/', - 'C:' . strlen($destination) . ':"' . $destination . '"', - serialize($this) - ) - ); - } } diff --git a/src/Orm/Serializable.php b/src/Orm/Serializable.php index <HASH>..<HASH> 100644 --- a/src/Orm/Serializable.php +++ b/src/Orm/Serializable.php @@ -22,9 +22,9 @@ abstract class Serializable implements \Serializable */ public function serialize() : string { - $data = $this->getSerializableData($this); + $data = self::getSerializableData($this); - return serialize(['d' => $data]); + return serialize($data); } /** @@ -34,6 +34,33 @@ abstract class Serializable implements \Serializable { $data = unserialize($serialized); - $this->unserializeData($this, $data['d']); + self::unserializeData($this, $data); + } + + + /** + * @param string $destination + * + * @return object + */ + public function cast(string $destination) + { + if (!is_subclass_of($destination, get_class($this))) { + throw new \InvalidArgumentException(sprintf( + '%s is not a descendant of $object class: %s.', + $destination, + get_class($this) + )); + } + + $data = self::getSerializableData($this); + $data["@type"] = $destination; + + /** @var Serializable $return */ + $reflection = new \ReflectionClass($destination); + $return = $reflection->newInstanceWithoutConstructor(); + self::unserializeData($return, $data); + + return $return; } } diff --git a/src/Orm/SerializableTrait.php b/src/Orm/SerializableTrait.php index <HASH>..<HASH> 100644 --- a/src/Orm/SerializableTrait.php +++ b/src/Orm/SerializableTrait.php @@ -19,15 +19,26 @@ trait SerializableTrait * Recursive serialize * * @param object $object + * @param string $className * * @return array */ - private static function getSerializableData($object) + protected static function getSerializableData($object, string $className = null) { $data = ['@type' => get_class($object)]; - $reflectionClass = new \ReflectionObject($object); + $reflectionClass = new \ReflectionClass($className ? $className : $object); + // parent class + $parent = $reflectionClass->getParentClass(); + if ($parent) { + $parentData = self::getSerializableData($object, $parent->getName()); + if (count($parentData) > 0) { + $data = array_merge($data, $parentData); + } + } + + // current $object foreach ($reflectionClass->getProperties() as $property) { $property->setAccessible(true); $value = $property->getValue($object); @@ -64,14 +75,21 @@ trait SerializableTrait * * @param object $object * @param array $cacheData + * @param string $className * * @return array */ - protected static function unserializeData($object, &$cacheData) : array + protected static function unserializeData($object, &$cacheData, string $className = null) : array { - $reflectionClass = new \ReflectionObject($object); + $reflectionClass = new \ReflectionClass($className ? $className : $object); unset($cacheData['@type']); + // parent class + $parent = $reflectionClass->getParentClass(); + if ($parent) { + self::unserializeData($object, $cacheData, $parent->getName()); + } + foreach ($reflectionClass->getProperties() as $property) { $name = $property->getName();
Orm: (un)serialize recursive & cast
cawaphp_cawa
train
afac64a9dd48ff2a74be193bc23eba5a045b6c53
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -57,12 +57,8 @@ function getFilesAndDeps (patterns, context) { // Futureproof webpack option parsing function wpGetOptions (context) { - if (typeof context.query === 'string') { - if (loaderUtils.getOptions) { return loaderUtils.getOptions(context); } - if (loaderUtils.parseQuery) { return loaderUtils.parseQuery(context.query); } - } else { - return context.query; - } + if (typeof context.query === 'string') return loaderUtils.getOptions(context); + return context.query; } module.exports = function (content) {
Stop using parseQuery from loader-utils
jeerbl_webfonts-loader
train
951836fb2d146e07eb326d49c62b65157d0815a2
diff --git a/src/AlgoliaManager.php b/src/AlgoliaManager.php index <HASH>..<HASH> 100644 --- a/src/AlgoliaManager.php +++ b/src/AlgoliaManager.php @@ -245,19 +245,7 @@ class AlgoliaManager $response = []; foreach ($indices as $index) { - $temporaryIndexName = 'tmp_' . $index->indexName; - - /** @var Index $temporaryIndex */ - $temporaryIndex = $this->initIndex($temporaryIndexName); - $temporaryIndex->addObjects($records); - - $settings = $index->getSettings(); - - // Temporary index overrides all the settings on the main one. - // So we need to set the original settings on the temporary one before atomically moving the index. - $temporaryIndex->setSettings($settings); - - $response[$index->indexName] = $this->moveIndex($temporaryIndexName, $index->indexName); + $response[$index->indexName] = $this->reindexAtomically($index, $records); } return $response; @@ -360,4 +348,28 @@ class AlgoliaManager return $algoliaRecords; } + + /** + * Reindex atomically the given index with the given records. + * + * @param Index $index + * @param array $algoliaRecords + * + * @return mixed + */ + private function reindexAtomically(Index $index, array $algoliaRecords) + { + $temporaryIndexName = 'tmp_' . $index->indexName; + + $temporaryIndex = $this->initIndex($temporaryIndexName); + $temporaryIndex->addObjects($algoliaRecords); + + $settings = $index->getSettings(); + + // Temporary index overrides all the settings on the main one. + // So we need to set the original settings on the temporary one before atomically moving the index. + $temporaryIndex->setSettings($settings); + + return $this->moveIndex($temporaryIndexName, $index->indexName); + } }
Refactor the reindex method for readability
lordthorzonus_yii2-algolia
train
2f917674d12e59e2a1cef320c9656f3a9d326819
diff --git a/cake/libs/controller/components/auth.php b/cake/libs/controller/components/auth.php index <HASH>..<HASH> 100644 --- a/cake/libs/controller/components/auth.php +++ b/cake/libs/controller/components/auth.php @@ -65,11 +65,11 @@ class AuthComponent extends Component { * * Using the class name without 'Authenticate' as the key, you can pass in an array of settings for each * authentication object. Additionally you can define settings that should be set to all authentications objects - * using the '*' key: + * using the 'all' key: * * {{{ * $this->Auth->authenticate = array( - * '*' => array( + * 'all' => array( * 'userModel' => 'Users.User', * 'scope' => array('User.active' => 1) * ), @@ -78,6 +78,8 @@ class AuthComponent extends Component { * ); * }}} * + * You can also use AuthComponent::ALL instead of the string 'all'. + * * @var array * @link http://book.cakephp.org/view/1278/authenticate */ @@ -104,11 +106,11 @@ class AuthComponent extends Component { * * Using the class name without 'Authorize' as the key, you can pass in an array of settings for each * authorization object. Additionally you can define settings that should be set to all authorization objects - * using the '*' key: + * using the 'all' key: * * {{{ * $this->Auth->authorize = array( - * '*' => array( + * 'all' => array( * 'actionPath' => 'controllers/' * ), * 'Crud', @@ -116,6 +118,8 @@ class AuthComponent extends Component { * ); * }}} * + * You can also use AuthComponent::ALL instead of the string 'all' + * * @var mixed * @link http://book.cakephp.org/view/1275/authorize */
Fixing docblock to reflect change from '*' to 'all'. Adding note about constant.
cakephp_cakephp
train
b0ef59f5ece797592730f393c4b2d7bdc0694e1f
diff --git a/lib/webrat/core/elements/field.rb b/lib/webrat/core/elements/field.rb index <HASH>..<HASH> 100644 --- a/lib/webrat/core/elements/field.rb +++ b/lib/webrat/core/elements/field.rb @@ -139,7 +139,7 @@ module Webrat end def escaped_value - CGI.escape([*@value].first.to_s) + CGI.escape(@value.to_s) end # Because we have to escape it before sending it to the above case statement, @@ -415,7 +415,7 @@ module Webrat end def unset(value) - @value = [] + @value = nil end protected @@ -427,17 +427,21 @@ module Webrat selected_options.map do |option| return "" if option.nil? option["value"] || option.inner_html - end.uniq + end.uniq.first end end - class MultipleSelectField < SelectField #:nodoc: + class MultipleSelectField < Field #:nodoc: def self.xpath_search [".//select[@multiple='multiple']"] end + def options + @options ||= SelectOption.load_all(@session, @element) + end + def set(value) @value << value end diff --git a/spec/public/fill_in_spec.rb b/spec/public/fill_in_spec.rb index <HASH>..<HASH> 100644 --- a/spec/public/fill_in_spec.rb +++ b/spec/public/fill_in_spec.rb @@ -15,6 +15,21 @@ describe "fill_in" do fill_in "User Text", :with => "filling text area" click_button end + + it "should support multiline values" do + with_html <<-HTML + <html> + <form method="post" action="/login"> + <label for="user_text">User Text</label> + <textarea id="user_text" name="user[text]"></textarea> + <input type="submit" /> + </form> + </html> + HTML + webrat_session.should_receive(:post).with("/login", "user" => {"text" => "One\nTwo"}) + fill_in "User Text", :with => "One\nTwo" + click_button + end it "should work with password fields" do with_html <<-HTML
Properly handle multiline param values (previously, subsequent lines were lost)
brynary_webrat
train
4a2356fb54f0d3fbb5cafd87a322c3de2bb095b4
diff --git a/src/main/java/net/time4j/PlainTimestamp.java b/src/main/java/net/time4j/PlainTimestamp.java index <HASH>..<HASH> 100644 --- a/src/main/java/net/time4j/PlainTimestamp.java +++ b/src/main/java/net/time4j/PlainTimestamp.java @@ -515,12 +515,28 @@ public final class PlainTimestamp } /** + * <p>Kombiniert diesen lokalen Zeitstempel mit dem angegebenen Offset + * zu einem UTC-Zeitstempel. </p> + * + * @param offset fixed time zone offset + * @return global timestamp based on this local timestamp interpreted + * at given time zone offset + */ + public Moment atOffset(ZonalOffset offset) { + + return this.inTimezone(offset, TransitionStrategy.PUSH_FORWARD); + + } + + /** * <p>Kombiniert diesen lokalen Zeitstempel mit der System-Zeitzone * zu einem UTC-Zeitstempel. </p> * * @return global timestamp based on this local timestamp interpreted * in system time zone * @see TimeZone#ofSystem() + * @see #inTimezone(TZID,TransitionStrategy) + * @see #atOffset(ZonalOffset) */ public Moment inSystemTimezone() { @@ -540,6 +556,8 @@ public final class PlainTimestamp * @return global timestamp based on this local timestamp interpreted * in given time zone * @see TimeZone#of(TZID) + * @see #inSystemTimezone() + * @see #atOffset(ZonalOffset) */ public Moment inTimezone( TZID tzid,
simple support for combination of PlainTimestamp and offset. Offsets don't require transition strategy as argument.
MenoData_Time4J
train
a7cec8a8091705183e1b5d142bb6c53f7e62905a
diff --git a/satpy/tests/reader_tests/_modis_fixtures.py b/satpy/tests/reader_tests/_modis_fixtures.py index <HASH>..<HASH> 100644 --- a/satpy/tests/reader_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/_modis_fixtures.py @@ -34,7 +34,7 @@ AVAILABLE_1KM_IR_PRODUCT_NAMES = [str(x) for x in range(20, 37)] AVAILABLE_1KM_PRODUCT_NAMES = AVAILABLE_1KM_VIS_PRODUCT_NAMES + AVAILABLE_1KM_IR_PRODUCT_NAMES AVAILABLE_HKM_PRODUCT_NAMES = [str(x) for x in range(3, 8)] AVAILABLE_QKM_PRODUCT_NAMES = ['1', '2'] -SCAN_LEN_5KM = 406 +SCAN_LEN_5KM = 6 # 3 scans of 5km data SCAN_WIDTH_5KM = 270 SCALE_FACTOR = 1 RES_TO_REPEAT_FACTOR = {
Reduce MODIS L1b/L2 test case size for better test performance
pytroll_satpy
train
5a2b43aec803b74de5ecb3ae392ed784c22f0c6e
diff --git a/eqcorrscan/tests/match_filter_test.py b/eqcorrscan/tests/match_filter_test.py index <HASH>..<HASH> 100644 --- a/eqcorrscan/tests/match_filter_test.py +++ b/eqcorrscan/tests/match_filter_test.py @@ -656,6 +656,7 @@ class TestMatchObjectHeavy(unittest.TestCase): saved_party = Party().read("eqcorrscan_temporary_party.tgz") self.assertEqual(party, saved_party) + @pytest.mark.serial def test_tribe_detect_masked_data(self): """Test using masked data - possibly raises error at pre-processing. Padding may also result in error at correlation stage due to poor @@ -697,7 +698,7 @@ class TestMatchObjectHeavy(unittest.TestCase): self.assertEqual(len(party), 4) @pytest.mark.flaky(reruns=2) - @pytest.mark.netork + @pytest.mark.network def test_client_detect_save_progress(self): """Test the client_detect method.""" client = Client('NCEDC')
mark parallel function as needing to run in serial
eqcorrscan_EQcorrscan
train
cf8f29371fb11e46286b5cdb84deb2ffa56c78c8
diff --git a/src/sap.ui.mdc/src/sap/ui/mdc/Table.js b/src/sap.ui.mdc/src/sap/ui/mdc/Table.js index <HASH>..<HASH> 100644 --- a/src/sap.ui.mdc/src/sap/ui/mdc/Table.js +++ b/src/sap.ui.mdc/src/sap/ui/mdc/Table.js @@ -1756,7 +1756,7 @@ sap.ui.define([ context: {title: that.getHeader()} }, dataSource: oRowBinding, - fileType: mCustomConfig.selectedFileType == "pdf" ? "PDF" : "XLSX", + fileType: mCustomConfig.selectedFileType, fileName: mCustomConfig ? mCustomConfig.fileName : this.getHeader() };
[INTERNAL] sap.ui.export - Correction for refactoring. Fixes the file type handling after the latest refactoring. All file types use the sap.ui.export.FileType instead of a lower case textual representation. Change-Id: Ibf<I>a<I>b<I>c<I>df5b7ed7ddf8c9f<I>e3e<I>b
SAP_openui5
train
273f3c260106a5228e475a8e2a81f220c0f8f5fe
diff --git a/languagetool-server/src/main/java/org/languagetool/server/DatabaseLogger.java b/languagetool-server/src/main/java/org/languagetool/server/DatabaseLogger.java index <HASH>..<HASH> 100644 --- a/languagetool-server/src/main/java/org/languagetool/server/DatabaseLogger.java +++ b/languagetool-server/src/main/java/org/languagetool/server/DatabaseLogger.java @@ -130,8 +130,12 @@ class DatabaseLogger { public void log(DatabaseLogEntry entry) { try { - if (!disabled && messages.size() < MAX_QUEUE_SIZE) { - messages.put(entry); + if (!disabled) { + if (messages.size() < MAX_QUEUE_SIZE) { + messages.put(entry); + } else { + ServerTools.print("Logging queue has reached size limit; discarding new messages."); + } } } catch (InterruptedException e) { e.printStackTrace();
printing warning when logging queue reaches limit
languagetool-org_languagetool
train
ad7b90293aa7714ea2828b755d781cdb1fc47b57
diff --git a/indra/databases/chembl_client.py b/indra/databases/chembl_client.py index <HASH>..<HASH> 100644 --- a/indra/databases/chembl_client.py +++ b/indra/databases/chembl_client.py @@ -73,6 +73,26 @@ def send_query(query_dict): return js +def query_target(target_chembl_id): + """Query ChEMBL API target by id + + Parameters + ---------- + target_chembl_id : str + Returns + ------- + target : dict + dict parsed from json that is unique for the target + """ + query_dict = {'query': 'target', + 'params': {'target_chembl_id': target_chembl_id, + 'limit': 1}} + res = send_query(query_dict) + assert(res['page_meta']['total_count'] == 1) + target = res['targets'][0] + return target + + def activities_by_target(activities): """Get back lists of activities in a dict keyed by ChEMBL target id Parameters
Query ChEMBL API target by id
sorgerlab_indra
train
c2315102ff01c093750d0675c8e94aed5ed2fe46
diff --git a/registry/session.go b/registry/session.go index <HASH>..<HASH> 100644 --- a/registry/session.go +++ b/registry/session.go @@ -158,9 +158,9 @@ func NewSession(client *http.Client, authConfig *cliconfig.AuthConfig, endpoint } } - if endpoint.Version == APIVersion1 { - client.Transport = AuthTransport(client.Transport, authConfig, alwaysSetBasicAuth) - } + // Annotate the transport unconditionally so that v2 can + // properly fallback on v1 when an image is not found. + client.Transport = AuthTransport(client.Transport, authConfig, alwaysSetBasicAuth) jar, err := cookiejar.New(nil) if err != nil {
Unconditionally add AuthTransport. Today, endpoints implementing v2 cannot properly fall back to v1 because the underlying transport that deals with authentication (Basic / Token) doesn't get annotated. This doesn't affect DockerHub because the DockerHub endpoint appears as '<URL>', and the 'v1' tricks this logic just long enough that the transport is always annotated for DockerHub accesses.
containers_storage
train
c4ec6cfc808a31088a645ebba7757a13bd483761
diff --git a/github/github-accessors.go b/github/github-accessors.go index <HASH>..<HASH> 100644 --- a/github/github-accessors.go +++ b/github/github-accessors.go @@ -5580,6 +5580,14 @@ func (k *Key) GetURL() string { return *k.URL } +// GetVerified returns the Verified field if it's non-nil, zero value otherwise. +func (k *Key) GetVerified() bool { + if k == nil || k.Verified == nil { + return false + } + return *k.Verified +} + // GetColor returns the Color field if it's non-nil, zero value otherwise. func (l *Label) GetColor() string { if l == nil || l.Color == nil { diff --git a/github/github-accessors_test.go b/github/github-accessors_test.go index <HASH>..<HASH> 100644 --- a/github/github-accessors_test.go +++ b/github/github-accessors_test.go @@ -6520,6 +6520,16 @@ func TestKey_GetURL(tt *testing.T) { k.GetURL() } +func TestKey_GetVerified(tt *testing.T) { + var zeroValue bool + k := &Key{Verified: &zeroValue} + k.GetVerified() + k = &Key{} + k.GetVerified() + k = nil + k.GetVerified() +} + func TestLabel_GetColor(tt *testing.T) { var zeroValue string l := &Label{Color: &zeroValue} diff --git a/github/github-stringify_test.go b/github/github-stringify_test.go index <HASH>..<HASH> 100644 --- a/github/github-stringify_test.go +++ b/github/github-stringify_test.go @@ -671,9 +671,10 @@ func TestKey_String(t *testing.T) { URL: String(""), Title: String(""), ReadOnly: Bool(false), + Verified: Bool(false), CreatedAt: &Timestamp{}, } - want := `github.Key{ID:0, Key:"", URL:"", Title:"", ReadOnly:false, CreatedAt:github.Timestamp{0001-01-01 00:00:00 +0000 UTC}}` + want := `github.Key{ID:0, Key:"", URL:"", Title:"", ReadOnly:false, Verified:false, CreatedAt:github.Timestamp{0001-01-01 00:00:00 +0000 UTC}}` if got := v.String(); got != want { t.Errorf("Key.String = %v, want %v", got, want) } diff --git a/github/users_keys.go b/github/users_keys.go index <HASH>..<HASH> 100644 --- a/github/users_keys.go +++ b/github/users_keys.go @@ -17,6 +17,7 @@ type Key struct { URL *string `json:"url,omitempty"` Title *string `json:"title,omitempty"` ReadOnly *bool `json:"read_only,omitempty"` + Verified *bool `json:"verified,omitempty"` CreatedAt *Timestamp `json:"created_at,omitempty"` }
User.Keys: Add verified field (#<I>)
google_go-github
train
17d123a3be3a2c9e21845fda89c61f00301f78f5
diff --git a/core/manifest/ConfigManifest.php b/core/manifest/ConfigManifest.php index <HASH>..<HASH> 100644 --- a/core/manifest/ConfigManifest.php +++ b/core/manifest/ConfigManifest.php @@ -99,7 +99,7 @@ class SS_ConfigManifest { } // If we don't have a variantKeySpec (because we're forcing regen, or it just wasn't in the cache), generate it - if (false === $this->variantKeySpec) { + if (false === $this->phpConfigSources || false === $this->variantKeySpec) { $this->regenerate($includeTests); } diff --git a/tests/core/manifest/ConfigManifestTest.php b/tests/core/manifest/ConfigManifestTest.php index <HASH>..<HASH> 100644 --- a/tests/core/manifest/ConfigManifestTest.php +++ b/tests/core/manifest/ConfigManifestTest.php @@ -153,6 +153,83 @@ class ConfigManifestTest extends SapphireTest { } /** + * Test cache regeneration if all or some of the cache files are missing + * + * 1. Test regeneration if all cache files are missing + * 2. Test regeneration if 'variant_key_spec' cache file is missing + * 3. Test regeneration if 'php_config_sources' cache file is missing + */ + public function testAutomaticCacheRegeneration(){ + $base = dirname(__FILE__) . '/fixtures/configmanifest'; + + // Test regeneration if all cache files are missing + $manifest = $this->getManifestMock(array('getCache', 'regenerate', 'buildYamlConfigVariant')); + + $manifest->expects($this->once())// regenerate should be called once + ->method('regenerate') + ->with($this->equalTo(false)); // includeTests = false + + // Set up a cache where we expect load to never be called + $cache = $this->getCacheMock(); + $cache->expects($this->exactly(2)) + ->will($this->returnValue(false)) + ->method('load'); + + $manifest->expects($this->any()) + ->method('getCache') + ->will($this->returnValue($cache)); + + $manifest->__construct($base); + + // Test regeneration if 'variant_key_spec' cache file is missing + $manifest = $this->getManifestMock(array('getCache', 'regenerate', 'buildYamlConfigVariant')); + + $manifest->expects($this->once())// regenerate should be called once + ->method('regenerate') + ->with($this->equalTo(false)); // includeTests = false + + + $cache = $this->getCacheMock(); + $cache->expects($this->exactly(2)) + ->method('load') + ->will($this->returnCallback(function ($parameter) { + if (strpos($parameter, 'variant_key_spec') !== false) { + return false; + } + return array(); + })); + + $manifest->expects($this->any()) + ->method('getCache') + ->will($this->returnValue($cache)); + + $manifest->__construct($base); + + // Test regeneration if 'php_config_sources' cache file is missing + $manifest = $this->getManifestMock(array('getCache', 'regenerate', 'buildYamlConfigVariant')); + + $manifest->expects($this->once())// regenerate should be called once + ->method('regenerate') + ->with($this->equalTo(false)); // includeTests = false + + $cache = $this->getCacheMock(); + $cache->expects($this->exactly(2)) + ->method('load') + ->will($this->returnCallback(function ($parameter) { + if (strpos($parameter, 'php_config_sources') !== false) { + return false; + } + return array(); + })); + + $manifest->expects($this->any()) + ->method('getCache') + ->will($this->returnValue($cache)); + + $manifest->__construct($base); + } + + /** * This test checks the processing of before and after reference paths (module-name/filename#fragment) * This method uses fixture/configmanifest/mysite/_config/addyamlconfigfile.yml as a fixture */
BUG Ensure correct regeneration of ConfigManifest if only one of the cache files is missing
silverstripe_silverstripe-framework
train
60e0daea62f1d3a024bc62aca6703ce0eafbdea5
diff --git a/pymc3/tests/test_distributions.py b/pymc3/tests/test_distributions.py index <HASH>..<HASH> 100644 --- a/pymc3/tests/test_distributions.py +++ b/pymc3/tests/test_distributions.py @@ -948,6 +948,7 @@ class TestMatchesScipy(SeededTest): Unit, {"alpha": Rplus, "beta": Rplus}, lambda value, alpha, beta: sp.beta.logcdf(value, alpha, beta), + n_samples=10, ) def test_kumaraswamy(self): @@ -1052,17 +1053,20 @@ class TestMatchesScipy(SeededTest): Nat, {"mu": Rplus, "alpha": Rplus}, scipy_mu_alpha_logcdf, + n_samples=5, ) self.check_logcdf( NegativeBinomial, Nat, {"p": Unit, "n": Rplus}, lambda value, p, n: sp.nbinom.logcdf(value, n, p), + n_samples=5, ) self.check_selfconsistency_discrete_logcdf( NegativeBinomial, Nat, {"mu": Rplus, "alpha": Rplus}, + n_samples=10, ) @pytest.mark.parametrize( @@ -1282,11 +1286,13 @@ class TestMatchesScipy(SeededTest): Nat, {"n": NatSmall, "p": Unit}, lambda value, n, p: sp.binom.logcdf(value, n, p), + n_samples=10, ) self.check_selfconsistency_discrete_logcdf( Binomial, Nat, {"n": NatSmall, "p": Unit}, + n_samples=10, ) # Too lazy to propagate decimal parameter through the whole chain of deps @@ -1423,6 +1429,7 @@ class TestMatchesScipy(SeededTest): ZeroInflatedNegativeBinomial, Nat, {"mu": Rplusbig, "alpha": Rplusbig, "psi": Unit}, + n_samples=10, ) # Too lazy to propagate decimal parameter through the whole chain of deps @@ -1437,6 +1444,7 @@ class TestMatchesScipy(SeededTest): ZeroInflatedBinomial, Nat, {"n": NatSmall, "p": Unit, "psi": Unit}, + n_samples=10, ) @pytest.mark.parametrize("n", [1, 2, 3])
Reduce number of test points in slow logcdf methods (#<I>)
pymc-devs_pymc
train
b931486ce76f4289069a3cc86c53d0cfb0650c2b
diff --git a/github-downloads-plugin/src/main/java/com/github/maven/plugins/downloads/DownloadsMojo.java b/github-downloads-plugin/src/main/java/com/github/maven/plugins/downloads/DownloadsMojo.java index <HASH>..<HASH> 100644 --- a/github-downloads-plugin/src/main/java/com/github/maven/plugins/downloads/DownloadsMojo.java +++ b/github-downloads-plugin/src/main/java/com/github/maven/plugins/downloads/DownloadsMojo.java @@ -254,15 +254,12 @@ public class DownloadsMojo extends AbstractMojo { scanner.scan(); if (getLog().isDebugEnabled()) getLog().debug( - MessageFormat.format( - "Including {0} file(s) to upload: {1}", - scanner.getIncludedFiles().length, + MessageFormat.format("Scanned files to include: {0}", Arrays.toString(scanner.getIncludedFiles()))); for (String path : scanner.getIncludedFiles()) files.add(new File(baseDir, path)); - } else { + } else files = Collections.singletonList(project.getArtifact().getFile()); - } return files; } @@ -294,7 +291,10 @@ public class DownloadsMojo extends AbstractMojo { else existing = Collections.emptyMap(); - for (File file : getFiles()) { + List<File> files = getFiles(); + log.info(MessageFormat.format("Creating {0} download(s) at {1}", + files.size(), repository.generateId())); + for (File file : files) { final String name = file.getName(); Integer existingId = existing.get(name); if (existingId != null) @@ -315,10 +315,8 @@ public class DownloadsMojo extends AbstractMojo { if (!isEmpty(description)) download.setDescription(description); download.setSize(file.length()); - if (debug) - log.debug(MessageFormat.format( - "Creating download with name {0} and size {1}", name, - download.getSize())); + log.info(MessageFormat.format("Creating download: {0} ({1} bytes)", + name, download.getSize())); try { DownloadResource resource = service.createResource(repository, download);
Info log name and size of each download being created
github_maven-plugins
train
2b9b2aa15bbfb305b45dbe0ad8bc361a97baa8f7
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,7 @@ NASSL_SETUP = { 'name': "nassl", 'version': __version__, 'package_dir': {'nassl': 'nassl'}, - 'py_modules': ['nassl.__init__', 'nassl.ssl_client', 'nassl.debug_ssl_client', 'nassl.x509_certificate', + 'py_modules': ['nassl.__init__', 'nassl.ssl_client', 'nassl.debug_ssl_client', 'nassl.ocsp_response'], 'description': 'Experimental OpenSSL wrapper for Python 2.7 / 3.3+ and SSLyze.', 'extras_require': {':python_version < "3.4"': ['enum34'],
Remove deleted x<I>_certificate module from setup.py
nabla-c0d3_nassl
train
aa6d4318792f8542e37bd314f301fa99802fc4d6
diff --git a/graph/graph.go b/graph/graph.go index <HASH>..<HASH> 100644 --- a/graph/graph.go +++ b/graph/graph.go @@ -259,6 +259,35 @@ func (g *Graph) GetLowestNodes(nodes Nodes) Nodes { return toReturn } +func (g *Graph) insert(flattened []Nodes, circulars Nodes, offset int64) { + maxLayer := int64(-1) + for i, nodes := range flattened { + for _, node := range nodes { + if g.positions[node.ID()] == nil { + g.numItems++ + } + g.positions[node.ID()] = &bundle{ + INode: node, + position: int64(i) + offset, + } + } + maxLayer = int64(i) + offset + } + + for _, node := range circulars { + if g.positions[node.ID()] == nil { + g.numItems++ + } + g.positions[node.ID()] = &bundle{ + INode: node, position: -1, + } + } + + if maxLayer > g.maxLayer { + g.maxLayer = maxLayer + } +} + // AddNodes will add the provided nodes to the flattened index // of the graph and return an execution graph that is ready to // be calculated. @@ -396,32 +425,6 @@ func (g *Graph) Len() uint64 { return g.numItems } -func (g *Graph) insert(flattened []Nodes, circulars Nodes, offset int64) { - maxLayer := int64(-1) - for i, nodes := range flattened { - for _, node := range nodes { - if g.positions[node.ID()] == nil { - g.numItems++ - } - g.positions[node.ID()] = &bundle{ - INode: node, - position: int64(i) + offset, - } - } - maxLayer = int64(i) + offset - } - - for _, node := range circulars { - g.positions[node.ID()] = &bundle{ - INode: node, position: -1, - } - } - - if maxLayer > g.maxLayer { - g.maxLayer = maxLayer - } -} - // FromNodes will create a new graph from the given nodes. func FromNodes(dp IDependencyProvider, nodes Nodes) *Graph { if len(nodes) == 0 { diff --git a/graph/graph_test.go b/graph/graph_test.go index <HASH>..<HASH> 100644 --- a/graph/graph_test.go +++ b/graph/graph_test.go @@ -464,6 +464,7 @@ func TestAddNodesWithDependents(t *testing.T) { assert.Len(t, eg.circulars, 2) assert.Contains(t, eg.circulars, n3) assert.Contains(t, eg.circulars, n4) + assert.Equal(t, 4, g.Len()) } func TestAddNodesWithCircularDependents(t *testing.T) { @@ -489,6 +490,7 @@ func TestAddNodesWithCircularDependents(t *testing.T) { dp.dependents = Nodes{n2} eg := g.AddNodes(dp, Nodes{n3}) assert.Len(t, eg.toApply, 1) + assert.Equal(t, 3, g.Len()) } func TestPositionsExtractLayers(t *testing.T) { @@ -569,6 +571,7 @@ func TestRemoveNodes(t *testing.T) { assert.Equal(t, Nodes{n2}, eg.toApply[0]) assert.Equal(t, Nodes{n4}, eg.toApply[1]) assert.Len(t, eg.circulars, 0) + assert.Equal(t, 3, g.Len()) } func TestRemoveNodesWithCircular(t *testing.T) { @@ -628,6 +631,7 @@ func TestRemoveNodesWithCircular(t *testing.T) { assert.Equal(t, Nodes{n3}, eg.toApply[2]) assert.Equal(t, 3, eg.size) assert.Len(t, eg.circulars, 0) + assert.Equal(t, 3, g.Len()) } func BenchmarkPreIndexedFlattening(b *testing.B) {
Modified some unit tests to increase coverage.
Workiva_go-datastructures
train
b4ff35943cd970750eec69142dfd939d7e6f088e
diff --git a/lib/components/fields/date.js b/lib/components/fields/date.js index <HASH>..<HASH> 100644 --- a/lib/components/fields/date.js +++ b/lib/components/fields/date.js @@ -58,15 +58,10 @@ module.exports = { var parentOptions = this.inForm()?clone(this.$parent.options.dateOptions):{}; - if (isString(this.value)) { - - this.value = this.range? - {start:moment(this.value.start, 'YYYY-MM-DD HH:mm:ss'),end:moment(this.value.end, 'YYYY-MM-DD HH:mm:ss')}: - moment(this.value, 'YYYY-MM-DD HH:mm:ss'); - - } - if (this.value) { + + this.momentizeValue(); + var value = {startDate:this.range?this.value.start.format(this.format):this.value.format(this.format)}; if (this.range) value.endDate = this.value.end.format(this.format); this.options = merge.recursive(this.options, value); @@ -89,7 +84,9 @@ module.exports = { datepicker.daterangepicker(options); datepicker.on('apply.daterangepicker', function(ev, picker) { - this.value = this.range?{start:picker.startDate, end:picker.endDate}:picker.startDate; + this.value = this.range? + {start:picker.startDate, end:picker.endDate}: + picker.startDate; }.bind(this)); datepicker.on('cancel.daterangepicker', function(ev, picker) { @@ -107,12 +104,37 @@ module.exports = { } + this.$watch('value', function(newVal, oldVal) { + this.momentizeValue(); + + var startDate = this.range? + this.value.start.format(this.format): + this.value.format(this.format); + + var endDate = this.range? + this.value.end.format(this.format): + null; + + datepicker.data('daterangepicker').setStartDate(startDate); + datepicker.data('daterangepicker').setEndDate(endDate); + }); + + }, + methods: { + momentizeValue: function() { + if (isDateString(this.value)) + this.value = this.range? + {start:moment(this.value.start, 'YYYY-MM-DD HH:mm:ss'), + end:moment(this.value.end, 'YYYY-MM-DD HH:mm:ss')}: + moment(this.value, 'YYYY-MM-DD HH:mm:ss'); + } }, computed: { isTimepicker: function() { return this.options.hasOwnProperty('timePicker') && this.options.timePicker; }, formattedDate: function() { + if (!this.value || (!this.range && !this.value.format) || (this.range && (!this.value.start.format || !this.value.end.format))) @@ -139,6 +161,6 @@ module.exports = { } -function isString(value) { +function isDateString(value) { return value && (typeof value=='string' || (value.hasOwnProperty('start') && typeof value.start=='string')); } diff --git a/lib/components/fields/select.js b/lib/components/fields/select.js index <HASH>..<HASH> 100644 --- a/lib/components/fields/select.js +++ b/lib/components/fields/select.js @@ -137,6 +137,13 @@ module.exports = { el.data('select2').$dropdown.addClass("dropdown-" + this.containerClass); } + + this.$watch('value', function(value, oldVal) { + if (JSON.stringify(value)!=JSON.stringify(oldVal)) + el.select2('val',value); + }); + + } }, diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "vue-formular", "description": "a comprehensive vue.js form component", - "version": "0.6.34", + "version": "0.6.4", "keywords": [ "vue", "form",
add two-way data binding for select2 and daterangepicker
matfish2_vue-formular
train
ce24a73e8d8490a54e3ff1b75325d9182d36c74e
diff --git a/js/imolecule.js b/js/imolecule.js index <HASH>..<HASH> 100644 --- a/js/imolecule.js +++ b/js/imolecule.js @@ -5,14 +5,39 @@ var imolecule = { // Creates a new instance of imolecule create: function (selector, options) { - var $s = $(selector), self = this; + var $s = $(selector), self = this, hasCanvas, hasWebgl; options = options || {}; this.shader = options.hasOwnProperty("shader") ? options.shader : "toon"; this.drawingType = options.hasOwnProperty("drawingType") ? options.drawingType : "ball and stick"; this.cameraType = options.hasOwnProperty("cameraType") ? options.cameraType : "perspective"; this.updateCamera = (this.cameraType === "orthographic"); - this.renderer = new THREE.WebGLRenderer({antialias: true, alpha: true}); + + // Adapted from http://japhr.blogspot.com/2012/07/fallback-from-webgl-to-canvas-in-threejs.html + hasCanvas = !!window.CanvasRenderingContext2D; + hasWebgl = (function () { + try { + return !!window.WebGLRenderingContext && + !!document.createElement("canvas").getContext("experimental-webgl"); + } catch(e) { + return false; + }})(); + + if (hasWebgl) { + this.renderMode = "webgl"; + this.renderer = new THREE.WebGLRenderer({antialias: true, alpha: true}); + } else if (hasCanvas) { + $s.append('<p class="alert alert-warning" align="center">Your web browser does not support WebGL. Using Canvas as a fallback.</p>'); + this.renderMode = "canvas"; + if (this.shader === "toon") { + this.shader = "basic"; + } + this.renderer = new THREE.CanvasRenderer(); + } else { + $s.append('<p class="alert alert-danger" align="center">Your web browser does not support either WebGL or Canvas. Please upgrade.</p>'); + return; + } + this.renderer.setSize($s.width(), $s.height()); $s.append(this.renderer.domElement); @@ -64,14 +89,16 @@ var imolecule = { }, makeMaterials: function () { - var self = this, threeMaterial; + var self = this, threeMaterial, overdraw; // If a different shader is specified, use uncustomized materials if ($.inArray(self.shader, ["basic", "phong", "lambert"]) !== -1) { threeMaterial = "Mesh" + self.shader.charAt(0).toUpperCase() + self.shader.slice(1) + "Material"; + overdraw = this.renderMode === "canvas" ? 0.5 : 0; $.each(self.data, function (key, value) { - value.material = new THREE[threeMaterial]({color: value.color}); + value.material = new THREE[threeMaterial]({color: value.color, + overdraw: overdraw}); }); // If toon, use materials with some shader edits @@ -324,6 +351,9 @@ var imolecule = { // Sets shader (toon, basic, phong, lambert) and redraws setShader: function (shader) { + if (this.renderMode !== "webgl" && shader === "toon") { + throw new Error("Toon shading requires webGL."); + } this.shader = shader; this.makeMaterials(); this.clear();
Incorporated changes from bjonnh/imolecule/feature/canvas-fallback. Fixes #<I>.
patrickfuller_imolecule
train
24d1839c697db96d08ef4984b4b241f7107ad97e
diff --git a/viewport-units-buggyfill.js b/viewport-units-buggyfill.js index <HASH>..<HASH> 100755 --- a/viewport-units-buggyfill.js +++ b/viewport-units-buggyfill.js @@ -77,7 +77,7 @@ @end @if (@_jscript_version < 9) { - isOldIE = true; + isOldIE = true; } @end @@ -127,12 +127,12 @@ if (isOldIE || (!options.force && !isMobileSafari && !isBuggyIE && !isBadStockAndroid && !isOperaMini && (!options.hacks || !options.hacks.required(options)))) { // this buggyfill only applies to mobile safari, IE9-10 and the Stock Android Browser. - if (window.console) { - console.info('This script will only work with browsers that have buggy implementations of viewport units and will not polyfill viewport units in older browsers (e.g. IE <= 8)'); - } - + if (window.console && isOldIE) { + console.info('viewport-units-buggyfill requires a proper CSSOM and basic viewport unit support, which are not available in IE8 and below'); + } + return { - init: function () {} + init: function () {} }; }
feature(init): fixing jshint, limiting console warning to IE<=8
rodneyrehm_viewport-units-buggyfill
train
0e7820bdd250679324b570d5220e4bb50233c6af
diff --git a/openquake.cfg b/openquake.cfg
index <HASH>..<HASH> 100644
--- a/openquake.cfg
+++ b/openquake.cfg
@@ -74,7 +74,7 @@ concurrent_tasks = 32
 # The number of work items (assets) per task. This affects both the
 # RAM usage (the more, the more) and the performance of the
 # computation (the more the less).
-block_size = 10000
+block_size = 1000
 
 # The number of tasks to be in queue at any given time.
 # Ideally, this would be set to the number of available worker processes.
diff --git a/openquake/engine/calculators/hazard/classical/core.py b/openquake/engine/calculators/hazard/classical/core.py
index <HASH>..<HASH> 100644
--- a/openquake/engine/calculators/hazard/classical/core.py
+++ b/openquake/engine/calculators/hazard/classical/core.py
@@ -366,10 +366,9 @@ BaseHazardCalculatorNext.finalize_hazard_curves`
             )
             container_ids['q%s' % quantile] = q_hc.id
 
-        all_curves_for_imt = models.HazardCurveData.objects\
-            .all_curves_for_imt(
-                self.job.id, im_type, sa_period, sa_damping)\
-            .order_by('location')
+        all_curves_for_imt = models.order_by_location(
+            models.HazardCurveData.objects.all_curves_for_imt(
+                self.job.id, im_type, sa_period, sa_damping))
 
         with transaction.commit_on_success(using='reslt_writer'):
             inserter = BulkInserter(models.HazardCurveData,
diff --git a/openquake/engine/calculators/hazard/event_based/post_processing.py b/openquake/engine/calculators/hazard/event_based/post_processing.py
index <HASH>..<HASH> 100644
--- a/openquake/engine/calculators/hazard/event_based/post_processing.py
+++ b/openquake/engine/calculators/hazard/event_based/post_processing.py
@@ -166,9 +166,8 @@ def gmf_to_hazard_curve_task(job_id, point, lt_rlz_id, imt, imls, hc_coll_id,
         gmf_set__gmf_collection__lt_realization=lt_rlz_id,
         imt=imt,
         sa_period=sa_period,
-        sa_damping=sa_damping,
-        location=point.wkt2d)
-
+        sa_damping=sa_damping).extra(where=[
+        "location::geometry ~= 'SRID=4326;%s'::geometry" % point.wkt2d])
     # Collect all of the ground motion values:
     gmvs = list(itertools.chain(*(g.gmvs for g in gmfs)))
     # Compute the hazard curve PoEs:
diff --git a/openquake/engine/db/models.py b/openquake/engine/db/models.py
index <HASH>..<HASH> 100644
--- a/openquake/engine/db/models.py
+++ b/openquake/engine/db/models.py
@@ -83,6 +83,18 @@ IMT_CHOICES = (
 DEFAULT_LOSS_CURVE_RESOLUTION = 50
 
 
+def order_by_location(queryset):
+    """
+    Utility function to order a queryset by location. This works even if
+    the location is of Geography object (a simple order_by('location') only
+    works for Geometry objects).
+    """
+    return queryset.extra(
+        select={'x': 'ST_X(geometry(location))',
+                'y': 'ST_Y(geometry(location))'},
+        order_by=["x", "y"])
+
+
 def queryset_iter(queryset, chunk_size):
     """
     Given a QuerySet, split it into smaller queries and yield the result of
@@ -1621,7 +1633,7 @@ class HazardCurveDataManager(djm.GeoManager):
         """
         Same as #individual_curves but the results are ordered by location
         """
-        return self.individual_curves(job, imt).order_by('location')
+        return order_by_location(self.individual_curves(job, imt))
 
     def individual_curves_nr(self, job, imt):
         """
@@ -1993,14 +2005,13 @@ class GmfSet(djm.Model):
 
         for imt, sa_period, sa_damping in imts:
             for result_grp_ordinal in xrange(1, num_tasks + 1):
-                gmfs = Gmf.objects\
-                    .filter(
+                gmfs = order_by_location(
+                    Gmf.objects.filter(
                         gmf_set=self.id,
                         imt=imt,
                         sa_period=sa_period,
                         sa_damping=sa_damping,
-                        result_grp_ordinal=result_grp_ordinal)\
-                    .order_by('location')
+                        result_grp_ordinal=result_grp_ordinal))
                 if len(gmfs) == 0:
                     # This task did not contribute to this GmfSet
                     continue
@@ -2111,12 +2122,12 @@ def get_gmfs_scenario(output, imt=None):
     else:
         imts = [parse_imt(imt)]
     for imt, sa_period, sa_damping in imts:
-        gmfs = GmfScenario.objects.filter(
-            output__id=output.id,
-            imt=imt,
-            sa_period=sa_period,
-            sa_damping=sa_damping,
-        ).order_by('location')
+        gmfs = order_by_location(
+            GmfScenario.objects.filter(
+                output__id=output.id,
+                imt=imt,
+                sa_period=sa_period,
+                sa_damping=sa_damping))
     # yield all the nodes associated to a given location
     for loc, rows in itertools.groupby(
             gmfs, operator.attrgetter('location')):
Fixed the bug introduced by changing the location field from Geometry to Geography. Former-commit-id: aef3f<I>a8cdc<I>a2cfa<I>dd9a<I>e<I>deb3b
gem_oq-engine
train
536a5f865fc204bfc6d8c2a51616052dc772374d
diff --git a/src/main/java/io/github/bonigarcia/wdm/Downloader.java b/src/main/java/io/github/bonigarcia/wdm/Downloader.java index <HASH>..<HASH> 100644 --- a/src/main/java/io/github/bonigarcia/wdm/Downloader.java +++ b/src/main/java/io/github/bonigarcia/wdm/Downloader.java @@ -297,7 +297,7 @@ public class Downloader { String phantomName = "phantomjs"; if (export.contains(phantomName)) { String fileNoExtension = archive.getName().replace(".tar.bz2", "") - .replace(".zip", ""); + .replace(".zip", "").replace("-beta", ".beta"); File phantomjs = null; try { @@ -309,6 +309,7 @@ public class Downloader { phantomjs = new File(archive.getParentFile().getAbsolutePath() + File.separator + fileNoExtension + File.separator + phantomName + extension); + } target = new File(archive.getParentFile().getAbsolutePath()
Bug-fix for issue #<I>: compatibility with PhantomJS <I>-beta
bonigarcia_webdrivermanager
train
a3b10a83944a30034c73a12cfe1c317a986da546
diff --git a/test/passwordless-email/emailcode.acceptance.test.js b/test/passwordless-email/emailcode.acceptance.test.js index <HASH>..<HASH> 100644 --- a/test/passwordless-email/emailcode.acceptance.test.js +++ b/test/passwordless-email/emailcode.acceptance.test.js @@ -15,19 +15,23 @@ describe(".emailcode acceptance", function() { }); }); - describe.skip("opening a Lock", function() { + describe("opening a Lock", function() { before(function() { this.lock = u.constructLock(); - u.openLock(this.lock, "emailcode"); }); after(function() { u.closeLock(this.lock); }); - it("renders the widget and opens it after a few ms", function(done) { + it("doesn't open the Lock immediately", function() { + u.openLock(this.lock, "emailcode"); + expect(u.isRendered(this.lock)).to.be.ok(); expect(u.isOpened(this.lock)).to.not.be.ok(); + }); + + it("opens it after a few ms", function(done) { setTimeout(() => { expect(u.isOpened(this.lock)).to.be.ok(); done();
Activate test for opening an emailcode Lock
auth0_lock
train
a28f718ecaca9c9f9dc6feacd8372355e8910339
diff --git a/.gitignore b/.gitignore
index <HASH>..<HASH> 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,3 +2,4 @@
 /target
 /.classpath
 /.project
+/.idea
\ No newline at end of file
diff --git a/src/main/java/ch/ralscha/extdirectspring/controller/ConfigurationService.java b/src/main/java/ch/ralscha/extdirectspring/controller/ConfigurationService.java
index <HASH>..<HASH> 100644
--- a/src/main/java/ch/ralscha/extdirectspring/controller/ConfigurationService.java
+++ b/src/main/java/ch/ralscha/extdirectspring/controller/ConfigurationService.java
@@ -15,6 +15,7 @@
  */
 package ch.ralscha.extdirectspring.controller;
 
+import java.util.Collection;
 import java.util.Map;
 import java.util.concurrent.Executors;
 
@@ -26,6 +27,7 @@ import org.springframework.core.convert.ConversionService;
 import org.springframework.format.support.DefaultFormattingConversionService;
 import org.springframework.format.support.FormattingConversionService;
 import org.springframework.stereotype.Service;
+import org.springframework.web.bind.support.WebArgumentResolver;
 
 import ch.ralscha.extdirectspring.util.JsonHandler;
 import ch.ralscha.extdirectspring.util.ParametersResolver;
@@ -87,8 +89,8 @@ public class ConfigurationService implements InitializingBean, DisposableBean {
 			}
 		}
 
-		parametersResolver = new ParametersResolver(configuration.getConversionService(), jsonHandler);
-
+		Collection<WebArgumentResolver> webResolvers = context.getBeansOfType(WebArgumentResolver.class).values();
+		parametersResolver = new ParametersResolver(configuration.getConversionService(), jsonHandler, webResolvers);
 	}
 
 	@Override
diff --git a/src/main/java/ch/ralscha/extdirectspring/util/ParametersResolver.java b/src/main/java/ch/ralscha/extdirectspring/util/ParametersResolver.java
index <HASH>..<HASH> 100644
--- a/src/main/java/ch/ralscha/extdirectspring/util/ParametersResolver.java
+++ b/src/main/java/ch/ralscha/extdirectspring/util/ParametersResolver.java
@@ -18,6 +18,7 @@ package ch.ralscha.extdirectspring.util;
 import java.beans.PropertyDescriptor;
 import java.lang.reflect.InvocationTargetException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -32,9 +33,12 @@ import javax.servlet.http.HttpServletRequest;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.springframework.beans.BeanUtils;
+import org.springframework.core.MethodParameter;
 import org.springframework.core.convert.ConversionFailedException;
 import org.springframework.core.convert.ConversionService;
 import org.springframework.core.convert.TypeDescriptor;
+import org.springframework.web.bind.support.WebArgumentResolver;
+import org.springframework.web.context.request.ServletWebRequest;
 
 import ch.ralscha.extdirectspring.annotation.ExtDirectMethodType;
 import ch.ralscha.extdirectspring.bean.ExtDirectRequest;
@@ -61,9 +65,13 @@ public final class ParametersResolver {
 
 	private final JsonHandler jsonHandler;
 
-	public ParametersResolver(ConversionService conversionService, JsonHandler jsonHandler) {
+	private final Collection<WebArgumentResolver> webArgumentResolvers;
+
+	public ParametersResolver(ConversionService conversionService, JsonHandler jsonHandler,
+			Collection<WebArgumentResolver> webArgumentResolvers) {
 		this.conversionService = conversionService;
 		this.jsonHandler = jsonHandler;
+		this.webArgumentResolvers = webArgumentResolvers;
 	}
 
 	public Object[] prepareParameters(HttpServletRequest request, HttpServletResponse response, Locale locale,
@@ -197,10 +205,34 @@ public final class ParametersResolver {
 					parameters[paramIndex] = convertValue(jsonValue, methodParameter);
 					jsonParamIndex++;
 				} else {
-					throw new IllegalArgumentException(
-							"Error, parameter mismatch. Please check your remoting method signature to ensure all supported parameters types are used.");
-				}
+					log.info("WebResolvers size:" + this.webArgumentResolvers.size());
+					log.info("ParamIndex:" + paramIndex);
+
+					log.info("Request params size:" + request.getParameterMap().isEmpty());
+					log.info("Request params names:" + request.getParameterMap().keySet());
+					log.info("Direct Request:" + directRequest.toString());
+
+					MethodParameter p = new MethodParameter(methodInfo.getMethod(), paramIndex);
+					request.setAttribute("directRequest", directRequest);
+					ServletWebRequest r = new ServletWebRequest(request);
+					Object result = WebArgumentResolver.UNRESOLVED;
+
+					for (WebArgumentResolver resolver : this.webArgumentResolvers) {
+						log.info("Resolving with:" + resolver.getClass().getCanonicalName());
+
+						result = resolver.resolveArgument(p, r);
+						if (result != WebArgumentResolver.UNRESOLVED) {
+							log.info("Resolved by:" + resolver.getClass().getCanonicalName());
+							parameters[paramIndex] = result;
+							break;
+						}
+					}
+					if (result == WebArgumentResolver.UNRESOLVED) {
+						throw new IllegalArgumentException(
+								"Error, parameter mismatch. Please check your remoting method signature to ensure all supported parameters types are used.");
+					}
+				}
 			}
 		}
Resolves #<I>
ralscha_extdirectspring
train
a95fc0edb63aae4100a51b9c144e346ea17f1fc4
diff --git a/birkhoff/__init__.py b/birkhoff/__init__.py index <HASH>..<HASH> 100644 --- a/birkhoff/__init__.py +++ b/birkhoff/__init__.py @@ -18,6 +18,6 @@ # Birkhoff. If not, see <http://www.gnu.org/licenses/>. #: The current version of this package. -__version__ = '0.0.4' +__version__ = '0.0.5-dev' from .decompose import birkhoff_von_neumann_decomposition diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -47,6 +47,6 @@ setup( test_suite='nose.collector', tests_require=['nose'], url='https://github.com/jfinkels/birkhoff', - version='0.0.4', + version='0.0.5-dev', zip_safe=False )
Set development version number to <I>-dev
jfinkels_birkhoff
train
8d3bfefc27cac103ef7b1ec88295f6f46701c2f2
diff --git a/src/2D.js b/src/2D.js index <HASH>..<HASH> 100644 --- a/src/2D.js +++ b/src/2D.js @@ -457,13 +457,7 @@ Crafty.c("2D", { sin: Math.sin(drad), deg: difference, rad: drad, - o: o, - matrix: { - M11: ct, - M12: st, - M21: -st, - M22: ct - } + o: o }); }, diff --git a/src/DOM.js b/src/DOM.js index <HASH>..<HASH> 100644 --- a/src/DOM.js +++ b/src/DOM.js @@ -73,21 +73,6 @@ Crafty.c("DOM", { this.bind("NewComponent", updateClass).bind("RemoveComponent", removeClass); - if (Crafty.support.prefix === "ms" && Crafty.support.version < 9) { - this._filters = {}; - - this.bind("Rotate", function (e) { - var m = e.matrix, - elem = this._element.style, - M11 = m.M11.toFixed(8), - M12 = m.M12.toFixed(8), - M21 = m.M21.toFixed(8), - M22 = m.M22.toFixed(8); - - this._filters.rotation = "progid:DXImageTransform.Microsoft.Matrix(M11=" + M11 + ", M12=" + M12 + ", M21=" + M21 + ", M22=" + M22 + ",sizingMethod='auto expand')"; - }); - } - this.bind("Remove", this.undraw); this.bind("RemoveComponent", function (compName) { if (compName === "DOM")
Remove IE8-specific rotation code and setup
craftyjs_Crafty
train
654442f33de17731619ee61b16b511c1cc3264a3
diff --git a/python_modules/dagster-graphql/dagster_graphql/schema/runs.py b/python_modules/dagster-graphql/dagster_graphql/schema/runs.py index <HASH>..<HASH> 100644 --- a/python_modules/dagster-graphql/dagster_graphql/schema/runs.py +++ b/python_modules/dagster-graphql/dagster_graphql/schema/runs.py @@ -813,8 +813,9 @@ def construct_basic_params(graphene_info, event_record, execution_plan): check.opt_inst_param(execution_plan, 'execution_plan', ExecutionPlan) return { 'runId': event_record.run_id, - 'message': event_record.user_message - or (event_record.dagster_event.message if event_record.dagster_event else None), + 'message': event_record.dagster_event.message + if event_record.dagster_event + else event_record.user_message, 'timestamp': int(event_record.timestamp * 1000), 'level': DauphinLogLevel.from_level(event_record.level), 'step': create_dauphin_step(graphene_info, event_record, execution_plan)
fix graphql log message. Summary: The GraphQL interface says this should not be null - so don't set it to None. Test Plan: existing snapshot tests. Reviewers: prha, max, schrockn. Reviewed By: schrockn. Differential Revision: <URL>
dagster-io_dagster
train
27fcdb1c54e73b95a0901c6b4cc1600538344d74
diff --git a/lib/Post.php b/lib/Post.php index <HASH>..<HASH> 100644 --- a/lib/Post.php +++ b/lib/Post.php @@ -1259,7 +1259,7 @@ class Post extends Core implements CoreInterface { */ public function date( $date_format = '' ) { $df = $date_format ? $date_format : get_option('date_format'); - $the_date = (string) mysql2date($df, $this->post_date); + $the_date = date_i18n($df, strtotime($this->post_date)); return apply_filters('get_the_date', $the_date, $df); } @@ -1283,7 +1283,7 @@ class Post extends Core implements CoreInterface { */ public function time( $time_format = '' ) { $tf = $time_format ? $time_format : get_option('time_format'); - $the_time = (string) mysql2date($tf, $this->post_date); + $the_time = date_i18n($tf, strtotime($this->post_date)); return apply_filters('get_the_time', $the_time, $tf); }
Updates the post and date methods to use date_i<I>n
timber_timber
train
d11b623fa5a94fcd3f387c0951408e98a7c6dcab
diff --git a/Annis-Web/src/main/webapp/javascript/annis/History.js b/Annis-Web/src/main/webapp/javascript/annis/History.js index <HASH>..<HASH> 100644 --- a/Annis-Web/src/main/webapp/javascript/annis/History.js +++ b/Annis-Web/src/main/webapp/javascript/annis/History.js @@ -66,6 +66,7 @@ Ext.onReady(function() columns : [ { header : '#', width : 30, + fixed: true, renderer : function(value, metadata, record, rowIndex, colIndex) { return (rowIndex + 1); @@ -74,11 +75,11 @@ Ext.onReady(function() id : 'query', header : 'Query', sortable : true, - width : 330, dataIndex : 'query' }, { header : "url", width : 30, + fixed: true, dataIndex : "citation", renderer : function(value) { @@ -96,11 +97,14 @@ Ext.onReady(function() } }), viewConfig : { - rowOverCls : 'history-hover' - }, - stripeRows : true, - width : 400, - height : 300 + rowOverCls : 'history-hover', + forceFit:true, + autoFill: true + }, + loadMask: true, + autoHeight: true, + autoWidth: true, + stripeRows : true }); // initiate window @@ -113,7 +117,8 @@ Ext.onReady(function() closeAction : 'hide', stateful : true, stateId : 'grid', - resizable : false + layout : 'fit', + autoScroll : true }); var historyWindow = function()
the history-window is now liquid
korpling_ANNIS
train
8a725508fa0a8310e1c8b723cde5d7503ef6c784
diff --git a/lib/resources/FileUploads.js b/lib/resources/FileUploads.js index <HASH>..<HASH> 100644 --- a/lib/resources/FileUploads.js +++ b/lib/resources/FileUploads.js @@ -3,7 +3,6 @@ var utils = require('../utils'); var StripeResource = require('../StripeResource'); var stripeMethod = StripeResource.method; -var _ = require('lodash'); var multipartDataGenerator = require('../MultipartDataGenerator'); module.exports = StripeResource.extend({
remove lodash when required but not used
stripe_stripe-node
train
314f11cb61a98fce96ce3a27c9ba5ec1936413fb
diff --git a/src/Database/Expression/IdentifierExpression.php b/src/Database/Expression/IdentifierExpression.php index <HASH>..<HASH> 100644 --- a/src/Database/Expression/IdentifierExpression.php +++ b/src/Database/Expression/IdentifierExpression.php @@ -18,7 +18,12 @@ use Cake\Database\ExpressionInterface; use Cake\Database\ValueBinder; /** - * Represents a single identifier name in the database + * Represents a single identifier name in the database. + * + * Identifier values are unsafe with user supplied data. + * Values will be quoted when identifier quoting is enabled. + * + * @see \Cake\Database\Query::identifier() */ class IdentifierExpression implements ExpressionInterface { diff --git a/tests/TestCase/Database/QueryTest.php b/tests/TestCase/Database/QueryTest.php index <HASH>..<HASH> 100644 --- a/tests/TestCase/Database/QueryTest.php +++ b/tests/TestCase/Database/QueryTest.php @@ -16,7 +16,6 @@ namespace Cake\Test\TestCase\Database; use Cake\Database\Expression\IdentifierExpression; use Cake\Database\ExpressionInterface; -use Cake\Database\IdentifierInterface; use Cake\Database\Query; use Cake\Database\StatementInterface; use Cake\Database\TypeMap; @@ -3394,13 +3393,9 @@ class QueryTest extends TestCase public function testIdentifierInterface() { $query = new Query($this->connection); - /* @var \Cake\Database\IdentifierInterface $identifier */ $identifier = $query->identifier('description'); - // should support these interfaces - $this->assertInstanceOf(IdentifierInterface::class, $identifier); $this->assertInstanceOf(ExpressionInterface::class, $identifier); - $this->assertEquals('description', $identifier->getIdentifier()); $identifier->setIdentifier('title');
Add a bit more documentation and fix up tests.
cakephp_cakephp
train
585522fc6bd3f3274bb99c406535450e7841cdcf
diff --git a/go/vendor/github.com/keybase/go-framed-msgpack-rpc/rpc/errors.go b/go/vendor/github.com/keybase/go-framed-msgpack-rpc/rpc/errors.go index <HASH>..<HASH> 100644 --- a/go/vendor/github.com/keybase/go-framed-msgpack-rpc/rpc/errors.go +++ b/go/vendor/github.com/keybase/go-framed-msgpack-rpc/rpc/errors.go @@ -113,22 +113,24 @@ func (c NilResultError) Error() string { } type RPCDecodeError struct { - err error - typ MethodType - len int - name string + err error + typ MethodType + len int + name string + ctype CompressionType } func (r RPCDecodeError) Error() string { - return fmt.Sprintf("RPC error. type: %s, method: %s, length: %d, error: %v", r.typ, r.name, r.len, r.err) + return fmt.Sprintf("RPC error. type: %s, method: %s, length: %d, compression: %v, error: %v", r.typ, r.name, r.len, r.ctype, r.err) } -func newRPCDecodeError(t MethodType, n string, l int, e error) RPCDecodeError { +func newRPCDecodeError(t MethodType, n string, l int, ctype CompressionType, err error) RPCDecodeError { return RPCDecodeError{ - err: e, - typ: t, - len: l, - name: n, + err: err, + typ: t, + len: l, + ctype: ctype, + name: n, } } diff --git a/go/vendor/github.com/keybase/go-framed-msgpack-rpc/rpc/message.go b/go/vendor/github.com/keybase/go-framed-msgpack-rpc/rpc/message.go index <HASH>..<HASH> 100644 --- a/go/vendor/github.com/keybase/go-framed-msgpack-rpc/rpc/message.go +++ b/go/vendor/github.com/keybase/go-framed-msgpack-rpc/rpc/message.go @@ -257,6 +257,9 @@ func (r rpcResponseMessage) Type() MethodType { } func (r rpcResponseMessage) Compression() CompressionType { + if r.c != nil { + return r.c.ctype + } return CompressionNone } @@ -436,7 +439,7 @@ func decodeRPC(l int, r *frameReader, p *protocolHandler, cc *callContainer, com typ := MethodInvalid if err := decoder.Decode(&typ); err != nil { - return nil, newRPCDecodeError(typ, "", l, err) + return nil, newRPCDecodeError(typ, "", l, CompressionNone, err) } var data rpcMessage @@ -452,16 +455,16 @@ func decodeRPC(l int, r *frameReader, p *protocolHandler, cc *callContainer, com case MethodCallCompressed: data = &rpcCallCompressedMessage{} default: - return nil, newRPCDecodeError(typ, "", l, errors.New("invalid RPC type")) + return nil, newRPCDecodeError(typ, "", l, CompressionNone, errors.New("invalid RPC type")) } dataLength := l - 1 if dataLength < data.MinLength() { - return nil, newRPCDecodeError(typ, "", l, errors.New("wrong message length")) + return nil, newRPCDecodeError(typ, "", l, CompressionNone, errors.New("wrong message length")) } if err := data.DecodeMessage(dataLength, decoder, p, cc, compressorCacher, instrumenterStorage); err != nil { - return data, newRPCDecodeError(typ, data.Name(), l, err) + return data, newRPCDecodeError(typ, data.Name(), l, data.Compression(), err) } return data, nil } diff --git a/go/vendor/vendor.json b/go/vendor/vendor.json index <HASH>..<HASH> 100644 --- a/go/vendor/vendor.json +++ b/go/vendor/vendor.json @@ -842,10 +842,10 @@ "revisionTime": "2020-01-23T00:39:47Z" }, { - "checksumSHA1": "zgpsAORgf6VAzF0yBB/Z3SLPPOk=", + "checksumSHA1": "qhkwhuKIYzEZmHMtGsegm0v7KKE=", "path": "github.com/keybase/go-framed-msgpack-rpc/rpc", - "revision": "a1327b90a382c895fb8dfa15d63ead435d11e7ab", - "revisionTime": "2020-03-09T18:03:39Z" + "revision": "26e5d1ace9c88d9794d620b1201b3f43939ae617", + "revisionTime": "2020-03-11T21:12:34Z" }, { "checksumSHA1": "RLs8GIV4e+D350pyzZh5RC3mgQg=",
revendor rpc library (#<I>)
keybase_client
train
55c3634359ff5ca81afee5e2140eeddd324445f1
diff --git a/src/Services/QueuedJobService.php b/src/Services/QueuedJobService.php index <HASH>..<HASH> 100644 --- a/src/Services/QueuedJobService.php +++ b/src/Services/QueuedJobService.php @@ -183,7 +183,11 @@ class QueuedJobService $jobDescriptor->Implementation = get_class($job); $jobDescriptor->StartAfter = $startAfter; - $jobDescriptor->RunAsID = $userId ? $userId : Security::getCurrentUser()->ID; + if ($userId === null) { + $userId = (Security::getCurrentUser() ? Security::getCurrentUser()->ID : null); + } + + $jobDescriptor->RunAsID = $userId; // copy data $this->copyJobToDescriptor($job, $jobDescriptor);
Check that Security::getCurrentUser() does not return null before accessing a property.
symbiote_silverstripe-queuedjobs
train
ba7dfa8c9ac2614f8c2e5db0619300b8940a2293
diff --git a/bin/node-pre-gyp-github.js b/bin/node-pre-gyp-github.js index <HASH>..<HASH> 100755 --- a/bin/node-pre-gyp-github.js +++ b/bin/node-pre-gyp-github.js @@ -5,6 +5,7 @@ const program = require('commander'); program .command('publish') + .storeOptionsAsProperties() .description('publishes the contents of .\\build\\stage\\{version} to the current version\'s GitHub release') .option("-r, --release", "publish immediately, do not create draft") .option("-s, --silent", "turns verbose messages off")
Fixed program builder by storing options as properties (#<I>)
bchr02_node-pre-gyp-github
train
2e51c3b04acad1cfe3d2dec733ca2c21b8204dd8
diff --git a/lib/share_progress/configuration.rb b/lib/share_progress/configuration.rb index <HASH>..<HASH> 100644 --- a/lib/share_progress/configuration.rb +++ b/lib/share_progress/configuration.rb @@ -16,7 +16,7 @@ module ShareProgress def initialize @share_progress_uri = ENV['SHARE_PROGRESS_URI'] || 'run.shareprogress.org/api/v1' - @share_progress_api_key = ENV['SHARE_PROGRESS_API_KEY'] + @share_progress_api_key = ENV['SHARE_PROGRESS_API_KEY'] || 'please_set_an_api_key' end end end
Setting a default API key to make sure all our tests pass.
SumOfUs_share_progress
train
8abce3b85f92156df25864ba871e243d481c3dfb
diff --git a/src/org/opencms/main/OpenCmsListener.java b/src/org/opencms/main/OpenCmsListener.java index <HASH>..<HASH> 100644 --- a/src/org/opencms/main/OpenCmsListener.java +++ b/src/org/opencms/main/OpenCmsListener.java @@ -142,13 +142,11 @@ public class OpenCmsListener implements ServletContextListener, HttpSessionListe Driver driver = drivers.nextElement(); try { DriverManager.deregisterDriver(driver); - System.out.println(Messages.get().getBundle().key( - Messages.LOG_ERROR_DERIGISTERING_JDBC_DRIVER_1, - driver.getClass().getName())); } catch (Throwable e) { System.out.println(Messages.get().getBundle().key( Messages.ERR_DEREGISTERING_JDBC_DRIVER_1, driver.getClass().getName())); + e.printStackTrace(System.out); } } }
Fixed error reporting for DB driver cleanup.
alkacon_opencms-core
train
21bb4f1c8b12ea5458fbe5868ca32f044038a9cb
diff --git a/lib/router/router.js b/lib/router/router.js index <HASH>..<HASH> 100644 --- a/lib/router/router.js +++ b/lib/router/router.js @@ -127,7 +127,7 @@ p.getInfo = function(req) { } // forward original image if no operation on image and browser support image format - if (routeInfo.imageSteps.length === 0 && this.options.ifNoOperation && this.options.ifNoOperation.justForward === true) { + if (routeInfo.imageSteps.length === 0 && ((this.options.ifNoOperation && this.options.ifNoOperation.justForward === true) || routeInfo.urlInfo.query.noop === 'true')) { contentType = getImageContentType(req,routeInfo,this.options.ifNoOperation.imageTypeIfUnknown); if (contentType) { routeInfo.contentType = contentType;
Add noop query. If the noop query is true and there is no operation required, just forward the original
asilvas_node-image-steam
train
a2135ee53e6b96abbe48ce7f9f50127c48390f05
diff --git a/packages/react-swipeable-views/src/SwipeableViews.js b/packages/react-swipeable-views/src/SwipeableViews.js index <HASH>..<HASH> 100644 --- a/packages/react-swipeable-views/src/SwipeableViews.js +++ b/packages/react-swipeable-views/src/SwipeableViews.js @@ -148,7 +148,7 @@ export function getDomTreeShapes(element, rootNode) { while (element && element !== rootNode) { // We reach a Swipeable View, no need to look higher in the dom tree. - if (element.getAttribute('role') === 'option') { + if (element.hasAttribute('data-swipeable')) { break; } @@ -776,7 +776,6 @@ class SwipeableViews extends Component { <div ref={(node) => { this.rootNode = node; }} style={Object.assign({}, axisProperties.root[axis], style)} - role="listbox" {...other} {...touchEvents} onScroll={this.handleScroll} @@ -809,7 +808,7 @@ class SwipeableViews extends Component { style={slideStyle} className={slideClassName} aria-hidden={hidden} - role="option" + data-swipeable="true" > {child} </div> diff --git a/packages/react-swipeable-views/src/SwipeableViews.spec.js b/packages/react-swipeable-views/src/SwipeableViews.spec.js index <HASH>..<HASH> 100644 --- a/packages/react-swipeable-views/src/SwipeableViews.spec.js +++ b/packages/react-swipeable-views/src/SwipeableViews.spec.js @@ -422,16 +422,16 @@ describe('SwipeableViews', () => { }); describe('getDomTreeShapes', () => { - it('should stop at the role === option', () => { + it('should stop at the data-swipeable attribute', () => { const rootNode = {}; const optionNode = { - getAttribute: () => 'option', + hasAttribute: () => true, parentNode: rootNode, }; const targetNode = { - getAttribute: () => null, + hasAttribute: () => false, parentNode: optionNode, clientWidth: 10, scrollWidth: 20,
Fix accessibility caused by aria roles. Replace role="option" attr with data-swipeable
oliviertassinari_react-swipeable-views
train
bf0b2c4d9ea561df3fad9b31782240b94ecfc412
diff --git a/application/briefkasten/dropbox.py b/application/briefkasten/dropbox.py index <HASH>..<HASH> 100644 --- a/application/briefkasten/dropbox.py +++ b/application/briefkasten/dropbox.py @@ -309,7 +309,7 @@ class Dropbox(object): """ # TODO: here would be the place to move the backup of the dirty attachments, depending # on the current state of the dropbox - self.status = u'700 creating final encrypted backup of cleansed attachments' + self.status = u'270 creating final encrypted backup of cleansed attachments' return self._create_encrypted_zip(source='clean', fs_target_dir=self.container.fs_archive_cleansed) def _notify_editors(self):
lower status - otherwise it will be interpreted as an error
ZeitOnline_briefkasten
train
66be62f16641143f1d19435932baa85a72f8f01b
diff --git a/src/foundations/parser.py b/src/foundations/parser.py index <HASH>..<HASH> 100644 --- a/src/foundations/parser.py +++ b/src/foundations/parser.py @@ -404,7 +404,7 @@ class Parser(io.File): self._comments[section + self._namespaceSplitter + self._commentMarker + str(commentId)] = {"id" : commentId, "content" : line.strip().strip(self._commentLimiter)} commentId += 1 elif self._splitter in line: - lineTokens = line.split(self._splitter) + lineTokens = line.split(self._splitter, 1) attributes[section + self._namespaceSplitter + lineTokens[0].strip()] = lineTokens[1].strip().strip("\"") self._sections[section] = attributes
Ensure that the "parser" module "parse" method slices each attribute only once.
KelSolaar_Foundations
train
cdc4c6de258899bd481a999989eb7f16ddedce43
diff --git a/tests/test_sftp.py b/tests/test_sftp.py index <HASH>..<HASH> 100755 --- a/tests/test_sftp.py +++ b/tests/test_sftp.py @@ -610,7 +610,7 @@ class SFTPTest (unittest.TestCase): with sftp.open(FOLDER + '/bunny.txt', 'rb') as f: self.assertEqual(text, f.read(128)) - self.assertEqual((41, 41), saved_progress[-1]) + self.assertEqual([(41, 41)], saved_progress) os.unlink(localname) fd, localname = mkstemp() @@ -620,7 +620,7 @@ class SFTPTest (unittest.TestCase): with open(localname, 'rb') as f: self.assertEqual(text, f.read(128)) - self.assertEqual((41, 41), saved_progress[-1]) + self.assertEqual([(41, 41)], saved_progress) os.unlink(localname) sftp.unlink(FOLDER + '/bunny.txt')
Update existing test to prove #<I>
paramiko_paramiko
train
12f5ad2b36af9dd8b881b0bb17a8daf8eeb31796
diff --git a/pystache/template.py b/pystache/template.py index <HASH>..<HASH> 100644 --- a/pystache/template.py +++ b/pystache/template.py @@ -3,6 +3,10 @@ import cgi import inspect import types + +END_OF_LINE_CHARACTERS = ['\r', '\n'] + + def call(val, view, template=None): if callable(val): (args, _, _, _) = inspect.getargspec(val) @@ -162,18 +166,18 @@ class Template(object): elif captures['raw'] is not None: captures.update(tag='{', name=captures['raw_name']) - # Save the literal text content. parse_tree.append(captures['content']) - end_index = match.end() + match_index = match.end('content') + end_index = match.end() # Standalone (non-interpolation) tags consume the entire line, # both leading whitespace and trailing newline. - tagBeganLine = not match_index or template[match_index - 1] in ['\r', '\n'] - tagEndedLine = (end_index == len(template) or template[end_index] in ['\r', '\n']) - interpolationTag = captures['tag'] in ['', '&', '{'] + did_tag_begin_line = match_index == 0 or template[match_index - 1] in END_OF_LINE_CHARACTERS + did_tag_end_line = end_index == len(template) or template[end_index] in END_OF_LINE_CHARACTERS + is_tag_interpolating = captures['tag'] in ['', '&', '{'] - if (tagBeganLine and tagEndedLine and not interpolationTag): + if did_tag_begin_line and did_tag_end_line and not is_tag_interpolating: if end_index < len(template): end_index += template[end_index] == '\r' and 1 or 0 if end_index < len(template):
Renamed some variables in _handle_match().
defunkt_pystache
train
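The renamed variables above encode mustache's standalone-tag rule: a non-interpolating tag that both begins and ends its line consumes the whole line, leading whitespace and trailing newline included. A condensed sketch of the same three checks (this is not pystache's actual parser; the function and arguments are mine):

END_OF_LINE_CHARACTERS = ['\r', '\n']

def is_standalone(template, match_index, end_index, tag):
    began_line = match_index == 0 or template[match_index - 1] in END_OF_LINE_CHARACTERS
    ended_line = end_index == len(template) or template[end_index] in END_OF_LINE_CHARACTERS
    interpolating = tag in ['', '&', '{']
    return began_line and ended_line and not interpolating

assert is_standalone('{{#section}}\nhello', 0, 12, '#')
assert not is_standalone('hi {{name}}!', 3, 11, '')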
4efbf8fc3f458fc3e0d84db938afb415661089f0
diff --git a/lib/jpmobile/mobile/abstract_mobile.rb b/lib/jpmobile/mobile/abstract_mobile.rb
index <HASH>..<HASH> 100644
--- a/lib/jpmobile/mobile/abstract_mobile.rb
+++ b/lib/jpmobile/mobile/abstract_mobile.rb
@@ -58,6 +58,16 @@ module Jpmobile::Mobile
       false
     end
 
+    # Whether to apply Jpmobile::Rack::Filter
+    def apply_filter?
+      true
+    end
+
+    # Whether to apply Jpmobile::Rack::ParamsFilter
+    def apply_params_filter?
+      true
+    end
+
     # For encoding conversion
     def to_internal(str)
       str
diff --git a/lib/jpmobile/mobile/iphone.rb b/lib/jpmobile/mobile/iphone.rb
index <HASH>..<HASH> 100644
--- a/lib/jpmobile/mobile/iphone.rb
+++ b/lib/jpmobile/mobile/iphone.rb
@@ -7,6 +7,16 @@ module Jpmobile::Mobile
     # Regular expression for the supported User-Agent
     USER_AGENT_REGEXP = /iPhone/
 
+    # Apply Jpmobile::Rack::Filter
+    def apply_filter?
+      true
+    end
+
+    # Apply Jpmobile::Rack::ParamsFilter
+    def apply_params_filter?
+      true
+    end
+
     # Character-encoding conversion
     def to_internal(str)
       # Convert emoji to numeric character references
diff --git a/lib/jpmobile/mobile/smart_phone.rb b/lib/jpmobile/mobile/smart_phone.rb
index <HASH>..<HASH> 100644
--- a/lib/jpmobile/mobile/smart_phone.rb
+++ b/lib/jpmobile/mobile/smart_phone.rb
@@ -17,5 +17,15 @@ module Jpmobile::Mobile
     def smart_phone?
       true
     end
+
+    # Do not apply Jpmobile::Rack::Filter
+    def apply_filter?
+      false
+    end
+
+    # Do not apply Jpmobile::Rack::ParamsFilter
+    def apply_params_filter?
+      false
+    end
   end
 end
diff --git a/lib/jpmobile/rack/filter.rb b/lib/jpmobile/rack/filter.rb
index <HASH>..<HASH> 100644
--- a/lib/jpmobile/rack/filter.rb
+++ b/lib/jpmobile/rack/filter.rb
@@ -15,7 +15,7 @@ module Jpmobile
 
       status, env, response = @app.call(env)
 
-      if mobile and env['Content-Type'] =~ %r!text/html|application/xhtml\+xml!
+      if mobile and mobile.apply_filter? and env['Content-Type'] =~ %r!text/html|application/xhtml\+xml!
         type, charset = env['Content-Type'].split(/;\s*charset=/)
 
         body = response_to_body(response)
diff --git a/lib/jpmobile/rack/params_filter.rb b/lib/jpmobile/rack/params_filter.rb
index <HASH>..<HASH> 100644
--- a/lib/jpmobile/rack/params_filter.rb
+++ b/lib/jpmobile/rack/params_filter.rb
@@ -9,7 +9,7 @@ module Jpmobile
 
     def call(env)
       # Input
-      if @mobile = env['rack.jpmobile']
+      if @mobile = env['rack.jpmobile'] and @mobile.apply_params_filter?
         # Split parameters into key and value
         # form_params
         if env['REQUEST_METHOD'] == 'POST'
Not applying Filter/ParamsFilter for smart-phones
jpmobile_jpmobile
train
bbcf32239d0db46628385b75c3aa88061fcc4bff
diff --git a/base.php b/base.php index <HASH>..<HASH> 100644 --- a/base.php +++ b/base.php @@ -1642,6 +1642,7 @@ class Base extends Prefab { $scheme=isset($_SERVER['HTTPS']) && $_SERVER['HTTPS']=='on' || isset($headers['X-Forwarded-Proto']) && $headers['X-Forwarded-Proto']=='https'?'https':'http'; + $port_suffix = isset($_SERVER["SERVER_PORT"]) && $_SERVER["SERVER_PORT"] != ($scheme == "https" ? "443" : "80") ? ":{$_SERVER["SERVER_PORT"]}" : ""; if (function_exists('apache_setenv')) { // Work around Apache pre-2.4 VirtualDocumentRoot bug $_SERVER['DOCUMENT_ROOT']=str_replace($_SERVER['SCRIPT_NAME'],'', @@ -1721,7 +1722,7 @@ class Base extends Prefab { 'QUIET'=>FALSE, 'RAW'=>FALSE, 'REALM'=>$scheme.'://'. - $_SERVER['SERVER_NAME'].$_SERVER['REQUEST_URI'], + $_SERVER['SERVER_NAME'].$port_suffix.$_SERVER['REQUEST_URI'], 'RESPONSE'=>'', 'ROOT'=>$_SERVER['DOCUMENT_ROOT'], 'ROUTES'=>array(),
Add port suffix to REALM variable
bcosca_fatfree-core
train
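The REALM fix appends the port only when it differs from the scheme's default. The same rule reduced to Python (the function and variable names are mine, not fatfree's):

def port_suffix(scheme, port):
    default = '443' if scheme == 'https' else '80'
    return '' if port is None or str(port) == default else ':%s' % port

assert port_suffix('http', 80) == ''
assert port_suffix('https', 443) == ''
assert port_suffix('http', 8080) == ':8080'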
b977e305d28c385cc208425edc5af26055534f83
diff --git a/context.go b/context.go index <HASH>..<HASH> 100644 --- a/context.go +++ b/context.go @@ -114,13 +114,13 @@ func Run(ctx context.Context, actions ...Action) error { if c == nil || c.Allocator == nil { return ErrInvalidContext } - c.first = c.Browser == nil - if c.first { + if c.Browser == nil { browser, err := c.Allocator.Allocate(ctx) if err != nil { return err } c.Browser = browser + c.first = true } if c.Target == nil { if err := c.newSession(ctx); err != nil { diff --git a/context_test.go b/context_test.go index <HASH>..<HASH> 100644 --- a/context_test.go +++ b/context_test.go @@ -41,4 +41,10 @@ func TestTargets(t *testing.T) { // Cancelling the second context should close the second tab alone. cancel2() wantTargets(ctx1, 1) + + // We used to have a bug where Run would reset the first context as if + // it weren't the first, breaking its cancellation. + if err := Run(ctx1); err != nil { + t.Fatal(err) + } }
fix regression when using Run twice on the first ctx We don't want to always set c.first, as that can change the field from true to false.
chromedp_chromedp
train
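The Go fix sets c.first only on the branch that actually allocates a browser, so a second Run on the same context cannot flip the flag back to false. The shape of that fix transliterated to Python; the class and allocator are illustrative stand-ins, not chromedp's API:

class Context:
    def __init__(self, allocator):
        self.allocator = allocator
        self.browser = None
        self.first = False

    def run(self):
        # Mark "first" only when we allocate; a later run() that finds an
        # existing browser must leave the flag untouched.
        if self.browser is None:
            self.browser = self.allocator.allocate()
            self.first = True

class _Alloc:
    def allocate(self):
        return object()

ctx = Context(_Alloc())
ctx.run()
ctx.run()  # must not reset ctx.first
assert ctx.first and ctx.browser is not None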
d8e1aa9e285f0e032112618d655930dedf46d3da
diff --git a/protocol/src/main/java/net/kuujo/copycat/raft/Member.java b/protocol/src/main/java/net/kuujo/copycat/raft/Member.java index <HASH>..<HASH> 100644 --- a/protocol/src/main/java/net/kuujo/copycat/raft/Member.java +++ b/protocol/src/main/java/net/kuujo/copycat/raft/Member.java @@ -133,6 +133,12 @@ public class Member implements AlleycatSerializable { } @Override + protected void reset() { + super.reset(); + this.member = new Member(); + } + + @Override protected void reset(Member member) { this.member = member; } @@ -172,6 +178,7 @@ public class Member implements AlleycatSerializable { @Override public Member build() { + close(); return member; } } diff --git a/protocol/src/main/java/net/kuujo/copycat/raft/Members.java b/protocol/src/main/java/net/kuujo/copycat/raft/Members.java index <HASH>..<HASH> 100644 --- a/protocol/src/main/java/net/kuujo/copycat/raft/Members.java +++ b/protocol/src/main/java/net/kuujo/copycat/raft/Members.java @@ -103,8 +103,8 @@ public class Members implements AlleycatSerializable { @Override protected void reset() { - members.members.clear(); - members.list.clear(); + super.reset(); + this.members = new Members(); } @Override
Ensure members are properly reset when builders are acquired from builder pool.
atomix_atomix
train
f8c0d02591acd75d480c4f82e0dede03f3187131
diff --git a/rapidoid-io/src/main/java/org/rapidoid/io/Res.java b/rapidoid-io/src/main/java/org/rapidoid/io/Res.java index <HASH>..<HASH> 100644 --- a/rapidoid-io/src/main/java/org/rapidoid/io/Res.java +++ b/rapidoid-io/src/main/java/org/rapidoid/io/Res.java @@ -3,6 +3,7 @@ package org.rapidoid.io; import java.io.File; import java.io.Reader; import java.io.StringReader; +import java.util.Arrays; import java.util.List; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ScheduledThreadPoolExecutor; @@ -83,6 +84,8 @@ public class Res { protected void loadResource() { // micro-caching the file content, expires after 1 second if (U.time() - lastUpdatedOn >= 1000) { + byte[] oldBytes = bytes; + load(name); if (bytes == null) { @@ -90,13 +93,16 @@ public class Res { load(IO.getDefaultFilename(name)); } - notifyChangeListeners(); + if (!U.eq(oldBytes, bytes) && (oldBytes == null || bytes == null || !Arrays.equals(oldBytes, bytes))) { + invalidate(); + } } } protected void invalidate() { content = null; lastUpdatedOn = U.time(); + notifyChangeListeners(); } protected void load(String filename) { @@ -108,13 +114,11 @@ public class Res { Log.debug("Reloading file", "name", filename); this.lastModified = file.lastModified(); this.bytes = IO.loadBytes(filename); - invalidate(); } } else { // it might not exist or it might be on the classpath or compressed in a JAR Log.debug("Reloading classpath resource", "name", filename); this.bytes = IO.loadBytes(filename); - invalidate(); } } @@ -154,7 +158,7 @@ public class Res { private void notifyChangeListeners() { if (!changeListeners.isEmpty()) { - Log.debug("Resource might have changed", "name", name); + Log.info("Resource has changed, reloading...", "name", name); } for (Runnable listener : changeListeners) {
Fixed the resource change detection to avoid false positives.
rapidoid_rapidoid
train
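The Java change above fires change listeners only when the reloaded bytes really differ from the cached ones, instead of on every micro-cache refresh. A small Python sketch of the same guard (the data structures are hypothetical):

def reload_resource(resource, load_bytes):
    old = resource['bytes']
    resource['bytes'] = load_bytes()
    if old != resource['bytes']:  # notify only on a real change, not every poll
        for listener in resource['listeners']:
            listener()

calls = []
res = {'bytes': b'v1', 'listeners': [lambda: calls.append(1)]}
reload_resource(res, lambda: b'v1')  # unchanged: listeners stay quiet
reload_resource(res, lambda: b'v2')  # changed: listeners fire once
assert calls == [1]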
9b79aab38717e16151c679168fa230e124e6dbd8
diff --git a/spec/easemob/users_spec.rb b/spec/easemob/users_spec.rb index <HASH>..<HASH> 100644 --- a/spec/easemob/users_spec.rb +++ b/spec/easemob/users_spec.rb @@ -164,10 +164,10 @@ RSpec.describe Easemob::Users do describe '#deactivate_user' do it 'Deactivate a user' do - res = Easemob.deactivate_user('u9') + res = Easemob.deactivate_user('u8') expect(res.code).to eq 200 h1 = JSON.parse res.to_s - expect(h1['entities'][0]['username']).to eq 'u9' + expect(h1['entities'][0]['username']).to eq 'u8' expect(h1['entities'][0]['activated']).to be false end end
u9 is already deleted in another test case.
bayetech_easemob
train
8d1ffdd4b2844e9d6a83132a9ee04e7f71c57331
diff --git a/CHANGELOG.md b/CHANGELOG.md index <HASH>..<HASH> 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,9 @@ This release adds the [endpoints-support.md](endpoints-support.md) file to the r Also adds the [TESTING.md](TESTING.md) file to the repository, in order to guide the test execution and implementation for this SDK. +#### Bug fixes & Enhancements +- [#285](https://github.com/HewlettPackard/oneview-sdk-ruby/issues/285) Add helper method to set a StorageSystem to a StoragePool of API500 + ## v5.1.1 #### Bug fixes & Enhancements diff --git a/lib/oneview-sdk/resource/api500/c7000/storage_pool.rb b/lib/oneview-sdk/resource/api500/c7000/storage_pool.rb index <HASH>..<HASH> 100644 --- a/lib/oneview-sdk/resource/api500/c7000/storage_pool.rb +++ b/lib/oneview-sdk/resource/api500/c7000/storage_pool.rb @@ -96,6 +96,13 @@ module OneviewSDK update refresh end + + # Sets the storage system + # @param [OneviewSDK::StorageSystem] storage_system + def set_storage_system(storage_system) + raise IncompleteResource, 'Please set the storage system\'s uri attribute!' unless storage_system['uri'] + set('storageSystemUri', storage_system['uri']) + end end end end diff --git a/spec/unit/resource/api500/c7000/storage_pool_spec.rb b/spec/unit/resource/api500/c7000/storage_pool_spec.rb index <HASH>..<HASH> 100644 --- a/spec/unit/resource/api500/c7000/storage_pool_spec.rb +++ b/spec/unit/resource/api500/c7000/storage_pool_spec.rb @@ -126,4 +126,18 @@ RSpec.describe OneviewSDK::API500::C7000::StoragePool do target.exists? end end + + describe '#set_storage_system' do + it 'should set the storageSystemUri' do + storage_system = OneviewSDK::API500::C7000::StorageSystem.new(@client_500, uri: '/storage-sytems/1') + expect(target['storageSystemUri']).to eq(nil) + target.set_storage_system(storage_system) + expect(target['storageSystemUri']).to eq('/storage-sytems/1') + end + + it 'should throw error when StorageSystem URI is not present' do + storage_system = OneviewSDK::API500::C7000::StorageSystem.new(@client_500) + expect { target.set_storage_system(storage_system) }.to raise_error(OneviewSDK::IncompleteResource, /storage system\'s uri/) + end + end end
Added helper method to add StorageSystem to StoragePool of API<I>
HewlettPackard_oneview-sdk-ruby
train
dbaa18beee35db3fe66adad6af0a9a4a5a69ad27
diff --git a/chevron/renderer.py b/chevron/renderer.py index <HASH>..<HASH> 100644 --- a/chevron/renderer.py +++ b/chevron/renderer.py @@ -24,7 +24,7 @@ if sys.version_info[0] == 3: else: # python 2 python3 = False unicode_type = unicode - string_type = basestring + string_type = basestring # noqa: F821 (This is defined in python2) #
Fix flake8 not realizing this line is python2
noahmorrison_chevron
train
459be90492d8ba48d9bd04841ac791e4928c0635
diff --git a/conn.go b/conn.go index <HASH>..<HASH> 100644 --- a/conn.go +++ b/conn.go @@ -168,7 +168,7 @@ func (cn *conn) simpleQuery(q string) (res driver.Result, err error) { func (cn *conn) prepareTo(q, stmtName string) (_ driver.Stmt, err error) { defer errRecover(&err) - st := &stmt{cn: cn, name: stmtName} + st := &stmt{cn: cn, name: stmtName, query: q} b := newWriteBuf('P') b.string(st.name) @@ -378,6 +378,7 @@ func (cn *conn) auth(r *readBuf, o Values) { type stmt struct { cn *conn name string + query string cols []string nparams int ooid []int @@ -420,6 +421,10 @@ func (st *stmt) Query(v []driver.Value) (_ driver.Rows, err error) { func (st *stmt) Exec(v []driver.Value) (res driver.Result, err error) { defer errRecover(&err) + + if len(v) == 0 { + return st.cn.simpleQuery(st.query) + } st.exec(v) for {
Use simpleQuery for prepared Exec()s, too.
bmizerany_pq
train
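The Go patch stores the query text on the prepared statement so that an Exec with zero parameters can fall back to the simple-query protocol. The same control flow in Python, against a hypothetical driver interface (simple_query and execute_prepared are illustrative names):

class Stmt(object):
    def __init__(self, conn, name, query):
        self.conn = conn
        self.name = name
        # Keep the query text so a parameterless exec can bypass the
        # prepared-statement path entirely.
        self.query = query

    def exec_(self, params):
        if not params:
            return self.conn.simple_query(self.query)
        return self.conn.execute_prepared(self.name, params)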
478f409ccab1b7bfb73653aed90756ad0ee5cd44
diff --git a/pkg/k8s/watchers/pod.go b/pkg/k8s/watchers/pod.go index <HASH>..<HASH> 100644 --- a/pkg/k8s/watchers/pod.go +++ b/pkg/k8s/watchers/pod.go @@ -79,7 +79,12 @@ func (k *K8sWatcher) createPodController(getter cache.Getter, fieldSelector fiel // handling. if ep := k.endpointManager.LookupPodName(podNSName); ep != nil { epCreatedAt := ep.GetCreatedAt() - metrics.EventLagK8s.Set(time.Since(epCreatedAt).Seconds()) + timeSinceEpCreated := time.Since(epCreatedAt) + if timeSinceEpCreated <= 0 { + metrics.EventLagK8s.Set(0) + } else { + metrics.EventLagK8s.Set(timeSinceEpCreated.Round(time.Second).Seconds()) + } } err := k.addK8sPodV1(pod) k.K8sEventProcessed(metricPod, metricCreate, err == nil)
pkg/k8s: fix k8s_event_lag_seconds for negative time On some occasions the metric `k8s_event_lag_seconds` could be reported as an overflowed value such as `<I>`. This commit fixes that by checking whether the calculated value is less than zero and only setting the metric for non-negative times. Fixes: 4e<I> ("pkg/endpoint: calculate Kube API-Server lag from pod events")
cilium_cilium
train
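The fix clamps a negative elapsed time (possible when clocks disagree about when the endpoint was created) to zero before exporting it, since a negative duration pushed into the gauge rendered as a huge overflowed number. The same clamp in Python; the gauge object here is a hypothetical stand-in for the Prometheus metric:

import time

def record_event_lag(gauge, ep_created_at):
    lag = time.time() - ep_created_at
    # A skewed clock can make lag negative; clamp to zero so the gauge
    # never receives a value it would display as an absurd overflow.
    gauge.set(max(0.0, round(lag)))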
683f1b7ebd0a37c4b80f69475648f5a4c849dd25
diff --git a/icrawler/builtin/google.py b/icrawler/builtin/google.py index <HASH>..<HASH> 100644 --- a/icrawler/builtin/google.py +++ b/icrawler/builtin/google.py @@ -146,8 +146,8 @@ class GoogleParser(Parser): response.content.decode('utf-8', 'ignore'), 'lxml') image_divs = soup.find_all('script') for div in image_divs: - txt = div.text - if not txt.startswith('AF_initDataCallback'): + txt = div.string + if txt is None or not txt.startswith('AF_initDataCallback'): continue if 'ds:1' not in txt: continue
Fix GoogleParser - BeautifulSoup4 <I> Attribute `.text` is not supported for `script` tags anymore starting with beautifulsoup4 <I>
hellock_icrawler
train
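In Beautiful Soup 4, tag.string is None whenever a tag does not contain exactly one string child, whereas tag.text concatenates whatever text it can find; the commit message attributes the breakage to a bs4 behaviour change for <script> tags. Guarding against None, as the patch does, avoids an AttributeError on startswith. A self-contained demonstration using the real bs4 API:

from bs4 import BeautifulSoup

html = ('<script>AF_initDataCallback({key: "ds:1"});</script>'
        '<script src="external.js"></script>')
soup = BeautifulSoup(html, 'html.parser')
for tag in soup.find_all('script'):
    txt = tag.string  # None for the child-less <script src=...> tag
    if txt is None or not txt.startswith('AF_initDataCallback'):
        continue
    print('found data callback payload')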
89c8b769fec9446acf75c48a016f7725fa52670c
diff --git a/test/tm2z.js b/test/tm2z.js index <HASH>..<HASH> 100644 --- a/test/tm2z.js +++ b/test/tm2z.js @@ -57,10 +57,14 @@ Vector.mapnik.register_fonts(path.join(__dirname, 'fonts', 'source-sans-pro')); }); }); -test('tm2z+http Z_DATA_ERROR', function(assert) { +test('tm2z+http ENOTFOUND or Z_DATA_ERROR', function(assert) { tilelive.load('tm2z+http://not-a-valid-domain/patternstyle.tm2z', function(err, source) { assert.ok(err, 'has error'); - assert.equal(err.code, 'Z_DATA_ERROR', 'code: Z_DATA_ERROR'); + if (err.code && err.code === 'Z_DATA_ERROR') { + assert.equal(err.code, 'Z_DATA_ERROR', 'code: Z_DATA_ERROR'); + } else { + assert.equal(err.code, 'ENOTFOUND', 'code: ENOTFOUND'); + } assert.end(); }); });
Fix so that error message works for both systems
mapbox_tilelive-vector
train
fdd13a8e1464f0f4adea251e18886bab4c2d9bfb
diff --git a/SingularityService/src/main/java/com/hubspot/singularity/config/SingularityConfiguration.java b/SingularityService/src/main/java/com/hubspot/singularity/config/SingularityConfiguration.java index <HASH>..<HASH> 100644 --- a/SingularityService/src/main/java/com/hubspot/singularity/config/SingularityConfiguration.java +++ b/SingularityService/src/main/java/com/hubspot/singularity/config/SingularityConfiguration.java @@ -322,7 +322,7 @@ public class SingularityConfiguration extends Configuration { private int maxDecommissioningSlaves = 2; - private long delayPollersWhenDeltaOverMs = 30000; + private long delayPollersWhenDeltaOverMs = 15000; private boolean delayOfferProcessingForLargeStatusUpdateDelta = true;
set delay at <I>s
HubSpot_Singularity
train
b8d2741c72ba645c8a24b2b05f56c991c6b47fdf
diff --git a/openquake/calculators/event_based.py b/openquake/calculators/event_based.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/event_based.py +++ b/openquake/calculators/event_based.py @@ -256,6 +256,7 @@ class EventBasedCalculator(base.HazardCalculator): self.src_filter, self.csm = self.filter_csm() rlzs_assoc = self.csm.info.get_rlzs_assoc() samples_by_grp = self.csm.info.get_samples_by_grp() + self.rupser = calc.RuptureSerializer(self.datastore) for src in self.csm.get_sources(): if oq.save_ruptures and not oq.ground_motion_fields: self.gmf_size += max_gmf_size( @@ -274,6 +275,7 @@ class EventBasedCalculator(base.HazardCalculator): # save the events always and the ruptures if oq.save_ruptures if hasattr(src, 'eb_ruptures'): self.save_ruptures(src.eb_ruptures) + self.rupser.close() with self.monitor('store source_info', autoflush=True): acc = mock.Mock(eff_ruptures={ grp.id: sum(src.num_ruptures for src in grp) @@ -401,11 +403,7 @@ class EventBasedCalculator(base.HazardCalculator): ds.set_nbytes('gmf_data') def init(self): - """ - Set the random seed passed to the SourceManager and the - minimum_intensity dictionary. - """ - self.rupser = calc.RuptureSerializer(self.datastore) + pass def post_execute(self, result): """ @@ -415,7 +413,6 @@ class EventBasedCalculator(base.HazardCalculator): N = len(self.sitecol.complete) L = len(oq.imtls.array) if oq.hazard_calculation_id is None: - self.rupser.close() num_events = sum(set_counts(self.datastore, 'events').values()) if num_events == 0: raise RuntimeError(
Moved .rupser
gem_oq-engine
train
3313954724037d9aae02ece4913a6fa623743433
diff --git a/mycluster/lsf.py b/mycluster/lsf.py
index <HASH>..<HASH> 100644
--- a/mycluster/lsf.py
+++ b/mycluster/lsf.py
@@ -154,6 +154,11 @@ def create_submit(queue_id,**kwargs):
     if 'no_syscribe' in kwargs:
         record_job = ""
 
+    if 'openmpi_args' not in kwargs:
+        openmpi_args = "-bysocket -bind-to-socket"
+    else:
+        openmpi_args = kwargs['openmpi_args']
+
     script=Template(r"""#!/bin/bash
 #
 # LSF job submission script generated by MyCluster
@@ -196,7 +201,7 @@ export OMP_PROC_BIND=true
 export OMP_PLACES=sockets
 
 # OpenMPI
-export OMPI_CMD="mpiexec -n $$NUM_TASKS -npernode $$TASKS_PER_NODE -bysocket -bind-to-socket"
+export OMPI_CMD="mpiexec -n $$NUM_TASKS -npernode $$TASKS_PER_NODE $openmpi_args"
 
 # MVAPICH2
 export MV2_CPU_BINDING_LEVEL=SOCKET
@@ -259,6 +264,7 @@ echo -e "Complete========\n"
                            'num_nodes':num_nodes,
                            'project_name': project_name,
                            'wall_clock' : wall_clock,
+                           'openmpi_args': openmpi_args,
                            })
 
     return script_str
diff --git a/mycluster/sge.py b/mycluster/sge.py
index <HASH>..<HASH> 100644
--- a/mycluster/sge.py
+++ b/mycluster/sge.py
@@ -277,6 +277,11 @@ def create_submit(queue_id,**kwargs):
     if 'no_syscribe' in kwargs:
         record_job = ""
 
+    if 'openmpi_args' not in kwargs:
+        openmpi_args = "-bysocket -bind-to-socket"
+    else:
+        openmpi_args = kwargs['openmpi_args']
+
     script=Template(r"""#!/bin/bash
 #
 # SGE job submission script generated by MyCluster
@@ -315,7 +320,7 @@ export OMP_PROC_BIND=true
 export OMP_PLACES=sockets
 
 # OpenMPI
-export OMPI_CMD="mpiexec -n $$NUM_TASKS -npernode $$TASKS_PER_NODE -bysocket -bind-to-socket"
+export OMPI_CMD="mpiexec -n $$NUM_TASKS -npernode $$TASKS_PER_NODE $openmpi_args"
 
 # MVAPICH2
 export MV2_CPU_BINDING_LEVEL=SOCKET
@@ -378,6 +383,7 @@ echo -e "Complete========\n"
                            'num_nodes':num_nodes,
                            'project_name': project_name,
                            'wall_clock' : wall_clock,
+                           'openmpi_args': openmpi_args,
                            })
 
     return script_str
diff --git a/mycluster/slurm.py b/mycluster/slurm.py
index <HASH>..<HASH> 100644
--- a/mycluster/slurm.py
+++ b/mycluster/slurm.py
@@ -148,6 +148,11 @@ def create_submit(queue_id,**kwargs):
     if 'no_syscribe' in kwargs:
         record_job = ""
 
+    if 'openmpi_args' not in kwargs:
+        openmpi_args = "-bysocket -bind-to-socket"
+    else:
+        openmpi_args = kwargs['openmpi_args']
+
     script=Template(r"""#!/bin/bash
 #
 # SLURM job submission script generated by MyCluster
@@ -190,7 +195,7 @@ export OMP_PROC_BIND=true
 export OMP_PLACES=sockets
 
 # OpenMPI
-export OMPI_CMD="mpiexec -n $$NUM_TASKS -npernode $$TASKS_PER_NODE -bysocket -bind-to-socket"
+export OMPI_CMD="mpiexec -n $$NUM_TASKS -npernode $$TASKS_PER_NODE $openmpi_args"
 
 # MVAPICH2
 export MV2_CPU_BINDING_LEVEL=SOCKET
@@ -254,6 +259,7 @@ echo -e "Complete========\n"
                            'project_name': project_name,
                            'wall_clock' : wall_clock,
                            'record_job' : record_job,
+                           'openmpi_args': openmpi_args,
                            })
 
     return script_str
@@ -424,8 +430,6 @@ def job_stats_enhanced(job_id):
             except:
                 print('SLURM: Error getting start time')
                 return stats_dict
-
-    return stats_dict
 
 def is_in_queue(job_id):
allow openmpi arguments to be supplied as a kwarg to create submit script
zenotech_MyCluster
train
44dc4f6645e12ddd5cf927bca0675a5b44d55cbd
diff --git a/activerecord/CHANGELOG.md b/activerecord/CHANGELOG.md index <HASH>..<HASH> 100644 --- a/activerecord/CHANGELOG.md +++ b/activerecord/CHANGELOG.md @@ -1,3 +1,8 @@ +* Fix `write_attribute` method to check whether an attribute is aliased or not, and + use the aliased attribute name if needed. + + *Prathamesh Sonpatki* + * Fix `read_attribute` method to check whether an attribute is aliased or not, and use the aliased attribute name if needed. @@ -65,7 +70,7 @@ *Jon Moss* -* Add `stat` method to `ActiveRecord::ConnectionAdapters::ConnectionPool`. +* Added `stat` method to `ActiveRecord::ConnectionAdapters::ConnectionPool`. Example: diff --git a/activerecord/lib/active_record/attribute_methods/write.rb b/activerecord/lib/active_record/attribute_methods/write.rb index <HASH>..<HASH> 100644 --- a/activerecord/lib/active_record/attribute_methods/write.rb +++ b/activerecord/lib/active_record/attribute_methods/write.rb @@ -29,7 +29,13 @@ module ActiveRecord # specified +value+. Empty strings for Integer and Float columns are # turned into +nil+. def write_attribute(attr_name, value) - write_attribute_with_type_cast(attr_name, value, true) + name = if self.class.attribute_alias?(attr_name) + self.class.attribute_alias(attr_name).to_s + else + attr_name.to_s + end + + write_attribute_with_type_cast(name, value, true) end def raw_write_attribute(attr_name, value) # :nodoc: diff --git a/activerecord/test/cases/attribute_methods_test.rb b/activerecord/test/cases/attribute_methods_test.rb index <HASH>..<HASH> 100644 --- a/activerecord/test/cases/attribute_methods_test.rb +++ b/activerecord/test/cases/attribute_methods_test.rb @@ -319,6 +319,13 @@ class AttributeMethodsTest < ActiveRecord::TestCase assert_equal "Still another topic: part 4", topic.title end + test "write_attribute can write aliased attributes as well" do + topic = Topic.new(title: "Don't change the topic") + topic.write_attribute :heading, "New topic" + + assert_equal "New topic", topic.title + end + test "read_attribute" do topic = Topic.new topic.title = "Don't change the topic"
Check whether the current attribute being written is aliased or not before writing - If aliased, then use the aliased attribute name.
rails_rails
train
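write_attribute now resolves an alias (heading) to its backing column (title) before writing, mirroring the earlier read_attribute fix the changelog mentions. The lookup pattern reduced to plain Python, with a stand-in alias table in place of Rails' attribute_alias machinery:

ATTRIBUTE_ALIASES = {'heading': 'title'}  # alias -> real attribute

def write_attribute(record, attr_name, value):
    name = ATTRIBUTE_ALIASES.get(attr_name, attr_name)
    record[name] = value

topic = {'title': "Don't change the topic"}
write_attribute(topic, 'heading', 'New topic')
assert topic['title'] == 'New topic'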
df4c45dd332921b96fb3fa021cdab4dd06f75d96
diff --git a/src/js/pannellum.js b/src/js/pannellum.js index <HASH>..<HASH> 100644 --- a/src/js/pannellum.js +++ b/src/js/pannellum.js @@ -1678,10 +1678,10 @@ function renderInitCallback() { preview = undefined; } loaded = true; - - fireEvent('load'); animateInit(); + + fireEvent('load'); } /**
Move `load` event to fire after rendering instead of before (#<I>).
mpetroff_pannellum
train
03cb51c0aae34ae926d0219c4b7be2d9a5bd2fa2
diff --git a/lib/mongoid/extensions/symbol/inflections.rb b/lib/mongoid/extensions/symbol/inflections.rb index <HASH>..<HASH> 100644 --- a/lib/mongoid/extensions/symbol/inflections.rb +++ b/lib/mongoid/extensions/symbol/inflections.rb @@ -23,7 +23,8 @@ module Mongoid #:nodoc: to_s.plural? end - ["gt", "lt", "gte", "lte", "ne", "near", "in", "nin", "mod", "all", "size", "exists", "within", ['match',"elemMatch"]].each do |oper| + [ "gt", "lt", "gte", "lte", "ne", "near", "in", "nin", "mod", "all", + "size", "exists", "within", ["matches","elemMatch"] ].each do |oper| m, oper = oper oper = m unless oper class_eval <<-OPERATORS diff --git a/spec/integration/mongoid/criteria_spec.rb b/spec/integration/mongoid/criteria_spec.rb index <HASH>..<HASH> 100644 --- a/spec/integration/mongoid/criteria_spec.rb +++ b/spec/integration/mongoid/criteria_spec.rb @@ -252,7 +252,7 @@ describe Mongoid::Criteria do context "#match" do it "returns those matching a partial element in a list" do - Person.criteria.where(:things.match => {:phone => 'HTC Incredible'}).should == [@person] + Person.where(:things.matches => { :phone => "HTC Incredible" }).should == [@person] end end
Symbol#match causes issues with ruby <I>.x - changed to #matches for elemMatch criteria
mongodb_mongoid
train
e06d2685e227eb55950b01b5840d9401ce7c10ee
diff --git a/molecule/command/idempotence.py b/molecule/command/idempotence.py
index <HASH>..<HASH> 100644
--- a/molecule/command/idempotence.py
+++ b/molecule/command/idempotence.py
@@ -32,6 +32,9 @@ LOG = logger.get_logger(__name__)

 class Idempotence(base.Base):
     """
+    Runs the converge step a second time. If no tasks are marked as changed,
+    the scenario is considered idempotent.
+
     Target the default scenario:

     $ molecule idempotence
Update documentation for verify and idempotency checks (#<I>) * Update documentation for verify and idempotency checks * Fixup formatting
ansible_molecule
train
4cb993caea9b42b4dd831062761e44198907b36c
diff --git a/data b/data index <HASH>..<HASH> 160000 --- a/data +++ b/data @@ -1 +1 @@ -Subproject commit 0fc83010033167d643766d8579f02024ad88ef87 +Subproject commit 630845c86c7d76341f8a2162c8a09f15134293fe diff --git a/evaluation/visualization/src/boofcv/alg/fiducial/VisualizeSquareBinaryFiducial.java b/evaluation/visualization/src/boofcv/alg/fiducial/VisualizeSquareBinaryFiducial.java index <HASH>..<HASH> 100644 --- a/evaluation/visualization/src/boofcv/alg/fiducial/VisualizeSquareBinaryFiducial.java +++ b/evaluation/visualization/src/boofcv/alg/fiducial/VisualizeSquareBinaryFiducial.java @@ -102,6 +102,6 @@ public class VisualizeSquareBinaryFiducial { VisualizeSquareBinaryFiducial app = new VisualizeSquareBinaryFiducial(); - app.process(directory+"/angled00_643_284.jpg",directory+"/intrinsic.xml"); + app.process(directory+"/image0000.jpg",directory+"/intrinsic.xml"); } } diff --git a/evaluation/visualization/src/boofcv/alg/fiducial/VisualizeSquareFiducial.java b/evaluation/visualization/src/boofcv/alg/fiducial/VisualizeSquareFiducial.java index <HASH>..<HASH> 100644 --- a/evaluation/visualization/src/boofcv/alg/fiducial/VisualizeSquareFiducial.java +++ b/evaluation/visualization/src/boofcv/alg/fiducial/VisualizeSquareFiducial.java @@ -131,7 +131,8 @@ public class VisualizeSquareFiducial { VisualizeSquareFiducial app = new VisualizeSquareFiducial(); - app.process(directory+"/angled00_643_284.jpg",directory+"/intrinsic.xml"); - app.process(directory+"/far00_643_284.jpg",directory+"/intrinsic.xml"); +// app.process(directory+"/image0000.jpg",directory+"/intrinsic.xml"); +// app.process(directory+"/image0001.jpg",directory+"/intrinsic.xml"); + app.process(directory+"/image0002.jpg",directory+"/intrinsic.xml"); } }
- Updated for new fiducial file names
lessthanoptimal_BoofCV
train
03c18570a608edf2fc0149e1029b2b0267cb5ffc
diff --git a/src/python/pants/option/parser.py b/src/python/pants/option/parser.py index <HASH>..<HASH> 100644 --- a/src/python/pants/option/parser.py +++ b/src/python/pants/option/parser.py @@ -32,6 +32,7 @@ from typing import ( import Levenshtein import yaml +from typing_extensions import Protocol from pants.base.build_environment import get_buildroot from pants.base.deprecated import validate_deprecation_semver, warn_or_error @@ -181,9 +182,15 @@ class Parser: @frozen_after_init @dataclass(unsafe_hash=True) class ParseArgsRequest: - flag_value_map: Dict + # N.B.: We use this callable protocol instead of Callable directly to work around the + # dataclass-specific issue described here: https://github.com/python/mypy/issues/6910 + class FlagNameProvider(Protocol): + def __call__(self) -> Iterable: + ... + + flag_value_map: Dict[str, List[Any]] namespace: OptionValueContainer - get_all_scoped_flag_names: Callable[["Parser.ParseArgsRequest"], Sequence] + get_all_scoped_flag_names: FlagNameProvider levenshtein_max_distance: int passthrough_args: List[str] # A passive option is one that doesn't affect functionality, or appear in help messages, but @@ -197,7 +204,7 @@ class Parser: self, flags_in_scope: Iterable[str], namespace: OptionValueContainer, - get_all_scoped_flag_names: Callable[[], Sequence], + get_all_scoped_flag_names: FlagNameProvider, levenshtein_max_distance: int, passthrough_args: List[str], include_passive_options: bool = False, @@ -215,7 +222,7 @@ class Parser: """ self.flag_value_map = self._create_flag_value_map(flags_in_scope) self.namespace = namespace - self.get_all_scoped_flag_names = get_all_scoped_flag_names # type: ignore[assignment] # cannot assign a method + self.get_all_scoped_flag_names = get_all_scoped_flag_names self.levenshtein_max_distance = levenshtein_max_distance self.passthrough_args = passthrough_args self.include_passive_options = include_passive_options @@ -348,7 +355,7 @@ class Parser: return namespace def _raise_error_for_invalid_flag_names( - self, flags: Sequence[str], all_scoped_flag_names: Sequence, max_edit_distance: int, + self, flags: Sequence[str], all_scoped_flag_names: Iterable, max_edit_distance: int, ) -> NoReturn: """Identify similar option names to unconsumed flags and raise a ParseError with those names."""
Demystify get_all_scoped_flag_names mypy kludges. (#<I>) These are now centralized in a Protocol that takes the place of the prior Callable of two different signatures (!) and a type ignore. The introduced Protocol is a bit klunky, but self contained and can carry the relevant issue pointer in one spot.
pantsbuild_pants
train
68e575e5025bdd0814c48c5860c7c886ccaefadd
diff --git a/lib/raven/integrations/rack.rb b/lib/raven/integrations/rack.rb index <HASH>..<HASH> 100644 --- a/lib/raven/integrations/rack.rb +++ b/lib/raven/integrations/rack.rb @@ -113,7 +113,7 @@ module Raven next unless key.start_with?('HTTP_') || %w(CONTENT_TYPE CONTENT_LENGTH).include?(key) # Rack stores headers as HTTP_WHAT_EVER, we need What-Ever - key = key.gsub("HTTP_", "") + key = key.sub(/^HTTP_/, "") key = key.split('_').map(&:capitalize).join('-') memo[key] = value rescue StandardError => e diff --git a/spec/raven/integrations/rack_spec.rb b/spec/raven/integrations/rack_spec.rb index <HASH>..<HASH> 100644 --- a/spec/raven/integrations/rack_spec.rb +++ b/spec/raven/integrations/rack_spec.rb @@ -103,6 +103,14 @@ RSpec.describe Raven::Rack do expect(interface.headers["Version"]).to eq("HTTP/2.0") end + it 'retains any literal "HTTP-" in the actual header name' do + interface = Raven::HttpInterface.new + new_env = env.merge("HTTP_HTTP_CUSTOM_HTTP_HEADER" => "test") + interface.from_rack(new_env) + + expect(interface.headers).to include("Http-Custom-Http-Header" => "test") + end + it 'does not fail if an object in the env cannot be cast to string' do obj = Class.new do def to_s
Retain any literal "HTTP-" in header names (#<I>) If a literal "HTTP-" exists in a request header name, it will appear as "HTTP_" in the Rack environment. This shouldn't be removed when extracting it from the environment.
getsentry_raven-ruby
train
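The Ruby fix swaps gsub (replace every occurrence) for sub with an anchored pattern (strip only the leading prefix), so a header whose real name contains "HTTP-" survives the Rack-key conversion. The equivalent transformation in Python, using only the standard library:

import re

def rack_key_to_header(key):
    key = re.sub(r'^HTTP_', '', key, count=1)  # strip only the prefix
    return '-'.join(part.capitalize() for part in key.split('_'))

assert rack_key_to_header('HTTP_HTTP_CUSTOM_HTTP_HEADER') == 'Http-Custom-Http-Header'
assert rack_key_to_header('HTTP_USER_AGENT') == 'User-Agent'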
2d7911c951f0bc67614e4351bcd6e11637e13844
diff --git a/sovrin_client/test/agent/helper.py b/sovrin_client/test/agent/helper.py index <HASH>..<HASH> 100644 --- a/sovrin_client/test/agent/helper.py +++ b/sovrin_client/test/agent/helper.py @@ -9,7 +9,6 @@ from plenum.test.test_stack import checkRemoteExists, CONNECTED from sovrin_client.client.wallet.wallet import Wallet from sovrin_common.config_util import getConfig - def connectAgents(agent1, agent2): e1 = agent1.endpoint e2 = agent2.endpoint
fix bulldog helper code to create log file in given basedir instead of assuming the default location (mainly it will help while running tests)
hyperledger-archives_indy-client
train
a9c9ca9713c73ba7ed96cd2e2ca37106e5171b2f
diff --git a/lib/state_machine/transition.rb b/lib/state_machine/transition.rb index <HASH>..<HASH> 100644 --- a/lib/state_machine/transition.rb +++ b/lib/state_machine/transition.rb @@ -159,6 +159,18 @@ module StateMachine machine.action end + # Does this transition represent a loopback (i.e. the from and to state + # are the same) + # + # == Example + # + # machine = StateMachine.new(Vehicle) + # StateMachine::Transition.new(Vehicle.new, machine, :park, :parked, :parked).loopback? # => true + # StateMachine::Transition.new(Vehicle.new, machine, :park, :idling, :parked).loopback? # => false + def loopback? + from_name == to_name + end + # A hash of all the core attributes defined for this transition with their # names as keys and values of the attributes as values. # diff --git a/test/unit/transition_test.rb b/test/unit/transition_test.rb index <HASH>..<HASH> 100644 --- a/test/unit/transition_test.rb +++ b/test/unit/transition_test.rb @@ -142,6 +142,40 @@ class TransitionWithDynamicToValueTest < Test::Unit::TestCase end end +class TransitionLoopbackTest < Test::Unit::TestCase + def setup + @klass = Class.new + @machine = StateMachine::Machine.new(@klass) + @machine.state :parked + @machine.event :park + + @object = @klass.new + @object.state = 'parked' + @transition = StateMachine::Transition.new(@object, @machine, :park, :parked, :parked) + end + + def test_should_be_loopback + assert @transition.loopback? + end +end + +class TransitionWithDifferentStatesTest < Test::Unit::TestCase + def setup + @klass = Class.new + @machine = StateMachine::Machine.new(@klass) + @machine.state :parked, :idling + @machine.event :ignite + + @object = @klass.new + @object.state = 'parked' + @transition = StateMachine::Transition.new(@object, @machine, :ignite, :parked, :idling) + end + + def test_should_not_be_loopback + assert !@transition.loopback? + end +end + class TransitionWithNamespaceTest < Test::Unit::TestCase def setup @klass = Class.new
Add Transition#loopback? to help determine if the from / to states are the same
pluginaweek_state_machine
train
d3fd02130b7f7cf7246ff1edc51265ca27ab7d8a
diff --git a/src/analytics.js b/src/analytics.js index <HASH>..<HASH> 100644 --- a/src/analytics.js +++ b/src/analytics.js @@ -3,14 +3,8 @@ const analytics = `(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[ m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) })(window,document,'script','//www.google-analytics.com/analytics.js','ga'); - var page = document.location.pathname; - if (page === '/') { - var page = window.__data.mobilizations.data[0].id; - ga('send', 'pageview', page); - } - ga('create', 'UA-26278513-30', 'auto'); - ga('send', 'pageview', page); + ga('send', 'pageview'); ` export const mobTracker = `ga('create', '{mobTrackingId}', 'auto', {'name': 'mobTracker'});`
[#<I>] remove failed implementation
nossas_bonde-client
train
e8169599ca685a972a09c8898e3358cc3b5d4a10
diff --git a/lib/cli.js b/lib/cli.js index <HASH>..<HASH> 100644 --- a/lib/cli.js +++ b/lib/cli.js @@ -72,8 +72,12 @@ var saveUserAsset = function(type, file) { }; var rmUserAsset = function(type, name) { - var assetPath = path.join(regDir, type, name + '.css'); - fs.unlinkSync(assetPath); + var assetPath = path.join(regDir, 'deck.js', 'themes', type, name + '.css'); + if(fs.existsSync(assetPath)) { + fs.unlinkSync(assetPath); + } else { + console.log('Remove What? Path "' + assetPath + '" does not exist.'); + } }; var optsFromProgramArgs = function(program) { @@ -129,11 +133,11 @@ module.exports = function(program) { var themeName = program.rmTheme , transName = program.rmTransition; - if(themeName !== true) { - rmUserAsset('theme', themeName); + if(themeName && themeName !== true) { + rmUserAsset('style', themeName); } - if(transName !== true) { + if(transName && transName !== true) { rmUserAsset('transition', transName); }
fix: Delete from correct user folder
jtrussell_bedecked
train
ce8236338c5006815de76e15d618bed806294d7e
diff --git a/nupic/simple_server.py b/nupic/simple_server.py index <HASH>..<HASH> 100755 --- a/nupic/simple_server.py +++ b/nupic/simple_server.py @@ -25,7 +25,15 @@ Note: Requires web.py to run (install using '$ pip install web.py') """ import os import sys -sys.path.remove(os.path.dirname(os.path.realpath(__file__))) +# The following loop removes the nupic.nupic package from the +# PythonPath (sys.path). This is necessary in order to let web +# import the built in math module rather than defaulting to +# nupic.math +while True: + try: + sys.path.remove(os.path.dirname(os.path.realpath(__file__))) + except: + break import datetime import json import web
Made sure that nupic.nupic is removed from the sys.path within simple_server
numenta_nupic
train
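list.remove deletes one occurrence per call and raises ValueError once none are left, which is what lets the loop above strip every copy of the package directory from sys.path. The same loop with the narrower exception it actually expects (the patch's bare except also swallows unrelated errors):

import os
import sys

here = os.path.dirname(os.path.realpath(__file__))
while True:
    try:
        sys.path.remove(here)
    except ValueError:  # no more copies of the path remain
        break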
41fc5c014fd8ffda6d88778fb1749d364b86b8f1
diff --git a/ImagePanel.py b/ImagePanel.py index <HASH>..<HASH> 100644 --- a/ImagePanel.py +++ b/ImagePanel.py @@ -1350,12 +1350,9 @@ class ImagePanel(Panel.Panel): # this message comes from the document model. def data_item_deleted(self, deleted_data_item): data_item = self.get_displayed_data_item() - while data_item is not None: - # if our item gets deleted, clear the selection - if deleted_data_item == data_item: - self.__set_display(None) - break - data_item = data_item.data_source + # if our item gets deleted, clear the selection + if deleted_data_item == data_item: + self.__set_display(None) # this gets called when the user initiates a drag in the drag control to move the panel around def __begin_drag(self): diff --git a/model/DataItem.py b/model/DataItem.py index <HASH>..<HASH> 100644 --- a/model/DataItem.py +++ b/model/DataItem.py @@ -238,6 +238,7 @@ class DataItem(Observable.Observable, Observable.Broadcaster, Observable.Referen self.define_property(Observable.Property("flag", 0, validate=self.__validate_flag, changed=self.__metadata_property_changed)) self.define_property(Observable.Property("source_file_path", validate=self.__validate_source_file_path, changed=self.__property_changed)) self.define_property(Observable.Property("session_id", validate=self.__validate_session_id, changed=self.__session_id_changed)) + self.__data_sources = DataSourceList() self.define_property(Observable.Property("data_sources", DataSourceList(), make=DataSourceList)) self.define_relationship(Observable.Relationship("operations", Operation.operation_item_factory, insert=self.__insert_operation, remove=self.__remove_operation)) self.define_relationship(Observable.Relationship("displays", Display.display_factory, insert=self.__insert_display, remove=self.__remove_display)) @@ -802,15 +803,14 @@ class DataItem(Observable.Observable, Observable.Broadcaster, Observable.Referen def add_data_source(self, data_source): self.session_id = data_source.session_id data_sources = self.data_sources - assert len(data_sources.list) == 0 data_sources.list.append(str(data_source.uuid)) self.data_sources = data_sources # remove a reference to the given data source def remove_data_source(self, data_source): data_sources = self.data_sources - assert len(data_sources.list) == 1 and data_sources.list[0] == data_source - del data_sources.list[0] + assert str(data_source.uuid) in data_sources.list + data_sources.list.remove(str(data_source.uuid)) self.data_sources = data_sources self.session_id = None diff --git a/model/DocumentModel.py b/model/DocumentModel.py index <HASH>..<HASH> 100644 --- a/model/DocumentModel.py +++ b/model/DocumentModel.py @@ -309,13 +309,17 @@ class DocumentModel(Storage.StorageBase): data_item.connect_data_source(self.get_data_item_by_uuid) def remove_data_item(self, data_item): + # remove the data item from any groups for data_group in self.get_flat_data_group_generator(): if data_item in data_group.data_items: data_group.remove_data_item(data_item) + # remove data items that are entirely dependent on data item being removed for other_data_item in copy.copy(self.data_items): if other_data_item.data_source == data_item: self.remove_data_item(other_data_item) + # disconnect the data source data_item.disconnect_data_source() + # remove it from the vault self.__data_item_vault.remove(data_item) def __get_data_items(self): diff --git a/test/DataItem_test.py b/test/DataItem_test.py index <HASH>..<HASH> 100644 --- a/test/DataItem_test.py +++ b/test/DataItem_test.py @@ -604,6 +604,33 @@ class 
TestDataItemClass(unittest.TestCase): data_item_copy = data_item.snapshot() self.assertEqual(data_item_copy.get_metadata("test")["one"], 1) + def test_data_item_allows_adding_of_two_data_sources(self): + datastore = Storage.DictDatastore() + document_model = DocumentModel.DocumentModel(datastore) + with document_model.ref(): + data_item1 = DataItem.DataItem(numpy.zeros((256, 256), numpy.uint32)) + document_model.append_data_item(data_item1) + data_item2 = DataItem.DataItem(numpy.zeros((256, 256), numpy.uint32)) + document_model.append_data_item(data_item2) + data_item = DataItem.DataItem() + data_item.add_data_source(data_item1) + data_item.add_data_source(data_item2) + document_model.append_data_item(data_item) + + def test_data_item_allows_remove_second_of_two_data_sources(self): + datastore = Storage.DictDatastore() + document_model = DocumentModel.DocumentModel(datastore) + with document_model.ref(): + data_item1 = DataItem.DataItem(numpy.zeros((256, 256), numpy.uint32)) + document_model.append_data_item(data_item1) + data_item2 = DataItem.DataItem(numpy.zeros((256, 256), numpy.uint32)) + document_model.append_data_item(data_item2) + data_item = DataItem.DataItem() + data_item.add_data_source(data_item1) + data_item.add_data_source(data_item2) + document_model.append_data_item(data_item) + data_item.remove_data_source(data_item2) + if __name__ == '__main__': logging.getLogger().setLevel(logging.DEBUG)
Begin process of generalizing data sources in data item. svn r<I>
nion-software_nionswift
train
87fe2e08481992a7bf3f75b61b5196af565bca76
diff --git a/lib/Doctrine/ODM/MongoDB/UnitOfWork.php b/lib/Doctrine/ODM/MongoDB/UnitOfWork.php index <HASH>..<HASH> 100644 --- a/lib/Doctrine/ODM/MongoDB/UnitOfWork.php +++ b/lib/Doctrine/ODM/MongoDB/UnitOfWork.php @@ -732,7 +732,7 @@ class UnitOfWork implements PropertyChangedListener } // if embed-many or reference-many relationship - if ($class->fieldMappings[$propName]['type'] === 'many') { + if (isset($class->fieldMappings[$propName]['type']) && $class->fieldMappings[$propName]['type'] === 'many') { $changeSet[$propName] = array($orgValue, $actualValue); if ($orgValue instanceof PersistentCollection) { $this->collectionDeletions[] = $orgValue; @@ -741,7 +741,7 @@ class UnitOfWork implements PropertyChangedListener } // skip equivalent date values - if ($class->fieldMappings[$propName]['type'] === 'date') { + if (isset($class->fieldMappings[$propName]['type']) && $class->fieldMappings[$propName]['type'] === 'date') { $dateType = Type::getType('date'); $dbOrgValue = $dateType->convertToDatabaseValue($orgValue); $dbActualValue = $dateType->convertToDatabaseValue($actualValue);
Protecting against notices Used the same technique as above to protect against notices for accessing undefined offset.
Briareos_mongodb-odm
train
65edf5cf1f737cffd6d3b24cf1032d5b265ab13e
diff --git a/jmock2/src/org/jmock/lib/nonstd/UnsafeHackConcreteClassImposteriser.java b/jmock2/src/org/jmock/lib/nonstd/UnsafeHackConcreteClassImposteriser.java index <HASH>..<HASH> 100644 --- a/jmock2/src/org/jmock/lib/nonstd/UnsafeHackConcreteClassImposteriser.java +++ b/jmock2/src/org/jmock/lib/nonstd/UnsafeHackConcreteClassImposteriser.java @@ -7,7 +7,9 @@ import java.lang.reflect.Method; import net.sf.cglib.core.DefaultNamingPolicy; import net.sf.cglib.core.NamingPolicy; import net.sf.cglib.core.Predicate; +import net.sf.cglib.proxy.Callback; import net.sf.cglib.proxy.Enhancer; +import net.sf.cglib.proxy.Factory; import net.sf.cglib.proxy.InvocationHandler; import org.jmock.api.Imposteriser; @@ -73,12 +75,12 @@ public class UnsafeHackConcreteClassImposteriser implements Imposteriser { private Object createProxy(Class<?> proxyClass, final Invokable mockObject) { try { - Object proxy = unsafe.allocateInstance(proxyClass); - Field callbackField = proxyClass.getDeclaredField("CGLIB$CALLBACK_0"); - callbackField.setAccessible(true); - callbackField.set(proxy, new InvocationHandler() { - public Object invoke(Object receiver, Method method, Object[] args) throws Throwable { - return mockObject.invoke(new Invocation(receiver, method, args)); + Factory proxy = (Factory)unsafe.allocateInstance(proxyClass); + proxy.setCallbacks(new Callback[] { + new InvocationHandler() { + public Object invoke(Object receiver, Method method, Object[] args) throws Throwable { + return mockObject.invoke(new Invocation(receiver, method, args)); + } } }); return proxy; @@ -89,12 +91,6 @@ public class UnsafeHackConcreteClassImposteriser implements Imposteriser { catch (SecurityException e) { throw new IllegalStateException("cannot access private callback field", e); } - catch (IllegalAccessException e) { - throw new IllegalStateException("cannot access private callback field", e); - } - catch (NoSuchFieldException e) { - throw new IllegalStateException("callback field does not exist", e); - } } private Class<?>[] prepend(Class<?> first, Class<?>... rest) {
Initialise proxy through published CGLIB API instead of a dodgy reflection hack
jmock-developers_jmock-library
train
7dfffe11b6bfd92752001bf8cd655fbe75723107
diff --git a/Vendor/Uploader.php b/Vendor/Uploader.php index <HASH>..<HASH> 100644 --- a/Vendor/Uploader.php +++ b/Vendor/Uploader.php @@ -80,6 +80,22 @@ class Uploader { const LOC_CENTER = 5; /** + * The mode to resize: width. + * + * @constant + * @var int + */ + const MODE_WIDTH = 1; + + /** + * The mode to resize: height. + * + * @constant + * @var int + */ + const MODE_HEIGHT = 2; + + /** * Max filesize using shorthand notation: http://php.net/manual/faq.using.php#faq.using.shorthandbytes * * @access public @@ -386,7 +402,15 @@ class Uploader { return false; } - $options = $options + array('location' => self::LOC_CENTER, 'quality' => 100, 'width' => null, 'height' => null, 'append' => null, 'prepend' => null); + $options = $options + array( + 'location' => self::LOC_CENTER, + 'quality' => 100, + 'width' => null, + 'height' => null, + 'append' => null, + 'prepend' => null + ); + $width = $this->_data[$this->_current]['width']; $height = $this->_data[$this->_current]['height']; $src_x = 0; @@ -902,7 +926,17 @@ class Uploader { return false; } - $options = $options + array('width' => null, 'height' => null, 'quality' => 100, 'append' => null, 'prepend' => null, 'expand' => false, 'aspect' => true); + $options = $options + array( + 'width' => null, + 'height' => null, + 'quality' => 100, + 'append' => null, + 'prepend' => null, + 'expand' => false, + 'aspect' => true, + 'mode' => self::MODE_WIDTH + ); + $baseWidth = $this->_data[$this->_current]['width']; $baseHeight = $this->_data[$this->_current]['height']; $width = $options['width']; @@ -925,11 +959,11 @@ class Uploader { $widthScale = $width / $baseWidth; $heightScale = $height / $baseHeight; - if ($widthScale < $heightScale) { + if (($options['mode'] == self::MODE_WIDTH && $widthScale < $heightScale) || ($options['mode'] == self::MODE_HEIGHT && $widthScale > $heightScale)) { $newWidth = $width; $newHeight = ($baseHeight * $newWidth) / $baseWidth; - } elseif ($widthScale > $heightScale) { + } else if (($options['mode'] == self::MODE_WIDTH && $widthScale > $heightScale) || ($options['mode'] == self::MODE_HEIGHT && $widthScale < $heightScale)) { $newHeight = $height; $newWidth = ($newHeight * $baseWidth) / $baseHeight; @@ -1095,7 +1129,18 @@ class Uploader { * @return boolean */ public function transform(array $options) { - $options = $options + array('dest_x' => 0, 'dest_y' => 0, 'source_x' => 0, 'source_y' => 0, 'dest_w' => null, 'dest_h' => null, 'source_w' => $this->_data[$this->_current]['width'], 'source_h' => $this->_data[$this->_current]['height'], 'quality' => 100); + $options = $options + array( + 'dest_x' => 0, + 'dest_y' => 0, + 'dest_w' => null, + 'dest_h' => null, + 'source_x' => 0, + 'source_y' => 0, + 'source_w' => $this->_data[$this->_current]['width'], + 'source_h' => $this->_data[$this->_current]['height'], + 'quality' => 100 + ); + $original = $this->_data[$this->_current]['path']; $mimeType = $this->_data[$this->_current]['type'];
Added mode orientation to resize()
milesj_uploader
train
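The new mode option picks whether the aspect-preserving resize is driven by the width bound or the height bound. The arithmetic condensed into Python; the constants mirror the PHP class, but the function itself is illustrative:

MODE_WIDTH, MODE_HEIGHT = 1, 2

def fit(base_w, base_h, width, height, mode=MODE_WIDTH):
    w_scale = width / float(base_w)
    h_scale = height / float(base_h)
    # MODE_WIDTH drives by width when the width scale is the smaller one;
    # MODE_HEIGHT inverts that comparison, as in the patched resize().
    if (mode == MODE_WIDTH) == (w_scale < h_scale):
        return width, base_h * width / float(base_w)
    return base_w * height / float(base_h), height

assert fit(400, 200, 100, 100, MODE_WIDTH) == (100, 50.0)
assert fit(400, 200, 100, 100, MODE_HEIGHT) == (200.0, 100)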
fc8c0cfc4f431b5747a6a694c3d47da60385b051
diff --git a/.gitignore b/.gitignore index <HASH>..<HASH> 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,4 @@ Gemfile.lock .rvmrc /.bundle/ /tags +binstubs/ diff --git a/Gemfile b/Gemfile index <HASH>..<HASH> 100644 --- a/Gemfile +++ b/Gemfile @@ -4,7 +4,7 @@ gem 'foodcritic', :path => '.' group :test do gem 'aruba', '~> 0.5' - gem 'cucumber', '~> 1.3' + gem 'cucumber', '>= 2' gem 'minitest', '~> 5.3' gem 'simplecov', '~> 0.8' end diff --git a/features/support/env.rb b/features/support/env.rb index <HASH>..<HASH> 100644 --- a/features/support/env.rb +++ b/features/support/env.rb @@ -10,7 +10,6 @@ end require 'aruba/cucumber' require 'foodcritic' -require 'minitest/autorun' require 'minitest/spec' MiniTest::Spec.new(nil) diff --git a/foodcritic.gemspec b/foodcritic.gemspec index <HASH>..<HASH> 100644 --- a/foodcritic.gemspec +++ b/foodcritic.gemspec @@ -11,7 +11,7 @@ Gem::Specification.new do |s| s.license = 'MIT' s.executables << 'foodcritic' s.required_ruby_version = ">= 2.0.0" - s.add_dependency('gherkin', '~> 2.11') + s.add_dependency('cucumber-core', '>= 1.3') s.add_dependency('nokogiri', '>= 1.5', '< 2.0') s.add_dependency('rake') s.add_dependency('treetop', '~> 1.4') diff --git a/lib/foodcritic.rb b/lib/foodcritic.rb index <HASH>..<HASH> 100644 --- a/lib/foodcritic.rb +++ b/lib/foodcritic.rb @@ -1,5 +1,5 @@ require 'pathname' -require 'gherkin' +require 'cucumber/core' require 'treetop' require 'ripper' require 'yajl' diff --git a/lib/foodcritic/domain.rb b/lib/foodcritic/domain.rb index <HASH>..<HASH> 100644 --- a/lib/foodcritic/domain.rb +++ b/lib/foodcritic/domain.rb @@ -1,4 +1,4 @@ -require 'gherkin/tag_expression' +require 'cucumber/core/gherkin/tag_expression' module FoodCritic # A warning of a possible issue @@ -95,9 +95,9 @@ module FoodCritic # Checks the rule tags to see if they match a Gherkin (Cucumber) expression def matches_tags?(tag_expr) - Gherkin::TagExpression.new(tag_expr).evaluate(tags.map do |t| - Gherkin::Formatter::Model::Tag.new(t, 1) - end) + Cucumber::Core::Gherkin::TagExpression.new(tag_expr).evaluate( + tags.map { |tag| Cucumber::Core::Ast::Tag.new(nil, tag) } + ) end # Returns a string representation of this rule. diff --git a/spec/foodcritic/api_spec.rb b/spec/foodcritic/api_spec.rb index <HASH>..<HASH> 100644 --- a/spec/foodcritic/api_spec.rb +++ b/spec/foodcritic/api_spec.rb @@ -138,7 +138,7 @@ describe FoodCritic::Api do end describe "#checks_for_chef_solo?" do - let(:ast) { ast = MiniTest::Mock.new } + let(:ast) { MiniTest::Mock.new } it "raises if the provided ast does not support XPath" do lambda{api.checks_for_chef_solo?(nil)}.must_raise(ArgumentError) end @@ -459,7 +459,7 @@ describe FoodCritic::Api do end describe "#literal_searches" do - let(:ast) { ast = MiniTest::Mock.new } + let(:ast) { MiniTest::Mock.new } it "returns empty if the AST does not support XPath expressions" do api.literal_searches(nil).must_be_empty end @@ -1642,7 +1642,7 @@ describe FoodCritic::Api do end describe "#searches" do - let(:ast) { ast = MiniTest::Mock.new } + let(:ast) { MiniTest::Mock.new } it "returns empty if the AST does not support XPath expressions" do api.searches('not-an-ast').must_be_empty end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -7,7 +7,6 @@ rescue LoadError warn 'warning: simplecov gem not found; skipping coverage' end -require 'minitest/autorun' require 'minitest/pride' require 'minitest/spec'
Use latest gherkin (faster install)
Foodcritic_foodcritic
train
cef6fe6e73ed6f657537c1b149c1b80b70257538
diff --git a/src/Router/Compiler.php b/src/Router/Compiler.php index <HASH>..<HASH> 100644 --- a/src/Router/Compiler.php +++ b/src/Router/Compiler.php @@ -163,21 +163,13 @@ class Compiler return new Response('Method Not Allowed', Response::HTTP_METHOD_NOT_ALLOWED, [ 'content-type' => 'text/plain', ]); - } elseif ( ! isset($routeInfo[1])) { - throw new RuntimeException( - 'Dispatcher generated a found response with no handler data!' - ); - } elseif ( ! is_array($routeInfo[1])) { - throw new RuntimeException( - 'Dispatcher generated a found response with invalid handler data!' - ); } elseif ( ! is_array($routeInfo[2])) { throw new RuntimeException( 'Dispatcher generated a found response with invalid variable data!' ); } - $middlewares = array_values($routeInfo[1]); + $middlewares = array_values((array) ($routeInfo[1] ?? [])); $route = array_pop($middlewares); if ( ! is_a($route, DaftRoute::class, true)) {
typecasting & coalescing rather than throwing exceptions, relying on null return by `array_pop()`
SignpostMarv_daft-router
train
3aeede3bf1c1563d3b37faf2ef986596bb47abac
diff --git a/core/codegen/src/main/java/org/overture/codegen/trans/conc/MainClassConcTransformation.java b/core/codegen/src/main/java/org/overture/codegen/trans/conc/MainClassConcTransformation.java index <HASH>..<HASH> 100644 --- a/core/codegen/src/main/java/org/overture/codegen/trans/conc/MainClassConcTransformation.java +++ b/core/codegen/src/main/java/org/overture/codegen/trans/conc/MainClassConcTransformation.java @@ -5,6 +5,7 @@ package org.overture.codegen.trans.conc; import java.util.List; +import org.overture.ast.expressions.AFieldExp; import org.overture.codegen.cgast.SPatternCG; import org.overture.codegen.cgast.analysis.AnalysisException; import org.overture.codegen.cgast.analysis.DepthFirstAnalysisAdaptor; @@ -15,14 +16,23 @@ import org.overture.codegen.cgast.declarations.AMethodDeclCG; import org.overture.codegen.cgast.declarations.APersyncDeclCG; import org.overture.codegen.cgast.expressions.ABoolLiteralExpCG; import org.overture.codegen.cgast.expressions.AEqualsBinaryExpCG; +import org.overture.codegen.cgast.expressions.AFieldExpCG; import org.overture.codegen.cgast.expressions.AHistoryExpCG; import org.overture.codegen.cgast.expressions.AIdentifierVarExpCG; import org.overture.codegen.cgast.expressions.AIntLiteralExpCG; +import org.overture.codegen.cgast.expressions.ANewExpCG; +import org.overture.codegen.cgast.name.ATypeNameCG; import org.overture.codegen.cgast.patterns.AIdentifierPatternCG; +import org.overture.codegen.cgast.statements.AAssignmentStmCG; import org.overture.codegen.cgast.statements.ABlockStmCG; import org.overture.codegen.cgast.statements.ACallStmCG; import org.overture.codegen.cgast.statements.AElseIfStmCG; +import org.overture.codegen.cgast.statements.AFieldObjectDesignatorCG; +import org.overture.codegen.cgast.statements.AFieldStateDesignatorCG; +import org.overture.codegen.cgast.statements.AIdentifierObjectDesignatorCG; import org.overture.codegen.cgast.statements.AIfStmCG; +import org.overture.codegen.cgast.statements.ALocalAssignmentStmCG; +import org.overture.codegen.cgast.statements.ANewObjectDesignatorCG; import org.overture.codegen.cgast.statements.AReturnStmCG; import org.overture.codegen.cgast.statements.ATryStmCG; import org.overture.codegen.cgast.types.ABoolBasicTypeCG; @@ -89,6 +99,8 @@ public class MainClassConcTransformation extends DepthFirstAnalysisAdaptor entering.setClassType(sentinel); entering.setType(new AVoidTypeCG()); + + //entering.setArgs(value); leaving.setName("leaving"); leaving.setClassType(sentinel.clone()); @@ -103,6 +115,35 @@ public class MainClassConcTransformation extends DepthFirstAnalysisAdaptor methodCG.setBody(bodyStm); } } + else + { + ABlockStmCG bodyConst = new ABlockStmCG(); + + ALocalAssignmentStmCG stm = new ALocalAssignmentStmCG(); + + AIdentifierVarExpCG field = new AIdentifierVarExpCG(); + + field.setOriginal("sentinel"); + + //System.out.println(field.getOriginal()); + + ANewExpCG newexp = new ANewExpCG(); + + ATypeNameCG classtype = new ATypeNameCG(); + classtype.setName(node.getName()+"_sentinel"); + newexp.setName(classtype); + + stm.setExp(newexp); + stm.setTarget(field); + + bodyConst.getStatements().add(methodCG.getBody()); + bodyConst.getStatements().add(stm); + + methodCG.setBody(bodyConst); + + + //} + } } //declaration of the method. 
@@ -164,14 +205,8 @@ public class MainClassConcTransformation extends DepthFirstAnalysisAdaptor for (APersyncDeclCG per : node.getPerSyncs()){ if(per.getOpname().equals(node.getMethods().get(i).getName())){ ret.setExp(per.getPred()); - } -// else -// { -// -// } - - } - + } + } AElseIfStmCG newBranch = new AElseIfStmCG(); AEqualsBinaryExpCG Branches = new AEqualsBinaryExpCG(); diff --git a/core/codegen/src/main/java/org/overture/codegen/trans/conc/SentinelTransformation.java b/core/codegen/src/main/java/org/overture/codegen/trans/conc/SentinelTransformation.java index <HASH>..<HASH> 100644 --- a/core/codegen/src/main/java/org/overture/codegen/trans/conc/SentinelTransformation.java +++ b/core/codegen/src/main/java/org/overture/codegen/trans/conc/SentinelTransformation.java @@ -152,7 +152,13 @@ public class SentinelTransformation extends DepthFirstAnalysisAdaptor //method_pp.setFormalParams(); if (node.getSuperName() != null){ - innerClass.setSuperName(node.getSuperName()+"_Sentinel"); + if(!node.getSuperName().equals("Thread")){ + innerClass.setSuperName(node.getSuperName()+"_Sentinel"); + } + else + { + innerClass.setSuperName("Sentinel"); + } } else{ @@ -192,8 +198,4 @@ public class SentinelTransformation extends DepthFirstAnalysisAdaptor return getThreadClass(superClass.getName(), superClass); } } -// #set ( $baseclass = "" ) -// #if (!$JavaFormat.isNull($node.getThread())) -// #set ( $baseclass = "extends Thread" ) -// #end }
Added the instantiation of the sentinel. Fixed an error in the inheritance of the sentinel inner class.
overturetool_overture
train
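As a rough illustration of what the transformation in the commit above is meant to generate: the constructor keeps its original body and then assigns a freshly constructed <ClassName>_sentinel to the sentinel field. The class A below is invented for illustration; this is a hand-written sketch of the generated shape, not output produced by the code generator.

// Hedged sketch: approximation of the generated Java, following the diff
// (field name "sentinel" and the "_sentinel" suffix come from the commit;
// class A and its constructor body are hypothetical).
public class A {
    A_sentinel sentinel;

    public A() {
        // ... original constructor statements run first ...
        sentinel = new A_sentinel(); // assignment appended by the transformation
    }
}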
f89392a1db009a53552f1280fd22f5941d17231f
diff --git a/lib/app/models/open_object_resource.rb b/lib/app/models/open_object_resource.rb index <HASH>..<HASH> 100644 --- a/lib/app/models/open_object_resource.rb +++ b/lib/app/models/open_object_resource.rb @@ -99,6 +99,7 @@ class OpenObjectResource < ActiveResource::Base @field_defined = true end + def define_openerp_model(arg, url, database, user_id, pass, binding) param = (arg.is_a? OpenObjectResource) ? arg.attributes.merge(arg.relations) : {'model' => arg} model_key = param['model'] @@ -143,7 +144,7 @@ class OpenObjectResource < ActiveResource::Base #corresponding method for OpenERP osv.execute(self, db, uid, obj, method, *args, **kw) method def rpc_execute_with_all(db, uid, pass, obj, method, *args) - client.call("execute", db, uid, pass, obj, method, *args) + try_with_pretty_error_log { client.call("execute", db, uid, pass, obj, method, *args) } end @@ -157,7 +158,21 @@ class OpenObjectResource < ActiveResource::Base end def rpc_exec_workflow_with_all(method, *args) - client.call("exec_workflow", db, uid, pass, obj, method, *args) + try_with_pretty_error_log { client.call("exec_workflow", db, uid, pass, obj, method, *args) } + end + + + def try_with_pretty_error_log + begin + yield + rescue RuntimeError => e + puts "OpenERP server error!" + begin + puts eval("#{ e }".gsub("wrong fault-structure: ", ""))["faultString"] + rescue + puts e.inspect + end + end end @@ -246,7 +261,17 @@ class OpenObjectResource < ActiveResource::Base #compatible with the Rails way but also supports OpenERP context def create(context={}) - self.class.rpc_execute('create', *(@attributes + [context])) + self.id = self.class.rpc_execute('create', @attributes, context) + end + + #compatible with the Rails way but also supports OpenERP context + def update(context={}) + self.class.rpc_execute('write', self.id, @attributes.reject{|k, v| k == 'id'}, context) + end + + #compatible with the Rails way but also supports OpenERP context + def destroy(context={}) + self.class.rpc_execute('unlink', self.id, context) end
- properly set the id after create - added update support - properly switch to update (the OpenERP write method) if the id already exists (after a create) - added destroy support - added pretty printing of OpenERP server errors (though we would be happy to use the Rails logger instead of just puts)
akretion_ooor
train
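A hedged usage sketch of the CRUD mapping the commit above introduces. The ResPartner model and its attribute are invented for illustration; only the create/write/unlink dispatch shown in the diff is assumed.

# Sketch only: assumes a proxy class set up via define_openerp_model.
partner = ResPartner.new('name' => 'Acme')  # hypothetical model and attribute
partner.create   # calls OpenERP 'create'; the returned id is assigned to partner.id
# ...mutate the object's attributes as usual, then:
partner.update   # calls OpenERP 'write' on partner.id with the non-id attributes
partner.destroy  # calls OpenERP 'unlink' on partner.id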
b636d9a52b7b3e1cd0daa91f3e7241c54989b3ee
diff --git a/modules/es/bases/es.DocumentModelBranchNode.js b/modules/es/bases/es.DocumentModelBranchNode.js index <HASH>..<HASH> 100644 --- a/modules/es/bases/es.DocumentModelBranchNode.js +++ b/modules/es/bases/es.DocumentModelBranchNode.js @@ -200,6 +200,17 @@ es.DocumentModelBranchNode.prototype.reverse = function() { }; /** + * Gets the index of a given child node. + * + * @method + * @param {es.DocumentModelNode} node Child node to find index of + * @returns {Integer} Index of child node or -1 if node was not found + */ +es.DocumentModelBranchNode.prototype.indexOf = function( node ) { + return this.children.indexOf( node ); +}; + +/** * Sets the root node to this and all of it's children. * * @method diff --git a/modules/es/models/es.DocumentModel.js b/modules/es/models/es.DocumentModel.js index <HASH>..<HASH> 100644 --- a/modules/es/models/es.DocumentModel.js +++ b/modules/es/models/es.DocumentModel.js @@ -65,45 +65,47 @@ es.DocumentModel.nodeRules = {}; * Each function is called in the context of a state, and takes an operation object as a parameter. */ es.DocumentModel.operations = ( function() { - function invalidate( from, to ) { - this.rebuild.push( { 'from': from, 'to': to } ); - } - function retain( op ) { annotate.call( this, this.cursor + op.length ); this.cursor += op.length; } + function rebuild( newData, oldNodes ) { + var parent = oldNodes[0].getParent(), + index = parent.indexOf( oldNodes[0] ); + // Remove the node we are about to insert into from the model tree + parent.splice( index, oldNodes.length ); + // Regenerate nodes for the data we've affected + var newNodes = es.DocumentModel.createNodesFromData( newData ); + // Insert new elements into the tree where the old ones used to be + for ( var i = newNodes.length; i >= 0; i-- ) { + parent.splice( index, 0, newNodes[i] ); + } + } + function insert( op ) { if ( es.DocumentModel.isStructuralOffset( this.data, this.cursor ) ) { // TODO: Support tree updates when inserting between elements } else { // Get the node we are about to insert into var node = this.tree.getNodeFromOffset( this.cursor ); + if ( !node ) { + throw 'Missing node error. A node could not not be found at the cursor.'; + } if ( es.DocumentModel.containsElementData( op.data ) ) { - var nodeParent = node.getParent(); - if ( !nodeParent ) { - throw 'Missing parent error. Node does not have a parent node.'; - } - var offset = this.tree.getOffsetFromNode( node ), - length = node.getElementLength() + op.data.length, - index = nodeParent.indexOf( node ); - if ( index === -1 ) { - throw 'Missing child error. Node could not be found in its parent node.'; - } - // Remove the node we are about to insert into from the model tree - nodeParent.splice( index, 1 ); // Perform insert on linear data model es.insertIntoArray( this.data, this.cursor, op.data ); annotate.call( this, this.cursor + op.data.length ); - // Regenerate nodes for the data we've affected - var nodes = es.DocumentModel.createNodesFromData( - this.data.slice( offset, length ) - ); - // Insert new elements into the tree where the old one used to be - for ( var i = nodes.length; i >= 0; i-- ) { - this.tree.splice( index, nodes[i] ); + // Synchronize model tree + var offset = this.tree.getOffsetFromNode( node ); + if ( offset === -1 ) { + throw 'Invalid offset error. 
Node is not in model tree'; } + rebuild.call( + this, + this.data.slice( offset, offset + node.getElementLength() + op.data.length ), + [node] + ); } else { // Perform insert on linear data model es.insertIntoArray( this.data, this.cursor, op.data ); @@ -1208,8 +1210,7 @@ es.DocumentModel.prototype.commit = function( transaction ) { 'tree': this, 'cursor': 0, 'set': [], - 'clear': [], - 'rebuild': [] + 'clear': [] }, operations = transaction.getOperations(); for ( var i = 0, length = operations.length; i < length; i++ ) { @@ -1220,7 +1221,6 @@ es.DocumentModel.prototype.commit = function( transaction ) { throw 'Invalid operation error. Operation type is not supported: ' + operation.type; } } - // TODO: Synchronize op.tree - insert elements and adjust lengths }; /** @@ -1235,8 +1235,7 @@ es.DocumentModel.prototype.rollback = function( transaction ) { 'tree': this, 'cursor': 0, 'set': [], - 'clear': [], - 'rebuild': [] + 'clear': [] }, operations = transaction.getOperations(); for ( var i = 0, length = operations.length; i < length; i++ ) { @@ -1247,7 +1246,6 @@ es.DocumentModel.prototype.rollback = function( transaction ) { throw 'Invalid operation error. Operation type is not supported: ' + operation.type; } } - // TODO: Synchronize op.tree - insert elements and adjust lengths }; /* Inheritance */
Refactored some of the tree sync code to be reusable
wikimedia_parsoid
train
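To make the factored-out pattern concrete, here is a minimal standalone sketch of the splice-out/splice-in logic that the new rebuild() helper encapsulates, written against the indexOf() and splice() methods shown in the diff above; it is an approximation, not a verbatim excerpt.

// Sketch: replace a run of sibling nodes with nodes regenerated from new data.
function rebuildSketch( parent, oldNodes, newData ) {
	var index = parent.indexOf( oldNodes[0] );
	// Detach the stale nodes from the model tree
	parent.splice( index, oldNodes.length );
	// Regenerate nodes from the affected slice of the linear data model
	var newNodes = es.DocumentModel.createNodesFromData( newData );
	// Reinsert them last-first at a fixed index so they end up in document order
	for ( var i = newNodes.length - 1; i >= 0; i-- ) {
		parent.splice( index, 0, newNodes[i] );
	}
}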