content_type
stringclasses
8 values
main_lang
stringclasses
7 values
message
stringlengths
1
50
sha
stringlengths
40
40
patch
stringlengths
52
962k
file_count
int64
1
300
Java
Java
improve package javadoc
748fb50f63b9239350d1d02c76667fde61a5f78b
<ide><path>src/main/java/io/reactivex/observers/package-info.java <ide> */ <ide> <ide> /** <del> * Default wrappers and implementations for Observer-based consumer classes and interfaces; <del> * utility classes for creating them from callbacks. <add> * Default wrappers and implementations for Observer-based consumer classes and interfaces, <add> * including disposable and resource-tracking variants and <add> * the {@link io.reactivex.subscribers.TestObserver} that allows unit testing <add> * {@link io.reactivex.Observable}-, {@link io.reactivex.Single}-, {@link io.reactivex.Maybe}- <add> * and {@link io.reactivex.Completable}-based flows. <ide> */ <ide> package io.reactivex.observers; <ide><path>src/main/java/io/reactivex/package-info.java <ide> * limitations under the License. <ide> */ <ide> /** <del> * Base reactive classes: Flowable, Observable, Single and Completable; base reactive consumers; <add> * Base reactive classes: Flowable, Observable, Single, Maybe and Completable; base reactive consumers; <ide> * other common base interfaces. 
<ide> * <ide> * <p>A library that enables subscribing to and composing asynchronous events and <ide> * <ide> * <p>Services which intend on exposing data asynchronously and wish <ide> * to allow reactive processing and composition can implement the <del> * {@link io.reactivex.Flowable}, {@link io.reactivex.Observable}, {@link io.reactivex.Single} <del> * or {@link io.reactivex.Completable} class which then allow consumers to subscribe to them <del> * and receive events.</p> <add> * {@link io.reactivex.Flowable}, {@link io.reactivex.Observable}, {@link io.reactivex.Single}, <add> * {@link io.reactivex.Maybe} or {@link io.reactivex.Completable} class which then allow <add> * consumers to subscribe to them and receive events.</p> <ide> * <p>Usage examples can be found on the {@link io.reactivex.Flowable}/{@link io.reactivex.Observable} and {@link org.reactivestreams.Subscriber} classes.</p> <ide> */ <ide> package io.reactivex; <ide><path>src/main/java/io/reactivex/subscribers/package-info.java <ide> */ <ide> <ide> /** <del> * Default wrappers and implementations for Subscriber-based consumer classes and interfaces; <del> * utility classes for creating them from callbacks. <add> * Default wrappers and implementations for Subscriber-based consumer classes and interfaces, <add> * including disposable and resource-tracking variants and <add> * the {@link io.reactivex.subscribers.TestSubscriber} that allows unit testing <add> * {@link io.reactivex.Flowable}-based flows. <ide> */ <ide> package io.reactivex.subscribers;
3
Javascript
Javascript
fix race condition in keyboardavoidingview
b08fff6f869e00c20c0dcdf7aca71284c2f276f0
<ide><path>Libraries/Components/Keyboard/KeyboardAvoidingView.js <ide> class KeyboardAvoidingView extends React.Component<Props, State> { <ide> }; <ide> <ide> _frame: ?ViewLayout = null; <add> _keyboardEvent: ?KeyboardEvent = null; <ide> _subscriptions: Array<EventSubscription> = []; <ide> viewRef: {current: React.ElementRef<any> | null, ...}; <ide> _initialFrameHeight: number = 0; <ide> class KeyboardAvoidingView extends React.Component<Props, State> { <ide> } <ide> <ide> _onKeyboardChange = (event: ?KeyboardEvent) => { <del> if (event == null) { <add> this._keyboardEvent = event; <add> this._updateBottomIfNecesarry(); <add> }; <add> <add> _onLayout = (event: ViewLayoutEvent) => { <add> this._frame = event.nativeEvent.layout; <add> if (!this._initialFrameHeight) { <add> // save the initial frame height, before the keyboard is visible <add> this._initialFrameHeight = this._frame.height; <add> } <add> <add> this._updateBottomIfNecesarry(); <add> }; <add> <add> _updateBottomIfNecesarry = () => { <add> if (this._keyboardEvent == null) { <ide> this.setState({bottom: 0}); <ide> return; <ide> } <ide> <del> const {duration, easing, endCoordinates} = event; <add> const {duration, easing, endCoordinates} = this._keyboardEvent; <ide> const height = this._relativeKeyboardHeight(endCoordinates); <ide> <ide> if (this.state.bottom === height) { <ide> class KeyboardAvoidingView extends React.Component<Props, State> { <ide> this.setState({bottom: height}); <ide> }; <ide> <del> _onLayout = (event: ViewLayoutEvent) => { <del> this._frame = event.nativeEvent.layout; <del> if (!this._initialFrameHeight) { <del> // save the initial frame height, before the keyboard is visible <del> this._initialFrameHeight = this._frame.height; <del> } <del> }; <del> <ide> componentDidMount(): void { <ide> if (Platform.OS === 'ios') { <ide> this._subscriptions = [
1
Mixed
Ruby
add migration history to schema.rb dump
f02d2185ebbe01f455a9a91216ff7094b014ea72
<ide><path>activerecord/CHANGELOG.md <ide> ## Rails 4.0.0 (unreleased) ## <ide> <add>* Add migration history to schema.rb dump. <add> Loading schema.rb with full migration history <add> restores the exact list of migrations that created <add> that schema (including names and fingerprints). This <add> avoids possible mistakes caused by assuming all <add> migrations with a lower version have been run when <add> loading schema.rb. Old schema.rb files without migration <add> history but with the :version setting still work as before. <add> <add> *Josh Susser* <add> <ide> * Add metadata columns to schema_migrations table. <ide> New columns are: migrated_at (timestamp), <ide> fingerprint (md5 hash of migration source), and <ide><path>activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb <ide> def dump_schema_information #:nodoc: <ide> sm_table = ActiveRecord::Migrator.schema_migrations_table_name <ide> <ide> ActiveRecord::SchemaMigration.order('version').map { |sm| <del> "INSERT INTO #{sm_table} (version, migrated_at, name) VALUES ('#{sm.version}',LOCALTIMESTAMP,'#{sm.name}');" <del> }.join "\n\n" <add> "INSERT INTO #{sm_table} (version, migrated_at, fingerprint, name) VALUES ('#{sm.version}',LOCALTIMESTAMP,'#{sm.fingerprint}','#{sm.name}');" <add> }.join("\n\n") <ide> end <ide> <ide> # Should not be called normally, but this operation is non-destructive. <ide><path>activerecord/lib/active_record/schema.rb <ide> def migrations_paths <ide> end <ide> <ide> def define(info, &block) <add> @using_deprecated_version_setting = info[:version].present? <add> SchemaMigration.drop_table <add> initialize_schema_migrations_table <add> <ide> instance_eval(&block) <ide> <del> unless info[:version].blank? 
<del> initialize_schema_migrations_table <del> assume_migrated_upto_version(info[:version], migrations_paths) <del> end <add> # handle files from pre-4.0 that used :version option instead of dumping migration table <add> assume_migrated_upto_version(info[:version], migrations_paths) if @using_deprecated_version_setting <ide> end <ide> <ide> # Eval the given block. All methods available to the current connection <ide> # adapter are available within the block, so you can easily use the <ide> # database definition DSL to build up your schema (+create_table+, <ide> # +add_index+, etc.). <del> # <del> # The +info+ hash is optional, and if given is used to define metadata <del> # about the current schema (currently, only the schema's version): <del> # <del> # ActiveRecord::Schema.define(version: 20380119000001) do <del> # ... <del> # end <ide> def self.define(info={}, &block) <ide> new.define(info, &block) <ide> end <add> <add> # Create schema migration history. Include migration statements in a block to this method. <add> # <add> # migrations do <add> # migration 20121128235959, "44f1397e3b92442ca7488a029068a5ad", "add_horn_color_to_unicorns" <add> # migration 20121129235959, "4a1eb3965d94406b00002b370854eae8", "add_magic_power_to_unicorns" <add> # end <add> def migrations <add> raise(ArgumentError, "Can't set migrations while using :version option") if @using_deprecated_version_setting <add> yield <add> end <add> <add> # Add a migration to the ActiveRecord::SchemaMigration table. <add> # <add> # The +version+ argument is an integer. <add> # The +fingerprint+ and +name+ arguments are required but may be empty strings. <add> # The migration's +migrated_at+ attribute is set to the current time, <add> # instead of being set explicitly as an argument to the method. 
<add> # <add> # migration 20121129235959, "4a1eb3965d94406b00002b370854eae8", "add_magic_power_to_unicorns" <add> def migration(version, fingerprint, name) <add> SchemaMigration.create!(version: version, migrated_at: Time.now, fingerprint: fingerprint, name: name) <add> end <ide> end <ide> end <ide><path>activerecord/lib/active_record/schema_dumper.rb <ide> def self.dump(connection=ActiveRecord::Base.connection, stream=STDOUT) <ide> <ide> def dump(stream) <ide> header(stream) <add> migrations(stream) <ide> tables(stream) <ide> trailer(stream) <ide> stream <ide> def header(stream) <ide> stream.puts "# encoding: #{stream.external_encoding.name}" <ide> end <ide> <del> stream.puts <<HEADER <add> header_text = <<HEADER_RUBY <ide> # This file is auto-generated from the current state of the database. Instead <ide> # of editing this file, please use the migrations feature of Active Record to <ide> # incrementally modify your database, and then regenerate this schema definition. <ide> def header(stream) <ide> <ide> ActiveRecord::Schema.define(#{define_params}) do <ide> <del>HEADER <add>HEADER_RUBY <add> stream.puts header_text <ide> end <ide> <ide> def trailer(stream) <ide> stream.puts "end" <ide> end <ide> <add> def migrations(stream) <add> all_migrations = ActiveRecord::SchemaMigration.all.to_a <add> if all_migrations.any? <add> stream.puts(" migrations do") <add> all_migrations.each do |migration| <add> stream.puts(migration.schema_line(" ")) <add> end <add> stream.puts(" end") <add> end <add> end <add> <ide> def tables(stream) <ide> @connection.tables.sort.each do |tbl| <ide> next if ['schema_migrations', ignore_tables].flatten.any? do |ignored| <ide><path>activerecord/lib/active_record/schema_migration.rb <ide> def self.drop_table <ide> def version <ide> super.to_i <ide> end <add> <add> # Construct ruby source to include in schema.rb dump for this migration. <add> # Pass a string of spaces as +indent+ to allow calling code to control how deeply indented the line is. 
<add> # The generated line includes the migration version, fingerprint, and name. Either fingerprint or name <add> # can be an empty string. <add> # <add> # Example output: <add> # <add> # migration 20121129235959, "ee4be703f9e6e2fc0f4baddebe6eb8f7", "add_magic_power_to_unicorns" <add> def schema_line(indent) <add> %Q(#{indent}migration %s, "%s", "%s") % [version, fingerprint, name] <add> end <ide> end <ide> end <ide><path>activerecord/test/cases/ar_schema_test.rb <ide> def test_schema_subclass <ide> end <ide> end <ide> <add> class ActiveRecordSchemaMigrationsTest < ActiveRecordSchemaTest <add> def setup <add> super <add> ActiveRecord::SchemaMigration.delete_all <add> end <add> <add> def test_migration_adds_row_to_migrations_table <add> schema = ActiveRecord::Schema.new <add> schema.migration(1001, "", "") <add> schema.migration(1002, "123456789012345678901234567890ab", "add_magic_power_to_unicorns") <add> <add> migrations = ActiveRecord::SchemaMigration.all.to_a <add> assert_equal 2, migrations.length <add> <add> assert_equal 1001, migrations[0].version <add> assert_match %r{^2\d\d\d-}, migrations[0].migrated_at.to_s(:db) <add> assert_equal "", migrations[0].fingerprint <add> assert_equal "", migrations[0].name <add> <add> assert_equal 1002, migrations[1].version <add> assert_match %r{^2\d\d\d-}, migrations[1].migrated_at.to_s(:db) <add> assert_equal "123456789012345678901234567890ab", migrations[1].fingerprint <add> assert_equal "add_magic_power_to_unicorns", migrations[1].name <add> end <add> <add> def test_define_clears_schema_migrations <add> assert_nothing_raised do <add> ActiveRecord::Schema.define do <add> migrations do <add> migration(123001, "", "") <add> end <add> end <add> ActiveRecord::Schema.define do <add> migrations do <add> migration(123001, "", "") <add> end <add> end <add> end <add> end <add> <add> def test_define_raises_if_both_version_and_explicit_migrations <add> assert_raise(ArgumentError) do <add> ActiveRecord::Schema.define(version: 123001) 
do <add> migrations do <add> migration(123001, "", "") <add> end <add> end <add> end <add> end <add> end <add> <ide> end <ide><path>activerecord/test/cases/schema_dumper_test.rb <ide> require "cases/helper" <del> <add># require "cases/migration/helper" <ide> <ide> class SchemaDumperTest < ActiveRecord::TestCase <ide> def setup <ide> def standard_dump <ide> def test_dump_schema_information_outputs_lexically_ordered_versions <ide> versions = %w{ 20100101010101 20100201010101 20100301010101 } <ide> versions.reverse.each do |v| <del> ActiveRecord::SchemaMigration.create!(:version => v, :name => "anon", :migrated_at => Time.now) <add> ActiveRecord::SchemaMigration.create!( <add> :version => v, :migrated_at => Time.now, <add> :fingerprint => "123456789012345678901234567890ab", :name => "anon") <ide> end <ide> <ide> schema_info = ActiveRecord::Base.connection.dump_schema_information <ide> assert_match(/20100201010101.*20100301010101/m, schema_info) <add> target_line = %q{INSERT INTO schema_migrations (version, migrated_at, fingerprint, name) VALUES ('20100101010101',LOCALTIMESTAMP,'123456789012345678901234567890ab','anon');} <add> assert_match target_line, schema_info <ide> end <ide> <ide> def test_magic_comment <ide> def test_schema_dump <ide> assert_no_match %r{create_table "schema_migrations"}, output <ide> end <ide> <add> def test_schema_dump_includes_migrations <add> ActiveRecord::SchemaMigration.delete_all <add> ActiveRecord::Migrator.migrate(MIGRATIONS_ROOT + "/always_safe") <add> <add> output = standard_dump <add> assert_match %r{migrations do}, output, "Missing migrations block" <add> assert_match %r{migration 1001, "[0-9a-f]{32}", "always_safe"}, output, "Missing migration line" <add> assert_match %r{migration 1002, "[0-9a-f]{32}", "still_safe"}, output, "Missing migration line" <add> end <add> <ide> def test_schema_dump_excludes_sqlite_sequence <ide> output = standard_dump <ide> assert_no_match %r{create_table "sqlite_sequence"}, output 
<ide><path>activerecord/test/migrations/always_safe/1001_always_safe.rb <add>class AlwaysSafe < ActiveRecord::Migration <add> def change <add> # do nothing to avoid side-effect conflicts from running multiple times <add> end <add>end <ide><path>activerecord/test/migrations/always_safe/1002_still_safe.rb <add>class StillSafe < ActiveRecord::Migration <add> def change <add> # do nothing to avoid side-effect conflicts from running multiple times <add> end <add>end
9
Python
Python
fix doc for language code
814b9550d72f918d9eaea94468fa18a15ab710b3
<ide><path>src/transformers/models/mbart/tokenization_mbart.py <ide> def build_inputs_with_special_tokens( <ide> adding special tokens. An MBART sequence has the following format, where ``X`` represents the sequence: <ide> <ide> - ``input_ids`` (for encoder) ``X [eos, src_lang_code]`` <del> - ``decoder_input_ids``: (for decoder) ``[tgt_lang_code] X [eos]`` <add> - ``decoder_input_ids``: (for decoder) ``X [eos, tgt_lang_code]`` <ide> <ide> BOS is never used. Pairs of sequences are not the expected use case, but they will be handled without a <ide> separator. <ide> def prepare_seq2seq_batch( <ide> return model_inputs <ide> <ide> def set_src_lang_special_tokens(self, src_lang) -> None: <del> """Reset the special tokens to the source lang setting. No prefix and suffix=[eos, cur_lang_code].""" <add> """Reset the special tokens to the source lang setting. No prefix and suffix=[eos, src_lang_code].""" <ide> self.cur_lang_code = self.lang_code_to_id[src_lang] <ide> self.prefix_tokens = [] <ide> self.suffix_tokens = [self.eos_token_id, self.cur_lang_code] <ide> <ide> def set_tgt_lang_special_tokens(self, lang: str) -> None: <del> """Reset the special tokens to the target language setting. Prefix [tgt_lang_code], suffix =[eos].""" <add> """Reset the special tokens to the target language setting. No prefix and suffix=[eos, tgt_lang_code].""" <ide> self.cur_lang_code = self.lang_code_to_id[lang] <ide> self.prefix_tokens = [] <ide> self.suffix_tokens = [self.eos_token_id, self.cur_lang_code] <ide><path>src/transformers/models/mbart/tokenization_mbart_fast.py <ide> def build_inputs_with_special_tokens( <ide> An MBART sequence has the following format, where ``X`` represents the sequence: <ide> <ide> - ``input_ids`` (for encoder) ``X [eos, src_lang_code]`` <del> - ``decoder_input_ids``: (for decoder) ``[tgt_lang_code] X [eos]`` <add> - ``decoder_input_ids``: (for decoder) ``X [eos, tgt_lang_code]`` <ide> <ide> BOS is never used. 
Pairs of sequences are not the expected use case, but they will be handled without a <ide> separator. <ide> def prepare_seq2seq_batch( <ide> return model_inputs <ide> <ide> def set_src_lang_special_tokens(self, src_lang) -> None: <del> """Reset the special tokens to the source lang setting. No prefix and suffix=[eos, cur_lang_code].""" <add> """Reset the special tokens to the source lang setting. No prefix and suffix=[eos, src_lang_code].""" <ide> self.cur_lang_code = self.convert_tokens_to_ids(src_lang) <ide> self.prefix_tokens = [] <ide> self.suffix_tokens = [self.eos_token_id, self.cur_lang_code] <ide> def set_src_lang_special_tokens(self, src_lang) -> None: <ide> ) <ide> <ide> def set_tgt_lang_special_tokens(self, lang: str) -> None: <del> """Reset the special tokens to the target language setting. Prefix [tgt_lang_code], suffix =[eos].""" <add> """Reset the special tokens to the target language setting. No prefix and suffix=[eos, tgt_lang_code].""" <ide> self.cur_lang_code = self.convert_tokens_to_ids(lang) <ide> self.prefix_tokens = [] <ide> self.suffix_tokens = [self.eos_token_id, self.cur_lang_code]
2
Ruby
Ruby
fix typo in migration test
bcff10adeccca10c75ef3c9539deb039c0bd775c
<ide><path>activerecord/test/migration_test.rb <ide> def test_native_types <ide> end <ide> <ide> # Test DateTime column and defaults, including timezone. <del> # FIXME: momemnt of truth is Time on 64-bit platforms. <add> # FIXME: moment of truth may be Time on 64-bit platforms. <ide> if bob.moment_of_truth.is_a?(DateTime) <ide> assert_equal DateTime.now.offset, bob.moment_of_truth.offset <ide> assert_not_equal 0, bob.moment_of_truth.offset
1
Python
Python
fix timedistributed batchnormalization
7c7d73530c1ab1b47f9fb5f0612ec13fef1a26c6
<ide><path>keras/layers/wrappers.py <ide> import copy <ide> from ..engine import Layer <ide> from ..engine import InputSpec <add>from ..engine.topology import _object_list_uid <ide> from ..utils.generic_utils import has_arg <ide> from .. import backend as K <ide> <ide> class Wrapper(Layer): <ide> <ide> def __init__(self, layer, **kwargs): <ide> self.layer = layer <add> # Tracks mapping of Wrapper inputs to inner layer inputs. Useful when <add> # the inner layer has update ops that depend on it's inputs (as opposed <add> # to the inputs to the Wrapper layer). <add> self._input_map = {} <ide> super(Wrapper, self).__init__(**kwargs) <ide> <ide> def build(self, input_shape=None): <ide> def updates(self): <ide> return [] <ide> <ide> def get_updates_for(self, inputs=None): <del> if inputs is None: <del> updates = self.layer.get_updates_for(None) <del> return updates + super(Wrapper, self).get_updates_for(None) <del> return super(Wrapper, self).get_updates_for(inputs) <add> # If the wrapper modifies the inputs, use the modified inputs to <add> # get the updates from the inner layer. <add> inner_inputs = inputs <add> if inputs is not None: <add> uid = _object_list_uid(inputs) <add> if uid in self._input_map: <add> inner_inputs = self._input_map[uid] <add> <add> updates = self.layer.get_updates_for(inner_inputs) <add> updates += super(Wrapper, self).get_updates_for(inputs) <add> return updates <ide> <ide> @property <ide> def losses(self): <ide> def step(x, _): <ide> input_length = input_shape[1] <ide> if not input_length: <ide> input_length = K.shape(inputs)[1] <del> # Shape: (num_samples * timesteps, ...) <add> # Shape: (num_samples * timesteps, ...). And track the <add> # transformation in self._input_map. <add> input_uid = _object_list_uid(inputs) <ide> inputs = K.reshape(inputs, (-1,) + input_shape[2:]) <add> self._input_map[input_uid] = inputs <ide> # (num_samples * timesteps, ...) 
<ide> y = self.layer.call(inputs, **kwargs) <ide> if hasattr(y, '_uses_learning_phase'): <ide><path>tests/keras/layers/wrappers_test.py <ide> from numpy.testing import assert_allclose <ide> from keras.utils.test_utils import keras_test <ide> from keras.layers import wrappers, Input <del>from keras.layers import core, convolutional, recurrent, embeddings <add>from keras.layers import core, convolutional, recurrent, embeddings, normalization <ide> from keras.models import Sequential, Model, model_from_json <ide> from keras import backend as K <add>from keras.engine.topology import _object_list_uid <ide> <ide> <ide> @keras_test <ide> def test_TimeDistributed(): <ide> outer_model.compile(optimizer='rmsprop', loss='mse') <ide> outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) <ide> <add> # test with BatchNormalization <add> model = Sequential() <add> model.add(wrappers.TimeDistributed(normalization.BatchNormalization(center=True, scale=True), <add> name='bn', input_shape=(10, 2))) <add> model.compile(optimizer='rmsprop', loss='mse') <add> # Assert that mean and variance are 0 and 1. <add> td = model.layers[0] <add> assert np.array_equal(td.get_weights()[2], np.array([0, 0])) <add> assert np.array_equal(td.get_weights()[3], np.array([1, 1])) <add> # Train <add> model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10, 2)), <add> np.broadcast_to(np.array([0, 1]), (1, 10, 2))) <add> # Assert that mean and variance changed. <add> assert not np.array_equal(td.get_weights()[2], np.array([0, 0])) <add> assert not np.array_equal(td.get_weights()[3], np.array([1, 1])) <add> # Verify input_map has one mapping from inputs to reshaped inputs. <add> uid = _object_list_uid(model.inputs) <add> assert len(td._input_map.keys()) == 1 <add> assert uid in td._input_map <add> assert K.int_shape(td._input_map[uid]) == (None, 2) <add> <ide> <ide> @keras_test <ide> @pytest.mark.skipif((K.backend() == 'cntk'),
2
Text
Text
remove unnecessary newlines
648418196d46da4c93217412dd2c79c63334cac2
<ide><path>docs/recipes/UsingObjectSpreadOperator.md <ide> # Using Object Spread Operator <ide> <del>Since one of the core tenets of Redux is to never mutate state, you'll often find yourself using [`Object.assign()`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Object/assign) to create <del>copies of objects with new or updated values. For example, in the `todoApp` below `Object.assign()` is used to return a new <del>`state` object with an updated `visibilityFilter` property: <add>Since one of the core tenets of Redux is to never mutate state, you'll often find yourself using [`Object.assign()`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Object/assign) to create copies of objects with new or updated values. For example, in the `todoApp` below `Object.assign()` is used to return a new `state` object with an updated `visibilityFilter` property: <ide> <ide> ```js <ide> function todoApp(state = initialState, action) { <ide> function todoApp(state = initialState, action) { <ide> <ide> While effective, using `Object.assign()` can quickly make simple reducers difficult to read given its rather verbose syntax. <ide> <del>An alternative approach is to use the [object spread syntax](https://github.com/sebmarkbage/ecmascript-rest-spread) proposed for the next versions of JavaScript which lets you use the spread (`...`) operator to copy enumerable properties from one object to another in a more succinct way. The object spread operator is conceptually similar to the ES6 [array spread operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Spread_operator). 
We <del>can simplify the `todoApp` example above by using the object spread syntax: <add>An alternative approach is to use the [object spread syntax](https://github.com/sebmarkbage/ecmascript-rest-spread) proposed for the next versions of JavaScript which lets you use the spread (`...`) operator to copy enumerable properties from one object to another in a more succinct way. The object spread operator is conceptually similar to the ES6 [array spread operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Spread_operator). We can simplify the `todoApp` example above by using the object spread syntax: <ide> <ide> ```js <ide> function todoApp(state = initialState, action) {
1
Go
Go
fix files in subdirectories creating bad whiteout
1c0f6653ba82c933885719478e90c13f8d7e32b7
<ide><path>pkg/archive/archive_linux.go <ide> func (overlayWhiteoutConverter) ConvertWrite(hdr *tar.Header, path string, fi os <ide> // convert whiteouts to AUFS format <ide> if fi.Mode()&os.ModeCharDevice != 0 && hdr.Devmajor == 0 && hdr.Devminor == 0 { <ide> // we just rename the file and make it normal <del> hdr.Name = WhiteoutPrefix + hdr.Name <add> dir, filename := filepath.Split(hdr.Name) <add> hdr.Name = filepath.Join(dir, WhiteoutPrefix+filename) <ide> hdr.Mode = 0600 <ide> hdr.Typeflag = tar.TypeReg <ide> hdr.Size = 0
1
Python
Python
add new lfs prune api
08a5f57567d8a975d900b66658bfd3c28c9dbec5
<ide><path>examples/pytorch/language-modeling/run_clm_no_trainer.py <ide> def group_texts(examples): <ide> unwrapped_model.save_pretrained(args.output_dir, save_function=accelerator.save) <ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <del> repo.push_to_hub(commit_message=f"Training in progress epoch {epoch}", blocking=False) <add> repo.push_to_hub( <add> commit_message=f"Training in progress epoch {epoch}", blocking=False, auto_lfs_prune=True <add> ) <ide> <ide> if args.output_dir is not None: <ide> accelerator.wait_for_everyone() <ide> def group_texts(examples): <ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <ide> if args.push_to_hub: <del> repo.push_to_hub(commit_message="End of training") <add> repo.push_to_hub(commit_message="End of training", auto_lfs_prune=True) <ide> <ide> <ide> if __name__ == "__main__": <ide><path>examples/pytorch/language-modeling/run_mlm_no_trainer.py <ide> def group_texts(examples): <ide> unwrapped_model.save_pretrained(args.output_dir, save_function=accelerator.save) <ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <del> repo.push_to_hub(commit_message=f"Training in progress epoch {epoch}", blocking=False) <add> repo.push_to_hub( <add> commit_message=f"Training in progress epoch {epoch}", blocking=False, auto_lfs_prune=True <add> ) <ide> <ide> if args.output_dir is not None: <ide> accelerator.wait_for_everyone() <ide> def group_texts(examples): <ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <ide> if args.push_to_hub: <del> repo.push_to_hub(commit_message="End of training") <add> repo.push_to_hub(commit_message="End of training", auto_lfs_prune=True) <ide> <ide> <ide> if __name__ == "__main__": <ide><path>examples/pytorch/multiple-choice/run_swag_no_trainer.py <ide> def preprocess_function(examples): <ide> unwrapped_model.save_pretrained(args.output_dir, 
save_function=accelerator.save) <ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <del> repo.push_to_hub(commit_message=f"Training in progress epoch {epoch}", blocking=False) <add> repo.push_to_hub( <add> commit_message=f"Training in progress epoch {epoch}", blocking=False, auto_lfs_prune=True <add> ) <ide> <ide> if args.output_dir is not None: <ide> accelerator.wait_for_everyone() <ide> def preprocess_function(examples): <ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <ide> if args.push_to_hub: <del> repo.push_to_hub(commit_message="End of training") <add> repo.push_to_hub(commit_message="End of training", auto_lfs_prune=True) <ide> <ide> <ide> if __name__ == "__main__": <ide><path>examples/pytorch/question-answering/run_qa_beam_search_no_trainer.py <ide> def create_and_fill_np_array(start_or_end_logits, dataset, max_len): <ide> unwrapped_model.save_pretrained(args.output_dir, save_function=accelerator.save) <ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <del> repo.push_to_hub(commit_message=f"Training in progress epoch {epoch}", blocking=False) <add> repo.push_to_hub( <add> commit_message=f"Training in progress epoch {epoch}", blocking=False, auto_lfs_prune=True <add> ) <ide> <ide> # intialize all lists to collect the batches <ide> all_start_top_log_probs = [] <ide> def create_and_fill_np_array(start_or_end_logits, dataset, max_len): <ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <ide> if args.push_to_hub: <del> repo.push_to_hub(commit_message="End of training") <add> repo.push_to_hub(commit_message="End of training", auto_lfs_prune=True) <ide> <ide> <ide> if __name__ == "__main__": <ide><path>examples/pytorch/question-answering/run_qa_no_trainer.py <ide> def create_and_fill_np_array(start_or_end_logits, dataset, max_len): <ide> unwrapped_model.save_pretrained(args.output_dir, save_function=accelerator.save) 
<ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <del> repo.push_to_hub(commit_message=f"Training in progress epoch {epoch}", blocking=False) <add> repo.push_to_hub( <add> commit_message=f"Training in progress epoch {epoch}", blocking=False, auto_lfs_prune=True <add> ) <ide> <ide> # Evaluation <ide> logger.info("***** Running Evaluation *****") <ide> def create_and_fill_np_array(start_or_end_logits, dataset, max_len): <ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <ide> if args.push_to_hub: <del> repo.push_to_hub(commit_message="End of training") <add> repo.push_to_hub(commit_message="End of training", auto_lfs_prune=True) <ide> <ide> <ide> if __name__ == "__main__": <ide><path>examples/pytorch/speech-pretraining/run_wav2vec2_pretraining_no_trainer.py <ide> def prepare_dataset(batch): <ide> unwrapped_model.save_pretrained(args.output_dir, save_function=accelerator.save) <ide> <ide> if (args.push_to_hub and epoch < args.num_train_epochs - 1) and accelerator.is_main_process: <del> repo.push_to_hub(commit_message=f"Training in progress step {completed_steps}", blocking=False) <add> repo.push_to_hub( <add> commit_message=f"Training in progress step {completed_steps}", <add> blocking=False, <add> auto_lfs_prune=True, <add> ) <ide> <ide> # if completed steps > `args.max_train_steps` stop <ide> if completed_steps >= args.max_train_steps: <ide> def prepare_dataset(batch): <ide> unwrapped_model.save_pretrained(args.output_dir, save_function=accelerator.save) <ide> if accelerator.is_main_process: <ide> if args.push_to_hub: <del> repo.push_to_hub(commit_message="End of training") <add> repo.push_to_hub(commit_message="End of training", auto_lfs_prune=True) <ide> <ide> <ide> if __name__ == "__main__": <ide><path>examples/pytorch/summarization/run_summarization_no_trainer.py <ide> def postprocess_text(preds, labels): <ide> unwrapped_model.save_pretrained(args.output_dir, 
save_function=accelerator.save) <ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <del> repo.push_to_hub(commit_message=f"Training in progress epoch {epoch}", blocking=False) <add> repo.push_to_hub( <add> commit_message=f"Training in progress epoch {epoch}", blocking=False, auto_lfs_prune=True <add> ) <ide> <ide> if args.output_dir is not None: <ide> accelerator.wait_for_everyone() <ide> def postprocess_text(preds, labels): <ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <ide> if args.push_to_hub: <del> repo.push_to_hub(commit_message="End of training") <add> repo.push_to_hub(commit_message="End of training", auto_lfs_prune=True) <ide> <ide> <ide> if __name__ == "__main__": <ide><path>examples/pytorch/text-classification/run_glue_no_trainer.py <ide> def preprocess_function(examples): <ide> unwrapped_model.save_pretrained(args.output_dir, save_function=accelerator.save) <ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <del> repo.push_to_hub(commit_message=f"Training in progress epoch {epoch}", blocking=False) <add> repo.push_to_hub( <add> commit_message=f"Training in progress epoch {epoch}", blocking=False, auto_lfs_prune=True <add> ) <ide> <ide> if args.output_dir is not None: <ide> accelerator.wait_for_everyone() <ide> def preprocess_function(examples): <ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <ide> if args.push_to_hub: <del> repo.push_to_hub(commit_message="End of training") <add> repo.push_to_hub(commit_message="End of training", auto_lfs_prune=True) <ide> <ide> if args.task_name == "mnli": <ide> # Final evaluation on mismatched validation set <ide><path>examples/pytorch/token-classification/run_ner_no_trainer.py <ide> def compute_metrics(): <ide> unwrapped_model.save_pretrained(args.output_dir, save_function=accelerator.save) <ide> if accelerator.is_main_process: <ide> 
tokenizer.save_pretrained(args.output_dir) <del> repo.push_to_hub(commit_message=f"Training in progress epoch {epoch}", blocking=False) <add> repo.push_to_hub( <add> commit_message=f"Training in progress epoch {epoch}", blocking=False, auto_lfs_prune=True <add> ) <ide> <ide> if args.output_dir is not None: <ide> accelerator.wait_for_everyone() <ide> def compute_metrics(): <ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <ide> if args.push_to_hub: <del> repo.push_to_hub(commit_message="End of training") <add> repo.push_to_hub(commit_message="End of training", auto_lfs_prune=True) <ide> <ide> <ide> if __name__ == "__main__": <ide><path>examples/pytorch/translation/run_translation_no_trainer.py <ide> def postprocess_text(preds, labels): <ide> unwrapped_model.save_pretrained(args.output_dir, save_function=accelerator.save) <ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <del> repo.push_to_hub(commit_message=f"Training in progress epoch {epoch}", blocking=False) <add> repo.push_to_hub( <add> commit_message=f"Training in progress epoch {epoch}", blocking=False, auto_lfs_prune=True <add> ) <ide> <ide> if args.output_dir is not None: <ide> accelerator.wait_for_everyone() <ide> def postprocess_text(preds, labels): <ide> if accelerator.is_main_process: <ide> tokenizer.save_pretrained(args.output_dir) <ide> if args.push_to_hub: <del> repo.push_to_hub(commit_message="End of training") <add> repo.push_to_hub(commit_message="End of training", auto_lfs_prune=True) <ide> <ide> <ide> if __name__ == "__main__": <ide><path>src/transformers/trainer.py <ide> def _push_from_checkpoint(self, checkpoint_folder): <ide> commit_message = f"Training in progress, step {self.state.global_step}" <ide> else: <ide> commit_message = f"Training in progress, epoch {int(self.state.epoch)}" <del> _, self.push_in_progress = self.repo.push_to_hub(commit_message=commit_message, blocking=False) <add> _, self.push_in_progress = 
self.repo.push_to_hub( <add> commit_message=commit_message, blocking=False, auto_lfs_prune=True <add> ) <ide> finally: <ide> if self.args.hub_strategy == HubStrategy.CHECKPOINT: <ide> # Move back the checkpoint to its place <ide> def push_to_hub(self, commit_message: Optional[str] = "End of training", blockin <ide> if not self.is_world_process_zero(): <ide> return <ide> <del> git_head_commit_url = self.repo.push_to_hub(commit_message=commit_message, blocking=blocking) <add> git_head_commit_url = self.repo.push_to_hub( <add> commit_message=commit_message, blocking=blocking, auto_lfs_prune=True <add> ) <ide> # push separately the model card to be independant from the rest of the model <ide> if self.args.should_save: <ide> self.create_model_card(model_name=model_name, **kwargs) <ide> try: <del> self.repo.push_to_hub(commit_message="update model card README.md", blocking=blocking) <add> self.repo.push_to_hub( <add> commit_message="update model card README.md", blocking=blocking, auto_lfs_prune=True <add> ) <ide> except EnvironmentError as exc: <ide> logger.error(f"Error pushing update to the model card. Please read logs and retry.\n${exc}") <ide>
11
Python
Python
install common.gypi along with headers
55b0bd639dea3e0d56b596aaa6ba2d26458c2be1
<ide><path>tools/install.py <ide> def files(action): <ide> if 'true' == variables.get('node_install_npm'): npm_files(action) <ide> <ide> action([ <add> 'common.gypi', <ide> 'config.gypi', <ide> 'src/node.h', <ide> 'src/node_buffer.h',
1
Go
Go
fix golint issues
6397dd4d3123e0a1b89298d0a2cfe5388410a74f
<ide><path>integration-cli/checker/checker.go <ide> import ( <ide> "gotest.tools/assert/cmp" <ide> ) <ide> <add>// Compare defines the interface to compare values <ide> type Compare func(x interface{}) assert.BoolOrComparison <ide> <add>// False checks if the value is false <ide> func False() Compare { <ide> return func(x interface{}) assert.BoolOrComparison { <ide> return !x.(bool) <ide> } <ide> } <ide> <add>// True checks if the value is true <ide> func True() Compare { <ide> return func(x interface{}) assert.BoolOrComparison { <ide> return x <ide> } <ide> } <ide> <add>// Equals checks if the value is equal to the given value <ide> func Equals(y interface{}) Compare { <ide> return func(x interface{}) assert.BoolOrComparison { <ide> return cmp.Equal(x, y) <ide> } <ide> } <ide> <add>// Contains checks if the value contains the given value <ide> func Contains(y interface{}) Compare { <ide> return func(x interface{}) assert.BoolOrComparison { <ide> return cmp.Contains(x, y) <ide> } <ide> } <ide> <add>// Not checks if two values are not <ide> func Not(c Compare) Compare { <ide> return func(x interface{}) assert.BoolOrComparison { <ide> r := c(x) <ide> func Not(c Compare) Compare { <ide> } <ide> } <ide> <add>// DeepEquals checks if two values are equal <ide> func DeepEquals(y interface{}) Compare { <ide> return func(x interface{}) assert.BoolOrComparison { <ide> return cmp.DeepEqual(x, y) <ide> } <ide> } <ide> <add>// DeepEquals compares if two values are deepequal <ide> func HasLen(y int) Compare { <ide> return func(x interface{}) assert.BoolOrComparison { <ide> return cmp.Len(x, y) <ide> } <ide> } <ide> <add>// DeepEquals checks if the given value is nil <ide> func IsNil() Compare { <ide> return func(x interface{}) assert.BoolOrComparison { <ide> return cmp.Nil(x) <ide> } <ide> } <ide> <add>// GreaterThan checks if the value is greater than the given value <ide> func GreaterThan(y int) Compare { <ide> return func(x interface{}) assert.BoolOrComparison { <ide> return 
x.(int) > y <ide> } <ide> } <del> <del>func NotNil() Compare { <del> return Not(IsNil()) <del>}
1
Javascript
Javascript
fix punycode test for --without-intl
f8063d51d7e83ab6d5c7cfcadc37ed6b4f6d8ef5
<ide><path>benchmark/misc/punycode.js <ide> 'use strict'; <ide> <ide> const common = require('../common.js'); <del>const icu = process.binding('icu'); <add>let icu; <add>try { <add> icu = process.binding('icu'); <add>} catch (err) {} <ide> const punycode = require('punycode'); <ide> <ide> const bench = common.createBenchmark(main, { <del> method: ['punycode', 'icu'], <add> method: ['punycode'].concat(icu !== undefined ? ['icu'] : []), <ide> n: [1024], <ide> val: [ <ide> 'افغانستا.icom.museum', <ide> function main(conf) { <ide> runPunycode(n, val); <ide> break; <ide> case 'icu': <del> runICU(n, val); <del> break; <add> if (icu !== undefined) { <add> runICU(n, val); <add> break; <add> } <add> // fallthrough <ide> default: <ide> throw new Error('Unexpected method'); <ide> }
1
Python
Python
remove assert on optional arg
96881729ce83cfc8e5fa04c903ee4296ad17cfbb
<ide><path>examples/pytorch/summarization/run_summarization.py <ide> class DataTrainingArguments: <ide> def __post_init__(self): <ide> if self.dataset_name is None and self.train_file is None and self.validation_file is None: <ide> raise ValueError("Need either a dataset name or a training/validation file.") <del> elif self.lang is None: <del> raise ValueError("Need to specify the language.") <del> <ide> else: <ide> if self.train_file is not None: <ide> extension = self.train_file.split(".")[-1]
1
PHP
PHP
remove use of file from email
f95e242d7e15545ae312e96d25a2b6f6a7ca5726
<ide><path>src/Mailer/Email.php <ide> use BadMethodCallException; <ide> use Cake\Core\Configure; <ide> use Cake\Core\StaticConfigTrait; <del>use Cake\Filesystem\File; <ide> use Cake\Http\Client\FormDataPart; <ide> use Cake\Log\Log; <ide> use Cake\Utility\Hash; <ide> protected function _attachFiles($boundary = null) <ide> */ <ide> protected function _readFile($path) <ide> { <del> $File = new File($path); <del> <del> return chunk_split(base64_encode($File->read())); <add> return chunk_split(base64_encode((string)file_get_contents($path))); <ide> } <ide> <ide> /**
1
Ruby
Ruby
add missing require to html sanitizer
1bd88fdafd9cd0c1c83e8bf41020d0c251cb3108
<ide><path>actionpack/lib/action_controller/vendor/html-scanner/html/sanitizer.rb <ide> require 'set' <add>require 'cgi' <ide> require 'active_support/core_ext/class/attribute' <ide> <ide> module HTML
1
Python
Python
fix generation docstring
a767276fdd8835c31a37b4b6cefea18d6e86064b
<ide><path>src/transformers/generation_utils.py <ide> def generate( <ide> >>> outputs = model.generate(input_ids=input_ids, max_length=20, repetition_penalty=1.2) <ide> >>> print("Generated:", tokenizer.decode(outputs[0], skip_special_tokens=True)) <ide> <del> >>> tokenizer = AutoTokenizer.from_pretrained("gpt2") <add> >>> tokenizer = AutoTokenizer.from_pretrained("gpt2", use_fast=False) <ide> >>> model = AutoModelForCausalLM.from_pretrained("gpt2") <ide> >>> input_context = "My cute dog" <ide> >>> # get tokens of words that should not be generated <del> >>> bad_words_ids = [tokenizer(bad_word, add_prefix_space=True).input_ids for bad_word in ["idiot", "stupid", "shut up"]] <add> >>> bad_words_ids = tokenizer(["idiot", "stupid", "shut up"], add_prefix_space=True).input_ids <ide> >>> # encode input context <ide> >>> input_ids = tokenizer(input_context, return_tensors="pt").input_ids <ide> >>> # generate sequences without allowing bad_words to be generated <ide><path>src/transformers/models/gpt2/tokenization_gpt2_fast.py <ide> class GPT2TokenizerFast(PreTrainedTokenizerFast): <ide> >>> tokenizer(" Hello world")['input_ids'] <ide> [18435, 995] <ide> <del> You can get around that behavior by passing ``add_prefix_space=True`` when instantiating this tokenizer or when you <del> call it on some text, but since the model was not pretrained this way, it might yield a decrease in performance. <add> You can get around that behavior by passing ``add_prefix_space=True`` when instantiating this tokenizer, but since <add> the model was not pretrained this way, it might yield a decrease in performance. <ide> <ide> .. note:: <ide>
2
Python
Python
add a noun chunker for finnish
e9c26f2ee9f03c2aa6b7cd724f4c0b3717507211
<ide><path>spacy/lang/fi/__init__.py <ide> from .stop_words import STOP_WORDS <ide> from .lex_attrs import LEX_ATTRS <ide> from .punctuation import TOKENIZER_INFIXES, TOKENIZER_SUFFIXES <add>from .syntax_iterators import SYNTAX_ITERATORS <ide> from ...language import Language, BaseDefaults <ide> <ide> <ide> class FinnishDefaults(BaseDefaults): <ide> tokenizer_exceptions = TOKENIZER_EXCEPTIONS <ide> lex_attr_getters = LEX_ATTRS <ide> stop_words = STOP_WORDS <add> syntax_iterators = SYNTAX_ITERATORS <ide> <ide> <ide> class Finnish(Language): <ide><path>spacy/lang/fi/syntax_iterators.py <add>from typing import Iterator, Tuple, Union <add>from ...tokens import Doc, Span <add>from ...symbols import NOUN, PROPN, PRON <add>from ...errors import Errors <add> <add> <add>def noun_chunks(doclike: Union[Doc, Span]) -> Iterator[Tuple[int, int, int]]: <add> """Detect base noun phrases from a dependency parse. Works on both Doc and Span.""" <add> labels = [ <add> "appos", <add> "nsubj", <add> "nsubj:cop", <add> "obj", <add> "obl", <add> "ROOT", <add> ] <add> extend_labels = [ <add> "amod", <add> "compound", <add> "compound:nn", <add> "flat:name", <add> "nmod", <add> "nmod:gobj", <add> "nmod:gsubj", <add> "nmod:poss", <add> "nummod", <add> ] <add> <add> def potential_np_head(word): <add> return word.pos in (NOUN, PROPN) and ( <add> word.dep in np_deps or word.head.pos == PRON <add> ) <add> <add> doc = doclike.doc # Ensure works on both Doc and Span. 
<add> if not doc.has_annotation("DEP"): <add> raise ValueError(Errors.E029) <add> <add> np_deps = [doc.vocab.strings[label] for label in labels] <add> extend_deps = [doc.vocab.strings[label] for label in extend_labels] <add> np_label = doc.vocab.strings.add("NP") <add> conj_label = doc.vocab.strings.add("conj") <add> <add> rbracket = 0 <add> prev_end = -1 <add> for i, word in enumerate(doclike): <add> if i < rbracket: <add> continue <add> <add> # Is this a potential independent NP head or coordinated with <add> # a NOUN that is itself an independent NP head? <add> # <add> # e.g. "Terveyden ja hyvinvoinnin laitos" <add> if potential_np_head(word) or ( <add> word.dep == conj_label and potential_np_head(word.head) <add> ): <add> # Try to extend to the left to include adjective/num <add> # modifiers, compound words etc. <add> lbracket = word.i <add> for ldep in word.lefts: <add> if ldep.dep in extend_deps: <add> lbracket = ldep.left_edge.i <add> break <add> <add> # Prevent nested chunks from being produced <add> if lbracket <= prev_end: <add> continue <add> <add> rbracket = word.i <add> # Try to extend the span to the right to capture <add> # appositions and noun modifiers <add> for rdep in word.rights: <add> if rdep.dep in extend_deps: <add> rbracket = rdep.i <add> prev_end = rbracket <add> <add> yield lbracket, rbracket + 1, np_label <add> <add> <add>SYNTAX_ITERATORS = {"noun_chunks": noun_chunks} <ide><path>spacy/tests/lang/fi/test_noun_chunks.py <add>import pytest <add>from spacy.tokens import Doc <add> <add> <add>FI_NP_TEST_EXAMPLES = [ <add> ( <add> "Kaksi tyttöä potkii punaista palloa", <add> ["NUM", "NOUN", "VERB", "ADJ", "NOUN"], <add> ["nummod", "nsubj", "ROOT", "amod", "obj"], <add> [1, 1, 0, 1, -2], <add> ["Kaksi tyttöä", "punaista palloa"], <add> ), <add> ( <add> "Erittäin vaarallinen leijona karkasi kiertävän sirkuksen eläintenkesyttäjältä", <add> ["ADV", "ADJ", "NOUN", "VERB", "ADJ", "NOUN", "NOUN"], <add> ["advmod", "amod", "nsubj", "ROOT", "amod", 
"nmod:poss", "obl"], <add> [1, 1, 1, 0, 1, 1, -3], <add> ["Erittäin vaarallinen leijona", "kiertävän sirkuksen eläintenkesyttäjältä"], <add> ), <add> ( <add> "Leijona raidallisine tassuineen piileksii Porin kaupungin lähellä", <add> ["NOUN", "ADJ", "NOUN", "VERB", "PROPN", "NOUN", "ADP"], <add> ["nsubj", "amod", "nmod", "ROOT", "nmod:poss", "obl", "case"], <add> [3, 1, -2, 0, 1, -2, -1], <add> ["Leijona raidallisine tassuineen", "Porin kaupungin"], <add> ), <add> ( <add> "Lounaalla nautittiin salaattia, maukasta kanaa ja raikasta vettä", <add> ["NOUN", "VERB", "NOUN", "PUNCT", "ADJ", "NOUN", "CCONJ", "ADJ", "NOUN"], <add> ["obl", "ROOT", "obj", "punct", "amod", "conj", "cc", "amod", "conj"], <add> [1, 0, -1, 2, 1, -3, 2, 1, -6], <add> ["Lounaalla", "salaattia", "maukasta kanaa", "raikasta vettä"], <add> ), <add> ( <add> "Minua houkuttaa maalle muuttaminen talven jälkeen", <add> ["PRON", "VERB", "NOUN", "NOUN", "NOUN", "ADP"], <add> ["obj", "ROOT", "nmod", "nsubj", "obl", "case"], <add> [1, 0, 1, -2, -3, -1], <add> ["maalle muuttaminen", "talven"], <add> ), <add> ( <add> "Päivän kohokohta oli vierailu museossa kummilasten kanssa", <add> ["NOUN", "NOUN", "AUX", "NOUN", "NOUN", "NOUN", "ADP"], <add> ["nmod:poss", "nsubj:cop", "cop", "ROOT", "nmod", "obl", "case"], <add> [1, 2, 1, 0, -1, -2, -1], <add> ["Päivän kohokohta", "vierailu museossa", "kummilasten"], <add> ), <add> ( <add> "Yrittäjät maksoivat tuomioistuimen määräämät korvaukset", <add> ["NOUN", "VERB", "NOUN", "VERB", "NOUN"], <add> ["nsubj", "ROOT", "nsubj", "acl", "obj"], <add> [1, 0, 1, 1, -3], <add> ["Yrittäjät", "tuomioistuimen", "korvaukset"], <add> ), <add> ( <add> "Julkisoikeudelliset tai niihin rinnastettavat saatavat ovat suoraan ulosottokelpoisia", <add> ["ADJ", "CCONJ", "PRON", "VERB", "NOUN", "AUX", "ADV", "NOUN"], <add> ["amod", "cc", "obl", "acl", "nsubj:cop", "cop", "advmod", "ROOT"], <add> [4, 3, 1, 1, 3, 2, 1, 0], <add> ["Julkisoikeudelliset tai niihin rinnastettavat saatavat", 
"ulosottokelpoisia"], <add> ), <add> ( <add> "Se oli ala-arvoista käytöstä kaikilta oppilailta, myös valvojaoppilailta", <add> ["PRON", "AUX", "ADJ", "NOUN", "PRON", "NOUN", "PUNCT", "ADV", "NOUN"], <add> ["nsubj:cop", "cop", "amod", "ROOT", "det", "nmod", "punct", "advmod", "appos"], <add> [3, 2, 1, 0, 1, -2, 2, 1, -3], <add> ["ala-arvoista käytöstä kaikilta oppilailta", "valvojaoppilailta"], <add> ), <add> ( <add> "Isä souti veneellä, jonka hän oli vuokrannut", <add> ["NOUN", "VERB", "NOUN", "PUNCT", "PRON", "PRON", "AUX", "VERB"], <add> ["nsubj", "ROOT", "obl", "punct", "obj", "nsubj", "aux", "acl:relcl"], <add> [1, 0, -1, 4, 3, 2, 1, -5], <add> ["Isä", "veneellä"], <add> ), <add> ( <add> "Kirja, jonka poimin hyllystä, kertoo norsuista", <add> ["NOUN", "PUNCT", "PRON", "VERB", "NOUN", "PUNCT", "VERB", "NOUN"], <add> ["nsubj", "punct", "obj", "acl:relcl", "obl", "punct", "ROOT", "obl"], <add> [6, 2, 1, -3, -1, 1, 0, -1], <add> ["Kirja", "hyllystä", "norsuista"], <add> ), <add> ( <add> "Huomenna on päivä, jota olemme odottaneet", <add> ["NOUN", "AUX", "NOUN", "PUNCT", "PRON", "AUX", "VERB"], <add> ["ROOT", "cop", "nsubj:cop", "punct", "obj", "aux", "acl:relcl"], <add> [0, -1, -2, 3, 2, 1, -4], <add> ["Huomenna", "päivä"], <add> ), <add> ( <add> "Liikkuvuuden lisääminen on yksi korkeakoulutuksen keskeisistä kehittämiskohteista", <add> ["NOUN", "NOUN", "AUX", "PRON", "NOUN", "ADJ", "NOUN"], <add> ["nmod:gobj", "nsubj:cop", "cop", "ROOT", "nmod:poss", "amod", "nmod"], <add> [1, 2, 1, 0, 2, 1, -3], <add> [ <add> "Liikkuvuuden lisääminen", <add> "korkeakoulutuksen keskeisistä kehittämiskohteista", <add> ], <add> ), <add> ( <add> "Kaupalliset palvelut jätetään yksityisten palveluntarjoajien tarjottavaksi", <add> ["ADJ", "NOUN", "VERB", "ADJ", "NOUN", "NOUN"], <add> ["amod", "obj", "ROOT", "amod", "nmod:gsubj", "obl"], <add> [1, 1, 0, 1, 1, -3], <add> ["Kaupalliset palvelut", "yksityisten palveluntarjoajien tarjottavaksi"], <add> ), <add> ( <add> "New York tunnetaan 
kaupunkina, joka ei koskaan nuku", <add> ["PROPN", "PROPN", "VERB", "NOUN", "PUNCT", "PRON", "AUX", "ADV", "VERB"], <add> ["obj", "flat:name", "ROOT", "obl", "punct", "nsubj", "aux", "advmod", "acl:relcl"], <add> [2, -1, 0, -1, 4, 3, 2, 1, -5], <add> ["New York", "kaupunkina"], <add> ), <add> ( <add> "Loput vihjeet saat herra Möttöseltä", <add> ["NOUN", "NOUN", "VERB", "NOUN", "PROPN"], <add> ["compound:nn", "obj", "ROOT", "compound:nn", "obj"], <add> [1, 1, 0, 1, -2], <add> ["Loput vihjeet", "herra Möttöseltä"], <add> ), <add> ( <add> "mahdollisuus tukea muita päivystysyksiköitä", <add> ["NOUN", "VERB", "PRON", "NOUN"], <add> ["ROOT", "acl", "det", "obj"], <add> [0, -1, 1, -2], <add> ["mahdollisuus", "päivystysyksiköitä"], <add> ), <add> ( <add> "sairaanhoitopiirit harjoittavat leikkaustoimintaa alueellaan useammassa sairaalassa", <add> ["NOUN", "VERB", "NOUN", "NOUN", "ADJ", "NOUN"], <add> ["nsubj", "ROOT", "obj", "obl", "amod", "obl"], <add> [1, 0, -1, -1, 1, -3], <add> ["sairaanhoitopiirit", "leikkaustoimintaa", "alueellaan", "useammassa sairaalassa"], <add> ), <add> ( <add> "Lain mukaan varhaiskasvatus on suunnitelmallista toimintaa", <add> ["NOUN", "ADP", "NOUN", "AUX", "ADJ", "NOUN"], <add> ["obl", "case", "nsubj:cop", "cop", "amod", "ROOT"], <add> [5, -1, 3, 2, 1, 0], <add> ["Lain", "varhaiskasvatus", "suunnitelmallista toimintaa"], <add> ), <add>] <add> <add> <add>def test_noun_chunks_is_parsed(fi_tokenizer): <add> """Test that noun_chunks raises Value Error for 'fi' language if Doc is not parsed. <add> To check this test, we're constructing a Doc <add> with a new Vocab here and forcing is_parsed to 'False' <add> to make sure the noun chunks don't run. 
<add> """ <add> doc = fi_tokenizer("Tämä on testi") <add> with pytest.raises(ValueError): <add> list(doc.noun_chunks) <add> <add> <add>@pytest.mark.parametrize( <add> "text,pos,deps,heads,expected_noun_chunks", FI_NP_TEST_EXAMPLES <add>) <add>def test_fi_noun_chunks(fi_tokenizer, text, pos, deps, heads, expected_noun_chunks): <add> tokens = fi_tokenizer(text) <add> <add> assert len(heads) == len(pos) <add> doc = Doc( <add> tokens.vocab, <add> words=[t.text for t in tokens], <add> heads=[head + i for i, head in enumerate(heads)], <add> deps=deps, <add> pos=pos, <add> ) <add> <add> noun_chunks = list(doc.noun_chunks) <add> assert len(noun_chunks) == len(expected_noun_chunks) <add> for i, np in enumerate(noun_chunks): <add> assert np.text == expected_noun_chunks[i]
3
Ruby
Ruby
remove redundant arguments in store test helper
85cde3cde1c7712b2abb3383c1faa023f8392dae
<ide><path>activesupport/test/cache/stores/mem_cache_store_test.rb <ide> def random_string(length) <ide> end <ide> <ide> def store <del> [:mem_cache_store, ENV["MEMCACHE_SERVERS"] || "localhost:11211"] <add> [:mem_cache_store] <ide> end <ide> <ide> def emulating_latency
1
Python
Python
remove trailing whitespace
bcc1d50d09dcaee958d3e76146aa1987a7c51706
<ide><path>spacy/language.py <ide> def create_vocab(cls, nlp=None): <ide> else: <ide> return Vocab.load(nlp.path, lex_attr_getters=cls.lex_attr_getters, <ide> tag_map=cls.tag_map, lemmatizer=lemmatizer) <del> <add> <ide> @classmethod <ide> def add_vectors(cls, nlp=None): <ide> if nlp is None or nlp.path is None: <ide> def create_pipeline(self, nlp=None): <ide> tag_map = {} <ide> <ide> tokenizer_exceptions = {} <del> <add> <ide> parser_features = get_templates('parser') <del> <add> <ide> entity_features = get_templates('ner') <ide> <ide> tagger_features = Tagger.feature_templates # TODO -- fix this <ide> def __init__(self, **overrides): <ide> path = util.match_best_version(self.lang, '', util.get_data_path()) <ide> <ide> self.path = path <del> <add> <ide> self.vocab = self.Defaults.create_vocab(self) \ <ide> if 'vocab' not in overrides \ <ide> else overrides['vocab'] <ide> def __call__(self, text, tag=True, parse=True, entity=True): <ide> """Apply the pipeline to some text. The text can span multiple sentences, <ide> and can contain arbtrary whitespace. Alignment into the original string <ide> is preserved. <del> <add> <ide> Args: <ide> text (unicode): The text to be processed. <ide> <ide> def __call__(self, text, tag=True, parse=True, entity=True): <ide> <ide> def pipe(self, texts, tag=True, parse=True, entity=True, n_threads=2, batch_size=1000): <ide> '''Process texts as a stream, and yield Doc objects in order. <del> <add> <ide> Supports GIL-free multi-threading. 
<del> <add> <ide> Arguments: <ide> texts (iterator) <ide> tag (bool) <ide> def end_training(self, path=None): <ide> path = self.path <ide> elif isinstance(path, basestring): <ide> path = pathlib.Path(path) <del> <add> <ide> if self.tagger: <ide> self.tagger.model.end_training() <ide> self.tagger.model.dump(str(path / 'pos' / 'model')) <ide> def end_training(self, path=None): <ide> if self.entity: <ide> self.entity.model.end_training() <ide> self.entity.model.dump(str(path / 'ner' / 'model')) <del> <add> <ide> strings_loc = path / 'vocab' / 'strings.json' <ide> with strings_loc.open('w', encoding='utf8') as file_: <ide> self.vocab.strings.dump(file_)
1
Java
Java
remove tiles 3 configuration method
d9540ff34233bb36794da5490a21c7eaa632f1dd
<ide><path>spring-webmvc-tiles3/src/main/java/org/springframework/web/servlet/view/tiles3/TilesConfigurer.java <ide> protected AbstractTilesContainerFactory createContainerFactory(ApplicationContex <ide> <ide> private class SpringCompleteAutoloadTilesContainerFactory extends CompleteAutoloadTilesContainerFactory { <ide> <del> @Override <del> protected AttributeEvaluatorFactory createAttributeEvaluatorFactory( <del> ApplicationContext applicationContext, LocaleResolver resolver) { <del> return new BasicAttributeEvaluatorFactory(new DirectAttributeEvaluator()); <del> } <del> <ide> @Override <ide> public TilesContainer createContainer(ApplicationContext applicationContext) { <ide> CachingTilesContainer cachingContainer = (CachingTilesContainer) super.createContainer(applicationContext);
1
Javascript
Javascript
change button color on submit
ee45a147eca02b15ba1ca55d5fc36ee5d399e8db
<ide><path>public/js/lib/coursewares/commonFrameWork_0.0.6.js <ide> function showCompletion() { <ide> <ide> $('#submit-challenge') <ide> .attr('disabled', 'true') <del> .addClass('disabled'); <add> .removeClass('btn-primary') <add> .addClass('btn-warning disabled'); <ide> <ide> e.preventDefault(); <ide> $.post(
1
Javascript
Javascript
permit null as a cipher value
4a741b8dc2c19efcef49ca39362bb7126d80a3d1
<ide><path>lib/_tls_common.js <ide> exports.createSecureContext = function createSecureContext(options) { <ide> } <ide> } <ide> <del> if (ciphers !== undefined) <add> if (ciphers != null) <ide> validateString(ciphers, 'options.ciphers'); <ide> <ide> // Work around an OpenSSL API quirk. cipherList is for TLSv1.2 and below, <ide><path>test/parallel/test-tls-set-ciphers.js <ide> test('AES256-SHA', ':', U, U, 'ERR_INVALID_ARG_VALUE'); <ide> // Using '' is synonymous for "use default ciphers" <ide> test('TLS_AES_256_GCM_SHA384', '', 'TLS_AES_256_GCM_SHA384'); <ide> test('', 'TLS_AES_256_GCM_SHA384', 'TLS_AES_256_GCM_SHA384'); <add> <add>// Using null should be treated the same as undefined. <add>test(null, 'AES256-SHA', 'AES256-SHA'); <add>test('AES256-SHA', null, 'AES256-SHA');
2
Javascript
Javascript
create contexts in object shorthand syntax
6c0056c2075ae07bf5e21ac9ffe3dac74904e43a
<ide><path>lib/dependencies/CommonJsImportsParserPlugin.js <ide> class CommonJsImportsParserPlugin { <ide> regExp: options.unknownContextRegExp, <ide> mode: "sync" <ide> }, <del> expr.range <add> expr.range, <add> undefined, <add> parser.scope.inShorthand <ide> ); <ide> dep.critical = <ide> options.unknownContextCritical && <ide><path>lib/dependencies/CommonJsRequireContextDependency.js <ide> const ContextDependency = require("./ContextDependency"); <ide> const ContextDependencyTemplateAsRequireCall = require("./ContextDependencyTemplateAsRequireCall"); <ide> <ide> class CommonJsRequireContextDependency extends ContextDependency { <del> constructor(options, range, valueRange) { <add> constructor(options, range, valueRange, inShorthand) { <ide> super(options); <ide> <ide> this.range = range; <ide> this.valueRange = valueRange; <add> this.inShorthand = inShorthand; <ide> } <ide> <ide> get type() { <ide> class CommonJsRequireContextDependency extends ContextDependency { <ide> <ide> write(this.range); <ide> write(this.valueRange); <add> write(this.inShorthand); <ide> <ide> super.serialize(context); <ide> } <ide> class CommonJsRequireContextDependency extends ContextDependency { <ide> <ide> this.range = read(); <ide> this.valueRange = read(); <add> this.inShorthand = read(); <ide> <ide> super.deserialize(context); <ide> } <ide><path>lib/dependencies/ContextDependencyTemplateAsRequireCall.js <ide> <ide> "use strict"; <ide> <add>const CommonJsRequireContextDependency = require("./CommonJsRequireContextDependency"); <ide> const ContextDependency = require("./ContextDependency"); <ide> <ide> /** @typedef {import("webpack-sources").ReplaceSource} ReplaceSource */ <ide> class ContextDependencyTemplateAsRequireCall extends ContextDependency.Template <ide> { runtimeTemplate, moduleGraph, chunkGraph, runtimeRequirements } <ide> ) { <ide> const dep = /** @type {ContextDependency} */ (dependency); <del> const moduleExports = runtimeTemplate.moduleExports({ <add> let moduleExports 
= runtimeTemplate.moduleExports({ <ide> module: moduleGraph.getModule(dep), <ide> chunkGraph, <ide> request: dep.request, <ide> runtimeRequirements <ide> }); <ide> <add> if (dep instanceof CommonJsRequireContextDependency && dep.inShorthand) { <add> moduleExports = `${dep.inShorthand}: ${moduleExports}`; <add> } <ide> if (moduleGraph.getModule(dep)) { <ide> if (dep.valueRange) { <ide> if (Array.isArray(dep.replaces)) { <ide><path>test/configCases/parsing/issue-14545/index.js <add>it("should generate valid code when 'require' encounters object shorthand syntax", function() { <add> expect(require("./module").obj.require).toEqual(require("./module").obj.r); <add> expect(require("./module").obj.require).toBeTypeOf("function"); <add>}); <ide><path>test/configCases/parsing/issue-14545/module.js <add>export const obj = {require, r: require} <ide><path>test/configCases/parsing/issue-14545/webpack.config.js <add>/** @type {import("../../../../").Configuration} */ <add>module.exports = { <add> module: { <add> unknownContextRegExp: /^\.\//, <add> unknownContextCritical: false, <add> exprContextRegExp: /^\.\//, <add> exprContextCritical: false <add> } <add>};
6
Javascript
Javascript
allow leading whitespace in declaration regexp
87df048a6b3cc39c5cbd91c8e27cc23a111018e3
<ide><path>examples/jsm/nodes/core/FunctionNode.js <ide> import { TempNode } from './TempNode.js'; <ide> import { NodeLib } from './NodeLib.js'; <ide> <del>var declarationRegexp = /^([a-z_0-9]+)\s([a-z_0-9]+)\s*\((.*?)\)/i, <add>var declarationRegexp = /^\s*([a-z_0-9]+)\s([a-z_0-9]+)\s*\((.*?)\)/i, <ide> propertiesRegexp = /[a-z_0-9]+/ig; <ide> <ide> function FunctionNode( src, includes, extensions, keywords, type ) {
1
Java
Java
eliminate propertysourceaggregator interface
f46a455c72370714a7494ff95bf9d4cd927ebac8
<ide><path>org.springframework.core/src/main/java/org/springframework/core/env/ConfigurableEnvironment.java <ide> <ide> package org.springframework.core.env; <ide> <add>import java.util.LinkedList; <add>import java.util.Map; <add>import java.util.Properties; <add> <ide> import org.springframework.core.convert.ConversionService; <ide> <ide> /** <ide> * @author Chris Beams <ide> * @since 3.1 <ide> */ <del>public interface ConfigurableEnvironment extends Environment, PropertySourceAggregator { <add>public interface ConfigurableEnvironment extends Environment { <ide> <del> /** <del> * TODO SPR-7508: document <del> */ <ide> void setActiveProfiles(String... profiles); <ide> <del> /** <del> * TODO SPR-7508: document <del> */ <ide> void setDefaultProfiles(String... profiles); <ide> <ide> public ConversionService getConversionService(); <ide> <ide> public void setConversionService(ConversionService conversionService); <add> <add> void addPropertySource(PropertySource<?> propertySource); <add> <add> void addPropertySource(String name, Properties properties); <add> <add> void addPropertySource(String name, Map<String, String> propertiesMap); <add> <add> /** <add> * TODO: SPR-7508 document <add> * <add> * Care should be taken to ensure duplicates are not introduced. <add> * <add> * Recommend using {@link LinkedList#set(int, Object)} for replacing items, <add> * and combining {@link LinkedList#remove()} with other methods like <add> * {@link LinkedList#add(Object)} to prevent duplicates. <add> * <add> * Explain how {@link PropertySource#equals(Object)} and hashCode work, and that <add> * recommend using {@link PropertySource#named(String)} for lookups in the list. <add> */ <add> LinkedList<PropertySource<?>> getPropertySources(); <add> <ide> } <ide><path>org.springframework.core/src/main/java/org/springframework/core/env/PropertySourceAggregator.java <del>/* <del> * Copyright 2002-2010 the original author or authors. 
<del> * <del> * Licensed under the Apache License, Version 2.0 (the "License"); <del> * you may not use this file except in compliance with the License. <del> * You may obtain a copy of the License at <del> * <del> * http://www.apache.org/licenses/LICENSE-2.0 <del> * <del> * Unless required by applicable law or agreed to in writing, software <del> * distributed under the License is distributed on an "AS IS" BASIS, <del> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <del> * See the License for the specific language governing permissions and <del> * limitations under the License. <del> */ <del> <del>package org.springframework.core.env; <del> <del>import java.util.LinkedList; <del>import java.util.Map; <del>import java.util.Properties; <del> <del>/** <del> * TODO: SPR-7508 document <del> * <del> * @author Chris Beams <del> * @since 3.1 <del> */ <del>public interface PropertySourceAggregator { <del> <del> void addPropertySource(PropertySource<?> propertySource); <del> <del> void addPropertySource(String name, Properties properties); <del> <del> void addPropertySource(String name, Map<String, String> propertiesMap); <del> <del> /** <del> * TODO: SPR-7508 document <del> * <del> * Care should be taken to ensure duplicates are not introduced. <del> * <del> * Recommend using {@link LinkedList#set(int, Object)} for replacing items, <del> * and combining {@link LinkedList#remove()} with other methods like <del> * {@link LinkedList#add(Object)} to prevent duplicates. <del> * <del> * Explain how {@link PropertySource#equals(Object)} and hashCode work, and that <del> * recommend using {@link PropertySource#named(String)} for lookups in the list. <del> */ <del> LinkedList<PropertySource<?>> getPropertySources(); <del> <del>}
2
Ruby
Ruby
store the formula used to build the keg in the keg
25df0c03d6abd79fbc103f3be9df38d48bd4f938
<ide><path>Library/Homebrew/cmd/list.rb <ide> def initialize(path) <ide> # dylibs have multiple symlinks and we don't care about them <ide> (pnn.extname == ".dylib" || pnn.extname == ".pc") && !pnn.symlink? <ide> end <add> when ".brew" <add> # Ignore .brew <ide> else <ide> if pn.directory? <ide> if pn.symlink? <ide><path>Library/Homebrew/formula_installer.rb <ide> def install <ide> compute_and_install_dependencies if not_pouring && !ignore_deps? <ide> build <ide> clean <add> <add> # Store the formula used to build the keg in the keg. <add> s = formula.path.read.gsub(/ bottle do.+?end\n\n?/m, "") <add> brew_prefix = formula.prefix/".brew" <add> brew_prefix.mkdir <add> Pathname(brew_prefix/"#{formula.name}.rb").atomic_write(s) <ide> end <ide> <ide> build_bottle_postinstall if build_bottle? <ide><path>Library/Homebrew/test/test_formula_installer.rb <ide> def test_a_basic_install <ide> bin = HOMEBREW_PREFIX+"bin" <ide> assert_predicate bin, :directory? <ide> assert_equal 3, bin.children.length <add> assert_predicate f.prefix/".brew/testball.rb", :readable? <ide> end <ide> end <ide>
3
Java
Java
fix thread safety for unsubscribe
77a4f63eba1ccba0fa35996220a994b401f9bcc7
<ide><path>src/main/java/rx/internal/operators/OperatorWindowWithSize.java <ide> final class ExactSubscriber extends Subscriber<T> { <ide> final Subscriber<? super Observable<T>> child; <ide> int count; <ide> BufferUntilSubscriber<T> window; <del> Subscription parentSubscription = this; <add> volatile boolean noWindow = true; <add> final Subscription parentSubscription = this; <ide> public ExactSubscriber(Subscriber<? super Observable<T>> child) { <ide> /** <ide> * See https://github.com/ReactiveX/RxJava/issues/1546 <ide> public ExactSubscriber(Subscriber<? super Observable<T>> child) { <ide> @Override <ide> public void call() { <ide> // if no window we unsubscribe up otherwise wait until window ends <del> if(window == null) { <add> if(noWindow) { <ide> parentSubscription.unsubscribe(); <ide> } <ide> } <ide> public void onStart() { <ide> @Override <ide> public void onNext(T t) { <ide> if (window == null) { <add> noWindow = false; <ide> window = BufferUntilSubscriber.create(); <ide> child.onNext(window); <ide> } <ide> window.onNext(t); <ide> if (++count % size == 0) { <ide> window.onCompleted(); <ide> window = null; <add> noWindow = true; <ide> if (child.isUnsubscribed()) { <ide> parentSubscription.unsubscribe(); <ide> return; <ide> final class InexactSubscriber extends Subscriber<T> { <ide> final Subscriber<? super Observable<T>> child; <ide> int count; <ide> final List<CountedSubject<T>> chunks = new LinkedList<CountedSubject<T>>(); <del> Subscription parentSubscription = this; <add> final Subscription parentSubscription = this; <ide> <ide> public InexactSubscriber(Subscriber<? super Observable<T>> child) { <ide> /**
1
Javascript
Javascript
fix lint errors
5cf9ed238e9dba5e0cdd177170be7161fcc39bb4
<ide><path>src/update-process-env.js <ide> const ENVIRONMENT_VARIABLES_TO_PRESERVE = new Set([ <ide> <ide> const PLATFORMS_KNOWN_TO_WORK = new Set(['darwin', 'linux']); <ide> <del> <ide> // Shell command that returns env var=value lines separated by \0s so that <ide> // newlines are handled properly. Note: need to use %c to inject the \0s <ide> // to work with some non GNU awks. <del>const ENV_COMMAND = 'command awk \'BEGIN{for(v in ENVIRON) printf("%s=%s%c", v, ENVIRON[v], 0)}\'' <add>const ENV_COMMAND = <add> 'command awk \'BEGIN{for(v in ENVIRON) printf("%s=%s%c", v, ENVIRON[v], 0)}\''; <ide> <ide> async function updateProcessEnv(launchEnv) { <ide> let envToAssign; <ide> async function getEnvFromShell(env) { <ide> console.log( <ide> 'warning: ' + <ide> env.SHELL + <del> ' -ilc "' + ENV_COMMAND + '" failed with signal (' + <add> ' -ilc "' + <add> ENV_COMMAND + <add> '" failed with signal (' + <ide> error.signal + <ide> ')' <ide> );
1
Text
Text
add pr 5376 to release notes
af460d2b6906ebce0680105f36eeada867a35d7e
<ide><path>docs/topics/release-notes.md <ide> You can determine your currently installed version using `pip freeze`: <ide> <ide> ## 3.6.x series <ide> <add>### 3.6.5 <add> <add>* Fix `DjangoModelPermissions` to ensure user authentication before calling the view's `get_queryset()` method. As a side effect, this changes the order of the HTTP method permissions and authentication checks, and 405 responses will only be returned when authenticated. If you want to replicate the old behavior, see the PR for details. [#5376][gh5376] <add> <ide> ### 3.6.4 <ide> <ide> **Date**: [21st August 2017][3.6.4-milestone] <ide> For older release notes, [please see the version 2.x documentation][old-release- <ide> [gh5147]: https://github.com/encode/django-rest-framework/issues/5147 <ide> [gh5131]: https://github.com/encode/django-rest-framework/issues/5131 <ide> <del> <del> <add><!-- 3.6.5 --> <add>[gh5376]: https://github.com/encode/django-rest-framework/issues/5376
1
Text
Text
add warnings for translations that need update
cfa00dc9d45917ff27e0e2c0048d25823173a34a
<ide><path>threejs/lessons/ru/threejs-fundamentals.md <ide> Title: Основы Three.js <ide> Description: Твой первый урок по Three.js начинаетсся с основ <ide> TOC: Базовые принципы <ide> <add>{{{warning msgId="updateNeeded"}}} <add> <ide> Это первая статья в серии статей о three.js. <ide> [Three.js](http://threejs.org) это 3D-библиотека, которая максимально <ide> упрощает создание 3D-контента на веб-странице. <ide><path>threejs/lessons/ru/threejs-optimize-lots-of-objects-animated.md <ide> Title: Three.js Оптимизация большого количества а <ide> Description: Анимированные объединенные объекты с морфтаргетами <ide> TOC: Оптимизация множества анимированных объектов <ide> <add>{{{warning msgId="updateNeeded"}}} <add> <ide> Эта статья является продолжением [статьи об оптимизации множества объектов <ide> ](threejs-optimize-lots-of-objects.html). Если вы еще не прочитали это, пожалуйста, прочитайте его, прежде чем продолжить. <ide>
2
Text
Text
remove double spaces in code examples
70b302b189dbe9f90e3b081fa540c909a43ba8d0
<ide><path>guides/source/2_3_release_notes.md <ide> before_save :update_credit_rating, :if => :active, <ide> Rails now has a `:having` option on find (as well as on `has_many` and `has_and_belongs_to_many` associations) for filtering records in grouped finds. As those with heavy SQL backgrounds know, this allows filtering based on grouped results: <ide> <ide> ```ruby <del>developers = Developer.find(:all, :group => "salary", <del> :having => "sum(salary) > 10000", :select => "salary") <add>developers = Developer.find(:all, :group => "salary", <add> :having => "sum(salary) > 10000", :select => "salary") <ide> ``` <ide> <ide> * Lead Contributor: [Emilio Tagua](http://github.com/miloops) <ide><path>guides/source/active_record_querying.md <ide> One important caveat is that `default_scope` will be overridden by <ide> <ide> ```ruby <ide> class User < ActiveRecord::Base <del> default_scope { where state: 'pending' } <add> default_scope { where state: 'pending' } <ide> scope :active, -> { where state: 'active' } <ide> scope :inactive, -> { where state: 'inactive' } <ide> end <ide><path>guides/source/form_helpers.md <ide> end <ide> :name => 'John Doe', <ide> :addresses_attributes => { <ide> '0' => { <del> :kind => 'Home', <add> :kind => 'Home', <ide> :street => '221b Baker Street', <ide> }, <ide> '1' => { <ide><path>guides/source/i18n.md <ide> end <ide> # in your /etc/hosts file to try this out locally <ide> def extract_locale_from_tld <ide> parsed_locale = request.host.split('.').last <del> I18n.available_locales.include?(parsed_locale.to_sym) ? parsed_locale : nil <add> I18n.available_locales.include?(parsed_locale.to_sym) ? 
parsed_locale : nil <ide> end <ide> ``` <ide> <ide><path>guides/source/initialization.md <ide> This file is as follows: <ide> <ide> ```ruby <ide> #!/usr/bin/env ruby <del>APP_PATH = File.expand_path('../../config/application', __FILE__) <del>require File.expand_path('../../config/boot', __FILE__) <add>APP_PATH = File.expand_path('../../config/application', __FILE__) <add>require File.expand_path('../../config/boot', __FILE__) <ide> require 'rails/commands' <ide> ``` <ide> <ide> The `options[:config]` value defaults to `config.ru` which contains this: <ide> ```ruby <ide> # This file is used by Rack-based servers to start the application. <ide> <del>require ::File.expand_path('../config/environment', __FILE__) <add>require ::File.expand_path('../config/environment', __FILE__) <ide> run <%= app_const %> <ide> ``` <ide> <ide> app = eval "Rack::Builder.new {( " + cfgfile + "\n )}.to_app", <ide> The `initialize` method of `Rack::Builder` will take the block here and execute it within an instance of `Rack::Builder`. This is where the majority of the initialization process of Rails happens. The `require` line for `config/environment.rb` in `config.ru` is the first to run: <ide> <ide> ```ruby <del>require ::File.expand_path('../config/environment', __FILE__) <add>require ::File.expand_path('../config/environment', __FILE__) <ide> ``` <ide> <ide> ### `config/environment.rb` <ide> def self.run(app, options={}) <ide> else <ide> server.register('/', Rack::Handler::Mongrel.new(app)) <ide> end <del> yield server if block_given? <add> yield server if block_given? 
<ide> server.run.join <ide> end <ide> ``` <ide><path>guides/source/migrations.md <ide> end <ide> # app/models/product.rb <ide> <ide> class Product < ActiveRecord::Base <del> validates :flag, inclusion: { in: [true, false] } <add> validates :flag, inclusion: { in: [true, false] } <ide> validates :fuzz, presence: true <ide> end <ide> ``` <ide><path>guides/source/rails_on_rack.md <ide> To use `rackup` instead of Rails' `rails server`, you can put the following insi <ide> <ide> ```ruby <ide> # Rails.root/config.ru <del>require ::File.expand_path('../config/environment', __FILE__) <add>require ::File.expand_path('../config/environment', __FILE__) <ide> <ide> use Rack::Debugger <ide> use Rack::ContentLength
7
PHP
PHP
add support for setting footer of slack attachment
f996b9e9627c776b1f385a8f4441f411be7a167b
<ide><path>src/Illuminate/Notifications/Channels/SlackWebhookChannel.php <ide> protected function attachments(SlackMessage $message) <ide> 'title_link' => $attachment->url, <ide> 'fields' => $this->fields($attachment), <ide> 'mrkdwn_in' => $attachment->markdown, <add> 'footer' => $attachment->footer, <add> 'ts' => $attachment->timestamp <ide> ]); <ide> })->all(); <ide> } <ide><path>src/Illuminate/Notifications/Messages/SlackAttachment.php <ide> <ide> namespace Illuminate\Notifications\Messages; <ide> <add>use Carbon\Carbon; <add> <ide> class SlackAttachment <ide> { <ide> /** <ide> class SlackAttachment <ide> */ <ide> public $markdown; <ide> <add> /** <add> * The attachment's footer. <add> * <add> * @var string <add> */ <add> public $footer; <add> <add> /** <add> * The attachment's timestamp. <add> * <add> * @var int <add> */ <add> public $timestamp; <add> <ide> /** <ide> * Set the title of the attachment. <ide> * <ide> public function markdown(array $fields) <ide> <ide> return $this; <ide> } <add> <add> /** <add> * Set the footer content. <add> * <add> * @param string $footer <add> * @return $this <add> */ <add> public function footer($footer) <add> { <add> $this->footer = $footer; <add> <add> return $this; <add> } <add> <add> /** <add> * Set the timestamp. <add> * <add> * @param Carbon $timestamp <add> * @return $this <add> */ <add> public function timestamp(Carbon $timestamp) <add> { <add> $this->timestamp = $timestamp->timestamp; <add> <add> return $this; <add> } <ide> }
2
PHP
PHP
parse cookie values "{}" & "[]" as array
db63ba2d8f95e67b6b212860f0310ed65ab4db6a
<ide><path>lib/Cake/Controller/Component/CookieComponent.php <ide> protected function _explode($string) { <ide> $first = substr($string, 0, 1); <ide> if ($first === '{' || $first === '[') { <ide> $ret = json_decode($string, true); <del> return ($ret) ? $ret : $string; <add> return ($ret !== null) ? $ret : $string; <ide> } <ide> $array = array(); <ide> foreach (explode(',', $string) as $pair) {
1
Javascript
Javascript
update configuration snapshots
85b5f2dbc43a3f1a5a5495663c2abc15215d3b4a
<ide><path>test/Defaults.unittest.js <ide> describe("Defaults", () => { <ide> "nodeEnv": false, <ide> "portableRecords": false, <ide> "providedExports": true, <add> "realContentHash": false, <ide> "removeAvailableModules": false, <ide> "removeEmptyChunks": true, <ide> "runtimeChunk": false, <ide> describe("Defaults", () => { <ide> + "moduleIds": "deterministic", <ide> + "nodeEnv": "production", <ide> <add> - "realContentHash": false, <add> + "realContentHash": true, <add> <ide> - "enforceSizeThreshold": 30000, <ide> - "hidePathInfo": false, <ide> - "maxAsyncRequests": Infinity, <ide> describe("Defaults", () => { <ide> + "moduleIds": "deterministic", <ide> + "nodeEnv": "production", <ide> <add> - "realContentHash": false, <add> + "realContentHash": true, <add> <ide> - "enforceSizeThreshold": 30000, <ide> - "hidePathInfo": false, <ide> - "maxAsyncRequests": Infinity, <ide><path>test/Validation.test.js <ide> describe("Validation", () => { <ide> expect(msg).toMatchInlineSnapshot(` <ide> "Invalid configuration object. Webpack has been initialized using a configuration object that does not match the API schema. <ide> - configuration.optimization has an unknown property 'hashedModuleIds'. These properties are valid: <del> object { checkWasmTypes?, chunkIds?, concatenateModules?, emitOnErrors?, flagIncludedChunks?, innerGraph?, mangleExports?, mangleWasmImports?, mergeDuplicateChunks?, minimize?, minimizer?, moduleIds?, noEmitOnErrors?, nodeEnv?, portableRecords?, providedExports?, removeAvailableModules?, removeEmptyChunks?, runtimeChunk?, sideEffects?, splitChunks?, usedExports? } <add> object { checkWasmTypes?, chunkIds?, concatenateModules?, emitOnErrors?, flagIncludedChunks?, innerGraph?, mangleExports?, mangleWasmImports?, mergeDuplicateChunks?, minimize?, minimizer?, moduleIds?, noEmitOnErrors?, nodeEnv?, portableRecords?, providedExports?, realContentHash?, removeAvailableModules?, removeEmptyChunks?, runtimeChunk?, sideEffects?, splitChunks?, usedExports? 
} <ide> -> Enables/Disables integrated optimizations. <ide> Did you mean optimization.moduleIds: \\"hashed\\" (BREAKING CHANGE since webpack 5)?" <ide> `) <ide> describe("Validation", () => { <ide> expect(msg).toMatchInlineSnapshot(` <ide> "Invalid configuration object. Webpack has been initialized using a configuration object that does not match the API schema. <ide> - configuration.optimization has an unknown property 'namedChunks'. These properties are valid: <del> object { checkWasmTypes?, chunkIds?, concatenateModules?, emitOnErrors?, flagIncludedChunks?, innerGraph?, mangleExports?, mangleWasmImports?, mergeDuplicateChunks?, minimize?, minimizer?, moduleIds?, noEmitOnErrors?, nodeEnv?, portableRecords?, providedExports?, removeAvailableModules?, removeEmptyChunks?, runtimeChunk?, sideEffects?, splitChunks?, usedExports? } <add> object { checkWasmTypes?, chunkIds?, concatenateModules?, emitOnErrors?, flagIncludedChunks?, innerGraph?, mangleExports?, mangleWasmImports?, mergeDuplicateChunks?, minimize?, minimizer?, moduleIds?, noEmitOnErrors?, nodeEnv?, portableRecords?, providedExports?, realContentHash?, removeAvailableModules?, removeEmptyChunks?, runtimeChunk?, sideEffects?, splitChunks?, usedExports? } <ide> -> Enables/Disables integrated optimizations. <ide> Did you mean optimization.chunkIds: \\"named\\" (BREAKING CHANGE since webpack 5)?" <ide> `) <ide> describe("Validation", () => { <ide> expect(msg).toMatchInlineSnapshot(` <ide> "Invalid configuration object. Webpack has been initialized using a configuration object that does not match the API schema. <ide> - configuration.optimization has an unknown property 'occurrenceOrder'. 
These properties are valid: <del> object { checkWasmTypes?, chunkIds?, concatenateModules?, emitOnErrors?, flagIncludedChunks?, innerGraph?, mangleExports?, mangleWasmImports?, mergeDuplicateChunks?, minimize?, minimizer?, moduleIds?, noEmitOnErrors?, nodeEnv?, portableRecords?, providedExports?, removeAvailableModules?, removeEmptyChunks?, runtimeChunk?, sideEffects?, splitChunks?, usedExports? } <add> object { checkWasmTypes?, chunkIds?, concatenateModules?, emitOnErrors?, flagIncludedChunks?, innerGraph?, mangleExports?, mangleWasmImports?, mergeDuplicateChunks?, minimize?, minimizer?, moduleIds?, noEmitOnErrors?, nodeEnv?, portableRecords?, providedExports?, realContentHash?, removeAvailableModules?, removeEmptyChunks?, runtimeChunk?, sideEffects?, splitChunks?, usedExports? } <ide> -> Enables/Disables integrated optimizations. <ide> Did you mean optimization.chunkIds: \\"size\\" and optimization.moduleIds: \\"size\\" (BREAKING CHANGE since webpack 5)?" <ide> `)
2
Javascript
Javascript
defer useeffect until after paint
11d0781eea7dfa65e0ea9e54d18ff937122f9524
<ide><path>packages/react-reconciler/src/ReactFiberCommitWork.js <ide> import { <ide> requestCurrentTime, <ide> scheduleWork, <ide> } from './ReactFiberScheduler'; <add>import { <add> NoEffect as NoHookEffect, <add> UnmountSnapshot, <add> UnmountMutation, <add> MountMutation, <add> UnmountLayout, <add> MountLayout, <add> UnmountPassive, <add> MountPassive, <add>} from './ReactHookEffectTags'; <ide> <ide> let didWarnAboutUndefinedSnapshotBeforeUpdate: Set<mixed> | null = null; <ide> if (__DEV__) { <ide> function commitBeforeMutationLifeCycles( <ide> finishedWork: Fiber, <ide> ): void { <ide> switch (finishedWork.tag) { <add> case FunctionComponent: <add> case ForwardRef: { <add> commitHookEffectList(UnmountSnapshot, NoHookEffect, finishedWork); <add> return; <add> } <ide> case ClassComponent: { <ide> if (finishedWork.effectTag & Snapshot) { <ide> if (current !== null) { <ide> function commitBeforeMutationLifeCycles( <ide> } <ide> } <ide> <del>function destroyRemainingEffects(firstToDestroy, stopAt) { <del> let effect = firstToDestroy; <del> do { <del> const destroy = effect.value; <del> if (destroy !== null) { <del> destroy(); <del> } <del> effect = effect.next; <del> } while (effect !== stopAt); <add>function commitHookEffectList( <add> unmountTag: number, <add> mountTag: number, <add> finishedWork: Fiber, <add>) { <add> const updateQueue: FunctionComponentUpdateQueue | null = (finishedWork.updateQueue: any); <add> let lastEffect = updateQueue !== null ? 
updateQueue.lastEffect : null; <add> if (lastEffect !== null) { <add> const firstEffect = lastEffect.next; <add> let effect = firstEffect; <add> do { <add> if ((effect.tag & unmountTag) !== NoHookEffect) { <add> // Unmount <add> const destroy = effect.destroy; <add> effect.destroy = null; <add> if (destroy !== null) { <add> destroy(); <add> } <add> } <add> if ((effect.tag & mountTag) !== NoHookEffect) { <add> // Mount <add> const create = effect.create; <add> const destroy = create(); <add> effect.destroy = typeof destroy === 'function' ? destroy : null; <add> } <add> effect = effect.next; <add> } while (effect !== firstEffect); <add> } <ide> } <ide> <del>function destroyMountedEffects(current) { <del> const oldUpdateQueue: FunctionComponentUpdateQueue | null = (current.updateQueue: any); <del> if (oldUpdateQueue !== null) { <del> const oldLastEffect = oldUpdateQueue.lastEffect; <del> if (oldLastEffect !== null) { <del> const oldFirstEffect = oldLastEffect.next; <del> destroyRemainingEffects(oldFirstEffect, oldFirstEffect); <del> } <del> } <add>export function commitPassiveHookEffects(finishedWork: Fiber): void { <add> commitHookEffectList(UnmountPassive, NoHookEffect, finishedWork); <add> commitHookEffectList(NoHookEffect, MountPassive, finishedWork); <ide> } <ide> <ide> function commitLifeCycles( <ide> function commitLifeCycles( <ide> switch (finishedWork.tag) { <ide> case FunctionComponent: <ide> case ForwardRef: { <del> const updateQueue: FunctionComponentUpdateQueue | null = (finishedWork.updateQueue: any); <del> if (updateQueue !== null) { <del> // Mount new effects and destroy the old ones by comparing to the <del> // current list of effects. This could be a bit simpler if we avoided <del> // the need to compare to the previous effect list by transferring the <del> // old `destroy` method to the new effect during the render phase. <del> // That's how I originally implemented it, but it requires an additional <del> // field on the effect object. 
<del> // <del> // This supports removing effects from the end of the list. If we adopt <del> // the constraint that hooks are append only, that would also save a bit <del> // on code size. <del> const newLastEffect = updateQueue.lastEffect; <del> if (newLastEffect !== null) { <del> const newFirstEffect = newLastEffect.next; <del> let oldLastEffect = null; <del> if (current !== null) { <del> const oldUpdateQueue: FunctionComponentUpdateQueue | null = (current.updateQueue: any); <del> if (oldUpdateQueue !== null) { <del> oldLastEffect = oldUpdateQueue.lastEffect; <del> } <del> } <del> if (oldLastEffect !== null) { <del> const oldFirstEffect = oldLastEffect.next; <del> let newEffect = newFirstEffect; <del> let oldEffect = oldFirstEffect; <del> <del> // Before mounting the new effects, unmount all the old ones. <del> do { <del> if (oldEffect !== null) { <del> if (newEffect.inputs !== oldEffect.inputs) { <del> const destroy = oldEffect.value; <del> if (destroy !== null) { <del> destroy(); <del> } <del> } <del> oldEffect = oldEffect.next; <del> if (oldEffect === oldFirstEffect) { <del> oldEffect = null; <del> } <del> } <del> newEffect = newEffect.next; <del> } while (newEffect !== newFirstEffect); <del> <del> // Unmount any remaining effects in the old list that do not <del> // appear in the new one. <del> if (oldEffect !== null) { <del> destroyRemainingEffects(oldEffect, oldFirstEffect); <del> } <del> <del> // Now loop through the list again to mount the new effects <del> oldEffect = oldFirstEffect; <del> do { <del> const create = newEffect.value; <del> if (oldEffect !== null) { <del> if (newEffect.inputs !== oldEffect.inputs) { <del> const newDestroy = create(); <del> newEffect.value = <del> typeof newDestroy === 'function' ? 
newDestroy : null; <del> } else { <del> newEffect.value = oldEffect.value; <del> } <del> oldEffect = oldEffect.next; <del> if (oldEffect === oldFirstEffect) { <del> oldEffect = null; <del> } <del> } else { <del> const newDestroy = create(); <del> newEffect.value = <del> typeof newDestroy === 'function' ? newDestroy : null; <del> } <del> newEffect = newEffect.next; <del> } while (newEffect !== newFirstEffect); <del> } else { <del> let newEffect = newFirstEffect; <del> do { <del> const create = newEffect.value; <del> const newDestroy = create(); <del> newEffect.value = <del> typeof newDestroy === 'function' ? newDestroy : null; <del> newEffect = newEffect.next; <del> } while (newEffect !== newFirstEffect); <del> } <del> } else if (current !== null) { <del> // There are no effects, which means all current effects must <del> // be destroyed <del> destroyMountedEffects(current); <del> } <del> <del> const callbackList = updateQueue.callbackList; <add> commitHookEffectList(UnmountLayout, MountLayout, finishedWork); <add> const newUpdateQueue: FunctionComponentUpdateQueue | null = (finishedWork.updateQueue: any); <add> if (newUpdateQueue !== null) { <add> const callbackList = newUpdateQueue.callbackList; <ide> if (callbackList !== null) { <del> updateQueue.callbackList = null; <add> newUpdateQueue.callbackList = null; <ide> for (let i = 0; i < callbackList.length; i++) { <ide> const update = callbackList[i]; <ide> // Assume this is non-null, since otherwise it would not be part <ide> function commitLifeCycles( <ide> callback(); <ide> } <ide> } <del> } else if (current !== null) { <del> // There are no effects, which means all current effects must <del> // be destroyed <del> destroyMountedEffects(current); <ide> } <ide> break; <ide> } <ide> function commitUnmount(current: Fiber): void { <ide> const firstEffect = lastEffect.next; <ide> let effect = firstEffect; <ide> do { <del> const destroy = effect.value; <add> const destroy = effect.destroy; <ide> if (destroy !== null) { 
<ide> safelyCallDestroy(current, destroy); <ide> } <ide> function commitDeletion(current: Fiber): void { <ide> <ide> function commitWork(current: Fiber | null, finishedWork: Fiber): void { <ide> if (!supportsMutation) { <add> switch (finishedWork.tag) { <add> case FunctionComponent: <add> case ForwardRef: { <add> commitHookEffectList(UnmountMutation, MountMutation, finishedWork); <add> return; <add> } <add> } <add> <ide> commitContainer(finishedWork); <ide> return; <ide> } <ide> <ide> switch (finishedWork.tag) { <add> case FunctionComponent: <add> case ForwardRef: { <add> commitHookEffectList(UnmountMutation, MountMutation, finishedWork); <add> return; <add> } <ide> case ClassComponent: { <ide> return; <ide> } <ide><path>packages/react-reconciler/src/ReactFiberDispatcher.js <ide> <ide> import {readContext} from './ReactFiberNewContext'; <ide> import { <del> useState, <del> useReducer, <del> useEffect, <add> useAPI, <ide> useCallback, <add> useContext, <add> useEffect, <add> useLayoutEffect, <ide> useMemo, <add> useMutationEffect, <add> useReducer, <ide> useRef, <del> useAPI, <add> useState, <ide> } from './ReactFiberHooks'; <ide> <ide> export const Dispatcher = { <ide> readContext, <del> useState, <del> useReducer, <del> useEffect, <add> useAPI, <ide> useCallback, <add> useContext, <add> useEffect, <add> useLayoutEffect, <ide> useMemo, <add> useMutationEffect, <add> useReducer, <ide> useRef, <del> useAPI, <add> useState, <ide> }; <ide><path>packages/react-reconciler/src/ReactFiberHooks.js <ide> * @flow <ide> */ <ide> <add>import type {ReactContext} from 'shared/ReactTypes'; <ide> import type {Fiber} from './ReactFiber'; <ide> import type {ExpirationTime} from './ReactFiberExpirationTime'; <add>import type {HookEffectTag} from './ReactHookEffectTags'; <ide> <ide> import {NoWork} from './ReactFiberExpirationTime'; <del>import {Callback as CallbackEffect} from 'shared/ReactSideEffectTags'; <add>import {readContext} from './ReactFiberNewContext'; <add>import { <add> 
Snapshot as SnapshotEffect, <add> Update as UpdateEffect, <add> Callback as CallbackEffect, <add> Passive as PassiveEffect, <add>} from 'shared/ReactSideEffectTags'; <add>import { <add> NoEffect as NoHookEffect, <add> UnmountSnapshot, <add> UnmountMutation, <add> MountMutation, <add> MountLayout, <add> UnmountPassive, <add> MountPassive, <add>} from './ReactHookEffectTags'; <ide> import { <ide> scheduleWork, <ide> computeExpirationForFiber, <ide> type Hook = { <ide> }; <ide> <ide> type Effect = { <del> // For an unmounted effect, this points to the effect constructor. Once it's <del> // mounted, it points to a destroy function (or null). I've opted to reuse <del> // the same field to save memory. <del> value: any, <add> tag: HookEffectTag, <add> create: () => mixed, <add> destroy: (() => mixed) | null, <ide> inputs: Array<mixed>, <ide> next: Effect, <ide> }; <ide> export function finishHooks( <ide> renderedWork.updateQueue = (componentUpdateQueue: any); <ide> } <ide> <add> const didRenderTooFewHooks = <add> currentHook !== null && currentHook.next !== null; <add> <ide> renderExpirationTime = NoWork; <ide> currentlyRenderingFiber = null; <ide> <ide> export function finishHooks( <ide> // renderPhaseUpdates = null; <ide> // numberOfReRenders = 0; <ide> <add> invariant( <add> !didRenderTooFewHooks, <add> 'Rendered fewer hooks than expected. This may be caused by an accidental ' + <add> 'early return statement.', <add> ); <add> <ide> return children; <ide> } <ide> <ide> function basicStateReducer<S>(state: S, action: BasicStateAction<S>): S { <ide> return typeof action === 'function' ? 
action(state) : action; <ide> } <ide> <add>export function useContext<T>( <add> context: ReactContext<T>, <add> observedBits: void | number | boolean, <add>): T { <add> // Ensure we're in a functional component (class components support only the <add> // .unstable_read() form) <add> resolveCurrentlyRenderingFiber(); <add> return readContext(context, observedBits); <add>} <add> <ide> export function useState<S>( <ide> initialState: S | (() => S), <ide> ): [S, Dispatch<S, BasicStateAction<S>>] { <ide> function pushCallback(workInProgress: Fiber, update: Update<any, any>): void { <ide> workInProgress.effectTag |= CallbackEffect; <ide> } <ide> <del>function pushEffect(value, inputs) { <add>function pushEffect(tag, create, destroy, inputs) { <ide> const effect: Effect = { <del> value, <add> tag, <add> create, <add> destroy, <ide> inputs, <ide> // Circular <ide> next: (null: any), <ide> export function useRef<T>(initialValue: T): {current: T} { <ide> return ref; <ide> } <ide> <add>export function useMutationEffect( <add> create: () => mixed, <add> inputs: Array<mixed> | void | null, <add>): void { <add> useEffectImpl( <add> SnapshotEffect | UpdateEffect, <add> UnmountSnapshot | MountMutation, <add> create, <add> inputs, <add> ); <add>} <add> <add>export function useLayoutEffect( <add> create: () => mixed, <add> inputs: Array<mixed> | void | null, <add>): void { <add> useEffectImpl(UpdateEffect, UnmountMutation | MountLayout, create, inputs); <add>} <add> <ide> export function useEffect( <ide> create: () => mixed, <ide> inputs: Array<mixed> | void | null, <ide> ): void { <add> useEffectImpl( <add> UpdateEffect | PassiveEffect, <add> UnmountPassive | MountPassive, <add> create, <add> inputs, <add> ); <add>} <add> <add>function useEffectImpl(fiberEffectTag, hookEffectTag, create, inputs): void { <ide> currentlyRenderingFiber = resolveCurrentlyRenderingFiber(); <ide> workInProgressHook = createWorkInProgressHook(); <ide> <ide> export function useEffect( <ide> if (currentHook 
!== null) { <ide> const prevEffect = currentHook.memoizedState; <ide> const prevInputs = prevEffect.inputs; <del> if (inputsAreEqual(nextInputs, prevInputs)) { <del> nextEffect = pushEffect(prevEffect.value, prevInputs); <del> } else { <del> nextEffect = pushEffect(create, nextInputs); <del> } <add> nextEffect = pushEffect( <add> inputsAreEqual(nextInputs, prevInputs) ? NoHookEffect : hookEffectTag, <add> create, <add> prevEffect.destroy, <add> nextInputs, <add> ); <ide> } else { <del> nextEffect = pushEffect(create, nextInputs); <add> nextEffect = pushEffect(hookEffectTag, create, null, nextInputs); <ide> } <ide> <del> // TODO: If we decide not to support removing hooks from the end of the list, <del> // we only need to schedule an effect if the inputs changed. <del> currentlyRenderingFiber.effectTag |= CallbackEffect; <ide> workInProgressHook.memoizedState = nextEffect; <add> currentlyRenderingFiber.effectTag |= fiberEffectTag; <ide> } <ide> <ide> export function useAPI<T>( <ide> export function useAPI<T>( <ide> // TODO: I've implemented this on top of useEffect because it's almost the <ide> // same thing, and it would require an equal amount of code. It doesn't seem <ide> // like a common enough use case to justify the additional size. 
<del> useEffect(() => { <del> if (typeof ref === 'function') { <del> const refCallback = ref; <del> const inst = create(); <del> refCallback(inst); <del> return () => refCallback(null); <del> } else if (ref !== null && ref !== undefined) { <del> const refObject = ref; <del> const inst = create(); <del> refObject.current = inst; <del> return () => { <del> refObject.current = null; <del> }; <del> } <del> }, nextInputs); <add> useEffectImpl( <add> UpdateEffect, <add> UnmountMutation | MountLayout, <add> () => { <add> if (typeof ref === 'function') { <add> const refCallback = ref; <add> const inst = create(); <add> refCallback(inst); <add> return () => refCallback(null); <add> } else if (ref !== null && ref !== undefined) { <add> const refObject = ref; <add> const inst = create(); <add> refObject.current = inst; <add> return () => { <add> refObject.current = null; <add> }; <add> } <add> }, <add> nextInputs, <add> ); <ide> } <ide> <ide> export function useCallback<T>( <ide><path>packages/react-reconciler/src/ReactFiberRoot.js <ide> type BaseFiberRootProperties = {| <ide> // The currently active root fiber. This is the mutable root of the tree. <ide> current: Fiber, <ide> <add> serialEffectCallback: (() => mixed) | null, <add> serialEffectCallbackHandle: any, <add> <ide> // The following priority levels are used to distinguish between 1) <ide> // uncommitted work, 2) uncommitted work that is suspended, and 3) uncommitted <ide> // work that may be unsuspended. 
We choose not to track each individual <ide> export function createFiberRoot( <ide> current: uninitializedFiber, <ide> containerInfo: containerInfo, <ide> pendingChildren: null, <add> serialEffectCallback: null, <add> serialEffectCallbackHandle: null, <ide> <ide> earliestPendingTime: NoWork, <ide> latestPendingTime: NoWork, <ide> export function createFiberRoot( <ide> current: uninitializedFiber, <ide> containerInfo: containerInfo, <ide> pendingChildren: null, <add> serialEffectCallback: null, <add> serialEffectCallbackHandle: null, <ide> <ide> earliestPendingTime: NoWork, <ide> latestPendingTime: NoWork, <ide><path>packages/react-reconciler/src/ReactFiberScheduler.js <ide> import type {Batch, FiberRoot} from './ReactFiberRoot'; <ide> import type {ExpirationTime} from './ReactFiberExpirationTime'; <ide> import type {Interaction} from 'scheduler/src/Tracing'; <ide> <del>import {__interactionsRef, __subscriberRef} from 'scheduler/tracing'; <add>import { <add> __interactionsRef, <add> __subscriberRef, <add> unstable_wrap as Schedule_tracing_wrap, <add>} from 'scheduler/tracing'; <add>import { <add> unstable_scheduleCallback as Schedule_scheduleCallback, <add> unstable_cancelCallback as Schedule_cancelCallback, <add>} from 'scheduler'; <ide> import { <ide> invokeGuardedCallback, <ide> hasCaughtError, <ide> import { <ide> Ref, <ide> Incomplete, <ide> HostEffectMask, <add> Passive, <ide> } from 'shared/ReactSideEffectTags'; <ide> import { <ide> HostRoot, <ide> import { <ide> commitLifeCycles, <ide> commitAttachRef, <ide> commitDetachRef, <add> commitPassiveHookEffects, <ide> } from './ReactFiberCommitWork'; <ide> import {Dispatcher} from './ReactFiberDispatcher'; <ide> <ide> let nextRenderDidError: boolean = false; <ide> let nextEffect: Fiber | null = null; <ide> <ide> let isCommitting: boolean = false; <add>let needsPassiveCommit: boolean = false; <ide> <ide> let legacyErrorBoundariesThatAlreadyFailed: Set<mixed> | null = null; <ide> <ide> function 
commitBeforeMutationLifecycles() { <ide> commitBeforeMutationLifeCycles(current, nextEffect); <ide> } <ide> <del> // Don't cleanup effects yet; <del> // This will be done by commitAllLifeCycles() <ide> nextEffect = nextEffect.nextEffect; <ide> } <ide> <ide> function commitAllLifeCycles( <ide> commitAttachRef(nextEffect); <ide> } <ide> <del> const next = nextEffect.nextEffect; <del> // Ensure that we clean these up so that we don't accidentally keep them. <del> // I'm not actually sure this matters because we can't reset firstEffect <del> // and lastEffect since they're on every node, not just the effectful <del> // ones. So we have to clean everything as we reuse nodes anyway. <del> nextEffect.nextEffect = null; <del> // Ensure that we reset the effectTag here so that we can rely on effect <del> // tags to reason about the current life-cycle. <del> nextEffect = next; <add> if (effectTag & Passive) { <add> needsPassiveCommit = true; <add> } <add> <add> nextEffect = nextEffect.nextEffect; <add> } <add>} <add> <add>function commitPassiveEffects(root: FiberRoot, firstEffect: Fiber): void { <add> // Set this to true to prevent re-entrancy <add> const previousIsRendering = isRendering; <add> isRendering = true; <add> <add> let effect = firstEffect; <add> do { <add> if (effect.effectTag & Passive) { <add> let didError = false; <add> let error; <add> if (__DEV__) { <add> invokeGuardedCallback(null, commitPassiveHookEffects, null, effect); <add> if (hasCaughtError()) { <add> didError = true; <add> error = clearCaughtError(); <add> } <add> } else { <add> try { <add> commitPassiveHookEffects(effect); <add> } catch (e) { <add> didError = true; <add> error = e; <add> } <add> } <add> if (didError) { <add> captureCommitPhaseError(effect, error); <add> } <add> } <add> effect = effect.nextEffect; <add> } while (effect !== null); <add> <add> isRendering = previousIsRendering; <add> <add> // Check if work was scheduled by one of the effects <add> const rootExpirationTime = 
root.expirationTime; <add> if (rootExpirationTime !== NoWork) { <add> requestWork(root, rootExpirationTime); <ide> } <ide> } <ide> <ide> function markLegacyErrorBoundaryAsFailed(instance: mixed) { <ide> } <ide> <ide> function commitRoot(root: FiberRoot, finishedWork: Fiber): void { <add> const existingSerialEffectCallback = root.serialEffectCallback; <add> const existingSerialEffectCallbackHandle = root.serialEffectCallbackHandle; <add> if (existingSerialEffectCallback !== null) { <add> // A passive callback was scheduled during the previous commit, but it did <add> // not get a chance to flush. Flush it now to ensure serial execution. <add> // This should fire before any new mutations. <add> root.serialEffectCallback = null; <add> if (existingSerialEffectCallbackHandle !== null) { <add> root.serialEffectCallbackHandle = null; <add> Schedule_cancelCallback(existingSerialEffectCallbackHandle); <add> } <add> existingSerialEffectCallback(); <add> } <add> <ide> isWorking = true; <ide> isCommitting = true; <ide> startCommitTimer(); <ide> function commitRoot(root: FiberRoot, finishedWork: Fiber): void { <ide> } <ide> } <ide> <add> if (firstEffect !== null && needsPassiveCommit) { <add> const resolvedFirstEffect = firstEffect; <add> // This commit included a passive effect. These do not need to fire until <add> // after the next paint. Schedule an callback to fire them in an async <add> // event. To ensure serial execution, the callback will be flushed early if <add> // we enter another commit phase before then. <add> needsPassiveCommit = false; <add> let serialEffectCallback; <add> if (enableSchedulerTracing) { <add> // TODO: Avoid this extra callback by mutating the tracing ref directly, <add> // like we do at the beginning of commitRoot. I've opted not to do that <add> // here because that code is still in flux. 
<add> serialEffectCallback = Schedule_tracing_wrap(() => { <add> root.serialEffectCallback = null; <add> commitPassiveEffects(root, resolvedFirstEffect); <add> }); <add> } else { <add> serialEffectCallback = () => { <add> root.serialEffectCallback = null; <add> commitPassiveEffects(root, resolvedFirstEffect); <add> }; <add> } <add> root.serialEffectCallback = serialEffectCallback; <add> root.serialEffectCallbackHandle = Schedule_scheduleCallback( <add> serialEffectCallback, <add> ); <add> } <add> <ide> isCommitting = false; <ide> isWorking = false; <ide> stopCommitLifeCyclesTimer(); <ide> function dispatch( <ide> value: mixed, <ide> expirationTime: ExpirationTime, <ide> ) { <del> invariant( <del> !isWorking || isCommitting, <del> 'dispatch: Cannot dispatch during the render phase.', <del> ); <del> <ide> let fiber = sourceFiber.return; <ide> while (fiber !== null) { <ide> switch (fiber.tag) { <ide><path>packages/react-reconciler/src/ReactHookEffectTags.js <add>/** <add> * Copyright (c) Facebook, Inc. and its affiliates. <add> * <add> * This source code is licensed under the MIT license found in the <add> * LICENSE file in the root directory of this source tree. 
<add> * <add> * @flow <add> */ <add> <add>export type HookEffectTag = number; <add> <add>export const NoEffect = /* */ 0b00000000; <add>export const UnmountSnapshot = /* */ 0b00000010; <add>export const UnmountMutation = /* */ 0b00000100; <add>export const MountMutation = /* */ 0b00001000; <add>export const UnmountLayout = /* */ 0b00010000; <add>export const MountLayout = /* */ 0b00100000; <add>export const MountPassive = /* */ 0b01000000; <add>export const UnmountPassive = /* */ 0b10000000; <ide><path>packages/react-reconciler/src/__tests__/ReactHooks-test.internal.js <ide> let ReactNoop; <ide> let useState; <ide> let useReducer; <ide> let useEffect; <add>let useMutationEffect; <add>let useLayoutEffect; <ide> let useCallback; <ide> let useMemo; <ide> let useRef; <ide> let useAPI; <ide> let forwardRef; <add>let flushPassiveEffects; <ide> <ide> describe('ReactHooks', () => { <ide> beforeEach(() => { <ide> jest.resetModules(); <add> <add> jest.mock('scheduler', () => { <add> let scheduledCallbacks = new Map(); <add> <add> flushPassiveEffects = () => { <add> scheduledCallbacks.forEach(cb => { <add> cb(); <add> }); <add> scheduledCallbacks = new Map(); <add> }; <add> <add> return { <add> unstable_scheduleCallback(callback) { <add> const handle = {}; <add> scheduledCallbacks.set(handle, callback); <add> return handle; <add> }, <add> unstable_cancelCallback(handle) { <add> scheduledCallbacks.delete(handle); <add> }, <add> }; <add> }); <add> <ide> ReactFeatureFlags = require('shared/ReactFeatureFlags'); <ide> ReactFeatureFlags.debugRenderPhaseSideEffectsForStrictMode = false; <ide> ReactFeatureFlags.enableHooks = true; <ide> describe('ReactHooks', () => { <ide> useState = React.useState; <ide> useReducer = React.useReducer; <ide> useEffect = React.useEffect; <add> useMutationEffect = React.useMutationEffect; <add> useLayoutEffect = React.useLayoutEffect; <ide> useCallback = React.useCallback; <ide> useMemo = React.useMemo; <ide> useRef = React.useRef; <ide> 
describe('ReactHooks', () => { <ide> return <Text text={'Count: ' + props.count} />; <ide> } <ide> ReactNoop.render(<Counter count={0} />); <del> expect(ReactNoop.flush()).toEqual(['Count: 0', 'Did commit [0]']); <add> expect(ReactNoop.flush()).toEqual(['Count: 0']); <ide> expect(ReactNoop.getChildren()).toEqual([span('Count: 0')]); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual(['Did commit [0]']); <ide> <ide> ReactNoop.render(<Counter count={1} />); <del> expect(ReactNoop.flush()).toEqual(['Count: 1', 'Did commit [1]']); <add> expect(ReactNoop.flush()).toEqual(['Count: 1']); <ide> expect(ReactNoop.getChildren()).toEqual([span('Count: 1')]); <add> // Effects are deferred until after the commit <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual(['Did commit [1]']); <add> }); <add> <add> it( <add> 'flushes effects serially by flushing old effects before flushing ' + <add> "new ones, if they haven't already fired", <add> () => { <add> function getCommittedText() { <add> const children = ReactNoop.getChildren(); <add> if (children === null) { <add> return null; <add> } <add> return children[0].prop; <add> } <add> <add> function Counter(props) { <add> useEffect(() => { <add> ReactNoop.yield( <add> `Committed state when effect was fired: ${getCommittedText()}`, <add> ); <add> }); <add> return <Text text={props.count} />; <add> } <add> ReactNoop.render(<Counter count={0} />); <add> expect(ReactNoop.flush()).toEqual([0]); <add> expect(ReactNoop.getChildren()).toEqual([span(0)]); <add> <add> // Before the effects have a chance to flush, schedule another update <add> ReactNoop.render(<Counter count={1} />); <add> expect(ReactNoop.flush()).toEqual([ <add> 1, <add> // The previous effect flushes before the host mutations <add> 'Committed state when effect was fired: 0', <add> ]); <add> expect(ReactNoop.getChildren()).toEqual([span(1)]); <add> <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual([ <add> 
'Committed state when effect was fired: 1', <add> ]); <add> }, <add> ); <add> <add> it('updates have async priority', () => { <add> function Counter(props) { <add> const [count, updateCount] = useState('(empty)'); <add> useEffect( <add> () => { <add> ReactNoop.yield(`Schedule update [${props.count}]`); <add> updateCount(props.count); <add> }, <add> [props.count], <add> ); <add> return <Text text={'Count: ' + count} />; <add> } <add> ReactNoop.render(<Counter count={0} />); <add> expect(ReactNoop.flush()).toEqual(['Count: (empty)']); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: (empty)')]); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual(['Schedule update [0]']); <add> expect(ReactNoop.flush()).toEqual(['Count: 0']); <add> <add> ReactNoop.render(<Counter count={1} />); <add> expect(ReactNoop.flush()).toEqual(['Count: 0']); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: 0')]); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual(['Schedule update [1]']); <add> expect(ReactNoop.flush()).toEqual(['Count: 1']); <add> }); <add> <add> it('updates have async priority even if effects are flushed early', () => { <add> function Counter(props) { <add> const [count, updateCount] = useState('(empty)'); <add> useEffect( <add> () => { <add> ReactNoop.yield(`Schedule update [${props.count}]`); <add> updateCount(props.count); <add> }, <add> [props.count], <add> ); <add> return <Text text={'Count: ' + count} />; <add> } <add> ReactNoop.render(<Counter count={0} />); <add> expect(ReactNoop.flush()).toEqual(['Count: (empty)']); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: (empty)')]); <add> <add> // Rendering again should flush the previous commit's effects <add> ReactNoop.render(<Counter count={1} />); <add> ReactNoop.flushThrough([ <add> 'Count: (empty)', <add> 'Schedule update [0]', <add> 'Count: 0', <add> ]); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: (empty)')]); <add> 
<add> expect(ReactNoop.flush()).toEqual(['Schedule update [1]', 'Count: 1']); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: 1')]); <add> }); <add> <add> it( <add> 'in sync mode, useEffect is deferred and updates finish synchronously ' + <add> '(in a single batch)', <add> () => { <add> function Counter(props) { <add> const [count, updateCount] = useState('(empty)'); <add> useEffect( <add> () => { <add> // Update multiple times. These should all be batched together in <add> // a single render. <add> updateCount(props.count); <add> updateCount(props.count); <add> updateCount(props.count); <add> updateCount(props.count); <add> updateCount(props.count); <add> updateCount(props.count); <add> }, <add> [props.count], <add> ); <add> return <Text text={'Count: ' + count} />; <add> } <add> ReactNoop.renderLegacySyncRoot(<Counter count={0} />); <add> // Even in sync mode, effects are deferred until after paint <add> expect(ReactNoop.flush()).toEqual(['Count: (empty)']); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: (empty)')]); <add> // Now fire the effects <add> flushPassiveEffects(); <add> // There were multiple updates, but there should only be a <add> // single render <add> expect(ReactNoop.clearYields()).toEqual(['Count: 0']); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: 0')]); <add> }, <add> ); <add> <add> it('flushSync is not allowed', () => { <add> function Counter(props) { <add> const [count, updateCount] = useState('(empty)'); <add> useEffect( <add> () => { <add> ReactNoop.yield(`Schedule update [${props.count}]`); <add> ReactNoop.flushSync(() => { <add> updateCount(props.count); <add> }); <add> }, <add> [props.count], <add> ); <add> return <Text text={'Count: ' + count} />; <add> } <add> ReactNoop.render(<Counter count={0} />); <add> expect(ReactNoop.flush()).toEqual(['Count: (empty)']); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: (empty)')]); <add> <add> expect(() => { <add> flushPassiveEffects(); <add> 
}).toThrow('flushSync was called from inside a lifecycle method'); <ide> }); <ide> <ide> it('unmounts previous effect', () => { <ide> describe('ReactHooks', () => { <ide> return <Text text={'Count: ' + props.count} />; <ide> } <ide> ReactNoop.render(<Counter count={0} />); <del> expect(ReactNoop.flush()).toEqual(['Count: 0', 'Did create [0]']); <add> expect(ReactNoop.flush()).toEqual(['Count: 0']); <ide> expect(ReactNoop.getChildren()).toEqual([span('Count: 0')]); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual(['Did create [0]']); <ide> <ide> ReactNoop.render(<Counter count={1} />); <del> expect(ReactNoop.flush()).toEqual([ <del> 'Count: 1', <add> expect(ReactNoop.flush()).toEqual(['Count: 1']); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: 1')]); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual([ <ide> 'Did destroy [0]', <ide> 'Did create [1]', <ide> ]); <del> expect(ReactNoop.getChildren()).toEqual([span('Count: 1')]); <ide> }); <ide> <ide> it('unmounts on deletion', () => { <ide> describe('ReactHooks', () => { <ide> return <Text text={'Count: ' + props.count} />; <ide> } <ide> ReactNoop.render(<Counter count={0} />); <del> expect(ReactNoop.flush()).toEqual(['Count: 0', 'Did create [0]']); <add> expect(ReactNoop.flush()).toEqual(['Count: 0']); <ide> expect(ReactNoop.getChildren()).toEqual([span('Count: 0')]); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual(['Did create [0]']); <ide> <ide> ReactNoop.render(null); <ide> expect(ReactNoop.flush()).toEqual(['Did destroy [0]']); <ide> describe('ReactHooks', () => { <ide> return <Text text={'Count: ' + props.count} />; <ide> } <ide> ReactNoop.render(<Counter count={0} />); <del> expect(ReactNoop.flush()).toEqual(['Count: 0', 'Did mount']); <add> expect(ReactNoop.flush()).toEqual(['Count: 0']); <ide> expect(ReactNoop.getChildren()).toEqual([span('Count: 0')]); <add> flushPassiveEffects(); <add> 
expect(ReactNoop.clearYields()).toEqual(['Did mount']); <ide> <ide> ReactNoop.render(<Counter count={1} />); <ide> // No effect, because constructor was hoisted outside render <ide> describe('ReactHooks', () => { <ide> return <Text text={text} />; <ide> } <ide> ReactNoop.render(<Counter label="Count" count={0} />); <del> expect(ReactNoop.flush()).toEqual(['Count: 0', 'Did create [Count: 0]']); <add> expect(ReactNoop.flush()).toEqual(['Count: 0']); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual(['Did create [Count: 0]']); <ide> expect(ReactNoop.getChildren()).toEqual([span('Count: 0')]); <ide> <ide> ReactNoop.render(<Counter label="Count" count={1} />); <ide> // Count changed <del> expect(ReactNoop.flush()).toEqual([ <del> 'Count: 1', <add> expect(ReactNoop.flush()).toEqual(['Count: 1']); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: 1')]); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual([ <ide> 'Did destroy [Count: 0]', <ide> 'Did create [Count: 1]', <ide> ]); <del> expect(ReactNoop.getChildren()).toEqual([span('Count: 1')]); <ide> <ide> ReactNoop.render(<Counter label="Count" count={1} />); <ide> // Nothing changed, so no effect should have fired <ide> expect(ReactNoop.flush()).toEqual(['Count: 1']); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual(null); <ide> expect(ReactNoop.getChildren()).toEqual([span('Count: 1')]); <ide> <ide> ReactNoop.render(<Counter label="Total" count={1} />); <ide> // Label changed <del> expect(ReactNoop.flush()).toEqual([ <del> 'Total: 1', <add> expect(ReactNoop.flush()).toEqual(['Total: 1']); <add> expect(ReactNoop.getChildren()).toEqual([span('Total: 1')]); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual([ <ide> 'Did destroy [Count: 1]', <ide> 'Did create [Total: 1]', <ide> ]); <del> expect(ReactNoop.getChildren()).toEqual([span('Total: 1')]); <ide> }); <ide> <ide> it('multiple effects', () => { <ide> 
describe('ReactHooks', () => { <ide> return <Text text={'Count: ' + props.count} />; <ide> } <ide> ReactNoop.render(<Counter count={0} />); <del> expect(ReactNoop.flush()).toEqual([ <del> 'Count: 0', <add> expect(ReactNoop.flush()).toEqual(['Count: 0']); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: 0')]); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual([ <ide> 'Did commit 1 [0]', <ide> 'Did commit 2 [0]', <ide> ]); <del> expect(ReactNoop.getChildren()).toEqual([span('Count: 0')]); <ide> <ide> ReactNoop.render(<Counter count={1} />); <del> expect(ReactNoop.flush()).toEqual([ <del> 'Count: 1', <add> expect(ReactNoop.flush()).toEqual(['Count: 1']); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: 1')]); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual([ <ide> 'Did commit 1 [1]', <ide> 'Did commit 2 [1]', <ide> ]); <del> expect(ReactNoop.getChildren()).toEqual([span('Count: 1')]); <ide> }); <ide> <ide> it('unmounts all previous effects before creating any new ones', () => { <ide> describe('ReactHooks', () => { <ide> return <Text text={'Count: ' + props.count} />; <ide> } <ide> ReactNoop.render(<Counter count={0} />); <del> expect(ReactNoop.flush()).toEqual([ <del> 'Count: 0', <add> expect(ReactNoop.flush()).toEqual(['Count: 0']); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: 0')]); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual(['Mount A [0]', 'Mount B [0]']); <add> <add> ReactNoop.render(<Counter count={1} />); <add> expect(ReactNoop.flush()).toEqual(['Count: 1']); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: 1')]); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual([ <add> 'Unmount A [0]', <add> 'Unmount B [0]', <add> 'Mount A [1]', <add> 'Mount B [1]', <add> ]); <add> }); <add> <add> it('handles errors on mount', () => { <add> function Counter(props) { <add> useEffect(() => { <add> ReactNoop.yield(`Mount A 
[${props.count}]`); <add> return () => { <add> ReactNoop.yield(`Unmount A [${props.count}]`); <add> }; <add> }); <add> useEffect(() => { <add> ReactNoop.yield('Oops!'); <add> throw new Error('Oops!'); <add> // eslint-disable-next-line no-unreachable <add> ReactNoop.yield(`Mount B [${props.count}]`); <add> return () => { <add> ReactNoop.yield(`Unmount B [${props.count}]`); <add> }; <add> }); <add> return <Text text={'Count: ' + props.count} />; <add> } <add> ReactNoop.render(<Counter count={0} />); <add> expect(ReactNoop.flush()).toEqual(['Count: 0']); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: 0')]); <add> expect(() => flushPassiveEffects()).toThrow('Oops'); <add> expect(ReactNoop.clearYields()).toEqual([ <ide> 'Mount A [0]', <del> 'Mount B [0]', <add> 'Oops!', <add> // Clean up effect A. There's no effect B to clean-up, because it <add> // never mounted. <add> 'Unmount A [0]', <ide> ]); <add> expect(ReactNoop.getChildren()).toEqual([]); <add> }); <add> <add> it('handles errors on update', () => { <add> function Counter(props) { <add> useEffect(() => { <add> ReactNoop.yield(`Mount A [${props.count}]`); <add> return () => { <add> ReactNoop.yield(`Unmount A [${props.count}]`); <add> }; <add> }); <add> useEffect(() => { <add> if (props.count === 1) { <add> ReactNoop.yield('Oops!'); <add> throw new Error('Oops!'); <add> } <add> ReactNoop.yield(`Mount B [${props.count}]`); <add> return () => { <add> ReactNoop.yield(`Unmount B [${props.count}]`); <add> }; <add> }); <add> return <Text text={'Count: ' + props.count} />; <add> } <add> ReactNoop.render(<Counter count={0} />); <add> expect(ReactNoop.flush()).toEqual(['Count: 0']); <ide> expect(ReactNoop.getChildren()).toEqual([span('Count: 0')]); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual(['Mount A [0]', 'Mount B [0]']); <ide> <add> // This update will trigger an errror <ide> ReactNoop.render(<Counter count={1} />); <del> expect(ReactNoop.flush()).toEqual([ <del> 'Count: 1', 
<add> expect(ReactNoop.flush()).toEqual(['Count: 1']); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: 1')]); <add> expect(() => flushPassiveEffects()).toThrow('Oops'); <add> expect(ReactNoop.clearYields()).toEqual([ <ide> 'Unmount A [0]', <ide> 'Unmount B [0]', <ide> 'Mount A [1]', <del> 'Mount B [1]', <add> 'Oops!', <add> // Clean up effect A. There's no effect B to clean-up, because it <add> // never mounted. <add> 'Unmount A [1]', <ide> ]); <add> expect(ReactNoop.getChildren()).toEqual([]); <add> }); <add> <add> it('handles errors on unmount', () => { <add> function Counter(props) { <add> useEffect(() => { <add> ReactNoop.yield(`Mount A [${props.count}]`); <add> return () => { <add> ReactNoop.yield('Oops!'); <add> throw new Error('Oops!'); <add> // eslint-disable-next-line no-unreachable <add> ReactNoop.yield(`Unmount A [${props.count}]`); <add> }; <add> }); <add> useEffect(() => { <add> ReactNoop.yield(`Mount B [${props.count}]`); <add> return () => { <add> ReactNoop.yield(`Unmount B [${props.count}]`); <add> }; <add> }); <add> return <Text text={'Count: ' + props.count} />; <add> } <add> ReactNoop.render(<Counter count={0} />); <add> expect(ReactNoop.flush()).toEqual(['Count: 0']); <add> expect(ReactNoop.getChildren()).toEqual([span('Count: 0')]); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual(['Mount A [0]', 'Mount B [0]']); <add> <add> // This update will trigger an errror <add> ReactNoop.render(<Counter count={1} />); <add> expect(ReactNoop.flush()).toEqual(['Count: 1']); <ide> expect(ReactNoop.getChildren()).toEqual([span('Count: 1')]); <add> expect(() => flushPassiveEffects()).toThrow('Oops'); <add> expect(ReactNoop.clearYields()).toEqual([ <add> 'Oops!', <add> // B unmounts even though an error was thrown in the previous effect <add> 'Unmount B [0]', <add> ]); <add> expect(ReactNoop.getChildren()).toEqual([]); <add> }); <add> }); <add> <add> describe('useMutationEffect and useLayoutEffect', () => { <add> it('fires 
layout effects after the host has been mutated', () => { <add> function getCommittedText() { <add> const children = ReactNoop.getChildren(); <add> if (children === null) { <add> return null; <add> } <add> return children[0].prop; <add> } <add> <add> function Counter(props) { <add> useLayoutEffect(() => { <add> ReactNoop.yield(`Current: ${getCommittedText()}`); <add> }); <add> return <Text text={props.count} />; <add> } <add> <add> ReactNoop.render(<Counter count={0} />); <add> expect(ReactNoop.flush()).toEqual([0, 'Current: 0']); <add> expect(ReactNoop.getChildren()).toEqual([span(0)]); <add> <add> ReactNoop.render(<Counter count={1} />); <add> expect(ReactNoop.flush()).toEqual([1, 'Current: 1']); <add> expect(ReactNoop.getChildren()).toEqual([span(1)]); <add> }); <add> <add> it('fires mutation effects before layout effects', () => { <add> let committedText = '(empty)'; <add> <add> function Counter(props) { <add> useMutationEffect(() => { <add> ReactNoop.yield(`Mount mutation [current: ${committedText}]`); <add> committedText = props.count + ''; <add> return () => { <add> ReactNoop.yield(`Unmount mutation [current: ${committedText}]`); <add> }; <add> }); <add> useLayoutEffect(() => { <add> ReactNoop.yield(`Mount layout [current: ${committedText}]`); <add> return () => { <add> ReactNoop.yield(`Unmount layout [current: ${committedText}]`); <add> }; <add> }); <add> useEffect(() => { <add> ReactNoop.yield(`Mount normal [current: ${committedText}]`); <add> return () => { <add> ReactNoop.yield(`Unmount normal [current: ${committedText}]`); <add> }; <add> }); <add> return null; <add> } <add> <add> ReactNoop.render(<Counter count={0} />); <add> expect(ReactNoop.flush()).toEqual([ <add> 'Mount mutation [current: (empty)]', <add> 'Mount layout [current: 0]', <add> ]); <add> expect(committedText).toEqual('0'); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual(['Mount normal [current: 0]']); <add> <add> // Unmount everything <add> 
ReactNoop.render(null); <add> expect(ReactNoop.flush()).toEqual([ <add> 'Unmount mutation [current: 0]', <add> 'Unmount layout [current: 0]', <add> 'Unmount normal [current: 0]', <add> ]); <add> }); <add> <add> it('force flushes passive effects before firing new mutation effects', () => { <add> let committedText = '(empty)'; <add> <add> function Counter(props) { <add> useMutationEffect(() => { <add> ReactNoop.yield(`Mount mutation [current: ${committedText}]`); <add> committedText = props.count + ''; <add> return () => { <add> ReactNoop.yield(`Unmount mutation [current: ${committedText}]`); <add> }; <add> }); <add> useEffect(() => { <add> ReactNoop.yield(`Mount normal [current: ${committedText}]`); <add> return () => { <add> ReactNoop.yield(`Unmount normal [current: ${committedText}]`); <add> }; <add> }); <add> return null; <add> } <add> <add> ReactNoop.render(<Counter count={0} />); <add> expect(ReactNoop.flush()).toEqual(['Mount mutation [current: (empty)]']); <add> expect(committedText).toEqual('0'); <add> <add> ReactNoop.render(<Counter count={1} />); <add> expect(ReactNoop.flush()).toEqual([ <add> 'Mount normal [current: 0]', <add> 'Unmount mutation [current: 0]', <add> 'Mount mutation [current: 0]', <add> ]); <add> expect(committedText).toEqual('1'); <add> <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual(['Mount normal [current: 1]']); <add> }); <add> <add> it('force flushes passive effects before firing new layout effects', () => { <add> let committedText = '(empty)'; <add> <add> function Counter(props) { <add> useLayoutEffect(() => { <add> // Normally this would go in a mutation effect, but this test <add> // intentionally omits a mutation effect. 
<add> committedText = props.count + ''; <add> <add> ReactNoop.yield(`Mount layout [current: ${committedText}]`); <add> return () => { <add> ReactNoop.yield(`Unmount layout [current: ${committedText}]`); <add> }; <add> }); <add> useEffect(() => { <add> ReactNoop.yield(`Mount normal [current: ${committedText}]`); <add> return () => { <add> ReactNoop.yield(`Unmount normal [current: ${committedText}]`); <add> }; <add> }); <add> return null; <add> } <add> <add> ReactNoop.render(<Counter count={0} />); <add> expect(ReactNoop.flush()).toEqual(['Mount layout [current: 0]']); <add> expect(committedText).toEqual('0'); <add> <add> ReactNoop.render(<Counter count={1} />); <add> expect(ReactNoop.flush()).toEqual([ <add> 'Mount normal [current: 0]', <add> 'Unmount layout [current: 0]', <add> 'Mount layout [current: 1]', <add> ]); <add> expect(committedText).toEqual('1'); <add> <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual(['Mount normal [current: 1]']); <add> }); <add> <add> it('fires all mutation effects before firing any layout effects', () => { <add> let committedA = '(empty)'; <add> let committedB = '(empty)'; <add> <add> function CounterA(props) { <add> useMutationEffect(() => { <add> ReactNoop.yield( <add> `Mount A mutation [A: ${committedA}, B: ${committedB}]`, <add> ); <add> committedA = props.count + ''; <add> return () => { <add> ReactNoop.yield( <add> `Unmount A mutation [A: ${committedA}, B: ${committedB}]`, <add> ); <add> }; <add> }); <add> useLayoutEffect(() => { <add> ReactNoop.yield( <add> `Mount layout A [A: ${committedA}, B: ${committedB}]`, <add> ); <add> return () => { <add> ReactNoop.yield( <add> `Unmount layout A [A: ${committedA}, B: ${committedB}]`, <add> ); <add> }; <add> }); <add> return null; <add> } <add> <add> function CounterB(props) { <add> useMutationEffect(() => { <add> ReactNoop.yield( <add> `Mount B mutation [A: ${committedA}, B: ${committedB}]`, <add> ); <add> committedB = props.count + ''; <add> return () => { 
<add> ReactNoop.yield( <add> `Unmount B mutation [A: ${committedA}, B: ${committedB}]`, <add> ); <add> }; <add> }); <add> useLayoutEffect(() => { <add> ReactNoop.yield( <add> `Mount layout B [A: ${committedA}, B: ${committedB}]`, <add> ); <add> return () => { <add> ReactNoop.yield( <add> `Unmount layout B [A: ${committedA}, B: ${committedB}]`, <add> ); <add> }; <add> }); <add> return null; <add> } <add> <add> ReactNoop.render( <add> <React.Fragment> <add> <CounterA count={0} /> <add> <CounterB count={0} /> <add> </React.Fragment>, <add> ); <add> expect(ReactNoop.flush()).toEqual([ <add> // All mutation effects fire before all layout effects <add> 'Mount A mutation [A: (empty), B: (empty)]', <add> 'Mount B mutation [A: 0, B: (empty)]', <add> 'Mount layout A [A: 0, B: 0]', <add> 'Mount layout B [A: 0, B: 0]', <add> ]); <add> expect([committedA, committedB]).toEqual(['0', '0']); <add> <add> ReactNoop.render( <add> <React.Fragment> <add> <CounterA count={1} /> <add> <CounterB count={1} /> <add> </React.Fragment>, <add> ); <add> expect(ReactNoop.flush()).toEqual([ <add> // Note: This shows that the clean-up function of a layout effect is <add> // fired in the same phase as the set-up function of a mutation. 
<add> 'Unmount A mutation [A: 0, B: 0]', <add> 'Unmount B mutation [A: 0, B: 0]', <add> 'Mount A mutation [A: 0, B: 0]', <add> 'Unmount layout A [A: 1, B: 0]', <add> 'Mount B mutation [A: 1, B: 0]', <add> 'Unmount layout B [A: 1, B: 1]', <add> 'Mount layout A [A: 1, B: 1]', <add> 'Mount layout B [A: 1, B: 1]', <add> ]); <add> expect([committedA, committedB]).toEqual(['1', '1']); <ide> }); <ide> }); <ide> <ide> describe('ReactHooks', () => { <ide> updateC(4); <ide> expect(ReactNoop.flush()).toEqual(['A: 2, B: 3, C: 4']); <ide> expect(ReactNoop.getChildren()).toEqual([span('A: 2, B: 3, C: 4')]); <del> <ide> ReactNoop.render(<App loadC={false} />); <del> expect(ReactNoop.flush()).toEqual(['A: 2, B: 3, C: [not loaded]']); <del> expect(ReactNoop.getChildren()).toEqual([ <del> span('A: 2, B: 3, C: [not loaded]'), <del> ]); <del> <del> updateC(4); <del> // TODO: This hook triggered a re-render even though it's unmounted. <del> // Should we warn? <del> expect(ReactNoop.flush()).toEqual(['A: 2, B: 3, C: [not loaded]']); <del> expect(ReactNoop.getChildren()).toEqual([ <del> span('A: 2, B: 3, C: [not loaded]'), <del> ]); <del> <del> updateB(4); <del> expect(ReactNoop.flush()).toEqual(['A: 2, B: 4, C: [not loaded]']); <del> expect(ReactNoop.getChildren()).toEqual([ <del> span('A: 2, B: 4, C: [not loaded]'), <del> ]); <add> expect(() => ReactNoop.flush()).toThrow( <add> 'Rendered fewer hooks than expected. 
This may be caused by an ' + <add> 'accidental early return statement.', <add> ); <ide> }); <ide> <ide> it('unmount effects', () => { <ide> describe('ReactHooks', () => { <ide> } <ide> <ide> ReactNoop.render(<App showMore={false} />); <del> expect(ReactNoop.flush()).toEqual(['Mount A']); <add> expect(ReactNoop.flush()).toEqual([]); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual(['Mount A']); <ide> <ide> ReactNoop.render(<App showMore={true} />); <del> expect(ReactNoop.flush()).toEqual(['Mount B']); <add> expect(ReactNoop.flush()).toEqual([]); <add> flushPassiveEffects(); <add> expect(ReactNoop.clearYields()).toEqual(['Mount B']); <ide> <ide> ReactNoop.render(<App showMore={false} />); <del> expect(ReactNoop.flush()).toEqual(['Unmount B']); <del> <del> ReactNoop.render(null); <del> expect(ReactNoop.flush()).toEqual(['Unmount A']); <add> expect(() => ReactNoop.flush()).toThrow( <add> 'Rendered fewer hooks than expected. This may be caused by an ' + <add> 'accidental early return statement.', <add> ); <ide> }); <ide> }); <ide> }); <ide><path>packages/react-reconciler/src/__tests__/ReactNewContext-test.internal.js <ide> describe('ReactNewContext', () => { <ide> return {type: 'span', children: [], prop, hidden: false}; <ide> } <ide> <add> function readContext(Context, observedBits) { <add> const dispatcher = <add> React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED.ReactCurrentOwner <add> .currentDispatcher; <add> return dispatcher.readContext(Context, observedBits); <add> } <add> <ide> // We have several ways of reading from context. sharedContextTests runs <ide> // a suite of tests for a given context consumer implementation. 
<ide> sharedContextTests('Context.Consumer', Context => Context.Consumer); <ide> describe('ReactNewContext', () => { <ide> }, <ide> ); <ide> sharedContextTests( <del> 'useContext inside class component', <add> 'readContext(Context) inside class component', <ide> Context => <ide> class Consumer extends React.Component { <ide> render() { <ide> const observedBits = this.props.unstable_observedBits; <del> const contextValue = useContext(Context, observedBits); <add> const contextValue = readContext(Context, observedBits); <ide> const render = this.props.children; <ide> return render(contextValue); <ide> } <ide> describe('ReactNewContext', () => { <ide> return ( <ide> <FooContext.Consumer> <ide> {foo => { <del> const bar = useContext(BarContext); <add> const bar = readContext(BarContext); <ide> return <Text text={`Foo: ${foo}, Bar: ${bar}`} />; <ide> }} <ide> </FooContext.Consumer> <ide> describe('ReactNewContext', () => { <ide> span('Baz: 2'), <ide> ]); <ide> }); <add> <add> it('throws when used in a class component', () => { <add> const Context = React.createContext(0); <add> class Foo extends React.Component { <add> render() { <add> return useContext(Context); <add> } <add> } <add> ReactNoop.render(<Foo />); <add> expect(ReactNoop.flush).toThrow( <add> 'Hooks can only be called inside the body of a functional component.', <add> ); <add> }); <ide> }); <ide> <ide> it('unwinds after errors in complete phase', () => { <ide><path>packages/react/src/React.js <ide> import {lazy} from './ReactLazy'; <ide> import forwardRef from './forwardRef'; <ide> import memo from './memo'; <ide> import { <add> useAPI, <add> useCallback, <ide> useContext, <del> useState, <del> useReducer, <del> useRef, <ide> useEffect, <del> useCallback, <add> useLayoutEffect, <ide> useMemo, <del> useAPI, <add> useMutationEffect, <add> useReducer, <add> useRef, <add> useState, <ide> } from './ReactHooks'; <ide> import { <ide> createElementWithValidation, <ide> if (enableStableConcurrentModeAPIs) { <ide> } 
<ide> <ide> if (enableHooks) { <add> React.useAPI = useAPI; <add> React.useCallback = useCallback; <ide> React.useContext = useContext; <del> React.useState = useState; <del> React.useReducer = useReducer; <del> React.useRef = useRef; <ide> React.useEffect = useEffect; <del> React.useCallback = useCallback; <add> React.useLayoutEffect = useLayoutEffect; <ide> React.useMemo = useMemo; <del> React.useAPI = useAPI; <add> React.useMutationEffect = useMutationEffect; <add> React.useReducer = useReducer; <add> React.useRef = useRef; <add> React.useState = useState; <ide> } <ide> <ide> export default React; <ide><path>packages/react/src/ReactHooks.js <ide> */ <ide> <ide> import type {ReactContext} from 'shared/ReactTypes'; <del> <ide> import invariant from 'shared/invariant'; <ide> <ide> import ReactCurrentOwner from './ReactCurrentOwner'; <ide> export function useContext<T>( <ide> observedBits: number | boolean | void, <ide> ) { <ide> const dispatcher = resolveDispatcher(); <del> return dispatcher.readContext(Context, observedBits); <add> return dispatcher.useContext(Context, observedBits); <ide> } <ide> <ide> export function useState<S>(initialState: S | (() => S)) { <ide> export function useEffect( <ide> return dispatcher.useEffect(create, inputs); <ide> } <ide> <add>export function useMutationEffect( <add> create: () => mixed, <add> inputs: Array<mixed> | void | null, <add>) { <add> const dispatcher = resolveDispatcher(); <add> return dispatcher.useMutationEffect(create, inputs); <add>} <add> <add>export function useLayoutEffect( <add> create: () => mixed, <add> inputs: Array<mixed> | void | null, <add>) { <add> const dispatcher = resolveDispatcher(); <add> return dispatcher.useLayoutEffect(create, inputs); <add>} <add> <ide> export function useCallback( <ide> callback: () => mixed, <ide> inputs: Array<mixed> | void | null, <ide><path>packages/shared/ReactSideEffectTags.js <ide> export type SideEffectTag = number; <ide> <ide> // Don't change these two values. 
They're used by React Dev Tools. <del>export const NoEffect = /* */ 0b00000000000; <del>export const PerformedWork = /* */ 0b00000000001; <add>export const NoEffect = /* */ 0b000000000000; <add>export const PerformedWork = /* */ 0b000000000001; <ide> <ide> // You can change the rest (and add more). <del>export const Placement = /* */ 0b00000000010; <del>export const Update = /* */ 0b00000000100; <del>export const PlacementAndUpdate = /* */ 0b00000000110; <del>export const Deletion = /* */ 0b00000001000; <del>export const ContentReset = /* */ 0b00000010000; <del>export const Callback = /* */ 0b00000100000; <del>export const DidCapture = /* */ 0b00001000000; <del>export const Ref = /* */ 0b00010000000; <del>export const Snapshot = /* */ 0b00100000000; <add>export const Placement = /* */ 0b000000000010; <add>export const Update = /* */ 0b000000000100; <add>export const PlacementAndUpdate = /* */ 0b000000000110; <add>export const Deletion = /* */ 0b000000001000; <add>export const ContentReset = /* */ 0b000000010000; <add>export const Callback = /* */ 0b000000100000; <add>export const DidCapture = /* */ 0b000001000000; <add>export const Ref = /* */ 0b000010000000; <add>export const Snapshot = /* */ 0b000100000000; <add>export const Passive = /* */ 0b001000000000; <ide> <del>// Update & Callback & Ref & Snapshot <del>export const LifecycleEffectMask = /* */ 0b00110100100; <add>// Passive & Update & Callback & Ref & Snapshot <add>export const LifecycleEffectMask = /* */ 0b001110100100; <ide> <ide> // Union of all host effects <del>export const HostEffectMask = /* */ 0b00111111111; <add>export const HostEffectMask = /* */ 0b001111111111; <ide> <del>export const Incomplete = /* */ 0b01000000000; <del>export const ShouldCapture = /* */ 0b10000000000; <add>export const Incomplete = /* */ 0b010000000000; <add>export const ShouldCapture = /* */ 0b100000000000;
11
Javascript
Javascript
parse urls using the browser's dom api
b99d064b6ddbcc9f59ea45004279833e9ea82928
<ide><path>angularFiles.js <ide> angularFiles = { <ide> 'src/ng/httpBackend.js', <ide> 'src/ng/locale.js', <ide> 'src/ng/timeout.js', <add> 'src/ng/urlUtils.js', <ide> <ide> 'src/ng/filter.js', <ide> 'src/ng/filter/filter.js', <ide><path>src/AngularPublic.js <ide> function publishExternalAPI(angular){ <ide> $sniffer: $SnifferProvider, <ide> $templateCache: $TemplateCacheProvider, <ide> $timeout: $TimeoutProvider, <del> $window: $WindowProvider <add> $window: $WindowProvider, <add> $$urlUtils: $$UrlUtilsProvider <ide> }); <ide> } <ide> ]); <ide><path>src/ng/compile.js <ide> function $CompileProvider($provide) { <ide> <ide> this.$get = [ <ide> '$injector', '$interpolate', '$exceptionHandler', '$http', '$templateCache', '$parse', <del> '$controller', '$rootScope', '$document', <add> '$controller', '$rootScope', '$document', '$$urlUtils', <ide> function($injector, $interpolate, $exceptionHandler, $http, $templateCache, $parse, <del> $controller, $rootScope, $document) { <add> $controller, $rootScope, $document, $$urlUtils) { <ide> <ide> var Attributes = function(element, attr) { <ide> this.$$element = element; <ide> function $CompileProvider($provide) { <ide> } <ide> } <ide> <add> nodeName = nodeName_(this.$$element); <ide> <ide> // sanitize a[href] and img[src] values <del> nodeName = nodeName_(this.$$element); <ide> if ((nodeName === 'A' && key === 'href') || <del> (nodeName === 'IMG' && key === 'src')){ <del> urlSanitizationNode.setAttribute('href', value); <del> <del> // href property always returns normalized absolute url, so we can match against that <del> normalizedVal = urlSanitizationNode.href; <del> if (normalizedVal !== '') { <del> if ((key === 'href' && !normalizedVal.match(aHrefSanitizationWhitelist)) || <del> (key === 'src' && !normalizedVal.match(imgSrcSanitizationWhitelist))) { <del> this[key] = value = 'unsafe:' + normalizedVal; <add> (nodeName === 'IMG' && key === 'src')) { <add> // NOTE: $$urlUtils.resolve() doesn't support IE < 8 so we don't 
sanitize for that case. <add> if (!msie || msie >= 8 ) { <add> normalizedVal = $$urlUtils.resolve(value); <add> if (normalizedVal !== '') { <add> if ((key === 'href' && !normalizedVal.match(aHrefSanitizationWhitelist)) || <add> (key === 'src' && !normalizedVal.match(imgSrcSanitizationWhitelist))) { <add> this[key] = value = 'unsafe:' + normalizedVal; <add> } <ide> } <ide> } <ide> } <ide> <del> <ide> if (writeAttr !== false) { <ide> if (value === null || value === undefined) { <ide> this.$$element.removeAttr(attrName); <ide><path>src/ng/http.js <ide> function parseHeaders(headers) { <ide> } <ide> <ide> <del>var IS_SAME_DOMAIN_URL_MATCH = /^(([^:]+):)?\/\/(\w+:{0,1}\w*@)?([\w\.-]*)?(:([0-9]+))?(.*)$/; <del> <del> <del>/** <del> * Parse a request and location URL and determine whether this is a same-domain request. <del> * <del> * @param {string} requestUrl The url of the request. <del> * @param {string} locationUrl The current browser location url. <del> * @returns {boolean} Whether the request is for the same domain. <del> */ <del>function isSameDomain(requestUrl, locationUrl) { <del> var match = IS_SAME_DOMAIN_URL_MATCH.exec(requestUrl); <del> // if requestUrl is relative, the regex does not match. <del> if (match == null) return true; <del> <del> var domain1 = { <del> protocol: match[2], <del> host: match[4], <del> port: int(match[6]) || DEFAULT_PORTS[match[2]] || null, <del> // IE8 sets unmatched groups to '' instead of undefined. 
<del> relativeProtocol: match[2] === undefined || match[2] === '' <del> }; <del> <del> match = SERVER_MATCH.exec(locationUrl); <del> var domain2 = { <del> protocol: match[1], <del> host: match[3], <del> port: int(match[5]) || DEFAULT_PORTS[match[1]] || null <del> }; <del> <del> return (domain1.protocol == domain2.protocol || domain1.relativeProtocol) && <del> domain1.host == domain2.host && <del> (domain1.port == domain2.port || (domain1.relativeProtocol && <del> domain2.port == DEFAULT_PORTS[domain2.protocol])); <del>} <del> <del> <ide> /** <ide> * Returns a function that provides access to parsed headers. <ide> * <ide> function $HttpProvider() { <ide> */ <ide> var responseInterceptorFactories = this.responseInterceptors = []; <ide> <del> this.$get = ['$httpBackend', '$browser', '$cacheFactory', '$rootScope', '$q', '$injector', <del> function($httpBackend, $browser, $cacheFactory, $rootScope, $q, $injector) { <add> this.$get = ['$httpBackend', '$browser', '$cacheFactory', '$rootScope', '$q', '$injector', '$$urlUtils', <add> function($httpBackend, $browser, $cacheFactory, $rootScope, $q, $injector, $$urlUtils) { <ide> <ide> var defaultCache = $cacheFactory('$http'); <ide> <ide> function $HttpProvider() { <ide> config.headers = headers; <ide> config.method = uppercase(config.method); <ide> <del> var xsrfValue = isSameDomain(config.url, $browser.url()) <add> var xsrfValue = $$urlUtils.isSameOrigin(config.url) <ide> ? $browser.cookies()[config.xsrfCookieName || defaults.xsrfCookieName] <ide> : undefined; <ide> if (xsrfValue) { <ide><path>src/ng/urlUtils.js <add>'use strict'; <add> <add>function $$UrlUtilsProvider() { <add> this.$get = ['$window', '$document', function($window, $document) { <add> var urlParsingNode = $document[0].createElement("a"), <add> originUrl = resolve($window.location.href, true); <add> <add> /** <add> * @description <add> * Normalizes and optionally parses a URL. <add> * <add> * NOTE: This is a private service. 
The API is subject to change unpredictably in any commit. <add> * <add> * Implementation Notes for non-IE browsers <add> * ---------------------------------------- <add> * Assigning a URL to the href property of an anchor DOM node, even one attached to the DOM, <add> * results both in the normalizing and parsing of the URL. Normalizing means that a relative <add> * URL will be resolved into an absolute URL in the context of the application document. <add> * Parsing means that the anchor node's host, hostname, protocol, port, pathname and related <add> * properties are all populated to reflect the normalized URL. This approach has wide <add> * compatibility - Safari 1+, Mozilla 1+, Opera 7+,e etc. See <add> * http://www.aptana.com/reference/html/api/HTMLAnchorElement.html <add> * <add> * Implementation Notes for IE <add> * --------------------------- <add> * IE >= 8 and <= 10 normalizes the URL when assigned to the anchor node similar to the other <add> * browsers. However, the parsed components will not be set if the URL assigned did not specify <add> * them. (e.g. if you assign a.href = "foo", then a.protocol, a.host, etc. will be empty.) We <add> * work around that by performing the parsing in a 2nd step by taking a previously normalized <add> * URL (e.g. by assining to a.href) and assigning it a.href again. This correctly populates the <add> * properties such as protocol, hostname, port, etc. <add> * <add> * IE7 does not normalize the URL when assigned to an anchor node. (Apparently, it does, if one <add> * uses the inner HTML approach to assign the URL as part of an HTML snippet - <add> * http://stackoverflow.com/a/472729) However, setting img[src] does normalize the URL. <add> * Unfortunately, setting img[src] to something like "javascript:foo" on IE throws an exception. <add> * Since the primary usage for normalizing URLs is to sanitize such URLs, we can't use that <add> * method and IE < 8 is unsupported. 
<add> * <add> * References: <add> * http://developer.mozilla.org/en-US/docs/Web/API/HTMLAnchorElement <add> * http://www.aptana.com/reference/html/api/HTMLAnchorElement.html <add> * http://url.spec.whatwg.org/#urlutils <add> * https://github.com/angular/angular.js/pull/2902 <add> * http://james.padolsey.com/javascript/parsing-urls-with-the-dom/ <add> * <add> * @param {string} url The URL to be parsed. <add> * @param {boolean=} parse When true, returns an object for the parsed URL. Otherwise, returns <add> * a single string that is the normalized URL. <add> * @returns {object|string} When parse is true, returns the normalized URL as a string. <add> * Otherwise, returns an object with the following members. <add> * <add> * | member name | Description | <add> * |===============|================| <add> * | href | A normalized version of the provided URL if it was not an absolute URL | <add> * | protocol | The protocol including the trailing colon | <add> * | host | The host and port (if the port is non-default) of the normalizedUrl | <add> * <add> * These fields from the UrlUtils interface are currently not needed and hence not returned. <add> * <add> * | member name | Description | <add> * |===============|================| <add> * | hostname | The host without the port of the normalizedUrl | <add> * | pathname | The path following the host in the normalizedUrl | <add> * | hash | The URL hash if present | <add> * | search | The query string | <add> * <add> */ <add> function resolve(url, parse) { <add> var href = url; <add> if (msie) { <add> // Normalize before parse. Refer Implementation Notes on why this is <add> // done in two steps on IE. 
<add> urlParsingNode.setAttribute("href", href); <add> href = urlParsingNode.href; <add> } <add> urlParsingNode.setAttribute('href', href); <add> <add> if (!parse) { <add> return urlParsingNode.href; <add> } <add> // urlParsingNode provides the UrlUtils interface - http://url.spec.whatwg.org/#urlutils <add> return { <add> href: urlParsingNode.href, <add> protocol: urlParsingNode.protocol, <add> host: urlParsingNode.host <add> // Currently unused and hence commented out. <add> // hostname: urlParsingNode.hostname, <add> // port: urlParsingNode.port, <add> // pathname: urlParsingNode.pathname, <add> // hash: urlParsingNode.hash, <add> // search: urlParsingNode.search <add> }; <add> } <add> <add> return { <add> resolve: resolve, <add> /** <add> * Parse a request URL and determine whether this is a same-origin request as the application document. <add> * <add> * @param {string} requestUrl The url of the request. <add> * @returns {boolean} Whether the request is for the same origin as the application document. 
<add> */ <add> isSameOrigin: function isSameOrigin(requestUrl) { <add> var parsed = resolve(requestUrl, true); <add> return (parsed.protocol === originUrl.protocol && <add> parsed.host === originUrl.host); <add> } <add> }; <add> }]; <add>} <ide><path>test/ng/httpSpec.js <ide> describe('$http', function() { <ide> <ide> $httpBackend.verifyNoOutstandingExpectation = noop; <ide> }); <del> <del> describe('isSameDomain', function() { <del> it('should support various combinations of urls', function() { <del> expect(isSameDomain('path/morepath', <del> 'http://www.adomain.com')).toBe(true); <del> expect(isSameDomain('http://www.adomain.com/path', <del> 'http://www.adomain.com')).toBe(true); <del> expect(isSameDomain('//www.adomain.com/path', <del> 'http://www.adomain.com')).toBe(true); <del> expect(isSameDomain('//www.adomain.com/path', <del> 'https://www.adomain.com')).toBe(true); <del> expect(isSameDomain('//www.adomain.com/path', <del> 'http://www.adomain.com:1234')).toBe(false); <del> expect(isSameDomain('https://www.adomain.com/path', <del> 'http://www.adomain.com')).toBe(false); <del> expect(isSameDomain('http://www.adomain.com:1234/path', <del> 'http://www.adomain.com')).toBe(false); <del> expect(isSameDomain('http://www.anotherdomain.com/path', <del> 'http://www.adomain.com')).toBe(false); <del> }); <del> }); <ide> }); <ide><path>test/ng/urlUtilsSpec.js <add>'use strict'; <add> <add>describe('$$urlUtils', function() { <add> describe('parse', function() { <add> it('should normalize a relative url', inject(function($$urlUtils) { <add> expect($$urlUtils.resolve("foo")).toMatch(/^https?:\/\/[^/]+\/foo$/); <add> })); <add> <add> it('should parse relative URL into component pieces', inject(function($$urlUtils) { <add> var parsed = $$urlUtils.resolve("foo", true); <add> expect(parsed.href).toMatch(/https?:\/\//); <add> expect(parsed.protocol).toMatch(/^https?:/); <add> expect(parsed.host).not.toBe(""); <add> })); <add> }); <add> <add> describe('isSameOrigin', function() { 
<add> it('should support various combinations of urls', inject(function($$urlUtils, $document) { <add> expect($$urlUtils.isSameOrigin('path')).toBe(true); <add> var origin = $$urlUtils.resolve($document[0].location.href, true); <add> expect($$urlUtils.isSameOrigin('//' + origin.host + '/path')).toBe(true); <add> // Different domain. <add> expect($$urlUtils.isSameOrigin('http://example.com/path')).toBe(false); <add> // Auto fill protocol. <add> expect($$urlUtils.isSameOrigin('//example.com/path')).toBe(false); <add> // Should not match when the ports are different. <add> // This assumes that the test is *not* running on port 22 (very unlikely). <add> expect($$urlUtils.isSameOrigin('//' + origin.hostname + ':22/path')).toBe(false); <add> })); <add> }); <add>});
7
Python
Python
add potsdb lib dep
cc2c68022672589166977e1556752600b3d6df27
<ide><path>setup.py <ide> def get_requires(): <ide> 'IP': ['netifaces'], <ide> 'RAID': ['pymdstat'], <ide> 'DOCKER': ['docker-py'], <del> 'EXPORT': ['influxdb>=1.0.0', 'statsd', 'pika'], <add> 'EXPORT': ['influxdb>=1.0.0', 'potsdb' 'statsd', 'pika'], <ide> 'ACTION': ['pystache'], <ide> 'CPUINFO': ['py-cpuinfo'] <ide> },
1
Ruby
Ruby
remove unnused method from test
f3747204020e7e45ccd30f3993214558d48cf680
<ide><path>railties/test/rails_info_test.rb <ide> def test_html_includes_middleware <ide> end <ide> <ide> protected <del> def svn_info=(info) <del> Rails::Info.module_eval do <del> class << self <del> def svn_info <del> info <del> end <del> end <del> end <del> end <del> <ide> def properties <ide> Rails::Info.properties <ide> end
1
Ruby
Ruby
fix autoload for nullpool
39ae5275c3dc2501d0c19fdd6ae261e8cabf25e9
<ide><path>activerecord/lib/active_record/connection_adapters.rb <ide> module ConnectionAdapters <ide> autoload :DatabaseStatements <ide> autoload :DatabaseLimits <ide> autoload :Quoting <del> autoload :ConnectionPool <ide> autoload :ConnectionHandler <ide> autoload :QueryCache <ide> autoload :Savepoints <ide> end <ide> <add> autoload_at "active_record/connection_adapters/abstract/connection_pool" do <add> autoload :ConnectionPool <add> autoload :NullPool <add> end <add> <ide> autoload_at "active_record/connection_adapters/abstract/transaction" do <ide> autoload :TransactionManager <ide> autoload :NullTransaction
1
PHP
PHP
fix issue with non-sequential array keys
73b0345ff4c2fc55339a19b204131daf27069b2e
<ide><path>lib/Cake/Test/Case/Utility/XmlTest.php <ide> public function testFromArray() { <ide> $this->assertEquals(str_replace(array("\r", "\n"), '', $obj->asXML()), $xmlText); <ide> } <ide> <add>/** <add> * Test non-sequential keys in list types. <add> * <add> * @return void <add> */ <add> public function testFromArrayNonSequentialKeys() { <add> $xmlArray = array( <add> 'Event' => array( <add> array( <add> 'id' => '235', <add> 'Attribute' => array( <add> 0 => array( <add> 'id' => '9646', <add> ), <add> 2 => array( <add> 'id' => '9647', <add> ) <add> ) <add> ) <add> ) <add> ); <add> $obj = Xml::fromArray($xmlArray); <add> $expected = <<<XML <add><?xml version="1.0" encoding="UTF-8"?> <add><Event> <add> <id>235</id> <add> <Attribute> <add> <id>9646</id> <add> </Attribute> <add> <Attribute> <add> <id>9647</id> <add> </Attribute> <add></Event> <add>XML; <add> $this->assertXmlStringEqualsXmlString($expected, $obj->asXML()); <add> } <add> <ide> /** <ide> * data provider for fromArray() failures <ide> * <ide><path>lib/Cake/Utility/Xml.php <ide> protected static function _fromArray($dom, $node, &$data, $format) { <ide> if ($key[0] === '@') { <ide> throw new XmlException(__d('cake_dev', 'Invalid array')); <ide> } <del> if (array_keys($value) === range(0, count($value) - 1)) { // List <add> if (is_numeric(implode(array_keys($value), ''))) { // List <ide> foreach ($value as $item) { <ide> $itemData = compact('dom', 'node', 'key', 'format'); <ide> $itemData['value'] = $item;
2
Text
Text
add instructions to import in replit
7b860f920f452bc60a73c7ffb785d1ba87ad2c8c
<ide><path>client/src/pages/learn/back-end-development-and-apis/basic-node-and-express/index.md <ide> Node.js is a JavaScript runtime that allows developers to write backend (server- <ide> <ide> Express, while not included with Node.js, is another module often used with it. Express runs between the server created by Node.js and the frontend pages of a web application. Express also handles an application's routing. Routing directs users to the correct page based on their interaction with the application. While there are alternatives to using Express, its simplicity makes it a good place to begin when learning the interaction between a backend powered by Node.js and the frontend. <ide> <del>Working on these challenges will involve you writing your code on Replit on our starter project. After completing each challenge you can copy your public Replit URL (to the homepage of your app) into the challenge screen to test it! Optionally you may choose to write your project on another platform but it must be publicly visible for our testing. <add>Working on these challenges involves writing code on Replit in our starter project. <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a <code>.replit</code> window. <add>- Select <code>Use run command</code> and click the <code>Done</code> button. <add>- Complete each challenge and copy the public Replit URL (to the homepage of your app) into the challenge screen to test it! <add> <add>Optionally, you may write your project on another platform, but it must be publicly visible for our testing. <ide> <ide> Start this project on Replit using <a href='https://replit.com/github/freeCodeCamp/boilerplate-express'>this link</a> or clone <a href='https://github.com/freeCodeCamp/boilerplate-express/'>this repository</a> on GitHub! If you use Replit, remember to save the link to your project somewhere safe! 
<ide><path>client/src/pages/learn/back-end-development-and-apis/managing-packages-with-npm/index.md <ide> npm saves packages in a folder named <code>node_modules</code>. These packages c <ide> 2. locally within a project's own <code>node_modules</code> folder, accessible only to that project. <ide> <ide> Most developers prefer to install packages local to each project to create a separation between the dependencies of different projects. <del>Working on these challenges will involve you writing your code on Replit on our starter project. After completing each challenge you can copy your public Replit URL (to the homepage of your app) into the challenge screen to test it! Optionally you may choose to write your project on another platform but it must be publicly visible for our testing. <add> <add>Working on these challenges involves writing code on Replit in our starter project. <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a <code>.replit</code> window. <add>- Select <code>Use run command</code> and click the <code>Done</code> button. <add>- Complete each challenge and copy the public Replit URL (to the homepage of your app) into the challenge screen to test it! <add> <add>Optionally, you may write your project on another platform, but it must be publicly visible for our testing. <ide> <ide> Start this project on Replit using <a href='https://replit.com/github/freeCodeCamp/boilerplate-npm'>this link</a> or clone <a href='https://github.com/freeCodeCamp/boilerplate-npm/'>this repository</a> on GitHub! If you use Replit, remember to save the link to your project somewhere safe! <ide><path>client/src/pages/learn/back-end-development-and-apis/mongodb-and-mongoose/index.md <ide> While there are many non-relational databases, Mongo's use of JSON as its docume <ide> <ide> Mongoose.js is an npm module for Node.js that allows you to write objects for Mongo as you would in JavaScript. 
This can make it easier to construct documents for storage in Mongo. <ide> <del>Working on these challenges will involve you writing your code on Replit on our starter project. After completing each challenge you can copy your public Replit URL (to the homepage of your app) into the challenge screen to test it! Optionally you may choose to write your project on another platform but it must be publicly visible for our testing. <add>Working on these challenges involves writing code on Replit in our starter project. <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a <code>.replit</code> window. <add>- Select <code>Use run command</code> and click the <code>Done</code> button. <add>- Complete each challenge and copy the public Replit URL (to the homepage of your app) into the challenge screen to test it! <add> <add>Optionally, you may write your project on another platform, but it must be publicly visible for our testing. <ide> <ide> Start this project on Replit using [this link](https://replit.com/github/freeCodeCamp/boilerplate-mongomongoose) or clone [this repository](https://github.com/freeCodeCamp/boilerplate-mongomongoose/) on GitHub! If you use Replit, remember to save the link to your project somewhere safe! <ide> <ide><path>client/src/pages/learn/information-security/information-security-with-helmetjs/index.md <ide> superBlock: Information Security <ide> <ide> HelmetJS is a type of middleware for Express-based applications that automatically sets HTTP headers to prevent sensitive information from unintentionally being passed between the server and client. While HelmetJS does not account for all situations, it does include support for common ones like Content Security Policy, XSS Filtering, and HTTP Strict Transport Security, among others. HelmetJS can be installed on an Express project from npm, after which each layer of protection can be configured to best fit the project. 
<ide> <del>Working on these challenges will involve you writing your code on Replit on our starter project. After completing each challenge, you can copy your public Replit URL (to the homepage of your app) into the challenge screen to test it! Optionally, you may choose to write your project on another platform, but it must be publicly visible for our testing. <add>Working on these challenges involves writing code on Replit in our starter project. <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a <code>.replit</code> window. <add>- Select <code>Use run command</code> and click the <code>Done</code> button. <add>- Complete each challenge and copy the public Replit URL (to the homepage of your app) into the challenge screen to test it! <add> <add>Optionally, you may write your project on another platform, but it must be publicly visible for our testing. <ide> <ide> Start this project on Replit using <a rel='noopener noreferrer' target='_blank' href='https://replit.com/github/freeCodeCamp/boilerplate-infosec'>this link</a> or clone <a rel='noopener noreferrer' target='_blank' href='https://github.com/freeCodeCamp/boilerplate-infosec/'>this repository</a> on GitHub! If you use Replit, remember to save the link to your project somewhere safe! <ide><path>client/src/pages/learn/quality-assurance/advanced-node-and-express/index.md <ide> superBlock: Quality Assurance <ide> <ide> _Authentication_ is the process or action of verifying the identity of a user or process. Up to this point you have not been able to create an app utilizing this key concept. <ide> <del>The most common and easiest way to use authentication middleware for Node.js is [Passport](http://passportjs.org/). It is easy to learn, light-weight, and extremely flexible allowing for many _strategies_, which we will talk about in later challenges. 
In addition to authentication we will also look at template engines which allow for use of _Pug_ and web sockets which allow for real time communication between all your clients and your server. Working on these challenges will involve you writing your code on Replit on our starter project. After completing each challenge you can copy your public Replit URL (to the homepage of your app) into the challenge screen to test it! Optionally you may choose to write your project on another platform but it must be publicly visible for our testing. <add>The most common and easiest way to use authentication middleware for Node.js is [Passport](http://passportjs.org/). It is easy to learn, light-weight, and extremely flexible allowing for many _strategies_, which we will talk about in later challenges. In addition to authentication we will also look at template engines which allow for use of _Pug_ and web sockets which allow for real time communication between all your clients and your server. <add> <add>Working on these challenges involves writing code on Replit in our starter project. <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a <code>.replit</code> window. <add>- Select <code>Use run command</code> and click the <code>Done</code> button. <add>- Complete each challenge and copy the public Replit URL (to the homepage of your app) into the challenge screen to test it! <add> <add>Optionally, you may write your project on another platform, but it must be publicly visible for our testing. <ide> <ide> Start this project on Replit using [this link](https://replit.com/github/freeCodeCamp/boilerplate-advancednode) or clone [this repository](https://github.com/freeCodeCamp/boilerplate-advancednode/) on GitHub! If you use Replit, remember to save the link to your project somewhere safe. 
<ide><path>client/src/pages/learn/quality-assurance/quality-assurance-and-testing-with-chai/index.md <ide> superBlock: Quality Assurance <ide> <ide> As your programs become more complex, you need to test them often to make sure any new code you add doesn't break the program's original functionality. Chai is a JavaScript testing library that helps you check that your program still behaves the way you expect it to after you make changes. Using Chai, you can write tests that describe your program's requirements and see if your program meets them. <ide> <del>Working on these challenges will involve you writing your code on Replit on our starter project. After completing each challenge you can copy your public Replit URL (to the homepage of your app) into the challenge screen to test it! Optionally you may choose to write your project on another platform but it must be publicly visible for our testing. <add>Working on these challenges involves writing code on Replit in our starter project. <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a <code>.replit</code> window. <add>- Select <code>Use run command</code> and click the <code>Done</code> button. <add>- Complete each challenge and copy the public Replit URL (to the homepage of your app) into the challenge screen to test it! <add> <add>Optionally, you may write your project on another platform, but it must be publicly visible for our testing. <ide> <ide> Start this project on Replit using [this link](https://replit.com/github/freeCodeCamp/boilerplate-mochachai) or clone [this repository](https://github.com/freeCodeCamp/boilerplate-mochachai/) on GitHub! If you use Replit, remember to save the link to your project somewhere safe! 
<ide><path>curriculum/challenges/english/05-back-end-development-and-apis/back-end-development-and-apis-projects/exercise-tracker.md <ide> Build a full stack JavaScript app that is functionally similar to this: <a href= <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-project-exercisetracker" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete your project. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. Optionally, also submit a link to your project's source code in the `GitHub Link` field. <ide> <ide> # --instructions-- <ide><path>curriculum/challenges/english/05-back-end-development-and-apis/back-end-development-and-apis-projects/file-metadata-microservice.md <ide> Build a full stack JavaScript app that is functionally similar to this: <a href= <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-project-filemetadata" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete your project. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. 
Then submit the URL to it in the `Solution Link` field. Optionally, also submit a link to your project's source code in the `GitHub Link` field. <ide> <ide> # --instructions-- <ide><path>curriculum/challenges/english/05-back-end-development-and-apis/back-end-development-and-apis-projects/request-header-parser-microservice.md <ide> Build a full stack JavaScript app that is functionally similar to this: <a href= <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-project-headerparser" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete your project. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. Optionally, also submit a link to your project's source code in the `GitHub Link` field. <ide> <ide> # --hints-- <ide><path>curriculum/challenges/english/05-back-end-development-and-apis/back-end-development-and-apis-projects/timestamp-microservice.md <ide> Build a full stack JavaScript app that is functionally similar to this: <a href= <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-project-timestamp" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete your project. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. 
<add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. Optionally, also submit a link to your project's source code in the `GitHub Link` field. <ide> <ide> **Note:** Time zones conversion is not a purpose of this project, so assume all sent valid dates will be parsed with `new Date()` as GMT dates. <ide><path>curriculum/challenges/english/05-back-end-development-and-apis/back-end-development-and-apis-projects/url-shortener-microservice.md <ide> Build a full stack JavaScript app that is functionally similar to this: <a href= <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-project-urlshortener" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete your project. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. Optionally, also submit a link to your project's source code in the `GitHub Link` field. <ide> <ide> # --instructions-- <ide><path>curriculum/challenges/english/05-back-end-development-and-apis/basic-node-and-express/meet-the-node-console.md <ide> Working on these challenges will involve you writing your code using one of the <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-express" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete these challenges. <ide> - Use a site builder of your choice to complete the project. 
Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. <ide> <ide> During the development process, it is important to be able to check what’s going on in your code. <ide><path>curriculum/challenges/english/05-back-end-development-and-apis/managing-packages-with-npm/how-to-use-package.json-the-core-of-any-node.js-project-or-npm-package.md <ide> Working on these challenges will involve you writing your code using one of the <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-npm" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete these challenges. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. <ide> <ide> The `package.json` file is the center of any Node.js project or npm package. It stores information about your project, similar to how the &lt;head> section of an HTML document describes the content of a webpage. It consists of a single JSON object where information is stored in key-value pairs. 
There are only two required fields; "name" and "version", but it’s good practice to provide additional information about your project that could be useful to future users or maintainers. <ide><path>curriculum/challenges/english/05-back-end-development-and-apis/mongodb-and-mongoose/install-and-set-up-mongoose.md <ide> Working on these challenges will involve you writing your code using one of the <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-mongomongoose" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete these challenges. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. <ide> <ide> In this challenge, you will set up a MongoDB Atlas database and import the required packages to connect to it. <ide><path>curriculum/challenges/english/06-quality-assurance/advanced-node-and-express/set-up-a-template-engine.md <ide> Working on these challenges will involve you writing your code using one of the <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-advancednode" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete these challenges. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. 
<add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. <ide> <ide> A template engine enables you to use static template files (such as those written in *Pug*) in your app. At runtime, the template engine replaces variables in a template file with actual values which can be supplied by your server. Then it transforms the template into a static HTML file that is sent to the client. This approach makes it easier to design an HTML page and allows for displaying variables on the page without needing to make an API call from the client. <ide><path>curriculum/challenges/english/06-quality-assurance/quality-assurance-and-testing-with-chai/learn-how-javascript-assertions-work.md <ide> Working on these challenges will involve you writing your code using one of the <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-mochachai" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete these challenges. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. 
<ide> <ide> # --instructions-- <ide><path>curriculum/challenges/english/06-quality-assurance/quality-assurance-projects/american-british-translator.md <ide> Build a full stack JavaScript app that is functionally similar to this: <a href= <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-project-american-british-english-translator" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete your project. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. Optionally, also submit a link to your project's source code in the `GitHub Link` field. <ide> <ide> # --instructions-- <ide><path>curriculum/challenges/english/06-quality-assurance/quality-assurance-projects/issue-tracker.md <ide> Build a full stack JavaScript app that is functionally similar to this: <a href= <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-project-issuetracker" target="_blank" rel="noopener noreferrer nofollow">this Replit starter project</a> to complete your project. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. 
Then submit the URL to it in the `Solution Link` field. Optionally, also submit a link to your project's source code in the `GitHub Link` field. <ide> <ide> # --instructions-- <ide><path>curriculum/challenges/english/06-quality-assurance/quality-assurance-projects/metric-imperial-converter.md <ide> Build a full stack JavaScript app that is functionally similar to this: <a href= <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-project-metricimpconverter" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete your project. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. Optionally, also submit a link to your project's source code in the `GitHub Link` field. <ide> <ide> # --instructions-- <ide><path>curriculum/challenges/english/06-quality-assurance/quality-assurance-projects/personal-library.md <ide> Build a full stack JavaScript app that is functionally similar to this: <a href= <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-project-library" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete your project. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. 
<add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. Optionally, also submit a link to your project's source code in the `GitHub Link` field. <ide> <ide> # --instructions-- <ide><path>curriculum/challenges/english/06-quality-assurance/quality-assurance-projects/sudoku-solver.md <ide> Build a full stack JavaScript app that is functionally similar to this: <a href= <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-project-sudoku-solver" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete your project. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. Optionally, also submit a link to your project's source code in the `GitHub Link` field. <ide> <ide> # --instructions-- <ide><path>curriculum/challenges/english/07-scientific-computing-with-python/scientific-computing-with-python-projects/arithmetic-formatter.md <ide> dashedName: arithmetic-formatter <ide> <ide> You will be <a href="https://replit.com/github/freeCodeCamp/boilerplate-arithmetic-formatter" target="_blank" rel="noopener noreferrer nofollow"> working on this project with our Replit starter code</a>. <ide> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. 
<add> <add> <ide> # --instructions-- <ide> <ide> Students in primary school often arrange arithmetic problems vertically to make them easier to solve. For example, "235 + 52" becomes: <ide><path>curriculum/challenges/english/07-scientific-computing-with-python/scientific-computing-with-python-projects/budget-app.md <ide> dashedName: budget-app <ide> # --description-- <ide> <ide> You will be <a href="https://replit.com/github/freeCodeCamp/boilerplate-budget-app" target="_blank" rel="noopener noreferrer nofollow">working on this project with our Replit starter code</a>. <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> <ide> # --instructions-- <ide> <ide><path>curriculum/challenges/english/07-scientific-computing-with-python/scientific-computing-with-python-projects/polygon-area-calculator.md <ide> dashedName: polygon-area-calculator <ide> <ide> You will be <a href="https://replit.com/github/freeCodeCamp/boilerplate-polygon-area-calculator" target="_blank" rel="noopener noreferrer nofollow">working on this project with our Replit starter code</a>. <ide> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <add> <ide> # --instructions-- <ide> <ide> In this project you will use object oriented programming to create a Rectangle class and a Square class. The Square class should be a subclass of Rectangle and inherit methods and attributes. <ide><path>curriculum/challenges/english/07-scientific-computing-with-python/scientific-computing-with-python-projects/probability-calculator.md <ide> dashedName: probability-calculator <ide> <ide> You will be <a href="https://replit.com/github/freeCodeCamp/boilerplate-probability-calculator" target="_blank" rel="noopener noreferrer nofollow">working on this project with our Replit starter code</a>. 
<ide> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <add> <ide> # --instructions-- <ide> <ide> Suppose there is a hat containing 5 blue balls, 4 red balls, and 2 green balls. What is the probability that a random draw of 4 balls will contain at least 1 red ball and 2 green balls? While it would be possible to calculate the probability using advanced mathematics, an easier way is to write a program to perform a large number of experiments to estimate an approximate probability. <ide><path>curriculum/challenges/english/07-scientific-computing-with-python/scientific-computing-with-python-projects/time-calculator.md <ide> dashedName: time-calculator <ide> <ide> # --description-- <ide> <del>You will be <a href="https://replit.com/github/freeCodeCamp/boilerplate-time-calculator" target="_blank" rel="noopener noreferrer nofollow">working on this project with our Replit starter code</a>. <add>You will be <a href="https://replit.com/github/freeCodeCamp/boilerplate-time-calculator" target="_blank" rel="noopener noreferrer nofollow">working on this project with our Replit starter code</a>. <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <ide> <ide> # --instructions-- <ide> <ide><path>curriculum/challenges/english/08-data-analysis-with-python/data-analysis-with-python-projects/demographic-data-analyzer.md <ide> dashedName: demographic-data-analyzer <ide> <ide> You will be <a href="https://replit.com/github/freeCodeCamp/boilerplate-demographic-data-analyzer" target="_blank" rel="noopener noreferrer nofollow">working on this project with our Replit starter code</a>. <ide> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. 
<add> <add> <ide> We are still developing the interactive instructional part of the Python curriculum. For now, here are some videos on the freeCodeCamp.org YouTube channel that will teach you everything you need to know to complete this project: <ide> <ide> - <a href="https://www.freecodecamp.org/news/python-for-everybody/" target="_blank" rel="noopener noreferrer nofollow">Python for Everybody Video Course</a> (14 hours) <ide><path>curriculum/challenges/english/08-data-analysis-with-python/data-analysis-with-python-projects/mean-variance-standard-deviation-calculator.md <ide> dashedName: mean-variance-standard-deviation-calculator <ide> <ide> # --description-- <ide> <del>You will be <a href="https://replit.com/github/freeCodeCamp/boilerplate-mean-variance-standard-deviation-calculator" target="_blank" rel="noopener noreferrer nofollow">working on this project with our Replit starter code</a>. <add>You will be <a href="https://replit.com/github/freeCodeCamp/boilerplate-mean-variance-standard-deviation-calculator" target="_blank" rel="noopener noreferrer nofollow">working on this project with our Replit starter code</a>. <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> <ide> We are still developing the interactive instructional part of the Python curriculum. For now, here are some videos on the freeCodeCamp.org YouTube channel that will teach you everything you need to know to complete this project: <ide> <ide><path>curriculum/challenges/english/08-data-analysis-with-python/data-analysis-with-python-projects/medical-data-visualizer.md <ide> dashedName: medical-data-visualizer <ide> <ide> You will be <a href="https://replit.com/github/freeCodeCamp/boilerplate-medical-data-visualizer" target="_blank" rel="noopener noreferrer nofollow">working on this project with our Replit starter code</a>. 
<ide> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <add> <ide> We are still developing the interactive instructional part of the Python curriculum. For now, here are some videos on the freeCodeCamp.org YouTube channel that will teach you everything you need to know to complete this project: <ide> <ide> - <a href="https://www.freecodecamp.org/news/python-for-everybody/" target="_blank" rel="noopener noreferrer nofollow">Python for Everybody Video Course</a>(14 hours) <ide><path>curriculum/challenges/english/08-data-analysis-with-python/data-analysis-with-python-projects/page-view-time-series-visualizer.md <ide> dashedName: page-view-time-series-visualizer <ide> <ide> You will be <a href="https://replit.com/github/freeCodeCamp/boilerplate-page-view-time-series-visualizer" target="_blank" rel="noopener noreferrer nofollow">working on this project with our Replit starter code</a>. <ide> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <add> <ide> We are still developing the interactive instructional part of the Python curriculum. For now, here are some videos on the freeCodeCamp.org YouTube channel that will teach you everything you need to know to complete this project: <ide> <ide> - <a href="https://www.freecodecamp.org/news/python-for-everybody/" target="_blank" rel="noopener noreferrer nofollow">Python for Everybody Video Course</a>(14 hours) <ide><path>curriculum/challenges/english/08-data-analysis-with-python/data-analysis-with-python-projects/sea-level-predictor.md <ide> dashedName: sea-level-predictor <ide> <ide> You will be <a href="https://replit.com/github/freeCodeCamp/boilerplate-sea-level-predictor" target="_blank" rel="noopener noreferrer nofollow">working on this project with our Replit starter code</a>. 
<ide> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <add> <ide> We are still developing the interactive instructional part of the Python curriculum. For now, here are some videos on the freeCodeCamp.org YouTube channel that will teach you everything you need to know to complete this project: <ide> <ide> - <a href="https://www.freecodecamp.org/news/python-for-everybody/" target="_blank" rel="noopener noreferrer nofollow">Python for Everybody Video Course</a>(14 hours) <ide><path>curriculum/challenges/english/09-information-security/information-security-projects/anonymous-message-board.md <ide> Working on this project will involve you writing your code using one of the foll <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-project-messageboard" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete your project. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. Optionally, also submit a link to your project's source code in the `GitHub Link` field. 
<ide> <ide> # --instructions-- <ide><path>curriculum/challenges/english/09-information-security/information-security-projects/port-scanner.md <ide> dashedName: port-scanner <ide> <ide> # --description-- <ide> <del>You will be <a href="https://replit.com/github/freeCodeCamp/boilerplate-port-scanner" target="_blank" rel="noopener noreferrer nofollow">working on this project with our Replit starter code</a>. <add>You will be <a href="https://replit.com/github/freeCodeCamp/boilerplate-port-scanner" target="_blank" rel="noopener noreferrer nofollow">working on this project with our Replit starter code</a>. <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> <ide> We are still developing the interactive instructional part of the Python curriculum. For now, here are some videos on the freeCodeCamp.org YouTube channel that will teach you everything you need to know to complete this project: <ide> <ide><path>curriculum/challenges/english/09-information-security/information-security-projects/secure-real-time-multiplayer-game.md <ide> Develop a 2D real time multiplayer game using the HTML Canvas API and Socket.io <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-project-secure-real-time-multiplayer-game" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete your project. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. 
Optionally, also submit a link to your project's source code in the `GitHub Link` field. <ide> <ide> # --instructions-- <ide><path>curriculum/challenges/english/09-information-security/information-security-projects/sha-1-password-cracker.md <ide> dashedName: sha-1-password-cracker <ide> <ide> You will be <a href="https://replit.com/github/freeCodeCamp/boilerplate-SHA-1-password-cracker" target="_blank" rel="noopener noreferrer nofollow">working on this project with our Replit starter code</a>. <ide> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <add> <ide> We are still developing the interactive instructional part of the Python curriculum. For now, here are some videos on the freeCodeCamp.org YouTube channel that will teach you everything you need to know to complete this project: <ide> <ide> - <a href="https://www.freecodecamp.org/news/python-for-everybody/" target="_blank" rel="noopener noreferrer nofollow">Python for Everybody Video Course</a> (14 hours) <ide><path>curriculum/challenges/english/09-information-security/information-security-projects/stock-price-checker.md <ide> Working on this project will involve you writing your code using one of the foll <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-project-stockchecker" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete your project. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. 
Then submit the URL to it in the `Solution Link` field. Optionally, also submit a link to your project's source code in the `GitHub Link` field. <ide> <ide> # --instructions-- <ide><path>curriculum/challenges/english/09-information-security/information-security-with-helmetjs/install-and-require-helmet.md <ide> Working on these challenges will involve you writing your code using one of the <ide> - Use <a href="https://replit.com/github/freeCodeCamp/boilerplate-infosec" target="_blank" rel="noopener noreferrer nofollow">our Replit starter project</a> to complete these challenges. <ide> - Use a site builder of your choice to complete the project. Be sure to incorporate all the files from our GitHub repo. <ide> <add>If you use Replit, follow these steps to set up the project: <add> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. <ide> <ide> Helmet helps you secure your Express apps by setting various HTTP headers. <ide><path>curriculum/challenges/english/11-machine-learning-with-python/machine-learning-with-python-projects/rock-paper-scissors.md <ide> For this challenge, you will create a program to play Rock, Paper, Scissors. A p <ide> <ide> You will be <a href="https://replit.com/github/freeCodeCamp/boilerplate-rock-paper-scissors" target="_blank" rel="noopener noreferrer nofollow">working on this project with our Replit starter code</a>. <ide> <add>- Start by importing the project on Replit. <add>- Next, you will see a `.replit` window. <add>- Select `Use run command` and click the `Done` button. <add> <ide> We are still developing the interactive instructional part of the machine learning curriculum. For now, you will have to use other resources to learn how to pass this challenge. 
<ide> <ide> # --instructions--
38
PHP
PHP
fix more failing tests in console/command
7762846ec0806cfd27b93c454c7aa74492b41d70
<ide><path>lib/Cake/Console/Command/BakeShell.php <ide> <?php <ide> /** <del> * Command-line code generation utility to automate programmer chores. <del> * <del> * Bake is CakePHP's code generation script, which can help you kickstart <del> * application development by writing fully functional skeleton controllers, <del> * models, and views. Going further, Bake can also write Unit Tests for you. <del> * <del> * PHP 5 <del> * <ide> * CakePHP(tm) : Rapid Development Framework (http://cakephp.org) <ide> * Copyright 2005-2012, Cake Software Foundation, Inc. (http://cakefoundation.org) <ide> * <ide> * @license MIT License (http://www.opensource.org/licenses/mit-license.php) <ide> */ <ide> namespace Cake\Console\Command; <add> <ide> use Cake\Console\Shell; <ide> use Cake\Core\App; <ide> use Cake\Core\Configure; <ide><path>lib/Cake/Test/TestCase/Console/Command/BakeShellTest.php <ide> <?php <ide> /** <del> * BakeShell Test Case <del> * <del> * <del> * PHP 5 <del> * <ide> * CakePHP(tm) : Rapid Development Framework (http://cakephp.org) <ide> * Copyright 2005-2012, Cake Software Foundation, Inc. 
(http://cakefoundation.org) <ide> * <ide> * @license MIT License (http://www.opensource.org/licenses/mit-license.php) <ide> */ <ide> namespace Cake\Test\TestCase\Console\Command; <add> <ide> use Cake\Console\Command\BakeShellShell; <ide> use Cake\Controller\Controller; <ide> use Cake\Core\App; <add>use Cake\Core\Configure; <ide> use Cake\TestSuite\TestCase; <ide> <ide> class BakeShellTest extends TestCase { <ide> class BakeShellTest extends TestCase { <ide> * <ide> * @var array <ide> */ <del> public $fixtures = array('core.user'); <add> public $fixtures = array('core.comment'); <ide> <ide> /** <ide> * setup test <ide> class BakeShellTest extends TestCase { <ide> */ <ide> public function setUp() { <ide> parent::setUp(); <del> $out = $this->getMock('Cake\Console\ConsoleOutput', array(), array(), '', false); <del> $in = $this->getMock('Cake\Console\ConsoleInput', array(), array(), '', false); <add> $out = $this->getMock('Cake\Console\ConsoleOutput', [], [], '', false); <add> $in = $this->getMock('Cake\Console\ConsoleInput', [], [], '', false); <ide> <ide> $this->Shell = $this->getMock( <ide> 'Cake\Console\Command\BakeShell', <del> array('in', 'out', 'hr', 'err', 'createFile', '_stop', '_checkUnitTest'), <del> array($out, $out, $in) <add> ['in', 'out', 'hr', 'err', 'createFile', '_stop', '_checkUnitTest'], <add> [$out, $out, $in] <ide> ); <add> Configure::write('App.namespace', 'TestApp'); <ide> } <ide> <ide> /** <ide> public function tearDown() { <ide> * @return void <ide> */ <ide> public function testAllWithModelName() { <del> $userExists = App::classname('User', 'Model'); <del> $this->skipIf($userExists, 'User class exists, cannot test `bake all [param]`.'); <del> <del> $this->Shell->Model = $this->getMock('Cake\Console\Command\Task\ModelTask', array(), array(&$this->Dispatcher)); <del> $this->Shell->Controller = $this->getMock('Cake\Console\Command\Task\ControllerTask', array(), array(&$this->Dispatcher)); <del> $this->Shell->View = 
$this->getMock('Cake\Console\Command\Task\ModelTask', array(), array(&$this->Dispatcher)); <del> $this->Shell->DbConfig = $this->getMock('Cake\Console\Command\Task\DbConfigTask', array(), array(&$this->Dispatcher)); <add> $this->Shell->Model = $this->getMock( <add> 'Cake\Console\Command\Task\ModelTask', <add> [], <add> [&$this->Dispatcher] <add> ); <add> $this->Shell->Controller = $this->getMock( <add> 'Cake\Console\Command\Task\ControllerTask', <add> [], <add> [&$this->Dispatcher] <add> ); <add> $this->Shell->View = $this->getMock( <add> 'Cake\Console\Command\Task\ModelTask', <add> [], <add> [&$this->Dispatcher] <add> ); <add> $this->Shell->DbConfig = $this->getMock( <add> 'Cake\Console\Command\Task\DbConfigTask', <add> [], <add> [&$this->Dispatcher] <add> ); <ide> <ide> $this->Shell->DbConfig->expects($this->once()) <ide> ->method('getConfig') <ide> public function testAllWithModelName() { <ide> $this->Shell->View->expects($this->once()) <ide> ->method('execute'); <ide> <del> $this->Shell->expects($this->once())->method('_stop'); <add> $this->Shell->expects($this->once()) <add> ->method('_stop'); <add> <ide> $this->Shell->expects($this->at(0)) <ide> ->method('out') <ide> ->with('Bake All'); <ide> <del> $this->Shell->expects($this->at(5)) <add> $this->Shell->expects($this->at(4)) <ide> ->method('out') <ide> ->with('<success>Bake All complete</success>'); <ide> <ide> $this->Shell->connection = ''; <ide> $this->Shell->params = array(); <del> $this->Shell->args = array('User'); <add> $this->Shell->args = array('Comment'); <ide> $this->Shell->all(); <ide> <del> $this->assertEquals('User', $this->Shell->View->args[0]); <add> $this->assertEquals('Comment', $this->Shell->View->args[0]); <ide> } <ide> }
2
Text
Text
use valid json in translation example
bec7f67c4a2ab81db4962317f12e4ea3867f7e47
<ide><path>docs/guides/languages.md <ide> Video.js uses a JSON object to describe a language, where the keys are English a <ide> "Pause": "Pausa", <ide> "Current Time": "Tiempo reproducido", <ide> "Duration Time": "Duración total", <del> "Remaining Time": "Tiempo restante", <del> ... <add> "Remaining Time": "Tiempo restante" <ide> } <ide> ``` <ide>
1
Ruby
Ruby
fix linter offenses
dabbfe3e3e4d5a6cd2f66ed274a9ec8ef7999f25
<ide><path>Library/Homebrew/cask/cask.rb <ide> def outdated_versions(greedy = false) <ide> def outdated_info(greedy, verbose, json) <ide> if json <ide> { <del> name: token, <add> name: token, <ide> installed_versions: outdated_versions(greedy).join(", "), <del> current_version: version <add> current_version: version, <ide> } <ide> elsif verbose <ide> outdated_info = token << " (#{outdated_versions(greedy).join(", ")})" <ide><path>Library/Homebrew/test/cask/cmd/outdated_spec.rb <ide> it "lists outdated Casks in JSON format" do <ide> result = [ <ide> { <del> name: "local-caffeine", <add> name: "local-caffeine", <ide> installed_versions: "1.2.2", <del> current_version: "1.2.3" <add> current_version: "1.2.3", <ide> }, <ide> { <del> name: "local-transmission", <add> name: "local-transmission", <ide> installed_versions: "2.60", <del> current_version: "2.61" <del> } <add> current_version: "2.61", <add> }, <ide> ].to_json <ide> <ide> expect { <ide> it "ignores --quiet and lists outdated Casks in JSON format" do <ide> result = [ <ide> { <del> name: "local-caffeine", <add> name: "local-caffeine", <ide> installed_versions: "1.2.2", <del> current_version: "1.2.3" <add> current_version: "1.2.3", <ide> }, <ide> { <del> name: "local-transmission", <add> name: "local-transmission", <ide> installed_versions: "2.60", <del> current_version: "2.61" <del> } <add> current_version: "2.61", <add> }, <ide> ].to_json <ide> <ide> expect { <ide> it 'includes the Casks with "auto_updates true" or "version latest" in JSON format' do <ide> result = [ <ide> { <del> name: "auto-updates", <add> name: "auto-updates", <ide> installed_versions: "2.57", <del> current_version: "2.61" <add> current_version: "2.61", <ide> }, <ide> { <del> name: "local-caffeine", <add> name: "local-caffeine", <ide> installed_versions: "1.2.2", <del> current_version: "1.2.3" <add> current_version: "1.2.3", <ide> }, <ide> { <del> name: "local-transmission", <add> name: "local-transmission", <ide> installed_versions: "2.60", 
<del> current_version: "2.61" <add> current_version: "2.61", <ide> }, <ide> { <del> name: "version-latest-string", <add> name: "version-latest-string", <ide> installed_versions: "latest", <del> current_version: "latest" <del> } <add> current_version: "latest", <add> }, <ide> ].to_json <ide> <ide> expect { <ide> <ide> result = [ <ide> { <del> name: "local-caffeine", <add> name: "local-caffeine", <ide> installed_versions: "1.2.2", <del> current_version: "1.2.3" <add> current_version: "1.2.3", <ide> }, <ide> { <del> name: "local-transmission", <add> name: "local-transmission", <ide> installed_versions: "2.60", <del> current_version: "2.61" <add> current_version: "2.61", <ide> }, <ide> { <del> name: "version-latest-string", <add> name: "version-latest-string", <ide> installed_versions: "latest", <del> current_version: "latest" <del> } <add> current_version: "latest", <add> }, <ide> ].to_json <ide> <ide> expect {
2
Javascript
Javascript
use dynamic port in test-cluster-dgram-reuse
6b1819cff59c674d03f0afca80f33369b36b3926
<ide><path>test/parallel/test-cluster-dgram-reuse.js <ide> if (cluster.isMaster) { <ide> return; <ide> } <ide> <del>const sockets = []; <del>function next() { <del> sockets.push(this); <del> if (sockets.length !== 2) <del> return; <del> <del> // Work around health check issue <del> process.nextTick(() => { <del> for (let i = 0; i < sockets.length; i++) <del> sockets[i].close(close); <del> }); <del>} <del> <ide> let waiting = 2; <ide> function close() { <ide> if (--waiting === 0) <ide> cluster.worker.disconnect(); <ide> } <ide> <del>for (let i = 0; i < 2; i++) <del> dgram.createSocket({ type: 'udp4', reuseAddr: true }).bind(common.PORT, next); <add>const options = { type: 'udp4', reuseAddr: true }; <add>const socket1 = dgram.createSocket(options); <add>const socket2 = dgram.createSocket(options); <add> <add>socket1.bind(0, () => { <add> socket2.bind(socket1.address().port, () => { <add> // Work around health check issue <add> process.nextTick(() => { <add> socket1.close(close); <add> socket2.close(close); <add> }); <add> }); <add>});
1
Python
Python
fix comment typo in modelserializer
d2994e0596c3163ac970b29dad6a61485f938045
<ide><path>rest_framework/serializers.py <ide> def get_field_names(self, declared_fields, info): <ide> # Ensure that all declared fields have also been included in the <ide> # `Meta.fields` option. <ide> <del> # Do not require any fields that are declared a parent class, <add> # Do not require any fields that are declared in a parent class, <ide> # in order to allow serializer subclasses to only include <ide> # a subset of fields. <ide> required_field_names = set(declared_fields)
1
Text
Text
expand the advanced networking page to all options
ca9c35cdf772ddb121447b3877dbcf8caa4c5cdb
<ide><path>docs/sources/use/networking.md <del>page_title: Configure Networking <add>page_title: Network Configuration <ide> page_description: Docker networking <ide> page_keywords: network, networking, bridge, docker, documentation <ide> <del># Configure Networking <add># Network Configuration <ide> <del>## Introduction <add>## TL;DR <ide> <del>Docker uses Linux bridge capabilities to provide network connectivity to <del>containers. The `docker0` bridge interface is managed by Docker for this <del>purpose. When the Docker daemon starts it: <add>When Docker starts, it creates a virtual interface named `docker0` on <add>the host machine. It randomly chooses an address and subnet from the <add>private range defined by [RFC 1918](http://tools.ietf.org/html/rfc1918) <add>that are not in use on the host machine, and assigns it to `docker0`. <add>Docker made the choice `172.17.42.1/16` when I started it a few minutes <add>ago, for example — a 16-bit netmask providing 65,534 addresses for the <add>host machine and its containers. <ide> <del> - Creates the `docker0` bridge if not present <del> - Searches for an IP address range which doesn't overlap with an existing route <del> - Picks an IP in the selected range <del> - Assigns this IP to the `docker0` bridge <add>But `docker0` is no ordinary interface. It is a virtual *Ethernet <add>bridge* that automatically forwards packets between any other network <add>interfaces that are attached to it. This lets containers communicate <add>both with the host machine and with each other. Every time Docker <add>creates a container, it creates a pair of “peer” interfaces that are <add>like opposite ends of a pipe — a packet send on one will be received on <add>the other. It gives one of the peers to the container to become its <add>`eth0` interface and keeps the other peer, with a unique name like <add>`vethAQI2QT`, out in the namespace of the host machine. 
By binding <add>every `veth*` interface to the `docker0` bridge, Docker creates a <add>virtual subnet shared between the host machine and every Docker <add>container. <add> <add>The remaining sections of this document explain all of the ways that you <add>can use Docker options and — in advanced cases — raw Linux networking <add>commands to tweak, supplement, or entirely replace Docker’s default <add>networking configuration. <ide> <del><!-- --> <add>## Quick Guide to the Options <add> <add>Here is a quick list of the networking-related Docker command-line <add>options, in case it helps you find the section below that you are <add>looking for. <add> <add>Some networking command-line options can only be supplied to the Docker <add>server when it starts up, and cannot be changed once it is running: <add> <add> * `-b BRIDGE` or `--bridge=BRIDGE` — see <add> [Building your own bridge](#bridge-building) <add> <add> * `--bip=CIDR` — see <add> [Customizing docker0](#docker0) <add> <add> * `-H SOCKET...` or `--host=SOCKET...` — <add> This might sound like it would affect container networking, <add> but it actually faces in the other direction: <add> it tells the Docker server over what channels <add> it should be willing to receive commands <add> like “run container” and “stop container.” <add> To learn about the option, <add> read [Bind Docker to another host/port or a Unix socket](../basics/#bind-docker-to-another-hostport-or-a-unix-socket) <add> over in the Basics document. 
<add> <add> * `--icc=true|false` — see <add> [Communication between containers](#between-containers) <add> <add> * `--ip=IP_ADDRESS` — see <add> [Binding container ports](#binding-ports) <add> <add> * `--ip-forward=true|false` — see <add> [Communication between containers](#between-containers) <add> <add> * `--iptables=true|false` — see <add> [Communication between containers](#between-containers) <add> <add> * `--mtu=BYTES` — see <add> [Customizing docker0](#docker0) <add> <add>There are two networking options that can be supplied either at startup <add>or when `docker run` is invoked. When provided at startup, set the <add>default value that `docker run` will later use if the options are not <add>specified: <add> <add> * `--dns=IP_ADDRESS...` — see <add> [Configuring DNS](#dns) <add> <add> * `--dns-search=DOMAIN...` — see <add> [Configuring DNS](#dns) <add> <add>Finally, several networking options can only be provided when calling <add>`docker run` because they specify something specific to one container: <add> <add> * `-h HOSTNAME` or `--hostname=HOSTNAME` — see <add> [Configuring DNS](#dns) and <add> [How Docker networks a container](#container-networking) <add> <add> * `--link=CONTAINER_NAME:ALIAS` — see <add> [Configuring DNS](#dns) and <add> [Communication between containers](#between-containers) <add> <add> * `--net=bridge|none|container:NAME_or_ID|host` — see <add> [How Docker networks a container](#container-networking) <add> <add> * `-p SPEC` or `--publish=SPEC` — see <add> [Binding container ports](#binding-ports) <add> <add> * `-P` or `--publish-all=true|false` — see <add> [Binding container ports](#binding-ports) <add> <add>The following sections tackle all of the above topics in an order that <add>moves roughly from simplest to most complex. 
<add> <add>## <a name="dns"></a>Configuring DNS <add> <add>How can Docker supply each container with a hostname and DNS <add>configuration, without having to build a custom image with the hostname <add>written inside? Its trick is to overlay three crucial `/etc` files <add>inside the container with virtual files where it can write fresh <add>information. You can see this by running `mount` inside a container: <add> <add> $$ mount <add> ... <add> /dev/disk/by-uuid/1fec...ebdf on /etc/hostname type ext4 ... <add> /dev/disk/by-uuid/1fec...ebdf on /etc/hosts type ext4 ... <add> tmpfs on /etc/resolv.conf type tmpfs ... <add> ... <add> <add>This arrangement allows Docker to do clever things like keep <add>`resolv.conf` up to date across all containers when the host machine <add>receives new configuration over DHCP later. The exact details of how <add>Docker maintains these files inside the container can change from one <add>Docker version to the next, so you should leave the files themselves <add>alone and use the following Docker options instead. <add> <add>Four different options affect container domain name services. <add> <add> * `-h HOSTNAME` or `--hostname=HOSTNAME` — sets the hostname by which <add> the container knows itself. This is written into `/etc/hostname`, <add> into `/etc/hosts` as the name of the container’s host-facing IP <add> address, and is the name that `/bin/bash` inside the container will <add> display inside its prompt. But the hostname is not easy to see from <add> outside the container. It will not appear in `docker ps` nor in the <add> `/etc/hosts` file of any other container. <add> <add> * `--link=CONTAINER_NAME:ALIAS` — using this option as you `run` a <add> container gives the new container’s `/etc/hosts` an extra entry <add> named `ALIAS` that points to the IP address of the container named <add> `CONTAINER_NAME`. This lets processes inside the new container <add> connect to the hostname `ALIAS` without having to know its IP. 
The <add> `--link=` option is discussed in more detail below, in the section <add> [Communication between containers](#between-containers). <add> <add> * `--dns=IP_ADDRESS...` — sets the IP addresses added as `server` <add> lines to the container's `/etc/resolv.conf` file. Processes in the <add> container, when confronted with a hostname not in `/etc/hosts`, will <add> connect to these IP addresses on port 53 looking for name resolution <add> services. <add> <add> * `--dns-search=DOMAIN...` — sets the domain names that are searched <add> when a bare unqualified hostname is used inside of the container, by <add> writing `search` lines into the container’s `/etc/resolv.conf`. <add> When a container process attempts to access `host` and the search <add> domain `exmaple.com` is set, for instance, the DNS logic will not <add> only look up `host` but also `host.example.com`. <add> <add>Note that Docker, in the absence of either of the last two options <add>above, will make `/etc/resolv.conf` inside of each container look like <add>the `/etc/resolv.conf` of the host machine where the `docker` daemon is <add>running. The options then modify this default configuration. <add> <add>## <a name="between-containers"></a>Communication between containers <add> <add>Whether two containers can communicate is governed, at the operating <add>system level, by three factors. <add> <add>1. Does the network topology even connect the containers’ network <add> interfaces? By default Docker will attach all containers to a <add> single `docker0` bridge, providing a path for packets to travel <add> between them. See the later sections of this document for other <add> possible topologies. <add> <add>2. Is the host machine willing to forward IP packets? This is governed <add> by the `ip_forward` system parameter. Packets can only pass between <add> containers if this parameter is `1`. 
Usually you will simply leave <add> the Docker server at its default setting `--ip-forward=true` and <add> Docker will go set `ip_forward` to `1` for you when the server <add> starts up. To check the setting or turn it on manually: <add> <add> # Usually not necessary: turning on forwarding, <add> # on the host where your Docker server is running <add> <add> $ cat /proc/sys/net/ipv4/ip_forward <add> 0 <add> $ sudo echo 1 > /proc/sys/net/ipv4/ip_forward <add> $ cat /proc/sys/net/ipv4/ip_forward <add> 1 <add> <add>3. Do your `iptables` allow this particular connection to be made? <add> Docker will never make changes to your system `iptables` rules if <add> you set `--iptables=false` when the daemon starts. Otherwise the <add> Docker server will add a default rule to the `FORWARD` chain with a <add> blanket `ACCEPT` policy if you retain the default `--icc=true`, or <add> else will set the policy to `DROP` if `--icc=false`. <add> <add>Nearly everyone using Docker will want `ip_forward` to be on, to at <add>least make communication *possible* between containers. But it is a <add>strategic question whether to leave `--icc=true` or change it to <add>`--icc=false` (on Ubuntu, by editing the `DOCKER_OPTS` variable in <add>`/etc/default/docker` and restarting the Docker server) so that <add>`iptables` will protect other containers — and the main host — from <add>having arbitrary ports probed or accessed by a container that gets <add>compromised. <add> <add>If you choose the most secure setting of `--icc=false`, then how can <add>containers communicate in those cases where you *want* them to provide <add>each other services? <add> <add>The answer is the `--link=CONTAINER_NAME:ALIAS` option, which was <add>mentioned in the previous section because of its effect upon name <add>services. 
If the Docker daemon is running with both `--icc=false` and <add>`--iptables=true` then, when it sees `docker run` invoked with the <add>`--link=` option, the Docker server will insert a pair of `iptables` <add>`ACCEPT` rules so that the new container can connect to the ports <add>exposed by the other container — the ports that it mentioned in the <add>`EXPOSE` lines of its `Dockerfile`. Docker has more documentation on <add>this subject — see the [Link Containers](working_with_links_names.md) <add>page for further details. <add> <add>> **Note**: <add>> The value `CONTAINER_NAME` in `--link=` must either be an <add>> auto-assigned Docker name like `stupefied_pare` or else the name you <add>> assigned with `--name=` when you ran `docker run`. It cannot be a <add>> hostname, which Docker will not recognize in the context of the <add>> `--link=` option. <add> <add>You can run the `iptables` command on your Docker host to see whether <add>the `FORWARD` chain has a default policy of `ACCEPT` or `DROP`: <add> <add> # When --icc=false, you should see a DROP rule: <add> <add> $ sudo iptables -L -n <add> ... <add> Chain FORWARD (policy ACCEPT) <add> target prot opt source destination <add> DROP all -- 0.0.0.0/0 0.0.0.0/0 <add> ... <add> <add> # When a --link= has been created under --icc=false, <add> # you should see port-specific ACCEPT rules overriding <add> # the subsequent DROP policy for all other packets: <add> <add> $ sudo iptables -L -n <add> ... <add> Chain FORWARD (policy ACCEPT) <add> target prot opt source destination <add> ACCEPT tcp -- 172.17.0.2 172.17.0.3 tcp spt:80 <add> ACCEPT tcp -- 172.17.0.3 172.17.0.2 tcp dpt:80 <add> DROP all -- 0.0.0.0/0 0.0.0.0/0 <add> <add>> **Note**: <add>> Docker is careful that its host-wide `iptables` rules fully expose <add>> containers to each other’s raw IP addresses, so connections from one <add>> container to another should always appear to be originating from the <add>> first container’s own IP address. 
<add> <add>## <a name="binding-ports"></a>Binding container ports to the host <add> <add>By default Docker containers can make connections to the outside world, <add>but the outside world cannot connect to containers. Each outgoing <add>connection will appear to originate from one of the host machine’s own <add>IP addresses thanks to an `iptables` masquerading rule on the host <add>machine that the Docker server creates when it starts: <add> <add> # You can see that the Docker server creates a <add> # masquerade rule that let containers connect <add> # to IP addresses in the outside world: <add> <add> $ sudo iptables -t nat -L -n <add> ... <add> Chain POSTROUTING (policy ACCEPT) <add> target prot opt source destination <add> MASQUERADE all -- 172.17.0.0/16 !172.17.0.0/16 <add> ... <add> <add>But if you want containers to accept incoming connections, you will need <add>to provide special options when invoking `docker run`. These options <add>are covered in more detail on the [Redirect Ports](port_redirection.md) <add>page. There are two approaches. <add> <add>First, you can supply `-P` or `--publish-all=true|false` to `docker run` <add>which is a blanket operation that identifies every port with an `EXPOSE` <add>line in the image’s `Dockerfile` and maps it to a host port somewhere in <add>the range 49000–49900. This tends to be a bit inconvenient, since you <add>then have to run other `docker` sub-commands to learn which external <add>port a given service was mapped to. <add> <add>More convenient is the `-p SPEC` or `--publish=SPEC` option which lets <add>you be explicit about exactly which external port on the Docker server — <add>which can be any port at all, not just those in the 49000–49900 block — <add>you want mapped to which port in the container. <add> <add>Either way, you should be able to peek at what Docker has accomplished <add>in your network stack by examining your NAT tables. 
<add> <add> # What your NAT rules might look like when Docker <add> # is finished setting up a -P forward: <add> <add> $ iptables -t nat -L -n <add> ... <add> Chain DOCKER (2 references) <add> target prot opt source destination <add> DNAT tcp -- 0.0.0.0/0 0.0.0.0/0 tcp dpt:49153 to:172.17.0.2:80 <add> <add> # What your NAT rules might look like when Docker <add> # is finished setting up a -p 80:80 forward: <add> <add> Chain DOCKER (2 references) <add> target prot opt source destination <add> DNAT tcp -- 0.0.0.0/0 0.0.0.0/0 tcp dpt:80 to:172.17.0.2:80 <add> <add>You can see that Docker has exposed these container ports on `0.0.0.0`, <add>the wildcard IP address that will match any possible incoming port on <add>the host machine. If you want to be more restrictive and only allow <add>container services to be contacted through a specific external interface <add>on the host machine, you have two choices. When you invoke `docker run` <add>you can use either `-p IP:host_port:container_port` or `-p IP::port` to <add>specify the external interface for one particular binding. <add> <add>Or if you always want Docker port forwards to bind to one specific IP <add>address, you can edit your system-wide Docker server settings (on <add>Ubuntu, by editing `DOCKER_OPTS` in `/etc/default/docker`) and add the <add>option `--ip=IP_ADDRESS`. Remember to restart your Docker server after <add>editing this setting. <add> <add>Again, this topic is covered without all of these low-level networking <add>details in the [Redirect Ports](port_redirection.md) document if you <add>would like to use that as your port redirection reference instead. 
<add> <add>## <a name="docker0"></a>Customizing docker0 <add> <add>By default, the Docker server creates and configures the host system’s <add>`docker0` interface as an *Ethernet bridge* inside the Linux kernel that <add>can pass packets back and forth between other physical or virtual <add>network interfaces so that they behave as a single Ethernet network. <add> <add>Docker configures `docker0` with an IP address and netmask so the host <add>machine can both receive and send packets to containers connected to the <add>bridge, and gives it an MTU — the *maximum transmission unit* or largest <add>packet length that the interface will allow — of either 1,500 bytes or <add>else a more specific value copied from the Docker host’s interface that <add>supports its default route. Both are configurable at server startup: <add> <add> * `--bip=CIDR` — supply a specific IP address and netmask for the <add> `docker0` bridge, using standard CIDR notation like <add> `192.168.1.5/24`. <add> <add> * `--mtu=BYTES` — override the maximum packet length on `docker0`. <add> <add>On Ubuntu you would add these to the `DOCKER_OPTS` setting in <add>`/etc/default/docker` on your Docker host and restarting the Docker <add>service. <add> <add>Once you have one or more containers up and running, you can confirm <add>that Docker has properly connected them to the `docker0` bridge by <add>running the `brctl` command on the host machine and looking at the <add>`interfaces` column of the output. 
Here is a host with two different <add>containers connected: <ide> <del> # List host bridges <del> $ sudo brctl show <del> bridge name bridge id STP enabled interfaces <del> docker0 8000.000000000000 no <del> <del> # Show docker0 IP address <del> $ sudo ifconfig docker0 <del> docker0 Link encap:Ethernet HWaddr xx:xx:xx:xx:xx:xx <del> inet addr:172.17.42.1 Bcast:0.0.0.0 Mask:255.255.0.0 <del> <del>At runtime, a [*specific kind of virtual interface*](#vethxxxx-device) <del>is given to each container which is then bonded to the `docker0` bridge. <del>Each container also receives a dedicated IP address from the same range <del>as `docker0`. The `docker0` IP address is used as the default gateway <del>for the container. <del> <del> # Run a container <del> $ sudo docker run -t -i -d base /bin/bash <del> 52f811c5d3d69edddefc75aff5a4525fc8ba8bcfa1818132f9dc7d4f7c7e78b4 <add> # Display bridge info <ide> <ide> $ sudo brctl show <del> bridge name bridge id STP enabled interfaces <del> docker0 8000.fef213db5a66 no vethQCDY1N <del> <del>Above, `docker0` acts as a bridge for the `vethQCDY1N` interface which <del>is dedicated to the `52f811c5d3d6` container. <del> <del>## How to use a specific IP address range <del> <del>Docker will try hard to find an IP range that is not used by the host. <del>Even though it works for most cases, it's not bullet-proof and sometimes <del>you need to have more control over the IP addressing scheme. <del> <del>For this purpose, Docker allows you to manage the `docker0` bridge or <del>your own one using the `-b=<bridgename>` parameter. 
<del> <del>In this scenario: <add> bridge name bridge id STP enabled interfaces <add> docker0 8000.3a1d7362b4ee no veth65f9 <add> vethdda6 <ide> <del> - Ensure Docker is stopped <del> - Create your own bridge (`bridge0` for example) <del> - Assign a specific IP to this bridge <del> - Start Docker with the `-b=bridge0` parameter <add>If the `brctl` command is not installed on your Docker host, then on <add>Ubuntu you should be able to run `sudo apt-get install bridge-utils` to <add>install it. <ide> <del><!-- --> <add>Finally, the `docker0` Ethernet bridge settings are used every time you <add>create a new container. Docker selects a free IP address from the range <add>available on the bridge each time you `docker run` a new container, and <add>configures the container’s `eth0` interface with that IP address and the <add>bridge’s netmask. The Docker host’s own IP address on the bridge is <add>used as the default gateway by which each container reaches the rest of <add>the Internet. <ide> <del> # Stop Docker <del> $ sudo service docker stop <del> <del> # Clean docker0 bridge and <del> # add your very own bridge0 <del> $ sudo ifconfig docker0 down <del> $ sudo brctl addbr bridge0 <del> $ sudo ifconfig bridge0 192.168.227.1 netmask 255.255.255.0 <add> # The network, as seen from a container <ide> <del> # Edit your Docker startup file <del> $ echo "DOCKER_OPTS=\"-b=bridge0\"" >> /etc/default/docker <del> <del> # Start Docker <del> $ sudo service docker start <add> $ sudo docker run -i -t --rm base /bin/bash <ide> <del> # Ensure bridge0 IP is not changed by Docker <del> $ sudo ifconfig bridge0 <del> bridge0 Link encap:Ethernet HWaddr xx:xx:xx:xx:xx:xx <del> inet addr:192.168.227.1 Bcast:192.168.227.255 Mask:255.255.255.0 <add> $$ ip addr show eth0 <add> 24: eth0: <BROADCAST,UP,LOWER_UP> mtu 1500 qdisc pfifo_fast state UP group default qlen 1000 <add> link/ether 32:6f:e0:35:57:91 brd ff:ff:ff:ff:ff:ff <add> inet 172.17.0.3/16 scope global eth0 <add> valid_lft forever 
preferred_lft forever <add> inet6 fe80::306f:e0ff:fe35:5791/64 scope link <add> valid_lft forever preferred_lft forever <ide> <del> # Run a container <del> docker run -i -t base /bin/bash <add> $$ ip route <add> default via 172.17.42.1 dev eth0 <add> 172.17.0.0/16 dev eth0 proto kernel scope link src 172.17.0.3 <ide> <del> # Container IP in the 192.168.227/24 range <del> root@261c272cd7d5:/# ifconfig eth0 <del> eth0 Link encap:Ethernet HWaddr xx:xx:xx:xx:xx:xx <del> inet addr:192.168.227.5 Bcast:192.168.227.255 Mask:255.255.255.0 <add> $$ exit <ide> <del> # bridge0 IP as the default gateway <del> root@261c272cd7d5:/# route -n <del> Kernel IP routing table <del> Destination Gateway Genmask Flags Metric Ref Use Iface <del> 0.0.0.0 192.168.227.1 0.0.0.0 UG 0 0 0 eth0 <del> 192.168.227.0 0.0.0.0 255.255.255.0 U 0 0 0 eth0 <add>Remember that the Docker host will not be willing to forward container <add>packets out on to the Internet unless its `ip_forward` system setting is <add>`1` — see the section above on [Communication between <add>containers](#between-containers) for details. <ide> <del> # hits CTRL+P then CTRL+Q to detach <add>## <a name="bridge-building"></a>Building your own bridge <ide> <del> # Display bridge info <del> $ sudo brctl show <del> bridge name bridge id STP enabled interfaces <del> bridge0 8000.fe7c2e0faebd no vethAQI2QT <add>If you want to take Docker out of the business of creating its own <add>Ethernet bridge entirely, you can set up your own bridge before starting <add>Docker and use `-b BRIDGE` or `--bridge=BRIDGE` to tell Docker to use <add>your bridge instead. 
If you already have Docker up and running with its <add>old `bridge0` still configured, you will probably want to begin by <add>stopping the service and removing the interface: <ide> <del>## Container intercommunication <add> # Stopping Docker and removing docker0 <ide> <del>The value of the Docker daemon's `icc` parameter determines whether <del>containers can communicate with each other over the bridge network. <add> $ sudo service docker stop <add> $ sudo ip link set dev docker0 down <add> $ sudo brctl delbr docker0 <ide> <del> - The default, `-icc=true` allows containers to communicate with each other. <del> - `-icc=false` means containers are isolated from each other. <add>Then, before starting the Docker service, create your own bridge and <add>give it whatever configuration you want. Here we will create a simple <add>enough bridge that we really could just have used the options in the <add>previous section to customize `docker0`, but it will be enough to <add>illustrate the technique. <ide> <del>Docker uses `iptables` under the hood to either accept or drop <del>communication between containers. <add> # Create our own bridge <ide> <del>## What is the vethXXXX device? <add> $ sudo brctl addbr bridge0 <add> $ sudo ip addr add 192.168.5.1/24 dev bridge0 <add> $ sudo ip link set dev bridge0 up <ide> <del>Well. Things get complicated here. <add> # Confirming that our bridge is up and running <ide> <del>The `vethXXXX` interface is the host side of a point-to-point link <del>between the host and the corresponding container; the other side of the <del>link is the container's `eth0` interface. This pair (host `vethXXX` and <del>container `eth0`) are connected like a tube. Everything that comes in <del>one side will come out the other side. 
<add> $ ip addr show bridge0 <add> 4: bridge0: <BROADCAST,MULTICAST> mtu 1500 qdisc noop state UP group default <add> link/ether 66:38:d0:0d:76:18 brd ff:ff:ff:ff:ff:ff <add> inet 192.168.5.1/24 scope global bridge0 <add> valid_lft forever preferred_lft forever <ide> <del>All the plumbing is delegated to Linux network capabilities (check the <del>`ip link` command) and the namespaces infrastructure. <add> # Tell Docker about it and restart (on Ubuntu) <ide> <del>## I want more <add> $ echo 'DOCKER_OPTS="-b=bridge0"' >> /etc/default/docker <add> $ sudo service docker start <ide> <del>Jérôme Petazzoni has create `pipework` to connect together containers in <del>arbitrarily complex scenarios: <del>[https://github.com/jpetazzo/pipework](https://github.com/jpetazzo/pipework) <add>The result should be that the Docker server starts successfully and is <add>now prepared to bind containers to the new bridge. After pausing to <add>verify the bridge’s configuration, try creating a container — you will <add>see that its IP address is in your new IP address range, which Docker <add>will have auto-detected. <add> <add>Just as we learned in the previous section, you can use the `brctl show` <add>command to see Docker add and remove interfaces from the bridge as you <add>start and stop containers, and can run `ip addr` and `ip route` inside a <add>container to see that it has been given an address in the bridge’s IP <add>address range and has been told to use the Docker host’s IP address on <add>the bridge as its default gateway to the rest of the Internet. <add> <add>## <a name="container-networking"></a>How Docker networks a container <add> <add>While Docker is under active development and continues to tweak and <add>improve its network configuration logic, the shell commands in this <add>section are rough equivalents to the steps that Docker takes when <add>configuring networking for each new container. <add> <add>Let’s review a few basics. 
<add> <add>To communicate using the Internet Protocol (IP), a machine needs access <add>to at least one network interface at which packets can be sent and <add>received, and a routing table that defines the range of IP addresses <add>reachable through that interface. Network interfaces do not have to be <add>physical devices. In fact, the `lo` loopback interface available on <add>every Linux machine (and inside each Docker container) is entirely <add>virtual — the Linux kernel simply copies loopback packets directly from <add>the sender’s memory into the receiver’s memory. <add> <add>Docker uses special virtual interfaces to let containers communicate <add>with the host machine — pairs of virtual interfaces called “peers” that <add>are linked inside of the host machine’s kernel so that packets can <add>travel between them. They are simple to create, as we will see in a <add>moment. <add> <add>The steps with which Docker configures a container are: <add> <add>1. Create a pair of peer virtual interfaces. <add> <add>2. Give one of them a unique name like `veth65f9`, keep it inside of <add> the main Docker host, and bind it to `docker0` or whatever bridge <add> Docker is supposed to be using. <add> <add>3. Toss the other interface over the wall into the new container (which <add> will already have been provided with an `lo` interface) and rename <add> it to the much prettier name `eth0` since, inside of the container’s <add> separate and unique network interface namespace, there are no <add> physical interfaces with which this name could collide. <add> <add>4. Give the container’s `eth0` a new IP address from within the <add> bridge’s range of network addresses, and set its default route to <add> the IP address that the Docker host owns on the bridge. <add> <add>With these steps complete, the container now possesses an `eth0` <add>(virtual) network card and will find itself able to communicate with <add>other containers and the rest of the Internet. 
<add> <add>You can opt out of the above process for a particular container by <add>giving the `--net=` option to `docker run`, which takes four possible <add>values. <add> <add> * `--net=bridge` — The default action, that connects the container to <add> the Docker bridge as described above. <add> <add> * `--net=host` — Tells Docker to skip placing the container inside of <add> a separate network stack. In essence, this choice tells Docker to <add> **not containerize the container’s networking**! While container <add> processes will still be confined to their own filesystem and process <add> list and resource limits, a quick `ip addr` command will show you <add> that, network-wise, they live “outside” in the main Docker host and <add> have full access to its network interfaces. Note that this does <add> **not** let the container reconfigure the host network stack — that <add> would require `--privileged=true` — but it does let container <add> processes open low-numbered ports like any other root process. <add> <add> * `--net=container:NAME_or_ID` — Tells Docker to put this container’s <add> processes inside of the network stack that has already been created <add> inside of another container. The new container’s processes will be <add> confined to their own filesystem and process list and resource <add> limits, but will share the same IP address and port numbers as the <add> first container, and processes on the two containers will be able to <add> connect to each other over the loopback interface. <add> <add> * `--net=none` — Tells Docker to put the container inside of its own <add> network stack but not to take any steps to configure its network, <add> leaving you free to build any of the custom configurations explored <add> in the last few sections of this document. 
<add> <add>To get an idea of the steps that are necessary if you use `--net=none` <add>as described in that last bullet point, here are the commands that you <add>would run to reach roughly the same configuration as if you had let <add>Docker do all of the configuration: <add> <add> # At one shell, start a container and <add> # leave its shell idle and running <add> <add> $ sudo docker run -i -t --rm --net=none base /bin/bash <add> root@63f36fc01b5f:/# <add> <add> # At another shell, learn the container process ID <add> # and create its namespace entry in /var/run/netns/ <add> # for the "ip netns" command we will be using below <add> <add> $ sudo docker inspect -f '{{.State.Pid}}' 63f36fc01b5f <add> 2778 <add> $ pid=2778 <add> $ sudo mkdir -p /var/run/netns <add> $ sudo ln -s /proc/$pid/ns/net /var/run/netns/$pid <add> <add> # Check the bridge’s IP address and netmask <add> <add> $ ip addr show docker0 <add> 21: docker0: ... <add> inet 172.17.42.1/16 scope global docker0 <add> ... <add> <add> # Create a pair of "peer" interfaces A and B, <add> # bind the A end to the bridge, and bring it up <add> <add> $ sudo ip link add A type veth peer name B <add> $ sudo brctl addif docker0 A <add> $ sudo ip link set A up <add> <add> # Place B inside the container's network namespace, <add> # rename to eth0, and activate it with a free IP <add> <add> $ sudo ip link set B netns $pid <add> $ sudo ip netns exec $pid ip link set dev B name eth0 <add> $ sudo ip netns exec $pid ip link set eth0 up <add> $ sudo ip netns exec $pid ip addr add 172.17.42.99/16 dev eth0 <add> $ sudo ip netns exec $pid ip route add default via 172.17.42.1 <add> <add>At this point your container should be able to perform networking <add>operations as usual. 
<add> 
<add>When you finally exit the shell and Docker cleans up the container, the 
<add>network namespace is destroyed along with our virtual `eth0` — whose 
<add>destruction in turn destroys interface `A` out in the Docker host and 
<add>automatically un-registers it from the `docker0` bridge. So everything 
<add>gets cleaned up without our having to run any extra commands! Well, 
<add>almost everything: 
<add> 
<add>    # Clean up dangling symlinks in /var/run/netns 
<add> 
<add>    find -L /var/run/netns -type l -delete 
<add> 
<add>Also note that while the script above used the modern `ip` command instead 
<add>of old deprecated wrappers like `ifconfig` and `route`, these older 
<add>commands would also have worked inside of our container. The `ip addr` 
<add>command can be typed as `ip a` if you are in a hurry. 
<add> 
<add>Finally, note the importance of the `ip netns exec` command, which let 
<add>us reach inside and configure a network namespace as root. The same 
<add>commands would not have worked if run inside of the container, because 
<add>part of safe containerization is that Docker strips container processes 
<add>of the right to configure their own networks. Using `ip netns exec` is 
<add>what let us finish up the configuration without having to take the 
<add>dangerous step of running the container itself with `--privileged=true`. 
<add> 
<add>## Tools and Examples 
<add> 
<add>Before diving into the following sections on custom network topologies, 
<add>you might be interested in glancing at a few external tools or examples 
<add>of the same kinds of configuration. 
Here are two: 
<add> 
<add> * Jérôme Petazzoni has created a `pipework` shell script to help you 
<add>   connect together containers in arbitrarily complex scenarios: 
<add>   <https://github.com/jpetazzo/pipework> 
<add> 
<add> * Brandon Rhodes has created a whole network topology of Docker 
<add>   containers for the next edition of Foundations of Python Network 
<add>   Programming that includes routing, NAT’d firewalls, and servers that 
<add>   offer HTTP, SMTP, POP, IMAP, Telnet, SSH, and FTP: 
<add>   <https://github.com/brandon-rhodes/fopnp/tree/m/playground> 
<add> 
<add>Both tools use networking commands very much like the ones you saw in 
<add>the previous section, and will see in the following sections. 
<add> 
<add>## <a name="point-to-point"></a>Building a point-to-point connection 
<add> 
<add>By default, Docker attaches all containers to the virtual subnet 
<add>implemented by `docker0`. You can create containers that are each 
<add>connected to some different virtual subnet by creating your own bridge 
<add>as shown in [Building your own bridge](#bridge-building), starting each 
<add>container with `docker run --net=none`, and then attaching the 
<add>containers to your bridge with the shell commands shown in [How Docker 
<add>networks a container](#container-networking). 
<add> 
<add>But sometimes you want two particular containers to be able to 
<add>communicate directly without the added complexity of both being bound to 
<add>a host-wide Ethernet bridge. 
<add> 
<add>The solution is simple: when you create your pair of peer interfaces, 
<add>simply throw *both* of them into containers, and configure them as 
<add>classic point-to-point links. The two containers will then be able to 
<add>communicate directly (provided you manage to tell each container the 
<add>other’s IP address, of course). 
You might adjust the instructions of 
<add>the previous section to go something like this: 
<add> 
<add>    # Start up two containers in two terminal windows 
<add> 
<add>    $ sudo docker run -i -t --rm --net=none base /bin/bash 
<add>    root@1f1f4c1f931a:/# 
<add> 
<add>    $ sudo docker run -i -t --rm --net=none base /bin/bash 
<add>    root@12e343489d2f:/# 
<add> 
<add>    # Learn the container process IDs 
<add>    # and create their namespace entries 
<add> 
<add>    $ sudo docker inspect -f '{{.State.Pid}}' 1f1f4c1f931a 
<add>    2989 
<add>    $ sudo docker inspect -f '{{.State.Pid}}' 12e343489d2f 
<add>    3004 
<add>    $ sudo mkdir -p /var/run/netns 
<add>    $ sudo ln -s /proc/2989/ns/net /var/run/netns/2989 
<add>    $ sudo ln -s /proc/3004/ns/net /var/run/netns/3004 
<add> 
<add>    # Create the "peer" interfaces and hand them out 
<add> 
<add>    $ sudo ip link add A type veth peer name B 
<add> 
<add>    $ sudo ip link set A netns 2989 
<add>    $ sudo ip netns exec 2989 ip addr add 10.1.1.1/32 dev A 
<add>    $ sudo ip netns exec 2989 ip link set A up 
<add>    $ sudo ip netns exec 2989 ip route add 10.1.1.2/32 dev A 
<add> 
<add>    $ sudo ip link set B netns 3004 
<add>    $ sudo ip netns exec 3004 ip addr add 10.1.1.2/32 dev B 
<add>    $ sudo ip netns exec 3004 ip link set B up 
<add>    $ sudo ip netns exec 3004 ip route add 10.1.1.1/32 dev B 
<add> 
<add>The two containers should now be able to ping each other and make 
<add>connections successfully. Point-to-point links like this do not depend 
<add>on a subnet nor a netmask, but on the bare assertion made by `ip route` 
<add>that some other single IP address is connected to a particular network 
<add>interface. 
<add> 
<add>Note that point-to-point links can be safely combined with other kinds 
<add>of network connectivity — there is no need to start the containers with 
<add>`--net=none` if you want point-to-point links to be an addition to the 
<add>container’s normal networking instead of a replacement. 
<add> <add>A final permutation of this pattern is to create the point-to-point link <add>between the Docker host and one container, which would allow the host to <add>communicate with that one container on some single IP address and thus <add>communicate “out-of-band” of the bridge that connects the other, more <add>usual containers. But unless you have very specific networking needs <add>that drive you to such a solution, it is probably far preferable to use <add>`--icc=false` to lock down inter-container communication, as we explored <add>earlier.
1
Go
Go
fix root detection
0484b2c3254238a6c534f7d417c12c10b694f0d0
<ide><path>utils.go <ide> func RootIsShared() bool { <ide> if data, err := ioutil.ReadFile("/proc/self/mountinfo"); err == nil { <ide> for _, line := range strings.Split(string(data), "\n") { <ide> cols := strings.Split(line, " ") <del> if len(cols) >= 6 && cols[3] == "/" && cols[4] == "/" { <add> if len(cols) >= 6 && cols[4] == "/" { <ide> return strings.HasPrefix(cols[6], "shared") <ide> } <ide> }
1
Java
Java
improve error handling when response is committed
dd0d270ba26e163d41fd2b06e1ea13afe1ca6aac
<ide><path>spring-web/src/main/java/org/springframework/web/server/adapter/HttpWebHandlerAdapter.java <ide> public class HttpWebHandlerAdapter extends WebHandlerDecorator implements HttpHa <ide> * <p>TODO: <ide> * This definition is currently duplicated between HttpWebHandlerAdapter <ide> * and AbstractSockJsSession. It is a candidate for a common utility class. <del> * @see #indicatesDisconnectedClient(Throwable) <add> * @see #isDisconnectedClientError(Throwable) <ide> */ <ide> private static final Set<String> DISCONNECTED_CLIENT_EXCEPTIONS = <ide> new HashSet<>(Arrays.asList("ClientAbortException", "EOFException", "EofException")); <ide> public Mono<Void> handle(ServerHttpRequest request, ServerHttpResponse response) <ide> ServerWebExchange exchange = createExchange(request, response); <ide> return getDelegate().handle(exchange) <ide> .onErrorResume(ex -> { <del> response.setStatusCode(HttpStatus.INTERNAL_SERVER_ERROR); <del> logHandleFailure(ex); <add> handleFailure(response, ex); <ide> return Mono.empty(); <ide> }) <ide> .then(Mono.defer(response::setComplete)); <ide> protected ServerWebExchange createExchange(ServerHttpRequest request, ServerHttp <ide> getCodecConfigurer(), getLocaleContextResolver()); <ide> } <ide> <del> private void logHandleFailure(Throwable ex) { <del> if (indicatesDisconnectedClient(ex)) { <add> private void handleFailure(ServerHttpResponse response, Throwable ex) { <add> boolean statusCodeChanged = response.setStatusCode(HttpStatus.INTERNAL_SERVER_ERROR); <add> if (isDisconnectedClientError(ex)) { <ide> if (disconnectedClientLogger.isTraceEnabled()) { <ide> disconnectedClientLogger.trace("Looks like the client has gone away", ex); <ide> } <ide> else if (disconnectedClientLogger.isDebugEnabled()) { <ide> "' to TRACE level.)"); <ide> } <ide> } <add> else if (!statusCodeChanged) { <add> logger.error("Unhandled failure: " + ex.getMessage() + ", " + <add> "response already committed with status=" + response.getStatusCode()); <add> } <ide> 
else { <ide> logger.error("Failed to handle request", ex); <ide> } <ide> } <ide> <del> private boolean indicatesDisconnectedClient(Throwable ex) { <add> private boolean isDisconnectedClientError(Throwable ex) { <ide> String message = NestedExceptionUtils.getMostSpecificCause(ex).getMessage(); <ide> message = (message != null ? message.toLowerCase() : ""); <ide> String className = ex.getClass().getSimpleName(); <ide><path>spring-web/src/main/java/org/springframework/web/server/handler/ResponseStatusExceptionHandler.java <ide> import org.apache.commons.logging.LogFactory; <ide> import reactor.core.publisher.Mono; <ide> <add>import org.springframework.http.HttpStatus; <ide> import org.springframework.web.server.ResponseStatusException; <ide> import org.springframework.web.server.ServerWebExchange; <ide> import org.springframework.web.server.WebExceptionHandler; <ide> public class ResponseStatusExceptionHandler implements WebExceptionHandler { <ide> @Override <ide> public Mono<Void> handle(ServerWebExchange exchange, Throwable ex) { <ide> if (ex instanceof ResponseStatusException) { <del> // Response may be committed but we'll try.. 
<del> logger.debug(ex.getMessage()); <del> exchange.getResponse().setStatusCode(((ResponseStatusException) ex).getStatus()); <del> return exchange.getResponse().setComplete(); <add> HttpStatus status = ((ResponseStatusException) ex).getStatus(); <add> if (exchange.getResponse().setStatusCode(status)) { <add> logger.trace(ex.getMessage()); <add> return exchange.getResponse().setComplete(); <add> } <ide> } <ide> return Mono.error(ex); <ide> } <ide><path>spring-web/src/test/java/org/springframework/web/server/handler/ResponseStatusExceptionHandlerTests.java <ide> public void unresolvedException() throws Exception { <ide> public void responseCommitted() throws Exception { <ide> Throwable ex = new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "Oops"); <ide> this.exchange.getResponse().setStatusCode(HttpStatus.CREATED); <del> this.exchange.getResponse().setComplete() <del> .then(this.handler.handle(this.exchange, ex)) <del> .block(Duration.ofSeconds(5)); <del> assertEquals(HttpStatus.CREATED, this.exchange.getResponse().getStatusCode()); <add> Mono<Void> mono = this.exchange.getResponse().setComplete() <add> .then(this.handler.handle(this.exchange, ex)); <add> StepVerifier.create(mono).consumeErrorWith(actual -> assertSame(ex, actual)).verify(); <ide> } <ide> <ide> }
3
Mixed
Python
add trim_whitespace to charfield
7f801b9a01fa7df3b081ddec803bd0d34cc3b35b
<ide><path>docs/api-guide/fields.md <ide> A text representation. Optionally validates the text to be shorter than `max_len <ide> <ide> Corresponds to `django.db.models.fields.CharField` or `django.db.models.fields.TextField`. <ide> <del>**Signature:** `CharField(max_length=None, min_length=None, allow_blank=False)` <add>**Signature:** `CharField(max_length=None, min_length=None, allow_blank=False, trim_whitespace=True)` <ide> <ide> - `max_length` - Validates that the input contains no more than this number of characters. <ide> - `min_length` - Validates that the input contains no fewer than this number of characters. <ide> - `allow_blank` - If set to `True` then the empty string should be considered a valid value. If set to `False` then the empty string is considered invalid and will raise a validation error. Defaults to `False`. <add>- `trim_whitespace` - If set to `True` then leading and trailing whitespace is trimmed. Defaults to `True`. <ide> <ide> The `allow_null` option is also available for string fields, although its usage is discouraged in favor of `allow_blank`. It is valid to set both `allow_blank=True` and `allow_null=True`, but doing so means that there will be two differing types of empty value permissible for string representations, which can lead to data inconsistencies and subtle application bugs. <ide> <ide> As an example, let's create a field that can be used represent the class name of <ide> # We pass the object instance onto `to_representation`, <ide> # not just the field attribute. <ide> return obj <del> <add> <ide> def to_representation(self, obj): <ide> """ <ide> Serialize the object's class name. 
<ide><path>rest_framework/fields.py <ide> class CharField(Field): <ide> <ide> def __init__(self, **kwargs): <ide> self.allow_blank = kwargs.pop('allow_blank', False) <add> self.trim_whitespace = kwargs.pop('trim_whitespace', True) <ide> max_length = kwargs.pop('max_length', None) <ide> min_length = kwargs.pop('min_length', None) <ide> super(CharField, self).__init__(**kwargs) <ide> def run_validation(self, data=empty): <ide> return super(CharField, self).run_validation(data) <ide> <ide> def to_internal_value(self, data): <del> return six.text_type(data) <add> value = six.text_type(data) <add> <add> if self.trim_whitespace: <add> return value.strip() <add> <add> return value <ide> <ide> def to_representation(self, value): <del> return six.text_type(value) <add> representation = six.text_type(value) <add> <add> if self.trim_whitespace: <add> return representation.strip() <add> <add> return representation <ide> <ide> <ide> class EmailField(CharField): <ide><path>tests/test_fields.py <ide> class TestCharField(FieldValues): <ide> } <ide> field = serializers.CharField() <ide> <add> def test_trim_whitespace_default(self): <add> field = serializers.CharField() <add> assert field.to_representation(' abc ') == 'abc' <add> <add> def test_trim_whitespace_disabled(self): <add> field = serializers.CharField(trim_whitespace=False) <add> assert field.to_representation(' abc ') == ' abc ' <add> <ide> <ide> class TestEmailField(FieldValues): <ide> """
3
Javascript
Javascript
add new limit for size
f9efe05d4b2a016e96b48596989ff4d8cbf1095a
<ide><path>test/integration/size-limit/test/index.test.js <ide> describe('Production response size', () => { <ide> console.log(`Response Sizes:\n${responseSizes.map(obj => ` ${obj.url}: ${obj.bytes} (bytes)`).join('\n')} \nOverall: ${responseSizeKilobytes} KB`) <ide> <ide> // These numbers are without gzip compression! <del> expect(responseSizeKilobytes).toBeLessThanOrEqual(212) // Kilobytes <add> expect(responseSizeKilobytes).toBeLessThanOrEqual(216) // Kilobytes <ide> }) <ide> })
1
Javascript
Javascript
add the function setmaterialindex to geometryutils
a81dfc9c99a54cbbb0cdb8bbfbfac2f28725879b
<ide><path>src/extras/GeometryUtils.js <ide> THREE.GeometryUtils = { <ide> geometry.faces = faces; <ide> geometry.faceVertexUvs = faceVertexUvs; <ide> <del> } <add> }, <add> <add> setMaterialIndex: function ( geometry, index, startFace, endFace ){ <add> <add> var faces = geometry.faces; <add> var start = startFace || 0; <add> var end = endFace || faces.length - 1; <add> <add> for ( var i = start; i <= end; i ++ ) { <add> <add> faces[i].materialIndex = index; <add> <add> } <add> <add> } <ide> <ide> }; <ide>
1
PHP
PHP
fix boolean value in assertsessionhaserrors
80c39186b4e6875fe5c78342c63b5d9334e0b22d
<ide><path>src/Illuminate/Foundation/Testing/TestResponse.php <ide> public function assertSessionHasErrors($keys = [], $format = null, $errorBag = ' <ide> if (is_int($key)) { <ide> PHPUnit::assertTrue($errors->has($value), "Session missing error: $value"); <ide> } else { <add> if (is_bool($value)) { <add> $value = (string) $value; <add> } <add> <ide> PHPUnit::assertContains($value, $errors->get($key, $format)); <ide> } <ide> }
1
Python
Python
remove detr building
323ea8979081e131b70c6649ce91b0a6ba8a9c6a
<ide><path>research/object_detection/core/target_assigner.py <ide> def create_target_assigner(reference, stage=None, <ide> use_matmul_gather=use_matmul_gather) <ide> box_coder_instance = faster_rcnn_box_coder.FasterRcnnBoxCoder() <ide> <del> elif reference == 'DETR': <del> return DETRTargetAssigner() <del> <ide> else: <ide> raise ValueError('No valid combination of reference and stage.') <ide>
1
Javascript
Javascript
change var to let in lib/_stream_duplex.js
7f50839f7de0fc5ff86093f56bebcc44ad5a3ac5
<ide><path>lib/_stream_duplex.js <ide> Object.setPrototypeOf(Duplex, Readable); <ide> { <ide> // Allow the keys array to be GC'ed. <ide> const keys = Object.keys(Writable.prototype); <del> for (var v = 0; v < keys.length; v++) { <add> for (let v = 0; v < keys.length; v++) { <ide> const method = keys[v]; <ide> if (!Duplex.prototype[method]) <ide> Duplex.prototype[method] = Writable.prototype[method];
1
Javascript
Javascript
fix a typo
d430e1358227f316ef41650c8e1b9674de11ab84
<ide><path>packages/react-test-renderer/src/__tests__/ReactShallowRenderer-test.js <ide> describe('ReactShallowRenderer', () => { <ide> ]); <ide> }); <ide> <del> it('should stop the upade when setState returns null or undefined', () => { <add> it('should stop the update when setState returns null or undefined', () => { <ide> const log = []; <ide> let instance; <ide> class Component extends React.Component {
1
Python
Python
add mysql to bq support for tinyint
ad4faf6e2ad0cd99097d3f0342581fd793a85add
<ide><path>airflow/contrib/operators/mysql_to_gcs.py <ide> def type_map(cls, mysql_type): <ide> when a schema_filename is set. <ide> """ <ide> d = { <add> FIELD_TYPE.TINY: 'INTEGER', <ide> FIELD_TYPE.BIT: 'INTEGER', <ide> FIELD_TYPE.DATETIME: 'TIMESTAMP', <ide> FIELD_TYPE.DECIMAL: 'FLOAT',
1
Ruby
Ruby
update associations.rb api documentation [ci skip]
9cafaadc0cce68143bb7390f9b147ed23da38737
<ide><path>activerecord/lib/active_record/associations.rb <ide> def has_one(name, scope = nil, **options) <ide> # Returns the associated object. +nil+ is returned if none is found. <ide> # [association=(associate)] <ide> # Assigns the associate object, extracts the primary key, and sets it as the foreign key. <add> # No modification or deletion of existing records takes place. <ide> # [build_association(attributes = {})] <ide> # Returns a new object of the associated type that has been instantiated <ide> # with +attributes+ and linked to this object through a foreign key, but has not yet been saved.
1
Go
Go
define all of the signals defined by kill -l
d1ae13b0b06c109fb7c62bf86f3a9337bca630b5
<ide><path>pkg/signal/signal_linux.go <ide> import ( <ide> "syscall" <ide> ) <ide> <add>const ( <add> sigrtmin = 34 <add> sigrtmax = 64 <add>) <add> <ide> // SignalMap is a map of Linux signals. <ide> var SignalMap = map[string]syscall.Signal{ <del> "ABRT": syscall.SIGABRT, <del> "ALRM": syscall.SIGALRM, <del> "BUS": syscall.SIGBUS, <del> "CHLD": syscall.SIGCHLD, <del> "CLD": syscall.SIGCLD, <del> "CONT": syscall.SIGCONT, <del> "FPE": syscall.SIGFPE, <del> "HUP": syscall.SIGHUP, <del> "ILL": syscall.SIGILL, <del> "INT": syscall.SIGINT, <del> "IO": syscall.SIGIO, <del> "IOT": syscall.SIGIOT, <del> "KILL": syscall.SIGKILL, <del> "PIPE": syscall.SIGPIPE, <del> "POLL": syscall.SIGPOLL, <del> "PROF": syscall.SIGPROF, <del> "PWR": syscall.SIGPWR, <del> "QUIT": syscall.SIGQUIT, <del> "SEGV": syscall.SIGSEGV, <del> "STKFLT": syscall.SIGSTKFLT, <del> "STOP": syscall.SIGSTOP, <del> "SYS": syscall.SIGSYS, <del> "TERM": syscall.SIGTERM, <del> "TRAP": syscall.SIGTRAP, <del> "TSTP": syscall.SIGTSTP, <del> "TTIN": syscall.SIGTTIN, <del> "TTOU": syscall.SIGTTOU, <del> "UNUSED": syscall.SIGUNUSED, <del> "URG": syscall.SIGURG, <del> "USR1": syscall.SIGUSR1, <del> "USR2": syscall.SIGUSR2, <del> "VTALRM": syscall.SIGVTALRM, <del> "WINCH": syscall.SIGWINCH, <del> "XCPU": syscall.SIGXCPU, <del> "XFSZ": syscall.SIGXFSZ, <add> "ABRT": syscall.SIGABRT, <add> "ALRM": syscall.SIGALRM, <add> "BUS": syscall.SIGBUS, <add> "CHLD": syscall.SIGCHLD, <add> "CLD": syscall.SIGCLD, <add> "CONT": syscall.SIGCONT, <add> "FPE": syscall.SIGFPE, <add> "HUP": syscall.SIGHUP, <add> "ILL": syscall.SIGILL, <add> "INT": syscall.SIGINT, <add> "IO": syscall.SIGIO, <add> "IOT": syscall.SIGIOT, <add> "KILL": syscall.SIGKILL, <add> "PIPE": syscall.SIGPIPE, <add> "POLL": syscall.SIGPOLL, <add> "PROF": syscall.SIGPROF, <add> "PWR": syscall.SIGPWR, <add> "QUIT": syscall.SIGQUIT, <add> "SEGV": syscall.SIGSEGV, <add> "STKFLT": syscall.SIGSTKFLT, <add> "STOP": syscall.SIGSTOP, <add> "SYS": syscall.SIGSYS, <add> "TERM": 
syscall.SIGTERM, <add> "TRAP": syscall.SIGTRAP, <add> "TSTP": syscall.SIGTSTP, <add> "TTIN": syscall.SIGTTIN, <add> "TTOU": syscall.SIGTTOU, <add> "UNUSED": syscall.SIGUNUSED, <add> "URG": syscall.SIGURG, <add> "USR1": syscall.SIGUSR1, <add> "USR2": syscall.SIGUSR2, <add> "VTALRM": syscall.SIGVTALRM, <add> "WINCH": syscall.SIGWINCH, <add> "XCPU": syscall.SIGXCPU, <add> "XFSZ": syscall.SIGXFSZ, <add> "RTMIN": sigrtmin, <add> "RTMIN+1": sigrtmin + 1, <add> "RTMIN+2": sigrtmin + 2, <add> "RTMIN+3": sigrtmin + 3, <add> "RTMIN+4": sigrtmin + 4, <add> "RTMIN+5": sigrtmin + 5, <add> "RTMIN+6": sigrtmin + 6, <add> "RTMIN+7": sigrtmin + 7, <add> "RTMIN+8": sigrtmin + 8, <add> "RTMIN+9": sigrtmin + 9, <add> "RTMIN+10": sigrtmin + 10, <add> "RTMIN+11": sigrtmin + 11, <add> "RTMIN+12": sigrtmin + 12, <add> "RTMIN+13": sigrtmin + 13, <add> "RTMIN+14": sigrtmin + 14, <add> "RTMIN+15": sigrtmin + 15, <add> "RTMAX-14": sigrtmax - 14, <add> "RTMAX-13": sigrtmax - 13, <add> "RTMAX-12": sigrtmax - 12, <add> "RTMAX-11": sigrtmax - 11, <add> "RTMAX-10": sigrtmax - 10, <add> "RTMAX-9": sigrtmax - 9, <add> "RTMAX-8": sigrtmax - 8, <add> "RTMAX-7": sigrtmax - 7, <add> "RTMAX-6": sigrtmax - 6, <add> "RTMAX-5": sigrtmax - 5, <add> "RTMAX-4": sigrtmax - 4, <add> "RTMAX-3": sigrtmax - 3, <add> "RTMAX-2": sigrtmax - 2, <add> "RTMAX-1": sigrtmax - 1, <add> "RTMAX": sigrtmax, <ide> }
1
Ruby
Ruby
fix indentation [ci skip]
8df1e6333f8fb53ba7d044a1a06ff7e7366005ea
<ide><path>railties/lib/rails/generators/app_base.rb <ide> def set_default_accessors! <ide> <ide> def database_gemfile_entry <ide> return [] if options[:skip_active_record] <del> GemfileEntry.version gem_for_database, nil, <del> "Use #{options[:database]} as the database for Active Record" <add> GemfileEntry.version gem_for_database, nil, <add> "Use #{options[:database]} as the database for Active Record" <ide> end <ide> <ide> def include_all_railties?
1
Ruby
Ruby
fix typo in test name
5cdeb5ef7db1bab944ff0ee2d64c6c896758bac6
<ide><path>activesupport/test/spec_type_test.rb <ide> class SomeRandomModel < ActiveRecord::Base; end <ide> <ide> class SpecTypeTest < ActiveSupport::TestCase <del> <ide> def assert_support actual <ide> assert_equal ActiveSupport::TestCase, actual <ide> end <ide> def assert_spec actual <ide> assert_equal MiniTest::Spec, actual <ide> end <ide> <del> def test_spec_type_resolves_for_actitive_record_constants <add> def test_spec_type_resolves_for_active_record_constants <ide> assert_support MiniTest::Spec.spec_type(SomeRandomModel) <ide> end <ide> <ide><path>activesupport/test/testing/constant_lookup_test.rb <ide> require 'abstract_unit' <ide> <ide> class Foo; end <del>class Bar < Foo; <add>class Bar < Foo <ide> def index; end <ide> def self.index; end <ide> end
2
Javascript
Javascript
fix lgtm error
5cb6d53903436d5259568d5ddd5ac929c92f94a3
<ide><path>examples/js/loaders/AssimpLoader.js <ide> THREE.AssimpLoader.prototype = { <ide> <ide> } <ide> <del> function aiColor4D() { <del> <del> this.r = 0; <del> this.g = 0; <del> this.b = 0; <del> this.a = 0; <del> this.toTHREE = function () { <del> <del> return new THREE.Color( this.r, this.g, this.b, this.a ); <del> <del> }; <del> <del> } <del> <ide> function aiColor3D() { <ide> <ide> this.r = 0; <ide> THREE.AssimpLoader.prototype = { <ide> this.a = 0; <ide> this.toTHREE = function () { <ide> <del> return new THREE.Color( this.r, this.g, this.b, 1 ); <add> return new THREE.Color( this.r, this.g, this.b ); <ide> <ide> }; <ide> <ide> THREE.AssimpLoader.prototype = { <ide> <ide> } <ide> <del> function Read_aiColor4D( stream ) { <del> <del> var c = new aiColor4D(); <del> c.r = readFloat( stream ); <del> c.g = readFloat( stream ); <del> c.b = readFloat( stream ); <del> c.a = readFloat( stream ); <del> return c; <del> <del> } <del> <ide> function Read_aiQuaternion( stream ) { <ide> <ide> var v = new aiQuaternion(); <ide> THREE.AssimpLoader.prototype = { <ide> <ide> } <ide> <del> function ReadArray_aiColor4D( stream, data, size ) { <del> <del> for ( var i = 0; i < size; i ++ ) data[ i ] = Read_aiColor4D( stream ); <del> <del> } <del> <ide> function ReadArray_aiVectorKey( stream, data, size ) { <ide> <ide> for ( var i = 0; i < size; i ++ ) data[ i ] = Read_aiVectorKey( stream ); <ide><path>examples/jsm/loaders/AssimpLoader.js <ide> AssimpLoader.prototype = { <ide> <ide> } <ide> <del> function aiColor4D() { <del> <del> this.r = 0; <del> this.g = 0; <del> this.b = 0; <del> this.a = 0; <del> this.toTHREE = function () { <del> <del> return new Color( this.r, this.g, this.b, this.a ); <del> <del> }; <del> <del> } <del> <ide> function aiColor3D() { <ide> <ide> this.r = 0; <ide> AssimpLoader.prototype = { <ide> <ide> } <ide> <del> function Read_aiColor4D( stream ) { <del> <del> var c = new aiColor4D(); <del> c.r = readFloat( stream ); <del> c.g = readFloat( stream ); <del> 
c.b = readFloat( stream ); <del> c.a = readFloat( stream ); <del> return c; <del> <del> } <del> <ide> function Read_aiQuaternion( stream ) { <ide> <ide> var v = new aiQuaternion(); <ide> AssimpLoader.prototype = { <ide> <ide> } <ide> <del> function ReadArray_aiColor4D( stream, data, size ) { <del> <del> for ( var i = 0; i < size; i ++ ) data[ i ] = Read_aiColor4D( stream ); <del> <del> } <del> <ide> function ReadArray_aiVectorKey( stream, data, size ) { <ide> <ide> for ( var i = 0; i < size; i ++ ) data[ i ] = Read_aiVectorKey( stream );
2
Python
Python
fix debug message
177c534254a52a3c5d6db176bc49701a030fb4f1
<ide><path>glances/plugins/glances_cloud.py <ide> def run(self): <ide> logger.debug('cloud plugin - Connection to {} timed out'.format(r_url)) <ide> break <ide> except Exception as e: <del> logger.debug('cloud plugin - Can not connect to the AWS EC2 API {}'.format(r_url, e)) <add> logger.debug('cloud plugin - Cannot connect to the AWS EC2 API {}: {}'.format(r_url, e)) <ide> break <ide> else: <ide> if r.ok:
1
Text
Text
add bullet for universal in list of examples
3873c631204c923efd31929eba91a417c237db0e
<ide><path>README.md <ide> It helps you write applications that behave consistently, run in different envir <ide> You can use Redux together with [React](https://facebook.github.io/react/), or with any other view library. <ide> It is tiny (2kB) and has no dependencies. <ide> <del>[![build status](https://img.shields.io/travis/rackt/redux/master.svg?style=flat-square)](https://travis-ci.org/rackt/redux) <del>[![npm version](https://img.shields.io/npm/v/redux.svg?style=flat-square)](https://www.npmjs.com/package/redux) <add>[![build status](https://img.shields.io/travis/rackt/redux/master.svg?style=flat-square)](https://travis-ci.org/rackt/redux) <add>[![npm version](https://img.shields.io/npm/v/redux.svg?style=flat-square)](https://www.npmjs.com/package/redux) <ide> [![npm downloads](https://img.shields.io/npm/dm/redux.svg?style=flat-square)](https://www.npmjs.com/package/redux) <ide> [![redux channel on slack](https://img.shields.io/badge/slack-redux@reactiflux-61DAFB.svg?style=flat-square)](http://www.reactiflux.com) <ide> <ide> For PDF, ePub, and MOBI exports for offline reading, and instructions on how to <ide> * [Counter](http://rackt.github.io/redux/docs/introduction/Examples.html#counter) ([source](https://github.com/rackt/redux/tree/master/examples/counter)) <ide> * [TodoMVC](http://rackt.github.io/redux/docs/introduction/Examples.html#todomvc) ([source](https://github.com/rackt/redux/tree/master/examples/todomvc)) <ide> * [Async](http://rackt.github.io/redux/docs/introduction/Examples.html#async) ([source](https://github.com/rackt/redux/tree/master/examples/async)) <add>* [Universal](http://rackt.github.io/redux/docs/introduction/Examples.html#universal) ([source](https://github.com/rackt/redux/tree/master/examples/universal)) <ide> * [Real World](http://rackt.github.io/redux/docs/introduction/Examples.html#real-world) ([source](https://github.com/rackt/redux/tree/master/examples/real-world)) <ide> <ide> If you’re new to the NPM ecosystem and have troubles 
getting a project up and running, or aren’t sure where to paste the gist above, check out [simplest-redux-example](https://github.com/jackielii/simplest-redux-example) that uses Redux together with React and Browserify.
1
Ruby
Ruby
add basic cron support
302975829db019bdcd33f1662dbe04a765b08d5f
<ide><path>Library/Homebrew/service.rb <ide> def run_type(value = nil) <ide> case T.unsafe(value) <ide> when nil <ide> @run_type <del> when :immediate, :interval <add> when :immediate, :interval, :cron <ide> @run_type = value <del> when :cron <del> raise TypeError, "Service#run_type does not support cron" <ide> when Symbol <ide> raise TypeError, "Service#run_type allows: '#{RUN_TYPE_IMMEDIATE}'/'#{RUN_TYPE_INTERVAL}'/'#{RUN_TYPE_CRON}'" <ide> else <ide> def interval(value = nil) <ide> end <ide> end <ide> <add> sig { params(value: T.nilable(String)).returns(T.nilable(Hash)) } <add> def cron(value = nil) <add> case T.unsafe(value) <add> when nil <add> @cron <add> when String <add> @cron = parse_cron(T.must(value)) <add> else <add> raise TypeError, "Service#cron expects a String" <add> end <add> end <add> <add> sig { returns(T::Hash[Symbol, T.any(Integer, String)]) } <add> def default_cron_values <add> { <add> Month: "*", <add> Day: "*", <add> Weekday: "*", <add> Hour: "*", <add> Minute: "*", <add> } <add> end <add> <add> sig { params(cron_statement: String).returns(T::Hash[Symbol, T.any(Integer, String)]) } <add> def parse_cron(cron_statement) <add> parsed = default_cron_values <add> <add> case cron_statement <add> when "@hourly" <add> parsed[:Minute] = 0 <add> when "@daily" <add> parsed[:Minute] = 0 <add> parsed[:Hour] = 0 <add> when "@weekly" <add> parsed[:Minute] = 0 <add> parsed[:Hour] = 0 <add> parsed[:Weekday] = 0 <add> when "@monthly" <add> parsed[:Minute] = 0 <add> parsed[:Hour] = 0 <add> parsed[:Day] = 1 <add> when "@yearly", "@annually" <add> parsed[:Minute] = 0 <add> parsed[:Hour] = 0 <add> parsed[:Day] = 1 <add> parsed[:Month] = 1 <add> else <add> cron_parts = cron_statement.split <add> raise TypeError, "Service#parse_cron expects a valid cron syntax" if cron_parts.length != 5 <add> <add> [:Minute, :Hour, :Day, :Month, :Weekday].each_with_index do |selector, index| <add> parsed[selector] = Integer(cron_parts.fetch(index)) if cron_parts.fetch(index) != "*" 
<add> end <add> end <add> <add> parsed <add> end <add> <ide> sig { params(variables: T::Hash[String, String]).returns(T.nilable(T::Hash[String, String])) } <ide> def environment_variables(variables = {}) <ide> case T.unsafe(variables) <ide> def to_plist <ide> base[:StandardErrorPath] = @error_log_path if @error_log_path.present? <ide> base[:EnvironmentVariables] = @environment_variables unless @environment_variables.empty? <ide> <add> if @cron.present? && @run_type == RUN_TYPE_CRON <add> base[:StartCalendarInterval] = @cron.reject { |_, value| value == "*" } <add> end <add> <ide> base.to_plist <ide> end <ide> <ide> def to_systemd_timer <ide> <ide> instance_eval(&@service_block) <ide> options = [] <del> options << "Persistent=true=" if @run_type == RUN_TYPE_CRON <add> options << "Persistent=true" if @run_type == RUN_TYPE_CRON <ide> options << "OnUnitActiveSec=#{@interval}" if @run_type == RUN_TYPE_INTERVAL <ide> <add> if @run_type == RUN_TYPE_CRON <add> minutes = @cron[:Minute] == "*" ? "*" : format("%02d", @cron[:Minute]) <add> hours = @cron[:Hour] == "*" ? 
"*" : format("%02d", @cron[:Hour]) <add> options << "OnCalendar=#{@cron[:Weekday]}-*-#{@cron[:Month]}-#{@cron[:Day]} #{hours}:#{minutes}:00" <add> end <add> <ide> timer + options.join("\n") <ide> end <ide> end <ide><path>Library/Homebrew/test/service_spec.rb <ide> end <ide> end <ide> <del> describe "#run_type" do <del> it "throws for cron type" do <add> describe "#process_type" do <add> it "throws for unexpected type" do <ide> f.class.service do <ide> run opt_bin/"beanstalkd" <del> run_type :cron <add> process_type :cow <ide> end <ide> <del> expect { f.service.manual_command }.to raise_error TypeError, "Service#run_type does not support cron" <add> expect { <add> f.service.manual_command <add> }.to raise_error TypeError, "Service#process_type allows: 'background'/'standard'/'interactive'/'adaptive'" <ide> end <add> end <ide> <add> describe "#run_type" do <ide> it "throws for unexpected type" do <ide> f.class.service do <ide> run opt_bin/"beanstalkd" <ide> EOS <ide> expect(plist).to eq(plist_expect) <ide> end <add> <add> it "returns valid cron plist" do <add> f.class.service do <add> run opt_bin/"beanstalkd" <add> run_type :cron <add> cron "@daily" <add> end <add> <add> plist = f.service.to_plist <add> plist_expect = <<~EOS <add> <?xml version="1.0" encoding="UTF-8"?> <add> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <add> <plist version="1.0"> <add> <dict> <add> \t<key>Label</key> <add> \t<string>homebrew.mxcl.formula_name</string> <add> \t<key>ProgramArguments</key> <add> \t<array> <add> \t\t<string>#{HOMEBREW_PREFIX}/opt/formula_name/bin/beanstalkd</string> <add> \t</array> <add> \t<key>RunAtLoad</key> <add> \t<false/> <add> \t<key>StartCalendarInterval</key> <add> \t<dict> <add> \t\t<key>Hour</key> <add> \t\t<integer>0</integer> <add> \t\t<key>Minute</key> <add> \t\t<integer>0</integer> <add> \t</dict> <add> </dict> <add> </plist> <add> EOS <add> expect(plist).to eq(plist_expect) <add> end <ide> end 
<ide> <ide> describe "#to_systemd_unit" do <ide> EOS <ide> expect(unit).to eq(unit_expect) <ide> end <add> <add> it "throws on incomplete cron" do <add> f.class.service do <add> run opt_bin/"beanstalkd" <add> run_type :cron <add> cron "1 2 3 4" <add> end <add> <add> expect { <add> f.service.to_systemd_timer <add> }.to raise_error TypeError, "Service#parse_cron expects a valid cron syntax" <add> end <add> <add> it "returns valid cron timers" do <add> styles = { <add> "@hourly": "*-*-*-* *:00:00", <add> "@daily": "*-*-*-* 00:00:00", <add> "@weekly": "0-*-*-* 00:00:00", <add> "@monthly": "*-*-*-1 00:00:00", <add> "@yearly": "*-*-1-1 00:00:00", <add> "@annually": "*-*-1-1 00:00:00", <add> "5 5 5 5 5": "5-*-5-5 05:05:00", <add> } <add> <add> styles.each do |cron, calendar| <add> f.class.service do <add> run opt_bin/"beanstalkd" <add> run_type :cron <add> cron cron.to_s <add> end <add> <add> unit = f.service.to_systemd_timer <add> unit_expect = <<~EOS <add> [Unit] <add> Description=Homebrew generated timer for formula_name <add> <add> [Install] <add> WantedBy=timers.target <add> <add> [Timer] <add> Unit=homebrew.formula_name <add> Persistent=true <add> OnCalendar=#{calendar} <add> EOS <add> expect(unit).to eq(unit_expect.chomp) <add> end <add> end <ide> end <ide> <ide> describe "#timed?" do
2
Ruby
Ruby
reword the docs for association_foreign_key
2db4ec9a58f841e8f380d9b18f27b58bff31a339
<ide><path>activerecord/lib/active_record/associations.rb <ide> def belongs_to(association_id, options = {}) <ide> # <b>WARNING:</b> If you're overwriting the table name of either class, the +table_name+ method <ide> # MUST be declared underneath any +has_and_belongs_to_many+ declaration in order to work. <ide> # [:foreign_key] <del> # Specify the foreign key used for the association (on "this side" of association). <del> # By default this is guessed to be the name of this class in lower-case and "_id" suffixed. <del> # So a Person class that makes a +has_and_belongs_to_many+ association to Project <del> # will use "person_id" as the default <tt>:foreign_key</tt>. <add> # Specify the foreign key used for the association. By default this is guessed to be the name <add> # of this class in lower-case and "_id" suffixed. So a Person class that makes a +has_and_belongs_to_many+ association <add> # to Project will use "person_id" as the default <tt>:foreign_key</tt>. <ide> # [:association_foreign_key] <del> # Specify the association foreign key used for the association (on the "other side" of association). <add> # Specify the foreign key used for the association on the receiving side of the association. <ide> # By default this is guessed to be the name of the associated class in lower-case and "_id" suffixed. <ide> # So if a Person class makes a +has_and_belongs_to_many+ association to Project, <ide> # the association will use "project_id" as the default <tt>:association_foreign_key</tt>.
1
Text
Text
fix markdown formatting
0974b392effe990a3c21d01862d4a31a9ae799b9
<ide><path>activesupport/CHANGELOG.md <ide> <ide> *Agis Anastasopoulos* <ide> <del>* `fast_xs` support has been removed. Use 'String#encode(xml: :attr)`. <add>* `fast_xs` support has been removed. Use `String#encode(xml: :attr)`. <ide> <ide> * `ActiveSupport::Notifications::Instrumenter#instrument` should yield <ide> its payload.
1
PHP
PHP
fix docblock error reported by phpstan
db6e8303a68adc712663bd42c7e8d3e7466c748c
<ide><path>src/Validation/Validation.php <ide> public static function naturalNumber($check, bool $allowZero = false): bool <ide> * legal finite on this platform. <ide> * <ide> * @param mixed $check Value to check <del> * @param int|float|null $lower Lower limit <del> * @param int|float|null $upper Upper limit <add> * @param float|null $lower Lower limit <add> * @param float|null $upper Upper limit <ide> * @return bool Success <ide> */ <ide> public static function range($check, ?float $lower = null, ?float $upper = null): bool
1
Ruby
Ruby
require pg when this adapter is loaded
df9abfab6f213440d3c4dd75973a1ffa8a21b34b
<ide><path>activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb <ide> require 'active_record/connection_adapters/abstract_adapter' <ide> require 'active_support/core_ext/kernel/requires' <ide> require 'active_support/core_ext/object/blank' <add>require 'pg' <ide> <ide> module ActiveRecord <ide> class Base <ide> # Establishes a connection to the database that's used by all Active Record objects <ide> def self.postgresql_connection(config) # :nodoc: <del> require 'pg' <del> <ide> config = config.symbolize_keys <ide> host = config[:host] <ide> port = config[:port] || 5432 <ide> username = config[:username].to_s if config[:username] <ide> password = config[:password].to_s if config[:password] <ide> <del> if config.has_key?(:database) <add> if config.key?(:database) <ide> database = config[:database] <ide> else <ide> raise ArgumentError, "No database specified. Missing argument: database." <ide> def xml(*args) <ide> end <ide> end <ide> <del> ADAPTER_NAME = 'PostgreSQL'.freeze <add> ADAPTER_NAME = 'PostgreSQL' <ide> <ide> NATIVE_DATABASE_TYPES = { <del> :primary_key => "serial primary key".freeze, <add> :primary_key => "serial primary key", <ide> :string => { :name => "character varying", :limit => 255 }, <ide> :text => { :name => "text" }, <ide> :integer => { :name => "integer" },
1
PHP
PHP
defer resolution of cache factory
b35afc7aa78f591aee364614660d406424d1e52e
<ide><path>src/Illuminate/Session/Middleware/StartSession.php <ide> class StartSession <ide> */ <ide> protected $manager; <ide> <add> /** <add> * The callback that can resolve an instance of the cache factory. <add> * <add> * @var callable <add> */ <add> protected $cacheFactoryResolver; <add> <ide> /** <ide> * Create a new session middleware. <ide> * <ide> * @param \Illuminate\Session\SessionManager $manager <del> * @param \Illuminate\Contracts\Cache\Factory $cache <add> * @param callable $cacheFactoryResolver <ide> * @return void <ide> */ <del> public function __construct(SessionManager $manager, CacheFactory $cache) <add> public function __construct(SessionManager $manager, callable $cacheFactoryResolver = null) <ide> { <ide> $this->manager = $manager; <del> $this->cache = $cache; <add> $this->cacheFactoryResolver = $cacheFactoryResolver; <ide> } <ide> <ide> /** <ide> protected function handleRequestWhileBlocking(Request $request, $session, Closur <ide> ? $request->route()->locksFor() <ide> : 10; <ide> <del> $lock = $this->cache->driver($this->manager->blockDriver()) <add> $lock = $this->cache($this->manager->blockDriver()) <ide> ->lock('session:'.$session->getId(), $lockFor) <ide> ->betweenBlockedAttemptsSleepFor(50); <ide> <ide> protected function sessionIsPersistent(array $config = null) <ide> <ide> return ! is_null($config['driver'] ?? null); <ide> } <add> <add> /** <add> * Resolve the given cache driver. 
<add> * <add> * @param string $cache <add> * @return \Illuminate\Cache\Store <add> */ <add> protected function cache($driver) <add> { <add> return call_user_func($this->cacheFactoryResolver)->driver($driver); <add> } <ide> } <ide><path>src/Illuminate/Session/SessionServiceProvider.php <ide> <ide> namespace Illuminate\Session; <ide> <add>use Illuminate\Contracts\Cache\Factory as CacheFactory; <ide> use Illuminate\Session\Middleware\StartSession; <ide> use Illuminate\Support\ServiceProvider; <ide> <ide> public function register() <ide> <ide> $this->registerSessionDriver(); <ide> <del> $this->app->singleton(StartSession::class); <add> $this->app->singleton(StartSession::class, function () { <add> return new StartSession($this->app->make(SessionManager::class), function () { <add> return $this->app->make(CacheFactory::class); <add> }); <add> }); <ide> } <ide> <ide> /**
2
Mixed
Javascript
forbid concurrent operations on dir handle
d3a8a23089af06bb047bf9bad7531fbfc70f6314
<ide><path>doc/api/errors.md <ide> An unknown Diffie-Hellman group name was given. See <ide> <ide> The [`fs.Dir`][] was previously closed. <ide> <add><a id="ERR_DIR_CONCURRENT_OPERATION"></a> <add>### `ERR_DIR_CONCURRENT_OPERATION` <add><!-- YAML <add>added: REPLACEME <add>--> <add> <add>A synchronous read or close call was attempted on an [`fs.Dir`][] which has <add>ongoing asynchronous operations. <add> <ide> <a id="ERR_DNS_SET_SERVERS_FAILED"></a> <ide> ### `ERR_DNS_SET_SERVERS_FAILED` <ide> <ide><path>lib/internal/errors.js <ide> E('ERR_CRYPTO_SIGN_KEY_REQUIRED', 'No key provided to sign', Error); <ide> E('ERR_CRYPTO_TIMING_SAFE_EQUAL_LENGTH', <ide> 'Input buffers must have the same byte length', RangeError); <ide> E('ERR_DIR_CLOSED', 'Directory handle was closed', Error); <add>E('ERR_DIR_CONCURRENT_OPERATION', <add> 'Cannot do synchronous work on directory handle with concurrent ' + <add> 'asynchronous operations', Error); <ide> E('ERR_DNS_SET_SERVERS_FAILED', 'c-ares failed to set servers: "%s" [%s]', <ide> Error); <ide> E('ERR_DOMAIN_CALLBACK_NOT_AVAILABLE', <ide><path>lib/internal/fs/dir.js <ide> const dirBinding = internalBinding('fs_dir'); <ide> const { <ide> codes: { <ide> ERR_DIR_CLOSED, <add> ERR_DIR_CONCURRENT_OPERATION, <ide> ERR_INVALID_CALLBACK, <ide> ERR_MISSING_ARGS <ide> } <ide> const kDirOptions = Symbol('kDirOptions'); <ide> const kDirReadImpl = Symbol('kDirReadImpl'); <ide> const kDirReadPromisified = Symbol('kDirReadPromisified'); <ide> const kDirClosePromisified = Symbol('kDirClosePromisified'); <add>const kDirOperationQueue = Symbol('kDirOperationQueue'); <ide> <ide> class Dir { <ide> constructor(handle, path, options) { <ide> class Dir { <ide> this[kDirPath] = path; <ide> this[kDirClosed] = false; <ide> <add> // Either `null` or an Array of pending operations (= functions to be called <add> // once the current operation is done). 
<add> this[kDirOperationQueue] = null; <add> <ide> this[kDirOptions] = { <ide> bufferSize: 32, <ide> ...getOptions(options, { <ide> class Dir { <ide> throw new ERR_INVALID_CALLBACK(callback); <ide> } <ide> <add> if (this[kDirOperationQueue] !== null) { <add> this[kDirOperationQueue].push(() => { <add> this[kDirReadImpl](maybeSync, callback); <add> }); <add> return; <add> } <add> <ide> if (this[kDirBufferedEntries].length > 0) { <ide> const [ name, type ] = this[kDirBufferedEntries].splice(0, 2); <ide> if (maybeSync) <ide> class Dir { <ide> <ide> const req = new FSReqCallback(); <ide> req.oncomplete = (err, result) => { <add> process.nextTick(() => { <add> const queue = this[kDirOperationQueue]; <add> this[kDirOperationQueue] = null; <add> for (const op of queue) op(); <add> }); <add> <ide> if (err || result === null) { <ide> return callback(err, result); <ide> } <ide> class Dir { <ide> getDirent(this[kDirPath], result[0], result[1], callback); <ide> }; <ide> <add> this[kDirOperationQueue] = []; <ide> this[kDirHandle].read( <ide> this[kDirOptions].encoding, <ide> this[kDirOptions].bufferSize, <ide> class Dir { <ide> throw new ERR_DIR_CLOSED(); <ide> } <ide> <add> if (this[kDirOperationQueue] !== null) { <add> throw new ERR_DIR_CONCURRENT_OPERATION(); <add> } <add> <ide> if (this[kDirBufferedEntries].length > 0) { <ide> const [ name, type ] = this[kDirBufferedEntries].splice(0, 2); <ide> return getDirent(this[kDirPath], name, type); <ide> class Dir { <ide> throw new ERR_INVALID_CALLBACK(callback); <ide> } <ide> <add> if (this[kDirOperationQueue] !== null) { <add> this[kDirOperationQueue].push(() => { <add> this.close(callback); <add> }); <add> return; <add> } <add> <ide> this[kDirClosed] = true; <ide> const req = new FSReqCallback(); <ide> req.oncomplete = callback; <ide> class Dir { <ide> throw new ERR_DIR_CLOSED(); <ide> } <ide> <add> if (this[kDirOperationQueue] !== null) { <add> throw new ERR_DIR_CONCURRENT_OPERATION(); <add> } <add> <ide> this[kDirClosed] = 
true; <ide> const ctx = { path: this[kDirPath] }; <ide> const result = this[kDirHandle].close(undefined, ctx); <ide><path>test/parallel/test-fs-opendir.js <ide> const dirclosedError = { <ide> code: 'ERR_DIR_CLOSED' <ide> }; <ide> <add>const dirconcurrentError = { <add> code: 'ERR_DIR_CONCURRENT_OPERATION' <add>}; <add> <ide> const invalidCallbackObj = { <ide> code: 'ERR_INVALID_CALLBACK', <ide> name: 'TypeError' <ide> async function doAsyncIterDirClosedTest() { <ide> assert.throws(() => dir.close(), dirclosedError); <ide> } <ide> doAsyncIterDirClosedTest().then(common.mustCall()); <add> <add>// Check that readSync() and closeSync() during read() throw exceptions <add>async function doConcurrentAsyncAndSyncOps() { <add> const dir = await fs.promises.opendir(testDir); <add> const promise = dir.read(); <add> <add> assert.throws(() => dir.closeSync(), dirconcurrentError); <add> assert.throws(() => dir.readSync(), dirconcurrentError); <add> <add> await promise; <add> dir.closeSync(); <add>} <add>doConcurrentAsyncAndSyncOps().then(common.mustCall()); <add> <add>// Check that concurrent read() operations don't do weird things. <add>async function doConcurrentAsyncOps() { <add> const dir = await fs.promises.opendir(testDir); <add> const promise1 = dir.read(); <add> const promise2 = dir.read(); <add> <add> assertDirent(await promise1); <add> assertDirent(await promise2); <add> dir.closeSync(); <add>} <add>doConcurrentAsyncOps().then(common.mustCall()); <add> <add>// Check that concurrent read() + close() operations don't do weird things. <add>async function doConcurrentAsyncMixedOps() { <add> const dir = await fs.promises.opendir(testDir); <add> const promise1 = dir.read(); <add> const promise2 = dir.close(); <add> <add> assertDirent(await promise1); <add> await promise2; <add>} <add>doConcurrentAsyncMixedOps().then(common.mustCall());
4
PHP
PHP
use class constants
416a7029991e8731970fce949ad472b56260fe64
<ide><path>tests/TestCase/Console/CommandRunnerTest.php <ide> use Cake\Http\BaseApplication; <ide> use Cake\TestSuite\TestCase; <ide> use Cake\TestSuite\Stub\ConsoleOutput; <add>use TestApp\Shell\SampleShell; <ide> <ide> /** <ide> * Test case for the CommandCollection <ide> public function testRunValidCommandWithAbort() <ide> ->setConstructorArgs([$this->config]) <ide> ->getMock(); <ide> <del> $commands = new CommandCollection(['failure' => 'TestApp\Shell\SampleShell']); <add> $commands = new CommandCollection(['failure' => SampleShell::class]); <ide> $app->method('console')->will($this->returnValue($commands)); <ide> <ide> $output = new ConsoleOutput(); <ide> public function testRunValidCommandReturnInteger() <ide> ->setConstructorArgs([$this->config]) <ide> ->getMock(); <ide> <del> $commands = new CommandCollection(['failure' => 'TestApp\Shell\SampleShell']); <add> $commands = new CommandCollection(['failure' => SampleShell::class]); <ide> $app->method('console')->will($this->returnValue($commands)); <ide> <ide> $output = new ConsoleOutput();
1
Text
Text
add @joshgav to collaborators
b417087ca7268de5ec6da503f428ee2ddd38fdfd
<ide><path>README.md <ide> more information about the governance of the Node.js project, see <ide> **Yuval Brik** &lt;yuval@brik.org.il&gt; <ide> * [joaocgreis](https://github.com/joaocgreis) - <ide> **João Reis** &lt;reis@janeasystems.com&gt; <add>* [joshgav](https://github.com/joshgav) - <add>**Josh Gavant** &lt;josh.gavant@outlook.com&gt; <ide> * [julianduque](https://github.com/julianduque) - <ide> **Julian Duque** &lt;julianduquej@gmail.com&gt; <ide> * [JungMinu](https://github.com/JungMinu) -
1
Text
Text
add changelog entry for
850e6aaad9c276d1b84708448221eb3becf0b917
<ide><path>activerecord/CHANGELOG.md <add>* SQLite3: Implement `add_foreign_key` and `remove_foreign_key`. <add> <add> *Ryuta Kamizono* <add> <ide> * Deprecate using class level querying methods if the receiver scope <ide> regarded as leaked. Use `klass.unscoped` to avoid the leaking scope. <ide>
1
Javascript
Javascript
add webgl2 option to webglrenderer
949623b530c590223ab35bb83263c35ae4d7c0bf
<ide><path>src/renderers/WebGLRenderer.js <ide> function WebGLRenderer( parameters ) { <ide> _antialias = parameters.antialias !== undefined ? parameters.antialias : false, <ide> _premultipliedAlpha = parameters.premultipliedAlpha !== undefined ? parameters.premultipliedAlpha : true, <ide> _preserveDrawingBuffer = parameters.preserveDrawingBuffer !== undefined ? parameters.preserveDrawingBuffer : false, <del> _powerPreference = parameters.powerPreference !== undefined ? parameters.powerPreference : 'default'; <add> _powerPreference = parameters.powerPreference !== undefined ? parameters.powerPreference : 'default', <add> _webgl2 = parameters.webgl2 !== undefined ? parameters.webgl2 : false; <ide> <ide> var currentRenderList = null; <ide> var currentRenderState = null; <ide> function WebGLRenderer( parameters ) { <ide> _canvas.addEventListener( 'webglcontextlost', onContextLost, false ); <ide> _canvas.addEventListener( 'webglcontextrestored', onContextRestore, false ); <ide> <del> _gl = _context || _canvas.getContext( 'webgl', contextAttributes ) || _canvas.getContext( 'experimental-webgl', contextAttributes ); <add> var webglVersion = _webgl2 && typeof WebGL2RenderingContext !== 'undefined' ? 'webgl2' : 'webgl'; <add> <add> _gl = _context || _canvas.getContext( webglVersion, contextAttributes ); <add> <add> if ( _gl === null && webglVersion === 'webgl' ) _gl = _canvas.getContext( 'experimental-webgl', contextAttributes ); <ide> <ide> if ( _gl === null ) { <ide> <del> if ( _canvas.getContext( 'webgl' ) !== null ) { <add> if ( _canvas.getContext( webglVersion ) !== null ) { <ide> <ide> throw new Error( 'Error creating WebGL context with your selected attributes.' ); <ide>
1
PHP
PHP
improve docblocks of assoc arrays
ddac51709eb89344373c83c67aacf964e673e924
<ide><path>src/Form/Form.php <ide> public function setErrors(array $errors) <ide> * Defaults to `true`/`'default'`. <ide> * <ide> * @param array $data Form data. <del> * @param array $options List of options. <add> * @param array<string, mixed> $options List of options. <ide> * @return bool False on validation failure, otherwise returns the <ide> * result of the `_execute()` method. <ide> */ <ide><path>src/Form/FormProtector.php <ide> protected function extractToken($formData): ?string <ide> /** <ide> * Return hash parts for the token generation <ide> * <del> * @param array $formData Form data. <del> * @return array <add> * @param array<string, array> $formData Form data. <add> * @return array<string, array> <ide> * @psalm-return array{fields: array, unlockedFields: array} <ide> */ <ide> protected function extractHashParts(array $formData): array <ide> public function buildTokenData(string $url = '', string $sessionId = ''): array <ide> * Generate validation hash. <ide> * <ide> * @param array $fields Fields list. <del> * @param array $unlockedFields Unlocked fields. <add> * @param array<string> $unlockedFields Unlocked fields. <ide> * @param string $url Form URL. <ide> * @param string $sessionId Session Id. 
<ide> * @return string <ide><path>tests/test_app/TestApp/Model/Table/ArticlesTable.php <ide> public function initialize(array $config): void <ide> * Find published <ide> * <ide> * @param \Cake\ORM\Query $query The query <del> * @param array $options The options <add> * @param array<string, mixed> $options The options <ide> */ <ide> public function findPublished($query, array $options = []): Query <ide> { <ide><path>tests/test_app/TestApp/Model/Table/AuthUsersTable.php <ide> class AuthUsersTable extends Table <ide> * Custom finder <ide> * <ide> * @param \Cake\ORM\Query $query The query to find with <del> * @param array $options The options to find with <add> * @param array<string, mixed> $options The options to find with <ide> * @return \Cake\ORM\Query The query builder <ide> */ <ide> public function findAuth(Query $query, array $options): Query <ide> public function findAuth(Query $query, array $options): Query <ide> * Custom finder <ide> * <ide> * @param \Cake\ORM\Query $query The query to find with <del> * @param array $options The options to find with <add> * @param array<string, mixed> $options The options to find with <ide> * @return \Cake\ORM\Query The query builder <ide> */ <ide> public function findUsername(Query $query, array $options): Query <ide><path>tests/test_app/TestApp/Model/Table/AuthorsTable.php <ide> */ <ide> class AuthorsTable extends Table <ide> { <add> /** <add> * @param array<string, mixed> $config <add> * @return void <add> */ <ide> public function initialize(array $config): void <ide> { <ide> $this->hasMany('articles'); <ide> } <ide> <add> /** <add> * @param \Cake\ORM\Query $query <add> * @param array<string, mixed> $options <add> * @return \Cake\ORM\Query <add> */ <ide> public function findByAuthor(Query $query, array $options = []): Query <ide> { <ide> if (isset($options['author_id'])) { <ide> public function findByAuthor(Query $query, array $options = []): Query <ide> * Finder that applies a formatter to test dirty associations <ide> * 
<ide> * @param \Cake\ORM\Query $query The query <del> * @param array $options The options <add> * @param array<string, mixed> $options The options <ide> */ <ide> public function findFormatted(Query $query, array $options = []): Query <ide> { <ide><path>tests/test_app/TestApp/Model/Table/GreedyCommentsTable.php <ide> public function initialize(array $config): void <ide> * Overload find to cause issues. <ide> * <ide> * @param string $type Find type <del> * @param array $options find options <add> * @param array<string, mixed> $options find options <ide> */ <del> public function find(string $type = 'all', $options = []): Query <add> public function find(string $type = 'all', array $options = []): Query <ide> { <ide> if (empty($options['conditions'])) { <ide> $options['conditions'] = []; <ide><path>tests/test_app/TestApp/Utility/Base.php <ide> class Base <ide> <ide> /** <ide> * @param string[] $properties An array of properties and the merge strategy for them. <del> * @param array $options The options to use when merging properties. <add> * @param array<string, mixed> $options The options to use when merging properties. <ide> */ <del> public function mergeVars($properties, $options = []): void <add> public function mergeVars(array $properties, array $options = []): void <ide> { <ide> $this->_mergeVars($properties, $options); <ide> }
7
Java
Java
fix caching tests
c90ca15addf048ce1ea17b1149052d97a7655ea7
<ide><path>spring-context/src/test/java/org/springframework/cache/config/AbstractAnnotationTests.java <ide> <ide> package org.springframework.cache.config; <ide> <add>import java.io.IOException; <ide> import java.util.Collection; <ide> import java.util.UUID; <ide> <ide> public void testCheckedThrowable(CacheableService<?> service) throws Exception { <ide> service.throwChecked(arg); <ide> fail("Excepted exception"); <ide> } catch (Exception ex) { <add> assertEquals("Wrong exception type", IOException.class, ex.getClass()); <ide> assertEquals(arg, ex.getMessage()); <ide> } <ide> } <ide> public void testUncheckedThrowable(CacheableService<?> service) throws Exception <ide> service.throwUnchecked(Long.valueOf(1)); <ide> fail("Excepted exception"); <ide> } catch (RuntimeException ex) { <del> assertTrue("Excepted different exception type and got " + ex.getClass(), <del> ex instanceof UnsupportedOperationException); <del> // expected <add> assertEquals("Wrong exception type", UnsupportedOperationException.class, ex.getClass()); <add> assertEquals("1", ex.getMessage()); <ide> } <ide> } <ide> <ide><path>spring-context/src/test/java/org/springframework/cache/config/AnnotatedClassCacheableService.java <ide> <ide> package org.springframework.cache.config; <ide> <add>import java.io.IOException; <ide> import java.util.concurrent.atomic.AtomicLong; <ide> <ide> import org.springframework.cache.annotation.CacheEvict; <ide> public Number nullInvocations() { <ide> <ide> @Override <ide> public Long throwChecked(Object arg1) throws Exception { <del> throw new UnsupportedOperationException(arg1.toString()); <add> throw new IOException(arg1.toString()); <ide> } <ide> <ide> @Override <ide> public Long throwUnchecked(Object arg1) { <del> throw new UnsupportedOperationException(); <add> throw new UnsupportedOperationException(arg1.toString()); <ide> } <ide> <ide> // multi annotations <ide><path>spring-context/src/test/java/org/springframework/cache/config/CustomInterceptorTests.java <ide> 
<ide> package org.springframework.cache.config; <ide> <add>import java.io.IOException; <ide> import java.util.Map; <ide> <ide> import org.junit.After; <ide> public void customInterceptorAppliesWithCheckedException() { <ide> } <ide> catch (RuntimeException e) { <ide> assertNotNull("missing original exception", e.getCause()); <del> assertEquals(Exception.class, e.getCause().getClass()); <add> assertEquals(IOException.class, e.getCause().getClass()); <ide> } <ide> catch (Exception e) { <ide> fail("Wrong exception type " + e); <ide><path>spring-context/src/test/java/org/springframework/cache/config/DefaultCacheableService.java <ide> <ide> package org.springframework.cache.config; <ide> <add>import java.io.IOException; <ide> import java.util.concurrent.atomic.AtomicLong; <ide> <ide> import org.springframework.cache.annotation.CacheEvict; <ide> public Number nullInvocations() { <ide> @Override <ide> @Cacheable("testCache") <ide> public Long throwChecked(Object arg1) throws Exception { <del> throw new Exception(arg1.toString()); <add> throw new IOException(arg1.toString()); <ide> } <ide> <ide> @Override
4
Ruby
Ruby
remove xcode dependence from env.rb
a0763dfc2c2e95a19c8e1789ad8f4d25710f3029
<ide><path>Library/Homebrew/extend/ENV.rb <ide> def setup_build_environment <ide> when :gcc then self.gcc <ide> end <ide> <add> # we must have a working compiler! <add> ENV['CC'] = '/usr/bin/cc' unless File.exist? ENV['CC'] <add> ENV['CXX'] = '/usr/bin/c++' unless File.exist? ENV['CXX'] <add> <ide> # In rare cases this may break your builds, as the tool for some reason wants <ide> # to use a specific linker. However doing this in general causes formula to <ide> # build more successfully because we are changing CC and many build systems <ide> def Og <ide> end <ide> <ide> def gcc_4_0_1 <del> self['CC'] = self['LD'] = '/usr/bin/gcc-4.0' <add> self['CC'] = '/usr/bin/gcc-4.0' <ide> self['CXX'] = '/usr/bin/g++-4.0' <del> self.O3 <add> remove_from_cflags '-O4' <ide> remove_from_cflags '-march=core2' <ide> remove_from_cflags %r{-msse4(\.\d)?} <ide> end <ide> alias_method :gcc_4_0, :gcc_4_0_1 <ide> <ide> def gcc <del> if MacOS.xcode_version < '4' <del> self['CC'] = '/usr/bin/cc' <del> self['CXX'] = '/usr/bin/c++' <del> elsif MacOS.xcode_version >= '4.2' <del> # Apple stopped adding the -4.2 suffixes <del> self['CC'] = "#{MacOS.xcode_prefix}/usr/bin/gcc" <del> self['CXX'] = "#{MacOS.xcode_prefix}/usr/bin/g++" <del> else <del> # With Xcode4 cc, c++, gcc and g++ are actually symlinks to llvm-gcc <del> self['CC'] = "#{MacOS.xcode_prefix}/usr/bin/gcc-4.2" <del> self['CXX'] = "#{MacOS.xcode_prefix}/usr/bin/g++-4.2" <del> end <add> self['CC'] = "/usr/bin/gcc-4.2" <add> self['CXX'] = "/usr/bin/g++-4.2" <ide> remove_from_cflags '-O4' <ide> end <ide> alias_method :gcc_4_2, :gcc <ide> <ide> def llvm <del> if MacOS.xcode_version < '4' <del> self.gcc <del> elsif MacOS.xcode_version < '4.1' <del> self['CC'] = "#{MacOS.xcode_prefix}/usr/bin/llvm-gcc" <del> self['CXX'] = "#{MacOS.xcode_prefix}/usr/bin/llvm-g++" <del> else <del> self['CC'] = '/usr/bin/cc' <del> self['CXX'] = '/usr/bin/c++' <del> end <add> self['CC'] = "/usr/bin/llvm-gcc" <add> self['CXX'] = "/usr/bin/llvm-g++" <ide> end 
<ide> <ide> def clang <del> if MacOS.xcode_version > '4' <del> self['CC'] = "#{MacOS.xcode_prefix}/usr/bin/clang" <del> self['CXX'] = "#{MacOS.xcode_prefix}/usr/bin/clang++" <del> else <del> self.gcc <del> end <add> self['CC'] = "/usr/bin/clang" <add> self['CXX'] = "/usr/bin/clang++" <ide> end <ide> <ide> def fortran
1
Javascript
Javascript
add banner to all dist/
4f732e8caabce9c9a48bf95d8a2e9869d5507aeb
<ide><path>Gruntfile.js <ide> module.exports = function(grunt) { <ide> linebreak: false <ide> }, <ide> files: { <del> src: ['dist/*.min.js'] <add> src: ['dist/*.js'] <ide> } <ide> } <ide> },
1
Mixed
Javascript
add support for custom completions
9fbe456db15a63409916c207cc1157321a307409
<ide><path>doc/api/repl.md <ide> added: v0.1.91 <ide> `undefined`. Defaults to `false`. <ide> * `writer` {Function} The function to invoke to format the output of each <ide> command before writing to `output`. Defaults to [`util.inspect()`][]. <add> * `completer` {Function} An optional function used for custom Tab auto <add> completion. See [`readline.InterfaceCompleter`][] for an example. <ide> * `replMode` - A flag that specifies whether the default evaluator executes <ide> all JavaScript commands in strict mode, default mode, or a hybrid mode <ide> ("magic" mode.) Acceptable values are: <ide> see: https://gist.github.com/2053342 <ide> [`util.inspect()`]: util.html#util_util_inspect_object_options <ide> [here]: util.html#util_custom_inspect_function_on_objects <ide> [`readline.Interface`]: readline.html#readline_class_interface <add>[`readline.InterfaceCompleter`]: readline.html#readline_use_of_the_completer_function <ide><path>lib/repl.js <ide> function REPLServer(prompt, <ide> self.bufferedCommand = ''; <ide> self.lines.level = []; <ide> <del> function complete(text, callback) { <del> self.complete(text, callback); <del> } <add> // Figure out which "complete" function to use. <add> self.completer = (typeof options.completer === 'function') <add> ? options.completer <add> : complete; <ide> <ide> Interface.call(this, { <ide> input: self.inputStream, <ide> output: self.outputStream, <del> completer: complete, <add> completer: self.completer, <ide> terminal: options.terminal, <ide> historySize: options.historySize, <ide> prompt <ide> function filteredOwnPropertyNames(obj) { <ide> return Object.getOwnPropertyNames(obj).filter(intFilter); <ide> } <ide> <add>REPLServer.prototype.complete = function() { <add> this.completer.apply(this, arguments); <add>}; <add> <ide> // Provide a list of completions for the given leading text. This is <ide> // given to the readline interface for handling tab completion. 
<ide> // <ide> function filteredOwnPropertyNames(obj) { <ide> // <ide> // Warning: This eval's code like "foo.bar.baz", so it will run property <ide> // getter code. <del>REPLServer.prototype.complete = function(line, callback) { <add>function complete(line, callback) { <ide> // There may be local variables to evaluate, try a nested REPL <ide> if (this.bufferedCommand !== undefined && this.bufferedCommand.length) { <ide> // Get a new array of inputed lines <ide> REPLServer.prototype.complete = function(line, callback) { <ide> <ide> callback(null, [completions || [], completeOn]); <ide> } <del>}; <add>} <ide> <ide> <ide> /** <ide><path>test/parallel/test-repl-tab-complete.js <ide> testMe.complete('console.lo', common.mustCall(function(error, data) { <ide> assert.deepStrictEqual(data, [['console.log'], 'console.lo']); <ide> })); <ide> <del>// Tab Complete will return globaly scoped variables <add>// Tab Complete will return globally scoped variables <ide> putIn.run(['};']); <ide> testMe.complete('inner.o', common.mustCall(function(error, data) { <ide> assert.deepStrictEqual(data, works); <ide> if (typeof Intl === 'object') { <ide> testNonGlobal.complete('I', common.mustCall((error, data) => { <ide> assert.deepStrictEqual(data, builtins); <ide> })); <add> <add>// To test custom completer function. <add>// Sync mode. <add>const customCompletions = 'aaa aa1 aa2 bbb bb1 bb2 bb3 ccc ddd eee'.split(' '); <add>const testCustomCompleterSyncMode = repl.start({ <add> prompt: '', <add> input: putIn, <add> output: putIn, <add> completer: function completerSyncMode(line) { <add> const hits = customCompletions.filter((c) => { <add> return c.indexOf(line) === 0; <add> }); <add> // Show all completions if none found. <add> return [hits.length ? hits : customCompletions, line]; <add> } <add>}); <add> <add>// On empty line should output all the custom completions <add>// without complete anything. 
<add>testCustomCompleterSyncMode.complete('', common.mustCall((error, data) => { <add> assert.deepStrictEqual(data, [ <add> customCompletions, <add> '' <add> ]); <add>})); <add> <add>// On `a` should output `aaa aa1 aa2` and complete until `aa`. <add>testCustomCompleterSyncMode.complete('a', common.mustCall((error, data) => { <add> assert.deepStrictEqual(data, [ <add> 'aaa aa1 aa2'.split(' '), <add> 'a' <add> ]); <add>})); <add> <add>// To test custom completer function. <add>// Async mode. <add>const testCustomCompleterAsyncMode = repl.start({ <add> prompt: '', <add> input: putIn, <add> output: putIn, <add> completer: function completerAsyncMode(line, callback) { <add> const hits = customCompletions.filter((c) => { <add> return c.indexOf(line) === 0; <add> }); <add> // Show all completions if none found. <add> callback(null, [hits.length ? hits : customCompletions, line]); <add> } <add>}); <add> <add>// On empty line should output all the custom completions <add>// without complete anything. <add>testCustomCompleterAsyncMode.complete('', common.mustCall((error, data) => { <add> assert.deepStrictEqual(data, [ <add> customCompletions, <add> '' <add> ]); <add>})); <add> <add>// On `a` should output `aaa aa1 aa2` and complete until `aa`. <add>testCustomCompleterAsyncMode.complete('a', common.mustCall((error, data) => { <add> assert.deepStrictEqual(data, [ <add> 'aaa aa1 aa2'.split(' '), <add> 'a' <add> ]); <add>}));
3
Python
Python
fix autoencoder serialization
78feed7fa98f2f7fc03e3e21b69b7b2997f0cef2
<ide><path>keras/utils/layer_utils.py <ide> import copy <ide> <ide> from ..layers.advanced_activations import LeakyReLU, PReLU <del>from ..layers.core import Dense, Merge, Dropout, Activation, Reshape, Flatten, RepeatVector, Layer <add>from ..layers.core import Dense, Merge, Dropout, Activation, Reshape, Flatten, RepeatVector, Layer, AutoEncoder <ide> from ..layers.core import ActivityRegularization, TimeDistributedDense, AutoEncoder, MaxoutDense <ide> from ..layers.convolutional import Convolution1D, Convolution2D, MaxPooling1D, MaxPooling2D, ZeroPadding2D <ide> from ..layers.embeddings import Embedding, WordContextProduct <ide> def container_from_config(original_layer_dict): <ide> graph_layer.add_output(**output) <ide> return graph_layer <ide> <add> elif name == 'AutoEncoder': <add> kwargs = {'encoder': container_from_config(layer_dict.get('encoder_config')), <add> 'decoder': container_from_config(layer_dict.get('decoder_config'))} <add> for kwarg in ['output_reconstruction', 'weights']: <add> if kwarg in layer_dict: <add> kwargs[kwarg] = layer_dict[kwarg] <add> return AutoEncoder(**kwargs) <add> <ide> else: <ide> layer_dict.pop('name') <ide> <ide><path>tests/manual/check_autoencoder.py <ide> from __future__ import absolute_import <ide> from __future__ import print_function <ide> from keras.datasets import mnist <del>from keras.models import Sequential <add>from keras.models import Sequential, model_from_config <ide> from keras.layers.core import AutoEncoder, Dense, Activation, TimeDistributedDense, Flatten <ide> from keras.layers.recurrent import LSTM <ide> from keras.layers.embeddings import Embedding <ide> # autoencoder model test # <ide> ########################## <ide> <add> <ide> def build_lstm_autoencoder(autoencoder, X_train, X_test): <ide> X_train = X_train[:, np.newaxis, :] <ide> X_test = X_test[:, np.newaxis, :] <ide> def build_deep_classical_autoencoder(autoencoder): <ide> print("Error: unknown autoencoder type!") <ide> exit(-1) <ide> <del> 
autoencoder.get_config(verbose=1) <ide> autoencoder.compile(loss='mean_squared_error', optimizer='adam') <ide> # Do NOT use validation data with return output_reconstruction=True <ide> autoencoder.fit(X_train, X_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=1) <ide> def build_deep_classical_autoencoder(autoencoder): <ide> <ide> print('Loss change:', (score[0] - classical_score[0])/classical_score[0], '%') <ide> print('Accuracy change:', (score[1] - classical_score[1])/classical_score[1], '%') <add> <add> # check serialization <add> config = autoencoder.get_config(verbose=1) <add> autoencoder = model_from_config(config)
2
Text
Text
update cjs-module-lexer repo link
2e2a6fecd9b1aaffcb932fcc415439f359c84fdd
<ide><path>doc/api/esm.md <ide> success! <ide> [`process.dlopen`]: process.md#processdlopenmodule-filename-flags <ide> [`string`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String <ide> [`util.TextDecoder`]: util.md#class-utiltextdecoder <del>[cjs-module-lexer]: https://github.com/guybedford/cjs-module-lexer/tree/1.2.2 <add>[cjs-module-lexer]: https://github.com/nodejs/cjs-module-lexer/tree/1.2.2 <ide> [custom https loader]: #https-loader <ide> [load hook]: #loadurl-context-defaultload <ide> [resolve hook]: #resolvespecifier-context-defaultresolve
1
Ruby
Ruby
add alias `r` for rails runner
bfff5eac774df495fbf9f39a792b714788ea54d8
<ide><path>railties/lib/rails/commands.rb <ide> "g" => "generate", <ide> "c" => "console", <ide> "s" => "server", <del> "db" => "dbconsole" <add> "db" => "dbconsole", <add> "r" => "runner" <ide> } <ide> <ide> command = ARGV.shift
1
Ruby
Ruby
remove identity map from benchmark script
ca75091fc72224a5f46f95564578a20b88543458
<ide><path>activerecord/examples/performance.rb <ide> def self.email <ide> ) <ide> end <ide> <del>ActiveRecord::IdentityMap.enabled = true unless ENV['IM'] == "disabled" <del> <del>def clear_identity_map! <del> ActiveRecord::IdentityMap.clear <del>end <del> <ide> require 'benchmark' <ide> <ide> Benchmark.bm(46) do |x| <del> clear_identity_map! <ide> ar_obj = Exhibit.find(1) <ide> attrs = { :name => 'sam' } <ide> attrs_first = { :name => 'sam' }
1
Javascript
Javascript
simplify `devsettings` implementation
70cd569e7e4cceac81023eae4ea5089cff2f9b59
<ide><path>Libraries/Utilities/DevSettings.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @format <ide> * @flow strict-local <add> * @format <ide> */ <ide> <ide> import NativeDevSettings from '../NativeModules/specs/NativeDevSettings'; <ide> import NativeEventEmitter from '../EventEmitter/NativeEventEmitter'; <ide> <del>interface IDevSettings { <del> addMenuItem(title: string, handler: () => mixed): void; <del> reload(reason?: string): void; <del> onFastRefresh(): void; <del>} <add>let DevSettings: { <add> addMenuItem(title: string, handler: () => mixed): void, <add> reload(reason?: string): void, <add> onFastRefresh(): void, <add>} = { <add> addMenuItem(title: string, handler: () => mixed): void {}, <add> reload(reason?: string): void {}, <add> onFastRefresh(): void {}, <add>}; <ide> <ide> type DevSettingsEventDefinitions = { <ide> didPressMenuItem: [{title: string}], <ide> }; <ide> <del>class DevSettings extends NativeEventEmitter<DevSettingsEventDefinitions> <del> implements IDevSettings { <del> _menuItems: Map<string, () => mixed>; <del> <del> constructor() { <del> super(NativeDevSettings); <del> <del> this._menuItems = new Map(); <del> } <add>if (__DEV__) { <add> const emitter = new NativeEventEmitter<DevSettingsEventDefinitions>( <add> NativeDevSettings, <add> ); <add> const menuItems = new Map(); <ide> <del> addMenuItem(title: string, handler: () => mixed) { <del> // Make sure items are not added multiple times. This can <del> // happen when hot reloading the module that registers the <del> // menu items. The title is used as the id which means we <del> // don't support multiple items with the same name. 
<del> const oldHandler = this._menuItems.get(title); <del> if (oldHandler != null) { <del> this.removeListener('didPressMenuItem', oldHandler); <del> } else { <del> NativeDevSettings.addMenuItem(title); <del> } <del> <del> this._menuItems.set(title, handler); <del> this.addListener('didPressMenuItem', event => { <del> if (event.title === title) { <del> handler(); <add> DevSettings = { <add> addMenuItem(title: string, handler: () => mixed): void { <add> // Make sure items are not added multiple times. This can <add> // happen when hot reloading the module that registers the <add> // menu items. The title is used as the id which means we <add> // don't support multiple items with the same name. <add> const oldHandler = menuItems.get(title); <add> if (oldHandler != null) { <add> emitter.removeListener('didPressMenuItem', oldHandler); <add> } else { <add> NativeDevSettings.addMenuItem(title); <ide> } <del> }); <del> } <del> <del> reload(reason?: string) { <del> if (typeof NativeDevSettings.reloadWithReason === 'function') { <del> NativeDevSettings.reloadWithReason(reason ?? 'Uncategorized from JS'); <del> } else { <del> NativeDevSettings.reload(); <del> } <del> } <del> <del> onFastRefresh() { <del> if (typeof NativeDevSettings.onFastRefresh === 'function') { <del> NativeDevSettings.onFastRefresh(); <del> } <del> } <ide> <del> // TODO: Add other dev setting methods exposed by the native module. <del>} <del> <del>// Avoid including the full `NativeDevSettings` class in prod. <del>class NoopDevSettings implements IDevSettings { <del> addMenuItem(title: string, handler: () => mixed) {} <del> reload(reason?: string) {} <del> onFastRefresh() {} <add> menuItems.set(title, handler); <add> emitter.addListener('didPressMenuItem', event => { <add> if (event.title === title) { <add> handler(); <add> } <add> }); <add> }, <add> reload(reason?: string): void { <add> if (NativeDevSettings.reloadWithReason != null) { <add> NativeDevSettings.reloadWithReason(reason ?? 
'Uncategorized from JS'); <add> } else { <add> NativeDevSettings.reload(); <add> } <add> }, <add> onFastRefresh(): void { <add> NativeDevSettings.onFastRefresh?.(); <add> }, <add> }; <ide> } <ide> <del>module.exports = ((__DEV__ <del> ? new DevSettings() <del> : new NoopDevSettings()): IDevSettings); <add>module.exports = DevSettings;
1
Javascript
Javascript
remove hasownproperty usage
105d0731d725435188aaf76af55953ab9b68da4f
<ide><path>threejs/resources/webgl-debug-helper.js <ide> // Override the getError function with one that returns our saved results. <ide> if (wrapper.getError) { <ide> wrapper.getError = function() { <del> for (const err in glErrorShadow) { <del> if (glErrorShadow.hasOwnProperty(err)) { <del> if (glErrorShadow[err]) { <del> glErrorShadow[err] = false; <del> return err; <del> } <add> for (const err of glErrorShadow) { <add> if (glErrorShadow[err]) { <add> glErrorShadow[err] = false; <add> return err; <ide> } <ide> } <ide> return ctx.NO_ERROR;
1
Python
Python
add support for gradient checkpointing
4c9e0f029e55d9f22d1c119d4be018a3e552b0a0
<ide><path>src/transformers/models/bert_generation/modeling_bert_generation.py <ide> class BertGenerationPreTrainedModel(PreTrainedModel): <ide> <ide> config_class = BertGenerationConfig <ide> base_model_prefix = "bert" <add> supports_gradient_checkpointing = True <ide> _keys_to_ignore_on_load_missing = [r"position_ids"] <ide> <ide> def _init_weights(self, module): <ide> def _init_weights(self, module): <ide> module.bias.data.zero_() <ide> module.weight.data.fill_(1.0) <ide> <add> def _set_gradient_checkpointing(self, module, value=False): <add> if isinstance(module, BertEncoder): <add> module.gradient_checkpointing = value <add> <ide> <ide> BERT_GENERATION_START_DOCSTRING = r""" <ide> <ide><path>src/transformers/models/encoder_decoder/modeling_encoder_decoder.py <ide> class EncoderDecoderModel(PreTrainedModel): <ide> """ <ide> config_class = EncoderDecoderConfig <ide> base_model_prefix = "encoder_decoder" <add> main_input_name = "input_ids" <add> supports_gradient_checkpointing = True <ide> <ide> def __init__( <ide> self, <ide> def tie_weights(self): <ide> self.encoder, self.decoder._modules[decoder_base_model_prefix], self.decoder.base_model_prefix <ide> ) <ide> <add> def _set_gradient_checkpointing(self, module, value=False): <add> # call both encoder and decoder function on gradient checkpointing <add> self.encoder._set_gradient_checkpointing(module, value=value) <add> self.decoder._set_gradient_checkpointing(module, value=value) <add> <ide> def get_encoder(self): <ide> return self.encoder <ide> <ide><path>tests/models/encoder_decoder/test_modeling_encoder_decoder.py <ide> def test_encoder_decoder_model_shared_weights(self): <ide> input_ids_dict = self.prepare_config_and_inputs() <ide> self.create_and_check_encoder_decoder_shared_weights(**input_ids_dict) <ide> <add> def test_training_gradient_checkpointing(self): <add> inputs_dict = self.prepare_config_and_inputs() <add> encoder_model, decoder_model = self.get_encoder_decoder_model( <add> 
inputs_dict["config"], inputs_dict["decoder_config"] <add> ) <add> <add> model = EncoderDecoderModel(encoder=encoder_model, decoder=decoder_model) <add> model.train() <add> model.gradient_checkpointing_enable() <add> model.config.decoder_start_token_id = 0 <add> model.config.pad_token_id = 0 <add> <add> model_inputs = { <add> "input_ids": inputs_dict["input_ids"], <add> "attention_mask": inputs_dict["attention_mask"], <add> "labels": inputs_dict["labels"], <add> "decoder_input_ids": inputs_dict["decoder_input_ids"], <add> } <add> loss = model(**model_inputs).loss <add> loss.backward() <add> <ide> @slow <ide> def test_real_model_save_load_from_pretrained(self): <ide> model_2 = self.get_pretrained_model()
3
Python
Python
update spanish tokenizer
1d64527727795109cd3510fe3788fd3e5857b4ce
<ide><path>spacy/es/tokenizer_exceptions.py <ide> from __future__ import unicode_literals <ide> <ide> from ..symbols import * <del>from ..language_data import PRON_LEMMA <add>from ..language_data import PRON_LEMMA, DET_LEMMA <ide> <ide> <ide> TOKENIZER_EXCEPTIONS = { <del> "accidentarse": [ <del> {ORTH: "accidentar", LEMMA: "accidentar", TAG: AUX}, <del> {ORTH: "se", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "aceptarlo": [ <del> {ORTH: "aceptar", LEMMA: "aceptar", TAG: AUX}, <del> {ORTH: "lo", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "acompañarla": [ <del> {ORTH: "acompañar", LEMMA: "acompañar", TAG: AUX}, <del> {ORTH: "la", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "advertirle": [ <del> {ORTH: "advertir", LEMMA: "advertir", TAG: AUX}, <del> {ORTH: "le", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <ide> "al": [ <ide> {ORTH: "a", LEMMA: "a", TAG: ADP}, <ide> {ORTH: "el", LEMMA: "el", TAG: DET} <ide> ], <ide> <del> "anunciarnos": [ <del> {ORTH: "anunciar", LEMMA: "anunciar", TAG: AUX}, <del> {ORTH: "nos", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "asegurándole": [ <del> {ORTH: "asegurando", LEMMA: "asegurar", TAG: AUX}, <del> {ORTH: "le", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "considerarle": [ <del> {ORTH: "considerar", LEMMA: "considerar", TAG: AUX}, <del> {ORTH: "le", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "decirle": [ <del> {ORTH: "decir", LEMMA: "decir", TAG: AUX}, <del> {ORTH: "le", LEMMA: PRON_LEMMA, TAG: "PRON"} <add> "consigo": [ <add> {ORTH: "con", LEMMA: "con"}, <add> {ORTH: "sigo", LEMMA: PRON_LEMMA, NORM: "sí"} <ide> ], <ide> <del> "decirles": [ <del> {ORTH: "decir", LEMMA: "decir", TAG: AUX}, <del> {ORTH: "les", LEMMA: PRON_LEMMA, TAG: "PRON"} <add> "conmigo": [ <add> {ORTH: "con", LEMMA: "con"}, <add> {ORTH: "migo", LEMMA: PRON_LEMMA, NORM: "mí"} <ide> ], <ide> <del> "decirte": [ <del> {ORTH: "Decir", LEMMA: "decir", TAG: AUX}, <del> {ORTH: "te", LEMMA: PRON_LEMMA, TAG: "PRON"} 
<del> ], <del> <del> "dejarla": [ <del> {ORTH: "dejar", LEMMA: "dejar", TAG: AUX}, <del> {ORTH: "la", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "dejarnos": [ <del> {ORTH: "dejar", LEMMA: "dejar", TAG: AUX}, <del> {ORTH: "nos", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "dejándole": [ <del> {ORTH: "dejando", LEMMA: "dejar", TAG: AUX}, <del> {ORTH: "le", LEMMA: PRON_LEMMA, TAG: "PRON"} <add> "contigo": [ <add> {ORTH: "con", LEMMA: "con"}, <add> {ORTH: "tigo", LEMMA: PRON_LEMMA, NORM: "ti"} <ide> ], <ide> <ide> "del": [ <ide> {ORTH: "de", LEMMA: "de", TAG: ADP}, <del> {ORTH: "el", LEMMA: "el", TAG: DET} <del> ], <del> <del> "demostrarles": [ <del> {ORTH: "demostrar", LEMMA: "demostrar", TAG: AUX}, <del> {ORTH: "les", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "diciéndole": [ <del> {ORTH: "diciendo", LEMMA: "decir", TAG: AUX}, <del> {ORTH: "le", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "diciéndoles": [ <del> {ORTH: "diciendo", LEMMA: "decir", TAG: AUX}, <del> {ORTH: "les", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "diferenciarse": [ <del> {ORTH: "diferenciar", LEMMA: "diferenciar", TAG: AUX}, <del> {ORTH: "se", LEMMA: "él", TAG: "PRON"} <del> ], <del> <del> "divirtiéndome": [ <del> {ORTH: "divirtiendo", LEMMA: "divertir", TAG: AUX}, <del> {ORTH: "me", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "ensanchándose": [ <del> {ORTH: "ensanchando", LEMMA: "ensanchar", TAG: AUX}, <del> {ORTH: "se", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "explicarles": [ <del> {ORTH: "explicar", LEMMA: "explicar", TAG: AUX}, <del> {ORTH: "les", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "haberla": [ <del> {ORTH: "haber", LEMMA: "haber", TAG: AUX}, <del> {ORTH: "la", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "haberlas": [ <del> {ORTH: "haber", LEMMA: "haber", TAG: AUX}, <del> {ORTH: "las", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "haberlo": [ <del> {ORTH: "haber", LEMMA: "haber", TAG: 
AUX}, <del> {ORTH: "lo", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "haberlos": [ <del> {ORTH: "haber", LEMMA: "haber", TAG: AUX}, <del> {ORTH: "los", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "haberme": [ <del> {ORTH: "haber", LEMMA: "haber", TAG: AUX}, <del> {ORTH: "me", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "haberse": [ <del> {ORTH: "haber", LEMMA: "haber", TAG: AUX}, <del> {ORTH: "se", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "hacerle": [ <del> {ORTH: "hacer", LEMMA: "hacer", TAG: AUX}, <del> {ORTH: "le", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "hacerles": [ <del> {ORTH: "hacer", LEMMA: "hacer", TAG: AUX}, <del> {ORTH: "les", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "hallarse": [ <del> {ORTH: "hallar", LEMMA: "hallar", TAG: AUX}, <del> {ORTH: "se", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "imaginaros": [ <del> {ORTH: "imaginar", LEMMA: "imaginar", TAG: AUX}, <del> {ORTH: "os", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "insinuarle": [ <del> {ORTH: "insinuar", LEMMA: "insinuar", TAG: AUX}, <del> {ORTH: "le", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "justificarla": [ <del> {ORTH: "justificar", LEMMA: "justificar", TAG: AUX}, <del> {ORTH: "la", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "mantenerlas": [ <del> {ORTH: "mantener", LEMMA: "mantener", TAG: AUX}, <del> {ORTH: "las", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "mantenerlos": [ <del> {ORTH: "mantener", LEMMA: "mantener", TAG: AUX}, <del> {ORTH: "los", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "mantenerme": [ <del> {ORTH: "mantener", LEMMA: "mantener", TAG: AUX}, <del> {ORTH: "me", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "pasarte": [ <del> {ORTH: "pasar", LEMMA: "pasar", TAG: AUX}, <del> {ORTH: "te", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "pedirle": [ <del> {ORTH: "pedir", LEMMA: "pedir", TAG: AUX}, <del> {ORTH: "le", LEMMA: "él", TAG: "PRON"} 
<add> {ORTH: "l", LEMMA: "el", TAG: DET} <ide> ], <ide> <ide> "pel": [ <del> {ORTH: "per", LEMMA: "per", TAG: ADP}, <del> {ORTH: "el", LEMMA: "el", TAG: DET} <del> ], <del> <del> "pidiéndonos": [ <del> {ORTH: "pidiendo", LEMMA: "pedir", TAG: AUX}, <del> {ORTH: "nos", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "poderle": [ <del> {ORTH: "poder", LEMMA: "poder", TAG: AUX}, <del> {ORTH: "le", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "preguntarse": [ <del> {ORTH: "preguntar", LEMMA: "preguntar", TAG: AUX}, <del> {ORTH: "se", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "preguntándose": [ <del> {ORTH: "preguntando", LEMMA: "preguntar", TAG: AUX}, <del> {ORTH: "se", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "presentarla": [ <del> {ORTH: "presentar", LEMMA: "presentar", TAG: AUX}, <del> {ORTH: "la", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "pudiéndolo": [ <del> {ORTH: "pudiendo", LEMMA: "poder", TAG: AUX}, <del> {ORTH: "lo", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "pudiéndose": [ <del> {ORTH: "pudiendo", LEMMA: "poder", TAG: AUX}, <del> {ORTH: "se", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "quererle": [ <del> {ORTH: "querer", LEMMA: "querer", TAG: AUX}, <del> {ORTH: "le", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "rasgarse": [ <del> {ORTH: "Rasgar", LEMMA: "rasgar", TAG: AUX}, <del> {ORTH: "se", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "repetirlo": [ <del> {ORTH: "repetir", LEMMA: "repetir", TAG: AUX}, <del> {ORTH: "lo", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "robarle": [ <del> {ORTH: "robar", LEMMA: "robar", TAG: AUX}, <del> {ORTH: "le", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "seguirlos": [ <del> {ORTH: "seguir", LEMMA: "seguir", TAG: AUX}, <del> {ORTH: "los", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "serle": [ <del> {ORTH: "ser", LEMMA: "ser", TAG: AUX}, <del> {ORTH: "le", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> 
"serlo": [ <del> {ORTH: "ser", LEMMA: "ser", TAG: AUX}, <del> {ORTH: "lo", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "señalándole": [ <del> {ORTH: "señalando", LEMMA: "señalar", TAG: AUX}, <del> {ORTH: "le", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "suplicarle": [ <del> {ORTH: "suplicar", LEMMA: "suplicar", TAG: AUX}, <del> {ORTH: "le", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "tenerlos": [ <del> {ORTH: "tener", LEMMA: "tener", TAG: AUX}, <del> {ORTH: "los", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "vengarse": [ <del> {ORTH: "vengar", LEMMA: "vengar", TAG: AUX}, <del> {ORTH: "se", LEMMA: PRON_LEMMA, TAG: "PRON"} <del> ], <del> <del> "verla": [ <del> {ORTH: "ver", LEMMA: "ver", TAG: AUX}, <del> {ORTH: "la", LEMMA: PRON_LEMMA, TAG: "PRON"} <add> {ORTH: "pe", LEMMA: "per", TAG: ADP}, <add> {ORTH: "l", LEMMA: "el", TAG: DET} <ide> ], <ide> <del> "verle": [ <del> {ORTH: "ver", LEMMA: "ver", TAG: AUX}, <del> {ORTH: "le", LEMMA: PRON_LEMMA, TAG: "PRON"} <add> "pal": [ <add> {ORTH: "pa", LEMMA: "para"}, <add> {ORTH: "l", LEMMA: DET_LEMMA, NORM: "el"} <ide> ], <ide> <del> "volverlo": [ <del> {ORTH: "volver", LEMMA: "volver", TAG: AUX}, <del> {ORTH: "lo", LEMMA: PRON_LEMMA, TAG: "PRON"} <add> "pala": [ <add> {ORTH: "pa", LEMMA: "para"}, <add> {ORTH: "la", LEMMA: DET_LEMMA} <ide> ], <ide> <ide> "aprox.": [
1
Javascript
Javascript
convert the secondary toolbar to es6 syntax
67049602c57fc1ce450097d1d5a6d9590276e3ae
<ide><path>web/secondary_toolbar.js <ide> import { mozL10n, SCROLLBAR_PADDING } from './ui_utils'; <ide> * the document properties dialog. <ide> */ <ide> <del>/** <del> * @class <del> */ <del>var SecondaryToolbar = (function SecondaryToolbarClosure() { <add>class SecondaryToolbar { <ide> /** <del> * @constructs SecondaryToolbar <ide> * @param {SecondaryToolbarOptions} options <ide> * @param {HTMLDivElement} mainContainer <ide> * @param {EventBus} eventBus <ide> */ <del> function SecondaryToolbar(options, mainContainer, eventBus) { <add> constructor(options, mainContainer, eventBus) { <ide> this.toolbar = options.toolbar; <ide> this.toggleButton = options.toggleButton; <ide> this.toolbarButtonContainer = options.toolbarButtonContainer; <ide> var SecondaryToolbar = (function SecondaryToolbarClosure() { <ide> this.eventBus.on('resize', this._setMaxHeight.bind(this)); <ide> } <ide> <del> SecondaryToolbar.prototype = { <del> /** <del> * @return {boolean} <del> */ <del> get isOpen() { <del> return this.opened; <del> }, <del> <del> setPageNumber: function SecondaryToolbar_setPageNumber(pageNumber) { <del> this.pageNumber = pageNumber; <del> this._updateUIState(); <del> }, <del> <del> setPagesCount: function SecondaryToolbar_setPagesCount(pagesCount) { <del> this.pagesCount = pagesCount; <del> this._updateUIState(); <del> }, <del> <del> reset: function SecondaryToolbar_reset() { <del> this.pageNumber = 0; <del> this.pagesCount = 0; <del> this._updateUIState(); <del> }, <del> <del> _updateUIState: function SecondaryToolbar_updateUIState() { <del> var items = this.items; <del> <del> items.firstPage.disabled = (this.pageNumber <= 1); <del> items.lastPage.disabled = (this.pageNumber >= this.pagesCount); <del> items.pageRotateCw.disabled = this.pagesCount === 0; <del> items.pageRotateCcw.disabled = this.pagesCount === 0; <del> }, <del> <del> _bindClickListeners: function SecondaryToolbar_bindClickListeners() { <del> // Button to toggle the visibility of the secondary toolbar. 
<del> this.toggleButton.addEventListener('click', this.toggle.bind(this)); <del> <del> // All items within the secondary toolbar. <del> for (let button in this.buttons) { <del> let { element, eventName, close, } = this.buttons[button]; <del> <del> element.addEventListener('click', (evt) => { <del> if (eventName !== null) { <del> this.eventBus.dispatch(eventName, { source: this, }); <del> } <del> if (close) { <del> this.close(); <del> } <del> }); <del> } <del> }, <del> <del> _bindHandToolListener: <del> function SecondaryToolbar_bindHandToolListener(toggleHandToolButton) { <del> var isHandToolActive = false; <del> this.eventBus.on('handtoolchanged', function (e) { <del> if (isHandToolActive === e.isActive) { <del> return; <add> /** <add> * @return {boolean} <add> */ <add> get isOpen() { <add> return this.opened; <add> } <add> <add> setPageNumber(pageNumber) { <add> this.pageNumber = pageNumber; <add> this._updateUIState(); <add> } <add> <add> setPagesCount(pagesCount) { <add> this.pagesCount = pagesCount; <add> this._updateUIState(); <add> } <add> <add> reset() { <add> this.pageNumber = 0; <add> this.pagesCount = 0; <add> this._updateUIState(); <add> } <add> <add> _updateUIState() { <add> this.items.firstPage.disabled = (this.pageNumber <= 1); <add> this.items.lastPage.disabled = (this.pageNumber >= this.pagesCount); <add> this.items.pageRotateCw.disabled = this.pagesCount === 0; <add> this.items.pageRotateCcw.disabled = this.pagesCount === 0; <add> } <add> <add> _bindClickListeners() { <add> // Button to toggle the visibility of the secondary toolbar. <add> this.toggleButton.addEventListener('click', this.toggle.bind(this)); <add> <add> // All items within the secondary toolbar. 
<add> for (let button in this.buttons) { <add> let { element, eventName, close, } = this.buttons[button]; <add> <add> element.addEventListener('click', (evt) => { <add> if (eventName !== null) { <add> this.eventBus.dispatch(eventName, { source: this, }); <ide> } <del> isHandToolActive = e.isActive; <del> if (isHandToolActive) { <del> toggleHandToolButton.title = <del> mozL10n.get('hand_tool_disable.title', null, 'Disable hand tool'); <del> toggleHandToolButton.firstElementChild.textContent = <del> mozL10n.get('hand_tool_disable_label', null, 'Disable hand tool'); <del> } else { <del> toggleHandToolButton.title = <del> mozL10n.get('hand_tool_enable.title', null, 'Enable hand tool'); <del> toggleHandToolButton.firstElementChild.textContent = <del> mozL10n.get('hand_tool_enable_label', null, 'Enable hand tool'); <add> if (close) { <add> this.close(); <ide> } <ide> }); <del> }, <del> <del> open: function SecondaryToolbar_open() { <del> if (this.opened) { <del> return; <del> } <del> this.opened = true; <del> this._setMaxHeight(); <add> } <add> } <ide> <del> this.toggleButton.classList.add('toggled'); <del> this.toolbar.classList.remove('hidden'); <del> }, <add> _bindHandToolListener(toggleHandToolButton) { <add> let isHandToolActive = false; <ide> <del> close: function SecondaryToolbar_close() { <del> if (!this.opened) { <add> this.eventBus.on('handtoolchanged', function(evt) { <add> if (isHandToolActive === evt.isActive) { <ide> return; <ide> } <del> this.opened = false; <del> this.toolbar.classList.add('hidden'); <del> this.toggleButton.classList.remove('toggled'); <del> }, <del> <del> toggle: function SecondaryToolbar_toggle() { <del> if (this.opened) { <del> this.close(); <add> isHandToolActive = evt.isActive; <add> <add> if (isHandToolActive) { <add> toggleHandToolButton.title = <add> mozL10n.get('hand_tool_disable.title', null, 'Disable hand tool'); <add> toggleHandToolButton.firstElementChild.textContent = <add> mozL10n.get('hand_tool_disable_label', null, 
'Disable hand tool'); <ide> } else { <del> this.open(); <del> } <del> }, <del> <del> /** <del> * @private <del> */ <del> _setMaxHeight: function SecondaryToolbar_setMaxHeight() { <del> if (!this.opened) { <del> return; // Only adjust the 'max-height' if the toolbar is visible. <add> toggleHandToolButton.title = <add> mozL10n.get('hand_tool_enable.title', null, 'Enable hand tool'); <add> toggleHandToolButton.firstElementChild.textContent = <add> mozL10n.get('hand_tool_enable_label', null, 'Enable hand tool'); <ide> } <del> this.containerHeight = this.mainContainer.clientHeight; <add> }); <add> } <ide> <del> if (this.containerHeight === this.previousContainerHeight) { <del> return; <del> } <del> this.toolbarButtonContainer.setAttribute('style', <del> 'max-height: ' + (this.containerHeight - SCROLLBAR_PADDING) + 'px;'); <add> open() { <add> if (this.opened) { <add> return; <add> } <add> this.opened = true; <add> this._setMaxHeight(); <add> <add> this.toggleButton.classList.add('toggled'); <add> this.toolbar.classList.remove('hidden'); <add> } <ide> <del> this.previousContainerHeight = this.containerHeight; <add> close() { <add> if (!this.opened) { <add> return; <ide> } <del> }; <add> this.opened = false; <add> this.toolbar.classList.add('hidden'); <add> this.toggleButton.classList.remove('toggled'); <add> } <ide> <del> return SecondaryToolbar; <del>})(); <add> toggle() { <add> if (this.opened) { <add> this.close(); <add> } else { <add> this.open(); <add> } <add> } <add> <add> /** <add> * @private <add> */ <add> _setMaxHeight() { <add> if (!this.opened) { <add> return; // Only adjust the 'max-height' if the toolbar is visible. 
<add> } <add> this.containerHeight = this.mainContainer.clientHeight; <add> <add> if (this.containerHeight === this.previousContainerHeight) { <add> return; <add> } <add> this.toolbarButtonContainer.setAttribute('style', <add> 'max-height: ' + (this.containerHeight - SCROLLBAR_PADDING) + 'px;'); <add> <add> this.previousContainerHeight = this.containerHeight; <add> } <add>} <ide> <ide> export { <ide> SecondaryToolbar,
1
Javascript
Javascript
use json.stringify to cast console output
3f205c5060c84c3bb09b68a587838b3c6fd62f08
<ide><path>client/src/client/workers/test-evaluator.js <ide> const oldLog = self.console.log.bind(self.console); <ide> self.console.log = function proxyConsole(...args) { <ide> self.postMessage({ <ide> type: 'LOG', <del> data: args.map(log => JSON.stringify(log)).join(' ') <add> data: args.map(arg => JSON.stringify(arg)).join(' ') <ide> }); <ide> return oldLog(...args); <ide> }; <ide> <ide> self.onmessage = async e => { <ide> /* eslint-disable no-unused-vars */ <del> const { <del> code = '' <del> } = e.data; <add> const { code = '' } = e.data; <ide> const assert = chai.assert; <ide> // Fake Deep Equal dependency <ide> const DeepEqual = (a, b) => JSON.stringify(a) === JSON.stringify(b); <ide><path>client/src/templates/Challenges/utils/frame.js <ide> const mountFrame = document => ({ element, ...rest }) => { <ide> const buildProxyConsole = proxyLogger => ctx => { <ide> const oldLog = ctx.window.console.log.bind(ctx.window.console); <ide> ctx.window.console.log = function proxyConsole(...args) { <del> proxyLogger(args); <add> proxyLogger(args.map(arg => JSON.stringify(arg)).join(' ')); <ide> return oldLog(...args); <ide> }; <ide> return ctx;
2
Python
Python
remove certifi test
5cf1f8fc74f58d84e6fa81eabedbaf652c5ba2b2
<ide><path>libcloud/test/test_httplib_ssl.py <ide> def test_connect_throws_friendly_error_message_on_ssl_wrap_connection_reset_by_p <ide> self.assertEqual(e.errno, 105) <ide> self.assertTrue('Some random error' in str(e)) <ide> <del> def test_certifi_ca_bundle_in_search_path(self): <del> mock_certifi_ca_bundle_path = '/certifi/bundle/path' <del> <del> # Certifi not available <del> import libcloud.security <del> reload(libcloud.security) <del> <del> original_length = len(libcloud.security.CA_CERTS_PATH) <del> <del> self.assertTrue(mock_certifi_ca_bundle_path not in <del> libcloud.security.CA_CERTS_PATH) <del> <del> # Certifi is available <del> mock_certifi = mock.Mock() <del> mock_certifi.where.return_value = mock_certifi_ca_bundle_path <del> sys.modules['certifi'] = mock_certifi <del> <del> # Certifi CA bundle path should be injected at the begining of search list <del> import libcloud.security <del> reload(libcloud.security) <del> <del> self.assertEqual(libcloud.security.CA_CERTS_PATH[0], <del> mock_certifi_ca_bundle_path) <del> self.assertEqual(len(libcloud.security.CA_CERTS_PATH), <del> (original_length + 1)) <del> <del> # Certifi is available, but USE_CERTIFI is set to False <del> os.environ['LIBCLOUD_SSL_USE_CERTIFI'] = 'false' <del> <del> import libcloud.security <del> reload(libcloud.security) <del> <del> self.assertTrue(mock_certifi_ca_bundle_path not in <del> libcloud.security.CA_CERTS_PATH) <del> self.assertEqual(len(libcloud.security.CA_CERTS_PATH), original_length) <del> <del> # And enabled <del> os.environ['LIBCLOUD_SSL_USE_CERTIFI'] = 'true' <del> <del> import libcloud.security <del> reload(libcloud.security) <del> <del> self.assertEqual(libcloud.security.CA_CERTS_PATH[0], <del> mock_certifi_ca_bundle_path) <del> self.assertEqual(len(libcloud.security.CA_CERTS_PATH), <del> (original_length + 1)) <del> <del> <ide> if __name__ == '__main__': <ide> sys.exit(unittest.main())
1
Text
Text
add docs about how to use remote browser in test
22ca875f9c80a3d648442b4fc30a0cf44bf6be00
<ide><path>guides/source/testing.md <ide> class ApplicationSystemTestCase < ActionDispatch::SystemTestCase <ide> end <ide> ``` <ide> <add>If you want to use a remote browser, e.g. <add>[Headless Chrome in Docker](https://github.com/SeleniumHQ/docker-selenium), <add>you have to add remote `url` through `options`. <add> <add>```ruby <add>require "test_helper" <add> <add>class ApplicationSystemTestCase < ActionDispatch::SystemTestCase <add> options = ENV["SELENIUM_REMOTE_URL"].present? ? { url: ENV["SELENIUM_REMOTE_URL"] } : {} <add> driven_by :selenium, using: :headless_chrome, options: options <add>end <add>``` <add> <add>In such a case, the gem `webdrivers` is no longer required. You could remove it <add>completely or add `require:` option in `Gemfile`. <add> <add>```ruby <add># ... <add>group :test do <add> gem "webdrivers", require: !ENV["SELENIUM_REMOTE_URL"] || ENV["SELENIUM_REMOTE_URL"].empty? <add>end <add>``` <add> <add>Now you should get a connection to remote browser. <add> <add>```bash <add>$ SELENIUM_REMOTE_URL=http://localhost:4444/wd/hub bin/rails test:system <add>``` <add> <add>If your application in test is running remote too, e.g. Docker container, <add>Capybara needs more input about how to <add>[call remote servers](https://github.com/teamcapybara/capybara#calling-remote-servers). <add> <add>```ruby <add>require "test_helper" <add> <add>class ApplicationSystemTestCase < ActionDispatch::SystemTestCase <add> def setup <add> Capybara.server_host = "0.0.0.0" # bind to all interfaces <add> Capybara.app_host = "http://#{IPSocket.getaddress(Socket.gethostname)}" if ENV["SELENIUM_REMOTE_URL"].present? <add> super <add> end <add> # ... <add>end <add>``` <add> <add>Now you should get a connection to remote browser and server, regardless if it <add>is running in Docker container or CI. 
<add> <ide> If your Capybara configuration requires more setup than provided by Rails, this <ide> additional configuration could be added into the `application_system_test_case.rb` <ide> file.
1
Text
Text
add 3.6 release to index
b884bdd39129070aca47233dc70ad7556cf764b6
<ide><path>docs/index.md <ide> General guides to using REST framework. <ide> * [3.3 Announcement][3.3-announcement] <ide> * [3.4 Announcement][3.4-announcement] <ide> * [3.5 Announcement][3.5-announcement] <add>* [3.6 Announcement][3.6-announcement] <ide> * [Kickstarter Announcement][kickstarter-announcement] <ide> * [Mozilla Grant][mozilla-grant] <ide> * [Funding][funding] <ide> OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. <ide> [3.3-announcement]: topics/3.3-announcement.md <ide> [3.4-announcement]: topics/3.4-announcement.md <ide> [3.5-announcement]: topics/3.5-announcement.md <add>[3.6-announcement]: topics/3.6-announcement.md <ide> [kickstarter-announcement]: topics/kickstarter-announcement.md <ide> [mozilla-grant]: topics/mozilla-grant.md <ide> [funding]: topics/funding.md
1
Go
Go
fix empty-lines (revive)
05042ce4723297f1fceabd2d7b928ff543537dfb
<ide><path>daemon/images/image_delete.go <ide> func (i *ImageService) ImageDelete(ctx context.Context, imageRef string, force, <ide> records = append(records, untaggedRecord) <ide> } else { <ide> remainingRefs = append(remainingRefs, repoRef) <del> <ide> } <ide> } <ide> repoRefs = remainingRefs <ide><path>daemon/images/image_list.go <ide> func (i *ImageService) Images(ctx context.Context, opts types.ImageListOptions) <ide> } <ide> if summary.RepoDigests == nil && summary.RepoTags == nil { <ide> if opts.All || len(i.imageStore.Children(id)) == 0 { <del> <ide> if opts.Filters.Contains("dangling") && !danglingOnly { <ide> // dangling=false case, so dangling image is not needed <ide> continue <ide><path>daemon/images/image_search.go <ide> var acceptedSearchFilterTags = map[string]bool{ <ide> func (i *ImageService) SearchRegistryForImages(ctx context.Context, searchFilters filters.Args, term string, limit int, <ide> authConfig *registry.AuthConfig, <ide> headers map[string][]string) (*registry.SearchResults, error) { <del> <ide> if err := searchFilters.Validate(acceptedSearchFilterTags); err != nil { <ide> return nil, err <ide> } <ide><path>daemon/images/image_squash.go <ide> import ( <ide> // The existing image(s) is not destroyed. <ide> // If no parent is specified, a new image with the diff of all the specified image's layers merged into a new layer that has no parents. <ide> func (i *ImageService) SquashImage(id, parent string) (string, error) { <del> <ide> var ( <ide> img *image.Image <ide> err error <ide><path>daemon/images/store.go <ide> func (c *contentStoreForPull) Writer(ctx context.Context, opts ...content.Writer <ide> if err := o(&cfg); err != nil { <ide> return nil, err <ide> } <del> <ide> } <ide> c.addDigested(cfg.Desc.Digest) <ide> }
5
Javascript
Javascript
replace string concatenation with template
a0aff57c5a5e46a2f93fc5bce50191993f98c673
<ide><path>lib/_http_client.js <ide> function ClientRequest(options, cb) { <ide> <ide> var path; <ide> if (options.path) { <del> path = '' + options.path; <add> path = String(options.path); <ide> var invalidPath; <ide> if (path.length <= 39) { // Determined experimentally in V8 5.4 <ide> invalidPath = isInvalidPath(path);
1
Python
Python
make matrix_power again work for object arrays
b080c5b7a7cf1c0da91e6c0ecb1fdd490b45ce5c
<ide><path>numpy/linalg/linalg.py <ide> def matrix_power(a, n): <ide> of the same shape as M is returned. If ``n < 0``, the inverse <ide> is computed and then raised to the ``abs(n)``. <ide> <add> .. note:: Stacks of object matrices are not currently supported. <add> <ide> Parameters <ide> ---------- <ide> a : (..., M, M) array_like <ide> def matrix_power(a, n): <ide> except TypeError: <ide> raise TypeError("exponent must be an integer") <ide> <add> # Fall back on dot for object arrays. Object arrays are not supported by <add> # the current implementation of matmul using einsum <add> if a.dtype != object: <add> fmatmul = matmul <add> elif a.ndim == 2: <add> fmatmul = dot <add> else: <add> raise NotImplementedError( <add> "matrix_power not supported for stacks of object arrays") <add> <ide> if n == 0: <ide> a = empty_like(a) <ide> a[...] = eye(a.shape[-2], dtype=a.dtype) <ide> def matrix_power(a, n): <ide> return a <ide> <ide> elif n == 2: <del> return matmul(a, a) <add> return fmatmul(a, a) <ide> <ide> elif n == 3: <del> return matmul(matmul(a, a), a) <add> return fmatmul(fmatmul(a, a), a) <ide> <ide> # Use binary decomposition to reduce the number of matrix multiplications. <ide> # Here, we iterate over the bits of n, from LSB to MSB, raise `a` to <ide> # increasing powers of 2, and multiply into the result as needed. 
<ide> z = result = None <ide> while n > 0: <del> z = a if z is None else matmul(z, z) <add> z = a if z is None else fmatmul(z, z) <ide> n, bit = divmod(n, 2) <ide> if bit: <del> result = z if result is None else matmul(result, z) <add> result = z if result is None else fmatmul(result, z) <ide> <ide> return result <ide> <ide><path>numpy/linalg/tests/test_linalg.py <ide> def test_future_rcond(self): <ide> assert_(len(w) == 1) <ide> <ide> <add>@pytest.mark.parametrize('dt', [np.dtype(c) for c in '?bBhHiIqQefdgFDGO']) <ide> class TestMatrixPower(object): <del> R90 = array([[0, 1], [-1, 0]]) <del> Arb22 = array([[4, -7], [-2, 10]]) <add> <add> rshft_0 = np.eye(4) <add> rshft_1 = rshft_0[[3, 0, 1, 2]] <add> rshft_2 = rshft_0[[2, 3, 0, 1]] <add> rshft_3 = rshft_0[[1, 2, 3, 0]] <add> rshft_all = [rshft_0, rshft_1, rshft_2, rshft_3] <ide> noninv = array([[1, 0], [0, 0]]) <del> arbfloat = array([[[0.1, 3.2], [1.2, 0.7]], <del> [[0.2, 6.4], [2.4, 1.4]]]) <add> stacked = np.block([[[rshft_0]]]*2) <add> #FIXME the 'e' dtype might work in future <add> dtnoinv = [object, np.dtype('e'), np.dtype('g'), np.dtype('G')] <ide> <del> large = identity(10) <del> t = large[1, :].copy() <del> large[1, :] = large[0, :] <del> large[0, :] = t <ide> <del> def test_large_power(self): <add> def test_large_power(self, dt): <add> power = matrix_power <add> rshft = self.rshft_1.astype(dt) <ide> assert_equal( <del> matrix_power(self.R90, 2 ** 100 + 2 ** 10 + 2 ** 5 + 1), self.R90) <add> matrix_power(rshft, 2**100 + 2**10 + 2**5 + 0), self.rshft_0) <ide> assert_equal( <del> matrix_power(self.R90, 2 ** 100 + 2 ** 10 + 1), self.R90) <add> matrix_power(rshft, 2**100 + 2**10 + 2**5 + 1), self.rshft_1) <ide> assert_equal( <del> matrix_power(self.R90, 2 ** 100 + 2 + 1), -self.R90) <del> <del> def test_large_power_trailing_zero(self): <add> matrix_power(rshft, 2**100 + 2**10 + 2**5 + 2), self.rshft_2) <ide> assert_equal( <del> matrix_power(self.R90, 2 ** 100 + 2 ** 10 + 2 ** 5), identity(2)) <add> 
matrix_power(rshft, 2**100 + 2**10 + 2**5 + 3), self.rshft_3) <ide> <del> def testip_zero(self): <add> def test_power_is_zero(self, dt): <ide> def tz(M): <ide> mz = matrix_power(M, 0) <ide> assert_equal(mz, identity_like_generalized(M)) <ide> assert_equal(mz.dtype, M.dtype) <del> for M in [self.Arb22, self.arbfloat, self.large]: <del> tz(M) <del> <del> def testip_one(self): <del> def tz(M): <del> mz = matrix_power(M, 1) <del> assert_equal(mz, M) <del> assert_equal(mz.dtype, M.dtype) <del> for M in [self.Arb22, self.arbfloat, self.large]: <del> tz(M) <del> <del> def testip_two(self): <del> def tz(M): <del> mz = matrix_power(M, 2) <del> assert_equal(mz, matmul(M, M)) <del> assert_equal(mz.dtype, M.dtype) <del> for M in [self.Arb22, self.arbfloat, self.large]: <del> tz(M) <del> <del> def testip_invert(self): <del> def tz(M): <del> mz = matrix_power(M, -1) <del> assert_almost_equal(matmul(mz, M), identity_like_generalized(M)) <del> for M in [self.R90, self.Arb22, self.arbfloat, self.large]: <del> tz(M) <del> <del> def test_invert_noninvertible(self): <del> assert_raises(LinAlgError, matrix_power, self.noninv, -1) <del> <del> def test_invalid(self): <del> assert_raises(TypeError, matrix_power, self.R90, 1.5) <del> assert_raises(TypeError, matrix_power, self.R90, [1]) <del> assert_raises(LinAlgError, matrix_power, np.array([1]), 1) <del> assert_raises(LinAlgError, matrix_power, np.array([[1], [2]]), 1) <del> assert_raises(LinAlgError, matrix_power, np.ones((4, 3, 2)), 1) <del> <del> <del>class TestBoolPower(object): <add> <add> for mat in self.rshft_all: <add> tz(mat.astype(dt)) <add> if dt != object: <add> tz(self.stacked.astype(dt)) <add> <add> def test_power_is_one(self, dt): <add> def tz(mat): <add> mz = matrix_power(mat, 1) <add> assert_equal(mz, mat) <add> assert_equal(mz.dtype, mat.dtype) <add> <add> for mat in self.rshft_all: <add> tz(mat.astype(dt)) <add> if dt != object: <add> tz(self.stacked.astype(dt)) <add> <add> def test_power_is_two(self, dt): <add> def 
tz(mat): <add> mz = matrix_power(mat, 2) <add> mmul = matmul if mat.dtype != object else dot <add> assert_equal(mz, mmul(mat, mat)) <add> assert_equal(mz.dtype, mat.dtype) <add> <add> for mat in self.rshft_all: <add> tz(mat.astype(dt)) <add> if dt != object: <add> tz(self.stacked.astype(dt)) <add> <add> def test_power_is_minus_one(self, dt): <add> def tz(mat): <add> invmat = matrix_power(mat, -1) <add> mmul = matmul if mat.dtype != object else dot <add> assert_almost_equal( <add> mmul(invmat, mat), identity_like_generalized(mat)) <add> <add> for mat in self.rshft_all: <add> if dt not in self.dtnoinv: <add> tz(mat.astype(dt)) <add> <add> def test_exceptions_bad_power(self, dt): <add> mat = self.rshft_0.astype(dt) <add> assert_raises(TypeError, matrix_power, mat, 1.5) <add> assert_raises(TypeError, matrix_power, mat, [1]) <add> <add> <add> def test_exceptions_non_square(self, dt): <add> assert_raises(LinAlgError, matrix_power, np.array([1], dt), 1) <add> assert_raises(LinAlgError, matrix_power, np.array([[1], [2]], dt), 1) <add> assert_raises(LinAlgError, matrix_power, np.ones((4, 3, 2), dt), 1) <add> <add> def test_exceptions_not_invertible(self, dt): <add> if dt in self.dtnoinv: <add> return <add> mat = self.noninv.astype(dt) <add> assert_raises(LinAlgError, matrix_power, mat, -1) <ide> <del> def test_square(self): <del> A = array([[True, False], [True, True]]) <del> assert_equal(matrix_power(A, 2), A) <ide> <ide> <ide> class TestEigvalshCases(HermitianTestCase, HermitianGeneralizedTestCase):
2
Text
Text
expand type families and bash touch documentation
1567a16b2f112dc73130527b429b71cadf73fbeb
<ide><path>guide/english/bash/bash-touch/index.md <ide> title: Bash touch <ide> ## Bash command: touch <ide> <ide> **Change timestamps of files.** <add>**Quickly create new files.** <ide> <ide> ``` <ide> touch [options] filename <ide> ``` <ide> <del>Creates empty file if not exists or modify times of existing files to current time. <add>Creates empty file if filename does not exist or modifies timestamps of existing files to current time. <add> <add>This command can create multiple empty files in one line of code. <ide> <ide> Commonly used options: <ide> - `-t` change timestamp to specific date ((YYYYMMDDHHMM.SS)) instead of current time. <ide> touch -am file3 <ide> <ide> ### More Information: <ide> * [Man pages](http://man7.org/linux/man-pages/man1/touch.1.html) <add> <add>* [Examples and options to use with the touch command](https://ss64.com/bash/touch.html) <ide><path>guide/english/typography/type-families/index.md <ide> Punctuation gives type emotion, and gives indications on when to stop, slow down <ide> - Script-Cursive <ide> <ide> #### More Information: <add>[About Typeface Families (Fonts.com)](https://www.fonts.com/content/learning/fontology/level-1/type-families/about-typeface-families") <add> <add>[Typeface (Wikipedia)](https://en.wikipedia.org/wiki/Typeface)
2
Go
Go
normalize comment formatting
2deeb6ad9640761bf017b522b0fa4ae56fae8a1f
<ide><path>builder/remotecontext/remote_test.go <ide> import ( <ide> "gotest.tools/fs" <ide> ) <ide> <del>var binaryContext = []byte{0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00} //xz magic <add>var binaryContext = []byte{0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00} // xz magic <ide> <ide> func TestSelectAcceptableMIME(t *testing.T) { <ide> validMimeStrings := []string{
1
Javascript
Javascript
remove obsolete <<content>> transclusion
bd530e225750e9b30b6300fc3d7447a5f1071667
<ide><path>src/ng/compile.js <ide> function $CompileProvider($provide) { <ide> Suffix = 'Directive', <ide> COMMENT_DIRECTIVE_REGEXP = /^\s*directive\:\s*([\d\w\-_]+)\s+(.*)$/, <ide> CLASS_DIRECTIVE_REGEXP = /(([\d\w\-_]+)(?:\:([^;]+))?;?)/, <del> CONTENT_REGEXP = /\<\<content\>\>/i, <ide> HAS_ROOT_ELEMENT = /^\<[\s\S]*\>$/; <ide> <ide> <ide> function $CompileProvider($provide) { <ide> assertNoDuplicate('template', templateDirective, directive, element); <ide> templateDirective = directive; <ide> <del> // include the contents of the original element into the template and replace the element <del> var content = directiveValue.replace(CONTENT_REGEXP, element.html()); <del> templateNode = jqLite(content)[0]; <add> templateNode = jqLite(directiveValue)[0]; <ide> if (directive.replace) { <ide> replaceWith(rootElement, element, templateNode); <ide> <ide> function $CompileProvider($provide) { <ide> <ide> ii = directives.length; <ide> } else { <del> element.html(content); <add> element.html(directiveValue); <ide> } <ide> } <ide> <ide> function $CompileProvider($provide) { <ide> <ide> $http.get(asyncWidgetDirective.templateUrl, {cache: $templateCache}). 
<ide> success(function(content) { <del> content = trim(content).replace(CONTENT_REGEXP, html); <ide> if (replace && !content.match(HAS_ROOT_ELEMENT)) { <ide> throw Error('Template must have exactly one root element: ' + content); <ide> } <ide><path>test/ng/compileSpec.js <ide> describe('$compile', function() { <ide> $compileProvider.directive('replace', valueFn({ <ide> restrict: 'CAM', <ide> replace: true, <del> template: '<div class="log" style="width: 10px" high-log>Hello: <<CONTENT>></div>', <add> template: '<div class="log" style="width: 10px" high-log>Replace!</div>', <ide> compile: function(element, attr) { <ide> attr.$set('compiled', 'COMPILED'); <ide> expect(element).toBe(attr.$$element); <ide> } <ide> })); <ide> $compileProvider.directive('append', valueFn({ <ide> restrict: 'CAM', <del> template: '<div class="log" style="width: 10px" high-log>Hello: <<CONTENT>></div>', <add> template: '<div class="log" style="width: 10px" high-log>Append!</div>', <ide> compile: function(element, attr) { <ide> attr.$set('compiled', 'COMPILED'); <ide> expect(element).toBe(attr.$$element); <ide> describe('$compile', function() { <ide> <ide> <ide> it('should replace element with template', inject(function($compile, $rootScope) { <del> element = $compile('<div><div replace>content</div><div>')($rootScope); <del> expect(element.text()).toEqual('Hello: content'); <add> element = $compile('<div><div replace>ignore</div><div>')($rootScope); <add> expect(element.text()).toEqual('Replace!'); <ide> expect(element.find('div').attr('compiled')).toEqual('COMPILED'); <ide> })); <ide> <ide> <ide> it('should append element with template', inject(function($compile, $rootScope) { <del> element = $compile('<div><div append>content</div><div>')($rootScope); <del> expect(element.text()).toEqual('Hello: content'); <add> element = $compile('<div><div append>ignore</div><div>')($rootScope); <add> expect(element.text()).toEqual('Append!'); <ide> 
expect(element.find('div').attr('compiled')).toEqual('COMPILED'); <ide> })); <ide> <ide> <del> it('should compile replace template', inject(function($compile, $rootScope, log) { <del> element = $compile('<div><div replace medium-log>{{ "angular" }}</div><div>') <add> it('should compile template when replacing', inject(function($compile, $rootScope, log) { <add> element = $compile('<div><div replace medium-log>ignore</div><div>') <ide> ($rootScope); <ide> $rootScope.$digest(); <del> expect(element.text()).toEqual('Hello: angular'); <add> expect(element.text()).toEqual('Replace!'); <ide> // HIGH goes after MEDIUM since it executes as part of replaced template <ide> expect(log).toEqual('MEDIUM; HIGH; LOG'); <ide> })); <ide> <ide> <del> it('should compile append template', inject(function($compile, $rootScope, log) { <del> element = $compile('<div><div append medium-log>{{ "angular" }}</div><div>') <add> it('should compile template when appending', inject(function($compile, $rootScope, log) { <add> element = $compile('<div><div append medium-log>ignore</div><div>') <ide> ($rootScope); <ide> $rootScope.$digest(); <del> expect(element.text()).toEqual('Hello: angular'); <add> expect(element.text()).toEqual('Append!'); <ide> expect(log).toEqual('HIGH; LOG; MEDIUM'); <ide> })); <ide> <ide> describe('$compile', function() { <ide> } <ide> })); <ide> <del> it('should play nice with repeater when inline', inject(function($compile, $rootScope) { <add> it('should play nice with repeater when replacing', inject(function($compile, $rootScope) { <ide> element = $compile( <ide> '<div>' + <del> '<div ng-repeat="i in [1,2]" replace>{{i}}; </div>' + <add> '<div ng-repeat="i in [1,2]" replace></div>' + <ide> '</div>')($rootScope); <ide> $rootScope.$digest(); <del> expect(element.text()).toEqual('Hello: 1; Hello: 2; '); <add> expect(element.text()).toEqual('Replace!Replace!'); <ide> })); <ide> <ide> <del> it('should play nice with repeater when append', inject(function($compile, 
$rootScope) { <add> it('should play nice with repeater when appending', inject(function($compile, $rootScope) { <ide> element = $compile( <ide> '<div>' + <del> '<div ng-repeat="i in [1,2]" append>{{i}}; </div>' + <add> '<div ng-repeat="i in [1,2]" append></div>' + <ide> '</div>')($rootScope); <ide> $rootScope.$digest(); <del> expect(element.text()).toEqual('Hello: 1; Hello: 2; '); <add> expect(element.text()).toEqual('Append!Append!'); <ide> })); <ide> <ide> <ide> describe('$compile', function() { <ide> <ide> beforeEach(module( <ide> function($compileProvider) { <del> $compileProvider.directive('hello', valueFn({ restrict: 'CAM', templateUrl: 'hello.html' })); <del> $compileProvider.directive('cau', valueFn({ restrict: 'CAM', templateUrl:'cau.html' })); <add> $compileProvider.directive('hello', valueFn({ <add> restrict: 'CAM', templateUrl: 'hello.html', transclude: true <add> })); <add> $compileProvider.directive('cau', valueFn({ <add> restrict: 'CAM', templateUrl:'cau.html' <add> })); <ide> <ide> $compileProvider.directive('cError', valueFn({ <ide> restrict: 'CAM', <ide> describe('$compile', function() { <ide> })); <ide> <ide> <del> it('should work when widget is in root element', inject( <add> it('should work when directive is on the root element', inject( <ide> function($compile, $httpBackend, $rootScope) { <del> $httpBackend.expect('GET', 'hello.html').respond('<span>3==<<content>></span>'); <add> $httpBackend.expect('GET', 'hello.html'). <add> respond('<span>3==<span ng-transclude></span></span>'); <ide> element = jqLite('<b class="hello">{{1+2}}</b>'); <ide> $compile(element)($rootScope); <ide> <ide> describe('$compile', function() { <ide> )); <ide> <ide> <del> it('should work when widget is a repeater', inject( <add> it('should work when directive is a repeater', inject( <ide> function($compile, $httpBackend, $rootScope) { <del> $httpBackend.expect('GET', 'hello.html').respond('<span>i=<<content>>;</span>'); <add> $httpBackend.expect('GET', 'hello.html'). 
<add> respond('<span>i=<span ng-transclude></span>;</span>'); <ide> element = jqLite('<div><b class=hello ng-repeat="i in [1,2]">{{i}}</b></div>'); <ide> $compile(element)($rootScope); <ide>
2
Ruby
Ruby
log every sql statement, even when they error
98e001641ff0f859349cd60d270fec972edc16de
<ide><path>activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb <ide> def exec_no_cache(sql, name, binds) <ide> end <ide> <ide> def exec_cache(sql, name, binds) <add> stmt_key = prepare_statement(sql) <add> <ide> log(sql, name, binds) do <del> begin <del> stmt_key = prepare_statement(sql) <del> <del> @connection.send_query_prepared(stmt_key, binds.map { |col, val| <del> type_cast(val, col) <del> }) <del> @connection.block <del> @connection.get_last_result <del> rescue PGError => e <del> # Get the PG code for the failure. Annoyingly, the code for <del> # prepared statements whose return value may have changed is <del> # FEATURE_NOT_SUPPORTED. Check here for more details: <del> # http://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/backend/utils/cache/plancache.c#l573 <del> begin <del> code = e.result.result_error_field(PGresult::PG_DIAG_SQLSTATE) <del> rescue <del> raise e <del> end <del> if FEATURE_NOT_SUPPORTED == code <del> @statements.delete sql_key(sql) <del> retry <del> else <del> raise e <del> end <del> end <add> @connection.send_query_prepared(stmt_key, binds.map { |col, val| <add> type_cast(val, col) <add> }) <add> @connection.block <add> @connection.get_last_result <add> end <add> rescue ActiveRecord::StatementInvalid => e <add> pgerror = e.original_exception <add> <add> # Get the PG code for the failure. Annoyingly, the code for <add> # prepared statements whose return value may have changed is <add> # FEATURE_NOT_SUPPORTED. Check here for more details: <add> # http://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/backend/utils/cache/plancache.c#l573 <add> begin <add> code = pgerror.result.result_error_field(PGresult::PG_DIAG_SQLSTATE) <add> rescue <add> raise e <add> end <add> if FEATURE_NOT_SUPPORTED == code <add> @statements.delete sql_key(sql) <add> retry <add> else <add> raise e <ide> end <ide> end <ide>
1